From 49ac092bc25ee03ede38a8445636ab0ebcf81101 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Thu, 4 Jan 2024 14:03:54 -0500 Subject: [PATCH 01/22] feat: Preset API handler (#421) --- CHANGELOG.rst | 2 + examples/preset.py | 26 ++++++++++ setup.cfg | 1 + src/shillelagh/adapters/api/preset.py | 55 ++++++++++++++++++++ tests/adapters/api/preset_test.py | 73 +++++++++++++++++++++++++++ 5 files changed, 157 insertions(+) create mode 100644 examples/preset.py create mode 100644 src/shillelagh/adapters/api/preset.py create mode 100644 tests/adapters/api/preset_test.py diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 93283f94..524528a6 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ Changelog Next ==== +- Add custom adapter for the Preset API (#421) + Version 1.2.12 - 2023-12-05 =========================== diff --git a/examples/preset.py b/examples/preset.py new file mode 100644 index 00000000..4225f496 --- /dev/null +++ b/examples/preset.py @@ -0,0 +1,26 @@ +""" +A simple example querying the Preset API. 
+""" + +from shillelagh.backends.apsw.db import connect + +if __name__ == "__main__": + connection = connect( + ":memory:", + adapter_kwargs={ + "presetapi": { + # create a token/secret at https://manage.app.preset.io/app/user + "access_token": "XXX", + "access_secret": "YYY", + }, + }, + ) + cursor = connection.cursor() + + SQL = """ + SELECT * FROM + "https://api.app.preset.io/v1/teams/" + LIMIT 1 + """ + for row in cursor.execute(SQL): + print(row) diff --git a/setup.cfg b/setup.cfg index 90f21ecb..674a4059 100644 --- a/setup.cfg +++ b/setup.cfg @@ -159,6 +159,7 @@ shillelagh.adapter = holidaysmemory = shillelagh.adapters.memory.holidays:HolidaysMemory htmltableapi = shillelagh.adapters.api.html_table:HTMLTableAPI pandasmemory = shillelagh.adapters.memory.pandas:PandasMemory + presetapi = shillelagh.adapters.api.preset:PresetAPI s3selectapi = shillelagh.adapters.api.s3select:S3SelectAPI socrataapi = shillelagh.adapters.api.socrata:SocrataAPI systemapi = shillelagh.adapters.api.system:SystemAPI diff --git a/src/shillelagh/adapters/api/preset.py b/src/shillelagh/adapters/api/preset.py new file mode 100644 index 00000000..fce3e5dc --- /dev/null +++ b/src/shillelagh/adapters/api/preset.py @@ -0,0 +1,55 @@ +""" +Simple adapter for the Preset API (https://preset.io/). + +This is a derivation of the generic JSON adapter that handles Preset auth. +""" + +from typing import Any, Optional, cast + +import requests +from yarl import URL + +from shillelagh.adapters.api.generic_json import GenericJSONAPI + + +def get_jwt_token(access_token: str, access_secret: str) -> str: + """ + Get JWT token from access token and access secret. 
+ """ + response = requests.post( + "https://api.app.preset.io/v1/auth/", + json={"name": access_token, "secret": access_secret}, + headers={"Content-Type": "application/json"}, + timeout=60, + ) + response.raise_for_status() + payload = response.json() + return cast(str, payload["payload"]["access_token"]) + + +class PresetAPI(GenericJSONAPI): + """ + Custom JSON adapter that handlers Preset auth. + """ + + default_path = "$.payload[*]" + cache_name = "preset_cache" + + @classmethod + def supports(cls, uri: str, fast: bool = True, **kwargs: Any) -> Optional[bool]: + parsed = URL(uri) + return parsed.scheme in ("http", "https") and parsed.host == "api.app.preset.io" + + def __init__( + self, + uri: str, + path: Optional[str] = None, + access_token: Optional[str] = None, + access_secret: Optional[str] = None, + ): + if access_token is None or access_secret is None: + raise ValueError("access_token and access_secret must be provided") + + jwt_token = get_jwt_token(access_token, access_secret) + request_headers = {"Authorization": f"Bearer {jwt_token}"} + super().__init__(uri, path=path, request_headers=request_headers) diff --git a/tests/adapters/api/preset_test.py b/tests/adapters/api/preset_test.py new file mode 100644 index 00000000..68e6164d --- /dev/null +++ b/tests/adapters/api/preset_test.py @@ -0,0 +1,73 @@ +""" +Test the Preset adapter. +""" + +import re +from datetime import timedelta + +import pytest +from pytest_mock import MockerFixture +from requests_mock.mocker import Mocker + +from shillelagh.adapters.api.preset import PresetAPI +from shillelagh.backends.apsw.db import connect + +DO_NOT_CACHE = timedelta(seconds=-1) + + +def test_preset(mocker: MockerFixture, requests_mock: Mocker) -> None: + """ + Test a simple query. 
+ """ + mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) + + # for datassette + requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) + + requests_mock.post( + "https://api.app.preset.io/v1/auth/", + json={"payload": {"access_token": "SECRET"}}, + ) + requests_mock.get( + "https://api.app.preset.io/v1/teams/", + json={"payload": [{"id": 1, "name": "Team 1"}]}, + ) + + connection = connect( + ":memory:", + adapter_kwargs={ + "presetapi": { + "access_token": "XXX", + "access_secret": "YYY", + }, + }, + ) + cursor = connection.cursor() + + sql = 'SELECT * FROM "https://api.app.preset.io/v1/teams/"' + rows = list(cursor.execute(sql)) + assert rows == [(1, "Team 1")] + + +def test_preset_missing_token(mocker: MockerFixture) -> None: + """ + Test a simple query. + """ + mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) + + connection = connect(":memory:") + cursor = connection.cursor() + + sql = 'SELECT * FROM "https://api.app.preset.io/v1/teams/"' + with pytest.raises(ValueError) as exc_info: + cursor.execute(sql) + assert str(exc_info.value) == "access_token and access_secret must be provided" + + +def test_supports() -> None: + """ + Test the ``supports`` method. 
+ """ + assert PresetAPI.supports("/etc/password") is False + assert PresetAPI.supports("https://example.org/data.html") is False + assert PresetAPI.supports("https://api.app.preset.io/v1/teams/") is True From d227ddbdafa9f5e689bd6aa866cce34cdb68842c Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Thu, 4 Jan 2024 14:05:33 -0500 Subject: [PATCH 02/22] chore: release 1.2.13 (#422) --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 524528a6..55292134 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,9 @@ Changelog Next ==== +Version 1.2.13 - 2024-01-04 +=========================== + - Add custom adapter for the Preset API (#421) Version 1.2.12 - 2023-12-05 From ba84a1b632301e04653c6aa73179ddc0d509244b Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Fri, 5 Jan 2024 12:11:14 -0500 Subject: [PATCH 03/22] fix: allow Preset handler to query workspaces (#423) --- CHANGELOG.rst | 2 ++ src/shillelagh/adapters/api/preset.py | 4 +++- tests/adapters/api/preset_test.py | 8 ++++++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 55292134..65ef72f3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ Changelog Next ==== +- Preset adapter can now query workspaces (#422) + Version 1.2.13 - 2024-01-04 =========================== diff --git a/src/shillelagh/adapters/api/preset.py b/src/shillelagh/adapters/api/preset.py index fce3e5dc..5ed258f8 100644 --- a/src/shillelagh/adapters/api/preset.py +++ b/src/shillelagh/adapters/api/preset.py @@ -38,7 +38,9 @@ class PresetAPI(GenericJSONAPI): @classmethod def supports(cls, uri: str, fast: bool = True, **kwargs: Any) -> Optional[bool]: parsed = URL(uri) - return parsed.scheme in ("http", "https") and parsed.host == "api.app.preset.io" + return parsed.scheme in ("http", "https") and ( + parsed.host == "preset.io" or parsed.host.endswith(".preset.io") + ) def __init__( self, diff --git 
a/tests/adapters/api/preset_test.py b/tests/adapters/api/preset_test.py index 68e6164d..06ba42e7 100644 --- a/tests/adapters/api/preset_test.py +++ b/tests/adapters/api/preset_test.py @@ -71,3 +71,11 @@ def test_supports() -> None: assert PresetAPI.supports("/etc/password") is False assert PresetAPI.supports("https://example.org/data.html") is False assert PresetAPI.supports("https://api.app.preset.io/v1/teams/") is True + assert ( + PresetAPI.supports( + "https://db64ce64.us1a.app-sdx.preset.io/sqllab/?savedQueryId=1", + ) + is True + ) + assert PresetAPI.supports("https://preset.io/") is True + assert PresetAPI.supports("https://phishingpreset.io/") is False From 973ee00d77b883a3522ae67c556ae2075a810afd Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Fri, 5 Jan 2024 12:12:46 -0500 Subject: [PATCH 04/22] chore: release 1.2.14 (#424) --- CHANGELOG.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 65ef72f3..265b70d6 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,7 +5,10 @@ Changelog Next ==== -- Preset adapter can now query workspaces (#422) +Version 1.2.14 - 2024-01-05 +=========================== + +- Preset adapter can now query workspaces (#423) Version 1.2.13 - 2024-01-04 =========================== From 5f983c9a264484c7ac414be08ea61fb98f6432e9 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Tue, 13 Feb 2024 16:45:28 -0500 Subject: [PATCH 05/22] feat(preset): improvements to handler (#427) * feat(preset): improvements to handler * Handle pagination, limit, and offset * Update docs * Fix bug --- CHANGELOG.rst | 2 + docs/adapters.rst | 28 +++ examples/preset.py | 18 +- setup.cfg | 1 + src/shillelagh/adapters/api/preset.py | 145 +++++++++++++++- tests/adapters/api/preset_test.py | 238 +++++++++++++++++++++++++- 6 files changed, 418 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 265b70d6..2a2ecb27 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ 
Changelog Next ==== +- Preset adapter now handles pagination, offset and limit (#427) + Version 1.2.14 - 2024-01-05 =========================== diff --git a/docs/adapters.rst b/docs/adapters.rst index 5f7b7798..b3d40db7 100644 --- a/docs/adapters.rst +++ b/docs/adapters.rst @@ -475,3 +475,31 @@ The generic XML adapter is based on the generic JSON; the only difference is tha Would get mapped to two columns, ``foo`` and ``baz``, with values ``bar`` and ``{"qux": "quux"}`` respectively. + +Preset (https://preset.io) +========================== + +There are two adapters based on the generic JSON adapter that are specific to `Preset `_. They handle authentication and pagination of the APIs, so they're more efficient than the generic one. + +To configure, you need an access token and secret: + +.. code-block:: python + + from shillelagh.backends.apsw.db import connect + + connection = connect( + ":memory:", + # create tokens/secrets at https://manage.app.preset.io/app/user + adapter_kwargs={ + "presetapi": { + "access_token": "", + "access_secret": "", + }, + "presetworkspaceapi": { + "access_token": "", + "access_secret": "", + }, + }, + ) + +The token and secret should normally be the same, but because the workspace API is slightly different from the main Preset API they were implemented as different adapters. 
diff --git a/examples/preset.py b/examples/preset.py index 4225f496..16a77dcf 100644 --- a/examples/preset.py +++ b/examples/preset.py @@ -7,11 +7,15 @@ if __name__ == "__main__": connection = connect( ":memory:", + # create tokens/secrets at https://manage.app.preset.io/app/user adapter_kwargs={ "presetapi": { - # create a token/secret at https://manage.app.preset.io/app/user - "access_token": "XXX", - "access_secret": "YYY", + "access_token": "", + "access_secret": "", + }, + "presetworkspaceapi": { + "access_token": "", + "access_secret": "", }, }, ) @@ -24,3 +28,11 @@ """ for row in cursor.execute(SQL): print(row) + + SQL = """ + SELECT * FROM + "https://d90230ca.us1a.app-sdx.preset.io/api/v1/chart/" + LIMIT 12 + """ + for row in cursor.execute(SQL): + print(row) diff --git a/setup.cfg b/setup.cfg index 674a4059..535bfe37 100644 --- a/setup.cfg +++ b/setup.cfg @@ -160,6 +160,7 @@ shillelagh.adapter = htmltableapi = shillelagh.adapters.api.html_table:HTMLTableAPI pandasmemory = shillelagh.adapters.memory.pandas:PandasMemory presetapi = shillelagh.adapters.api.preset:PresetAPI + presetworkspaceapi = shillelagh.adapters.api.preset:PresetWorkspaceAPI s3selectapi = shillelagh.adapters.api.s3select:S3SelectAPI socrataapi = shillelagh.adapters.api.socrata:SocrataAPI systemapi = shillelagh.adapters.api.system:SystemAPI diff --git a/src/shillelagh/adapters/api/preset.py b/src/shillelagh/adapters/api/preset.py index 5ed258f8..14cb9691 100644 --- a/src/shillelagh/adapters/api/preset.py +++ b/src/shillelagh/adapters/api/preset.py @@ -4,20 +4,37 @@ This is a derivation of the generic JSON adapter that handles Preset auth. 
""" -from typing import Any, Optional, cast +import logging +import re +from typing import Any, Dict, Iterator, List, Optional, Set, Tuple, cast +import prison import requests +from jsonpath import JSONPath from yarl import URL from shillelagh.adapters.api.generic_json import GenericJSONAPI +from shillelagh.exceptions import ProgrammingError +from shillelagh.fields import Order +from shillelagh.filters import Filter +from shillelagh.lib import analyze, flatten +from shillelagh.typing import RequestedOrder, Row +_logger = logging.getLogger(__name__) -def get_jwt_token(access_token: str, access_secret: str) -> str: +MAX_PAGE_SIZE = 100 + + +def get_jwt_token(uri: str, access_token: str, access_secret: str) -> str: """ Get JWT token from access token and access secret. """ + parsed = URL(uri) + environment = parsed.host.split(".")[-3] + api_uri = f"https://api.{environment}.preset.io/v1/auth/" + response = requests.post( - "https://api.app.preset.io/v1/auth/", + api_uri, json={"name": access_token, "secret": access_secret}, headers={"Content-Type": "application/json"}, timeout=60, @@ -29,7 +46,7 @@ def get_jwt_token(access_token: str, access_secret: str) -> str: class PresetAPI(GenericJSONAPI): """ - Custom JSON adapter that handlers Preset auth. + Custom JSON adapter that handles Preset auth. 
""" default_path = "$.payload[*]" @@ -38,8 +55,9 @@ class PresetAPI(GenericJSONAPI): @classmethod def supports(cls, uri: str, fast: bool = True, **kwargs: Any) -> Optional[bool]: parsed = URL(uri) - return parsed.scheme in ("http", "https") and ( - parsed.host == "preset.io" or parsed.host.endswith(".preset.io") + return ( + parsed.scheme in ("http", "https") + and re.match(r"api\.app(-\w+)?\.preset\.io", parsed.host) is not None ) def __init__( @@ -52,6 +70,119 @@ def __init__( if access_token is None or access_secret is None: raise ValueError("access_token and access_secret must be provided") - jwt_token = get_jwt_token(access_token, access_secret) + jwt_token = get_jwt_token(uri, access_token, access_secret) request_headers = {"Authorization": f"Bearer {jwt_token}"} super().__init__(uri, path=path, request_headers=request_headers) + + +def get_urls( + resource_url: str, + offset: Optional[int] = None, + limit: Optional[int] = None, + page_size: int = MAX_PAGE_SIZE, +) -> Iterator[Tuple[str, slice]]: + """ + Get all paginated URLs to download data from together with a limit/offset slice. 
+ """ + start = offset or 0 + stop = start + limit if limit is not None else None + + baseurl = URL(resource_url) + query = baseurl.query.get("q", "()") + try: + params = prison.loads(query) + except Exception: # pylint: disable=broad-except + yield str(baseurl), slice(start, stop) + return + + # assume the user knows better and keep the URL unmodified + if "page" in params or "page_size" in params: + yield str(baseurl), slice(start, stop) + return + + page = start // page_size + start = start % page_size + remaining = limit if limit is not None else float("inf") + while True: + params["page"] = page + params["page_size"] = min(start + remaining, page_size) + yield str(baseurl.with_query({"q": prison.dumps(params)})), slice(start, None) + + remaining -= page_size - start + if remaining <= 0: + break + + page += 1 + start = 0 + + +class PresetWorkspaceAPI(PresetAPI): + """ + Adapter for Preset workspaces. + """ + + supports_limit = True + supports_offset = True + + default_path = "$.result[*]" + cache_name = "preset_cache" + + @classmethod + def supports(cls, uri: str, fast: bool = True, **kwargs: Any) -> Optional[bool]: + parsed = URL(uri) + return ( + parsed.scheme in ("http", "https") + and parsed.host != "api.app.preset.io" + and parsed.host.endswith(".preset.io") + ) + + def _set_columns(self) -> None: + # request only a single page of results to infer schema + rows = list(self.get_data({}, [], limit=MAX_PAGE_SIZE)) + column_names = list(rows[0].keys()) if rows else [] + + _, order, types = analyze(iter(rows)) + + self.columns = { + column_name: types[column_name]( + filters=[], + order=order.get(column_name, Order.NONE), + exact=False, + ) + for column_name in column_names + if column_name != "rowid" + } + + def get_data( # pylint: disable=unused-argument, too-many-arguments, too-many-locals + self, + bounds: Dict[str, Filter], + order: List[Tuple[str, RequestedOrder]], + limit: Optional[int] = None, + offset: Optional[int] = None, + requested_columns: 
Optional[Set[str]] = None, + **kwargs: Any, + ) -> Iterator[Row]: + for url, slice_ in get_urls(self.uri, offset, limit, MAX_PAGE_SIZE): + response = self._session.get(str(url)) + payload = response.json() + if not response.ok: + messages = "\n".join( + error.get("message", str(error)) + for error in payload.get("errors", []) + ) + raise ProgrammingError(f"Error: {messages}") + + parser = JSONPath(self.path) + rows = parser.parse(payload)[slice_] + if not rows: + break + + for i, row in enumerate(rows): + row = { + k: v + for k, v in (row or {}).items() + if requested_columns is None or k in requested_columns + } + row["rowid"] = i + _logger.debug(row) + yield flatten(row) diff --git a/tests/adapters/api/preset_test.py b/tests/adapters/api/preset_test.py index 06ba42e7..a3f236fb 100644 --- a/tests/adapters/api/preset_test.py +++ b/tests/adapters/api/preset_test.py @@ -9,8 +9,9 @@ from pytest_mock import MockerFixture from requests_mock.mocker import Mocker -from shillelagh.adapters.api.preset import PresetAPI +from shillelagh.adapters.api.preset import PresetAPI, PresetWorkspaceAPI, get_urls from shillelagh.backends.apsw.db import connect +from shillelagh.exceptions import ProgrammingError DO_NOT_CACHE = timedelta(seconds=-1) @@ -71,11 +72,240 @@ def test_supports() -> None: assert PresetAPI.supports("/etc/password") is False assert PresetAPI.supports("https://example.org/data.html") is False assert PresetAPI.supports("https://api.app.preset.io/v1/teams/") is True + assert PresetAPI.supports("https://api.appxpreset.io/v1/teams/") is False + assert PresetAPI.supports("https://api.app-sdx.preset.io/v1/teams/") is True assert ( PresetAPI.supports( - "https://db64ce64.us1a.app-sdx.preset.io/sqllab/?savedQueryId=1", + "https://abcdef01.us1a.app.preset.io/sqllab/?savedQueryId=1", + ) + is False + ) + + assert PresetWorkspaceAPI.supports("https://api.app.preset.io/v1/teams/") is False + assert ( + PresetWorkspaceAPI.supports( + 
"https://abcdef01.us1a.app.preset.io/sqllab/?savedQueryId=1", ) is True ) - assert PresetAPI.supports("https://preset.io/") is True - assert PresetAPI.supports("https://phishingpreset.io/") is False + assert ( + PresetWorkspaceAPI.supports( + "https://abcdef01.us1a.app-sdx.preset.io/sqllab/?savedQueryId=1", + ) + is True + ) + + +def test_get_urls() -> None: + """ + Test the ``get_urls`` function. + """ + gen = get_urls( + "https://abcdef01.us1a.app-sdx.preset.io/api/v1/chart/", + offset=45, + limit=50, + page_size=42, + ) + + url, slice_ = next(gen) + assert ( + url + == "https://abcdef01.us1a.app-sdx.preset.io/api/v1/chart/?q=(page:1,page_size:42)" + ) + assert slice_.start == 3 + url, slice_ = next(gen) + assert ( + url + == "https://abcdef01.us1a.app-sdx.preset.io/api/v1/chart/?q=(page:2,page_size:11)" + ) + assert slice_.start == 0 + with pytest.raises(StopIteration): + next(gen) + + +def test_get_urls_unable_to_parse() -> None: + """ + Test the ``get_urls`` function when the URL query can't be parsed. + """ + + gen = get_urls("https://example.org/?q=(((") + assert next(gen)[0] == "https://example.org/?q=(((" + with pytest.raises(StopIteration): + next(gen) + + +def test_get_urls_with_page_parameters() -> None: + """ + Test the ``get_urls`` function when the URL already has page parameters. + """ + + gen = get_urls("https://example.org/?q=(page:0,page_size:42)") + assert next(gen)[0] == "https://example.org/?q=(page:0,page_size:42)" + with pytest.raises(StopIteration): + next(gen) + + +def test_preset_workspace(mocker: MockerFixture, requests_mock: Mocker) -> None: + """ + Test a simple query to a Preset workspace. 
+ """ + mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) + + # for datassette + requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) + + requests_mock.post( + "https://api.app.preset.io/v1/auth/", + json={"payload": {"access_token": "SECRET"}}, + ) + requests_mock.get( + "https://abcdef01.us1a.app.preset.io/api/v1/chart/?q=(page:0,page_size:100)", + json={"result": [{"id": 1, "slice_name": "Team 1"}]}, + ) + requests_mock.get( + "https://abcdef01.us1a.app.preset.io/api/v1/chart/?q=(page:1,page_size:100)", + json={"result": []}, + ) + + connection = connect( + ":memory:", + adapter_kwargs={ + "presetworkspaceapi": { + "access_token": "XXX", + "access_secret": "YYY", + }, + }, + ) + cursor = connection.cursor() + + sql = 'SELECT * FROM "https://abcdef01.us1a.app.preset.io/api/v1/chart/"' + rows = list(cursor.execute(sql)) + assert rows == [(1, "Team 1")] + + +def test_preset_workspace_pagination( + mocker: MockerFixture, + requests_mock: Mocker, +) -> None: + """ + Test pagination in a query to a Preset workspace. 
+ """ + mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) + + # for datassette + requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) + + requests_mock.post( + "https://api.app.preset.io/v1/auth/", + json={"payload": {"access_token": "SECRET"}}, + ) + requests_mock.get( + "https://abcdef01.us1a.app.preset.io/api/v1/chart/?q=(page:0,page_size:100)", + json={ + "result": [{"id": i + 1, "slice_name": f"Team {i+1}"} for i in range(100)], + }, + ) + requests_mock.get( + "https://abcdef01.us1a.app.preset.io/api/v1/chart/?q=(page:1,page_size:3)", + json={ + "result": [ + {"id": i + 101, "slice_name": f"Team {i+101}"} for i in range(3) + ], + }, + ) + + connection = connect( + ":memory:", + adapter_kwargs={ + "presetworkspaceapi": { + "access_token": "XXX", + "access_secret": "YYY", + }, + }, + ) + cursor = connection.cursor() + + sql = 'SELECT * FROM "https://abcdef01.us1a.app.preset.io/api/v1/chart/" LIMIT 5 OFFSET 98' + rows = list(cursor.execute(sql)) + assert rows == [ + (99, "Team 99"), + (100, "Team 100"), + (101, "Team 101"), + (102, "Team 102"), + (103, "Team 103"), + ] + + +def test_preset_workspace_error(mocker: MockerFixture, requests_mock: Mocker) -> None: + """ + Test error handling when accessing a workspace API. + """ + mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) + + # for datassette + requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) + + requests_mock.post( + "https://api.app.preset.io/v1/auth/", + json={"payload": {"access_token": "SECRET"}}, + ) + requests_mock.get( + "https://abcdef01.us1a.app.preset.io/api/v1/chart/?q=(page:0,page_size:100)", + json={ + "errors": [ + { + "message": "Your session has expired. 
Please refresh the page to sign in.", + "error_type": "GENERIC_BACKEND_ERROR", + "level": "error", + "extra": { + "issue_codes": [ + { + "code": 1011, + "message": "Issue 1011 - Superset encountered an unexpected error.", + }, + ], + }, + }, + ], + }, + status_code=500, + ) + + connection = connect( + ":memory:", + adapter_kwargs={ + "presetworkspaceapi": { + "access_token": "XXX", + "access_secret": "YYY", + }, + }, + ) + cursor = connection.cursor() + + sql = 'SELECT * FROM "https://abcdef01.us1a.app.preset.io/api/v1/chart/"' + with pytest.raises(ProgrammingError) as excinfo: + cursor.execute(sql) + assert ( + str(excinfo.value) + == "Error: Your session has expired. Please refresh the page to sign in." + ) + + +def test_preset_workspace_no_urls(mocker: MockerFixture, requests_mock: Mocker) -> None: + """ + Test when no URLs are returned. + """ + mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) + mocker.patch("shillelagh.adapters.api.preset.get_urls", return_value=[]) + + requests_mock.post( + "https://api.app.preset.io/v1/auth/", + json={"payload": {"access_token": "SECRET"}}, + ) + + adapter = PresetWorkspaceAPI( + "https://abcdef01.us1a.app.preset.io/api/v1/chart/", + access_token="XXX", + access_secret="YYY", + ) + assert list(adapter.get_data({}, [])) == [] From 9883537f463e1c819e5dbece5534a35085cd42bc Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Tue, 13 Feb 2024 16:48:50 -0500 Subject: [PATCH 06/22] chore: release 1.2.15 (#428) --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 2a2ecb27..d56c2c7b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,9 @@ Changelog Next ==== +Version 1.2.15 - 2024-02-13 +=========================== + - Preset adapter now handles pagination, offset and limit (#427) Version 1.2.14 - 2024-01-05 From d07450f11c9ceb90786ce56c9a73aae9250b140b Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Wed, 14 Feb 2024 12:28:29 -0500 
Subject: [PATCH 07/22] feat: allow defining the cache timeout (#429) --- CHANGELOG.rst | 2 + src/shillelagh/adapters/api/generic_json.py | 13 +++++- src/shillelagh/adapters/api/preset.py | 14 +++++-- tests/adapters/api/generic_json_test.py | 45 +++++++++++---------- tests/adapters/api/generic_xml_test.py | 13 +++--- tests/adapters/api/preset_test.py | 37 +++++++---------- 6 files changed, 66 insertions(+), 58 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d56c2c7b..8356359b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ Changelog Next ==== +- Allow for custom expiration time in the generic JSON/XML adapters (#429) + Version 1.2.15 - 2024-02-13 =========================== diff --git a/src/shillelagh/adapters/api/generic_json.py b/src/shillelagh/adapters/api/generic_json.py index 57dc8198..3d73f1d3 100644 --- a/src/shillelagh/adapters/api/generic_json.py +++ b/src/shillelagh/adapters/api/generic_json.py @@ -59,7 +59,15 @@ def supports(cls, uri: str, fast: bool = True, **kwargs: Any) -> Optional[bool]: else: request_headers = kwargs.get("request_headers", {}) - session = get_session(request_headers, cls.cache_name, CACHE_EXPIRATION) + cache_expiration = kwargs.get( + "cache_expiration", + CACHE_EXPIRATION.total_seconds(), + ) + session = get_session( + request_headers, + cls.cache_name, + timedelta(seconds=cache_expiration), + ) response = session.head(str(parsed)) return cls.content_type in response.headers.get("content-type", "") @@ -87,6 +95,7 @@ def __init__( uri: str, path: Optional[str] = None, request_headers: Optional[Dict[str, str]] = None, + cache_expiration: float = CACHE_EXPIRATION.total_seconds(), ): super().__init__() @@ -96,7 +105,7 @@ def __init__( self._session = get_session( request_headers or {}, self.cache_name, - CACHE_EXPIRATION, + timedelta(seconds=cache_expiration), ) self._set_columns() diff --git a/src/shillelagh/adapters/api/preset.py b/src/shillelagh/adapters/api/preset.py index 14cb9691..1f4de582 100644 --- 
a/src/shillelagh/adapters/api/preset.py +++ b/src/shillelagh/adapters/api/preset.py @@ -13,7 +13,7 @@ from jsonpath import JSONPath from yarl import URL -from shillelagh.adapters.api.generic_json import GenericJSONAPI +from shillelagh.adapters.api.generic_json import CACHE_EXPIRATION, GenericJSONAPI from shillelagh.exceptions import ProgrammingError from shillelagh.fields import Order from shillelagh.filters import Filter @@ -60,19 +60,25 @@ def supports(cls, uri: str, fast: bool = True, **kwargs: Any) -> Optional[bool]: and re.match(r"api\.app(-\w+)?\.preset\.io", parsed.host) is not None ) - def __init__( + def __init__( # pylint: disable=too-many-arguments self, uri: str, path: Optional[str] = None, access_token: Optional[str] = None, access_secret: Optional[str] = None, + cache_expiration: float = CACHE_EXPIRATION.total_seconds(), ): if access_token is None or access_secret is None: raise ValueError("access_token and access_secret must be provided") jwt_token = get_jwt_token(uri, access_token, access_secret) request_headers = {"Authorization": f"Bearer {jwt_token}"} - super().__init__(uri, path=path, request_headers=request_headers) + super().__init__( + uri, + path=path, + request_headers=request_headers, + cache_expiration=cache_expiration, + ) def get_urls( @@ -82,7 +88,7 @@ def get_urls( page_size: int = MAX_PAGE_SIZE, ) -> Iterator[Tuple[str, slice]]: """ - Get all paginated URLs to download data from together with a limit/offset slice. + Get all paginated URLs to download data together with a limit/offset slice. 
""" start = offset or 0 stop = start + limit if limit is not None else None diff --git a/tests/adapters/api/generic_json_test.py b/tests/adapters/api/generic_json_test.py index 351d383e..b0774b63 100644 --- a/tests/adapters/api/generic_json_test.py +++ b/tests/adapters/api/generic_json_test.py @@ -3,10 +3,8 @@ """ import re -from datetime import timedelta import pytest -from pytest_mock import MockerFixture from requests_mock.mocker import Mocker from yarl import URL @@ -15,17 +13,13 @@ from shillelagh.exceptions import ProgrammingError from shillelagh.typing import Maybe -DO_NOT_CACHE = timedelta(seconds=-1) - baseurl = URL("https://api.stlouisfed.org/fred/series") -def test_generic_json(mocker: MockerFixture, requests_mock: Mocker) -> None: +def test_generic_json(requests_mock: Mocker) -> None: """ Test a simple query. """ - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) - # for datassette requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) @@ -63,7 +57,10 @@ def test_generic_json(mocker: MockerFixture, requests_mock: Mocker) -> None: }, ) - connection = connect(":memory:") + connection = connect( + ":memory:", + adapter_kwargs={"genericjsonapi": {"cache_expiration": -1}}, + ) cursor = connection.cursor() sql = f'SELECT * FROM "{url}"' @@ -110,15 +107,10 @@ def test_generic_json(mocker: MockerFixture, requests_mock: Mocker) -> None: assert str(excinfo.value) == "Error: An error occurred" -def test_generic_json_complex_type( - mocker: MockerFixture, - requests_mock: Mocker, -) -> None: +def test_generic_json_complex_type(requests_mock: Mocker) -> None: """ Test a query where columns are complex. 
""" - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) - # for datassette and other probing adapters requests_mock.head("https://example.org/-/versions.json", status_code=404) @@ -134,7 +126,10 @@ def test_generic_json_complex_type( ], ) - connection = connect(":memory:") + connection = connect( + ":memory:", + adapter_kwargs={"genericjsonapi": {"cache_expiration": -1}}, + ) cursor = connection.cursor() sql = f'SELECT * FROM "{url}"' @@ -161,11 +156,10 @@ def test_supports(requests_mock: Mocker) -> None: assert GenericJSONAPI.supports("https://example.org/data.json", fast=False) is True -def test_request_headers(mocker: MockerFixture, requests_mock: Mocker) -> None: +def test_request_headers(requests_mock: Mocker) -> None: """ Test passing requests headers. """ - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) supports = requests_mock.head( "https://example.org/data.json", headers={"content-type": "application/json"}, @@ -191,12 +185,18 @@ def test_request_headers(mocker: MockerFixture, requests_mock: Mocker) -> None: "https://example.org/data.json", fast=False, request_headers={"foo": "bar"}, + cache_expiration=-1, ) assert supports.last_request.headers["foo"] == "bar" connection = connect( ":memory:", - adapter_kwargs={"genericjsonapi": {"request_headers": {"foo": "bar"}}}, + adapter_kwargs={ + "genericjsonapi": { + "request_headers": {"foo": "bar"}, + "cache_expiration": -1, + }, + }, ) cursor = connection.cursor() @@ -206,11 +206,10 @@ def test_request_headers(mocker: MockerFixture, requests_mock: Mocker) -> None: assert data.last_request.headers["foo"] == "bar" -def test_request_headers_in_url(mocker: MockerFixture, requests_mock: Mocker) -> None: +def test_request_headers_in_url(requests_mock: Mocker) -> None: """ Test passing requests headers. 
""" - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) supports = requests_mock.head( "https://example.org/data.json", headers={"content-type": "application/json"}, @@ -235,10 +234,14 @@ def test_request_headers_in_url(mocker: MockerFixture, requests_mock: Mocker) -> GenericJSONAPI.supports( "https://example.org/data.json?_s_headers=(foo:bar)", fast=False, + cache_expiration=-1, ) assert supports.last_request.headers["foo"] == "bar" - connection = connect(":memory:") + connection = connect( + ":memory:", + adapter_kwargs={"genericjsonapi": {"cache_expiration": -1}}, + ) cursor = connection.cursor() sql = 'SELECT * FROM "https://example.org/?_s_headers=(foo:bar)"' diff --git a/tests/adapters/api/generic_xml_test.py b/tests/adapters/api/generic_xml_test.py index c499297e..fef617ce 100644 --- a/tests/adapters/api/generic_xml_test.py +++ b/tests/adapters/api/generic_xml_test.py @@ -4,10 +4,8 @@ import re import xml.etree.ElementTree as ET -from datetime import timedelta import pytest -from pytest_mock import MockerFixture from requests_mock.mocker import Mocker from yarl import URL @@ -15,8 +13,6 @@ from shillelagh.backends.apsw.db import connect from shillelagh.exceptions import ProgrammingError -DO_NOT_CACHE = timedelta(seconds=-1) - baseurl = URL("https://api.congress.gov/v3/bill/118") @@ -41,12 +37,10 @@ def test_element_to_dict() -> None: } -def test_generic_xml(mocker: MockerFixture, requests_mock: Mocker) -> None: +def test_generic_xml(requests_mock: Mocker) -> None: """ Test a simple query. 
""" - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) - # for datassette requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) @@ -160,7 +154,10 @@ def test_generic_xml(mocker: MockerFixture, requests_mock: Mocker) -> None: """, ) - connection = connect(":memory:") + connection = connect( + ":memory:", + adapter_kwargs={"genericxmlapi": {"cache_expiration": -1}}, + ) cursor = connection.cursor() sql = f'SELECT congress, type, latestAction FROM "{url}"' diff --git a/tests/adapters/api/preset_test.py b/tests/adapters/api/preset_test.py index a3f236fb..9802f01c 100644 --- a/tests/adapters/api/preset_test.py +++ b/tests/adapters/api/preset_test.py @@ -3,7 +3,6 @@ """ import re -from datetime import timedelta import pytest from pytest_mock import MockerFixture @@ -13,15 +12,11 @@ from shillelagh.backends.apsw.db import connect from shillelagh.exceptions import ProgrammingError -DO_NOT_CACHE = timedelta(seconds=-1) - -def test_preset(mocker: MockerFixture, requests_mock: Mocker) -> None: +def test_preset(requests_mock: Mocker) -> None: """ Test a simple query. """ - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) - # for datassette requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) @@ -40,6 +35,7 @@ def test_preset(mocker: MockerFixture, requests_mock: Mocker) -> None: "presetapi": { "access_token": "XXX", "access_secret": "YYY", + "cache_expiration": -1, }, }, ) @@ -50,13 +46,14 @@ def test_preset(mocker: MockerFixture, requests_mock: Mocker) -> None: assert rows == [(1, "Team 1")] -def test_preset_missing_token(mocker: MockerFixture) -> None: +def test_preset_missing_token() -> None: """ Test a simple query. 
""" - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) - - connection = connect(":memory:") + connection = connect( + ":memory:", + adapter_kwargs={"presetapi": {"cache_expiration": -1}}, + ) cursor = connection.cursor() sql = 'SELECT * FROM "https://api.app.preset.io/v1/teams/"' @@ -145,12 +142,10 @@ def test_get_urls_with_page_parameters() -> None: next(gen) -def test_preset_workspace(mocker: MockerFixture, requests_mock: Mocker) -> None: +def test_preset_workspace(requests_mock: Mocker) -> None: """ Test a simple query to a Preset workspace. """ - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) - # for datassette requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) @@ -173,6 +168,7 @@ def test_preset_workspace(mocker: MockerFixture, requests_mock: Mocker) -> None: "presetworkspaceapi": { "access_token": "XXX", "access_secret": "YYY", + "cache_expiration": -1, }, }, ) @@ -183,15 +179,10 @@ def test_preset_workspace(mocker: MockerFixture, requests_mock: Mocker) -> None: assert rows == [(1, "Team 1")] -def test_preset_workspace_pagination( - mocker: MockerFixture, - requests_mock: Mocker, -) -> None: +def test_preset_workspace_pagination(requests_mock: Mocker) -> None: """ Test pagination in a query to a Preset workspace. """ - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) - # for datassette requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) @@ -220,6 +211,7 @@ def test_preset_workspace_pagination( "presetworkspaceapi": { "access_token": "XXX", "access_secret": "YYY", + "cache_expiration": -1, }, }, ) @@ -236,12 +228,10 @@ def test_preset_workspace_pagination( ] -def test_preset_workspace_error(mocker: MockerFixture, requests_mock: Mocker) -> None: +def test_preset_workspace_error(requests_mock: Mocker) -> None: """ Test error handling when accessing a workspace API. 
""" - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) - # for datassette requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) @@ -277,6 +267,7 @@ def test_preset_workspace_error(mocker: MockerFixture, requests_mock: Mocker) -> "presetworkspaceapi": { "access_token": "XXX", "access_secret": "YYY", + "cache_expiration": -1, }, }, ) @@ -295,7 +286,6 @@ def test_preset_workspace_no_urls(mocker: MockerFixture, requests_mock: Mocker) """ Test when no URLs are returned. """ - mocker.patch("shillelagh.adapters.api.generic_json.CACHE_EXPIRATION", DO_NOT_CACHE) mocker.patch("shillelagh.adapters.api.preset.get_urls", return_value=[]) requests_mock.post( @@ -307,5 +297,6 @@ def test_preset_workspace_no_urls(mocker: MockerFixture, requests_mock: Mocker) "https://abcdef01.us1a.app.preset.io/api/v1/chart/", access_token="XXX", access_secret="YYY", + cache_expiration=-1, ) assert list(adapter.get_data({}, [])) == [] From f796e363bf3bb4547d23a1fe413a1e4dc277b034 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Johannes=20K=C3=B6rner?= Date: Thu, 15 Feb 2024 14:39:47 +0100 Subject: [PATCH 08/22] fix(gsheets): handle getting values from rows in wide tables (#426) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix get_values_from_row for wide tables Previously, get_values_from_row used max() to get the "highest index" column. However, max("AA") < max("Z") which leads to problems down the road. 
* updated to test compatibility with wide tables as well * moved to generator * ignore mypy typing error * Update AUTHORS.rst Add Johannes Körner --- AUTHORS.rst | 1 + src/shillelagh/adapters/api/gsheets/lib.py | 2 +- tests/adapters/api/gsheets/lib_test.py | 4 ++-- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/AUTHORS.rst b/AUTHORS.rst index 6cef163d..8b85aa5b 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -10,3 +10,4 @@ Contributors * Alex Rothberg * Elias Sebbar * Arash Afghahi +* Johannes Körner diff --git a/src/shillelagh/adapters/api/gsheets/lib.py b/src/shillelagh/adapters/api/gsheets/lib.py index bd943060..4ec1c6ce 100644 --- a/src/shillelagh/adapters/api/gsheets/lib.py +++ b/src/shillelagh/adapters/api/gsheets/lib.py @@ -206,7 +206,7 @@ def get_values_from_row(row: Row, column_map: Dict[str, str]) -> List[Any]: >>> get_values_from_row(row, column_map) ['BR', '', 10] """ - n_cols = get_index_from_letters(max(column_map.values())) + 1 + n_cols = max(get_index_from_letters(val) for val in column_map.values()) + 1 row = {column_map[k]: v for k, v in row.items() if k in column_map} return [row.get(column, "") for column in itertools.islice(gen_letters(), n_cols)] diff --git a/tests/adapters/api/gsheets/lib_test.py b/tests/adapters/api/gsheets/lib_test.py index 6f7257c5..7370b133 100644 --- a/tests/adapters/api/gsheets/lib_test.py +++ b/tests/adapters/api/gsheets/lib_test.py @@ -270,9 +270,9 @@ def test_get_values_from_row() -> None: """ Test ``get_values_from_row``. 
""" - column_map = {"country": "A", "cnt": "C"} + column_map = {"country": "Z", "cnt": "AB"} row = {"country": "BR", "cnt": 10} - assert get_values_from_row(row, column_map) == ["BR", "", 10] + assert get_values_from_row(row, column_map) == 25 * [""] + ["BR", "", 10] # type: ignore def test_get_credentials(mocker: MockerFixture): From f40fa604919402e4b6cd45d7485bc380442334e5 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Thu, 22 Feb 2024 15:22:42 -0500 Subject: [PATCH 09/22] fix: use better JSONPath library (#431) --- CHANGELOG.rst | 1 + requirements/test.txt | 4 +-- setup.cfg | 8 ++--- src/shillelagh/adapters/api/generic_json.py | 5 ++-- src/shillelagh/adapters/api/github.py | 6 ++-- src/shillelagh/adapters/api/preset.py | 5 ++-- tests/adapters/api/generic_json_test.py | 33 +++++++++++++++++++++ 7 files changed, 47 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 8356359b..98e3f78a 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,6 +6,7 @@ Next ==== - Allow for custom expiration time in the generic JSON/XML adapters (#429) +- Use a different JSONPath library that handles root better (#431) Version 1.2.15 - 2024-02-13 =========================== diff --git a/requirements/test.txt b/requirements/test.txt index cc5af2f0..2b531444 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -86,8 +86,6 @@ jmespath==1.0.1 # via # boto3 # botocore -jsonpath-python==1.0.6 - # via shillelagh lazy-object-proxy==1.7.1 # via astroid mccabe==0.7.0 @@ -156,6 +154,8 @@ python-dateutil==2.8.2 # holidays # pandas # shillelagh +python-jsonpath==0.10.3 + # via shillelagh pytz==2022.1 # via pandas pyyaml==6.0 diff --git a/setup.cfg b/setup.cfg index 535bfe37..af8c68b2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -80,7 +80,6 @@ testing = google-auth>=1.23.0 holidays>=0.23 html5lib>=1.1 - jsonpath-python>=1.0.5 pandas>=1.2.2 pip-tools>=6.4.0 pre-commit>=2.13.0 @@ -94,6 +93,7 @@ testing = pytest-integration==0.2.2 pytest-mock>=3.5.1 
pytest>=7.2.0 + python-jsonpath>=0.10.3 requests-mock>=1.8.0 tabulate==0.8.9 yarl>=1.8.1 @@ -106,12 +106,12 @@ all = google-auth>=1.23.0 holidays>=0.23 html5lib>=1.1 - jsonpath-python>=1.0.5 pandas>=1.2.2 prison>=0.2.1 prompt_toolkit>=3 psutil>=5.8.0 pygments>=2.8 + python-jsonpath>=0.10.3 tabulate==0.8.9 yarl>=1.8.1 docs = @@ -123,15 +123,15 @@ console = pygments>=2.8 tabulate==0.8.9 genericjsonapi = - jsonpath-python>=1.0.5 prison>=0.2.1 + python-jsonpath>=0.10.3 yarl>=1.8.1 genericxmlapi = defusedxml>=0.7.1 prison>=0.2.1 yarl>=1.8.1 githubapi = - jsonpath-python>=1.0.5 + python-jsonpath>=0.10.3 gsheetsapi = google-auth>=1.23.0 holidaysmemory = diff --git a/src/shillelagh/adapters/api/generic_json.py b/src/shillelagh/adapters/api/generic_json.py index 3d73f1d3..5aca6b09 100644 --- a/src/shillelagh/adapters/api/generic_json.py +++ b/src/shillelagh/adapters/api/generic_json.py @@ -8,8 +8,8 @@ from datetime import timedelta from typing import Any, Dict, Iterator, List, Optional, Set, Tuple, Union +import jsonpath import prison -from jsonpath import JSONPath from yarl import URL from shillelagh.adapters.base import Adapter @@ -145,8 +145,7 @@ def get_data( # pylint: disable=unused-argument, too-many-arguments if not response.ok: raise ProgrammingError(f'Error: {payload["message"]}') - parser = JSONPath(self.path) - for i, row in enumerate(parser.parse(payload)): + for i, row in enumerate(jsonpath.findall(self.path, payload)): row = { k: v for k, v in (row or {}).items() diff --git a/src/shillelagh/adapters/api/github.py b/src/shillelagh/adapters/api/github.py index fb787c5b..662ad8b6 100644 --- a/src/shillelagh/adapters/api/github.py +++ b/src/shillelagh/adapters/api/github.py @@ -6,8 +6,8 @@ from dataclasses import dataclass from typing import Any, Dict, Iterator, List, Optional, Tuple +import jsonpath import requests_cache -from jsonpath import JSONPath from shillelagh.adapters.base import Adapter from shillelagh.exceptions import ProgrammingError @@ -177,7 +177,7 
@@ def _get_single_resource( payload = response.json() row = { - column.name: JSONPath(column.json_path).parse(payload)[0] + column.name: jsonpath.findall(column.json_path, payload)[0] for column in TABLES[self.base][self.resource] } row["rowid"] = 0 @@ -231,7 +231,7 @@ def _get_multiple_resources( break row = { - column.name: JSONPath(column.json_path).parse(resource)[0] + column.name: jsonpath.findall(column.json_path, resource)[0] for column in TABLES[self.base][self.resource] } row["rowid"] = rowid diff --git a/src/shillelagh/adapters/api/preset.py b/src/shillelagh/adapters/api/preset.py index 1f4de582..fb5f4b52 100644 --- a/src/shillelagh/adapters/api/preset.py +++ b/src/shillelagh/adapters/api/preset.py @@ -8,9 +8,9 @@ import re from typing import Any, Dict, Iterator, List, Optional, Set, Tuple, cast +import jsonpath import prison import requests -from jsonpath import JSONPath from yarl import URL from shillelagh.adapters.api.generic_json import CACHE_EXPIRATION, GenericJSONAPI @@ -178,8 +178,7 @@ def get_data( # pylint: disable=unused-argument, too-many-arguments, too-many-l ) raise ProgrammingError(f"Error: {messages}") - parser = JSONPath(self.path) - rows = parser.parse(payload)[slice_] + rows = jsonpath.findall(self.path, payload)[slice_] if not rows: break diff --git a/tests/adapters/api/generic_json_test.py b/tests/adapters/api/generic_json_test.py index b0774b63..68dfcc6b 100644 --- a/tests/adapters/api/generic_json_test.py +++ b/tests/adapters/api/generic_json_test.py @@ -248,3 +248,36 @@ def test_request_headers_in_url(requests_mock: Mocker) -> None: rows = list(cursor.execute(sql)) assert rows == [("bar", '["one", "two"]')] assert data.last_request.headers["foo"] == "bar" + + +def test_single_row(requests_mock: Mocker) -> None: + """ + Test a query where the response is a single row as a dictionary. 
+ """ + # for datassette + requests_mock.get(re.compile(".*-/versions.json.*"), status_code=404) + + url = "https://www.boredapi.com/api/activity?participants=1#$" + requests_mock.head(str(url), headers={"content-type": "application/json"}) + requests_mock.get( + str(url), + json={ + "activity": "Solve a Rubik's cube", + "type": "recreational", + "participants": 1, + "price": 0, + "link": "", + "key": "4151544", + "accessibility": 0.1, + }, + ) + + connection = connect( + ":memory:", + adapter_kwargs={"genericjsonapi": {"cache_expiration": -1}}, + ) + cursor = connection.cursor() + + sql = f'SELECT * FROM "{url}"' + rows = list(cursor.execute(sql)) + assert rows == [("Solve a Rubik's cube", "recreational", 1, 0, "", "4151544", 0.1)] From e3b4875f8072d729a9f7d9076692ce6878e4abfb Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Thu, 22 Feb 2024 15:38:28 -0500 Subject: [PATCH 10/22] chore: 1.2.16 release (#432) --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 98e3f78a..7667d14c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,9 @@ Changelog Next ==== +Version 1.2.16 - 2024-02-22 +=========================== + - Allow for custom expiration time in the generic JSON/XML adapters (#429) - Use a different JSONPath library that handles root better (#431) From 92f415a63fbe4d4583cf28f54a470680c7e17f52 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida Date: Fri, 23 Feb 2024 14:27:56 -0500 Subject: [PATCH 11/22] feat: support GitHub issues (#433) * feat: support GitHub issues * Better support for JSON fields --- CHANGELOG.rst | 2 + src/shillelagh/adapters/api/github.py | 52 +- tests/adapters/api/github_test.py | 93 +- tests/fakes/__init__.py | 6 +- tests/fakes/github_issues_response.json | 2521 +++++++++++++++++ ...sponse.json => github_pulls_response.json} | 0 6 files changed, 2664 insertions(+), 10 deletions(-) create mode 100644 tests/fakes/github_issues_response.json rename tests/fakes/{github_response.json 
=> github_pulls_response.json} (100%) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7667d14c..039fc5ed 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ Changelog Next ==== +- Add support for GitHub issues (#433) + Version 1.2.16 - 2024-02-22 =========================== diff --git a/src/shillelagh/adapters/api/github.py b/src/shillelagh/adapters/api/github.py index 662ad8b6..0eabdabe 100644 --- a/src/shillelagh/adapters/api/github.py +++ b/src/shillelagh/adapters/api/github.py @@ -1,6 +1,7 @@ """ An adapter for GitHub. """ +import json import logging import urllib.parse from dataclasses import dataclass @@ -20,6 +21,18 @@ PAGE_SIZE = 100 +class JSONString(Field[Any, str]): + """ + A field to handle JSON values. + """ + + type = "TEXT" + db_api_type = "STRING" + + def parse(self, value: Any) -> Optional[str]: + return value if value is None else json.dumps(value) + + @dataclass class Column: """ @@ -63,6 +76,26 @@ class Column: Column("closed_at", "closed_at", StringDateTime()), Column("merged_at", "merged_at", StringDateTime()), ], + "issues": [ + Column("url", "html_url", String()), + Column("id", "id", Integer()), + Column("number", "number", Integer(filters=[Equal])), + Column("state", "state", String(filters=[Equal]), Equal("all")), + Column("title", "title", String()), + Column("userid", "user.id", Integer()), + Column("username", "user.login", String()), + Column("draft", "draft", Boolean()), + Column("locked", "locked", Boolean()), + Column("comments", "comments", Integer()), + Column("created_at", "created_at", StringDateTime()), + Column("updated_at", "updated_at", StringDateTime()), + Column("closed_at", "closed_at", StringDateTime()), + Column("body", "body", String()), + Column("author_association", "author_association", String()), + Column("labels", "labels[*].name", JSONString()), + Column("assignees", "assignees[*].login", JSONString()), + Column("reactions", "reactions", JSONString()), + ], }, } @@ -177,7 +210,7 @@ def 
_get_single_resource( payload = response.json() row = { - column.name: jsonpath.findall(column.json_path, payload)[0] + column.name: get_value(column, payload) for column in TABLES[self.base][self.resource] } row["rowid"] = 0 @@ -231,7 +264,7 @@ def _get_multiple_resources( break row = { - column.name: jsonpath.findall(column.json_path, resource)[0] + column.name: get_value(column, resource) for column in TABLES[self.base][self.resource] } row["rowid"] = rowid @@ -240,3 +273,18 @@ def _get_multiple_resources( rowid += 1 page += 1 + + +def get_value(column: Column, resource: Dict[str, Any]) -> Any: + """ + Extract the value of a column from a resource. + """ + values = jsonpath.findall(column.json_path, resource) + + if isinstance(column.field, JSONString): + return values + + try: + return values[0] + except IndexError: + return None diff --git a/tests/adapters/api/github_test.py b/tests/adapters/api/github_test.py index b7470c3c..e64f778f 100644 --- a/tests/adapters/api/github_test.py +++ b/tests/adapters/api/github_test.py @@ -14,7 +14,11 @@ from shillelagh.exceptions import ProgrammingError from shillelagh.filters import Equal -from ...fakes import github_response, github_single_response +from ...fakes import ( + github_issues_response, + github_pulls_response, + github_single_response, +) def test_github(mocker: MockerFixture, requests_mock: Mocker) -> None: @@ -27,7 +31,7 @@ def test_github(mocker: MockerFixture, requests_mock: Mocker) -> None: ) page1_url = "https://api.github.com/repos/apache/superset/pulls?state=all&per_page=100&page=1" - requests_mock.get(page1_url, json=github_response) + requests_mock.get(page1_url, json=github_pulls_response) page2_url = "https://api.github.com/repos/apache/superset/pulls?state=all&per_page=100&page=2" requests_mock.get(page2_url, json=[]) @@ -206,11 +210,11 @@ def test_github_limit_offset(mocker: MockerFixture, requests_mock: Mocker) -> No page2_url = ( 
"https://api.github.com/repos/apache/superset/pulls?state=all&per_page=5&page=2" ) - requests_mock.get(page2_url, json=github_response[:5]) + requests_mock.get(page2_url, json=github_pulls_response[:5]) page3_url = ( "https://api.github.com/repos/apache/superset/pulls?state=all&per_page=5&page=3" ) - requests_mock.get(page3_url, json=github_response[5:]) + requests_mock.get(page3_url, json=github_pulls_response[5:]) connection = connect(":memory:") cursor = connection.cursor() @@ -466,11 +470,11 @@ def test_get_multiple_resources(mocker: MockerFixture, requests_mock: Mocker) -> page2_url = ( "https://api.github.com/repos/apache/superset/pulls?state=all&per_page=5&page=2" ) - requests_mock.get(page2_url, json=github_response[:5]) + requests_mock.get(page2_url, json=github_pulls_response[:5]) page3_url = ( "https://api.github.com/repos/apache/superset/pulls?state=all&per_page=5&page=3" ) - requests_mock.get(page3_url, json=github_response[5:]) + requests_mock.get(page3_url, json=github_pulls_response[5:]) adapter = GitHubAPI("repos", "apache", "superset", "pulls") rows = adapter._get_multiple_resources( # pylint: disable=protected-access @@ -560,3 +564,80 @@ def test_get_multiple_resources(mocker: MockerFixture, requests_mock: Mocker) -> "rowid": 4, }, ] + + +def test_github_missing_field(mocker: MockerFixture, requests_mock: Mocker) -> None: + """ + Test a request when the response is missing a field. + + For example, some issues don't have the ``draft`` field in the response. 
+ """ + mocker.patch( + "shillelagh.adapters.api.github.requests_cache.CachedSession", + return_value=Session(), + ) + + page1_url = "https://api.github.com/repos/apache/superset/issues?state=all&per_page=100&page=1" + requests_mock.get(page1_url, json=github_issues_response) + page2_url = "https://api.github.com/repos/apache/superset/issues?state=all&per_page=100&page=2" + requests_mock.get(page2_url, json=[]) + + connection = connect(":memory:") + cursor = connection.cursor() + + sql = """ + SELECT draft FROM + "https://api.github.com/repos/apache/superset/issues" + LIMIT 10 + """ + data = list(cursor.execute(sql)) + assert data == [ + (False,), + (False,), + (None,), + (None,), + (False,), + (None,), + (False,), + (None,), + (False,), + (False,), + ] + + +def test_github_json_field(mocker: MockerFixture, requests_mock: Mocker) -> None: + """ + Test a request when the response has a JSON field. + """ + mocker.patch( + "shillelagh.adapters.api.github.requests_cache.CachedSession", + return_value=Session(), + ) + + page1_url = "https://api.github.com/repos/apache/superset/issues?state=all&per_page=100&page=1" + requests_mock.get(page1_url, json=github_issues_response) + page2_url = "https://api.github.com/repos/apache/superset/issues?state=all&per_page=100&page=2" + requests_mock.get(page2_url, json=[]) + + connection = connect(":memory:") + cursor = connection.cursor() + + sql = """ + SELECT labels FROM + "https://api.github.com/repos/apache/superset/issues" + WHERE labels != '[]' + LIMIT 10 + """ + data = list(cursor.execute(sql)) + assert data == [ + ('["size/M", "dependencies:npm", "github_actions", "packages"]',), + ('["size/S"]',), + ('["size/M"]',), + ('["size/M", "api"]',), + ('["size/L", "api"]',), + ('["size/XS"]',), + ('["size/XS", "dependencies:npm"]',), + ('["size/S"]',), + ('["size/XS", "hold:review-after-release"]',), + ('["size/M", "review-checkpoint", "plugins"]',), + ] diff --git a/tests/fakes/__init__.py b/tests/fakes/__init__.py index 
c24e8129..d062b2b7 100644 --- a/tests/fakes/__init__.py +++ b/tests/fakes/__init__.py @@ -128,7 +128,9 @@ def delete_data(self, row_id: int) -> None: datasette_results = [tuple(row) for row in json.load(fp)] with open(os.path.join(dirname, "incidents.json"), encoding="utf-8") as fp: incidents = json.load(fp) -with open(os.path.join(dirname, "github_response.json"), encoding="utf-8") as fp: - github_response = json.load(fp) +with open(os.path.join(dirname, "github_pulls_response.json"), encoding="utf-8") as fp: + github_pulls_response = json.load(fp) +with open(os.path.join(dirname, "github_issues_response.json"), encoding="utf-8") as fp: + github_issues_response = json.load(fp) with open(os.path.join(dirname, "github_single_response.json"), encoding="utf-8") as fp: github_single_response = json.load(fp) diff --git a/tests/fakes/github_issues_response.json b/tests/fakes/github_issues_response.json new file mode 100644 index 00000000..aef385cc --- /dev/null +++ b/tests/fakes/github_issues_response.json @@ -0,0 +1,2521 @@ +[ + { + "url": "https://api.github.com/repos/apache/superset/issues/27232", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27232/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27232/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27232/events", + "html_url": "https://github.com/apache/superset/pull/27232", + "id": 2151207116, + "node_id": "PR_kwDOAlosUs5nwxwB", + "number": 27232, + "title": "chore: Removes Chromatic workflow and dependencies", + "user": { + "login": "michael-s-molina", + "id": 70410625, + "node_id": "MDQ6VXNlcjcwNDEwNjI1", + "avatar_url": "https://avatars.githubusercontent.com/u/70410625?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/michael-s-molina", + "html_url": "https://github.com/michael-s-molina", + "followers_url": 
"https://api.github.com/users/michael-s-molina/followers", + "following_url": "https://api.github.com/users/michael-s-molina/following{/other_user}", + "gists_url": "https://api.github.com/users/michael-s-molina/gists{/gist_id}", + "starred_url": "https://api.github.com/users/michael-s-molina/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/michael-s-molina/subscriptions", + "organizations_url": "https://api.github.com/users/michael-s-molina/orgs", + "repos_url": "https://api.github.com/users/michael-s-molina/repos", + "events_url": "https://api.github.com/users/michael-s-molina/events{/privacy}", + "received_events_url": "https://api.github.com/users/michael-s-molina/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329203144, + "node_id": "MDU6TGFiZWwxMzI5MjAzMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/M", + "name": "size/M", + "color": "705FA3", + "default": false, + "description": "" + }, + { + "id": 6496358600, + "node_id": "LA_kwDOAlosUs8AAAABgzaQyA", + "url": "https://api.github.com/repos/apache/superset/labels/dependencies:npm", + "name": "dependencies:npm", + "color": "ededed", + "default": false, + "description": null + }, + { + "id": 6501451649, + "node_id": "LA_kwDOAlosUs8AAAABg4RHgQ", + "url": "https://api.github.com/repos/apache/superset/labels/github_actions", + "name": "github_actions", + "color": "000000", + "default": false, + "description": "Pull requests that update GitHub Actions code" + }, + { + "id": 6502628977, + "node_id": "LA_kwDOAlosUs8AAAABg5Y-cQ", + "url": "https://api.github.com/repos/apache/superset/labels/packages", + "name": "packages", + "color": "E795E1", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 3, + "created_at": "2024-02-23T14:28:27Z", + "updated_at": "2024-02-23T16:44:18Z", + "closed_at": null, + 
"author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27232", + "html_url": "https://github.com/apache/superset/pull/27232", + "diff_url": "https://github.com/apache/superset/pull/27232.diff", + "patch_url": "https://github.com/apache/superset/pull/27232.patch", + "merged_at": null + }, + "body": "### SUMMARY\r\nRemoves the Chromatic GitHub workflow and NPM dependencies that were introduced in https://github.com/apache/superset/pull/21095. This workflow is not maintained anymore and it also overlaps with Applitools which is used for visual testing.\r\n\r\n### TESTING INSTRUCTIONS\r\nCheck that the Chromatic workflow is not running.\r\n\r\n### ADDITIONAL INFORMATION\r\n- [ ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27232/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27232/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27229", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27229/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27229/comments", + "events_url": 
"https://api.github.com/repos/apache/superset/issues/27229/events", + "html_url": "https://github.com/apache/superset/pull/27229", + "id": 2151044246, + "node_id": "PR_kwDOAlosUs5nwN0Z", + "number": 27229, + "title": "#23375 set columns numeric datatypes when exporting to excel", + "user": { + "login": "squalou", + "id": 4623644, + "node_id": "MDQ6VXNlcjQ2MjM2NDQ=", + "avatar_url": "https://avatars.githubusercontent.com/u/4623644?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/squalou", + "html_url": "https://github.com/squalou", + "followers_url": "https://api.github.com/users/squalou/followers", + "following_url": "https://api.github.com/users/squalou/following{/other_user}", + "gists_url": "https://api.github.com/users/squalou/gists{/gist_id}", + "starred_url": "https://api.github.com/users/squalou/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/squalou/subscriptions", + "organizations_url": "https://api.github.com/users/squalou/orgs", + "repos_url": "https://api.github.com/users/squalou/repos", + "events_url": "https://api.github.com/users/squalou/events{/privacy}", + "received_events_url": "https://api.github.com/users/squalou/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1330950144, + "node_id": "MDU6TGFiZWwxMzMwOTUwMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/S", + "name": "size/S", + "color": "C8B6FF", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-23T13:01:27Z", + "updated_at": "2024-02-23T13:01:31Z", + "closed_at": null, + "author_association": "FIRST_TIME_CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27229", + "html_url": "https://github.com/apache/superset/pull/27229", + "diff_url": 
"https://github.com/apache/superset/pull/27229.diff", + "patch_url": "https://github.com/apache/superset/pull/27229.patch", + "merged_at": null + }, + "body": "### SUMMARY\r\n\r\nWhen exporting to Excel, currently datatypes are not specified, leading to decimal numbers being stored as strings, which may lead to issues when opening the file.\r\n\r\nDepending on the locale, the issue may not even be visible, Excel managing to convert things. (that's the case in us/en locale). When using another locale (say fr), then numbers, output with '.' as decimal separator and stored as strings won't be usable as numbers in Excel.\r\n\r\nThe idea here is to use the same \"datatype guessing\" method already existing, and use it to convert dataframe columns types when required before exporting.\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n\r\nnot applicable\r\n\r\n### TESTING INSTRUCTIONS\r\n\r\n\r\n- Create a Table dashboard with numercical and decimal numbers in it. Ideally add some strings and dates.\r\n- Export to Excel\r\n- you can then unzip the .xlsx file, and open sheet1.xml in a text editor to check the export\r\n- Numbers appear directly in xml cell reference\r\n- Strings are not visible directly, instead a `` markup is used containing an id to the string\r\n\r\nEditing the sheet1.xml file is the safest way to check the issue, due to magical operations softwares like Excel, LibreOffice Calc or others perform when opening files that may hide the issue.\r\n\r\n\r\n### ADDITIONAL INFORMATION\r\n\r\n\r\n\r\n- [x] Has associated issue: Fixes #23375\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes 
existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27229/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27229/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27228", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27228/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27228/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27228/events", + "html_url": "https://github.com/apache/superset/issues/27228", + "id": 2150822753, + "node_id": "I_kwDOAlosUs6AMvNh", + "number": 27228, + "title": "custom SQL aliases in raw records are not being used as column labels in chart", + "user": { + "login": "khaledrazemTHG", + "id": 113435313, + "node_id": "U_kgDOBsLisQ", + "avatar_url": "https://avatars.githubusercontent.com/u/113435313?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/khaledrazemTHG", + "html_url": "https://github.com/khaledrazemTHG", + "followers_url": "https://api.github.com/users/khaledrazemTHG/followers", + "following_url": "https://api.github.com/users/khaledrazemTHG/following{/other_user}", + "gists_url": "https://api.github.com/users/khaledrazemTHG/gists{/gist_id}", + "starred_url": "https://api.github.com/users/khaledrazemTHG/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/khaledrazemTHG/subscriptions", + "organizations_url": "https://api.github.com/users/khaledrazemTHG/orgs", + "repos_url": "https://api.github.com/users/khaledrazemTHG/repos", + "events_url": "https://api.github.com/users/khaledrazemTHG/events{/privacy}", + 
"received_events_url": "https://api.github.com/users/khaledrazemTHG/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-23T10:42:19Z", + "updated_at": "2024-02-23T10:42:19Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "### Bug description\n\nI am using raw records in this chart and sql so I can rename the field, but the table is showing the original field name not the one i assigned as you can see from the fields on the left\n\n### How to reproduce the bug\n\n1- Create new table\r\n2- Click on raw records\r\n3- add a column using custom sql\r\n4- rename to a different title\r\n5- generate table\n\n### Screenshots/recordings\n\n\"Screenshot\r\n\n\n### Superset version\n\nmaster / latest-dev\n\n### Python version\n\n3.9\n\n### Node version\n\n16\n\n### Browser\n\nChrome\n\n### Additional context\n\n_No response_\n\n### Checklist\n\n- [X] I have searched Superset docs and Slack and didn't find a solution to my problem.\n- [X] I have searched the GitHub issue tracker and didn't find a similar bug report.\n- [X] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the \"additional context\" section.", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27228/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27228/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27227", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": 
"https://api.github.com/repos/apache/superset/issues/27227/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27227/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27227/events", + "html_url": "https://github.com/apache/superset/issues/27227", + "id": 2150590571, + "node_id": "I_kwDOAlosUs6AL2hr", + "number": 27227, + "title": "superset 2.1.0 frontend start to use the \"npm run dev-server\", will appear many warnings ", + "user": { + "login": "gufenqing", + "id": 77658522, + "node_id": "MDQ6VXNlcjc3NjU4NTIy", + "avatar_url": "https://avatars.githubusercontent.com/u/77658522?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/gufenqing", + "html_url": "https://github.com/gufenqing", + "followers_url": "https://api.github.com/users/gufenqing/followers", + "following_url": "https://api.github.com/users/gufenqing/following{/other_user}", + "gists_url": "https://api.github.com/users/gufenqing/gists{/gist_id}", + "starred_url": "https://api.github.com/users/gufenqing/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/gufenqing/subscriptions", + "organizations_url": "https://api.github.com/users/gufenqing/orgs", + "repos_url": "https://api.github.com/users/gufenqing/repos", + "events_url": "https://api.github.com/users/gufenqing/events{/privacy}", + "received_events_url": "https://api.github.com/users/gufenqing/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-23T08:14:59Z", + "updated_at": "2024-02-23T08:14:59Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "npm run dev-server will appear many warnings 
\r\n![npmrundevservererror](https://github.com/apache/superset/assets/77658522/2a707aa1-3411-4f5a-bcad-c04c29eba57f)\r\n\r\n![image](https://github.com/apache/superset/assets/77658522/0510fb1e-e00b-4dc7-9af4-f42add32ec01)\r\n\r\n\r\nI hope that will get the assistance to how to solves the warning and correctly see the superset page\r\n\r\n\r\n\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27227/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27227/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27226", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27226/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27226/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27226/events", + "html_url": "https://github.com/apache/superset/pull/27226", + "id": 2150567026, + "node_id": "PR_kwDOAlosUs5nuk3U", + "number": 27226, + "title": "fix(helm): update init containers to include extra env vars (cont.)", + "user": { + "login": "oscep", + "id": 117340990, + "node_id": "U_kgDOBv57Pg", + "avatar_url": "https://avatars.githubusercontent.com/u/117340990?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/oscep", + "html_url": "https://github.com/oscep", + "followers_url": "https://api.github.com/users/oscep/followers", + "following_url": "https://api.github.com/users/oscep/following{/other_user}", + "gists_url": "https://api.github.com/users/oscep/gists{/gist_id}", + "starred_url": "https://api.github.com/users/oscep/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/oscep/subscriptions", + 
"organizations_url": "https://api.github.com/users/oscep/orgs", + "repos_url": "https://api.github.com/users/oscep/repos", + "events_url": "https://api.github.com/users/oscep/events{/privacy}", + "received_events_url": "https://api.github.com/users/oscep/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329203144, + "node_id": "MDU6TGFiZWwxMzI5MjAzMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/M", + "name": "size/M", + "color": "705FA3", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 3, + "created_at": "2024-02-23T07:57:14Z", + "updated_at": "2024-02-23T10:26:55Z", + "closed_at": null, + "author_association": "FIRST_TIME_CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27226", + "html_url": "https://github.com/apache/superset/pull/27226", + "diff_url": "https://github.com/apache/superset/pull/27226.diff", + "patch_url": "https://github.com/apache/superset/pull/27226.patch", + "merged_at": null + }, + "body": "This PR is based on [Steven Luther's work](https://github.com/apache/superset/pull/25378) and is an attempt to push it across the finish line. I take no credit for those contributions. I'll leave the PR description as is:\r\n\r\n### SUMMARY\r\nThe init containers in the helm chart do not get the same environment variables as the other pods. This causes issues when the init containers attempt to ping the database, and the database variables are overridden in environment variables.\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n### TESTING INSTRUCTIONS\r\n* Add env vars to `values.yaml`, `extraEnvRaw`, or `extraEnv` example:\r\n\r\n```yaml\r\nextraEnvRaw:\r\n # Load DB password from other secret (e.g. 
for zalando operator)\r\n - name: DB_PASS\r\n valueFrom:\r\n secretKeyRef:\r\n name: superset.superset-postgres.credentials.postgresql.acid.zalan.do\r\n key: password\r\n```\r\n\r\n```yaml\r\nextraEnv:\r\n GUNICORN_TIMEOUT: 300\r\n```\r\n\r\n* Run `helm template superset .`\r\n* Verify changes, in this case `deployment.yaml`, shortened for readability.\r\n\r\nWith no values in `envFromRaw` or `extraEnv`:\r\n\r\n```yaml\r\n# Source: superset/templates/deployment.yaml\r\n[...]\r\n initContainers:\r\n - command:\r\n - /bin/sh\r\n - -c\r\n - dockerize -wait \"tcp://$DB_HOST:$DB_PORT\" -timeout 120s\r\n envFrom:\r\n - secretRef:\r\n name: 'superset-test-env'\r\n image: 'apache/superset:dockerize'\r\n imagePullPolicy: 'IfNotPresent'\r\n name: wait-for-postgres\r\n containers:\r\n[...]\r\n```\r\n\r\nWith no values in `envFromRaw`, but `GUNICORN_TIMEOUT` set in `extraEnv`:\r\n\r\n```yaml\r\n# Source: superset/templates/deployment.yaml\r\n[...]\r\n initContainers:\r\n - command:\r\n - /bin/sh\r\n - -c\r\n - dockerize -wait \"tcp://$DB_HOST:$DB_PORT\" -timeout 120s\r\n envFrom:\r\n - secretRef:\r\n name: 'superset-test-env'\r\n image: 'apache/superset:dockerize'\r\n imagePullPolicy: 'IfNotPresent'\r\n name: wait-for-postgres\r\n env:\r\n - name: \"GUNICORN_TIMEOUT\"\r\n value: \"300\"\r\n containers:\r\n[...]\r\n```\r\n\r\nWith `DB_PASS` set in `envFromRaw`, no values set in `extraEnv`:\r\n\r\n```yaml\r\n[...]\r\n initContainers:\r\n - command:\r\n - /bin/sh\r\n - -c\r\n - dockerize -wait \"tcp://$DB_HOST:$DB_PORT\" -timeout 120s\r\n envFrom:\r\n - secretRef:\r\n name: 'superset-test-env'\r\n image: 'apache/superset:dockerize'\r\n imagePullPolicy: 'IfNotPresent'\r\n name: wait-for-postgres\r\n env:\r\n - name: DB_PASS\r\n valueFrom:\r\n secretKeyRef:\r\n key: password\r\n name: superset.superset-postgres.credentials.postgresql.acid.zalan.do\r\n containers:\r\n[...]\r\n```\r\n\r\nWith both `DB_PASS` and `GUNICORN_TIMEOUT` set:\r\n\r\n```yaml\r\n[...]\r\n initContainers:\r\n - 
command:\r\n - /bin/sh\r\n - -c\r\n - dockerize -wait \"tcp://$DB_HOST:$DB_PORT\" -timeout 120s\r\n envFrom:\r\n - secretRef:\r\n name: 'superset-test-env'\r\n image: 'apache/superset:dockerize'\r\n imagePullPolicy: 'IfNotPresent'\r\n name: wait-for-postgres\r\n env:\r\n - name: \"GUNICORN_TIMEOUT\"\r\n value: \"300\"\r\n - name: DB_PASS\r\n valueFrom:\r\n secretKeyRef:\r\n key: password\r\n name: superset.superset-postgres.credentials.postgresql.acid.zalan.do\r\n\r\n containers:\r\n[...]\r\n```\r\n\r\n### ADDITIONAL INFORMATION\r\n* [x] Has associated issue: Fixes [Init containers should be getting same ENV var setups like the final containers #24805](https://github.com/apache/superset/issues/24805)\r\n* [ ] Required feature flags:\r\n* [ ] Changes UI\r\n* [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n \r\n * [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n * [ ] Confirm DB migration upgrade and downgrade tested\r\n * [ ] Runtime estimates and downtime expectations provided\r\n* [ ] Introduces new feature or API\r\n* [ ] Removes existing feature or API\r\n\r\n\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27226/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27226/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27224", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27224/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27224/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27224/events", + "html_url": 
"https://github.com/apache/superset/issues/27224", + "id": 2150500031, + "node_id": "I_kwDOAlosUs6ALga_", + "number": 27224, + "title": "Bulk delete charts pagination disappeared", + "user": { + "login": "Davidkramer1999", + "id": 74869455, + "node_id": "MDQ6VXNlcjc0ODY5NDU1", + "avatar_url": "https://avatars.githubusercontent.com/u/74869455?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Davidkramer1999", + "html_url": "https://github.com/Davidkramer1999", + "followers_url": "https://api.github.com/users/Davidkramer1999/followers", + "following_url": "https://api.github.com/users/Davidkramer1999/following{/other_user}", + "gists_url": "https://api.github.com/users/Davidkramer1999/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Davidkramer1999/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Davidkramer1999/subscriptions", + "organizations_url": "https://api.github.com/users/Davidkramer1999/orgs", + "repos_url": "https://api.github.com/users/Davidkramer1999/repos", + "events_url": "https://api.github.com/users/Davidkramer1999/events{/privacy}", + "received_events_url": "https://api.github.com/users/Davidkramer1999/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-23T07:17:56Z", + "updated_at": "2024-02-23T07:17:56Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "### Bug description\n\nWhen selecting for example all charst in bulk mode the pagination disappeared seen in screenshow below.\r\n\n\n### How to reproduce the bug\n\n1. Go to Charts\r\n2. Select all charts\r\n3. Delete all \r\n4. 
Pagination will disappeared and \"No data will be shown\" \n\n### Screenshots/recordings\n\n1.Selecting bulk remove on chart \r\n![image](https://github.com/apache/superset/assets/74869455/2a56ef57-ebe3-4835-b57e-6b0e7df35623)\r\n\r\n2. No data but we can see from previous screenshot that there are still 2 pages of charts... \r\n![image](https://github.com/apache/superset/assets/74869455/d148aa99-271b-40a9-9704-979ae3f5f15f)\r\n\r\n3. Now we refresh the side at there are still 2 sites...\r\n![image](https://github.com/apache/superset/assets/74869455/c3c23321-529c-4d82-bede-40de08141105)\r\n\n\n### Superset version\n\n3.1.1\n\n### Python version\n\n3.9\n\n### Node version\n\n16\n\n### Browser\n\nChrome\n\n### Additional context\n\nSreenshots are from edge browser but the same thing happens on chrome as well.\n\n### Checklist\n\n- [ ] I have searched Superset docs and Slack and didn't find a solution to my problem.\n- [ ] I have searched the GitHub issue tracker and didn't find a similar bug report.\n- [ ] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the \"additional context\" section.", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27224/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27224/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27223", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27223/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27223/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27223/events", + "html_url": 
"https://github.com/apache/superset/pull/27223", + "id": 2150353105, + "node_id": "PR_kwDOAlosUs5nt1UB", + "number": 27223, + "title": "feat(REST API): Adding REST API for advanced/force delete of a user #27207", + "user": { + "login": "mknadh", + "id": 7744468, + "node_id": "MDQ6VXNlcjc3NDQ0Njg=", + "avatar_url": "https://avatars.githubusercontent.com/u/7744468?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/mknadh", + "html_url": "https://github.com/mknadh", + "followers_url": "https://api.github.com/users/mknadh/followers", + "following_url": "https://api.github.com/users/mknadh/following{/other_user}", + "gists_url": "https://api.github.com/users/mknadh/gists{/gist_id}", + "starred_url": "https://api.github.com/users/mknadh/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/mknadh/subscriptions", + "organizations_url": "https://api.github.com/users/mknadh/orgs", + "repos_url": "https://api.github.com/users/mknadh/repos", + "events_url": "https://api.github.com/users/mknadh/events{/privacy}", + "received_events_url": "https://api.github.com/users/mknadh/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329203144, + "node_id": "MDU6TGFiZWwxMzI5MjAzMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/M", + "name": "size/M", + "color": "705FA3", + "default": false, + "description": "" + }, + { + "id": 2859155980, + "node_id": "MDU6TGFiZWwyODU5MTU1OTgw", + "url": "https://api.github.com/repos/apache/superset/labels/api", + "name": "api", + "color": "91C99C", + "default": false, + "description": "Related to the REST API" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-23T04:36:32Z", + "updated_at": "2024-02-23T04:37:00Z", + "closed_at": null, + "author_association": "FIRST_TIME_CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { 
+ "url": "https://api.github.com/repos/apache/superset/pulls/27223", + "html_url": "https://github.com/apache/superset/pull/27223", + "diff_url": "https://github.com/apache/superset/pull/27223.diff", + "patch_url": "https://github.com/apache/superset/pull/27223.patch", + "merged_at": null + }, + "body": "feat(REST API): Adding REST API for advanced/force delete of a user #27207\r\n\r\n### SUMMARY\r\nThis feature addresses existing issue since longtime. \r\nhttps://github.com/apache/superset/issues/13345\r\n\r\n(cannot delete user \"Associated data exists, please delete them first\" #13345 \r\n\r\n### ADDITIONAL INFORMATION\r\n\r\n\r\n- [x ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ x] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27223/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27223/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27222", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27222/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27222/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27222/events", + "html_url": "https://github.com/apache/superset/issues/27222", + "id": 2150348008, 
+ "node_id": "I_kwDOAlosUs6AK7To", + "number": 27222, + "title": "report exception (email-chart-text)", + "user": { + "login": "liangliangGit", + "id": 35413857, + "node_id": "MDQ6VXNlcjM1NDEzODU3", + "avatar_url": "https://avatars.githubusercontent.com/u/35413857?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/liangliangGit", + "html_url": "https://github.com/liangliangGit", + "followers_url": "https://api.github.com/users/liangliangGit/followers", + "following_url": "https://api.github.com/users/liangliangGit/following{/other_user}", + "gists_url": "https://api.github.com/users/liangliangGit/gists{/gist_id}", + "starred_url": "https://api.github.com/users/liangliangGit/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/liangliangGit/subscriptions", + "organizations_url": "https://api.github.com/users/liangliangGit/orgs", + "repos_url": "https://api.github.com/users/liangliangGit/repos", + "events_url": "https://api.github.com/users/liangliangGit/events{/privacy}", + "received_events_url": "https://api.github.com/users/liangliangGit/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-23T04:29:15Z", + "updated_at": "2024-02-23T12:39:13Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "### Bug description\r\n\r\nI have set up a scheduled report to send emails, where the report specifies the text mode for charts. If the chart results contain no data, the email will not be sent. 
However, if the chart results do include data, the email will be sent as expected.\r\n\r\nIn cases where the chart returns no data, the email can still be successfully sent when both CSV and PNG model are selected for the chart.\r\n\r\n### How to reproduce the bug\r\n\r\n1.go to report\r\n2.choose chart\r\n3.choose Send As Text\r\n4.schedule email to be sent at a specific time.\r\n\r\n### Screenshots/recordings\r\n\r\n_No response_\r\n\r\n### Superset version\r\n\r\n3.0.4\r\n\r\n### Python version\r\n\r\n3.9\r\n\r\n### Node version\r\n\r\n16\r\n\r\n### Browser\r\n\r\nChrome\r\n\r\n### Additional context\r\n\r\n_No response_\r\n\r\n### Checklist\r\n\r\n- [X] I have searched Superset docs and Slack and didn't find a solution to my problem.\r\n- [X] I have searched the GitHub issue tracker and didn't find a similar bug report.\r\n- [X] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the \"additional context\" section.", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27222/reactions", + "total_count": 1, + "+1": 1, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27222/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27221", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27221/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27221/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27221/events", + "html_url": "https://github.com/apache/superset/pull/27221", + "id": 2150333357, + "node_id": "PR_kwDOAlosUs5ntw9R", + "number": 27221, + "title": "feat(REST API): Apache Superset \"Factory Reset\" REST API 
#27207", + "user": { + "login": "mknadh", + "id": 7744468, + "node_id": "MDQ6VXNlcjc3NDQ0Njg=", + "avatar_url": "https://avatars.githubusercontent.com/u/7744468?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/mknadh", + "html_url": "https://github.com/mknadh", + "followers_url": "https://api.github.com/users/mknadh/followers", + "following_url": "https://api.github.com/users/mknadh/following{/other_user}", + "gists_url": "https://api.github.com/users/mknadh/gists{/gist_id}", + "starred_url": "https://api.github.com/users/mknadh/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/mknadh/subscriptions", + "organizations_url": "https://api.github.com/users/mknadh/orgs", + "repos_url": "https://api.github.com/users/mknadh/repos", + "events_url": "https://api.github.com/users/mknadh/events{/privacy}", + "received_events_url": "https://api.github.com/users/mknadh/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1331560260, + "node_id": "MDU6TGFiZWwxMzMxNTYwMjYw", + "url": "https://api.github.com/repos/apache/superset/labels/size/L", + "name": "size/L", + "color": "6D53B9", + "default": false, + "description": "" + }, + { + "id": 2859155980, + "node_id": "MDU6TGFiZWwyODU5MTU1OTgw", + "url": "https://api.github.com/repos/apache/superset/labels/api", + "name": "api", + "color": "91C99C", + "default": false, + "description": "Related to the REST API" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-23T04:11:13Z", + "updated_at": "2024-02-23T04:18:26Z", + "closed_at": null, + "author_association": "FIRST_TIME_CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27221", + "html_url": "https://github.com/apache/superset/pull/27221", + "diff_url": "https://github.com/apache/superset/pull/27221.diff", + 
"patch_url": "https://github.com/apache/superset/pull/27221.patch", + "merged_at": null + }, + "body": "feat(REST API): Apache Superset \"Factory Reset\" REST API #27207\r\n\r\n### SUMMARY\r\nOver time, Apache Superset instances can accumulate large amounts of data, including charts, dashboards, saved queries, and other artifacts. There might be scenarios where users want to start fresh with a clean slate, removing all existing data.\r\n\r\nTesting and Development: Developers and administrators often require a quick and efficient way to reset Apache Superset instances to a default state for testing purposes or when setting up development environments.\r\n\r\nData Privacy and Security: In some cases, there might be sensitive or confidential data stored within Apache Superset that needs to be wiped out completely to ensure data privacy and security compliance.\r\n\r\nWhat is the proposal?\r\n\r\nImplement a Factory Reset API: Develop an API endpoint that allows users to trigger a factory reset of Apache Superset. This API should be designed to delete all existing data, including charts, dashboards, saved queries, databases, and other related artifacts.\r\n\r\nAuthorization and Confirmation: Ensure that the API requires appropriate authorization to prevent unauthorized access. Additionally, consider implementing a confirmation mechanism to prevent accidental data loss.\r\n\r\nDocumentation and Best Practices: Provide comprehensive documentation on how to use the Factory Reset API, along with best practices for when and how to perform a reset. Include warnings about data loss and the irreversible nature of the operation.\r\nError Handling and Logging: Implement robust error handling mechanisms within the API to handle any unexpected errors gracefully. 
Additionally, log all reset operations for auditing purposes.\r\n\r\nIntegration with Configuration Management: Optionally, provide integration with configuration management tools or scripts to automate the process of resetting Apache Superset instances in a controlled and reproducible manner.\r\n\r\n### ADDITIONAL INFORMATION\r\n\r\n\r\n- [x ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ x] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27221/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27221/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27220", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27220/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27220/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27220/events", + "html_url": "https://github.com/apache/superset/pull/27220", + "id": 2150300980, + "node_id": "PR_kwDOAlosUs5ntp7-", + "number": 27220, + "title": "chore: add file extension on the file uploaded on slack", + "user": { + "login": "okayhooni", + "id": 81631424, + "node_id": "MDQ6VXNlcjgxNjMxNDI0", + "avatar_url": "https://avatars.githubusercontent.com/u/81631424?v=4", + 
"gravatar_id": "", + "url": "https://api.github.com/users/okayhooni", + "html_url": "https://github.com/okayhooni", + "followers_url": "https://api.github.com/users/okayhooni/followers", + "following_url": "https://api.github.com/users/okayhooni/following{/other_user}", + "gists_url": "https://api.github.com/users/okayhooni/gists{/gist_id}", + "starred_url": "https://api.github.com/users/okayhooni/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/okayhooni/subscriptions", + "organizations_url": "https://api.github.com/users/okayhooni/orgs", + "repos_url": "https://api.github.com/users/okayhooni/repos", + "events_url": "https://api.github.com/users/okayhooni/events{/privacy}", + "received_events_url": "https://api.github.com/users/okayhooni/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329183122, + "node_id": "MDU6TGFiZWwxMzI5MTgzMTIy", + "url": "https://api.github.com/repos/apache/superset/labels/size/XS", + "name": "size/XS", + "color": "E2D8FF", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 1, + "created_at": "2024-02-23T03:23:26Z", + "updated_at": "2024-02-23T03:37:31Z", + "closed_at": null, + "author_association": "CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27220", + "html_url": "https://github.com/apache/superset/pull/27220", + "diff_url": "https://github.com/apache/superset/pull/27220.diff", + "patch_url": "https://github.com/apache/superset/pull/27220.patch", + "merged_at": null + }, + "body": "### SUMMARY\r\n- I received a reported issue by our company members, that files downloaded through Slack are not opening properly.\r\n- Upon investigation, I found that the problem is due to the absence of file extensions in the uploaded filenames on Slack. 
Consequently, the files are attempting to open with a default text editor.\r\n- So, I added a simple logic to append file extension with argument of slack file_upload API\r\n - https://api.slack.com/methods/files.upload\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n![image](https://github.com/apache/superset/assets/81631424/77ec2e92-fe47-4b0b-88a9-4bc423d7d5e6)\r\n\r\n### TESTING INSTRUCTIONS\r\nJust do make alerts of notification with attached image", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27220/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27220/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27219", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27219/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27219/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27219/events", + "html_url": "https://github.com/apache/superset/pull/27219", + "id": 2150205535, + "node_id": "PR_kwDOAlosUs5ntVdI", + "number": 27219, + "title": "build(deps): bump re-resizable from 6.6.1 to 6.9.11 in /superset-frontend", + "user": { + "login": "justinpark", + "id": 1392866, + "node_id": "MDQ6VXNlcjEzOTI4NjY=", + "avatar_url": "https://avatars.githubusercontent.com/u/1392866?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/justinpark", + "html_url": "https://github.com/justinpark", + "followers_url": "https://api.github.com/users/justinpark/followers", + "following_url": "https://api.github.com/users/justinpark/following{/other_user}", + "gists_url": "https://api.github.com/users/justinpark/gists{/gist_id}", + 
"starred_url": "https://api.github.com/users/justinpark/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/justinpark/subscriptions", + "organizations_url": "https://api.github.com/users/justinpark/orgs", + "repos_url": "https://api.github.com/users/justinpark/repos", + "events_url": "https://api.github.com/users/justinpark/events{/privacy}", + "received_events_url": "https://api.github.com/users/justinpark/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329183122, + "node_id": "MDU6TGFiZWwxMzI5MTgzMTIy", + "url": "https://api.github.com/repos/apache/superset/labels/size/XS", + "name": "size/XS", + "color": "E2D8FF", + "default": false, + "description": "" + }, + { + "id": 6496358600, + "node_id": "LA_kwDOAlosUs8AAAABgzaQyA", + "url": "https://api.github.com/repos/apache/superset/labels/dependencies:npm", + "name": "dependencies:npm", + "color": "ededed", + "default": false, + "description": null + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 1, + "created_at": "2024-02-23T01:05:38Z", + "updated_at": "2024-02-23T01:39:42Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27219", + "html_url": "https://github.com/apache/superset/pull/27219", + "diff_url": "https://github.com/apache/superset/pull/27219.diff", + "patch_url": "https://github.com/apache/superset/pull/27219.patch", + "merged_at": null + }, + "body": "Bumps [re-resizable](https://github.com/bokuweb/react-resizable-box) from 6.6.1 to 6.9.11.\r\nFixes the failed cypress test at #26933\r\n\r\n
\r\nRelease notes\r\n

Sourced from re-resizable's releases.

\r\n
\r\n

v6.6.11

\r\n

Full Changelog: https://github.com/bokuweb/re-resizable/compare/v6.6.10...v6.6.11

\r\n

v6.6.10

\r\n

What's Changed

\r\n\r\n\r\n
\r\n

... (truncated)

\r\n
\r\n
\r\nChangelog\r\n

Sourced from re-resizable's changelog.

\r\n
\r\n

Changelog

\r\n\r\n\r\n

6.9.11 (2023-08-10)

\r\n

:nail_care: Enhancement

\r\n
    \r\n
  • improve enable type.
  • \r\n
\r\n

6.9.9 (2022-04-26)

\r\n

:nail_care: Enhancement

\r\n
    \r\n
  • use native endsWith.
  • \r\n
  • remove fast-memoize.
  • \r\n
\r\n

6.9.8 (2022-04-22)

\r\n

:nail_care: Enhancement

\r\n
    \r\n
  • use flushSync in mouseMove.
  • \r\n
\r\n

6.9.6 (2022-04-22)

\r\n

:nail_care: Enhancement

\r\n
    \r\n
  • add react and react-dom to peer deps.
  • \r\n
\r\n

6.9.5 (2022-03-14)

\r\n\r\n
\r\n

... (truncated)

\r\n
\r\n
\r\nCommits\r\n\r\n
\r\n
\r\n\r\n\r\n[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=re-resizable&package-manager=npm_and_yarn&previous-version=6.6.1&new-version=6.9.11)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)\r\n\r\nDependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.\r\n\r\n[//]: # (dependabot-automerge-start)\r\nDependabot will merge this PR once CI passes on it, as requested by @rusackas.\r\n\r\n[//]: # (dependabot-automerge-end)\r\n\r\n---\r\n\r\n
\r\nDependabot commands and options\r\n
\r\n\r\nYou can trigger Dependabot actions by commenting on this PR:\r\n- `@dependabot rebase` will rebase this PR\r\n- `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it\r\n- `@dependabot merge` will merge this PR after your CI passes on it\r\n- `@dependabot squash and merge` will squash and merge this PR after your CI passes on it\r\n- `@dependabot cancel merge` will cancel a previously requested merge and block automerging\r\n- `@dependabot reopen` will reopen this PR if it is closed\r\n- `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually\r\n- `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency\r\n- `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself)\r\n- `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself)\r\n- `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)\r\n\r\n\r\n
", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27219/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27219/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27217", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27217/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27217/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27217/events", + "html_url": "https://github.com/apache/superset/pull/27217", + "id": 2150112152, + "node_id": "PR_kwDOAlosUs5ntA6m", + "number": 27217, + "title": "fix(sqlglot): Address regressions introduced in #26476", + "user": { + "login": "john-bodley", + "id": 4567245, + "node_id": "MDQ6VXNlcjQ1NjcyNDU=", + "avatar_url": "https://avatars.githubusercontent.com/u/4567245?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/john-bodley", + "html_url": "https://github.com/john-bodley", + "followers_url": "https://api.github.com/users/john-bodley/followers", + "following_url": "https://api.github.com/users/john-bodley/following{/other_user}", + "gists_url": "https://api.github.com/users/john-bodley/gists{/gist_id}", + "starred_url": "https://api.github.com/users/john-bodley/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/john-bodley/subscriptions", + "organizations_url": "https://api.github.com/users/john-bodley/orgs", + "repos_url": "https://api.github.com/users/john-bodley/repos", + "events_url": "https://api.github.com/users/john-bodley/events{/privacy}", + "received_events_url": "https://api.github.com/users/john-bodley/received_events", + "type": "User", 
+ "site_admin": false + }, + "labels": [ + { + "id": 1330950144, + "node_id": "MDU6TGFiZWwxMzMwOTUwMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/S", + "name": "size/S", + "color": "C8B6FF", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 1, + "created_at": "2024-02-22T23:22:36Z", + "updated_at": "2024-02-23T14:06:55Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27217", + "html_url": "https://github.com/apache/superset/pull/27217", + "diff_url": "https://github.com/apache/superset/pull/27217.diff", + "patch_url": "https://github.com/apache/superset/pull/27217.patch", + "merged_at": null + }, + "body": "\r\n\r\n### SUMMARY\r\n\r\nThis PR addresses some regressions introducted in https://github.com/apache/superset/pull/26476 when `sqlparse` was replaced with `sqlglot` for SQL parsing. 
These issues were reported by our users who were unable to access their saved queries after we deployed Apache Superset 3.1.1.\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n\r\n\r\n### TESTING INSTRUCTIONS\r\n\r\nAdded unit tests.\r\n\r\n### ADDITIONAL INFORMATION\r\n\r\n\r\n- [ ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27217/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27217/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27213", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27213/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27213/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27213/events", + "html_url": "https://github.com/apache/superset/pull/27213", + "id": 2149675626, + "node_id": "PR_kwDOAlosUs5nrgxY", + "number": 27213, + "title": "fix(trino): bumping trino to fix hudi schema fetching", + "user": { + "login": "rusackas", + "id": 812905, + "node_id": "MDQ6VXNlcjgxMjkwNQ==", + "avatar_url": "https://avatars.githubusercontent.com/u/812905?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/rusackas", + 
"html_url": "https://github.com/rusackas", + "followers_url": "https://api.github.com/users/rusackas/followers", + "following_url": "https://api.github.com/users/rusackas/following{/other_user}", + "gists_url": "https://api.github.com/users/rusackas/gists{/gist_id}", + "starred_url": "https://api.github.com/users/rusackas/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/rusackas/subscriptions", + "organizations_url": "https://api.github.com/users/rusackas/orgs", + "repos_url": "https://api.github.com/users/rusackas/repos", + "events_url": "https://api.github.com/users/rusackas/events{/privacy}", + "received_events_url": "https://api.github.com/users/rusackas/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329183122, + "node_id": "MDU6TGFiZWwxMzI5MTgzMTIy", + "url": "https://api.github.com/repos/apache/superset/labels/size/XS", + "name": "size/XS", + "color": "E2D8FF", + "default": false, + "description": "" + }, + { + "id": 6599365865, + "node_id": "LA_kwDOAlosUs8AAAABiVpU6Q", + "url": "https://api.github.com/repos/apache/superset/labels/hold:review-after-release", + "name": "hold:review-after-release", + "color": "FBCA04", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 1, + "created_at": "2024-02-22T18:22:33Z", + "updated_at": "2024-02-22T18:36:12Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27213", + "html_url": "https://github.com/apache/superset/pull/27213", + "diff_url": "https://github.com/apache/superset/pull/27213.diff", + "patch_url": "https://github.com/apache/superset/pull/27213.patch", + "merged_at": null + }, + "body": "\r\n\r\n### SUMMARY\r\n\r\nBumps the Trino version as mentioned on 
https://github.com/apache/superset/issues/21945\r\nThis is an old bug and a small PR, so we can hold this during the 4.0 stabilization window. \r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n\r\n\r\n### TESTING INSTRUCTIONS\r\n\r\n\r\n### ADDITIONAL INFORMATION\r\n\r\n\r\n- [ ] Has associated issue: Fixes https://github.com/apache/superset/issues/21945\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27213/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27213/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27209", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27209/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27209/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27209/events", + "html_url": "https://github.com/apache/superset/pull/27209", + "id": 2149540646, + "node_id": "PR_kwDOAlosUs5nrCsi", + "number": 27209, + "title": "fix: Allow only dttm columns in comparison filter in Period over Period chart", + "user": { + "login": "kgabryje", + "id": 15073128, + "node_id": "MDQ6VXNlcjE1MDczMTI4", + "avatar_url": "https://avatars.githubusercontent.com/u/15073128?v=4", + 
"gravatar_id": "", + "url": "https://api.github.com/users/kgabryje", + "html_url": "https://github.com/kgabryje", + "followers_url": "https://api.github.com/users/kgabryje/followers", + "following_url": "https://api.github.com/users/kgabryje/following{/other_user}", + "gists_url": "https://api.github.com/users/kgabryje/gists{/gist_id}", + "starred_url": "https://api.github.com/users/kgabryje/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/kgabryje/subscriptions", + "organizations_url": "https://api.github.com/users/kgabryje/orgs", + "repos_url": "https://api.github.com/users/kgabryje/repos", + "events_url": "https://api.github.com/users/kgabryje/events{/privacy}", + "received_events_url": "https://api.github.com/users/kgabryje/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329203144, + "node_id": "MDU6TGFiZWwxMzI5MjAzMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/M", + "name": "size/M", + "color": "705FA3", + "default": false, + "description": "" + }, + { + "id": 5807047135, + "node_id": "LA_kwDOAlosUs8AAAABWiCB3w", + "url": "https://api.github.com/repos/apache/superset/labels/review-checkpoint", + "name": "review-checkpoint", + "color": "FF5A5F", + "default": false, + "description": "Last PR reviewed during the daily review standup" + }, + { + "id": 6424921785, + "node_id": "LA_kwDOAlosUs8AAAABfvSGuQ", + "url": "https://api.github.com/repos/apache/superset/labels/plugins", + "name": "plugins", + "color": "E795E1", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 1, + "created_at": "2024-02-22T17:14:33Z", + "updated_at": "2024-02-23T12:16:26Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27209", + "html_url": 
"https://github.com/apache/superset/pull/27209", + "diff_url": "https://github.com/apache/superset/pull/27209.diff", + "patch_url": "https://github.com/apache/superset/pull/27209.patch", + "merged_at": null + }, + "body": "### SUMMARY\r\nWhen user selected Custom range for comparison on Big Number With Time Period Comparison chart, the filter control would accept any column. That worked but didn't make much sense, so this PR restricts the control to accept only dttm columns for filtering.\r\nPlease note that user can still write custom sql which will filter anything, so that's just a UX improvement.\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n\r\n\r\nhttps://github.com/apache/superset/assets/15073128/b4a63051-72f4-4a3c-900d-67aabcc79ba1\r\n\r\n\r\n### TESTING INSTRUCTIONS\r\n1. Create Big Number With Time Period Comparison chart\r\n2. Select Custom range for comparison\r\n3. Verify that you can drag only temporal columns\r\n4. Verify that in the columns select in the popover you can only select temporal columns\r\n\r\n### ADDITIONAL INFORMATION\r\n\r\n\r\n- [ ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27209/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27209/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": 
"https://api.github.com/repos/apache/superset/issues/27200", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27200/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27200/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27200/events", + "html_url": "https://github.com/apache/superset/issues/27200", + "id": 2148666735, + "node_id": "I_kwDOAlosUs6AEg1v", + "number": 27200, + "title": "Inter font chosen in Table does not allow correct alignment of dates and numbers", + "user": { + "login": "squalou", + "id": 4623644, + "node_id": "MDQ6VXNlcjQ2MjM2NDQ=", + "avatar_url": "https://avatars.githubusercontent.com/u/4623644?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/squalou", + "html_url": "https://github.com/squalou", + "followers_url": "https://api.github.com/users/squalou/followers", + "following_url": "https://api.github.com/users/squalou/following{/other_user}", + "gists_url": "https://api.github.com/users/squalou/gists{/gist_id}", + "starred_url": "https://api.github.com/users/squalou/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/squalou/subscriptions", + "organizations_url": "https://api.github.com/users/squalou/orgs", + "repos_url": "https://api.github.com/users/squalou/repos", + "events_url": "https://api.github.com/users/squalou/events{/privacy}", + "received_events_url": "https://api.github.com/users/squalou/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-22T09:56:48Z", + "updated_at": "2024-02-22T15:08:14Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "### Bug description\r\n\r\nThis reminds me of an old bug I opened a long time 
ago\r\n(for reference : https://github.com/apache/superset/issues/17633)\r\n\r\nFont used in Table are not using \"fixed width\" - at least for numbers.\r\n\r\nAs a results, dates are misaligned.\r\n\r\nFont reported by html console is : `Inter, Helvetica, Arial`\r\n\r\nIf I remove `Inter`, everything is fine, Hevletica is fine, just like in my previous issue patch :)\r\n\r\nOne may argue that Helvetica is not nice, but I favor usability over bells and whistle. (and my end users too). Once again, Inter seems to be a poor choice.\r\n\r\n\r\nAlso, \"Table\" view is not the only one affected by this, the default \"Results\" are also displayed using this font, but ok, I can admit it's not a \"visualisation\" item so it is not intended to be nice. (Well, if so, don't try to put a nice font in it to begin with ? :) )\r\n\r\nScreenshot may not be the more obvious one, I can provide more.\r\n\r\n\r\n### How to reproduce the bug\r\n\r\nOpen any Table displaying some dates.\r\n\r\n### Screenshots/recordings\r\n\r\n![2024-02-22_10-44_1](https://github.com/apache/superset/assets/4623644/393ce8d1-7f79-46f8-914d-263118233e83)\r\n\r\n\r\n### Superset version\r\n\r\nmaster / latest-dev\r\n\r\n### Python version\r\n\r\n3.9\r\n\r\n### Node version\r\n\r\n16\r\n\r\n### Browser\r\n\r\nFirefox\r\n\r\n### Additional context\r\n\r\nSeen this on 4.0.0-rc1\r\nand on 3.0.0\r\n\r\n### Checklist\r\n\r\n- [X] I have searched Superset docs and Slack and didn't find a solution to my problem.\r\n- [X] I have searched the GitHub issue tracker and didn't find a similar bug report.\r\n- [X] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the \"additional context\" section.", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27200/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": 
"https://api.github.com/repos/apache/superset/issues/27200/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27198", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27198/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27198/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27198/events", + "html_url": "https://github.com/apache/superset/pull/27198", + "id": 2147922483, + "node_id": "PR_kwDOAlosUs5nlhBH", + "number": 27198, + "title": "chore(node): bumping Superset to Node 18", + "user": { + "login": "rusackas", + "id": 812905, + "node_id": "MDQ6VXNlcjgxMjkwNQ==", + "avatar_url": "https://avatars.githubusercontent.com/u/812905?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/rusackas", + "html_url": "https://github.com/rusackas", + "followers_url": "https://api.github.com/users/rusackas/followers", + "following_url": "https://api.github.com/users/rusackas/following{/other_user}", + "gists_url": "https://api.github.com/users/rusackas/gists{/gist_id}", + "starred_url": "https://api.github.com/users/rusackas/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/rusackas/subscriptions", + "organizations_url": "https://api.github.com/users/rusackas/orgs", + "repos_url": "https://api.github.com/users/rusackas/repos", + "events_url": "https://api.github.com/users/rusackas/events{/privacy}", + "received_events_url": "https://api.github.com/users/rusackas/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329203144, + "node_id": "MDU6TGFiZWwxMzI5MjAzMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/M", + "name": "size/M", + "color": "705FA3", + "default": false, + "description": "" + }, + { + "id": 6496358600, + "node_id": 
"LA_kwDOAlosUs8AAAABgzaQyA", + "url": "https://api.github.com/repos/apache/superset/labels/dependencies:npm", + "name": "dependencies:npm", + "color": "ededed", + "default": false, + "description": null + }, + { + "id": 6501451649, + "node_id": "LA_kwDOAlosUs8AAAABg4RHgQ", + "url": "https://api.github.com/repos/apache/superset/labels/github_actions", + "name": "github_actions", + "color": "000000", + "default": false, + "description": "Pull requests that update GitHub Actions code" + }, + { + "id": 6599365865, + "node_id": "LA_kwDOAlosUs8AAAABiVpU6Q", + "url": "https://api.github.com/repos/apache/superset/labels/hold:review-after-release", + "name": "hold:review-after-release", + "color": "FBCA04", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 1, + "created_at": "2024-02-22T00:07:53Z", + "updated_at": "2024-02-22T18:02:51Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27198", + "html_url": "https://github.com/apache/superset/pull/27198", + "diff_url": "https://github.com/apache/superset/pull/27198.diff", + "patch_url": "https://github.com/apache/superset/pull/27198.patch", + "merged_at": null + }, + "body": "\r\n\r\n### SUMMARY\r\n\r\nNot sure all the actions bumps are needed, but I'll just aim high and hope we can get everything updated.\r\n\r\nThis does NOT bump the websocket app or docs to Node 18.\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n\r\n\r\n### TESTING INSTRUCTIONS\r\n\r\n\r\n### ADDITIONAL INFORMATION\r\n\r\n\r\n- [ ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - 
[ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27198/reactions", + "total_count": 1, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 1, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27198/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27197", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27197/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27197/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27197/events", + "html_url": "https://github.com/apache/superset/pull/27197", + "id": 2147916065, + "node_id": "PR_kwDOAlosUs5nlfon", + "number": 27197, + "title": "feat(jinja): current_user_email macro", + "user": { + "login": "Vitor-Avila", + "id": 96086495, + "node_id": "U_kgDOBbop3w", + "avatar_url": "https://avatars.githubusercontent.com/u/96086495?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Vitor-Avila", + "html_url": "https://github.com/Vitor-Avila", + "followers_url": "https://api.github.com/users/Vitor-Avila/followers", + "following_url": "https://api.github.com/users/Vitor-Avila/following{/other_user}", + "gists_url": "https://api.github.com/users/Vitor-Avila/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Vitor-Avila/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Vitor-Avila/subscriptions", + "organizations_url": "https://api.github.com/users/Vitor-Avila/orgs", + "repos_url": "https://api.github.com/users/Vitor-Avila/repos", + 
"events_url": "https://api.github.com/users/Vitor-Avila/events{/privacy}", + "received_events_url": "https://api.github.com/users/Vitor-Avila/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1331560260, + "node_id": "MDU6TGFiZWwxMzMxNTYwMjYw", + "url": "https://api.github.com/repos/apache/superset/labels/size/L", + "name": "size/L", + "color": "6D53B9", + "default": false, + "description": "" + }, + { + "id": 3815127807, + "node_id": "LA_kwDOAlosUs7jZjr_", + "url": "https://api.github.com/repos/apache/superset/labels/doc", + "name": "doc", + "color": "6f9eaf", + "default": false, + "description": "Namespace | Anything related to documentation" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 5, + "created_at": "2024-02-22T00:01:35Z", + "updated_at": "2024-02-23T15:19:32Z", + "closed_at": null, + "author_association": "CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27197", + "html_url": "https://github.com/apache/superset/pull/27197", + "diff_url": "https://github.com/apache/superset/pull/27197.diff", + "patch_url": "https://github.com/apache/superset/pull/27197.patch", + "merged_at": null + }, + "body": "### SUMMARY\r\nThis PR adds a new Jinja macro: `{{current_user_email()}}`. This macro can be specially useful when applying RLS based on the logged in user, as it's typically easier for organizations to have the email address information available in their warehouse (as opposed to the Superset account's `username` or `id`).\r\n\r\nFixes https://github.com/apache/superset/issues/26808.\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\nNo UI changes. \r\n\r\n### TESTING INSTRUCTIONS\r\n1. Navigate to SQL Lab.\r\n2. Execute `select '{{current_user_email()}}' as my_email;`\r\n3. 
Validate that your email address is properly displayed.\r\n\r\nTests also added.\r\n\r\n### ADDITIONAL INFORMATION\r\n- [x] Has associated issue: https://github.com/apache/superset/issues/26808\r\n- [x] Required feature flags: `ENABLE_TEMPLATE_PROCESSING`\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [x] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27197/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27197/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27195", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27195/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27195/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27195/events", + "html_url": "https://github.com/apache/superset/pull/27195", + "id": 2147461025, + "node_id": "PR_kwDOAlosUs5nj724", + "number": 27195, + "title": "fix: Upgrade eyes-cypress to latest", + "user": { + "login": "geido", + "id": 60598000, + "node_id": "MDQ6VXNlcjYwNTk4MDAw", + "avatar_url": "https://avatars.githubusercontent.com/u/60598000?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/geido", + "html_url": "https://github.com/geido", + "followers_url": "https://api.github.com/users/geido/followers", + 
"following_url": "https://api.github.com/users/geido/following{/other_user}", + "gists_url": "https://api.github.com/users/geido/gists{/gist_id}", + "starred_url": "https://api.github.com/users/geido/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/geido/subscriptions", + "organizations_url": "https://api.github.com/users/geido/orgs", + "repos_url": "https://api.github.com/users/geido/repos", + "events_url": "https://api.github.com/users/geido/events{/privacy}", + "received_events_url": "https://api.github.com/users/geido/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329203144, + "node_id": "MDU6TGFiZWwxMzI5MjAzMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/M", + "name": "size/M", + "color": "705FA3", + "default": false, + "description": "" + }, + { + "id": 6496358600, + "node_id": "LA_kwDOAlosUs8AAAABgzaQyA", + "url": "https://api.github.com/repos/apache/superset/labels/dependencies:npm", + "name": "dependencies:npm", + "color": "ededed", + "default": false, + "description": null + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 2, + "created_at": "2024-02-21T18:46:44Z", + "updated_at": "2024-02-21T19:33:32Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": true, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27195", + "html_url": "https://github.com/apache/superset/pull/27195", + "diff_url": "https://github.com/apache/superset/pull/27195.diff", + "patch_url": "https://github.com/apache/superset/pull/27195.patch", + "merged_at": null + }, + "body": "\r\n\r\n### SUMMARY\r\nFixes #27184\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\nN.A.\r\n\r\n### TESTING INSTRUCTIONS\r\nCI should pass\r\n\r\n### ADDITIONAL INFORMATION\r\n\r\n\r\n- [x] Has associated issue: #27184\r\n- [ ] Required feature flags:\r\n- [ 
] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27195/reactions", + "total_count": 2, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 2, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27195/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27194", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27194/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27194/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27194/events", + "html_url": "https://github.com/apache/superset/issues/27194", + "id": 2147431685, + "node_id": "I_kwDOAlosUs5__zUF", + "number": 27194, + "title": "[SIP] Ability to assign a unique asset id to a superset dashboard / visual", + "user": { + "login": "hondyman", + "id": 54151500, + "node_id": "MDQ6VXNlcjU0MTUxNTAw", + "avatar_url": "https://avatars.githubusercontent.com/u/54151500?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/hondyman", + "html_url": "https://github.com/hondyman", + "followers_url": "https://api.github.com/users/hondyman/followers", + "following_url": "https://api.github.com/users/hondyman/following{/other_user}", + "gists_url": "https://api.github.com/users/hondyman/gists{/gist_id}", + "starred_url": 
"https://api.github.com/users/hondyman/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/hondyman/subscriptions", + "organizations_url": "https://api.github.com/users/hondyman/orgs", + "repos_url": "https://api.github.com/users/hondyman/repos", + "events_url": "https://api.github.com/users/hondyman/events{/privacy}", + "received_events_url": "https://api.github.com/users/hondyman/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1010577363, + "node_id": "MDU6TGFiZWwxMDEwNTc3MzYz", + "url": "https://api.github.com/repos/apache/superset/labels/sip", + "name": "sip", + "color": "D7B4E6", + "default": false, + "description": "Superset Improvement Proposal" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-21T18:29:40Z", + "updated_at": "2024-02-21T18:29:40Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "\r\n## [SIP] Proposal for Adding additional metadata to a Superset Dashboard for Data Governance\r\n\r\n### Motivation\r\nWe need to provide information on various classified data used in reports to Regulatory bodies globally\r\n\r\nDescription of the problem to be solved.\r\nIn large regulated industries, we have to keep track of classified data for various global reporting such a PII for GDPR, financials metrics for Dodd/Frank and other regulatory bodies. We have to be able to say where / when and how certain data sets are used inside the org. To do this we assign an asset id (UUID) to each report, process and interface we report, xfer or export data from and we manage these through Data Governance.\r\n\r\n### Proposed Change\r\nSuperset has metadata but its not editable, users should be able to add additional metadata to reports that allows for easier identification and governance. 
In our case we need to add an asset id to the metadata so we can track its usage.\r\nI have provided a logical diagram of how we would see the process working in our org. The data catalog manages all our metadata so it will have an inventory of all dashboards power bi, superset and looker each is assigned an asset id and is actively managed by DG. \r\n\r\nBecause superset uses integers inside an environment such as Dev, QA and prod the integers in each environment can be different. The asset id does not change between environments, its a consistent id thats callable from our portal.\r\n\r\n![Screenshot 2024-02-21 1 20 16 PM](https://github.com/apache/superset/assets/54151500/a67202da-9f39-4103-9dbe-5b4bb34c5d22)\r\n\r\n\r\n### New or Changed Public Interfaces\r\n\r\nTypically our Data Governance tool assigns asset ids (UUID) and we push them into the various assets such as dashboards either manually or via api. If a new metadata area was exposed we will expect this to be available via Swagger API for insert, read and update.\r\n\r\n\r\n\r\n\r\n### New dependencies\r\n\r\nNot Applicable\r\n\r\n### Migration Plan and Compatibility\r\n\r\nNot Applicable\r\n\r\n### Rejected Alternatives\r\n\r\nNot Applicable\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27194/reactions", + "total_count": 1, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 1 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27194/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27187", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27187/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27187/comments", + "events_url": 
"https://api.github.com/repos/apache/superset/issues/27187/events", + "html_url": "https://github.com/apache/superset/pull/27187", + "id": 2146739162, + "node_id": "PR_kwDOAlosUs5nhcQh", + "number": 27187, + "title": "chore: numexpr to fix CVE-2023-39631⁠ (2.8.4 => 2.9.0)", + "user": { + "login": "nigzak", + "id": 102737855, + "node_id": "U_kgDOBh-nvw", + "avatar_url": "https://avatars.githubusercontent.com/u/102737855?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/nigzak", + "html_url": "https://github.com/nigzak", + "followers_url": "https://api.github.com/users/nigzak/followers", + "following_url": "https://api.github.com/users/nigzak/following{/other_user}", + "gists_url": "https://api.github.com/users/nigzak/gists{/gist_id}", + "starred_url": "https://api.github.com/users/nigzak/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/nigzak/subscriptions", + "organizations_url": "https://api.github.com/users/nigzak/orgs", + "repos_url": "https://api.github.com/users/nigzak/repos", + "events_url": "https://api.github.com/users/nigzak/events{/privacy}", + "received_events_url": "https://api.github.com/users/nigzak/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1330950144, + "node_id": "MDU6TGFiZWwxMzMwOTUwMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/S", + "name": "size/S", + "color": "C8B6FF", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 10, + "created_at": "2024-02-21T13:18:37Z", + "updated_at": "2024-02-23T16:46:04Z", + "closed_at": null, + "author_association": "FIRST_TIME_CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27187", + "html_url": "https://github.com/apache/superset/pull/27187", + "diff_url": 
"https://github.com/apache/superset/pull/27187.diff", + "patch_url": "https://github.com/apache/superset/pull/27187.patch", + "merged_at": null + }, + "body": "<!---\r\nPlease write the PR title following the conventions at https://www.conventionalcommits.org/en/v1.0.0/\r\nExample:\r\nfix(dashboard): load charts correctly\r\n-->\r\n\r\n### SUMMARY\r\nsuperset 3.1.0 / 3.1.1 has a critical finding: CVE-2023-39631⁠ because of numexpr 2.8.4\r\n\r\n### TESTING INSTRUCTIONS\r\nunknown (sorry), this pull request is to update the numexpr to a non-faulty version depending the CVE (2.9.0 is the current latest available version based on https://github.com/pydata/numexpr)\r\nTo test \"my\" finding: open final image in docker scout => no finding for numexpr is there anymore\r\n\r\n![image](https://github.com/apache/superset/assets/102737855/ad7499cf-2d55-4862-af37-9ae61312639c)\r\n\r\n\r\n### ADDITIONAL INFORMATION\r\n<!--- Check any relevant boxes with \"x\" -->\r\n<!--- HINT: Include \"Fixes #nnn\" if you are fixing an existing issue -->\r\n- [x] Has associated issue: https://github.com/apache/superset/issues/26967\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n\r\nHINT: This is my first pull request ... 
if I made something wrong please let me know that I can make a next one better :)", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27187/reactions", + "total_count": 1, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 1, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27187/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27186", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27186/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27186/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27186/events", + "html_url": "https://github.com/apache/superset/pull/27186", + "id": 2146550133, + "node_id": "PR_kwDOAlosUs5ngyLa", + "number": 27186, + "title": "fix: SSH Tunnel configuration settings ", + "user": { + "login": "geido", + "id": 60598000, + "node_id": "MDQ6VXNlcjYwNTk4MDAw", + "avatar_url": "https://avatars.githubusercontent.com/u/60598000?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/geido", + "html_url": "https://github.com/geido", + "followers_url": "https://api.github.com/users/geido/followers", + "following_url": "https://api.github.com/users/geido/following{/other_user}", + "gists_url": "https://api.github.com/users/geido/gists{/gist_id}", + "starred_url": "https://api.github.com/users/geido/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/geido/subscriptions", + "organizations_url": "https://api.github.com/users/geido/orgs", + "repos_url": "https://api.github.com/users/geido/repos", + "events_url": "https://api.github.com/users/geido/events{/privacy}", + "received_events_url": "https://api.github.com/users/geido/received_events", + "type": "User", + 
"site_admin": false + }, + "labels": [ + { + "id": 1329338104, + "node_id": "MDU6TGFiZWwxMzI5MzM4MTA0", + "url": "https://api.github.com/repos/apache/superset/labels/size/XL", + "name": "size/XL", + "color": "5C3BBc", + "default": false, + "description": "" + }, + { + "id": 2859155980, + "node_id": "MDU6TGFiZWwyODU5MTU1OTgw", + "url": "https://api.github.com/repos/apache/superset/labels/api", + "name": "api", + "color": "91C99C", + "default": false, + "description": "Related to the REST API" + }, + { + "id": 6502628977, + "node_id": "LA_kwDOAlosUs8AAAABg5Y-cQ", + "url": "https://api.github.com/repos/apache/superset/labels/packages", + "name": "packages", + "color": "E795E1", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 1, + "created_at": "2024-02-21T11:46:12Z", + "updated_at": "2024-02-23T16:44:57Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27186", + "html_url": "https://github.com/apache/superset/pull/27186", + "diff_url": "https://github.com/apache/superset/pull/27186.diff", + "patch_url": "https://github.com/apache/superset/pull/27186.patch", + "merged_at": null + }, + "body": "### SUMMARY\r\nThis PR fixes several issues with the SSH Tunnel option in the Database Modal which are related to edit the SSH Tunnel config, enable/disable the option, and misc fixes/enhancements. 
In particular:\r\n\r\n- Removes a duplicate `SSHTunnelSwitch` component from dynamic forms and merges the logics of both dynamic and SQLAlchemy forms into one overridable component\r\n- Fixes an issue that made removing the SSH tunnel config impossible when editing a database connection\r\n- Delegates the deletion of a ssh tunnel to the `UpdateDatabaseCommand`\r\n- Fixes an issue with the SSH tunnel config still being included in the request even when toggled off\r\n- Fixes an issue for which the switch was showing also with `SSH_TUNNELING` OFF. Saving would cause a \"SSH Tunneling is not enabled\" error\r\n- Surfaces to the user a missing port error in the SQLAlchemy uri when enabling SSH Tunnel (the port is required for the tunnel to work)\r\n- Clean the SSH Tunnel config from unnecessary credentials when switching from password to private key\r\n- Improves the UX so that toggling ON and OFF the SSH Tunnel config won't clear the form until saved\r\n- Misc fixes\r\n\r\n### BEFORE\r\n\r\nhttps://github.com/apache/superset/assets/60598000/c38052d6-53b0-47d8-a87a-391a647085d9\r\n\r\n### AFTER\r\n\r\nhttps://github.com/apache/superset/assets/60598000/f65ad7da-962f-4500-82f2-eb42bd2139ba\r\n\r\n### TESTING INSTRUCTIONS\r\n1. Create a new database connection using a SQLAlchemy uri and dynamic forms, make sure SSH Tunnel can be toggled ON and OFF and that configs can be saved successfully\r\n2. Edit a database connection that has a SSH Tunnel config, make sure the SSH Tunnel can be toggled ON and OFF and that it keeps the config available to the user until toggled OFF and saved\r\n3. Edit a database connection that has a SSH Tunnel config, make sure any change to the SSH tunnel can be saved successfully\r\n4. Re-open the database config and make sure the SSH Tunnel has been deleted successfully\r\n5. 
Create or edit a database connection using a SQLAlchemy uri without specifying the port, make sure an error is surfaced to the UI informing that specifying a port is necessary\r\n\r\n### ADDITIONAL INFORMATION\r\n<!--- Check any relevant boxes with \"x\" -->\r\n<!--- HINT: Include \"Fixes #nnn\" if you are fixing an existing issue -->\r\n- [ ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27186/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27186/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27184", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27184/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27184/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27184/events", + "html_url": "https://github.com/apache/superset/issues/27184", + "id": 2146285004, + "node_id": "I_kwDOAlosUs5_7bXM", + "number": 27184, + "title": "Update dependencies in superset-frontend package.json ", + "user": { + "login": "MFIB00", + "id": 79359373, + "node_id": "MDQ6VXNlcjc5MzU5Mzcz", + "avatar_url": "https://avatars.githubusercontent.com/u/79359373?v=4", + "gravatar_id": "", 
+ "url": "https://api.github.com/users/MFIB00", + "html_url": "https://github.com/MFIB00", + "followers_url": "https://api.github.com/users/MFIB00/followers", + "following_url": "https://api.github.com/users/MFIB00/following{/other_user}", + "gists_url": "https://api.github.com/users/MFIB00/gists{/gist_id}", + "starred_url": "https://api.github.com/users/MFIB00/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/MFIB00/subscriptions", + "organizations_url": "https://api.github.com/users/MFIB00/orgs", + "repos_url": "https://api.github.com/users/MFIB00/repos", + "events_url": "https://api.github.com/users/MFIB00/events{/privacy}", + "received_events_url": "https://api.github.com/users/MFIB00/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": { + "login": "geido", + "id": 60598000, + "node_id": "MDQ6VXNlcjYwNTk4MDAw", + "avatar_url": "https://avatars.githubusercontent.com/u/60598000?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/geido", + "html_url": "https://github.com/geido", + "followers_url": "https://api.github.com/users/geido/followers", + "following_url": "https://api.github.com/users/geido/following{/other_user}", + "gists_url": "https://api.github.com/users/geido/gists{/gist_id}", + "starred_url": "https://api.github.com/users/geido/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/geido/subscriptions", + "organizations_url": "https://api.github.com/users/geido/orgs", + "repos_url": "https://api.github.com/users/geido/repos", + "events_url": "https://api.github.com/users/geido/events{/privacy}", + "received_events_url": "https://api.github.com/users/geido/received_events", + "type": "User", + "site_admin": false + }, + "assignees": [ + { + "login": "geido", + "id": 60598000, + "node_id": "MDQ6VXNlcjYwNTk4MDAw", + "avatar_url": "https://avatars.githubusercontent.com/u/60598000?v=4", + "gravatar_id": "", 
+ "url": "https://api.github.com/users/geido", + "html_url": "https://github.com/geido", + "followers_url": "https://api.github.com/users/geido/followers", + "following_url": "https://api.github.com/users/geido/following{/other_user}", + "gists_url": "https://api.github.com/users/geido/gists{/gist_id}", + "starred_url": "https://api.github.com/users/geido/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/geido/subscriptions", + "organizations_url": "https://api.github.com/users/geido/orgs", + "repos_url": "https://api.github.com/users/geido/repos", + "events_url": "https://api.github.com/users/geido/events{/privacy}", + "received_events_url": "https://api.github.com/users/geido/received_events", + "type": "User", + "site_admin": false + } + ], + "milestone": null, + "comments": 0, + "created_at": "2024-02-21T09:47:19Z", + "updated_at": "2024-02-21T14:22:16Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "### Bug description\n\nI've found that in cyperset-fronted package.json file has a \"@applitools/eyes-cypress\" libary that outdated and has critical vulnerability in vm2. \r\nhttps://github.com/apache/superset/blob/3818da850957c779e8d24071a4fc7302cd053959/superset-frontend/cypress-base/package.json#L19\n\n### How to reproduce the bug\n\n1. Go to https://github.com/apache/superset/blob/3818da850957c779e8d24071a4fc7302cd053959/superset-frontend/cypress-base/package.json#L19 \r\n2. Check @applitools/eyes-cypress@3.29.1 for vulnerabilities\r\n3. 
Update @applitools/eyes-cypress@3.29.1 to 3.38.0\n\n### Screenshots/recordings\n\n![image](https://github.com/apache/superset/assets/79359373/303d6897-eb44-4b20-9385-79d72493913f)\r\n\n\n### Superset version\n\nmaster / latest-dev\n\n### Python version\n\n3.9\n\n### Node version\n\n16\n\n### Browser\n\nChrome\n\n### Additional context\n\n_No response_\n\n### Checklist\n\n- [X] I have searched Superset docs and Slack and didn't find a solution to my problem.\n- [X] I have searched the GitHub issue tracker and didn't find a similar bug report.\n- [X] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the \"additional context\" section.", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27184/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27184/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27177", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27177/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27177/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27177/events", + "html_url": "https://github.com/apache/superset/issues/27177", + "id": 2145178842, + "node_id": "I_kwDOAlosUs5_3NTa", + "number": 27177, + "title": "Error on embedded dashboard after upgrading to 3.1.1: \"Guest user cannot modify chart payload\"", + "user": { + "login": "rscarborough1996", + "id": 106171897, + "node_id": "U_kgDOBlQN-Q", + "avatar_url": "https://avatars.githubusercontent.com/u/106171897?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/rscarborough1996", + 
"html_url": "https://github.com/rscarborough1996", + "followers_url": "https://api.github.com/users/rscarborough1996/followers", + "following_url": "https://api.github.com/users/rscarborough1996/following{/other_user}", + "gists_url": "https://api.github.com/users/rscarborough1996/gists{/gist_id}", + "starred_url": "https://api.github.com/users/rscarborough1996/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/rscarborough1996/subscriptions", + "organizations_url": "https://api.github.com/users/rscarborough1996/orgs", + "repos_url": "https://api.github.com/users/rscarborough1996/repos", + "events_url": "https://api.github.com/users/rscarborough1996/events{/privacy}", + "received_events_url": "https://api.github.com/users/rscarborough1996/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": { + "login": "betodealmeida", + "id": 1534870, + "node_id": "MDQ6VXNlcjE1MzQ4NzA=", + "avatar_url": "https://avatars.githubusercontent.com/u/1534870?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/betodealmeida", + "html_url": "https://github.com/betodealmeida", + "followers_url": "https://api.github.com/users/betodealmeida/followers", + "following_url": "https://api.github.com/users/betodealmeida/following{/other_user}", + "gists_url": "https://api.github.com/users/betodealmeida/gists{/gist_id}", + "starred_url": "https://api.github.com/users/betodealmeida/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/betodealmeida/subscriptions", + "organizations_url": "https://api.github.com/users/betodealmeida/orgs", + "repos_url": "https://api.github.com/users/betodealmeida/repos", + "events_url": "https://api.github.com/users/betodealmeida/events{/privacy}", + "received_events_url": "https://api.github.com/users/betodealmeida/received_events", + "type": "User", + "site_admin": false + }, + "assignees": [ + { + "login": 
"betodealmeida", + "id": 1534870, + "node_id": "MDQ6VXNlcjE1MzQ4NzA=", + "avatar_url": "https://avatars.githubusercontent.com/u/1534870?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/betodealmeida", + "html_url": "https://github.com/betodealmeida", + "followers_url": "https://api.github.com/users/betodealmeida/followers", + "following_url": "https://api.github.com/users/betodealmeida/following{/other_user}", + "gists_url": "https://api.github.com/users/betodealmeida/gists{/gist_id}", + "starred_url": "https://api.github.com/users/betodealmeida/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/betodealmeida/subscriptions", + "organizations_url": "https://api.github.com/users/betodealmeida/orgs", + "repos_url": "https://api.github.com/users/betodealmeida/repos", + "events_url": "https://api.github.com/users/betodealmeida/events{/privacy}", + "received_events_url": "https://api.github.com/users/betodealmeida/received_events", + "type": "User", + "site_admin": false + } + ], + "milestone": null, + "comments": 3, + "created_at": "2024-02-20T20:07:26Z", + "updated_at": "2024-02-21T15:45:38Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "### Bug description\n\nAfter upgrading from 3.1.0 to 3.1.1, dashboards and filters no longer work and give this error: \"Guest user cannot modify chart payload\". I didn't change any permissions, and I even tried changing GUEST_ROLE_NAME to \"Admin\", but it still didn't work. I can see that the POST request to /api/v1/chart/data is coming back with 403, but I can't tell what is causing it.\n\n### How to reproduce the bug\n\nUsing the Docker 3.1.1 image:\r\n1. Create an embedded dashboard app\r\n2. 
Try to use it\n\n### Screenshots/recordings\n\n_No response_\n\n### Superset version\n\n3.1.1\n\n### Python version\n\nI don't know\n\n### Node version\n\nI don't know\n\n### Browser\n\nChrome\n\n### Additional context\n\nLogs:\r\nSupersetErrorException\r\nTraceback (most recent call last):\r\n File \"/usr/local/lib/python3.9/site-packages/flask/app.py\", line 1823, in full_dispatch_request\r\n rv = self.dispatch_request()\r\n File \"/usr/local/lib/python3.9/site-packages/flask/app.py\", line 1799, in dispatch_request\r\n return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)\r\n File \"/usr/local/lib/python3.9/site-packages/flask_appbuilder/security/decorators.py\", line 95, in wraps\r\n return f(self, *args, **kwargs)\r\n File \"/app/superset/views/base_api.py\", line 127, in wraps\r\n raise ex\r\n File \"/app/superset/views/base_api.py\", line 121, in wraps\r\n duration, response = time_function(f, self, *args, **kwargs)\r\n File \"/app/superset/utils/core.py\", line 1463, in time_function\r\n response = func(*args, **kwargs)\r\n File \"/app/superset/utils/log.py\", line 255, in wrapper\r\n value = f(*args, **kwargs)\r\n File \"/app/superset/charts/data/api.py\", line 235, in data\r\n command.validate()\r\n File \"/app/superset/commands/chart/data/get_data_command.py\", line 68, in validate\r\n self._query_context.raise_for_access()\r\n File \"/app/superset/common/query_context.py\", line 137, in raise_for_access\r\n self._processor.raise_for_access()\r\n File \"/app/superset/common/query_context_processor.py\", line 754, in raise_for_access\r\n security_manager.raise_for_access(query_context=self._query_context)\r\n File \"/app/superset/security/manager.py\", line 1960, in raise_for_access\r\n raise SupersetSecurityException(\r\nsuperset.exceptions.SupersetSecurityException: Guest user cannot modify chart payload\r\n2024-02-20 19:57:34,465:WARNING:superset.views.base:SupersetErrorException\r\nTraceback (most recent call last):\r\n File 
\"/usr/local/lib/python3.9/site-packages/flask/app.py\", line 1823, in full_dispatch_request\r\n rv = self.dispatch_request()\r\n File \"/usr/local/lib/python3.9/site-packages/flask/app.py\", line 1799, in dispatch_request\r\n return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)\r\n File \"/usr/local/lib/python3.9/site-packages/flask_appbuilder/security/decorators.py\", line 95, in wraps\r\n return f(self, *args, **kwargs)\r\n File \"/app/superset/views/base_api.py\", line 127, in wraps\r\n raise ex\r\n File \"/app/superset/views/base_api.py\", line 121, in wraps\r\n duration, response = time_function(f, self, *args, **kwargs)\r\n File \"/app/superset/utils/core.py\", line 1463, in time_function\r\n response = func(*args, **kwargs)\r\n File \"/app/superset/utils/log.py\", line 255, in wrapper\r\n value = f(*args, **kwargs)\r\n File \"/app/superset/charts/data/api.py\", line 235, in data\r\n command.validate()\r\n File \"/app/superset/commands/chart/data/get_data_command.py\", line 68, in validate\r\n self._query_context.raise_for_access()\r\n File \"/app/superset/common/query_context.py\", line 137, in raise_for_access\r\n self._processor.raise_for_access()\r\n File \"/app/superset/common/query_context_processor.py\", line 754, in raise_for_access\r\n security_manager.raise_for_access(query_context=self._query_context)\r\n File \"/app/superset/security/manager.py\", line 1960, in raise_for_access\r\n raise SupersetSecurityException(\r\nsuperset.exceptions.SupersetSecurityException: Guest user cannot modify chart payload\r\n172.20.0.1 - - [20/Feb/2024:19:57:34 +0000] \"POST /api/v1/chart/data?form_data=%7B%22slice_id%22%3A108%7D&dashboard_id=65 HTTP/1.1\" 403 149 \"http://localhost:8088/embedded/3f11daf2-84ac-4c8f-80aa-e9310b488fe7\" \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36\"\n\n### Checklist\n\n- [X] I have searched Superset docs and Slack and didn't find a solution to my 
problem.\n- [X] I have searched the GitHub issue tracker and didn't find a similar bug report.\n- [X] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the \"additional context\" section.", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27177/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27177/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27174", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27174/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27174/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27174/events", + "html_url": "https://github.com/apache/superset/pull/27174", + "id": 2144928406, + "node_id": "PR_kwDOAlosUs5nbL8x", + "number": 27174, + "title": "test: 4.0 test environment - DO NOT MERGE", + "user": { + "login": "michael-s-molina", + "id": 70410625, + "node_id": "MDQ6VXNlcjcwNDEwNjI1", + "avatar_url": "https://avatars.githubusercontent.com/u/70410625?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/michael-s-molina", + "html_url": "https://github.com/michael-s-molina", + "followers_url": "https://api.github.com/users/michael-s-molina/followers", + "following_url": "https://api.github.com/users/michael-s-molina/following{/other_user}", + "gists_url": "https://api.github.com/users/michael-s-molina/gists{/gist_id}", + "starred_url": "https://api.github.com/users/michael-s-molina/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/michael-s-molina/subscriptions", + "organizations_url": 
"https://api.github.com/users/michael-s-molina/orgs", + "repos_url": "https://api.github.com/users/michael-s-molina/repos", + "events_url": "https://api.github.com/users/michael-s-molina/events{/privacy}", + "received_events_url": "https://api.github.com/users/michael-s-molina/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329183122, + "node_id": "MDU6TGFiZWwxMzI5MTgzMTIy", + "url": "https://api.github.com/repos/apache/superset/labels/size/XS", + "name": "size/XS", + "color": "E2D8FF", + "default": false, + "description": "" + }, + { + "id": 2635185640, + "node_id": "MDU6TGFiZWwyNjM1MTg1NjQw", + "url": "https://api.github.com/repos/apache/superset/labels/hold!", + "name": "hold!", + "color": "FBCA04", + "default": false, + "description": "On hold" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 2, + "created_at": "2024-02-20T17:41:38Z", + "updated_at": "2024-02-22T19:19:12Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27174", + "html_url": "https://github.com/apache/superset/pull/27174", + "diff_url": "https://github.com/apache/superset/pull/27174.diff", + "patch_url": "https://github.com/apache/superset/pull/27174.patch", + "merged_at": null + }, + "body": "### SUMMARY\r\n4.0 test environment.\r\n\r\n### ADDITIONAL INFORMATION\r\n- [ ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + 
"reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27174/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27174/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27172", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27172/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27172/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27172/events", + "html_url": "https://github.com/apache/superset/issues/27172", + "id": 2144892974, + "node_id": "I_kwDOAlosUs5_2Hgu", + "number": 27172, + "title": "🐛 Assigning roles to users lead to integrity error message", + "user": { + "login": "hanslemm", + "id": 32077629, + "node_id": "MDQ6VXNlcjMyMDc3NjI5", + "avatar_url": "https://avatars.githubusercontent.com/u/32077629?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/hanslemm", + "html_url": "https://github.com/hanslemm", + "followers_url": "https://api.github.com/users/hanslemm/followers", + "following_url": "https://api.github.com/users/hanslemm/following{/other_user}", + "gists_url": "https://api.github.com/users/hanslemm/gists{/gist_id}", + "starred_url": "https://api.github.com/users/hanslemm/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/hanslemm/subscriptions", + "organizations_url": "https://api.github.com/users/hanslemm/orgs", + "repos_url": "https://api.github.com/users/hanslemm/repos", + "events_url": "https://api.github.com/users/hanslemm/events{/privacy}", + "received_events_url": "https://api.github.com/users/hanslemm/received_events", + "type": "User", + "site_admin": false + }, + 
"labels": [ + + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 1, + "created_at": "2024-02-20T17:22:44Z", + "updated_at": "2024-02-20T17:41:29Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "### Bug description\r\n\r\nWhen trying to assign any new role to a user, a `Integrity error, probably unique constraint` error pops up in the UI.\r\n\r\n### How to reproduce the bug\r\n\r\n1. Go to list users\r\n2. Edit a user\r\n3. Add a new role to the user\r\n4. Hit save\r\n5. UI lands in list users\r\n6. Error pops up\r\n\r\n### Screenshots/recordings\r\n\r\n![image](https://github.com/apache/superset/assets/32077629/b2c5a1a9-b98a-42e2-8b72-bf3369a94638)\r\n\r\n\r\n### Superset version\r\n\r\n3.1.0\r\n\r\n### Python version\r\n\r\n3.9\r\n\r\n### Node version\r\n\r\n16\r\n\r\n### Browser\r\n\r\nChrome\r\n\r\n### Additional context\r\n\r\n_No response_\r\n\r\n### Checklist\r\n\r\n- [X] I have searched Superset docs and Slack and didn't find a solution to my problem.\r\n- [X] I have searched the GitHub issue tracker and didn't find a similar bug report.\r\n- [X] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the \"additional context\" section.", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27172/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27172/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27160", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27160/labels{/name}", + "comments_url": 
"https://api.github.com/repos/apache/superset/issues/27160/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27160/events", + "html_url": "https://github.com/apache/superset/issues/27160", + "id": 2142502648, + "node_id": "I_kwDOAlosUs5_s_74", + "number": 27160, + "title": "Error warming up cache: Permanent Redirect", + "user": { + "login": "Cerberus112", + "id": 47566336, + "node_id": "MDQ6VXNlcjQ3NTY2MzM2", + "avatar_url": "https://avatars.githubusercontent.com/u/47566336?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/Cerberus112", + "html_url": "https://github.com/Cerberus112", + "followers_url": "https://api.github.com/users/Cerberus112/followers", + "following_url": "https://api.github.com/users/Cerberus112/following{/other_user}", + "gists_url": "https://api.github.com/users/Cerberus112/gists{/gist_id}", + "starred_url": "https://api.github.com/users/Cerberus112/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/Cerberus112/subscriptions", + "organizations_url": "https://api.github.com/users/Cerberus112/orgs", + "repos_url": "https://api.github.com/users/Cerberus112/repos", + "events_url": "https://api.github.com/users/Cerberus112/events{/privacy}", + "received_events_url": "https://api.github.com/users/Cerberus112/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": { + "login": "craig-rueda", + "id": 2595291, + "node_id": "MDQ6VXNlcjI1OTUyOTE=", + "avatar_url": "https://avatars.githubusercontent.com/u/2595291?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/craig-rueda", + "html_url": "https://github.com/craig-rueda", + "followers_url": "https://api.github.com/users/craig-rueda/followers", + "following_url": "https://api.github.com/users/craig-rueda/following{/other_user}", + "gists_url": "https://api.github.com/users/craig-rueda/gists{/gist_id}", + "starred_url": 
"https://api.github.com/users/craig-rueda/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/craig-rueda/subscriptions", + "organizations_url": "https://api.github.com/users/craig-rueda/orgs", + "repos_url": "https://api.github.com/users/craig-rueda/repos", + "events_url": "https://api.github.com/users/craig-rueda/events{/privacy}", + "received_events_url": "https://api.github.com/users/craig-rueda/received_events", + "type": "User", + "site_admin": false + }, + "assignees": [ + { + "login": "craig-rueda", + "id": 2595291, + "node_id": "MDQ6VXNlcjI1OTUyOTE=", + "avatar_url": "https://avatars.githubusercontent.com/u/2595291?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/craig-rueda", + "html_url": "https://github.com/craig-rueda", + "followers_url": "https://api.github.com/users/craig-rueda/followers", + "following_url": "https://api.github.com/users/craig-rueda/following{/other_user}", + "gists_url": "https://api.github.com/users/craig-rueda/gists{/gist_id}", + "starred_url": "https://api.github.com/users/craig-rueda/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/craig-rueda/subscriptions", + "organizations_url": "https://api.github.com/users/craig-rueda/orgs", + "repos_url": "https://api.github.com/users/craig-rueda/repos", + "events_url": "https://api.github.com/users/craig-rueda/events{/privacy}", + "received_events_url": "https://api.github.com/users/craig-rueda/received_events", + "type": "User", + "site_admin": false + }, + { + "login": "villebro", + "id": 33317356, + "node_id": "MDQ6VXNlcjMzMzE3MzU2", + "avatar_url": "https://avatars.githubusercontent.com/u/33317356?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/villebro", + "html_url": "https://github.com/villebro", + "followers_url": "https://api.github.com/users/villebro/followers", + "following_url": "https://api.github.com/users/villebro/following{/other_user}", + "gists_url": 
"https://api.github.com/users/villebro/gists{/gist_id}", + "starred_url": "https://api.github.com/users/villebro/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/villebro/subscriptions", + "organizations_url": "https://api.github.com/users/villebro/orgs", + "repos_url": "https://api.github.com/users/villebro/repos", + "events_url": "https://api.github.com/users/villebro/events{/privacy}", + "received_events_url": "https://api.github.com/users/villebro/received_events", + "type": "User", + "site_admin": false + } + ], + "milestone": null, + "comments": 2, + "created_at": "2024-02-19T14:30:12Z", + "updated_at": "2024-02-21T09:34:19Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "### Bug description\r\n\r\nCache warm-up is not functioning when configured using the latest version (3.1.1rc1) and the previous one (3.1.0) in kubernetes enviroment (with Helm chart version 0.2.15 or earlier).\r\n\r\nWhen the task is triggered, logs of superset worker throws 308 error trying to request the API endpoint.\r\n\r\n\r\nNote: Reports are working correctly on the same worker.\r\n\r\n### How to reproduce the bug\r\n\r\n1. Apply cache warm-up config in kubernetes enviroment\r\n2. 
Review the logs of superset worker\r\n\r\n### Screenshots/recordings\r\n\r\n_No response_\r\n\r\n### Superset version\r\n\r\nmaster / latest-dev\r\n\r\n### Python version\r\n\r\n3.9\r\n\r\n### Node version\r\n\r\n16\r\n\r\n### Browser\r\n\r\nChrome\r\n\r\n### Additional context\r\n\r\nThe values.yalm (cache warm-up configs):\r\n\r\n```\r\n celery_conf: |\r\n from celery.schedules import crontab\r\n class CeleryConfig:\r\n broker_url = f\"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0\"\r\n imports = (\r\n \"superset.sql_lab\",\r\n \"superset.tasks.cache\",\r\n \"superset.tasks.scheduler\",\r\n )\r\n result_backend = f\"redis://{env('REDIS_HOST')}:{env('REDIS_PORT')}/0\"\r\n task_annotations = {\r\n \"sql_lab.get_sql_results\": {\r\n \"rate_limit\": \"100/s\",\r\n },\r\n }\r\n beat_schedule = {\r\n \"reports.scheduler\": {\r\n \"task\": \"reports.scheduler\",\r\n \"schedule\": crontab(minute=\"*\", hour=\"*\"),\r\n },\r\n \"reports.prune_log\": {\r\n \"task\": \"reports.prune_log\",\r\n 'schedule': crontab(minute=0, hour=0),\r\n },\r\n 'cache-warmup-hourly': {\r\n \"task\": \"cache-warmup\",\r\n \"schedule\": crontab(minute=\"*/2\", hour=\"*\"), ## for testing\r\n \"kwargs\": {\r\n \"strategy_name\": \"dummy\"\r\n },\r\n }\r\n }\r\n CELERY_CONFIG = CeleryConfig\r\n THUMBNAIL_SELENIUM_USER = \"admin\"\r\n```\r\n\r\nSuperset worker logs:\r\n```\r\n[2024-02-19 14:26:00,227: INFO/ForkPoolWorker-1] fetch_url[ecc6c59f-1a81-472c-bb3c-25daf1ccb203]: Fetching http://url.of.my.site/superset/warm_up_cache/ with payload {\"chart_id\": 43}\r\n[2024-02-19` 14:22:00,263: ERROR/ForkPoolWorker-3] fetch_url[ecc6c59f-1a81-472c-bb3c-25daf1ccb203]: Error warming up cache!\r\nTraceback (most recent call last):\r\n File \"/app/superset/tasks/cache.py\", line 242, in fetch_url\r\n response = request.urlopen( # pylint: disable=consider-using-with\r\n File \"/usr/local/lib/python3.9/urllib/request.py\", line 214, in urlopen\r\n return opener.open(url, data, timeout)\r\n File 
\"/usr/local/lib/python3.9/urllib/request.py\", line 523, in open\r\n response = meth(req, response)\r\n File \"/usr/local/lib/python3.9/urllib/request.py\", line 632, in http_response\r\n response = self.parent.error(\r\n File \"/usr/local/lib/python3.9/urllib/request.py\", line 561, in error\r\n return self._call_chain(*args)\r\n File \"/usr/local/lib/python3.9/urllib/request.py\", line 494, in _call_chain\r\n result = func(*args)\r\n File \"/usr/local/lib/python3.9/urllib/request.py\", line 641, in http_error_default\r\n raise HTTPError(req.full_url, code, msg, hdrs, fp)\r\nurllib.error.HTTPError: HTTP Error 308: Permanent Redirect\r\n```\r\n\r\n### Checklist\r\n\r\n- [X] I have searched Superset docs and Slack and didn't find a solution to my problem.\r\n- [X] I have searched the GitHub issue tracker and didn't find a similar bug report.\r\n- [x] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the \"additional context\" section.", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27160/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27160/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27155", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27155/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27155/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27155/events", + "html_url": "https://github.com/apache/superset/issues/27155", + "id": 2141915818, + "node_id": "I_kwDOAlosUs5_qwqq", + "number": 27155, + "title": "Report&Alert Format", + "user": { + 
"login": "liangliangGit", + "id": 35413857, + "node_id": "MDQ6VXNlcjM1NDEzODU3", + "avatar_url": "https://avatars.githubusercontent.com/u/35413857?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/liangliangGit", + "html_url": "https://github.com/liangliangGit", + "followers_url": "https://api.github.com/users/liangliangGit/followers", + "following_url": "https://api.github.com/users/liangliangGit/following{/other_user}", + "gists_url": "https://api.github.com/users/liangliangGit/gists{/gist_id}", + "starred_url": "https://api.github.com/users/liangliangGit/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/liangliangGit/subscriptions", + "organizations_url": "https://api.github.com/users/liangliangGit/orgs", + "repos_url": "https://api.github.com/users/liangliangGit/repos", + "events_url": "https://api.github.com/users/liangliangGit/events{/privacy}", + "received_events_url": "https://api.github.com/users/liangliangGit/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 4, + "created_at": "2024-02-19T09:30:38Z", + "updated_at": "2024-02-23T03:16:31Z", + "closed_at": null, + "author_association": "NONE", + "active_lock_reason": null, + "body": "## Screenshot\r\n\r\n![image](https://github.com/apache/superset/assets/35413857/500532b7-06a1-4231-839b-bb1aec7d21be)\r\n\r\n## Description\r\nIn the Apache Superset web interface, I have set D3Format for my data, which displays correctly on the web page. 
However, when I generate a report for a chart and specify text content, the data in the received email loses its formatting.\r\n\r\n## Design input\r\nI hope the following formatted content can also be realized in the text format of emails.\r\n.1s (12345.432 => 10k)\r\n.3s (12345.432 => 12.3k)\r\n.4r (12345.432 => 12350)\r\n \"+, (12345.432 => +12,345) \"\r\n$,.2f (12345.432 => $12,345.43)\r\nDuration in ms (66000 => 1m 6s)\r\nDuration in ms (1.40008 => 1ms 400µs 80ns)\r\n%d/%m/%Y | 14/01/2019\r\n%m/%d/%Y | 01/14/2019\r\n%Y-%m-%d | 2019-01-14\r\n%d-%m-%Y %H:%M:%S | 14-01-2019 01:32:10\r\n%H:%M:%S | 01:32:10\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27155/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27155/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27154", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27154/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27154/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27154/events", + "html_url": "https://github.com/apache/superset/pull/27154", + "id": 2141799439, + "node_id": "PR_kwDOAlosUs5nQf9A", + "number": 27154, + "title": "fix(import-datasources): Use \"admin\" user as default for importing datasources", + "user": { + "login": "ddxv", + "id": 7601451, + "node_id": "MDQ6VXNlcjc2MDE0NTE=", + "avatar_url": "https://avatars.githubusercontent.com/u/7601451?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/ddxv", + "html_url": "https://github.com/ddxv", + "followers_url": "https://api.github.com/users/ddxv/followers", + "following_url": 
"https://api.github.com/users/ddxv/following{/other_user}", + "gists_url": "https://api.github.com/users/ddxv/gists{/gist_id}", + "starred_url": "https://api.github.com/users/ddxv/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/ddxv/subscriptions", + "organizations_url": "https://api.github.com/users/ddxv/orgs", + "repos_url": "https://api.github.com/users/ddxv/repos", + "events_url": "https://api.github.com/users/ddxv/events{/privacy}", + "received_events_url": "https://api.github.com/users/ddxv/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329203144, + "node_id": "MDU6TGFiZWwxMzI5MjAzMTQ0", + "url": "https://api.github.com/repos/apache/superset/labels/size/M", + "name": "size/M", + "color": "705FA3", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 7, + "created_at": "2024-02-19T08:29:46Z", + "updated_at": "2024-02-22T14:57:06Z", + "closed_at": null, + "author_association": "CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27154", + "html_url": "https://github.com/apache/superset/pull/27154", + "diff_url": "https://github.com/apache/superset/pull/27154.diff", + "patch_url": "https://github.com/apache/superset/pull/27154.patch", + "merged_at": null + }, + "body": "### SUMMARY\r\nThis is a potential fix for #17049 which prevents the use of CLI: `superset import-datasources` which appears to have been broken for for some time. Attempting to run `import-datasources` will result in a `NameError: user`.\r\n\r\nLooking at the history of `import-datasources` there used to be a --user which could be applied, but was removed. 
I saw though that other CLI functions use the following `g.user = security_manager.find_user(username=\"admin\")` to default to the admin user before the `security_manager` runs.\r\n\r\nPlease note, I'm new to Superset, so please help me check on whether using this is acceptable or an insecure workaround.\r\n\r\n### TESTING INSTRUCTIONS\r\n1) from CLI run: `superset import-datasources -p mydatasources.zip`\r\n\r\n### ADDITIONAL INFORMATION\r\n<!--- HINT: Include \"Fixes #nnn\" if you are fixing an existing issue -->\r\n- [x] Has associated issue: Fixes #17049\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27154/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27154/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27152", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27152/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27152/comments", + "events_url": "https://api.github.com/repos/apache/superset/issues/27152/events", + "html_url": "https://github.com/apache/superset/pull/27152", + "id": 2141045180, + "node_id": "PR_kwDOAlosUs5nN_Dt", + "number": 27152, + "title": "feat: support image set for superset 
worker separately in helm chart", + "user": { + "login": "josedev-union", + "id": 70741025, + "node_id": "MDQ6VXNlcjcwNzQxMDI1", + "avatar_url": "https://avatars.githubusercontent.com/u/70741025?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/josedev-union", + "html_url": "https://github.com/josedev-union", + "followers_url": "https://api.github.com/users/josedev-union/followers", + "following_url": "https://api.github.com/users/josedev-union/following{/other_user}", + "gists_url": "https://api.github.com/users/josedev-union/gists{/gist_id}", + "starred_url": "https://api.github.com/users/josedev-union/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/josedev-union/subscriptions", + "organizations_url": "https://api.github.com/users/josedev-union/orgs", + "repos_url": "https://api.github.com/users/josedev-union/repos", + "events_url": "https://api.github.com/users/josedev-union/events{/privacy}", + "received_events_url": "https://api.github.com/users/josedev-union/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1329183122, + "node_id": "MDU6TGFiZWwxMzI5MTgzMTIy", + "url": "https://api.github.com/repos/apache/superset/labels/size/XS", + "name": "size/XS", + "color": "E2D8FF", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 2, + "created_at": "2024-02-18T15:35:43Z", + "updated_at": "2024-02-20T16:58:39Z", + "closed_at": null, + "author_association": "CONTRIBUTOR", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27152", + "html_url": "https://github.com/apache/superset/pull/27152", + "diff_url": "https://github.com/apache/superset/pull/27152.diff", + "patch_url": "https://github.com/apache/superset/pull/27152.patch", + "merged_at": null + }, + "body": "<!---\r\nPlease write the PR title 
following the conventions at https://www.conventionalcommits.org/en/v1.0.0/\r\nExample:\r\nfix(dashboard): load charts correctly\r\n-->\r\n\r\n### SUMMARY\r\n<!--- Describe the change below, including rationale and design decisions -->\r\nTo enable alerts and reports, i need to use dev image or custom built image for superset worker. This PR allows for users to use dev image only for worker.\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n<!--- Skip this if not applicable -->\r\n\r\n### TESTING INSTRUCTIONS\r\n<!--- Required! What steps can be taken to manually verify the changes? -->\r\n\r\n### ADDITIONAL INFORMATION\r\n<!--- Check any relevant boxes with \"x\" -->\r\n<!--- HINT: Include \"Fixes #nnn\" if you are fixing an existing issue -->\r\n- [ ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27152/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27152/timeline", + "performed_via_github_app": null, + "state_reason": null + }, + { + "url": "https://api.github.com/repos/apache/superset/issues/27149", + "repository_url": "https://api.github.com/repos/apache/superset", + "labels_url": "https://api.github.com/repos/apache/superset/issues/27149/labels{/name}", + "comments_url": "https://api.github.com/repos/apache/superset/issues/27149/comments", + "events_url": 
"https://api.github.com/repos/apache/superset/issues/27149/events", + "html_url": "https://github.com/apache/superset/pull/27149", + "id": 2139908577, + "node_id": "PR_kwDOAlosUs5nKEga", + "number": 27149, + "title": "chore(tests): Remove ineffectual login", + "user": { + "login": "john-bodley", + "id": 4567245, + "node_id": "MDQ6VXNlcjQ1NjcyNDU=", + "avatar_url": "https://avatars.githubusercontent.com/u/4567245?v=4", + "gravatar_id": "", + "url": "https://api.github.com/users/john-bodley", + "html_url": "https://github.com/john-bodley", + "followers_url": "https://api.github.com/users/john-bodley/followers", + "following_url": "https://api.github.com/users/john-bodley/following{/other_user}", + "gists_url": "https://api.github.com/users/john-bodley/gists{/gist_id}", + "starred_url": "https://api.github.com/users/john-bodley/starred{/owner}{/repo}", + "subscriptions_url": "https://api.github.com/users/john-bodley/subscriptions", + "organizations_url": "https://api.github.com/users/john-bodley/orgs", + "repos_url": "https://api.github.com/users/john-bodley/repos", + "events_url": "https://api.github.com/users/john-bodley/events{/privacy}", + "received_events_url": "https://api.github.com/users/john-bodley/received_events", + "type": "User", + "site_admin": false + }, + "labels": [ + { + "id": 1332384320, + "node_id": "MDU6TGFiZWwxMzMyMzg0MzIw", + "url": "https://api.github.com/repos/apache/superset/labels/size/XXL", + "name": "size/XXL", + "color": "5A30DA", + "default": false, + "description": "" + } + ], + "state": "open", + "locked": false, + "assignee": null, + "assignees": [ + + ], + "milestone": null, + "comments": 2, + "created_at": "2024-02-17T09:05:22Z", + "updated_at": "2024-02-23T08:34:33Z", + "closed_at": null, + "author_association": "MEMBER", + "active_lock_reason": null, + "draft": false, + "pull_request": { + "url": "https://api.github.com/repos/apache/superset/pulls/27149", + "html_url": "https://github.com/apache/superset/pull/27149", + "diff_url": 
"https://github.com/apache/superset/pull/27149.diff", + "patch_url": "https://github.com/apache/superset/pull/27149.patch", + "merged_at": null + }, + "body": "<!---\r\nPlease write the PR title following the conventions at https://www.conventionalcommits.org/en/v1.0.0/\r\nExample:\r\nfix(dashboard): load charts correctly\r\n-->\r\n\r\n### SUMMARY\r\n\r\nWhilst working on [[SIP-99B] Proposal for (re)defining a \"unit of work\"](https://github.com/apache/superset/issues/25108) I was running into an issue where (I thought) tests were failing due to an issue with the persistence of a logged in user whereas in actuality it was because I was missing the `Public` role.\r\n\r\nThe red herring was due to the slew of tests which don't interface with the test client \"logging\" in a user in the hope of persisting the logged in user to `g.user`. This is ineffectual outside of the client as the Flask globals only persist for the lifetime of the request, i.e., \r\n\r\n```python\r\n>>> from flask import g\r\n\r\n>>> self.login(username=\"admin\")\r\n>>> print(g.user)\r\n<flask_login.mixins.AnonymousUserMixin object at 0x160c48be0>\r\n```\r\n\r\nFlask-Login automatically restores `g.user` from that cookie if it is not in the session when dealing with requests from the client. The TL;DR is authenticating (logging in/logging out) the user is only effectual in the context of web requests via the client.\r\n\r\nThis PR removes any erroneous logins (and logouts)—determined by first eliminating all references and re-adding them for API related tests. It also cleans up any inconsistencies in how logging in occurs. 
I updated the `login()` method to remove the default `admin` username so now it's a required argument which aids with readability.\r\n\r\n### BEFORE/AFTER SCREENSHOTS OR ANIMATED GIF\r\n<!--- Skip this if not applicable -->\r\n\r\n### TESTING INSTRUCTIONS\r\n\r\nCI.\r\n\r\n### ADDITIONAL INFORMATION\r\n<!--- Check any relevant boxes with \"x\" -->\r\n<!--- HINT: Include \"Fixes #nnn\" if you are fixing an existing issue -->\r\n- [ ] Has associated issue:\r\n- [ ] Required feature flags:\r\n- [ ] Changes UI\r\n- [ ] Includes DB Migration (follow approval process in [SIP-59](https://github.com/apache/superset/issues/13351))\r\n - [ ] Migration is atomic, supports rollback & is backwards-compatible\r\n - [ ] Confirm DB migration upgrade and downgrade tested\r\n - [ ] Runtime estimates and downtime expectations provided\r\n- [ ] Introduces new feature or API\r\n- [ ] Removes existing feature or API\r\n", + "reactions": { + "url": "https://api.github.com/repos/apache/superset/issues/27149/reactions", + "total_count": 0, + "+1": 0, + "-1": 0, + "laugh": 0, + "hooray": 0, + "confused": 0, + "heart": 0, + "rocket": 0, + "eyes": 0 + }, + "timeline_url": "https://api.github.com/repos/apache/superset/issues/27149/timeline", + "performed_via_github_app": null, + "state_reason": null + } +] diff --git a/tests/fakes/github_response.json b/tests/fakes/github_pulls_response.json similarity index 100% rename from tests/fakes/github_response.json rename to tests/fakes/github_pulls_response.json From c1af57947f51e09e00c9689bc8cabafef5878455 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida <roberto@dealmeida.net> Date: Fri, 23 Feb 2024 14:32:05 -0500 Subject: [PATCH 12/22] chore: 1.2.17 release (#434) --- CHANGELOG.rst | 3 +++ docs/adapters.rst | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 039fc5ed..d20bbf64 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,9 @@ Changelog Next ==== +Version 1.2.17 - 2024-02-23 
+=========================== + - Add support for GitHub issues (#433) Version 1.2.16 - 2024-02-22 diff --git a/docs/adapters.rst b/docs/adapters.rst index b3d40db7..bfbae712 100644 --- a/docs/adapters.rst +++ b/docs/adapters.rst @@ -316,7 +316,7 @@ You can select data from any `Datasette <https://datasette.io/>`_ table, by usin GitHub ====== -The GitHub adapter currently allows pull requests to be queried (other endpoints can be easily added): +The GitHub adapter currently allows pull requests and issues to be queried (other endpoints can be easily added): .. code-block:: sql From af57ef2e467cc44a39a1a7ea64c420d217f4bd6b Mon Sep 17 00:00:00 2001 From: Beto Dealmeida <roberto@dealmeida.net> Date: Wed, 28 Feb 2024 20:34:08 -0500 Subject: [PATCH 13/22] feat: improve CLI (#435) --- src/shillelagh/console.py | 158 ++++++++++++++++++++++++-------------- tests/console_test.py | 79 +++++++++++++++++++ 2 files changed, 179 insertions(+), 58 deletions(-) diff --git a/src/shillelagh/console.py b/src/shillelagh/console.py index 88407ccd..9dcb4706 100644 --- a/src/shillelagh/console.py +++ b/src/shillelagh/console.py @@ -20,9 +20,10 @@ """ import logging import os.path +import sys import time from pathlib import Path -from typing import List, Tuple +from typing import Iterable, Iterator, Optional import yaml from appdirs import user_config_dir @@ -174,6 +175,79 @@ ) style = style_from_pygments_cls(get_style_by_name("friendly")) +quote_chars = ('"', "'", "`") + + +def emit_statements(lines: Iterable[str]) -> Iterator[str]: + """ + Consume lines and emit complete statements. 
+ """ + quote_context: Optional[str] = None + + rest = "" + for line in lines: + start = 0 + for pos, char in enumerate(line): + if quote_context is not None and char == quote_context: + # leave context + quote_context = None + elif quote_context is None and char == ";": + yield (rest + line[start:pos]).strip() + rest = "" + start = pos + 1 + else: + for quote in quote_chars: + if quote_context is None and char == quote: + # enter context + quote_context = quote + + rest += line[start:] + "\n" + + +def repl(session: PromptSession) -> Iterator[str]: + """ + Yield lines. + """ + quote_context: Optional[str] = None + + start = True + while True: + if start: + prompt = "🍀> " + elif quote_context is None: + prompt = " . " + else: + prompt = f" {quote_context}. " + + try: + line = session.prompt(prompt) + yield line + except KeyboardInterrupt: + continue # Control-C pressed. Clear and try again. + except EOFError: + break # Control-D pressed. + + quote_context = update_quote_context(line, quote_context) + start = quote_context is None and line.strip().endswith(";") + + +def update_quote_context(line: str, quote_context: Optional[str]) -> Optional[str]: + """ + Update the quote context. + + Inside single quotes, inside double quotes, neither. 
+ """ + for char in line: + if quote_context is not None and char == quote_context: + # leave context + quote_context = None + else: + for quote in quote_chars: + if quote_context is None and char == quote: + # enter context + quote_context = quote + + return quote_context def main(): # pylint: disable=too-many-locals @@ -199,6 +273,16 @@ def main(): # pylint: disable=too-many-locals connection = connect(":memory:", adapter_kwargs=adapter_kwargs) cursor = connection.cursor() + # non-interactive + if not sys.stdin.isatty(): + for query in emit_statements(sys.stdin.readlines()): + cursor.execute(query) + results = cursor.fetchall() + headers = [t[0] for t in cursor.description or []] + sys.stdout.write(tabulate(results, headers=headers)) + sys.stdout.write("\n") + return + session = PromptSession( lexer=PygmentsLexer(SqlLexer), completer=sql_completer, @@ -206,69 +290,27 @@ def main(): # pylint: disable=too-many-locals history=FileHistory(history_path), ) - lines: List[str] = [] - quote_context = " " - while True: - prompt = "sql> " if not lines else f" {quote_context}. " + for query in emit_statements(repl(session)): + start = time.time() + results = None try: - line = session.prompt(prompt) - except KeyboardInterrupt: - lines = [] - quote_context = " " - continue # Control-C pressed. Clear and try again. - except EOFError: - break # Control-D pressed. 
+ cursor.execute(query) + results = cursor.fetchall() + except Error as ex: + print(ex) + continue - lines.append(line) - query = "\n".join(lines) - - is_terminated, quote_context = get_query_termination(query) - if is_terminated: - start = time.time() - results = None - try: - cursor.execute(query) - results = cursor.fetchall() - except Error as ex: - print(ex) - continue - finally: - lines = [] - quote_context = " " - - headers = [t[0] for t in cursor.description or []] - print(tabulate(results, headers=headers)) - duration = time.time() - start - print( - f"({len(results)} row{'s' if len(results) != 1 else ''} in {duration:.2f}s)\n", - ) + headers = [t[0] for t in cursor.description or []] + print(tabulate(results, headers=headers)) + duration = time.time() - start + print( + f"({len(results)} row{'s' if len(results) != 1 else ''} " + f"in {duration:.2f}s)\n", + ) connection.close() print("GoodBye!") -def get_query_termination(query: str) -> Tuple[bool, str]: - """ - Check if a query is ended or if a new line should be created. - - This function looks for a semicolon at the end, making sure no quotation mark must be - closed. - """ - quote_context = " " - quote_chars = ('"', "'", "`") - - for query_char in query: - if quote_context == query_char: - quote_context = " " - else: - for quote in quote_chars: - if quote_context == " " and quote == query_char: - quote_context = quote - - is_terminated = quote_context == " " and query.endswith(";") - - return is_terminated, quote_context - - if __name__ == "__main__": main() diff --git a/tests/console_test.py b/tests/console_test.py index 0bc41f50..6c1cc93b 100644 --- a/tests/console_test.py +++ b/tests/console_test.py @@ -17,6 +17,7 @@ def test_main(mocker: MockerFixture) -> None: """ Test ``main``. 
""" + mocker.patch("sys.stdin.isatty", return_value=True) stdout = mocker.patch("sys.stdout", new_callable=StringIO) PromptSession = mocker.patch("shillelagh.console.PromptSession") @@ -39,6 +40,7 @@ def test_exception(mocker: MockerFixture) -> None: """ Test that exceptions are captured and printed. """ + mocker.patch("sys.stdin.isatty", return_value=True) stdout = mocker.patch("sys.stdout", new_callable=StringIO) PromptSession = mocker.patch("shillelagh.console.PromptSession") @@ -57,6 +59,7 @@ def test_ctrl_c(mocker: MockerFixture) -> None: """ Test that ``CTRL-C`` does not exit the REPL. """ + mocker.patch("sys.stdin.isatty", return_value=True) stdout = mocker.patch("sys.stdout", new_callable=StringIO) PromptSession = mocker.patch("shillelagh.console.PromptSession") @@ -83,6 +86,7 @@ def test_configuration(mocker: MockerFixture, fs: FakeFilesystem) -> None: """ Test loading the configuration file. """ + mocker.patch("sys.stdin.isatty", return_value=True) config_dir = Path(user_config_dir("shillelagh")) config_path = config_dir / "shillelagh.yaml" fs.create_file(config_path, contents=yaml.dump({"foo": {"bar": "baz"}})) @@ -101,6 +105,7 @@ def test_no_configuration(mocker: MockerFixture, fs: FakeFilesystem) -> None: """ Test no configuration file found. """ + mocker.patch("sys.stdin.isatty", return_value=True) config_dir = Path(user_config_dir("shillelagh")) fs.create_dir(config_dir) @@ -118,6 +123,7 @@ def test_configuration_invalid(mocker: MockerFixture, fs: FakeFilesystem) -> Non """ Test that an exception is raised if the configuration is invalid. 
""" + mocker.patch("sys.stdin.isatty", return_value=True) config_dir = Path(user_config_dir("shillelagh")) config_path = config_dir / "shillelagh.yaml" fs.create_file(config_path, contents="foo: *") @@ -136,6 +142,7 @@ def test_multiline(mocker: MockerFixture, fs: FakeFilesystem) -> None: """ Test a simple multiline query """ + mocker.patch("sys.stdin.isatty", return_value=True) stdout = mocker.patch("sys.stdout", new_callable=StringIO) PromptSession = mocker.patch("shillelagh.console.PromptSession") @@ -158,6 +165,7 @@ def test_multiline_quoted_semicolon(mocker: MockerFixture, fs: FakeFilesystem) - """ Test a multiline query that contains quoted semicolons. """ + mocker.patch("sys.stdin.isatty", return_value=True) stdout = mocker.patch("sys.stdout", new_callable=StringIO) PromptSession = mocker.patch("shillelagh.console.PromptSession") @@ -185,6 +193,7 @@ def test_multiline_quoted_semicolon_on_line_end( """ Test a multiline query that contains quoted semicolons on the line end. """ + mocker.patch("sys.stdin.isatty", return_value=True) stdout = mocker.patch("sys.stdout", new_callable=StringIO) PromptSession = mocker.patch("shillelagh.console.PromptSession") @@ -212,6 +221,7 @@ def test_multiline_triple_quoted_semicolon_on_line_end( """ Test a multiline query that contains quoted semicolons on the line end. """ + mocker.patch("sys.stdin.isatty", return_value=True) stdout = mocker.patch("sys.stdout", new_callable=StringIO) PromptSession = mocker.patch("shillelagh.console.PromptSession") @@ -234,3 +244,72 @@ def test_multiline_triple_quoted_semicolon_on_line_end( GoodBye! """ ) + + +def test_emit_statements() -> None: + """ + Test the ``emit_statements`` function. + """ + script = """ +SELECT + 1; +SELECT + 2 +; SELECT 3; SELECT 4; SELECT +5 +; + """ + assert list(console.emit_statements(script.split("\n"))) == [ + "SELECT\n 1", + "SELECT\n 2", + "SELECT 3", + "SELECT 4", + "SELECT\n5", + ] + + +def test_repl(mocker: MockerFixture) -> None: + """ + Test the REPL. 
+ """ + session = mocker.MagicMock() + session.prompt.side_effect = [ + "SELECT", + "1", + ";", + EOFError(), + ] + + lines = list(console.repl(session)) + assert lines == ["SELECT", "1", ";"] + session.prompt.assert_has_calls( + [ + mocker.call("🍀> "), + mocker.call(" . "), + mocker.call(" . "), + mocker.call("🍀> "), + ], + ) + + +def test_non_interactive(mocker: MockerFixture) -> None: + """ + Test running ``shillelagh`` non-interactively. + + $ shillelagh < query.sql + + """ + stdin = mocker.patch("sys.stdin") + stdin.isatty.return_value = False + stdout = mocker.patch("sys.stdout", new_callable=StringIO) + + stdin.readlines.return_value = ["SELECT", "1", ";"] + console.main() + result = stdout.getvalue() + assert ( + result + == """ 1 +--- + 1 +""" + ) From 87d581eec7c0c646012a1b6ee9d201addca3df36 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida <roberto@dealmeida.net> Date: Wed, 27 Mar 2024 12:19:37 -0400 Subject: [PATCH 14/22] fix: Oauth2 in GSheets (#438) --- .pre-commit-config.yaml | 2 +- CHANGELOG.rst | 2 ++ setup.cfg | 1 + src/shillelagh/adapters/api/gsheets/adapter.py | 7 +++++-- src/shillelagh/adapters/api/gsheets/lib.py | 4 ++-- src/shillelagh/backends/apsw/dialects/gsheets.py | 2 +- tests/adapters/api/gsheets/adapter_test.py | 5 +++++ 7 files changed, 17 insertions(+), 6 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4d464b11..4d13935e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -57,7 +57,7 @@ repos: # additional_dependencies: [flake8-bugbear] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v0.910' # Use the sha / tag you want to point at + rev: 'v1.9.0' # Use the sha / tag you want to point at hooks: - id: mypy exclude: ^templates/ diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d20bbf64..35d526d6 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ Changelog Next ==== +- Fix OAuth2 flow in GSheets (#438) + Version 1.2.17 - 2024-02-23 =========================== diff 
--git a/setup.cfg b/setup.cfg index af8c68b2..cbe65e45 100644 --- a/setup.cfg +++ b/setup.cfg @@ -134,6 +134,7 @@ githubapi = python-jsonpath>=0.10.3 gsheetsapi = google-auth>=1.23.0 + pyopenssl>=24.0.0 holidaysmemory = holidays>=0.23 htmltableapi = diff --git a/src/shillelagh/adapters/api/gsheets/adapter.py b/src/shillelagh/adapters/api/gsheets/adapter.py index d2fafad3..3e45d74d 100644 --- a/src/shillelagh/adapters/api/gsheets/adapter.py +++ b/src/shillelagh/adapters/api/gsheets/adapter.py @@ -251,8 +251,11 @@ def _run_query(self, sql: str) -> QueryResults: if response.encoding is None: response.encoding = "utf-8" - if response.status_code != 200: - raise ProgrammingError(response.text) + try: + response.raise_for_status() + except Exception as ex: + self._check_permissions(ex) + raise ProgrammingError(response.text) from ex if response.text.startswith(JSON_PAYLOAD_PREFIX): result = json.loads(response.text[len(JSON_PAYLOAD_PREFIX) :]) diff --git a/src/shillelagh/adapters/api/gsheets/lib.py b/src/shillelagh/adapters/api/gsheets/lib.py index 4ec1c6ce..5a50ebf2 100644 --- a/src/shillelagh/adapters/api/gsheets/lib.py +++ b/src/shillelagh/adapters/api/gsheets/lib.py @@ -187,11 +187,11 @@ def get_index_from_letters(letters: str) -> int: """ base26 = reversed([string.ascii_uppercase.index(letter) + 1 for letter in letters]) - return ( + return int( sum( value * (len(string.ascii_uppercase) ** i) for i, value in enumerate(base26) ) - - 1 + - 1, ) diff --git a/src/shillelagh/backends/apsw/dialects/gsheets.py b/src/shillelagh/backends/apsw/dialects/gsheets.py index 2dfef2d4..fa7e0a63 100644 --- a/src/shillelagh/backends/apsw/dialects/gsheets.py +++ b/src/shillelagh/backends/apsw/dialects/gsheets.py @@ -146,7 +146,7 @@ def do_ping(self, dbapi_connection: _ConnectionFairy) -> bool: def get_table_names( # pylint: disable=unused-argument self, connection: _ConnectionFairy, - schema: str = None, + schema: Optional[str] = None, sqlite_include_internal: bool = False, **kwargs: 
Any, ) -> List[str]: diff --git a/tests/adapters/api/gsheets/adapter_test.py b/tests/adapters/api/gsheets/adapter_test.py index e74c2752..9e05df5e 100644 --- a/tests/adapters/api/gsheets/adapter_test.py +++ b/tests/adapters/api/gsheets/adapter_test.py @@ -752,6 +752,11 @@ def test_api_bugs(mocker: MockerFixture) -> None: status_code=400, headers={}, ) + adapter.register_uri( + "GET", + "https://sheets.googleapis.com/v4/spreadsheets/3/developerMetadata/0", + status_code=200, + ) connection = connect(":memory:", ["gsheetsapi"]) cursor = connection.cursor() From 25724a02941318ecf193f9d58131b2c9fde6a047 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida <roberto@dealmeida.net> Date: Wed, 27 Mar 2024 12:22:22 -0400 Subject: [PATCH 15/22] chore: release 1.2.18 (#439) --- CHANGELOG.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 35d526d6..991dde73 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,9 @@ Changelog Next ==== +Version 1.2.18 - 2024-03-27 +=========================== + - Fix OAuth2 flow in GSheets (#438) Version 1.2.17 - 2024-02-23 From adc7aa676db26fcbb0309f5e45e3d8e08ae5a55f Mon Sep 17 00:00:00 2001 From: Joe Li <joe@preset.io> Date: Wed, 3 Apr 2024 09:52:23 -0700 Subject: [PATCH 16/22] chore: relax tabulate dependency (#443) * relax tabulate dependency * update change log --- CHANGELOG.rst | 2 ++ setup.cfg | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 991dde73..02ffca64 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ Changelog Next ==== +- Relax ``tabulate`` dependency for Apache Superset (#443) + Version 1.2.18 - 2024-03-27 =========================== diff --git a/setup.cfg b/setup.cfg index cbe65e45..8175424f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -112,7 +112,7 @@ all = psutil>=5.8.0 pygments>=2.8 python-jsonpath>=0.10.3 - tabulate==0.8.9 + tabulate>=0.8.9 yarl>=1.8.1 docs = sphinx>=4.0.1 From a3ceb8cef05acb47f675964b725a8d8170c59525 
Mon Sep 17 00:00:00 2001 From: Quentin Leroy <qleroy@users.noreply.github.com> Date: Wed, 3 Apr 2024 18:53:10 +0200 Subject: [PATCH 17/22] adapter_type to lowercase (#442) --- templates/adapter/cookiecutter.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/templates/adapter/cookiecutter.json b/templates/adapter/cookiecutter.json index 786a68b4..1d1b99cc 100644 --- a/templates/adapter/cookiecutter.json +++ b/templates/adapter/cookiecutter.json @@ -1,6 +1,6 @@ { "adapter_name": "MyAdapter", "description": "An adapter for ...", - "adapter_type": ["API", "File", "Memory"], + "adapter_type": ["api", "file", "memory"], "slug": "{{ cookiecutter.adapter_name|lower|replace(' ', '-') }}" } From 7521ebf74dd31f5bffa78a851c903845e6b4e558 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida <roberto@dealmeida.net> Date: Wed, 3 Apr 2024 14:24:28 -0400 Subject: [PATCH 18/22] chore: release 1.2.19 (#444) --- CHANGELOG.rst | 3 +++ setup.cfg | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 02ffca64..470991f2 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,9 @@ Changelog Next ==== +Version 1.2.19 - 2024-04-03 +=========================== + - Relax ``tabulate`` dependency for Apache Superset (#443) Version 1.2.18 - 2024-03-27 diff --git a/setup.cfg b/setup.cfg index 8175424f..a03b0229 100644 --- a/setup.cfg +++ b/setup.cfg @@ -95,7 +95,7 @@ testing = pytest>=7.2.0 python-jsonpath>=0.10.3 requests-mock>=1.8.0 - tabulate==0.8.9 + tabulate>=0.8.9 yarl>=1.8.1 all = PyYAML>=5.4 @@ -121,7 +121,7 @@ console = appdirs>=1.4.4 prompt_toolkit>=3 pygments>=2.8 - tabulate==0.8.9 + tabulate>=0.8.9 genericjsonapi = prison>=0.2.1 python-jsonpath>=0.10.3 From 09cc886ec767f55d5ecbb1c881d964095b4ba0dd Mon Sep 17 00:00:00 2001 From: Beto Dealmeida <roberto@dealmeida.net> Date: Sun, 14 Apr 2024 11:21:53 -0400 Subject: [PATCH 19/22] chore: use ruff (#446) --- .pre-commit-config.yaml | 24 ++++++-------- examples/csvfile.py | 1 
+ examples/dataframe.py | 1 + examples/datasette.py | 1 + examples/generic_xml.py | 1 + examples/github.py | 1 + examples/gsheets.py | 1 + examples/socrata.py | 1 + examples/weatherapi.py | 1 + pyproject.toml | 5 +++ requirements/base.txt | 10 +----- requirements/test.txt | 33 ++++++++----------- setup.cfg | 2 ++ setup.py | 1 + src/shillelagh/adapters/api/datasette.py | 1 - src/shillelagh/adapters/api/generic_json.py | 1 - src/shillelagh/adapters/api/generic_xml.py | 1 - src/shillelagh/adapters/api/github.py | 2 +- .../adapters/api/gsheets/adapter.py | 1 + src/shillelagh/adapters/api/gsheets/fields.py | 1 + src/shillelagh/adapters/api/gsheets/lib.py | 1 + .../adapters/api/gsheets/parsing/base.py | 1 + .../adapters/api/gsheets/parsing/number.py | 1 + src/shillelagh/adapters/api/gsheets/types.py | 1 + src/shillelagh/adapters/api/gsheets/typing.py | 1 + src/shillelagh/adapters/api/html_table.py | 1 - src/shillelagh/adapters/api/s3select.py | 10 ++---- src/shillelagh/adapters/api/socrata.py | 2 +- src/shillelagh/adapters/api/system.py | 5 ++- src/shillelagh/adapters/api/weatherapi.py | 2 +- src/shillelagh/adapters/base.py | 2 +- src/shillelagh/adapters/file/csvfile.py | 1 + src/shillelagh/adapters/memory/holidays.py | 1 - src/shillelagh/adapters/memory/pandas.py | 1 - src/shillelagh/backends/apsw/db.py | 26 +++++++++++++-- src/shillelagh/backends/apsw/dialects/base.py | 2 +- .../backends/apsw/dialects/gsheets.py | 1 + src/shillelagh/backends/apsw/dialects/safe.py | 2 +- src/shillelagh/backends/apsw/vt.py | 2 +- src/shillelagh/console.py | 1 + src/shillelagh/fields.py | 2 +- src/shillelagh/filters.py | 1 + src/shillelagh/functions.py | 1 + src/shillelagh/lib.py | 1 + src/shillelagh/types.py | 1 + src/shillelagh/typing.py | 1 + tests/adapters/api/datasette_test.py | 1 + tests/adapters/api/github_test.py | 1 + tests/adapters/api/gsheets/fields_test.py | 1 + .../adapters/api/gsheets/integration_test.py | 1 + tests/adapters/api/gsheets/lib_test.py | 1 + 
.../adapters/api/gsheets/parsing/base_test.py | 1 + .../adapters/api/gsheets/parsing/date_test.py | 1 + .../api/gsheets/parsing/number_test.py | 1 + tests/adapters/api/html_table_test.py | 1 + tests/adapters/api/socrata_test.py | 1 + tests/adapters/api/system_test.py | 1 + tests/adapters/api/weatherapi_test.py | 1 + tests/adapters/base_test.py | 8 ++--- tests/adapters/file/csvfile_test.py | 1 + tests/adapters/memory/pandas_test.py | 1 + tests/backends/apsw/db_test.py | 2 +- tests/backends/apsw/dialects/base_test.py | 1 + tests/backends/apsw/dialects/gsheets_test.py | 1 + tests/backends/apsw/dialects/safe_test.py | 1 + tests/backends/apsw/vt_test.py | 5 +-- tests/conftest.py | 1 + tests/console_test.py | 1 + tests/fakes/__init__.py | 1 - tests/fields_test.py | 2 +- tests/filters_test.py | 1 + tests/functions_test.py | 1 + tests/lib_test.py | 1 + tests/types_test.py | 1 + 74 files changed, 121 insertions(+), 81 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4d13935e..3d53891c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -34,20 +34,6 @@ repos: hooks: - id: isort -- repo: https://github.com/psf/black - rev: 22.10.0 - hooks: - - id: black - language_version: python3 - exclude: ^templates/ - -## If like to embrace black styles even in the docs: -# - repo: https://github.com/asottile/blacken-docs -# rev: v1.9.1 -# hooks: -# - id: blacken-docs -# additional_dependencies: [black] - - repo: https://github.com/PyCQA/flake8 rev: 3.9.2 hooks: @@ -77,12 +63,14 @@ repos: # hooks: # - id: reorder-python-imports # args: [--application-directories=.:src] + - repo: https://github.com/hadialqattan/pycln rev: v2.1.2 hooks: - id: pycln args: [--config=pyproject.toml] exclude: ^templates/ + - repo: local hooks: - id: pylint @@ -92,9 +80,17 @@ repos: types: [python] exclude: ^templates/ args: [--disable=use-implicit-booleaness-not-comparison] + - repo: https://github.com/asottile/pyupgrade rev: v3.10.1 hooks: - id: pyupgrade 
args: - --py38-plus + +- repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.3.7 + hooks: + - id: ruff + args: [ --fix ] + - id: ruff-format diff --git a/examples/csvfile.py b/examples/csvfile.py index a557e2f6..2c20ae5d 100644 --- a/examples/csvfile.py +++ b/examples/csvfile.py @@ -1,6 +1,7 @@ """ A simple example showing the CSV adapter. """ + from shillelagh.backends.apsw.db import connect if __name__ == "__main__": diff --git a/examples/dataframe.py b/examples/dataframe.py index 5accc5d5..5240c08f 100644 --- a/examples/dataframe.py +++ b/examples/dataframe.py @@ -1,6 +1,7 @@ """ A simple example showing the Pandas adapter. """ + import pandas as pd from shillelagh.backends.apsw.db import connect diff --git a/examples/datasette.py b/examples/datasette.py index 281f5b57..a6b76689 100644 --- a/examples/datasette.py +++ b/examples/datasette.py @@ -1,6 +1,7 @@ """ A simple example showing the Datasette adapter. """ + from shillelagh.backends.apsw.db import connect if __name__ == "__main__": diff --git a/examples/generic_xml.py b/examples/generic_xml.py index 23e8de5a..dbc7bccc 100644 --- a/examples/generic_xml.py +++ b/examples/generic_xml.py @@ -1,6 +1,7 @@ """ A simple example showing the generic XML. """ + import sys from shillelagh.backends.apsw.db import connect diff --git a/examples/github.py b/examples/github.py index ab311689..3aa840d5 100644 --- a/examples/github.py +++ b/examples/github.py @@ -1,6 +1,7 @@ """ A simple example showing the GitHub adapter. """ + from shillelagh.backends.apsw.db import connect if __name__ == "__main__": diff --git a/examples/gsheets.py b/examples/gsheets.py index bdb28b88..d7d3c92f 100644 --- a/examples/gsheets.py +++ b/examples/gsheets.py @@ -1,6 +1,7 @@ """ A simple example showing the GSheets adapter. 
""" + from shillelagh.backends.apsw.db import connect if __name__ == "__main__": diff --git a/examples/socrata.py b/examples/socrata.py index e40f67ff..7846ae7b 100644 --- a/examples/socrata.py +++ b/examples/socrata.py @@ -1,6 +1,7 @@ """ A simple example showing the Socrata adapter. """ + from shillelagh.backends.apsw.db import connect if __name__ == "__main__": diff --git a/examples/weatherapi.py b/examples/weatherapi.py index 9ebad47b..2adcd7f4 100644 --- a/examples/weatherapi.py +++ b/examples/weatherapi.py @@ -1,6 +1,7 @@ """ A simple example showing the WeatherAPI adapter. """ + import os import sys from datetime import datetime, timedelta diff --git a/pyproject.toml b/pyproject.toml index 0a7b37a8..01be1fb9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,3 +9,8 @@ version_scheme = "no-guess-dev" [tool.flake8] max-line-length = 90 + +[tool.ruff] +exclude = [ + "templates", +] diff --git a/requirements/base.txt b/requirements/base.txt index 25c06a38..f05e0622 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -19,16 +19,12 @@ certifi==2022.6.15 # via requests charset-normalizer==2.1.0 # via requests -exceptiongroup==1.1.3 - # via cattrs greenlet==2.0.2 # via # shillelagh # sqlalchemy idna==3.3 # via requests -importlib-metadata==6.7.0 - # via shillelagh packaging==23.0 # via shillelagh platformdirs==3.11.0 @@ -48,14 +44,10 @@ six==1.16.0 sqlalchemy==1.4.39 # via shillelagh typing-extensions==4.3.0 - # via - # cattrs - # shillelagh + # via shillelagh url-normalize==1.4.3 # via requests-cache urllib3==1.26.10 # via # requests # requests-cache -zipp==3.15.0 - # via importlib-metadata diff --git a/requirements/test.txt b/requirements/test.txt index 2b531444..d284d2d1 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -39,7 +39,9 @@ cfgv==3.3.1 charset-normalizer==2.1.0 # via requests click==8.1.3 - # via pip-tools + # via + # pip-compile-multi + # pip-tools codespell==2.1.0 # via shillelagh coverage[toml]==6.4.2 @@ -52,10 
+54,6 @@ dill==0.3.6 # shillelagh distlib==0.3.5 # via virtualenv -exceptiongroup==1.0.4 - # via - # cattrs - # pytest filelock==3.7.1 # via virtualenv freezegun==1.2.1 @@ -76,8 +74,6 @@ idna==3.3 # via # requests # yarl -importlib-metadata==6.7.0 - # via shillelagh iniconfig==1.1.1 # via pytest isort==5.10.1 @@ -105,8 +101,12 @@ pandas==1.4.3 # via shillelagh pep517==0.12.0 # via build -pip-tools==6.8.0 +pip-compile-multi==2.6.3 # via shillelagh +pip-tools==6.8.0 + # via + # pip-compile-multi + # shillelagh platformdirs==2.5.2 # via # pylint @@ -173,6 +173,8 @@ requests-mock==1.9.3 # via shillelagh rsa==4.9 # via google-auth +ruff==0.3.7 + # via shillelagh s3transfer==0.6.0 # via boto3 six==1.16.0 @@ -193,18 +195,13 @@ tabulate==0.8.9 toml==0.10.2 # via pre-commit tomli==2.0.1 - # via - # build - # coverage - # pylint - # pytest + # via pep517 tomlkit==0.11.1 # via pylint +toposort==1.10 + # via pip-compile-multi typing-extensions==4.3.0 - # via - # astroid - # pylint - # shillelagh + # via shillelagh url-normalize==1.4.3 # via requests-cache urllib3==1.26.10 @@ -224,8 +221,6 @@ wrapt==1.14.1 # via astroid yarl==1.8.1 # via shillelagh -zipp==3.15.0 - # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # pip diff --git a/setup.cfg b/setup.cfg index a03b0229..cdeae6fe 100644 --- a/setup.cfg +++ b/setup.cfg @@ -83,6 +83,7 @@ testing = pandas>=1.2.2 pip-tools>=6.4.0 pre-commit>=2.13.0 + pip-compile-multi>=2.6.3 prison>=0.2.1 prompt_toolkit>=3 psutil>=5.8.0 @@ -95,6 +96,7 @@ testing = pytest>=7.2.0 python-jsonpath>=0.10.3 requests-mock>=1.8.0 + ruff>=0.3.7 tabulate>=0.8.9 yarl>=1.8.1 all = diff --git a/setup.py b/setup.py index 02ef1451..e44bcbb7 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,7 @@ """ Skeleton ``setup.py``, reads configuration from ``setup.cfg``. 
""" + import sys from pkg_resources import VersionConflict, require diff --git a/src/shillelagh/adapters/api/datasette.py b/src/shillelagh/adapters/api/datasette.py index 0a929b4b..0b308507 100644 --- a/src/shillelagh/adapters/api/datasette.py +++ b/src/shillelagh/adapters/api/datasette.py @@ -89,7 +89,6 @@ def get_field(value: Any) -> Field: class DatasetteAPI(Adapter): - """ An adapter to Datasette instances (https://datasette.io/). """ diff --git a/src/shillelagh/adapters/api/generic_json.py b/src/shillelagh/adapters/api/generic_json.py index 5aca6b09..8ba0e699 100644 --- a/src/shillelagh/adapters/api/generic_json.py +++ b/src/shillelagh/adapters/api/generic_json.py @@ -28,7 +28,6 @@ class GenericJSONAPI(Adapter): - """ An adapter for fetching JSON data. """ diff --git a/src/shillelagh/adapters/api/generic_xml.py b/src/shillelagh/adapters/api/generic_xml.py index 9da36a72..b6f59b63 100644 --- a/src/shillelagh/adapters/api/generic_xml.py +++ b/src/shillelagh/adapters/api/generic_xml.py @@ -42,7 +42,6 @@ def element_to_dict(element: ET.Element) -> Any: class GenericXMLAPI(GenericJSONAPI): - """ An adapter for fetching XML data. """ diff --git a/src/shillelagh/adapters/api/github.py b/src/shillelagh/adapters/api/github.py index 0eabdabe..9af9fa6a 100644 --- a/src/shillelagh/adapters/api/github.py +++ b/src/shillelagh/adapters/api/github.py @@ -1,6 +1,7 @@ """ An adapter for GitHub. """ + import json import logging import urllib.parse @@ -101,7 +102,6 @@ class Column: class GitHubAPI(Adapter): - """ An adapter for GitHub. """ diff --git a/src/shillelagh/adapters/api/gsheets/adapter.py b/src/shillelagh/adapters/api/gsheets/adapter.py index 3e45d74d..7de4878f 100644 --- a/src/shillelagh/adapters/api/gsheets/adapter.py +++ b/src/shillelagh/adapters/api/gsheets/adapter.py @@ -2,6 +2,7 @@ """ Google Sheets adapter. 
""" + import datetime import json import logging diff --git a/src/shillelagh/adapters/api/gsheets/fields.py b/src/shillelagh/adapters/api/gsheets/fields.py index 1f24b96c..61355826 100644 --- a/src/shillelagh/adapters/api/gsheets/fields.py +++ b/src/shillelagh/adapters/api/gsheets/fields.py @@ -1,6 +1,7 @@ """ Custom fields for the GSheets adapter. """ + import datetime from typing import Any, List, Optional, Type, Union diff --git a/src/shillelagh/adapters/api/gsheets/lib.py b/src/shillelagh/adapters/api/gsheets/lib.py index 5a50ebf2..8a41f20d 100644 --- a/src/shillelagh/adapters/api/gsheets/lib.py +++ b/src/shillelagh/adapters/api/gsheets/lib.py @@ -1,4 +1,5 @@ """Helper functions for the GSheets adapter.""" + import datetime import itertools import string diff --git a/src/shillelagh/adapters/api/gsheets/parsing/base.py b/src/shillelagh/adapters/api/gsheets/parsing/base.py index 029a2cba..94886e77 100644 --- a/src/shillelagh/adapters/api/gsheets/parsing/base.py +++ b/src/shillelagh/adapters/api/gsheets/parsing/base.py @@ -1,6 +1,7 @@ """ Token for parsing date and time. 
""" + import re from datetime import date, datetime, time, timedelta from typing import Any, Dict, Generic, Iterator, List, Tuple, Type, TypeVar diff --git a/src/shillelagh/adapters/api/gsheets/parsing/number.py b/src/shillelagh/adapters/api/gsheets/parsing/number.py index 5e268962..0b58ed01 100644 --- a/src/shillelagh/adapters/api/gsheets/parsing/number.py +++ b/src/shillelagh/adapters/api/gsheets/parsing/number.py @@ -3,6 +3,7 @@ https://developers.google.com/sheets/api/guides/formats#number_format_tokens """ + # pylint: disable=c-extension-no-member, broad-exception-raised import math import operator diff --git a/src/shillelagh/adapters/api/gsheets/types.py b/src/shillelagh/adapters/api/gsheets/types.py index c60e2a10..90d26da4 100644 --- a/src/shillelagh/adapters/api/gsheets/types.py +++ b/src/shillelagh/adapters/api/gsheets/types.py @@ -1,4 +1,5 @@ """Types for the GSheets adapter.""" + from enum import Enum diff --git a/src/shillelagh/adapters/api/gsheets/typing.py b/src/shillelagh/adapters/api/gsheets/typing.py index 96163ac2..12ecdab3 100644 --- a/src/shillelagh/adapters/api/gsheets/typing.py +++ b/src/shillelagh/adapters/api/gsheets/typing.py @@ -1,4 +1,5 @@ """Custom types for the GSheets adapter.""" + from typing import Any, List from typing_extensions import Literal, TypedDict diff --git a/src/shillelagh/adapters/api/html_table.py b/src/shillelagh/adapters/api/html_table.py index f1af4370..c68d0842 100644 --- a/src/shillelagh/adapters/api/html_table.py +++ b/src/shillelagh/adapters/api/html_table.py @@ -21,7 +21,6 @@ class HTMLTableAPI(Adapter): - """ An adapter for scraping HTML tables. 
""" diff --git a/src/shillelagh/adapters/api/s3select.py b/src/shillelagh/adapters/api/s3select.py index 186651cc..a2a390bf 100644 --- a/src/shillelagh/adapters/api/s3select.py +++ b/src/shillelagh/adapters/api/s3select.py @@ -129,8 +129,7 @@ def get_input_serialization(parsed: urllib.parse.ParseResult) -> InputSerializat input_serialization["CSV"] = { k: unescape_backslash(v[-1]) for k, v in options.items() - if k - in CSVSerializationOptionsType.__annotations__ # pylint: disable=no-member + if k in CSVSerializationOptionsType.__annotations__ # pylint: disable=no-member } if "FileHeaderInfo" not in input_serialization["CSV"]: input_serialization["CSV"]["FileHeaderInfo"] = "USE" @@ -141,8 +140,7 @@ def get_input_serialization(parsed: urllib.parse.ParseResult) -> InputSerializat input_serialization["JSON"] = { k: v[-1] for k, v in options.items() - if k - in JSONSerializationOptionsType.__annotations__ # pylint: disable=no-member + if k in JSONSerializationOptionsType.__annotations__ # pylint: disable=no-member } return cast(JSONSerializationType, input_serialization) @@ -151,8 +149,7 @@ def get_input_serialization(parsed: urllib.parse.ParseResult) -> InputSerializat input_serialization["Parquet"] = { k: v[-1] for k, v in options.items() - if k - in ParquetSerializationOptionsType.__annotations__ # pylint: disable=no-member + if k in ParquetSerializationOptionsType.__annotations__ # pylint: disable=no-member } return cast(ParquetSerializationType, input_serialization) @@ -163,7 +160,6 @@ def get_input_serialization(parsed: urllib.parse.ParseResult) -> InputSerializat class S3SelectAPI(Adapter): - """ An adapter to S3 files via S3Select. diff --git a/src/shillelagh/adapters/api/socrata.py b/src/shillelagh/adapters/api/socrata.py index 27f96490..0e7e32b2 100644 --- a/src/shillelagh/adapters/api/socrata.py +++ b/src/shillelagh/adapters/api/socrata.py @@ -3,6 +3,7 @@ See https://dev.socrata.com/ for more information. 
""" + import logging import re import urllib.parse @@ -88,7 +89,6 @@ def get_field(col: MetadataColumn) -> Field: class SocrataAPI(Adapter): - """ An adapter to the Socrata Open Data API (https://dev.socrata.com/). diff --git a/src/shillelagh/adapters/api/system.py b/src/shillelagh/adapters/api/system.py index c4224f83..13858fb2 100644 --- a/src/shillelagh/adapters/api/system.py +++ b/src/shillelagh/adapters/api/system.py @@ -4,6 +4,7 @@ See https://github.com/giampaolo/psutil for more information. """ + import logging import time import urllib.parse @@ -101,7 +102,6 @@ def get_columns(resource: ResourceType) -> Dict[str, Field]: class SystemAPI(Adapter): - """ An adapter for retrieving system information. """ @@ -117,8 +117,7 @@ def supports(uri: str, fast: bool = True, **kwargs: Any) -> Optional[bool]: parsed = urllib.parse.urlparse(uri) return parsed.scheme == "system" and ( # pylint: disable=protected-access - parsed.netloc in ResourceType._value2member_map_ - or parsed.netloc == "" + parsed.netloc in ResourceType._value2member_map_ or parsed.netloc == "" ) @staticmethod diff --git a/src/shillelagh/adapters/api/weatherapi.py b/src/shillelagh/adapters/api/weatherapi.py index ab6391fb..d28a4978 100644 --- a/src/shillelagh/adapters/api/weatherapi.py +++ b/src/shillelagh/adapters/api/weatherapi.py @@ -1,6 +1,7 @@ """ An adapter to WeatherAPI (https://www.weatherapi.com/). """ + import logging import urllib.parse from datetime import date, datetime, timedelta, timezone @@ -61,7 +62,6 @@ def combine_time_filters(bounds: Dict[str, Filter]) -> Range: class WeatherAPI(Adapter): - """ An adapter for WeatherAPI (https://www.weatherapi.com/). 
diff --git a/src/shillelagh/adapters/base.py b/src/shillelagh/adapters/base.py index da1efe48..165cf5ed 100644 --- a/src/shillelagh/adapters/base.py +++ b/src/shillelagh/adapters/base.py @@ -1,4 +1,5 @@ """Base class for adapters.""" + import atexit import inspect from typing import Any, Dict, Iterator, List, Optional, Tuple @@ -12,7 +13,6 @@ class Adapter: - """ An adapter to a table. diff --git a/src/shillelagh/adapters/file/csvfile.py b/src/shillelagh/adapters/file/csvfile.py index c9e4a5ea..49e6c127 100644 --- a/src/shillelagh/adapters/file/csvfile.py +++ b/src/shillelagh/adapters/file/csvfile.py @@ -9,6 +9,7 @@ Remote files (HTTP/HTTPS) are also supported in read-only mode. """ + import csv import logging import os diff --git a/src/shillelagh/adapters/memory/holidays.py b/src/shillelagh/adapters/memory/holidays.py index 3048a90f..e43a3b2f 100644 --- a/src/shillelagh/adapters/memory/holidays.py +++ b/src/shillelagh/adapters/memory/holidays.py @@ -27,7 +27,6 @@ class BoundsType(TypedDict, total=False): class HolidaysMemory(Adapter): - """ An adapter for in-memory holidays. """ diff --git a/src/shillelagh/adapters/memory/pandas.py b/src/shillelagh/adapters/memory/pandas.py index ee7a54a2..c3737e42 100644 --- a/src/shillelagh/adapters/memory/pandas.py +++ b/src/shillelagh/adapters/memory/pandas.py @@ -138,7 +138,6 @@ def get_columns_from_df(df: pd.DataFrame) -> Dict[str, Field]: class PandasMemory(Adapter): - """ An adapter for in-memory Pandas dataframes. """ diff --git a/src/shillelagh/backends/apsw/db.py b/src/shillelagh/backends/apsw/db.py index 04877343..0a9f6765 100644 --- a/src/shillelagh/backends/apsw/db.py +++ b/src/shillelagh/backends/apsw/db.py @@ -2,6 +2,7 @@ """ A DB API 2.0 wrapper for APSW. 
""" + import datetime import itertools import logging @@ -62,6 +63,29 @@ ) from shillelagh.typing import Description, SQLiteValidType +__all__ = [ + "DatabaseError", + "DataError", + "Error", + "IntegrityError", + "InterfaceError", + "InternalError", + "OperationalError", + "BINARY", + "DATETIME", + "NUMBER", + "ROWID", + "STRING", + "Binary", + "Date", + "DateFromTicks", + "Time", + "TimeFromTicks", + "Timestamp", + "TimestampFromTicks", + "Warning", +] + apilevel = "2.0" threadsafety = 2 paramstyle = "qmark" @@ -128,7 +152,6 @@ def convert_binding(binding: Any) -> SQLiteValidType: class Cursor: # pylint: disable=too-many-instance-attributes - """ Connection cursor. """ @@ -432,7 +455,6 @@ def apsw_version() -> str: class Connection: - """Connection.""" def __init__( # pylint: disable=too-many-arguments diff --git a/src/shillelagh/backends/apsw/dialects/base.py b/src/shillelagh/backends/apsw/dialects/base.py index 7af4f897..2dd8da0b 100644 --- a/src/shillelagh/backends/apsw/dialects/base.py +++ b/src/shillelagh/backends/apsw/dialects/base.py @@ -1,5 +1,6 @@ # pylint: disable=protected-access, abstract-method """A SQLALchemy dialect.""" + from typing import Any, Dict, List, Optional, Tuple, cast import sqlalchemy.types @@ -30,7 +31,6 @@ class SQLAlchemyColumn(TypedDict): class APSWDialect(SQLiteDialect): - """ A SQLAlchemy dialect for Shillelagh. diff --git a/src/shillelagh/backends/apsw/dialects/gsheets.py b/src/shillelagh/backends/apsw/dialects/gsheets.py index fa7e0a63..de6b3a9e 100644 --- a/src/shillelagh/backends/apsw/dialects/gsheets.py +++ b/src/shillelagh/backends/apsw/dialects/gsheets.py @@ -4,6 +4,7 @@ This dialect was implemented to replace the ``gsheetsdb`` library. 
""" + import logging import urllib.parse from datetime import timedelta diff --git a/src/shillelagh/backends/apsw/dialects/safe.py b/src/shillelagh/backends/apsw/dialects/safe.py index 8932644a..b20d2cfa 100644 --- a/src/shillelagh/backends/apsw/dialects/safe.py +++ b/src/shillelagh/backends/apsw/dialects/safe.py @@ -5,6 +5,7 @@ When this dialect is used only adapters marked as safe and explicitly listed are loaded. """ + from typing import Any, Dict, List, Optional, Tuple from sqlalchemy.engine.url import URL @@ -13,7 +14,6 @@ class APSWSafeDialect(APSWDialect): - """ A "safe" Shillelagh dialect. diff --git a/src/shillelagh/backends/apsw/vt.py b/src/shillelagh/backends/apsw/vt.py index 805b2dee..bc06ca27 100644 --- a/src/shillelagh/backends/apsw/vt.py +++ b/src/shillelagh/backends/apsw/vt.py @@ -6,6 +6,7 @@ to adapters. The main goal is to make the interface easier to use, to simplify the work of writing new adapters. """ + import json import logging from collections import defaultdict @@ -264,7 +265,6 @@ def get_bounds( class VTModule: # pylint: disable=too-few-public-methods - """ A module used to create SQLite virtual tables. diff --git a/src/shillelagh/console.py b/src/shillelagh/console.py index 9dcb4706..0bfc2947 100644 --- a/src/shillelagh/console.py +++ b/src/shillelagh/console.py @@ -18,6 +18,7 @@ api_key: XXX """ + import logging import os.path import sys diff --git a/src/shillelagh/fields.py b/src/shillelagh/fields.py index 22e32767..f64cf254 100644 --- a/src/shillelagh/fields.py +++ b/src/shillelagh/fields.py @@ -1,6 +1,7 @@ """ Fields representing columns of different types and capabilities. """ + import datetime import decimal from enum import Enum @@ -48,7 +49,6 @@ class Order(Enum): class Field(Generic[Internal, External]): - """ Represents a column in a table. 
diff --git a/src/shillelagh/filters.py b/src/shillelagh/filters.py index 66bcdb5c..2112c80e 100644 --- a/src/shillelagh/filters.py +++ b/src/shillelagh/filters.py @@ -1,6 +1,7 @@ """ Filters for representing SQL predicates. """ + import re from enum import Enum from typing import Any, Optional, Set, Tuple diff --git a/src/shillelagh/functions.py b/src/shillelagh/functions.py index ff1fde48..9a2d3252 100644 --- a/src/shillelagh/functions.py +++ b/src/shillelagh/functions.py @@ -1,6 +1,7 @@ """ Custom functions available to the SQL backend. """ + import json import sys import time diff --git a/src/shillelagh/lib.py b/src/shillelagh/lib.py index 6501af05..43ac5f4e 100644 --- a/src/shillelagh/lib.py +++ b/src/shillelagh/lib.py @@ -1,4 +1,5 @@ """Helper functions for Shillelagh.""" + import base64 import inspect import itertools diff --git a/src/shillelagh/types.py b/src/shillelagh/types.py index 8adfa3ed..de2385b6 100644 --- a/src/shillelagh/types.py +++ b/src/shillelagh/types.py @@ -1,4 +1,5 @@ """DB API 2.0 types for Shillelagh.""" + import datetime import inspect import time diff --git a/src/shillelagh/typing.py b/src/shillelagh/typing.py index 85e2cf8c..ccf1a204 100644 --- a/src/shillelagh/typing.py +++ b/src/shillelagh/typing.py @@ -1,4 +1,5 @@ """Custom types for Shillelagh.""" + from typing import Any, Dict, List, Optional, Tuple, Type, Union from typing_extensions import Literal diff --git a/tests/adapters/api/datasette_test.py b/tests/adapters/api/datasette_test.py index f598e6e4..58d64c18 100644 --- a/tests/adapters/api/datasette_test.py +++ b/tests/adapters/api/datasette_test.py @@ -2,6 +2,7 @@ """ Tests for the Datasette adapter. """ + from datetime import timedelta import pytest diff --git a/tests/adapters/api/github_test.py b/tests/adapters/api/github_test.py index e64f778f..3c6f2da6 100644 --- a/tests/adapters/api/github_test.py +++ b/tests/adapters/api/github_test.py @@ -2,6 +2,7 @@ """ Tests for the Datasette adapter. 
""" + import datetime import pytest diff --git a/tests/adapters/api/gsheets/fields_test.py b/tests/adapters/api/gsheets/fields_test.py index 0c9efd3b..3fe3befe 100644 --- a/tests/adapters/api/gsheets/fields_test.py +++ b/tests/adapters/api/gsheets/fields_test.py @@ -2,6 +2,7 @@ """ Tests for shillelagh.adapters.api.gsheets.fields. """ + import datetime import dateutil.tz diff --git a/tests/adapters/api/gsheets/integration_test.py b/tests/adapters/api/gsheets/integration_test.py index 6ff750b5..aa20de0c 100644 --- a/tests/adapters/api/gsheets/integration_test.py +++ b/tests/adapters/api/gsheets/integration_test.py @@ -6,6 +6,7 @@ Uses a private sheet: https://docs.google.com/spreadsheets/d/ 1_rN3lm0R_bU3NemO0s9pbFkY5LQPcuy1pscv8ZXPtg8/edit """ + import datetime from typing import Any, Dict diff --git a/tests/adapters/api/gsheets/lib_test.py b/tests/adapters/api/gsheets/lib_test.py index 7370b133..aa2ff5ba 100644 --- a/tests/adapters/api/gsheets/lib_test.py +++ b/tests/adapters/api/gsheets/lib_test.py @@ -1,6 +1,7 @@ """ Tests for shillelagh.adapters.api.gsheets.lib. """ + import itertools from typing import List, cast diff --git a/tests/adapters/api/gsheets/parsing/base_test.py b/tests/adapters/api/gsheets/parsing/base_test.py index f400273b..2b86f5e5 100644 --- a/tests/adapters/api/gsheets/parsing/base_test.py +++ b/tests/adapters/api/gsheets/parsing/base_test.py @@ -1,6 +1,7 @@ """ Test the base parser/tokenizer. """ + # pylint: disable=protected-access from datetime import datetime diff --git a/tests/adapters/api/gsheets/parsing/date_test.py b/tests/adapters/api/gsheets/parsing/date_test.py index 95c793bc..07843097 100644 --- a/tests/adapters/api/gsheets/parsing/date_test.py +++ b/tests/adapters/api/gsheets/parsing/date_test.py @@ -1,6 +1,7 @@ """ Test the date/time pattern handling (parsing and formatting). 
""" + # pylint: disable=protected-access from datetime import date, datetime, time, timedelta from typing import cast diff --git a/tests/adapters/api/gsheets/parsing/number_test.py b/tests/adapters/api/gsheets/parsing/number_test.py index d65937af..80216f36 100644 --- a/tests/adapters/api/gsheets/parsing/number_test.py +++ b/tests/adapters/api/gsheets/parsing/number_test.py @@ -1,6 +1,7 @@ """ Test number parsing. """ + import pytest from shillelagh.adapters.api.gsheets.parsing.base import tokenize diff --git a/tests/adapters/api/html_table_test.py b/tests/adapters/api/html_table_test.py index a8a2f40c..a23f4bd2 100644 --- a/tests/adapters/api/html_table_test.py +++ b/tests/adapters/api/html_table_test.py @@ -1,6 +1,7 @@ """ Test the HTML table scraper. """ + import pandas as pd import pytest from pytest_mock import MockerFixture diff --git a/tests/adapters/api/socrata_test.py b/tests/adapters/api/socrata_test.py index 484b3260..7cb51981 100644 --- a/tests/adapters/api/socrata_test.py +++ b/tests/adapters/api/socrata_test.py @@ -1,6 +1,7 @@ """ Tests for the Socrata adapter. """ + from datetime import date import pytest diff --git a/tests/adapters/api/system_test.py b/tests/adapters/api/system_test.py index 3c31b4f9..0faa5f49 100644 --- a/tests/adapters/api/system_test.py +++ b/tests/adapters/api/system_test.py @@ -1,6 +1,7 @@ """ Tests for the system adapter. """ + from datetime import datetime, timezone from unittest import mock diff --git a/tests/adapters/api/weatherapi_test.py b/tests/adapters/api/weatherapi_test.py index 5f6ecbb5..395a4cdf 100644 --- a/tests/adapters/api/weatherapi_test.py +++ b/tests/adapters/api/weatherapi_test.py @@ -2,6 +2,7 @@ """ Tests for shillelagh.adapters.api.weatherapi. 
""" + from datetime import datetime, timedelta, timezone from typing import Dict diff --git a/tests/adapters/base_test.py b/tests/adapters/base_test.py index c9bc59b2..dc75df22 100644 --- a/tests/adapters/base_test.py +++ b/tests/adapters/base_test.py @@ -1,6 +1,7 @@ """ Test for shillelagh.adapter.base. """ + from datetime import datetime from typing import Any, Dict, Iterator, List, Optional, Set, Tuple @@ -18,7 +19,6 @@ class FakeAdapterWithDateTime(FakeAdapter): - """ An adapter with a timestamp column. """ @@ -179,7 +179,6 @@ def test_limit_offset(registry: AdapterLoader) -> None: """ class CustomFakeAdapter(FakeAdapter): - """ Custom ``FakeAdapter`` with more data. """ @@ -203,7 +202,7 @@ def get_data( # pylint: disable=too-many-arguments limit: Optional[int] = None, offset: Optional[int] = None, requested_columns: Optional[Set[str]] = None, - **kwargs: Any + **kwargs: Any, ) -> Iterator[Row]: """ Return all data. @@ -211,7 +210,6 @@ def get_data( # pylint: disable=too-many-arguments return iter(self.data) class FakeAdapterWithLimitOnly(CustomFakeAdapter): - """ An adapter that only supports limit (like ``s3select``) """ @@ -222,7 +220,6 @@ class FakeAdapterWithLimitOnly(CustomFakeAdapter): supports_offset = False class FakeAdapterWithLimitAndOffset(CustomFakeAdapter): - """ An adapter that supports both limit and offset. """ @@ -233,7 +230,6 @@ class FakeAdapterWithLimitAndOffset(CustomFakeAdapter): supports_offset = True class FakeAdapterWithOffsetOnly(CustomFakeAdapter): - """ An adapter that supports only offset. """ diff --git a/tests/adapters/file/csvfile_test.py b/tests/adapters/file/csvfile_test.py index db55315a..4fb7ea13 100644 --- a/tests/adapters/file/csvfile_test.py +++ b/tests/adapters/file/csvfile_test.py @@ -2,6 +2,7 @@ """ Tests for shillelagh.adapters.file.csvfile. 
""" + from datetime import datetime, timezone from pathlib import Path diff --git a/tests/adapters/memory/pandas_test.py b/tests/adapters/memory/pandas_test.py index b20cdf0c..9821c403 100644 --- a/tests/adapters/memory/pandas_test.py +++ b/tests/adapters/memory/pandas_test.py @@ -1,6 +1,7 @@ """ Test the Pandas in-memory adapter. """ + import pandas as pd import pytest from pytest_mock import MockerFixture diff --git a/tests/backends/apsw/db_test.py b/tests/backends/apsw/db_test.py index baab91ee..8cdb4c33 100644 --- a/tests/backends/apsw/db_test.py +++ b/tests/backends/apsw/db_test.py @@ -2,6 +2,7 @@ """ Tests for shillelagh.backends.apsw.db. """ + import datetime from typing import Any, List, Tuple from unittest import mock @@ -427,7 +428,6 @@ def test_connect_safe_lists_only_safe_adapters(registry: AdapterLoader) -> None: """ class UnsafeAdapter(FakeAdapter): - """ A safe adapter. """ diff --git a/tests/backends/apsw/dialects/base_test.py b/tests/backends/apsw/dialects/base_test.py index 687823f7..3d5c744d 100644 --- a/tests/backends/apsw/dialects/base_test.py +++ b/tests/backends/apsw/dialects/base_test.py @@ -1,6 +1,7 @@ """ Tests for shillelagh.backends.apsw.dialects.base. """ + from unittest import mock import pytest diff --git a/tests/backends/apsw/dialects/gsheets_test.py b/tests/backends/apsw/dialects/gsheets_test.py index 80738a98..7f6de796 100644 --- a/tests/backends/apsw/dialects/gsheets_test.py +++ b/tests/backends/apsw/dialects/gsheets_test.py @@ -1,6 +1,7 @@ """ Test for shillelagh.backends.apsw.dialects.gsheets. """ + import datetime from typing import Any, Dict from unittest import mock diff --git a/tests/backends/apsw/dialects/safe_test.py b/tests/backends/apsw/dialects/safe_test.py index 6441a025..2d82ed22 100644 --- a/tests/backends/apsw/dialects/safe_test.py +++ b/tests/backends/apsw/dialects/safe_test.py @@ -1,6 +1,7 @@ """ Tests for shillelagh.backends.apsw.dialects.safe. 
""" + from sqlalchemy.engine.url import make_url from shillelagh.backends.apsw.dialects.safe import APSWSafeDialect diff --git a/tests/backends/apsw/vt_test.py b/tests/backends/apsw/vt_test.py index 897690d9..4cc9f15d 100644 --- a/tests/backends/apsw/vt_test.py +++ b/tests/backends/apsw/vt_test.py @@ -2,6 +2,7 @@ """ Tests for shillelagh.backends.apsw.vt. """ + import datetime import json from typing import Any, Dict, Iterable @@ -28,7 +29,6 @@ class FakeAdapterNoFilters(FakeAdapter): - """ An adapter where columns have no filters. """ @@ -39,7 +39,6 @@ class FakeAdapterNoFilters(FakeAdapter): class FakeAdapterOnlyEqual(FakeAdapter): - """ An adapter where columns can only be filtered via equality. """ @@ -50,7 +49,6 @@ class FakeAdapterOnlyEqual(FakeAdapter): class FakeAdapterStaticSort(FakeAdapter): - """ An adapter with columns having a static order. """ @@ -61,7 +59,6 @@ class FakeAdapterStaticSort(FakeAdapter): class FakeAdapterNoColumns(FakeAdapter): - """ An adapter without columns. """ diff --git a/tests/conftest.py b/tests/conftest.py index 48a4343b..6d73444c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,7 @@ """ Fixtures for Shillelagh. """ + import json import logging import os diff --git a/tests/console_test.py b/tests/console_test.py index 6c1cc93b..558c01df 100644 --- a/tests/console_test.py +++ b/tests/console_test.py @@ -2,6 +2,7 @@ """ Tests for shillelagh.console. """ + from io import StringIO from pathlib import Path diff --git a/tests/fakes/__init__.py b/tests/fakes/__init__.py index d062b2b7..0d7c3178 100644 --- a/tests/fakes/__init__.py +++ b/tests/fakes/__init__.py @@ -31,7 +31,6 @@ def load(self) -> Type[Adapter]: class FakeAdapter(Adapter): - """ A simple adapter that keeps data in memory. """ diff --git a/tests/fields_test.py b/tests/fields_test.py index 8c1947ff..8236f2a5 100644 --- a/tests/fields_test.py +++ b/tests/fields_test.py @@ -1,6 +1,7 @@ """ Tests for shillelagh.fields. 
""" + import datetime import decimal import sys @@ -456,7 +457,6 @@ class IntegerOrString(Field[Union[int, str], Union[int, str]]): db_api_type = "STRING" class CustomFakeAdapter(FakeAdapter): - """ A simple adapter with an ``IntegerOrString`` column. """ diff --git a/tests/filters_test.py b/tests/filters_test.py index 41046bea..f496d219 100644 --- a/tests/filters_test.py +++ b/tests/filters_test.py @@ -1,6 +1,7 @@ """ Tests for shillelagh.filters. """ + import pytest from shillelagh.filters import ( diff --git a/tests/functions_test.py b/tests/functions_test.py index 31e44949..8a3493df 100644 --- a/tests/functions_test.py +++ b/tests/functions_test.py @@ -1,6 +1,7 @@ """ Tests for shillelagh.functions. """ + import json import sys diff --git a/tests/lib_test.py b/tests/lib_test.py index 52dae66c..55c7328c 100644 --- a/tests/lib_test.py +++ b/tests/lib_test.py @@ -1,6 +1,7 @@ """ Tests for shillelagh.lib. """ + from typing import Any, Dict, Iterator, List, Tuple import pytest diff --git a/tests/types_test.py b/tests/types_test.py index 09d4456d..4a4344d0 100644 --- a/tests/types_test.py +++ b/tests/types_test.py @@ -1,6 +1,7 @@ """ Tests for shillelagh.types. """ + from datetime import date, datetime, time, timezone from shillelagh.backends.apsw.db import connect From d675a2280696a9500b20860b7aa108b64ec2d158 Mon Sep 17 00:00:00 2001 From: Ryan Julyan <ryan@julyan.biz> Date: Tue, 4 Jun 2024 17:21:36 +0200 Subject: [PATCH 20/22] Add import_dbapi() to mimic dbapi() to prevent SADeprecationWarning (#452) SADeprecationWarning: The dbapi() classmethod on dialect classes has been renamed to import_dbapi(). Implement an import_dbapi() classmethod directly on class <class 'shillelagh.backends.apsw.dialects.base.APSWDialect'> to remove this warning; the old .dbapi() classmethod may be maintained for backwards compatibility. 
engine = create_engine( --- src/shillelagh/backends/apsw/dialects/base.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/shillelagh/backends/apsw/dialects/base.py b/src/shillelagh/backends/apsw/dialects/base.py index 2dd8da0b..73d15b60 100644 --- a/src/shillelagh/backends/apsw/dialects/base.py +++ b/src/shillelagh/backends/apsw/dialects/base.py @@ -59,6 +59,13 @@ def dbapi(cls): # pylint: disable=method-hidden """ return db + @classmethod + def import_dbapi(cls): # pylint: disable=method-hidden + """ + Return the DB API module. + """ + return db + def __init__( self, adapters: Optional[List[str]] = None, From 69fa7bf3929384f0cafd3cc70fc40cbc9a2ddbb0 Mon Sep 17 00:00:00 2001 From: Beto Dealmeida <roberto@dealmeida.net> Date: Tue, 4 Jun 2024 11:23:51 -0400 Subject: [PATCH 21/22] Add missing coverage --- CHANGELOG.rst | 2 ++ tests/backends/apsw/dialects/base_test.py | 7 +++++++ 2 files changed, 9 insertions(+) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 470991f2..df23ca0c 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ Changelog Next ==== +- Add ``import_dbapi()`` to suppress warning (#452) + Version 1.2.19 - 2024-04-03 =========================== diff --git a/tests/backends/apsw/dialects/base_test.py b/tests/backends/apsw/dialects/base_test.py index 3d5c744d..a578c2fd 100644 --- a/tests/backends/apsw/dialects/base_test.py +++ b/tests/backends/apsw/dialects/base_test.py @@ -66,3 +66,10 @@ def test_has_table(registry: AdapterLoader) -> None: assert inspector.has_table("dummy://a") assert inspector.has_table("dummy://b") assert not inspector.has_table("funny://b") + + +def test_import_dbapi() -> None: + """ + Test ``import_dbapi``. 
+ """ + assert APSWDialect.import_dbapi() == APSWDialect.dbapi() From 70a60df577726546ca7c38e7a613c929df39b8ac Mon Sep 17 00:00:00 2001 From: Beto Dealmeida <roberto@dealmeida.net> Date: Tue, 4 Jun 2024 16:30:05 -0400 Subject: [PATCH 22/22] Add missing coverage (#453) --- src/shillelagh/adapters/api/generic_json.py | 2 +- tests/adapters/api/generic_json_test.py | 6 ++++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/shillelagh/adapters/api/generic_json.py b/src/shillelagh/adapters/api/generic_json.py index 8ba0e699..663ed326 100644 --- a/src/shillelagh/adapters/api/generic_json.py +++ b/src/shillelagh/adapters/api/generic_json.py @@ -142,7 +142,7 @@ def get_data( # pylint: disable=unused-argument, too-many-arguments response = self._session.get(self.uri) payload = response.json() if not response.ok: - raise ProgrammingError(f'Error: {payload["message"]}') + raise ProgrammingError(f'Error: {payload["error"]["message"]}') for i, row in enumerate(jsonpath.findall(self.path, payload)): row = { diff --git a/tests/adapters/api/generic_json_test.py b/tests/adapters/api/generic_json_test.py index 68dfcc6b..b7e2b1f7 100644 --- a/tests/adapters/api/generic_json_test.py +++ b/tests/adapters/api/generic_json_test.py @@ -99,12 +99,14 @@ def test_generic_json(requests_mock: Mocker) -> None: requests_mock.get( "https://example.org/data.json", - json={"message": "An error occurred"}, + json={ + "error": {"code": 1002, "message": "API key is invalid or not provided."}, + }, status_code=500, ) with pytest.raises(ProgrammingError) as excinfo: list(cursor.execute(sql)) - assert str(excinfo.value) == "Error: An error occurred" + assert str(excinfo.value) == "Error: API key is invalid or not provided." def test_generic_json_complex_type(requests_mock: Mocker) -> None: