From 500bdfac16ba3400efd7daee69b5c4468bc00451 Mon Sep 17 00:00:00 2001 From: talboren Date: Thu, 25 Jan 2024 16:54:07 +0200 Subject: [PATCH 01/12] fix(ui): row ids should be auto-generated (#751) --- keep-ui/app/alerts/alert-table.tsx | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/keep-ui/app/alerts/alert-table.tsx b/keep-ui/app/alerts/alert-table.tsx index a1a0b1d1d..0423f2784 100644 --- a/keep-ui/app/alerts/alert-table.tsx +++ b/keep-ui/app/alerts/alert-table.tsx @@ -74,6 +74,14 @@ export const getColumnsOrder = (presetName?: string): ColumnOrderState => { return []; }; +const hardcodedDefaultHidden = [ + "playbook_url", + "ack_status", + "deletedAt", + "created_by", + "assignees", +]; + export const getHiddenColumns = ( presetName?: string, columns?: ColumnDef[] @@ -84,8 +92,7 @@ export const getHiddenColumns = ( .map((c) => c.id!) ?? []; if (presetName === undefined) { return getDefaultColumnVisibility({}, [ - "playbook_url", - "ack_status", + ...hardcodedDefaultHidden, ...defaultHidden, ]); } @@ -99,8 +106,7 @@ export const getHiddenColumns = ( } return getDefaultColumnVisibility({}, [ - "playbook_url", - "ack_status", + ...hardcodedDefaultHidden, ...defaultHidden, ]); }; @@ -344,7 +350,6 @@ export function AlertTable({ onColumnOrderChange: setColumnOrder, onColumnVisibilityChange: setColumnVisibility, onRowSelectionChange: rowSelection?.onChange, - getRowId: (row) => row.fingerprint, }); return ( From fd5ecb6414a93f57d436813f94cfd7dfcd305e2e Mon Sep 17 00:00:00 2001 From: Ron <43244104+rons4@users.noreply.github.com> Date: Thu, 25 Jan 2024 15:56:05 +0100 Subject: [PATCH 02/12] feat: SIGNL4 Sample Workflow (#748) Co-authored-by: Shahar Glazner --- .../workflows/signl4-alerting-workflow.yaml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 examples/workflows/signl4-alerting-workflow.yaml diff --git a/examples/workflows/signl4-alerting-workflow.yaml b/examples/workflows/signl4-alerting-workflow.yaml new file mode 100644 index 000000000..7923b3129 --- /dev/null +++ b/examples/workflows/signl4-alerting-workflow.yaml @@ -0,0 +1,18 @@ +id: signl4-alerting-workflow +description: handle alerts +triggers: +- filters: + - key: source + value: r".*" + type: alert +owners: [] +services: [] +steps: [] +actions: +- name: signl4-action + provider: + config: '{{ providers.SIGNL4 Alerting }}' + type: signl4 + with: + message: Test. 
+ title: Keep Alert From 270b24ab06584045575d2f3122e8ccf7782edd26 Mon Sep 17 00:00:00 2001 From: asharonbaltazar <58940073+asharonbaltazar@users.noreply.github.com> Date: Mon, 29 Jan 2024 07:19:22 -0500 Subject: [PATCH 03/12] fix: flashing alerts from pusher (#754) Signed-off-by: talboren Co-authored-by: talboren --- keep-ui/utils/hooks/useAlerts.ts | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/keep-ui/utils/hooks/useAlerts.ts b/keep-ui/utils/hooks/useAlerts.ts index 40c9d6d66..fd978c20d 100644 --- a/keep-ui/utils/hooks/useAlerts.ts +++ b/keep-ui/utils/hooks/useAlerts.ts @@ -170,11 +170,20 @@ export const useAlerts = () => { new TextDecoder().decode(decompressedAlert) ); - next(null, { - alerts: newAlerts, - lastSubscribedDate: new Date(), - isAsyncLoading: false, - pusherChannel, + next(null, (data) => { + if (data) { + return { + ...data, + alerts: [...newAlerts, ...data.alerts], + }; + } + + return { + alerts: newAlerts, + lastSubscribedDate: new Date(), + isAsyncLoading: false, + pusherChannel, + }; }); }); @@ -212,7 +221,7 @@ export const useAlerts = () => { next(null, { alerts: [], lastSubscribedDate: new Date(), - isAsyncLoading: false, + isAsyncLoading: true, pusherChannel, }); console.log("Connected to pusher"); From c6528ed7c63839755bb2807fe645033366bf6c1f Mon Sep 17 00:00:00 2001 From: asharonbaltazar <58940073+asharonbaltazar@users.noreply.github.com> Date: Mon, 29 Jan 2024 11:19:44 -0500 Subject: [PATCH 04/12] fix: Later Alerts Didn't Overwrite Earlier Ones (#756) --- keep-ui/utils/hooks/useAlerts.ts | 29 +++++++++++++---------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/keep-ui/utils/hooks/useAlerts.ts b/keep-ui/utils/hooks/useAlerts.ts index fd978c20d..65746786e 100644 --- a/keep-ui/utils/hooks/useAlerts.ts +++ b/keep-ui/utils/hooks/useAlerts.ts @@ -19,10 +19,15 @@ export const getFormatAndMergePusherWithEndpointAlerts = ( endpointAlerts: AlertDto[], pusherAlerts: AlertDto[] ): AlertDto[] => { - const pusherAlertsWithLastReceivedDate = pusherAlerts.map((pusherAlert) => ({ - ...pusherAlert, - lastReceived: new Date(pusherAlert.lastReceived), - })); + // Create a map of the latest received times for the new alerts + const uniquePusherAlerts = new Map( + pusherAlerts.map((alert) => [ + alert.fingerprint, + { ...alert, lastReceived: new Date(alert.lastReceived) }, + ]) + ); + + const pusherAlertsWithLastReceivedDate = [...uniquePusherAlerts.values()]; const endpointAlertsWithLastReceivedDate = endpointAlerts.map( (endpointAlert) => ({ @@ -31,26 +36,18 @@ export const getFormatAndMergePusherWithEndpointAlerts = ( }) ); - // Create a map of the latest received times for the new alerts - const latestReceivedTimes = new Map( - pusherAlertsWithLastReceivedDate.map((alert) => [ - alert.fingerprint, - alert.lastReceived, - ]) - ); - // Filter out previous alerts if they are already in the new alerts with a more recent lastReceived const filteredEndpointAlerts = endpointAlertsWithLastReceivedDate.filter( (endpointAlert) => { - const newAlertReceivedTime = latestReceivedTimes.get( + const pusherAlertByFingerprint = uniquePusherAlerts.get( endpointAlert.fingerprint ); - if (newAlertReceivedTime === undefined) { + if (pusherAlertByFingerprint === undefined) { return true; } - return endpointAlert.lastReceived > newAlertReceivedTime; + return endpointAlert.lastReceived > pusherAlertByFingerprint.lastReceived; } ); @@ -174,7 +171,7 @@ export const useAlerts = () => { if (data) { return { ...data, - alerts: [...newAlerts, 
...data.alerts], + alerts: [...data.alerts, ...newAlerts], }; } From 80eec67a6d87a1fa77eae9ebaebd5b11768c7dba Mon Sep 17 00:00:00 2001 From: talboren Date: Mon, 29 Jan 2024 18:56:52 +0200 Subject: [PATCH 05/12] docs: deployment, secret manager and cleanup (#757) --- docs/api-ref/tenant/is-onboarded.mdx | 3 - .../tenant/save-github-installation-id.mdx | 3 - .../authentication.mdx | 0 docs/deployment/docker.mdx | 31 ++ docs/deployment/kubernetes.mdx | 23 + docs/deployment/openshift.mdx | 15 + docs/deployment/secret-manager.mdx | 84 ++++ docs/development/adding-a-new-provider.mdx | 170 ------- docs/development/external-url.mdx | 4 +- docs/development/getting-started.mdx | 48 -- docs/mint.json | 41 +- docs/overview/alternatives.mdx | 55 --- docs/overview/comparison.mdx | 41 ++ docs/overview/introduction.mdx | 4 +- .../alertseverityandstatus.mdx | 0 docs/providers/adding-a-new-provider.mdx | 437 +++++++++++++++++- keep/api/api.py | 2 - keep/api/routes/tenant.py | 82 ---- 18 files changed, 640 insertions(+), 403 deletions(-) delete mode 100644 docs/api-ref/tenant/is-onboarded.mdx delete mode 100644 docs/api-ref/tenant/save-github-installation-id.mdx rename docs/{development => deployment}/authentication.mdx (100%) create mode 100644 docs/deployment/docker.mdx create mode 100644 docs/deployment/kubernetes.mdx create mode 100644 docs/deployment/openshift.mdx create mode 100644 docs/deployment/secret-manager.mdx delete mode 100644 docs/development/adding-a-new-provider.mdx delete mode 100644 docs/overview/alternatives.mdx create mode 100644 docs/overview/comparison.mdx rename docs/{overview => platform}/alertseverityandstatus.mdx (100%) delete mode 100644 keep/api/routes/tenant.py diff --git a/docs/api-ref/tenant/is-onboarded.mdx b/docs/api-ref/tenant/is-onboarded.mdx deleted file mode 100644 index aeea46a93..000000000 --- a/docs/api-ref/tenant/is-onboarded.mdx +++ /dev/null @@ -1,3 +0,0 @@ ---- -openapi: get /tenant/onboarded ---- diff --git a/docs/api-ref/tenant/save-github-installation-id.mdx b/docs/api-ref/tenant/save-github-installation-id.mdx deleted file mode 100644 index 12737090b..000000000 --- a/docs/api-ref/tenant/save-github-installation-id.mdx +++ /dev/null @@ -1,3 +0,0 @@ ---- -openapi: post /tenant/github ---- diff --git a/docs/development/authentication.mdx b/docs/deployment/authentication.mdx similarity index 100% rename from docs/development/authentication.mdx rename to docs/deployment/authentication.mdx diff --git a/docs/deployment/docker.mdx b/docs/deployment/docker.mdx new file mode 100644 index 000000000..5b77e205c --- /dev/null +++ b/docs/deployment/docker.mdx @@ -0,0 +1,31 @@ +--- +title: "Docker" +sidebarTitle: "Docker" +--- + +### Spin up Keep with docker-compose latest images +The easiest way to start keep is is with docker-compose: +```shell +curl https://raw.githubusercontent.com/keephq/keep/main/start.sh | sh +``` + +```bash start.sh +#!/bin/bash +# Keep install script for docker compose + +echo "Creating state directory." +mkdir -p state +test -e state +echo "Changing directory ownership to non-privileged user." +chown -R 999:999 state || echo "Unable to change directory ownership, changing permissions instead." 
&& chmod -R 0777 state +which curl &> /dev/null || echo "curl not installed" +curl https://raw.githubusercontent.com/keephq/keep/main/docker-compose.yml --output docker-compose.yml +curl https://raw.githubusercontent.com/keephq/keep/main/docker-compose.common.yml --output docker-compose.common.yml + +docker compose up -d +``` + +The docker-compose.yml contains 3 services: +- [keep-backend](https://console.cloud.google.com/artifacts/docker/keephq/us-central1/keep/keep-api?project=keephq) - a fastapi service that as the API server. +- [keep-frontend](https://console.cloud.google.com/artifacts/docker/keephq/us-central1/keep/keep-ui?project=keephq) - a nextjs app that serves as Keep UI interface. +- [keep-websocket-server](https://docs.soketi.app/getting-started/installation/docker) - Soketi (a pusher compatible websocket server) for real time alerting. diff --git a/docs/deployment/kubernetes.mdx b/docs/deployment/kubernetes.mdx new file mode 100644 index 000000000..714a20563 --- /dev/null +++ b/docs/deployment/kubernetes.mdx @@ -0,0 +1,23 @@ +--- +title: "Kubernetes" +sidebarTitle: "Kubernetes" +--- + +Keep can be installed via Helm Chart. + +First, clone Keep: +``` +git clone https://github.com/keephq/keep.git && cd keep +``` + +Next, install using: +``` +helm install -f chart/keep/values.yaml keep chart/keep/ +``` + +Notice for it to work locally, you'll need this port forwarding: +``` +kubectl port-forward svc/keep-frontend 3000:3000 +``` + +To learn more about Keep's helm chart, see https://github.com/keephq/keep/blob/main/chart/keep/README.md diff --git a/docs/deployment/openshift.mdx b/docs/deployment/openshift.mdx new file mode 100644 index 000000000..f53c223b1 --- /dev/null +++ b/docs/deployment/openshift.mdx @@ -0,0 +1,15 @@ +--- +title: "Openshift" +sidebarTitle: "Openshift" +--- + +Keep's Helm Chart also supports Openshift installation. + +Simply follow the Kubernetes set-up guide, but make sure to modify the following lines under frontend(/backend).route in the values.yaml file as follows: +``` +enabled: true +host: +path: # should be / for default +tls: +wildcardPolicy: +``` diff --git a/docs/deployment/secret-manager.mdx b/docs/deployment/secret-manager.mdx new file mode 100644 index 000000000..4de17c68a --- /dev/null +++ b/docs/deployment/secret-manager.mdx @@ -0,0 +1,84 @@ +--- +title: "Secret Manager" +sidebarTitle: "Secret Manager" +--- + +## Overview + +Secret Manager selection is crucial for securing your application. Different modes can be set up depending on the deployment type. Our system supports four primary secret manager types. + +## Secret Manager Factory + +The `SecretManagerFactory` is a utility class used to create instances of different types of secret managers. It leverages the Factory design pattern to abstract the creation logic based on the type of secret manager required. The factory supports creating instances of File, GCP, Kubernetes, and Vault Secret Managers. + +The `SECRET_MANAGER_TYPE` environment variable plays a crucial role in the SecretManagerFactory for determining the default type of secret manager to be instantiated when no specific type is provided in the method call. + +**Functionality**: + +**Default Secret Manager**: If the `SECRET_MANAGER_TYPE` environment variable is set, its value dictates the default type of secret manager that the factory will create. +The value of this variable should correspond to one of the types defined in SecretManagerTypes enum (`FILE`, `GCP`, `K8S`, `VAULT`). 
+ +**Example Configuration**: + +Setting `SECRET_MANAGER_TYPE=GCP` in the environment will make the factory create instances of GcpSecretManager by default. +If `SECRET_MANAGER_TYPE` is not set or is set to `FILE`, the factory defaults to creating instances of FileSecretManager. +This environment variable provides flexibility and ease of configuration, allowing different secret managers to be used in different environments or scenarios without code changes. + +## File Secert Manager + +The `FileSecretManager` is a concrete implementation of the BaseSecretManager for managing secrets stored in the file system. It uses a specified directory (defaulting to ./) to read, write, and delete secret files. + +Configuration: + +Set the environment variable `SECRET_MANAGER_DIRECTORY` to specify the directory where secrets are stored. If not set, defaults to the current directory (./). + +Usage: + +- Secrets are stored as files in the specified directory. +- Reading a secret involves fetching content from a file. +- Writing a secret creates or updates a file with the given content. +- Deleting a secret removes the corresponding file. + +## Kubernetes Secret Manager + +The `KubernetesSecretManager` interfaces with Kubernetes' native secrets system. It manages secrets within a specified Kubernetes namespace and is designed to operate within a Kubernetes cluster. + +Configuration: + +Set `K8S_NAMESPACE` environment variable to specify the Kubernetes namespace. Defaults to default if not set. Assumes Kubernetes configurations (like service account tokens) are properly set up when running within a cluster. + +Usage: + +- Secrets are stored as Kubernetes Secret objects. +- Provides functionalities to create, retrieve, and delete Kubernetes secrets. +- Handles base64 encoding and decoding as required by Kubernetes. + +## GCP Secret Manager + +The `GcpSecretManager` utilizes Google Cloud's Secret Manager service for secret management. It requires setting up with Google Cloud credentials and a project ID. + +Configuration: + +Ensure the environment variable `GOOGLE_CLOUD_PROJECT` is set with your Google Cloud project ID. + +Usage: + +- Secrets are managed using Google Cloud's Secret Manager. +- Supports operations to create, access, and delete secrets in the cloud. +- Integrates with OpenTelemetry for tracing secret management operations. + +## Hashicorp Vault Secret Manager + +The `VaultSecretManager` is tailored for Hashicorp Vault, a tool for managing sensitive data. It supports token-based authentication as well as Kubernetes-based authentication for Vault. + +Configuration: + +- Set `HASHICORP_VAULT_ADDR` to the Vault server address. Defaults to http://localhost:8200. +- Use `HASHICORP_VAULT_TOKEN` for token-based authentication. +- Set `HASHICORP_VAULT_USE_K8S` to True and provide `HASHICORP_VAULT_K8S_ROLE` for Kubernetes-based authentication. + +Usage: + +- Manages secrets in a Hashicorp Vault server. +- Provides methods to write, read, and delete secrets from Vault. +- Supports different Vault authentication methods including static tokens and Kubernetes service account tokens. 
diff --git a/docs/development/adding-a-new-provider.mdx b/docs/development/adding-a-new-provider.mdx deleted file mode 100644 index 8f9f8f982..000000000 --- a/docs/development/adding-a-new-provider.mdx +++ /dev/null @@ -1,170 +0,0 @@ ---- -title: "Adding a new Provider" -sidebarTitle: "Adding a New Provider" ---- -Under contstruction - -### Basics - -- BaseProvider is the base class every provider needs to inherit from -- BaseProvider exposes 4 important functions: - - `query(self, **kwargs: dict)` which is used to query the provider in steps - - `notify(self, **kwargs: dict)` which is used to notify via the provider in actions - - `dispose(self)` which is used to dispose the provider after usage (e.g. close the connection to the DB) - - `validate_config(self)` which is used to validate the configuration passed to the Provider -- And 4 functions that are not required: - - `get_alerts(self)` which is used to fetch configured alerts (**not the currently active alerts**) - - `deploy_alert(self, alert: dict, alert_id: Optional[str]` which is used to deploy an alert to the provider - - `get_alert_schema(self)` which is used to describe the provider's API schema of how to deploy alert - - `get_logs(self, limit)` which is used to fetch logs from the provider (currently used by the AI layer to generate more accurate results) -- Providers must be located in the providers directory -- Provider directory must start with the provider's unique identifier followed by underscore+provider (e.g. `slack_provider`) -- Provider file name must start with the provider's unique identifier followed by underscore+provider+.py (e.g. `slack_provider.py`) - -### ProviderConfig - -```python -@dataclass -class ProviderConfig: - """ - Provider configuration model. - - Args: - description (Optional[str]): The description of the provider. - authentication (dict): The configuration for the provider. - """ - - authentication: dict - description: Optional[str] = None - - def __post_init__(self): - if not self.authentication: - return - for key, value in self.authentication.items(): - if ( - isinstance(value, str) - and value.startswith("{{") - and value.endswith("}}") - ): - self.authentication[key] = chevron.render(value, {"env": os.environ}) -``` - -### BaseProvider - -```python -class BaseProvider(metaclass=abc.ABCMeta): - def __init__(self, provider_id: str, config: ProviderConfig): - """ - Initialize a provider. - - Args: - provider_id (str): The provider id. - **kwargs: Provider configuration loaded from the provider yaml file. - """ - # Initalize logger for every provider - self.logger = logging.getLogger(self.__class__.__name__) - self.id = provider_id - self.config = config - self.validate_config() - self.logger.debug( - "Base provider initalized", extra={"provider": self.__class__.__name__} - ) - - @property - def provider_id(self) -> str: - """ - Get the provider id. - - Returns: - str: The provider id. - """ - return self.id - - @abc.abstractmethod - def dispose(self): - """ - Dispose of the provider. - """ - raise NotImplementedError("dispose() method not implemented") - - @abc.abstractmethod - def validate_config(): - """ - Validate provider configuration. - """ - raise NotImplementedError("validate_config() method not implemented") - - def notify(self, **kwargs): - """ - Output alert message. 
- - Args: - **kwargs (dict): The provider context (with statement) - """ - raise NotImplementedError("notify() method not implemented") - - def query(self, **kwargs: dict): - """ - Query the provider using the given query - - Args: - kwargs (dict): The provider context (with statement) - - Raises: - NotImplementedError: _description_ - """ - raise NotImplementedError("query() method not implemented") - - def get_alerts(self, alert_id: Optional[str] = None): - """ - Get alerts from the provider. - - Args: - alert_id (Optional[str], optional): If given, gets a specific alert by id. Defaults to None. - """ - # todo: we'd want to have a common alert model for all providers (also for consistent output from GPT) - raise NotImplementedError("get_alerts() method not implemented") - - def deploy_alert(self, alert: dict, alert_id: Optional[str] = None): - """ - Deploy an alert to the provider. - - Args: - alert (dict): The alert to deploy. - alert_id (Optional[str], optional): If given, deploys a specific alert by id. Defaults to None. - """ - raise NotImplementedError("deploy_alert() method not implemented") - - @staticmethod - def get_alert_schema() -> dict: - """ - Get the alert schema description for the provider. - e.g. How to define an alert for the provider that can be pushed via the API. - - Returns: - str: The alert format description. - """ - raise NotImplementedError( - "get_alert_format_description() method not implemented" - ) - - def get_logs(self, limit: int = 5) -> list: - """ - Get logs from the provider. - - Args: - limit (int): The number of logs to get. - """ - raise NotImplementedError("get_logs() method not implemented") - - def expose(self): - """Expose parameters that were calculated during query time. - - Each provider can expose parameters that were calculated during query time. - E.g. parameters that were supplied by the user and were rendered by the provider. - - A concrete example is the "_from" and "to" of the Datadog Provider which are calculated during execution. - """ - # TODO - implement dynamically using decorators and - return {} -``` diff --git a/docs/development/external-url.mdx b/docs/development/external-url.mdx index 9eaf2f479..70528f532 100644 --- a/docs/development/external-url.mdx +++ b/docs/development/external-url.mdx @@ -1,6 +1,6 @@ --- -title: "Keep with External URL" -sidebarTitle: "Keep with External URL" +title: "Keep with Internet URL" +sidebarTitle: "Keep with Internet URL" --- ## Introduction diff --git a/docs/development/getting-started.mdx b/docs/development/getting-started.mdx index 31ab3d0d0..8db52799f 100644 --- a/docs/development/getting-started.mdx +++ b/docs/development/getting-started.mdx @@ -3,19 +3,6 @@ title: "Getting started" sidebarTitle: "Getting started" --- -## Docker-compose - -### Spin up Keep with docker-compose latest images -The easiest way to start keep is is with docker-compose: -```shell -curl https://raw.githubusercontent.com/keephq/keep/main/start.sh | sh -``` - -The docker-compose.yml contains two services: -- keep-backend - a fastapi service that as the API server. -- keep-frontend - a nextjs app that serves as Keep UI interface. -- keep-websocket-server - Soketi (a pusher compatible websocket server). - ### Docker-compose dev images You can use `docker-compose.dev.yaml` to start Keep in a development mode. @@ -29,41 +16,6 @@ Next, run docker-compose -f docker-compose.dev.yaml - up ``` - -## Kubernetes -Keep can be installed via Helm Chart. 
- -First, clone Keep: -``` -git clone https://github.com/keephq/keep.git && cd keep -``` - -Next, install using: -``` -helm install -f chart/keep/values.yaml keep chart/keep/ -``` - -Notice for it to work locally, you'll need this port forwarding: -``` -kubectl port-forward svc/keep-frontend 3000:3000 -``` - -To learn more about Keep's helm chart, see https://github.com/keephq/keep/blob/main/chart/keep/README.md - - -## Openshift -Keep's Helm Chart also supports Openshift installation. - -Simply follow the Kubernetes set-up guide, but make sure to modify the following lines under frontend(/backend).route in the values.yaml file as follows: -``` -enabled: true -host: -path: # should be / for default -tls: -wildcardPolicy: -``` - - ## VSCode You can run Keep from your VSCode (after cloning the repo) by adding this configurations to your `launch.json`: diff --git a/docs/mint.json b/docs/mint.json index 0f21a508d..75e2bca54 100644 --- a/docs/mint.json +++ b/docs/mint.json @@ -28,11 +28,28 @@ "pages": [ "overview/introduction", "overview/keyconcepts", - "overview/alertseverityandstatus", "overview/usecases", "overview/ruleengine", "overview/examples", - "overview/alternatives" + "overview/comparison" + ] + }, + { + "group": "Development", + "pages": [ + "development/getting-started", + "development/authentication", + "development/external-url" + ] + }, + { + "group": "Deployment", + "pages": [ + "deployment/authentication", + "deployment/secret-manager", + "deployment/docker", + "deployment/kubernetes", + "deployment/openshift" ] }, { @@ -41,6 +58,7 @@ "platform/overview", "platform/providers", "platform/alerts", + "platform/alertseverityandstatus", "platform/workflows", "platform/workflowbuilder", "platform/settings" @@ -51,8 +69,9 @@ "pages": [ "providers/overview", "providers/fingerprints", + "providers/adding-a-new-provider", { - "group": "Supported providers", + "group": "Supported Providers", "pages": [ "providers/documentation/aks-provider", "providers/documentation/axiom-provider", @@ -172,13 +191,6 @@ "group": "Healthcheck", "pages": ["api-ref/healthcheck/healthcheck"] }, - { - "group": "Tenant", - "pages": [ - "api-ref/tenant/is-onboarded", - "api-ref/tenant/save-github-installation-id" - ] - }, { "group": "Alerts", "pages": ["api-ref/alerts/get-alerts", "api-ref/alerts/receive-event"] @@ -248,15 +260,6 @@ ] } ] - }, - { - "group": "Development", - "pages": [ - "development/getting-started", - "development/authentication", - "development/external-url", - "development/adding-a-new-provider" - ] } ], "footerSocials": { diff --git a/docs/overview/alternatives.mdx b/docs/overview/alternatives.mdx deleted file mode 100644 index 563cb79f5..000000000 --- a/docs/overview/alternatives.mdx +++ /dev/null @@ -1,55 +0,0 @@ ---- -title: "Comparison to other tools" ---- - -Sometimes, it's easier to understand a tool's capabilities by comparing it to other tools in the same ecosystem. In this section, we'll discuss how Keep interacts with and compares to other tools in the ecosystem. - -## Keep vs Alerta -> alerta monitoring system is a tool used to consolidate and de-duplicate alerts from multiple sources for quick ‘at-a-glance’ visualisation. With just one system you can monitor alerts from many other monitoring tools on a single screen -[(alerta docs)](https://docs.alerta.io/). 
- - - -| | Keep | Alternative | -|----------|-----------------------|----------------------| -| Aggregation and correlation | ✅ | ✅ | -| Integrations | ✅ (both manually and automatically with Webhooks Integration) | ✅ (manually) | -| Alerts enrichment | ✅ | ❌ -| Open source | ✅ | ✅ | -| Workflow automation | ✅ | ❌ | -| Managed version | ✅ | ❌ | -| CI/CD integration | 🚧 | ❌ | -| Noise reduction | 🚧 | ❌ | - - -## Keep vs Grafana - -> Using Grafana Alerting, you create queries and expressions from multiple data sources — no matter where your data is stored — giving you the flexibility to combine your data and alert on your metrics and logs in new and unique ways. You can then create, manage, and take action on your alerts from a single, consolidated view, and improve your team’s ability to identify and resolve issues quickly. -[(grafana docs)](https://grafana.com/docs/grafana/latest/alerting/). - -| | Keep | Alternative | -|----------|-----------------------|----------------------| -| Aggregation and correlation of alerts | ✅ | ❌ | -| Integrations | ✅ (both manually and automatically with Webhooks Integration) | ✅ (manually) | -| Alerts enrichment | ✅ | ❌ -| Open source | ✅ | ✅ | -| Workflow automation | ✅ | ❌| -| Managed version | ✅ | ✅ | -| CI/CD integration | 🚧 | ⚠️ has terraform integration | -| Noise reduction | 🚧 | ❌ | - - -## Keep vs observability tools (Datadog, New Relic, etc) -Most existing observability tools, such as Datadog and New Relic, have implemented alerting mechanisms. However, they generally have two main limitations: -- They are data centric - which means your data needs to be in the tool in order to be able to alert on it. -- In many observability tools, alerting features are often not as robust or flexible as other functionalities. While setting up basic alerts is usually straightforward, these tools often fall short when it comes to build a mature alerting culture. - - -> If you are not suffering from the problems above or the [problems Keep solves](/overview/introduction#what-problem-does-keep-solve) and satisfied with your alerting, you probably don't need Keep - -Keep mitigates these concerns by being agnostic to where the data is, so you can alert on whatever you want, and treats alerts as first-class citizen which let you create a great alerting culture. - - - -## Keep vs incident management tools (PagerDuty, OpsGenie, etc) -Most incident management tools offer features like alert aggregation and workflow automation. However, their core value is the incident management itself, which Keep aims to prevent. Keep focuses only on the alert lifecycle. diff --git a/docs/overview/comparison.mdx b/docs/overview/comparison.mdx new file mode 100644 index 000000000..f12085a6f --- /dev/null +++ b/docs/overview/comparison.mdx @@ -0,0 +1,41 @@ +--- +title: "Comparison" +--- + +Sometimes, it's easier to understand a tool's capabilities by comparing it to other tools in the same ecosystem. In this section, we'll discuss how Keep interacts with and compares to other tools in the ecosystem. + +## Keep vs Alerta + +> alerta monitoring system is a tool used to consolidate and de-duplicate alerts from multiple sources for quick ‘at-a-glance’ visualisation. With just one system you can monitor alerts from many other monitoring tools on a single screen +> [(alerta docs)](https://docs.alerta.io/). 
+ +| | Keep | Alternative | +| --------------------------- | -------------------------------------------------------------- | ------------- | +| Aggregation and correlation | ✅ | ✅ | +| Integrations | ✅ (Both manually and automatically with Webhooks Integration) | ✅ (Manually) | +| Alerts enrichment | ✅ | ❌ | +| Open source | ✅ | ✅ | +| Workflow automation | ✅ | ❌ | +| Managed version | ✅ | ❌ | +| CI/CD integration | ✅ | ❌ | +| Noise reduction | ✅ | ❌ | + +## Keep vs Grafana + +> Using Grafana Alerting, you create queries and expressions from multiple data sources — no matter where your data is stored — giving you the flexibility to combine your data and alert on your metrics and logs in new and unique ways. You can then create, manage, and take action on your alerts from a single, consolidated view, and improve your team’s ability to identify and resolve issues quickly. +> [(Grafana docs)](https://grafana.com/docs/grafana/latest/alerting/). + +| | Keep | Alternative | +| ------------------------------------- | -------------------------------------------------------------- | ---------------------------- | +| Aggregation and correlation of alerts | ✅ | ❌ | +| Integrations | ✅ (Both manually and automatically with Webhooks Integration) | ✅ (Manually) | +| Alerts enrichment | ✅ | ❌ | +| Open source | ✅ | ✅ | +| Workflow automation | ✅ | ❌ | +| Managed version | ✅ | ✅ | +| CI/CD integration | ✅ | ⚠️ has terraform integration | +| Noise reduction | ✅ | ❌ | + +## Keep vs incident management tools (PagerDuty, OpsGenie, etc) + +Most incident management tools offer features like alert aggregation and workflow automation. However, their core value is the incident management itself, which Keep aims to prevent. Keep focuses only on the alert lifecycle. diff --git a/docs/overview/introduction.mdx b/docs/overview/introduction.mdx index 96cea438f..951be111c 100644 --- a/docs/overview/introduction.mdx +++ b/docs/overview/introduction.mdx @@ -17,8 +17,8 @@ Alerts usually categorized into three different groups: ## What problem does Keep solve? Keep helps with every step of the alert lifecycle: 1. Creation - Keep offers a framework for creating, debugging, and testing alerts through code that scales with your teams. -2. Maintenance - Keep integrates with your existing tools, allowing you to manage all of your alerts within a single interface. -3. Noise reduction - By integrating with tools that trigger alerts, Keep can deduplicate and correlate alerts to reduce noise in your organization. +2. Maintenance - Keep integrates with your tools, allowing you to manage all of your alerts within a single interface. +3. Noise reduction - By integrating with monitoring tools, Keep can deduplicate and correlate alerts to reduce noise in your organization. 4. Automation - [Keep Workflows](/workflows) enable automated alert enrichment and response. ## How does Keep integrate with the alerts? diff --git a/docs/overview/alertseverityandstatus.mdx b/docs/platform/alertseverityandstatus.mdx similarity index 100% rename from docs/overview/alertseverityandstatus.mdx rename to docs/platform/alertseverityandstatus.mdx diff --git a/docs/providers/adding-a-new-provider.mdx b/docs/providers/adding-a-new-provider.mdx index 3c92a88c3..2fccc2dc2 100644 --- a/docs/providers/adding-a-new-provider.mdx +++ b/docs/providers/adding-a-new-provider.mdx @@ -1,10 +1,8 @@ --- -title: "➕ New Provider" +title: "Adding a new Provider" sidebarTitle: "Adding a New Provider" --- - -This is an outdated documentation and will soon be updated. 
- +Under contstruction ### Basics @@ -23,6 +21,30 @@ sidebarTitle: "Adding a New Provider" - Provider directory must start with the provider's unique identifier followed by underscore+provider (e.g. `slack_provider`) - Provider file name must start with the provider's unique identifier followed by underscore+provider+.py (e.g. `slack_provider.py`) +### ProviderScope +```python +@dataclass +class ProviderScope: + """ + Provider scope model. + + Args: + name (str): The name of the scope. + description (Optional[str]): The description of the scope. + mandatory (bool): Whether the scope is mandatory. + mandatory_for_webhook (bool): Whether the scope is mandatory for webhook auto installation. + documentation_url (Optional[str]): The documentation url of the scope. + alias (Optional[str]): Another alias of the scope. + """ + + name: str + description: Optional[str] = None + mandatory: bool = False + mandatory_for_webhook: bool = False + documentation_url: Optional[str] = None + alias: Optional[str] = None +``` + ### ProviderConfig ```python @@ -36,7 +58,8 @@ class ProviderConfig: authentication (dict): The configuration for the provider. """ - authentication: dict + authentication: Optional[dict] + name: Optional[str] = None description: Optional[str] = None def __post_init__(self): @@ -54,8 +77,27 @@ class ProviderConfig: ### BaseProvider ```python +""" +Base class for all providers. +""" class BaseProvider(metaclass=abc.ABCMeta): - def __init__(self, provider_id: str, config: ProviderConfig): + OAUTH2_URL = None + PROVIDER_SCOPES: list[ProviderScope] = [] + PROVIDER_METHODS: list[ProviderMethod] = [] + FINGERPRINT_FIELDS: list[str] = [] + PROVIDER_TAGS: list[ + Literal["alert", "ticketing", "messaging", "data", "queue"] + ] = [] + + def __init__( + self, + context_manager: ContextManager, + provider_id: str, + config: ProviderConfig, + webhooke_template: Optional[str] = None, + webhook_description: Optional[str] = None, + provider_description: Optional[str] = None, + ): """ Initialize a provider. @@ -63,24 +105,36 @@ class BaseProvider(metaclass=abc.ABCMeta): provider_id (str): The provider id. **kwargs: Provider configuration loaded from the provider yaml file. """ - # Initalize logger for every provider - self.logger = logging.getLogger(self.__class__.__name__) - self.id = provider_id + self.provider_id = provider_id + self.config = config + self.webhooke_template = webhooke_template + self.webhook_description = webhook_description + self.provider_description = provider_description + self.context_manager = context_manager + self.logger = context_manager.get_logger() self.validate_config() self.logger.debug( "Base provider initalized", extra={"provider": self.__class__.__name__} ) + self.provider_type = self._extract_type() + self.results = [] + # tb: we can have this overriden by customer configuration, when initializing the provider + self.fingerprint_fields = self.FINGERPRINT_FIELDS - @property - def provider_id(self) -> str: + def _extract_type(self): """ - Get the provider id. + Extract the provider type from the provider class name. Returns: - str: The provider id. + str: The provider type. 
""" - return self.id + name = self.__class__.__name__ + name_without_provider = name.replace("Provider", "") + name_with_spaces = ( + re.sub("([A-Z])", r" \1", name_without_provider).lower().strip() + ) + return name_with_spaces.replace(" ", ".") @abc.abstractmethod def dispose(self): @@ -96,16 +150,106 @@ class BaseProvider(metaclass=abc.ABCMeta): """ raise NotImplementedError("validate_config() method not implemented") + def validate_scopes(self) -> dict[str, bool | str]: + """ + Validate provider scopes. + + Returns: + dict: where key is the scope name and value is whether the scope is valid (True boolean) or string with error message. + """ + return {} + def notify(self, **kwargs): """ Output alert message. + Args: + **kwargs (dict): The provider context (with statement) + """ + # trigger the provider + results = self._notify(**kwargs) + self.results.append(results) + # if the alert should be enriched, enrich it + enrich_alert = kwargs.get("enrich_alert", []) + if not enrich_alert or not results: + return results if results else None + + self._enrich_alert(enrich_alert, results) + return results + + def _enrich_alert(self, enrichments, results): + """ + Enrich alert with provider specific data. + + """ + self.logger.debug("Extracting the fingerprint from the alert") + if "fingerprint" in results: + fingerprint = results["fingerprint"] + elif self.context_manager.foreach_context.get("value", {}): + # TODO: if it's zipped, we need to extract the fingerprint from the zip (i.e. multiple foreach) + fingerprint = self.context_manager.foreach_context.get("value", {}).get( + "fingerprint" + ) + # else, if we are in an event context, use the event fingerprint + elif self.context_manager.event_context: + # TODO: map all casses event_context is dict and update them to the DTO + # and remove this if statement + if isinstance(self.context_manager.event_context, dict): + fingerprint = self.context_manager.event_context.get("fingerprint") + # Alert DTO + else: + fingerprint = self.context_manager.event_context.fingerprint + else: + fingerprint = None + + if not fingerprint: + self.logger.error( + "No fingerprint found for alert enrichment", + extra={"provider": self.provider_id}, + ) + raise Exception("No fingerprint found for alert enrichment") + self.logger.debug("Fingerprint extracted", extra={"fingerprint": fingerprint}) + + _enrichments = {} + # enrich only the requested fields + for enrichment in enrichments: + try: + if enrichment["value"].startswith("results."): + val = enrichment["value"].replace("results.", "") + parts = val.split(".") + r = copy.copy(results) + for part in parts: + r = r[part] + _enrichments[enrichment["key"]] = r + else: + _enrichments[enrichment["key"]] = enrichment["value"] + except Exception: + self.logger.error( + f"Failed to enrich alert - enrichment: {enrichment}", + extra={"fingerprint": fingerprint, "provider": self.provider_id}, + ) + continue + self.logger.info("Enriching alert", extra={"fingerprint": fingerprint}) + try: + enrich_alert(self.context_manager.tenant_id, fingerprint, _enrichments) + except Exception as e: + self.logger.error( + "Failed to enrich alert in db", + extra={"fingerprint": fingerprint, "provider": self.provider_id}, + ) + raise e + self.logger.info("Alert enriched", extra={"fingerprint": fingerprint}) + + def _notify(self, **kwargs): + """ + Output alert message. 
+ Args: **kwargs (dict): The provider context (with statement) """ raise NotImplementedError("notify() method not implemented") - def query(self, **kwargs: dict): + def _query(self, **kwargs: dict): """ Query the provider using the given query @@ -117,9 +261,60 @@ class BaseProvider(metaclass=abc.ABCMeta): """ raise NotImplementedError("query() method not implemented") - def get_alerts(self, alert_id: Optional[str] = None): + def query(self, **kwargs: dict): + # just run the query + results = self._query(**kwargs) + # now add the type of the results to the global context + if results and isinstance(results, list): + self.context_manager.dependencies.add(results[0].__class__) + elif results: + self.context_manager.dependencies.add(results.__class__) + + enrich_alert = kwargs.get("enrich_alert", []) + if enrich_alert: + self._enrich_alert(enrich_alert, results) + # and return the results + return results + + @staticmethod + def _format_alert(event: dict) -> AlertDto | list[AlertDto]: + raise NotImplementedError("format_alert() method not implemented") + + @classmethod + def format_alert(cls, event: dict) -> AlertDto | list[AlertDto]: + logger = logging.getLogger(__name__) + logger.debug("Formatting alert") + formatted_alert = cls._format_alert(event) + logger.debug("Alert formatted") + return formatted_alert + + @staticmethod + def get_alert_fingerprint(alert: AlertDto, fingerprint_fields: list = []) -> str: """ - Get alerts from the provider. + Get the fingerprint of an alert. + + Args: + event (AlertDto): The alert to get the fingerprint of. + fingerprint_fields (list, optional): The fields we calculate the fingerprint upon. Defaults to []. + + Returns: + str: hexdigest of the fingerprint or the event.name if no fingerprint_fields were given. + """ + if not fingerprint_fields: + return alert.name + fingerprint = hashlib.sha256() + event_dict = alert.dict() + for fingerprint_field in fingerprint_fields: + fingerprint_field_value = event_dict.get(fingerprint_field, None) + if isinstance(fingerprint_field_value, (list, dict)): + fingerprint_field_value = json.dumps(fingerprint_field_value) + if fingerprint_field_value: + fingerprint.update(str(fingerprint_field_value).encode()) + return fingerprint.hexdigest() + + def get_alerts_configuration(self, alert_id: Optional[str] = None): + """ + Get configuration of alerts from the provider. Args: alert_id (Optional[str], optional): If given, gets a specific alert by id. Defaults to None. @@ -137,6 +332,91 @@ class BaseProvider(metaclass=abc.ABCMeta): """ raise NotImplementedError("deploy_alert() method not implemented") + def _get_alerts(self) -> list[AlertDto]: + """ + Get alerts from the provider. + """ + raise NotImplementedError("get_alerts() method not implemented") + + def get_alerts(self) -> list[AlertDto]: + """ + Get alerts from the provider. + """ + with tracer.start_as_current_span(f"{self.__class__.__name__}-get_alerts"): + alerts = self._get_alerts() + # enrich alerts with provider id + for alert in alerts: + alert.providerId = self.provider_id + return alerts + + def get_alerts_by_fingerprint(self, tenant_id: str) -> dict[str, list[AlertDto]]: + """ + Get alerts from the provider grouped by fingerprint, sorted by lastReceived. + + Returns: + dict[str, list[AlertDto]]: A dict of alerts grouped by fingerprint, sorted by lastReceived. 
+ """ + alerts = self.get_alerts() + + if not alerts: + return {} + + # get alerts, group by fingerprint and sort them by lastReceived + with tracer.start_as_current_span(f"{self.__class__.__name__}-get_last_alerts"): + get_attr = operator.attrgetter("fingerprint") + grouped_alerts = { + fingerprint: list(alerts) + for fingerprint, alerts in itertools.groupby( + sorted( + alerts, + key=get_attr, + ), + get_attr, + ) + } + + # enrich alerts + with tracer.start_as_current_span(f"{self.__class__.__name__}-enrich_alerts"): + pulled_alerts_enrichments = get_enrichments( + tenant_id=tenant_id, + fingerprints=grouped_alerts.keys(), + ) + for alert_enrichment in pulled_alerts_enrichments: + if alert_enrichment: + alerts_to_enrich = grouped_alerts.get( + alert_enrichment.alert_fingerprint + ) + for alert_to_enrich in alerts_to_enrich: + parse_and_enrich_deleted_and_assignees( + alert_to_enrich, alert_enrichment.enrichments + ) + for enrichment in alert_enrichment.enrichments: + # set the enrichment + setattr( + alert_to_enrich, + enrichment, + alert_enrichment.enrichments[enrichment], + ) + + return grouped_alerts + + def setup_webhook( + self, tenant_id: str, keep_api_url: str, api_key: str, setup_alerts: bool = True + ): + """ + Setup a webhook for the provider. + + Args: + tenant_id (str): _description_ + keep_api_url (str): _description_ + api_key (str): _description_ + setup_alerts (bool, optional): _description_. Defaults to True. + + Raises: + NotImplementedError: _description_ + """ + raise NotImplementedError("setup_webhook() method not implemented") + @staticmethod def get_alert_schema() -> dict: """ @@ -150,6 +430,37 @@ class BaseProvider(metaclass=abc.ABCMeta): "get_alert_format_description() method not implemented" ) + @staticmethod + def oauth2_logic(**payload) -> dict: + """ + Logic for oauth2 authentication. + + For example, in Slack oauth2, we need to get the code from the payload and exchange it for a token. + + return: dict: The secrets to be saved as the provider configuration. (e.g. the Slack access token) + """ + raise NotImplementedError("oauth2_logic() method not implemented") + + @staticmethod + def parse_event_raw_body(raw_body: bytes) -> bytes: + """ + Parse the raw body of an event and create an ingestable dict from it. + + For instance, in parseable, the "event" is just a string + > b'Alert: Server side error triggered on teststream1\nMessage: server reporting status as 500\nFailing Condition: status column equal to abcd, 2 times' + and we want to return an object + > b"{'alert': 'Server side error triggered on teststream1', 'message': 'server reporting status as 500', 'failing_condition': 'status column equal to abcd, 2 times'}" + + If this method is not implemented for a provider, just return the raw body. + + Args: + raw_body (bytes): The raw body of the incoming event (/event endpoint in alerts.py) + + Returns: + dict: Ingestable event + """ + return raw_body + def get_logs(self, limit: int = 5) -> list: """ Get logs from the provider. @@ -169,4 +480,96 @@ class BaseProvider(metaclass=abc.ABCMeta): """ # TODO - implement dynamically using decorators and return {} + + def start_consume(self): + """Get the consumer for the provider. + + should be implemented by the provider if it has a consumer. + + for an example, see Kafka Provider + + Returns: + Consumer: The consumer for the provider. + """ + return + + def status(self) -> bool: + """Return the status of the provider. + + Returns: + bool: The status of the provider. 
+ """ + return { + "status": "should be implemented by the provider if it has a consumer", + "error": "", + } + + @property + def is_consumer(self) -> bool: + """Return consumer if the inherited class has a start_consume method. + + Returns: + bool: _description_ + """ + return self.start_consume.__qualname__ != "BaseProvider.start_consume" + + def _push_alert(self, alert: dict): + """ + Push an alert to the provider. + + Args: + alert (dict): The alert to push. + """ + # if this is not a dict, try to convert it to a dict + if not isinstance(alert, dict): + try: + alert_data = json.loads(alert) + except Exception: + alert_data = alert_data + else: + alert_data = alert + + # if this is still not a dict, we can't push it + if not isinstance(alert_data, dict): + self.logger.warning( + "We currently support only alert represented as a dict, dismissing alert", + extra={"alert": alert}, + ) + return + # now try to build the alert model + # we will have a lot of default values here to support all providers and all cases, the + # way to fine tune those would be to use the provider specific model or enforce that the event from the queue will be casted into the fields + alert_model = AlertDto( + id=alert_data.get("id", str(uuid.uuid4())), + name=alert_data.get("name", "alert-from-event-queue"), + status=alert_data.get("status", AlertStatus.FIRING), + lastReceived=alert_data.get("lastReceived", datetime.datetime.now()), + environment=alert_data.get("environment", "alert-from-event-queue"), + isDuplicate=alert_data.get("isDuplicate", False), + duplicateReason=alert_data.get("duplicateReason", None), + service=alert_data.get("service", "alert-from-event-queue"), + source=alert_data.get("source", [self.provider_type]), + message=alert_data.get("message", "alert-from-event-queue"), + description=alert_data.get("description", "alert-from-event-queue"), + severity=alert_data.get("severity", AlertSeverity.INFO), + pushed=alert_data.get("pushed", False), + event_id=alert_data.get("event_id", str(uuid.uuid4())), + url=alert_data.get("url", None), + fingerprint=alert_data.get("fingerprint", None), + ) + # push the alert to the provider + url = f'{os.environ["KEEP_API_URL"]}/alerts/event' + headers = { + "Content-Type": "application/json", + "Accept": "application/json", + "X-API-KEY": self.context_manager.api_key, + } + response = requests.post(url, json=alert_model.dict(), headers=headers) + try: + response.raise_for_status() + self.logger.info("Alert pushed successfully") + except Exception: + self.logger.error( + f"Failed to push alert to {self.provider_id}: {response.content}" + ) ``` diff --git a/keep/api/api.py b/keep/api/api.py index bb34d0a50..f22d01889 100644 --- a/keep/api/api.py +++ b/keep/api/api.py @@ -32,7 +32,6 @@ rules, settings, status, - tenant, whoami, workflows, ) @@ -149,7 +148,6 @@ def get_app( app.include_router(providers.router, prefix="/providers", tags=["providers"]) app.include_router(healthcheck.router, prefix="/healthcheck", tags=["healthcheck"]) - app.include_router(tenant.router, prefix="/tenant", tags=["tenant"]) app.include_router(alerts.router, prefix="/alerts", tags=["alerts"]) app.include_router(settings.router, prefix="/settings", tags=["settings"]) app.include_router( diff --git a/keep/api/routes/tenant.py b/keep/api/routes/tenant.py deleted file mode 100644 index 349c8664d..000000000 --- a/keep/api/routes/tenant.py +++ /dev/null @@ -1,82 +0,0 @@ -import logging -from uuid import uuid4 - -from fastapi import APIRouter, Depends, Request -from fastapi.responses import 
JSONResponse -from sqlmodel import Session, select - -# This import is required to create the tables -from keep.api.core.dependencies import AuthenticatedEntity, AuthVerifier, get_session -from keep.api.core.rbac import Webhook as WebhookRole -from keep.api.models.db.tenant import TenantInstallation -from keep.api.utils.tenant_utils import create_api_key - -router = APIRouter() - - -@router.get( - "/onboarded", - description="Check if a tenant is onboarded (meaning - installed github bot)", -) -def is_onboarded( - authenticated_entity: AuthenticatedEntity = Depends(AuthVerifier()), - session: Session = Depends(get_session), -) -> JSONResponse: - tenant_id = authenticated_entity.tenant_id - logging.getLogger().info(f"Serving request for onboarded [tenant_id: {tenant_id}]") - statement = select(TenantInstallation).where( - TenantInstallation.tenant_id == tenant_id - ) - result = session.exec(statement) - installations = result.all() - # TODO: in the future support more than one onboard.. - # {"onboarded": {"github": true, "gitlab": false}"}} - if installations: - return JSONResponse({"onboarded": True}) - else: - return JSONResponse({"onboarded": False}) - - -@router.post("/github", status_code=204) -async def save_github_installation_id( - request: Request, - authenticated_entity: AuthenticatedEntity = Depends(AuthVerifier()), - session: Session = Depends(get_session), -) -> None: - try: - tenant_id = authenticated_entity.tenant_id - # Get the installation_id and action from the request body - data = await request.json() - installation_id = data.get("installation_id") - # TODO - do things with the action (update, etc) - # action = data.get("setup_action") - - # Check if the installation ID already exists for the tenant - statement = select(TenantInstallation).where( - TenantInstallation.tenant_id == tenant_id, - TenantInstallation.bot_id == str(installation_id), - ) - existing_installation = session.exec(statement).first() - if existing_installation: - # TODO: update the installation if needed - return JSONResponse({"success": True}) - - # Create a new TenantInstallation instance and save it in the database - new_installation = TenantInstallation( - id=uuid4(), tenant_id=tenant_id, bot_id=str(installation_id), installed=True - ) - create_api_key( - session, - tenant_id, - str(installation_id), - role=str(WebhookRole), - is_system=True, - system_description="GitHub application", - commit=False, - ) # commit happens after the installation is saved - session.add(new_installation) - session.commit() - except Exception: - return JSONResponse({"success": False}) - # Return a success response - return JSONResponse({"success": True}) From a0e142b06befd9b88a0a00c17b00a0444358c0c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 30 Jan 2024 10:59:02 +0200 Subject: [PATCH 06/12] chore(deps-dev): bump aiohttp from 3.9.1 to 3.9.2 (#758) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 386 +++++++++++++--------------------------------------- 1 file changed, 95 insertions(+), 291 deletions(-) diff --git a/poetry.lock b/poetry.lock index 125865eb9..2b5157ccf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,89 +1,88 @@ -# This file is automatically @generated by Poetry and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "aiohttp" -version = "3.9.1" +version = "3.9.2" description = "Async http client/server framework (asyncio)" -category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, - {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, - {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, - {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, - {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, - {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, - {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, - {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, - {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, - {file = 
"aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, - {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, - {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, - {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, + {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:772fbe371788e61c58d6d3d904268e48a594ba866804d08c995ad71b144f94cb"}, + {file = "aiohttp-3.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:edd4f1af2253f227ae311ab3d403d0c506c9b4410c7fc8d9573dec6d9740369f"}, + {file = "aiohttp-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cfee9287778399fdef6f8a11c9e425e1cb13cc9920fd3a3df8f122500978292b"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc158466f6a980a6095ee55174d1de5730ad7dec251be655d9a6a9dd7ea1ff9"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54ec82f45d57c9a65a1ead3953b51c704f9587440e6682f689da97f3e8defa35"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abeb813a18eb387f0d835ef51f88568540ad0325807a77a6e501fed4610f864e"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc91d07280d7d169f3a0f9179d8babd0ee05c79d4d891447629ff0d7d8089ec2"}, + {file = "aiohttp-3.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b65e861f4bebfb660f7f0f40fa3eb9f2ab9af10647d05dac824390e7af8f75b7"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04fd8ffd2be73d42bcf55fd78cde7958eeee6d4d8f73c3846b7cba491ecdb570"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3d8d962b439a859b3ded9a1e111a4615357b01620a546bc601f25b0211f2da81"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:8ceb658afd12b27552597cf9a65d9807d58aef45adbb58616cdd5ad4c258c39e"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0e4ee4df741670560b1bc393672035418bf9063718fee05e1796bf867e995fad"}, + {file = "aiohttp-3.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2dec87a556f300d3211decf018bfd263424f0690fcca00de94a837949fbcea02"}, + {file = "aiohttp-3.9.2-cp310-cp310-win32.whl", hash = "sha256:3e1a800f988ce7c4917f34096f81585a73dbf65b5c39618b37926b1238cf9bc4"}, + {file = "aiohttp-3.9.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:ea510718a41b95c236c992b89fdfc3d04cc7ca60281f93aaada497c2b4e05c46"}, + {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6aaa6f99256dd1b5756a50891a20f0d252bd7bdb0854c5d440edab4495c9f973"}, + {file = "aiohttp-3.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a27d8c70ad87bcfce2e97488652075a9bdd5b70093f50b10ae051dfe5e6baf37"}, + {file = "aiohttp-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:54287bcb74d21715ac8382e9de146d9442b5f133d9babb7e5d9e453faadd005e"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bb3d05569aa83011fcb346b5266e00b04180105fcacc63743fc2e4a1862a891"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8534e7d69bb8e8d134fe2be9890d1b863518582f30c9874ed7ed12e48abe3c4"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bd9d5b989d57b41e4ff56ab250c5ddf259f32db17159cce630fd543376bd96b"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa6904088e6642609981f919ba775838ebf7df7fe64998b1a954fb411ffb4663"}, + {file = "aiohttp-3.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bda42eb410be91b349fb4ee3a23a30ee301c391e503996a638d05659d76ea4c2"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:193cc1ccd69d819562cc7f345c815a6fc51d223b2ef22f23c1a0f67a88de9a72"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b9f1cb839b621f84a5b006848e336cf1496688059d2408e617af33e3470ba204"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d22a0931848b8c7a023c695fa2057c6aaac19085f257d48baa24455e67df97ec"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4112d8ba61fbd0abd5d43a9cb312214565b446d926e282a6d7da3f5a5aa71d36"}, + {file = "aiohttp-3.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c4ad4241b52bb2eb7a4d2bde060d31c2b255b8c6597dd8deac2f039168d14fd7"}, + {file = "aiohttp-3.9.2-cp311-cp311-win32.whl", hash = "sha256:ee2661a3f5b529f4fc8a8ffee9f736ae054adfb353a0d2f78218be90617194b3"}, + {file = "aiohttp-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:4deae2c165a5db1ed97df2868ef31ca3cc999988812e82386d22937d9d6fed52"}, + {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:6f4cdba12539215aaecf3c310ce9d067b0081a0795dd8a8805fdb67a65c0572a"}, + {file = "aiohttp-3.9.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:84e843b33d5460a5c501c05539809ff3aee07436296ff9fbc4d327e32aa3a326"}, + {file = "aiohttp-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8008d0f451d66140a5aa1c17e3eedc9d56e14207568cd42072c9d6b92bf19b52"}, + {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61c47ab8ef629793c086378b1df93d18438612d3ed60dca76c3422f4fbafa792"}, + {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc71f748e12284312f140eaa6599a520389273174b42c345d13c7e07792f4f57"}, + {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a1c3a4d0ab2f75f22ec80bca62385db2e8810ee12efa8c9e92efea45c1849133"}, + {file = "aiohttp-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a87aa0b13bbee025faa59fa58861303c2b064b9855d4c0e45ec70182bbeba1b"}, + {file = 
"aiohttp-3.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2cc0d04688b9f4a7854c56c18aa7af9e5b0a87a28f934e2e596ba7e14783192"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1956e3ac376b1711c1533266dec4efd485f821d84c13ce1217d53e42c9e65f08"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:114da29f39eccd71b93a0fcacff178749a5c3559009b4a4498c2c173a6d74dff"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3f17999ae3927d8a9a823a1283b201344a0627272f92d4f3e3a4efe276972fe8"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f31df6a32217a34ae2f813b152a6f348154f948c83213b690e59d9e84020925c"}, + {file = "aiohttp-3.9.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7a75307ffe31329928a8d47eae0692192327c599113d41b278d4c12b54e1bd11"}, + {file = "aiohttp-3.9.2-cp312-cp312-win32.whl", hash = "sha256:972b63d589ff8f305463593050a31b5ce91638918da38139b9d8deaba9e0fed7"}, + {file = "aiohttp-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:200dc0246f0cb5405c80d18ac905c8350179c063ea1587580e3335bfc243ba6a"}, + {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:158564d0d1020e0d3fe919a81d97aadad35171e13e7b425b244ad4337fc6793a"}, + {file = "aiohttp-3.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da1346cd0ccb395f0ed16b113ebb626fa43b7b07fd7344fce33e7a4f04a8897a"}, + {file = "aiohttp-3.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:eaa9256de26ea0334ffa25f1913ae15a51e35c529a1ed9af8e6286dd44312554"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1543e7fb00214fb4ccead42e6a7d86f3bb7c34751ec7c605cca7388e525fd0b4"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:186e94570433a004e05f31f632726ae0f2c9dee4762a9ce915769ce9c0a23d89"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d52d20832ac1560f4510d68e7ba8befbc801a2b77df12bd0cd2bcf3b049e52a4"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c45e4e815ac6af3b72ca2bde9b608d2571737bb1e2d42299fc1ffdf60f6f9a1"}, + {file = "aiohttp-3.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa906b9bdfd4a7972dd0628dbbd6413d2062df5b431194486a78f0d2ae87bd55"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:68bbee9e17d66f17bb0010aa15a22c6eb28583edcc8b3212e2b8e3f77f3ebe2a"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4c189b64bd6d9a403a1a3f86a3ab3acbc3dc41a68f73a268a4f683f89a4dec1f"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8a7876f794523123bca6d44bfecd89c9fec9ec897a25f3dd202ee7fc5c6525b7"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d23fba734e3dd7b1d679b9473129cd52e4ec0e65a4512b488981a56420e708db"}, + {file = "aiohttp-3.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b141753be581fab842a25cb319f79536d19c2a51995d7d8b29ee290169868eab"}, + {file = "aiohttp-3.9.2-cp38-cp38-win32.whl", hash = "sha256:103daf41ff3b53ba6fa09ad410793e2e76c9d0269151812e5aba4b9dd674a7e8"}, + {file = "aiohttp-3.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:328918a6c2835861ff7afa8c6d2c70c35fdaf996205d5932351bdd952f33fa2f"}, + {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:5264d7327c9464786f74e4ec9342afbbb6ee70dfbb2ec9e3dfce7a54c8043aa3"}, + {file = "aiohttp-3.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07205ae0015e05c78b3288c1517afa000823a678a41594b3fdc870878d645305"}, + {file = "aiohttp-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0a1e638cffc3ec4d4784b8b4fd1cf28968febc4bd2718ffa25b99b96a741bd"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d43302a30ba1166325974858e6ef31727a23bdd12db40e725bec0f759abce505"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16a967685907003765855999af11a79b24e70b34dc710f77a38d21cd9fc4f5fe"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fa3ee92cd441d5c2d07ca88d7a9cef50f7ec975f0117cd0c62018022a184308"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b500c5ad9c07639d48615a770f49618130e61be36608fc9bc2d9bae31732b8f"}, + {file = "aiohttp-3.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c07327b368745b1ce2393ae9e1aafed7073d9199e1dcba14e035cc646c7941bf"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:cc7d6502c23a0ec109687bf31909b3fb7b196faf198f8cff68c81b49eb316ea9"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:07be2be7071723c3509ab5c08108d3a74f2181d4964e869f2504aaab68f8d3e8"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:122468f6fee5fcbe67cb07014a08c195b3d4c41ff71e7b5160a7bcc41d585a5f"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:00a9abcea793c81e7f8778ca195a1714a64f6d7436c4c0bb168ad2a212627000"}, + {file = "aiohttp-3.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a9825fdd64ecac5c670234d80bb52bdcaa4139d1f839165f548208b3779c6c6"}, + {file = "aiohttp-3.9.2-cp39-cp39-win32.whl", hash = "sha256:5422cd9a4a00f24c7244e1b15aa9b87935c85fb6a00c8ac9b2527b38627a9211"}, + {file = "aiohttp-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:7d579dcd5d82a86a46f725458418458fa43686f6a7b252f2966d359033ffc8ab"}, + {file = "aiohttp-3.9.2.tar.gz", hash = "sha256:b0ad0a5e86ce73f5368a164c10ada10504bf91869c05ab75d982c6048217fbf7"}, ] [package.dependencies] @@ -100,7 +99,6 @@ speedups = ["Brotli", "aiodns", "brotlicffi"] name = "aiohttp-retry" version = "2.8.3" description = "Simple retry client for aiohttp" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -115,7 +113,6 @@ aiohttp = "*" name = "aiosignal" version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -130,7 +127,6 @@ frozenlist = ">=1.1.0" name = "anyio" version = "4.1.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -151,7 +147,6 @@ trio = ["trio (>=0.23)"] name = "asgiref" version = "3.7.2" description = "ASGI specs, helper code, and adapters" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -166,7 +161,6 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] name = "asn1crypto" version = "1.5.1" description = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP" 
-category = "main" optional = false python-versions = "*" files = [ @@ -178,7 +172,6 @@ files = [ name = "asteval" version = "0.9.31" description = "Safe, minimalistic evaluator of python expression using ast module" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -196,7 +189,6 @@ test = ["coverage", "pytest", "pytest-cov"] name = "astunparse" version = "1.6.3" description = "An AST unparser for Python" -category = "main" optional = false python-versions = "*" files = [ @@ -212,7 +204,6 @@ wheel = ">=0.23.0,<1.0" name = "asyncio" version = "3.4.3" description = "reference implementation of PEP 3156" -category = "main" optional = false python-versions = "*" files = [ @@ -226,7 +217,6 @@ files = [ name = "attrs" version = "23.1.0" description = "Classes Without Boilerplate" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -245,7 +235,6 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte name = "auth0-python" version = "4.5.0" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -264,7 +253,6 @@ requests = ">=2.31.0,<3.0.0" name = "autopep8" version = "2.0.4" description = "A tool that automatically formats Python code to conform to the PEP 8 style guide" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -279,7 +267,6 @@ pycodestyle = ">=2.10.0" name = "azure-common" version = "1.1.28" description = "Microsoft Azure Client Library for Python (Common)" -category = "main" optional = false python-versions = "*" files = [ @@ -291,7 +278,6 @@ files = [ name = "azure-core" version = "1.29.5" description = "Microsoft Azure Core Library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -311,7 +297,6 @@ aio = ["aiohttp (>=3.0)"] name = "azure-identity" version = "1.15.0" description = "Microsoft Azure Identity Library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -329,7 +314,6 @@ msal-extensions = ">=0.3.0,<2.0.0" name = "azure-mgmt-containerservice" version = "27.0.0" description = "Microsoft Azure Container Service Management Client Library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -346,7 +330,6 @@ isodate = ">=0.6.1,<1.0.0" name = "azure-mgmt-core" version = "1.4.0" description = "Microsoft Azure Management Core Library for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -361,7 +344,6 @@ azure-core = ">=1.26.2,<2.0.0" name = "babel" version = "2.14.0" description = "Internationalization utilities" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -376,7 +358,6 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "backoff" version = "2.2.1" description = "Function decoration for backoff and retry" -category = "main" optional = false python-versions = ">=3.7,<4.0" files = [ @@ -388,7 +369,6 @@ files = [ name = "bcrypt" version = "4.1.1" description = "Modern password hashing for your software and your servers" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -419,7 +399,6 @@ typecheck = ["mypy"] name = "black" version = "23.12.0" description = "The uncompromising code formatter." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -464,7 +443,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "boto3" version = "1.33.12" description = "The AWS SDK for Python" -category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -484,7 +462,6 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] name = "botocore" version = "1.33.12" description = "Low-level, data-driven core of boto 3." -category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -504,7 +481,6 @@ crt = ["awscrt (==0.19.17)"] name = "cachetools" version = "5.3.2" description = "Extensible memoizing collections and decorators" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -516,7 +492,6 @@ files = [ name = "cel-python" version = "0.1.5" description = "Pure Python CEL Implementation" -category = "main" optional = false python-versions = ">=3.7, <4" files = [ @@ -537,7 +512,6 @@ urllib3 = ">=1.26.4" name = "certifi" version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -549,7 +523,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -614,7 +587,6 @@ pycparser = "*" name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -626,7 +598,6 @@ files = [ name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -726,7 +697,6 @@ files = [ name = "chevron" version = "0.14.0" description = "Mustache templating language renderer" -category = "main" optional = false python-versions = "*" files = [ @@ -738,7 +708,6 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -753,7 +722,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "cloud-sql-python-connector" version = "1.4.3" description = "The Cloud SQL Python Connector is a library that can be used alongside a database driver to allow users with sufficient permissions to connect to a Cloud SQL database without having to manually allowlist IPs or manage SSL certificates." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -777,7 +745,6 @@ pytds = ["python-tds (==1.13.0)"] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -789,7 +756,6 @@ files = [ name = "coverage" version = "7.3.2" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -854,7 +820,6 @@ toml = ["tomli"] name = "cryptography" version = "41.0.7" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" optional = false python-versions = ">=3.7" files = [ @@ -900,7 +865,6 @@ test-randomorder = ["pytest-randomly"] name = "datadog-api-client" version = "2.19.0" description = "Collection of all Datadog Public endpoints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -924,7 +888,6 @@ zstandard = ["zstandard"] name = "datefinder" version = "0.7.3" description = "Extract datetime objects from strings" -category = "main" optional = false python-versions = "*" files = [ @@ -944,7 +907,6 @@ test = ["mock", "pytest (>=2.8.5)", "pytz (>=2015.7)"] name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -956,7 +918,6 @@ files = [ name = "deprecated" version = "1.2.14" description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -974,7 +935,6 @@ dev = ["PyTest", "PyTest-Cov", "bump2version (<1)", "sphinx (<2)", "tox"] name = "distlib" version = "0.3.8" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -986,7 +946,6 @@ files = [ name = "dnspython" version = "2.4.2" description = "DNS toolkit" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1006,7 +965,6 @@ wmi = ["wmi (>=1.5.1,<2.0.0)"] name = "ecdsa" version = "0.18.0" description = "ECDSA cryptographic signature library (pure python)" -category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1025,7 +983,6 @@ gmpy2 = ["gmpy2"] name = "elastic-transport" version = "8.10.0" description = "Transport classes and utilities shared among Python Elastic client libraries" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1044,7 +1001,6 @@ develop = ["aiohttp", "furo", "mock", "pytest", "pytest-asyncio", "pytest-cov", name = "elasticsearch" version = "8.11.1" description = "Python client for Elasticsearch" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1063,7 +1019,6 @@ requests = ["requests (>=2.4.0,<3.0.0)"] name = "fastapi" version = "0.92.0" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1085,7 +1040,6 @@ test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.10.0)", "coverage[toml] (>= name = "filelock" version = "3.13.1" description = "A platform independent file lock." 
-category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1102,7 +1056,6 @@ typing = ["typing-extensions (>=4.8)"] name = "flake8" version = "6.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -1119,7 +1072,6 @@ pyflakes = ">=3.1.0,<3.2.0" name = "frozenlist" version = "1.4.0" description = "A list-like structure which implements collections.abc.MutableSequence" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1190,7 +1142,6 @@ files = [ name = "google-api-core" version = "2.15.0" description = "Google API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1201,14 +1152,8 @@ files = [ [package.dependencies] google-auth = ">=2.14.1,<3.0.dev0" googleapis-common-protos = ">=1.56.2,<2.0.dev0" -grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\""}, -] -grpcio-status = [ - {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""}, - {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\""}, -] +grpcio = {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} +grpcio-status = {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""} protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" requests = ">=2.18.0,<3.0.0.dev0" @@ -1221,7 +1166,6 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] name = "google-auth" version = "2.25.2" description = "Google Authentication Library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1245,7 +1189,6 @@ requests = ["requests (>=2.20.0,<3.0.0.dev0)"] name = "google-cloud-bigquery" version = "3.14.0" description = "Google BigQuery API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1254,7 +1197,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" google-cloud-core = ">=1.6.0,<3.0.0dev" google-resumable-media = ">=0.6.0,<3.0dev" packaging = ">=20.0.0" @@ -1276,7 +1219,6 @@ tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] name = "google-cloud-container" version = "2.36.0" description = "Google Cloud Container API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1285,7 +1227,7 @@ files = [ ] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1293,7 +1235,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 name = "google-cloud-core" version = "2.4.1" description = "Google Cloud API client core library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1302,7 +1243,7 @@ files = [ ] [package.dependencies] -google-api-core = 
">=1.31.6,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=1.25.0,<3.0dev" [package.extras] @@ -1312,7 +1253,6 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] name = "google-cloud-secret-manager" version = "2.17.0" description = "Google Cloud Secret Manager API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1321,7 +1261,7 @@ files = [ ] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" proto-plus = ">=1.22.3,<2.0.0dev" protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1330,7 +1270,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 name = "google-cloud-storage" version = "2.13.0" description = "Google Cloud Storage API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1339,7 +1278,7 @@ files = [ ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.0 || >2.3.0,<3.0.0dev" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" google-auth = ">=2.23.3,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-crc32c = ">=1.0,<2.0dev" @@ -1353,7 +1292,6 @@ protobuf = ["protobuf (<5.0.0dev)"] name = "google-cloud-trace" version = "1.11.3" description = "Google Cloud Trace API client library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1362,7 +1300,7 @@ files = [ ] [package.dependencies] -google-api-core = {version = ">=1.34.0,<2.0.0 || >=2.11.0,<3.0.0dev", extras = ["grpc"]} +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} proto-plus = {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""} protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1370,7 +1308,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4 name = "google-crc32c" version = "1.5.0" description = "A python wrapper of the C library 'Google CRC32C'" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1451,7 +1388,6 @@ testing = ["pytest"] name = "google-resumable-media" version = "2.6.0" description = "Utilities for Google Media Downloads and Resumable Uploads" -category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -1470,7 +1406,6 @@ requests = ["requests (>=2.18.0,<3.0.0dev)"] name = "googleapis-common-protos" version = "1.62.0" description = "Common protobufs used in Google APIs" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1489,7 +1424,6 @@ grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] name = "grafana-api-sdk" version = "0.1.1" description = "A Grafana API SDK" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1504,7 +1438,6 @@ urllib3 = "*" name = "greenlet" version = "3.0.2" description = "Lightweight in-process concurrent programming" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1576,7 +1509,6 @@ test = ["objgraph", "psutil"] name = "grpc-google-iam-v1" version = "0.13.0" description = "IAM API client 
library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1593,7 +1525,6 @@ protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4 name = "grpcio" version = "1.60.0" description = "HTTP/2-based RPC framework" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1660,7 +1591,6 @@ protobuf = ["grpcio-tools (>=1.60.0)"] name = "grpcio-status" version = "1.60.0" description = "Status proto mapping for gRPC" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1677,7 +1607,6 @@ protobuf = ">=4.21.6" name = "gunicorn" version = "21.2.0" description = "WSGI HTTP Server for UNIX" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1698,7 +1627,6 @@ tornado = ["tornado (>=0.2)"] name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1710,7 +1638,6 @@ files = [ name = "httpcore" version = "1.0.2" description = "A minimal low-level HTTP client." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1725,14 +1652,13 @@ h11 = ">=0.13,<0.15" [package.extras] asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] trio = ["trio (>=0.22.0,<0.23.0)"] [[package]] name = "httpx" version = "0.25.2" description = "The next generation HTTP client." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1743,21 +1669,20 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" -httpcore = ">=1.0.0,<2.0.0" +httpcore = "==1.*" idna = "*" sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (>=1.0.0,<2.0.0)"] +socks = ["socksio (==1.*)"] [[package]] name = "hvac" version = "2.1.0" description = "HashiCorp Vault API client" -category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -1775,7 +1700,6 @@ parser = ["pyhcl (>=0.4.4,<0.5.0)"] name = "identify" version = "2.5.33" description = "File identification library for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1790,7 +1714,6 @@ license = ["ukkonen"] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1802,7 +1725,6 @@ files = [ name = "importlib-metadata" version = "6.11.0" description = "Read metadata from Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1822,7 +1744,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1834,7 +1755,6 @@ files = [ name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = false python-versions = "*" files = [ @@ -1849,7 +1769,6 @@ six = "*" name = "isort" version = "5.13.1" description = "A Python utility / library to sort Python imports." 
-category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -1861,7 +1780,6 @@ files = [ name = "jmespath" version = "1.0.1" description = "JSON Matching Expressions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1873,7 +1791,6 @@ files = [ name = "jwcrypto" version = "1.5.1" description = "Implementation of JOSE Web standards" -category = "main" optional = false python-versions = ">= 3.6" files = [ @@ -1888,7 +1805,6 @@ deprecated = "*" name = "kafka-python" version = "2.0.2" description = "Pure Python client for Apache Kafka" -category = "main" optional = false python-versions = "*" files = [ @@ -1903,7 +1819,6 @@ crc32c = ["crc32c"] name = "kubernetes" version = "27.2.0" description = "Kubernetes python client" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -1921,7 +1836,7 @@ requests = "*" requests-oauthlib = "*" six = ">=1.9.0" urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.0 || >=0.43.0" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] @@ -1930,7 +1845,6 @@ adal = ["adal (>=1.0.2)"] name = "lark-parser" version = "0.12.0" description = "a modern parsing library" -category = "main" optional = false python-versions = "*" files = [ @@ -1947,7 +1861,6 @@ regex = ["regex"] name = "logmine" version = "0.4.1" description = "Log pattern analyzer" -category = "main" optional = false python-versions = "*" files = [ @@ -1958,7 +1871,6 @@ files = [ name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1970,7 +1882,6 @@ files = [ name = "monotonic" version = "1.6" description = "An implementation of time.monotonic() for Python 2 & < 3.3" -category = "main" optional = false python-versions = "*" files = [ @@ -1982,7 +1893,6 @@ files = [ name = "msal" version = "1.26.0" description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." -category = "main" optional = false python-versions = ">=2.7" files = [ @@ -2002,7 +1912,6 @@ broker = ["pymsalruntime (>=0.13.2,<0.14)"] name = "msal-extensions" version = "1.1.0" description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2022,7 +1931,6 @@ portalocker = [ name = "multidict" version = "6.0.4" description = "multidict implementation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2106,7 +2014,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -2118,7 +2025,6 @@ files = [ name = "mysql-connector-python" version = "8.2.0" description = "MySQL driver written in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2163,9 +2069,8 @@ opentelemetry = ["Deprecated (>=1.2.6)", "typing-extensions (>=3.7.4)", "zipp (> name = "ndg-httpsclient" version = "0.5.1" description = "Provides enhanced HTTPS support for httplib and urllib2 using PyOpenSSL" -category = "main" optional = false -python-versions = ">=2.7,<3.0.0 || >=3.4.0" +python-versions = ">=2.7,<3.0.dev0 || >=3.4.dev0" files = [ {file = "ndg_httpsclient-0.5.1-py2-none-any.whl", hash = "sha256:d2c7225f6a1c6cf698af4ebc962da70178a99bcde24ee6d1961c4f3338130d57"}, {file = "ndg_httpsclient-0.5.1-py3-none-any.whl", hash = "sha256:dd174c11d971b6244a891f7be2b32ca9853d3797a72edb34fa5d7b07d8fff7d4"}, @@ -2180,7 +2085,6 @@ PyOpenSSL = "*" name = "nest-asyncio" version = "1.5.8" description = "Patch asyncio to allow nested event loops" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -2192,7 +2096,6 @@ files = [ name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" -category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -2207,7 +2110,6 @@ setuptools = "*" name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2224,7 +2126,6 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] name = "openai" version = "0.27.10" description = "Python client library for the OpenAI API" -category = "main" optional = false python-versions = ">=3.7.1" files = [ @@ -2239,7 +2140,7 @@ tqdm = "*" [package.extras] datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] -dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"] +dev = ["black (>=21.6b0,<22.0)", "pytest (==6.*)", "pytest-asyncio", "pytest-mock"] embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] @@ -2247,7 +2148,6 @@ wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1 name = "opentelemetry-api" version = "1.20.0" description = "OpenTelemetry Python API" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2263,7 +2163,6 @@ importlib-metadata = ">=6.0,<7.0" name = "opentelemetry-exporter-gcp-trace" version = "1.6.0" description = "Google Cloud Trace exporter for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2274,14 +2173,13 @@ files = [ [package.dependencies] google-cloud-trace = ">=1.1,<2.0" opentelemetry-api = ">=1.0,<2.0" -opentelemetry-resourcedetector-gcp = ">=1.5.0dev0,<2.0.0" +opentelemetry-resourcedetector-gcp = ">=1.5.0dev0,<2.dev0" opentelemetry-sdk = ">=1.0,<2.0" [[package]] name = "opentelemetry-exporter-otlp-proto-common" version = "1.20.0" description = "OpenTelemetry Protobuf encoding" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2297,7 +2195,6 @@ opentelemetry-proto = "1.20.0" name = "opentelemetry-exporter-otlp-proto-grpc" 
version = "1.20.0" description = "OpenTelemetry Collector Protobuf over gRPC Exporter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2322,7 +2219,6 @@ test = ["pytest-grpc"] name = "opentelemetry-exporter-otlp-proto-http" version = "1.20.0" description = "OpenTelemetry Collector Protobuf over HTTP Exporter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2347,7 +2243,6 @@ test = ["responses (==0.22.0)"] name = "opentelemetry-instrumentation" version = "0.41b0" description = "Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2364,7 +2259,6 @@ wrapt = ">=1.0.0,<2.0.0" name = "opentelemetry-instrumentation-asgi" version = "0.41b0" description = "ASGI instrumentation for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2387,7 +2281,6 @@ test = ["opentelemetry-instrumentation-asgi[instruments]", "opentelemetry-test-u name = "opentelemetry-instrumentation-fastapi" version = "0.41b0" description = "OpenTelemetry FastAPI Instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2410,7 +2303,6 @@ test = ["httpx (>=0.22,<1.0)", "opentelemetry-instrumentation-fastapi[instrument name = "opentelemetry-instrumentation-logging" version = "0.41b0" description = "OpenTelemetry Logging instrumentation" -category = "main" optional = false python-versions = "*" files = [ @@ -2429,7 +2321,6 @@ test = ["opentelemetry-test-utils (==0.41b0)"] name = "opentelemetry-instrumentation-requests" version = "0.41b0" description = "OpenTelemetry requests instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2451,7 +2342,6 @@ test = ["httpretty (>=1.0,<2.0)", "opentelemetry-instrumentation-requests[instru name = "opentelemetry-instrumentation-sqlalchemy" version = "0.41b0" description = "OpenTelemetry SQLAlchemy instrumentation" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2474,7 +2364,6 @@ test = ["opentelemetry-instrumentation-sqlalchemy[instruments]", "opentelemetry- name = "opentelemetry-propagator-gcp" version = "1.6.0" description = "Google Cloud propagator for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2489,7 +2378,6 @@ opentelemetry-api = ">=1.0,<2.0" name = "opentelemetry-proto" version = "1.20.0" description = "OpenTelemetry Python Proto" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2504,7 +2392,6 @@ protobuf = ">=3.19,<5.0" name = "opentelemetry-resourcedetector-gcp" version = "1.6.0a0" description = "Google Cloud resource detector for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2522,7 +2409,6 @@ typing-extensions = ">=4.0,<5.0" name = "opentelemetry-sdk" version = "1.20.0" description = "OpenTelemetry Python SDK" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2539,7 +2425,6 @@ typing-extensions = ">=3.7.4" name = "opentelemetry-semantic-conventions" version = "0.41b0" description = "OpenTelemetry Semantic Conventions" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2551,7 +2436,6 @@ files = [ name = "opentelemetry-util-http" version = "0.41b0" description = "Web util for OpenTelemetry" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2563,7 +2447,6 @@ files = [ name = "opsgenie-sdk" version = "2.1.5" 
description = "Python SDK for Opsgenie REST API" -category = "main" optional = false python-versions = "*" files = [ @@ -2583,7 +2466,6 @@ urllib3 = ">=1.26.5" name = "oscrypto" version = "1.3.0" description = "TLS (SSL) sockets, key generation, encryption, decryption, signing, verification and KDFs using the OS crypto libraries. Does not require a compiler, and relies on the OS for patching. Works on Windows, OS X and Linux/BSD." -category = "main" optional = false python-versions = "*" files = [ @@ -2598,7 +2480,6 @@ asn1crypto = ">=1.5.1" name = "packaging" version = "23.2" description = "Core utilities for Python packages" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2610,7 +2491,6 @@ files = [ name = "paramiko" version = "3.4.0" description = "SSH2 protocol library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2632,7 +2512,6 @@ invoke = ["invoke (>=2.0)"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2644,7 +2523,6 @@ files = [ name = "platformdirs" version = "3.8.1" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2660,7 +2538,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest- name = "pluggy" version = "1.3.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2676,7 +2553,6 @@ testing = ["pytest", "pytest-benchmark"] name = "portalocker" version = "2.8.2" description = "Wraps the portalocker recipe for easy usage" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2696,7 +2572,6 @@ tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "p name = "posthog" version = "3.1.0" description = "Integrate PostHog into any python application." -category = "main" optional = false python-versions = "*" files = [ @@ -2720,7 +2595,6 @@ test = ["coverage", "flake8", "freezegun (==0.3.15)", "mock (>=2.0.0)", "pylint" name = "pre-commit" version = "3.6.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." -category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -2739,7 +2613,6 @@ virtualenv = ">=20.10.0" name = "pre-commit-hooks" version = "4.5.0" description = "Some out-of-the-box hooks for pre-commit." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2754,7 +2627,6 @@ files = [ name = "prettytable" version = "3.9.0" description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2772,7 +2644,6 @@ tests = ["pytest", "pytest-cov", "pytest-lazy-fixture"] name = "proto-plus" version = "1.23.0" description = "Beautiful, Pythonic protocol buffers." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -2790,7 +2661,6 @@ testing = ["google-api-core[grpc] (>=1.31.5)"] name = "protobuf" version = "4.21.12" description = "" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2814,7 +2684,6 @@ files = [ name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -2896,7 +2765,6 @@ files = [ name = "pusher" version = "3.3.2" description = "A Python library to interract with the Pusher Channels API" -category = "main" optional = false python-versions = "*" files = [ @@ -2921,7 +2789,6 @@ tornado = ["tornado (>=5.0.0)"] name = "pyasn1" version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2933,7 +2800,6 @@ files = [ name = "pyasn1-modules" version = "0.3.0" description = "A collection of ASN.1-based protocols modules" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2948,7 +2814,6 @@ pyasn1 = ">=0.4.6,<0.6.0" name = "pycodestyle" version = "2.11.1" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2960,7 +2825,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -2972,7 +2836,6 @@ files = [ name = "pycryptodomex" version = "3.19.1" description = "Cryptographic library for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3014,7 +2877,6 @@ files = [ name = "pydantic" version = "1.10.13" description = "Data validation and settings management using python type hints" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3067,7 +2929,6 @@ email = ["email-validator (>=1.0.3)"] name = "pyflakes" version = "3.1.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3079,7 +2940,6 @@ files = [ name = "pygithub" version = "1.59.1" description = "Use the full Github API v3" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3097,7 +2957,6 @@ requests = ">=2.14.0" name = "pyjwt" version = "2.8.0" description = "JSON Web Token implementation in Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3118,7 +2977,6 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] name = "pymongo" version = "4.6.1" description = "Python driver for MongoDB " -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3222,7 +3080,6 @@ zstd = ["zstandard"] name = "pympler" version = "1.0.1" description = "A development tool to measure, monitor and analyze the memory behavior of Python objects." 
-category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3234,7 +3091,6 @@ files = [ name = "pymysql" version = "1.1.0" description = "Pure Python MySQL Driver" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3250,7 +3106,6 @@ rsa = ["cryptography"] name = "pynacl" version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3277,7 +3132,6 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] name = "pyngrok" version = "7.0.3" description = "A Python wrapper for ngrok." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3292,7 +3146,6 @@ PyYAML = "*" name = "pyopenssl" version = "23.3.0" description = "Python wrapper module around the OpenSSL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3311,7 +3164,6 @@ test = ["flaky", "pretend", "pytest (>=3.0.1)"] name = "pytest" version = "7.4.3" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3332,7 +3184,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-docker" version = "2.0.1" description = "Simple pytest fixtures for Docker and Docker Compose based tests" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3352,7 +3203,6 @@ tests = ["pytest-pycodestyle (>=2.0.0,<3.0)", "pytest-pylint (>=0.14.1,<1.0)", " name = "pytest-mock" version = "3.12.0" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -3370,7 +3220,6 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] name = "python-dateutil" version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -3385,7 +3234,6 @@ six = ">=1.5" name = "python-dotenv" version = "0.21.1" description = "Read key-value pairs from a .env file and set them as environment variables" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3400,7 +3248,6 @@ cli = ["click (>=5.0)"] name = "python-http-client" version = "3.3.7" description = "HTTP REST client, simplified for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3412,7 +3259,6 @@ files = [ name = "python-jose" version = "3.3.0" description = "JOSE implementation in Python" -category = "main" optional = false python-versions = "*" files = [ @@ -3434,7 +3280,6 @@ pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] name = "python-json-logger" version = "2.0.7" description = "A python library adding a json log formatter" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3446,7 +3291,6 @@ files = [ name = "python-multipart" version = "0.0.6" description = "A streaming multipart parser for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3461,7 +3305,6 @@ dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatc name = "python-telegram-bot" version = "20.7" description = "We have made you a wrapper you can't refuse" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3487,7 +3330,6 @@ webhooks = ["tornado (>=6.3.3,<6.4.0)"] name = "pytz" version = 
"2023.3.post1" description = "World timezone definitions, modern and historical" -category = "main" optional = false python-versions = "*" files = [ @@ -3499,7 +3341,6 @@ files = [ name = "pywin32" version = "306" description = "Python for Window Extensions" -category = "main" optional = false python-versions = "*" files = [ @@ -3523,7 +3364,6 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -3545,6 +3385,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3583,7 +3424,6 @@ files = [ name = "regex" version = "2023.10.3" description = "Alternative regular expression module, to replace re." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3681,7 +3521,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3703,7 +3542,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-oauthlib" version = "1.3.1" description = "OAuthlib authentication support for Requests." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -3722,7 +3560,6 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] name = "rsa" version = "4.9" description = "Pure-Python RSA implementation" -category = "main" optional = false python-versions = ">=3.6,<4" files = [ @@ -3737,7 +3574,6 @@ pyasn1 = ">=0.1.3" name = "ruamel-yaml" version = "0.18.5" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3756,7 +3592,6 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] name = "ruamel-yaml-clib" version = "0.2.8" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -3816,7 +3651,6 @@ files = [ name = "ruff" version = "0.1.7" description = "An extremely fast Python linter and code formatter, written in Rust." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -3843,7 +3677,6 @@ files = [ name = "s3transfer" version = "0.8.2" description = "An Amazon S3 Transfer Manager" -category = "main" optional = false python-versions = ">= 3.7" files = [ @@ -3861,7 +3694,6 @@ crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] name = "sendgrid" version = "6.11.0" description = "Twilio SendGrid library for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -3877,7 +3709,6 @@ starkbank-ecdsa = ">=2.0.1" name = "sentry-sdk" version = "1.38.0" description = "Python client for Sentry (https://sentry.io)" -category = "main" optional = false python-versions = "*" files = [ @@ -3923,7 +3754,6 @@ tornado = ["tornado (>=5)"] name = "setuptools" version = "69.0.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -3940,7 +3770,6 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -3952,7 +3781,6 @@ files = [ name = "sniffio" version = "1.3.0" description = "Sniff out which async library your code is running under" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -3964,7 +3792,6 @@ files = [ name = "snowflake-connector-python" version = "3.1.0" description = "Snowflake Connector for Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4021,7 +3848,6 @@ secure-local-storage = ["keyring (!=16.1.0,<25.0.0)"] name = "sortedcontainers" version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -category = "main" optional = false python-versions = "*" files = [ @@ -4033,7 +3859,6 @@ files = [ name = "sqlalchemy" version = "1.4.41" description = "Database Abstraction Library" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -4108,7 +3933,6 @@ sqlcipher = ["sqlcipher3-binary"] name = "sqlalchemy2-stubs" version = "0.0.2a37" description = "Typing Stubs for SQLAlchemy 1.4" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4123,7 +3947,6 @@ typing-extensions = ">=3.7.4" name = "sqlmodel" version = "0.0.8" description = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." -category = "main" optional = false python-versions = ">=3.6.1,<4.0.0" files = [ @@ -4140,7 +3963,6 @@ sqlalchemy2-stubs = "*" name = "starkbank-ecdsa" version = "2.2.0" description = "A lightweight and fast pure python ECDSA library" -category = "main" optional = false python-versions = "*" files = [ @@ -4151,7 +3973,6 @@ files = [ name = "starlette" version = "0.25.0" description = "The little ASGI library that shines." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4169,7 +3990,6 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyam name = "starlette-context" version = "0.3.6" description = "Middleware for Starlette that allows you to store and access the context data of a request. Can be used with logging so logs automatically use request headers such as x-request-id or x-correlation-id." 
-category = "main" optional = false python-versions = ">=3.8,<4.0" files = [ @@ -4184,7 +4004,6 @@ starlette = "*" name = "tenacity" version = "8.2.3" description = "Retry code until it succeeds" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4199,7 +4018,6 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] name = "tomlkit" version = "0.12.3" description = "Style preserving TOML library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4211,7 +4029,6 @@ files = [ name = "tqdm" version = "4.66.1" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4232,7 +4049,6 @@ telegram = ["requests"] name = "twilio" version = "8.10.3" description = "Twilio API client and TwiML generator" -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -4250,7 +4066,6 @@ requests = ">=2.0.0" name = "typing-extensions" version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4262,7 +4077,6 @@ files = [ name = "urllib3" version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ @@ -4279,7 +4093,6 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] name = "uvicorn" version = "0.20.0" description = "The lightning-fast ASGI server." -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4298,7 +4111,6 @@ standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", name = "validators" version = "0.20.0" description = "Python Data Validation for Humans™." -category = "main" optional = false python-versions = ">=3.4" files = [ @@ -4315,7 +4127,6 @@ test = ["flake8 (>=2.4.0)", "isort (>=4.2.2)", "pytest (>=2.2.3)"] name = "virtualenv" version = "20.24.1" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -4336,7 +4147,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "wcwidth" version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" -category = "main" optional = false python-versions = "*" files = [ @@ -4348,7 +4158,6 @@ files = [ name = "websocket-client" version = "1.7.0" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -4365,7 +4174,6 @@ test = ["websockets"] name = "wheel" version = "0.42.0" description = "A built-package format for Python" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4380,7 +4188,6 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "wrapt" version = "1.16.0" description = "Module for decorators, wrappers and monkey patching." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -4460,7 +4267,6 @@ files = [ name = "yamllint" version = "1.33.0" description = "A linter for YAML files." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -4479,7 +4285,6 @@ dev = ["doc8", "flake8", "flake8-import-order", "rstcheck[sphinx]", "sphinx"] name = "yarl" version = "1.9.4" description = "Yet another URL library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -4583,7 +4388,6 @@ multidict = ">=4.0" name = "zipp" version = "3.17.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "main" optional = false python-versions = ">=3.8" files = [ From 07339030cf7e78cabfb4dd22315cd4c7194ea677 Mon Sep 17 00:00:00 2001 From: talboren Date: Tue, 30 Jan 2024 12:33:42 +0200 Subject: [PATCH 07/12] fix(provider): jira on prem missmatch with params (#759) --- .../jiraonprem_provider.py | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/keep/providers/jiraonprem_provider/jiraonprem_provider.py b/keep/providers/jiraonprem_provider/jiraonprem_provider.py index 1cc1f6b15..e7aabaa58 100644 --- a/keep/providers/jiraonprem_provider/jiraonprem_provider.py +++ b/keep/providers/jiraonprem_provider/jiraonprem_provider.py @@ -2,6 +2,7 @@ JiraonpremProvider is a class that implements the BaseProvider interface for Jira updates. """ import dataclasses +import json from typing import List from urllib.parse import urlencode, urljoin @@ -380,26 +381,29 @@ def _extract_project_key_from_board_name(self, board_name: str): def _notify( self, + summary: str, + description: str = "", + issue_type: str = "", + project_key: str = "", + board_name: str = "", + labels: List[str] = None, + components: List[str] = None, + custom_fields: dict = None, **kwargs: dict, ): """ Notify jira by creating an issue. """ - # extracrt the required params - project_key = kwargs.get("project_key", "") # if the user didn't provider a project_key, try to extract it from the board name if not project_key: - board_name = kwargs.get("board_name", "") project_key = self._extract_project_key_from_board_name(board_name) - summary = kwargs.get("summary", "") - description = kwargs.get("description", "") - # issue_type, issuetype - both are supported - issue_type = kwargs.get("issuetype", kwargs.get("issue_type", "")) - + issue_type = issue_type if issue_type else kwargs.get("issuetype", "Task") if not project_key or not summary or not issue_type or not description: raise ProviderException( f"Project key and summary are required! 
- {project_key}, {summary}, {issue_type}, {description}" ) + if labels and isinstance(labels, str): + labels = json.loads(labels.replace("'", '"')) try: self.logger.info("Notifying jira...") result = self.__create_issue( @@ -407,6 +411,10 @@ def _notify( summary=summary, description=description, issue_type=issue_type, + labels=labels, + components=components, + custom_fields=custom_fields, + **kwargs, ) result["ticket_url"] = f"{self.jira_host}/browse/{result['issue']['key']}" self.logger.info("Notified jira!") From d500461ac8c8b2eac9b9d9bc360b20ba4d0f03f7 Mon Sep 17 00:00:00 2001 From: talboren Date: Tue, 30 Jan 2024 16:14:19 +0200 Subject: [PATCH 08/12] fix(providers): prometheus webhook option not showing (#761) Co-authored-by: Shahar Glazner --- keep/providers/providers_factory.py | 1 - 1 file changed, 1 deletion(-) diff --git a/keep/providers/providers_factory.py b/keep/providers/providers_factory.py index 6761aae51..14fe43d7a 100644 --- a/keep/providers/providers_factory.py +++ b/keep/providers/providers_factory.py @@ -220,7 +220,6 @@ def get_all_providers() -> list[Provider]: ) supports_webhook = ( issubclass(provider_class, BaseProvider) - and provider_class.__dict__.get("format_alert") is not None and provider_class.__dict__.get("webhook_template") is not None ) can_notify = ( From 193cb45701b6d4e4819397f81e9da009ebb4c398 Mon Sep 17 00:00:00 2001 From: talboren Date: Wed, 31 Jan 2024 09:36:42 +0200 Subject: [PATCH 09/12] chore(ui): refactor columns order and visibility mech (#760) Co-authored-by: asharonbaltazar <58940073+asharonbaltazar@users.noreply.github.com> --- docker/Dockerfile.dev.ui | 2 +- docker/Dockerfile.ui | 2 +- keep-ui/app/alerts/alert-columns-select.tsx | 187 ++-------- keep-ui/app/alerts/alert-history.tsx | 10 +- keep-ui/app/alerts/alert-menu.tsx | 12 +- keep-ui/app/alerts/alert-table-headers.tsx | 157 +++++++++ keep-ui/app/alerts/alert-table-tab-panel.tsx | 3 +- keep-ui/app/alerts/alert-table-utils.tsx | 193 +++++++++++ keep-ui/app/alerts/alert-table.tsx | 345 ++----------------- keep-ui/package-lock.json | 68 +++- keep-ui/package.json | 4 +- keep-ui/utils/hooks/useAlerts.ts | 9 +- keep-ui/utils/hooks/useConfig.ts | 6 +- keep-ui/utils/hooks/useLocalStorage.ts | 57 +++ 14 files changed, 562 insertions(+), 493 deletions(-) create mode 100644 keep-ui/app/alerts/alert-table-headers.tsx create mode 100644 keep-ui/app/alerts/alert-table-utils.tsx create mode 100644 keep-ui/utils/hooks/useLocalStorage.ts diff --git a/docker/Dockerfile.dev.ui b/docker/Dockerfile.dev.ui index ab7709e3c..e49705cfe 100644 --- a/docker/Dockerfile.dev.ui +++ b/docker/Dockerfile.dev.ui @@ -16,7 +16,7 @@ COPY keep-ui/package*.json /app/ COPY ./keep-ui/ /app # Install dependencies in /app -RUN npm install --legacy-peer-deps +RUN npm install # Ensure port 3000 is accessible to our system EXPOSE 3000 diff --git a/docker/Dockerfile.ui b/docker/Dockerfile.ui index 57acb52a2..91e23410f 100644 --- a/docker/Dockerfile.ui +++ b/docker/Dockerfile.ui @@ -10,7 +10,7 @@ WORKDIR /app # Install dependencies based on the preferred package manager COPY package.json package-lock.json ./ -RUN npm ci --legacy-peer-deps +RUN npm ci # Rebuild the source code only when needed diff --git a/keep-ui/app/alerts/alert-columns-select.tsx b/keep-ui/app/alerts/alert-columns-select.tsx index daf7b11e2..88eee9b6f 100644 --- a/keep-ui/app/alerts/alert-columns-select.tsx +++ b/keep-ui/app/alerts/alert-columns-select.tsx @@ -1,21 +1,9 @@ import { Table } from "@tanstack/table-core"; -import { Subtitle } from "@tremor/react"; 
+import { Subtitle, MultiSelect, MultiSelectItem } from "@tremor/react";
 import { AlertDto } from "./models";
-import { MouseEventHandler } from "react";
-import Select, {
-  components,
-  MultiValueGenericProps,
-  MultiValueProps,
-  Props,
-} from "react-select";
-import {
-  SortableContainer,
-  SortableContainerProps,
-  SortableElement,
-  SortEndHandler,
-  SortableHandle,
-} from "react-sortable-hoc";
-import { Column } from "@tanstack/react-table";
+import { useLocalStorage } from "utils/hooks/useLocalStorage";
+import { VisibilityState } from "@tanstack/react-table";
+import { getDefaultColumnVisibilityState } from "./alert-table-utils";

 interface AlertColumnsSelectProps {
   table: Table;
   presetName?: string;
   isLoading: boolean;
 }

-export interface Option {
-  readonly value: string;
-  readonly label: string;
-  readonly color: string;
-  readonly isFixed?: boolean;
-  readonly isDisabled?: boolean;
-}
-
-function arrayMove(array: readonly T[], from: number, to: number) {
-  const slicedArray = array.slice();
-  slicedArray.splice(
-    to < 0 ? array.length + to : to,
-    0,
-    slicedArray.splice(from, 1)[0]
-  );
-  return slicedArray;
-}
-
-const columnsWithFixedPosition = ["alertMenu", "checkbox"];
-
-const SortableMultiValue = SortableElement((props: MultiValueProps