diff --git a/contrib/templates/sql-alert-trigger/README.md b/contrib/templates/sql-alert-trigger/README.md
new file mode 100644
index 0000000..282237d
--- /dev/null
+++ b/contrib/templates/sql-alert-trigger/README.md
@@ -0,0 +1,67 @@
+# SQL Alert Trigger
+
+The 'sql-alert-trigger' project contains a Databricks job and a Python notebook that connects to your **Jira** environment and creates an issue when run.
+
+![Workflow UI example](images/sql-alert-workflow.png)
+
+## Databricks Asset Bundle Template: Alert-Triggered Job
+
+This template creates a job that:
+1. Runs a SQL alert query
+2. Checks if the alert was triggered
+3. Executes a notebook if the condition is met
+
+Common use cases include monitoring data quality, sending notifications, or triggering remediation.
+
+
+## Getting started
+
+1. Install the Databricks CLI from https://docs.databricks.com/dev-tools/cli/install.html
+
+2. Authenticate to your Databricks workspace (if you have not done so already):
+   ```
+   $ databricks configure
+   ```
+
+3. Initialize this template in your environment:
+
+   ```
+   databricks bundle init https://github.com/databricks/bundle-examples --template-dir contrib/templates/sql-alert-trigger
+   ```
+
+   You will be prompted to provide the following details to tailor the template to your preferences:
+   - Project name
+   - SQL warehouse ID
+   - SQL alert ID
+
+Ensure you update the Python notebook to include your Jira connection details.
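+
+Optionally, store the Jira credentials as Databricks secrets rather than hardcoding them in the
+notebook. A minimal sketch using the Databricks CLI is shown below; the scope name `jira` and the
+key names match the examples in the notebook's comments, so adjust them if you use different names:
+
+   ```
+   $ databricks secrets create-scope jira
+   $ databricks secrets put-secret jira api-token
+   $ databricks secrets put-secret jira email
+   ```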
+
+
+4. To deploy a development copy of this project, type:
+   ```
+   $ databricks bundle deploy --target dev
+   ```
+   (Note that "dev" is the default target, so the `--target` parameter
+   is optional here.)
+
+   This deploys everything that's defined for this project.
+   For example, the default template would deploy a job called
+   `[dev yourname] Alert-Workflow` to your workspace.
+   You can find that job by opening your workspace and clicking on **Jobs & Pipelines**.
+
+5. Similarly, to deploy a production copy, type:
+   ```
+   $ databricks bundle deploy --target prod
+   ```
+
+6. To run the job, use the "run" command:
+   ```
+   $ databricks bundle run alert_workflow_job
+   ```
+
+7. Optionally, install developer tools such as the Databricks extension for Visual Studio Code from
+   https://docs.databricks.com/dev-tools/vscode-ext.html.
+
+8. For documentation on the Databricks Asset Bundles format used
+   for this project, and for CI/CD configuration, see
+   https://docs.databricks.com/dev-tools/bundles/index.html.
\ No newline at end of file
diff --git a/contrib/templates/sql-alert-trigger/databricks_template_schema.json b/contrib/templates/sql-alert-trigger/databricks_template_schema.json
new file mode 100644
index 0000000..5373cfa
--- /dev/null
+++ b/contrib/templates/sql-alert-trigger/databricks_template_schema.json
@@ -0,0 +1,27 @@
+{
+  "welcome_message": "\nWelcome to the sql-alert-trigger template for Databricks Asset Bundles!",
+  "properties": {
+
+    "project_name": {
+      "type": "string",
+      "default": "my_sql_alert_job",
+      "description": "Please provide the following details to tailor the template to your preferences.\n\nUnique name for this project\nproject_name",
+      "order": 1,
+      "pattern": "^[A-Za-z0-9_]+$",
+      "pattern_match_failure_message": "Name must consist of letters, numbers, and underscores."
+    },
+
+    "warehouse_id": {
+      "type": "string",
+      "description": "SQL Warehouse ID to run the alert query",
+      "order": 2
+    },
+    "alert_id": {
+      "type": "string",
+      "description": "ID of the existing SQL alert to execute",
+      "order": 3
+    }
+  },
+  "success_message": "\n\nYour new project has been created in the '{{.project_name}}' directory!"
+}
+
diff --git a/contrib/templates/sql-alert-trigger/images/sql-alert-workflow.png b/contrib/templates/sql-alert-trigger/images/sql-alert-workflow.png
new file mode 100644
index 0000000..6692e13
Binary files /dev/null and b/contrib/templates/sql-alert-trigger/images/sql-alert-workflow.png differ
diff --git a/contrib/templates/sql-alert-trigger/template/{{.project_name}}/databricks.yml.tmpl b/contrib/templates/sql-alert-trigger/template/{{.project_name}}/databricks.yml.tmpl
new file mode 100644
index 0000000..f91d0b0
--- /dev/null
+++ b/contrib/templates/sql-alert-trigger/template/{{.project_name}}/databricks.yml.tmpl
@@ -0,0 +1,36 @@
+bundle:
+  name: {{.project_name}}
+
+include:
+  - resources/*.yml
+
+# Variable declarations. These variables are assigned in the dev/prod targets below.
+variables:
+  warehouse_id:
+    description: The ID of the SQL warehouse used to run the alert query
+  alert_id:
+    description: The ID of the SQL alert to run
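+
+# Note: besides the per-target values below, variable values can also be overridden at deploy time.
+# A minimal sketch (adjust the IDs to your workspace):
+#   databricks bundle deploy --target dev --var="warehouse_id=<warehouse-id>" --var="alert_id=<alert-id>"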
+
+targets:
+  dev:
+    # The default target uses 'mode: development' to create a development copy.
+    # - Deployed resources get prefixed with '[dev my_user_name]'
+    # - Any job schedules and triggers are paused by default.
+    # See also https://docs.databricks.com/dev-tools/bundles/deployment-modes.html.
+    mode: development
+    default: true
+    workspace:
+      host: {{workspace_host}}
+    variables:
+      warehouse_id: {{.warehouse_id}}
+      alert_id: {{.alert_id}}
+  prod:
+    mode: production
+    workspace:
+      host: {{workspace_host}}
+      # We explicitly specify /Workspace/Users/{{user_name}} to make sure we only have a single copy.
+      root_path: /Workspace/Users/{{user_name}}/.bundle/${bundle.name}/${bundle.target}
+    variables:
+      # Placeholder values -- replace these with the warehouse and alert IDs to use in production.
+      warehouse_id: 1234abcd
+      alert_id: abcd-1234-abcd
\ No newline at end of file
diff --git a/contrib/templates/sql-alert-trigger/template/{{.project_name}}/resources/job.yml.tmpl b/contrib/templates/sql-alert-trigger/template/{{.project_name}}/resources/job.yml.tmpl
new file mode 100644
index 0000000..c6e8f4c
--- /dev/null
+++ b/contrib/templates/sql-alert-trigger/template/{{.project_name}}/resources/job.yml.tmpl
@@ -0,0 +1,43 @@
+
+resources:
+  jobs:
+    alert_workflow_job:
+      name: Alert-Workflow
+      tasks:
+        # Task 1: SQL Alert Query
+        - task_key: sql_alert_check
+          email_notifications: {}
+          sql_task:
+            warehouse_id: ${var.warehouse_id}
+            alert:
+              subscriptions:
+                - user_name: {{user_name}} # dynamically retrieved
+              alert_id: ${var.alert_id}
+          webhook_notifications: {}
+          run_if: ALL_SUCCESS
+
+        # Task 2: Conditional Check -- compares the alert task's output state to TRIGGERED.
+        # The quoting below escapes the expression so it is passed through verbatim to the
+        # generated job configuration as a dynamic value reference.
+        - task_key: condition
+          depends_on:
+            - task_key: sql_alert_check
+          webhook_notifications: {}
+          condition_task:
+            left: '{{ "{{" }}tasks.`sql_alert_check`.output.alert_state{{ "}}" }}'
+            op: EQUAL_TO
+            right: TRIGGERED
+          run_if: ALL_SUCCESS
+
+        # Task 3: Action Notebook
+        - task_key: trigger-action-notebook
+          depends_on:
+            - outcome: "true"
+              task_key: condition
+          notebook_task:
+            notebook_path: "../src/create_jira_issue.ipynb"
+            source: WORKSPACE
+          webhook_notifications: {}
+          run_if: ALL_SUCCESS
+
+      queue:
+        enabled: true
+      webhook_notifications: {}
diff --git a/contrib/templates/sql-alert-trigger/template/{{.project_name}}/src/create_jira_issue.ipynb b/contrib/templates/sql-alert-trigger/template/{{.project_name}}/src/create_jira_issue.ipynb
new file mode 100644
index 0000000..ff495d1
--- /dev/null
+++ b/contrib/templates/sql-alert-trigger/template/{{.project_name}}/src/create_jira_issue.ipynb
@@ -0,0 +1,231 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 0,
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "cellMetadata": {
+      "byteLimit": 2048000,
+      "rowLimit": 10000
+     },
+     "inputWidgets": {},
+     "nuid": "7e414f82-0392-4a4a-9e8a-b1599e401ccd",
+     "showTitle": false,
+     "tableResultSettingsMap": {},
+     "title": ""
+    }
+   },
+   "outputs": [],
+   "source": [
+    "import requests\n",
+    "import json\n",
+    "from requests.auth import HTTPBasicAuth"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# see here for more details on working with the Jira API: \n",
+    "# https://developer.atlassian.com/cloud/jira/platform/rest/v3/api-group-issues/#api-group-issues\n",
+    "\n",
+    "## Configuration - Store these as Databricks secrets for security\n",
+    "JIRA_URL = \"[https://JIRA-HOST-URL.atlassian.net/]\"\n",
+    "JIRA_EMAIL = \"[JIRA-EMAIL]\"\n",
+    "JIRA_API_TOKEN = \"[JIRA-API-TOKEN]\" ## Use dbutils.secrets.get()\n",
+    "PROJECT_KEY = \"[JIRA-PROJECT-KEY]\" ## Your Jira project key"
+   ]
+  },
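+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# Optional sketch: read the Jira connection details from Databricks secrets instead of\n",
+    "# hardcoding them above. The scope name 'jira' and the key names follow the examples used\n",
+    "# elsewhere in this notebook; adjust them to match the secret scope you created.\n",
+    "# JIRA_EMAIL = dbutils.secrets.get(scope=\"jira\", key=\"email\")\n",
+    "# JIRA_API_TOKEN = dbutils.secrets.get(scope=\"jira\", key=\"api-token\")"
+   ]
+  },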
"\n", + "def create_jira_ticket(summary, description, issue_type=\"Task\", priority=\"Medium\"):\n", + " \"\"\"\n", + " Create a Jira ticket via REST API\n", + " \n", + " Args:\n", + " summary (str): Ticket summary/title\n", + " description (str): Ticket description\n", + " issue_type (str): Type of issue (Task, Bug, Story, etc.)\n", + " priority (str): Priority level (Highest, High, Medium, Low, Lowest)\n", + " \n", + " Returns:\n", + " dict: Response containing ticket details or error\n", + " \"\"\"\n", + " \n", + " url = f\"{JIRA_URL}/rest/api/3/issue\"\n", + " \n", + " auth = HTTPBasicAuth(JIRA_EMAIL, JIRA_API_TOKEN)\n", + " \n", + " headers = {\n", + " \"Accept\": \"application/json\",\n", + " \"Content-Type\": \"application/json\"\n", + " }\n", + " \n", + " payload = {\n", + " \"fields\": {\n", + " \"project\": {\n", + " \"key\": PROJECT_KEY\n", + " },\n", + " \"summary\": summary,\n", + " \"description\": {\n", + " \"type\": \"doc\",\n", + " \"version\": 1,\n", + " \"content\": [\n", + " {\n", + " \"type\": \"paragraph\",\n", + " \"content\": [\n", + " {\n", + " \"type\": \"text\",\n", + " \"text\": description\n", + " }\n", + " ]\n", + " }\n", + " ]\n", + " },\n", + " \"issuetype\": {\n", + " \"name\": issue_type\n", + " },\n", + " \"priority\": {\n", + " \"name\": priority\n", + " }\n", + " }\n", + " }\n", + " \n", + " try:\n", + " response = requests.post(\n", + " url,\n", + " data=json.dumps(payload),\n", + " headers=headers,\n", + " auth=auth\n", + " )\n", + " \n", + " response.raise_for_status()\n", + " \n", + " result = response.json()\n", + " ticket_key = result.get(\"key\")\n", + " ticket_url = f\"{JIRA_URL}/browse/{ticket_key}\"\n", + " \n", + " print(f\"✓ Jira ticket created successfully!\")\n", + " print(f\" Ticket Key: {ticket_key}\")\n", + " print(f\" URL: {ticket_url}\")\n", + " \n", + " return {\n", + " \"success\": True,\n", + " \"ticket_key\": ticket_key,\n", + " \"ticket_url\": ticket_url,\n", + " \"response\": result\n", + " }\n", + " \n", + " except requests.exceptions.HTTPError as e:\n", + " error_msg = f\"HTTP Error: {e.response.status_code} - {e.response.text}\"\n", + " print(f\"✗ Failed to create Jira ticket: {error_msg}\")\n", + " return {\n", + " \"success\": False,\n", + " \"error\": error_msg\n", + " }\n", + " except Exception as e:\n", + " error_msg = f\"Error: {str(e)}\"\n", + " print(f\"✗ Failed to create Jira ticket: {error_msg}\")\n", + " return {\n", + " \"success\": False,\n", + " \"error\": error_msg\n", + " }\n", + "\n", + "# Example usage in Databricks\n", + "if __name__ == \"__main__\":\n", + " ## In production, use Databricks secrets:\n", + " ## JIRA_API_TOKEN = dbutils.secrets.get(scope=\"jira\", key=\"api-token\")\n", + " ## JIRA_EMAIL = dbutils.secrets.get(scope=\"jira\", key=\"email\")\n", + " \n", + " # Example: Create a ticket for a data pipeline failure\n", + " result = create_jira_ticket(\n", + " summary=\"Data Pipeline Failed - Daily ETL Job\",\n", + " description=\"The daily ETL job failed at step 3 during data validation. 
+  {
+   "cell_type": "code",
+   "execution_count": 0,
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "cellMetadata": {},
+     "inputWidgets": {},
+     "nuid": "6ef541f9-0c3a-4e57-95f8-c22b47ea2369",
+     "showTitle": false,
+     "tableResultSettingsMap": {},
+     "title": ""
+    }
+   },
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "application/vnd.databricks.v1+notebook": {
+   "computePreferences": {
+    "hardware": {
+     "accelerator": null,
+     "gpuPoolId": null,
+     "memory": null
+    }
+   },
+   "dashboards": [],
+   "environmentMetadata": {
+    "base_environment": "",
+    "environment_version": "4"
+   },
+   "inputWidgetPreferences": null,
+   "language": "python",
+   "notebookMetadata": {
+    "pythonIndentUnit": 2
+   },
+   "notebookName": "Jira Ticket Creation",
+   "widgets": {}
+  },
+  "language_info": {
+   "name": "python"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}