From eb2e88bf285c4addfb40e9697d4eb4867fe65baa Mon Sep 17 00:00:00 2001 From: Ricardo Serradas Date: Mon, 22 Jul 2019 19:11:24 -0700 Subject: [PATCH 1/7] Removing wait for cluster reboot --- package.json | 3 +- .../WaitForClusterRebootV1/.gitignore | 1 - .../WaitForClusterRebootV1/package.json | 32 --------- .../WaitForClusterRebootV1/task.json | 38 ----------- .../WaitForClusterRebootV1/tsconfig.json | 63 ----------------- .../waitforclusterreboot.js | 63 ----------------- .../waitforclusterreboot.sh | 33 --------- .../waitforclusterreboot.ts | 67 ------------------- vss-extension.json | 13 ---- 9 files changed, 1 insertion(+), 312 deletions(-) delete mode 100644 tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/.gitignore delete mode 100644 tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/package.json delete mode 100644 tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/task.json delete mode 100644 tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/tsconfig.json delete mode 100644 tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.js delete mode 100644 tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.sh delete mode 100644 tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.ts diff --git a/package.json b/package.json index 7161055..67b49ac 100644 --- a/package.json +++ b/package.json @@ -24,8 +24,7 @@ "build.scala.runSbtTests": "tsc -p ./tasks/Scala/RunSbtTests/RunSbtTestsV1", "build.scala.startCluster": "tsc -p ./tasks/Scala/StartCluster/StartClusterV1", "build.scala.uninstallCodeFromCluster": "tsc -p ./tasks/Scala/UninstallCodeFromCluster/UninstallCodeFromClusterV1", - "build.scala.waitForClusterReboot": "tsc -p ./tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1", - "build": "npm run recursive-install && npm run build.deployNotebook && npm run build.configureDatabricks && npm run build.executeNotebook && npm run build.waitExecution && npm run build.scala.installScalaTools && npm run build.scala.installSpark && npm run build.scala.runSbtTests && npm run build.scala.startCluster && npm run build.scala.uninstallCodeFromCluster && npm run build.scala.waitForClusterReboot", + "build": "npm run recursive-install && npm run build.deployNotebook && npm run build.configureDatabricks && npm run build.executeNotebook && npm run build.waitExecution && npm run build.scala.installScalaTools && npm run build.scala.installSpark && npm run build.scala.runSbtTests && npm run build.scala.startCluster && npm run build.scala.uninstallCodeFromCluster", "test.deployNotebook": "npm run build.deployNotebook && mocha ./DeployNotebooksTask/DeployNotebooksTaskV1/tests/_suite.js", "test": "npm run test.deployNotebook", "package.dev": "npm run test && tfx extension create --manifest-globs vss-extension.json --overrides-file --output-path out", diff --git a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/.gitignore b/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/.gitignore deleted file mode 100644 index 3c3629e..0000000 --- a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/.gitignore +++ /dev/null @@ -1 +0,0 @@ -node_modules diff --git a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/package.json b/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/package.json deleted file mode 100644 index 10dd06a..0000000 --- a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "waitforclusterreboot", - "version": "1.0.0", - 
"description": "Waits for a given cluster to be running", - "main": "waitforclusterreboot.js", - "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/microsoft/azdo-databricks.git" - }, - "keywords": [ - "azure", - "databricks", - "devops", - "cluster" - ], - "author": "Microsoft DevLabs", - "license": "MIT", - "bugs": { - "url": "https://github.com/microsoft/azdo-databricks/issues" - }, - "homepage": "https://github.com/microsoft/azdo-databricks#readme", - "dependencies": { - "azure-pipelines-task-lib": "^2.8.0" - }, - "devDependencies": { - "@types/node": "^12.6.2", - "@types/q": "^1.5.2" - } -} diff --git a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/task.json b/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/task.json deleted file mode 100644 index e64144c..0000000 --- a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/task.json +++ /dev/null @@ -1,38 +0,0 @@ -{ - "id": "75E7F77C-8FF4-4B1C-A993-276ECC5922D2", - "name": "waitforclusterreboot", - "friendlyName": "Wait for Cluster Reboot", - "description": "Wait for a reboot of a given cluster", - "helpMarkDown": "", - "category": "Utility", - "author": "Microsoft DevLabs", - "version": { - "Major": 0, - "Minor": 1, - "Patch": 0 - }, - "instanceNameFormat": "Wait the reboot of cluster $(clusterid) ", - "inputs": [ - { - "name": "clusterid", - "type": "string", - "label": "Cluster ID", - "defaultValue": "", - "required": true, - "helpMarkDown": "The ID of the Cluster to be monitored" - }, - { - "name": "workingDirectory", - "type": "filePath", - "label": "Working Directory", - "defaultValue": "", - "required": false, - "helpMarkDown": "" - } - ], - "execution": { - "Node": { - "target": "waitforclusterreboot.js" - } - } -} diff --git a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/tsconfig.json b/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/tsconfig.json deleted file mode 100644 index e7f648d..0000000 --- a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/tsconfig.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "compilerOptions": { - /* Basic Options */ - // "incremental": true, /* Enable incremental compilation */ - "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ - "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ - // "lib": [], /* Specify library files to be included in the compilation. */ - // "allowJs": true, /* Allow javascript files to be compiled. */ - // "checkJs": true, /* Report errors in .js files. */ - // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ - // "declaration": true, /* Generates corresponding '.d.ts' file. */ - // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ - // "sourceMap": true, /* Generates corresponding '.map' file. */ - // "outFile": "./", /* Concatenate and emit output to single file. */ - // "outDir": "./", /* Redirect output structure to the directory. */ - // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ - // "composite": true, /* Enable project compilation */ - // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ - // "removeComments": true, /* Do not emit comments to output. 
*/ - // "noEmit": true, /* Do not emit outputs. */ - // "importHelpers": true, /* Import emit helpers from 'tslib'. */ - // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ - // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ - - /* Strict Type-Checking Options */ - "strict": true, /* Enable all strict type-checking options. */ - // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* Enable strict null checks. */ - // "strictFunctionTypes": true, /* Enable strict checking of function types. */ - // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ - // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ - // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ - // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ - - /* Additional Checks */ - // "noUnusedLocals": true, /* Report errors on unused locals. */ - // "noUnusedParameters": true, /* Report errors on unused parameters. */ - // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ - // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ - - /* Module Resolution Options */ - // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ - // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ - // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ - // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ - // "typeRoots": [], /* List of folders to include type definitions from. */ - // "types": [], /* Type declaration files to be included in compilation. */ - // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ - "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ - // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - - /* Source Map Options */ - // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ - // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ - - /* Experimental Options */ - // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ - // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. 
*/ - } -} diff --git a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.js b/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.js deleted file mode 100644 index db5c5f2..0000000 --- a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.js +++ /dev/null @@ -1,63 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = require("path"); -const tl = require("azure-pipelines-task-lib"); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - tl.setResourcePath(path.join(__dirname, 'task.json')); - const workingDirectory = tl.getInput('workingDirectory', false); - if (workingDirectory != '') { - tl.cd(workingDirectory); - } - const clusterid = tl.getInput('clusterid', true); - let bashPath = tl.which('bash', true); - let fileName = 'waitforclusterreboot.sh'; - let filePath = path.join(__dirname, fileName); - let bash = tl.tool(bashPath); - bash.arg([ - filePath, - clusterid - ]); - let options = { - cwd: __dirname, - env: {}, - silent: false, - failOnStdErr: false, - errStream: process.stdout, - outStream: process.stdout, - ignoreReturnCode: true, - windowsVerbatimArguments: false - }; - // Listen for stderr. - let stderrFailure = false; - bash.on('stderr', (data) => { - stderrFailure = true; - }); - let exitCode = yield bash.exec(options); - let result = tl.TaskResult.Succeeded; - if (exitCode !== 0) { - tl.error("Bash exited with code " + exitCode); - result = tl.TaskResult.Failed; - } - // Fail on stderr. - if (stderrFailure) { - tl.error("Bash wrote one or more lines to the standard error stream."); - result = tl.TaskResult.Failed; - } - tl.setResult(result, "", true); - } - catch (err) { - tl.setResult(tl.TaskResult.Failed, err.message); - } - }); -} -run(); diff --git a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.sh b/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.sh deleted file mode 100644 index 29fb6ad..0000000 --- a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -# =================================================================================== -# -# FILE: 4-wait-for-reboot.sh -# -# USAGE: bash 4-wait-for-reboot.sh -# -# DESCRIPTION: Uses Databricks API to get id for cluster. -# Polls cluster state to see if cluster is running. -# Pauses execution of pipeline so new Spark JAR file can be installed. -# -# NOTES: --- -# AUTHOR: Bruno Terkaly -# VERSION: 1.0 -# CREATED: June 10, 2019 -#=================================================================================== -lookfor=RUNNING - -clusterStatus=$(databricks clusters get --cluster-id $clusterid --profile AZDO | jq -r .state) - -if [ "$clusterStatus" == "TERMINATED"] -do - echo "The cluster is not rebooting." 
- exit 1 -done - -while [ "$clusterStatus" != "$lookfor" ] -do - sleep 30 - echo "Restarting..." - clusterStatus=$(databricks clusters get --cluster-id $clusterid --profile AZDO | jq -r .state) -done -echo "Running now..." \ No newline at end of file diff --git a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.ts b/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.ts deleted file mode 100644 index ff50fa7..0000000 --- a/tasks/Scala/WaitForClusterReboot/WaitForClusterRebootV1/waitforclusterreboot.ts +++ /dev/null @@ -1,67 +0,0 @@ -import path = require('path') -import tl = require('azure-pipelines-task-lib'); -import tr = require('azure-pipelines-task-lib/toolrunner') - -async function run() { - try { - tl.setResourcePath(path.join(__dirname, 'task.json')); - - const workingDirectory: string = tl.getInput('workingDirectory', false); - - if(workingDirectory != ''){ - tl.cd(workingDirectory); - } - - const clusterid: string = tl.getInput('clusterid', true); - - let bashPath: string = tl.which('bash', true); - let fileName = 'waitforclusterreboot.sh' - let filePath = path.join(__dirname, fileName); - - let bash = tl.tool(bashPath); - - bash.arg([ - filePath, - clusterid - ]); - - let options = { - cwd: __dirname, - env: {}, - silent: false, - failOnStdErr: false, - errStream: process.stdout, - outStream: process.stdout, - ignoreReturnCode: true, - windowsVerbatimArguments: false - }; - - // Listen for stderr. - let stderrFailure = false; - bash.on('stderr', (data) => { - stderrFailure = true; - }); - - let exitCode: number = await bash.exec(options); - - let result = tl.TaskResult.Succeeded; - - if (exitCode !== 0) { - tl.error("Bash exited with code " + exitCode); - result = tl.TaskResult.Failed - } - - // Fail on stderr. 
- if (stderrFailure) { - tl.error("Bash wrote one or more lines to the standard error stream."); - result = tl.TaskResult.Failed; - } - - tl.setResult(result, "", true); - } - catch (err) { - tl.setResult(tl.TaskResult.Failed, err.message); - } -} - -run(); \ No newline at end of file diff --git a/vss-extension.json b/vss-extension.json index dbe8f5d..03254b0 100644 --- a/vss-extension.json +++ b/vss-extension.json @@ -53,9 +53,6 @@ { "path": "tasks/Scala/UninstallCodeFromCluster" }, - { - "path": "tasks/Scala/WaitForClusterReboot" - }, { "path": "tasks/Scala/CompileInstallJar" } @@ -151,16 +148,6 @@ "name": "tasks/Scala/UninstallCodeFromCluster" } }, - { - "id": "azdo-databricks-waitforclusterreboot", - "type": "ms.vss-distributed-task.task", - "targets": [ - "ms.vss-distributed-task.tasks" - ], - "properties": { - "name": "tasks/Scala/WaitForClusterReboot" - } - }, { "id": "azdo-databricks-compileinstalljar", "type": "ms.vss-distributed-task.task", From 14b88a06d40d7541e7a8a2b01a6259965a5edac6 Mon Sep 17 00:00:00 2001 From: Ricardo Serradas Date: Mon, 22 Jul 2019 19:46:11 -0700 Subject: [PATCH 2/7] Adding initial of executeADBjob --- package.json | 3 +- .../ExecuteDatabricksJobV1/.gitignore | 1 + .../executedatabricksjob.js | 31 +++++++ .../executedatabricksjob.ts | 20 +++++ .../ExecuteDatabricksJobV1/package.json | 37 ++++++++ .../ExecuteDatabricksJobV1/task.json | 86 +++++++++++++++++++ .../ExecuteDatabricksJobV1/tsconfig.json | 63 ++++++++++++++ vss-extension.json | 13 +++ 8 files changed, 253 insertions(+), 1 deletion(-) create mode 100644 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/.gitignore create mode 100644 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.js create mode 100644 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.ts create mode 100644 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json create mode 100644 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/task.json create mode 100644 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/tsconfig.json diff --git a/package.json b/package.json index 67b49ac..52479a2 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,8 @@ "build.scala.runSbtTests": "tsc -p ./tasks/Scala/RunSbtTests/RunSbtTestsV1", "build.scala.startCluster": "tsc -p ./tasks/Scala/StartCluster/StartClusterV1", "build.scala.uninstallCodeFromCluster": "tsc -p ./tasks/Scala/UninstallCodeFromCluster/UninstallCodeFromClusterV1", - "build": "npm run recursive-install && npm run build.deployNotebook && npm run build.configureDatabricks && npm run build.executeNotebook && npm run build.waitExecution && npm run build.scala.installScalaTools && npm run build.scala.installSpark && npm run build.scala.runSbtTests && npm run build.scala.startCluster && npm run build.scala.uninstallCodeFromCluster", + "build.executeDatabricksJob": "tsc -p ./tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1", + "build": "npm run recursive-install && npm run build.deployNotebook && npm run build.configureDatabricks && npm run build.executeNotebook && npm run build.waitExecution && npm run build.scala.installScalaTools && npm run build.scala.installSpark && npm run build.scala.runSbtTests && npm run build.scala.startCluster && npm run build.scala.uninstallCodeFromCluster && npm run build.executeDatabricksJob", "test.deployNotebook": "npm run build.deployNotebook && mocha ./DeployNotebooksTask/DeployNotebooksTaskV1/tests/_suite.js", "test": "npm run test.deployNotebook", "package.dev": "npm run test && tfx extension create 
--manifest-globs vss-extension.json --overrides-file --output-path out", diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/.gitignore b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/.gitignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/.gitignore @@ -0,0 +1 @@ +node_modules diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.js b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.js new file mode 100644 index 0000000..256351d --- /dev/null +++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.js @@ -0,0 +1,31 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const tl = require("azure-pipelines-task-lib/task"); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const targetType = tl.getInput('targetType'); + if (targetType.toUpperCase() == "JARJOB") { + console.log("Selected JAR Job"); + } + else if (targetType.toUpperCase() == "NOTEBOOKJOB") { + console.log("Selected Notebook Job"); + } + else { + console.log("Selected None"); + } + } + catch (err) { + tl.setResult(tl.TaskResult.Failed, err.message); + } + }); +} +run(); diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.ts b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.ts new file mode 100644 index 0000000..5760a15 --- /dev/null +++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.ts @@ -0,0 +1,20 @@ +import tl = require('azure-pipelines-task-lib/task'); + +async function run() { + try { + const targetType: string = tl.getInput('targetType'); + + if(targetType.toUpperCase() == "JARJOB"){ + console.log("Selected JAR Job"); + } else if(targetType.toUpperCase() == "NOTEBOOKJOB"){ + console.log("Selected Notebook Job") + } else { + console.log("Selected None"); + } + } + catch (err) { + tl.setResult(tl.TaskResult.Failed, err.message); + } +} + +run(); diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json new file mode 100644 index 0000000..dfdf0bf --- /dev/null +++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json @@ -0,0 +1,37 @@ +{ + "name": "executedatabricksjob", + "version": "1.0.0", + "description": "Executes a Databricks Job", + "main": "executedatabricksjob.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/microsoft/azdo-databricks.git" + }, + "keywords": [ + "azure", + "databricks", + "spark", + "devops", + "notebook", + "python", + "scala", + "jar", + "job" + ], + "author": "Microsoft DevLabs", + "license": "MIT", + "bugs": { + "url": "https://github.com/microsoft/azdo-databricks/issues" + }, + "homepage": "https://github.com/microsoft/azdo-databricks#readme", + 
"dependencies": { + "azure-pipelines-task-lib": "^2.8.0" + }, + "devDependencies": { + "@types/node": "^12.6.8", + "@types/q": "^1.5.2" + } +} diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/task.json b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/task.json new file mode 100644 index 0000000..ae3f575 --- /dev/null +++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/task.json @@ -0,0 +1,86 @@ +{ + "id": "98C5EE74-7831-4858-9B86-48C0EB8E7811", + "name": "executedatabricksjob", + "friendlyName": "Execute Databricks Job", + "description": "Runs a Job on Databricks", + "helpMarkDown": "Runs a Job on Databricks, based on given parameters. This task is currently able to run JAR jobs and, in the future, will also be able to run Notebooks.", + "category": "Utility", + "author": "Microsoft DevLabs", + "version": { + "Major": 0, + "Minor": 1, + "Patch": 0 + }, + "instanceNameFormat": "Run Databricks Job on $(clusterid)", + "inputs": [ + { + "name": "targetType", + "type": "radio", + "label": "Job Type", + "required": false, + "defaultValue": "jarjob", + "helpMarkDown": "Type of Job: JAR library or Notebook", + "options": { + "jarjob": "JAR Job", + "notebookjob": "Notebook" + } + }, + { + "name": "clusterid", + "type": "string", + "label": "Cluster ID", + "defaultValue": "", + "required": true, + "helpMarkDown": "The ID of the Cluster to use for running the job." + }, + { + "name": "notebookPath", + "type": "string", + "label": "Notebook path (at workspace)", + "visibleRule": "targetType = notebookjob", + "defaultValue": "", + "required": true, + "helpMarkDown": "The path to the Notebook to be executed (e.g., `/Shared/MyNotebook`)." + }, + { + "name": "executionParams", + "type": "multiLine", + "label": "Notebook parameters", + "visibleRule": "targetType = notebookjob", + "defaultValue": "", + "required": false, + "helpMarkDown": "The parameters to override the ones defined on the Job Configuration File. These will be used on this notebook execution, and should be provided in JSON. i.e., `{\\\"myParameter\\\":\\\"myValue\\\"}`. Quotes should be preceeded by backslash." + }, + { + "name": "packageName", + "type": "string", + "label": "Package Name (JAR file name without extension)", + "visibleRule": "targetType = jarjob", + "defaultValue": "", + "required": true, + "helpMarkDown": "The name of the JAR package you want to execute on the cluster, with no extension." + }, + { + "name": "mainClassName", + "type": "string", + "label": "Main Class name", + "visibleRule": "targetType = jarjob", + "defaultValue": "", + "required": true, + "helpMarkDown": "The name of the main class on the JAR package. E.g `com.company.classname`" + }, + { + "name": "failOnStderr", + "type": "boolean", + "label": "Fail on Standard Error", + "defaultValue": "false", + "required": false, + "helpMarkDown": "If this is true, this task will fail if any errors are written to the StandardError stream." 
+ } + ], + "execution": { + "Node": { + "target": "executedatabricksjob.js" + } + } +} diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/tsconfig.json b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/tsconfig.json new file mode 100644 index 0000000..e7f648d --- /dev/null +++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/tsconfig.json @@ -0,0 +1,63 @@ +{ + "compilerOptions": { + /* Basic Options */ + // "incremental": true, /* Enable incremental compilation */ + "target": "es6", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */ + "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ + // "lib": [], /* Specify library files to be included in the compilation. */ + // "allowJs": true, /* Allow javascript files to be compiled. */ + // "checkJs": true, /* Report errors in .js files. */ + // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ + // "declaration": true, /* Generates corresponding '.d.ts' file. */ + // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ + // "sourceMap": true, /* Generates corresponding '.map' file. */ + // "outFile": "./", /* Concatenate and emit output to single file. */ + // "outDir": "./", /* Redirect output structure to the directory. */ + // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ + // "composite": true, /* Enable project compilation */ + // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ + // "removeComments": true, /* Do not emit comments to output. */ + // "noEmit": true, /* Do not emit outputs. */ + // "importHelpers": true, /* Import emit helpers from 'tslib'. */ + // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ + // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ + + /* Strict Type-Checking Options */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* Enable strict null checks. */ + // "strictFunctionTypes": true, /* Enable strict checking of function types. */ + // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ + // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ + // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ + // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ + + /* Additional Checks */ + // "noUnusedLocals": true, /* Report errors on unused locals. */ + // "noUnusedParameters": true, /* Report errors on unused parameters. */ + // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ + // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ + + /* Module Resolution Options */ + // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ + // "baseUrl": "./", /* Base directory to resolve non-absolute module names. 
*/ + // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ + // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ + // "typeRoots": [], /* List of folders to include type definitions from. */ + // "types": [], /* Type declaration files to be included in compilation. */ + // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ + "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ + // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + + /* Source Map Options */ + // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ + // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ + + /* Experimental Options */ + // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ + // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ + } +} diff --git a/vss-extension.json b/vss-extension.json index 03254b0..9c1e1a6 100644 --- a/vss-extension.json +++ b/vss-extension.json @@ -55,6 +55,9 @@ }, { "path": "tasks/Scala/CompileInstallJar" + }, + { + "path": "tasks/ExecuteDatabricksJob" } ], "contributions": [ @@ -157,6 +160,16 @@ "properties": { "name": "tasks/Scala/CompileInstallJar" } + }, + { + "id": "azdo-databricks-executedatabricksjob", + "type": "ms.vss-distributed-task.task", + "targets": [ + "ms.vss-distributed-task.tasks" + ], + "properties": { + "name": "tasks/ExecuteDatabricksJob" + } } ] } From b764a3b1407636ddb262b972ae1423aeb81d222c Mon Sep 17 00:00:00 2001 From: Ricardo Serradas Date: Mon, 22 Jul 2019 19:47:10 -0700 Subject: [PATCH 3/7] Adjusting extension version on AzDO pipeline --- azure-pipelines-cd.yml | 2 +- azure-pipelines.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/azure-pipelines-cd.yml b/azure-pipelines-cd.yml index 7328e55..56f457f 100644 --- a/azure-pipelines-cd.yml +++ b/azure-pipelines-cd.yml @@ -1,4 +1,4 @@ -name: 0.5$(Rev:.r) +name: 0.6$(Rev:.r) trigger: branches: include: diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 536bd48..295178d 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -1,4 +1,4 @@ -name: 0.5$(Rev:.r) +name: 0.6$(Rev:.r) trigger: branches: include: From 66c18bffc5972e1d2a40649f06d38dae648fd409 Mon Sep 17 00:00:00 2001 From: Ricardo Serradas Date: Mon, 22 Jul 2019 20:17:20 -0700 Subject: [PATCH 4/7] Run on shell works --- azure-pipelines.yml | 5 +- .../ExecuteDatabricksJobV1/runjarjob.sh | 70 +++++++++++++++++++ 2 files changed, 73 insertions(+), 2 deletions(-) create mode 100755 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/runjarjob.sh diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 295178d..757e599 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -1,13 
+1,14 @@
 name: 0.6$(Rev:.r)
 trigger:
   branches:
-    include:
-    - '*'
     exclude:
     - master
   paths:
     exclude:
     - README.md
+    - CONTRIBUTING.md
+    - LICENSE
+    - 'docs/*'
 
 pool:
   vmImage: "ubuntu-latest"
diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/runjarjob.sh b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/runjarjob.sh
new file mode 100755
index 0000000..fe7f2c5
--- /dev/null
+++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/runjarjob.sh
@@ -0,0 +1,70 @@
+#!/bin/bash
+# ===================================================================================
+#
+#          FILE: 1-run-jar.sh
+#
+#         USAGE: bash 1-run-jar.sh ...
+#
+#   DESCRIPTION: Uses Databricks API to launch Spark Job.
+#                Relies on an existing JAR file being present.
+#                Uses API api/2.0/jobs/create
+#                         api/2.0/jobs/run-now
+#                Results in a Run Id that is needed later to validate SUCCESS
+#
+#         NOTES: ---
+#        AUTHOR: Bruno Terkaly
+#       VERSION: 1.0
+#       CREATED: June 10, 2019
+#===================================================================================
+
+#---------Create job
+
+clusterid=$1
+packagename=$2
+mainclassname=$3
+
+echo "Run a job"
+cat > job-configuration.json << EOF
+{
+  "name": "MySparkJob",
+  "existing_cluster_id": "$clusterid",
+  "libraries": [
+    {
+      "jar": "dbfs:/jar/$packagename.jar"
+    }
+  ],
+  "spark_jar_task": {
+    "main_class_name": "$mainclassname"
+  }
+}
+EOF
+cat job-configuration.json
+
+result=$(databricks jobs create --json-file job-configuration.json --profile AZDO)
+echo "result = $result"
+echo "Finished creating Databricks Job"
+
+jobid=$(echo $result | jq -r ".job_id")
+echo "=================================="
+echo "Job id "$jobid
+echo "=================================="
+
+#---------Run the job
+cat > job-run-configuration.json << EOF
+{
+  "job_id": $jobid
+}
+EOF
+result=$(databricks jobs run-now --job-id $jobid --profile AZDO)
+echo "result = $result"
+runid=`echo $result | jq -r ".run_id"`
+number_in_job=`echo $result | jq ".number_in_job"`
+echo "number_in_job = "$number_in_job
+
+echo "=================================="
+echo "Run id = "$runid
+echo "Number in Job = "$number_in_job
+echo "=================================="
+
+echo $runid > last-run.txt
+cat last-run.txt
\ No newline at end of file

From 53810af3a7fcdd61a74abf4363b6d82b92a7590b Mon Sep 17 00:00:00 2001
From: Ricardo Serradas
Date: Mon, 22 Jul 2019 21:44:36 -0700
Subject: [PATCH 5/7] Added Shell execution.
 Additional parameters to the JAR job execution
---
 .gitignore                                    |  4 +-
 .../executedatabricksjob.js                   | 32 +++++++++++++--
 .../{runjarjob.sh => executedatabricksjob.sh} | 16 +++++---
 .../executedatabricksjob.ts                   | 39 +++++++++++++++++--
 .../ExecuteDatabricksJobV1/task.json          |  9 +++++
 5 files changed, 87 insertions(+), 13 deletions(-)
 rename tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/{runjarjob.sh => executedatabricksjob.sh} (83%)

diff --git a/.gitignore b/.gitignore
index dd834fd..36ea307 100644
--- a/.gitignore
+++ b/.gitignore
@@ -85,4 +85,6 @@ package-lock.json
 npm-debug.log
 .override-file.json
 tasks/ExecuteNotebook/ExecuteNotebookV1/job-configuration.json
-.taskkey
\ No newline at end of file
+.taskkey
+tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/job-configuration.json
+tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/last-run.txt
diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.js b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.js
index 256351d..bf62213 100644
--- a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.js
+++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.js
@@ -8,19 +8,45 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
+const path = require("path");
 const tl = require("azure-pipelines-task-lib/task");
+const shell = require("shelljs");
+const clusterid = tl.getInput('clusterid', true);
+const failOnStderr = tl.getBoolInput('failOnStderr', false);
+function runJarJob() {
+    return __awaiter(this, void 0, void 0, function* () {
+        const packageName = tl.getInput('packageName', true);
+        const mainClassName = tl.getInput('mainClassName', true);
+        const jarParameters = tl.getInput('jarParameters', false);
+        let jarParametersJson = JSON.stringify(jarParameters);
+        let fileName = 'executedatabricksjob.sh';
+        let filePath = path.join(__dirname, fileName);
+        let runJobExec = shell.exec(`bash ${filePath} ${clusterid} ${packageName} ${mainClassName} ${jarParametersJson}`.trim());
+        if (runJobExec.code != 0) {
+            tl.setResult(tl.TaskResult.Failed, `Error while executing command: ${runJobExec.stderr}`);
+        }
+        if (failOnStderr && runJobExec.stderr != "") {
+            tl.setResult(tl.TaskResult.Failed, `Command wrote to stderr: ${runJobExec.stderr}`);
+        }
+    });
+}
+function runNotebookJob() {
+    return __awaiter(this, void 0, void 0, function* () {
+    });
+}
 function run() {
     return __awaiter(this, void 0, void 0, function* () {
         try {
+            tl.setResourcePath(path.join(__dirname, 'task.json'));
             const targetType = tl.getInput('targetType');
             if (targetType.toUpperCase() == "JARJOB") {
-                console.log("Selected JAR Job");
+                yield runJarJob();
             }
             else if (targetType.toUpperCase() == "NOTEBOOKJOB") {
-                console.log("Selected Notebook Job");
+                yield runNotebookJob();
             }
             else {
-                console.log("Selected None");
+                tl.setResult(tl.TaskResult.Failed, "Could not retrieve Job Type.");
             }
         }
         catch (err) {
diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/runjarjob.sh b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.sh
similarity index 83%
rename from tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/runjarjob.sh
rename to tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.sh
index fe7f2c5..ecac317 100755
--- a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/runjarjob.sh
+++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.sh
@@ -22,6 +22,7 @@
 clusterid=$1
 packagename=$2
 mainclassname=$3
+additionalparams=$4
 
 echo "Run a job"
 cat > job-configuration.json << EOF
@@ -50,12 +51,15 @@ echo "Job id "$jobid
 echo "=================================="
 
 #---------Run the job
-
-cat > job-run-configuration.json << EOF
-{
-  "job_id": $jobid
-}
-EOF
-result=$(databricks jobs run-now --job-id $jobid --profile AZDO)
+
+echo "Additional params: $additionalparams"
+
+if [ "$additionalparams" == "" ]; then
+  echo "No additional params passed."
+  result=$(databricks jobs run-now --job-id $jobid --profile AZDO)
+else
+  result=$(databricks jobs run-now --job-id $jobid --jar-params "$additionalparams" --profile AZDO)
+fi
 echo "result = $result"
 runid=`echo $result | jq -r ".run_id"`
 number_in_job=`echo $result | jq ".number_in_job"`
 echo "number_in_job = "$number_in_job
diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.ts b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.ts
index 5760a15..838429d 100644
--- a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.ts
+++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.ts
@@ -1,15 +1,48 @@
+import path = require('path');
 import tl = require('azure-pipelines-task-lib/task');
+import shell = require('shelljs');
+import { async } from 'q';
+
+const clusterid: string = tl.getInput('clusterid', true);
+const failOnStderr: boolean = tl.getBoolInput('failOnStderr', false);
+
+async function runJarJob(){
+    const packageName: string = tl.getInput('packageName', true);
+    const mainClassName: string = tl.getInput('mainClassName', true);
+    const jarParameters: string = tl.getInput('jarParameters', false);
+
+    let jarParametersJson = JSON.stringify(jarParameters);
+
+    let fileName = 'executedatabricksjob.sh';
+    let filePath = path.join(__dirname, fileName);
+
+    let runJobExec = shell.exec(`bash ${filePath} ${clusterid} ${packageName} ${mainClassName} ${jarParametersJson}`.trim());
+
+    if(runJobExec.code != 0) {
+        tl.setResult(tl.TaskResult.Failed, `Error while executing command: ${runJobExec.stderr}`);
+    }
+
+    if(failOnStderr && runJobExec.stderr != "") {
+        tl.setResult(tl.TaskResult.Failed, `Command wrote to stderr: ${runJobExec.stderr}`);
+    }
+}
+
+async function runNotebookJob() {
+
+}
 
 async function run() {
     try {
+        tl.setResourcePath(path.join(__dirname, 'task.json'));
+
         const targetType: string = tl.getInput('targetType');
 
         if(targetType.toUpperCase() == "JARJOB"){
-            console.log("Selected JAR Job");
+            await runJarJob();
         } else if(targetType.toUpperCase() == "NOTEBOOKJOB"){
-            console.log("Selected Notebook Job")
+            await runNotebookJob();
         } else {
-            console.log("Selected None");
+            tl.setResult(tl.TaskResult.Failed, "Could not retrieve Job Type.");
         }
     }
     catch (err) {
diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/task.json b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/task.json
index ae3f575..e3f8bbf 100644
--- a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/task.json
+++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/task.json
@@ -69,6 +69,15 @@
       "required": true,
       "helpMarkDown": "The name of the main class on the JAR package. E.g `com.company.classname`"
     },
+    {
+      "name": "jarParameters",
+      "type": "multiLine",
+      "label": "Additional parameters",
+      "visibleRule": "targetType = jarjob",
+      "defaultValue": "",
+      "required": false,
+      "helpMarkDown": "Additional parameters to pass to the JAR job. Format example: `[\"param1\", \"param2\"]`"
`[\"param1\", \"param2\"]`" + }, { "name": "failOnStderr", "type": "boolean", From 402c10bb612b42277fd022daaf0994ec333d25cf Mon Sep 17 00:00:00 2001 From: Ricardo Serradas Date: Mon, 22 Jul 2019 22:31:34 -0700 Subject: [PATCH 6/7] Implementing wait for job completion --- .../executedatabricksjob.sh | 89 ++++++++++++------ .../ExecuteDatabricksJobV1/icon.png | Bin 0 -> 10207 bytes 2 files changed, 61 insertions(+), 28 deletions(-) create mode 100755 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/icon.png diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.sh b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.sh index ecac317..9df9a3c 100755 --- a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.sh +++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/executedatabricksjob.sh @@ -23,9 +23,11 @@ clusterid=$1 packagename=$2 mainclassname=$3 additionalparams=$4 +jobrunid=-1 -echo "Run a job" -cat > job-configuration.json << EOF +createAndRunJob() { + echo "Run a job" + cat > job-configuration.json << EOF { "name": "MySparkJob", "existing_cluster_id": "$clusterid", @@ -39,36 +41,67 @@ cat > job-configuration.json << EOF } } EOF -cat job-configuration.json + cat job-configuration.json -result=$(databricks jobs create --json-file job-configuration.json --profile AZDO) -echo "result = $result" -echo "Finished creating Databricks Job" + result=$(databricks jobs create --json-file job-configuration.json --profile AZDO) + echo "result = $result" + echo "Finished creating Databricks Job" -jobid=$(echo $result | jq -r ".job_id") -echo "==================================" -echo "Job id "$jobid -echo "==================================" + jobid=$(echo $result | jq -r ".job_id") + echo "==================================" + echo "Job id "$jobid + echo "==================================" -#---------Run the job + #---------Run the job -echo "Additional params: $additionalparams" + echo "Additional params: $additionalparams" -if [ "$additionalparams" == "" ]; then - echo "No additional params passed." - result=$(databricks jobs run-now --job-id $jobid --profile AZDO) -else - result=$(databricks jobs run-now --job-id $jobid --jar-params "$additionalparams" --profile AZDO) -fi -echo "result = $result" -runid=`echo $result | jq -r ".run_id"` -number_in_job=`echo $result | jq ".number_in_job"` -echo "number_in_job = "$number_in_job + if [ "$additionalparams" == "" ]; then + echo "No additional params passed." + result=$(databricks jobs run-now --job-id $jobid --profile AZDO) + else + result=$(databricks jobs run-now --job-id $jobid --jar-params "$additionalparams" --profile AZDO) + fi + echo "result = $result" + runid=`echo $result | jq -r ".run_id"` + number_in_job=`echo $result | jq ".number_in_job"` + echo "number_in_job = "$number_in_job -echo "==================================" -echo "Run id = "$runid -echo "Number in Job = "$number_in_job -echo "==================================" + echo "==================================" + echo "Run id = "$runid + echo "Number in Job = "$number_in_job + echo "==================================" + jobrunid=$runid +} + +waitJobExecution() { + echo "run_id = "$jobrunid + + result=$(databricks runs get --run-id $jobrunid --profile AZDO | jq -r '.state.result_state') + + if [ "$result" == "null" ] + then + while [ "$result" == "null" ] + do + echo "Job still running..." 
+      result=$(databricks runs get --run-id $jobrunid --profile AZDO | jq -r '.state.result_state')
+      sleep 10
+    done
+  fi
+
+  echo "result = $result"
+  if [ "$result" == "SUCCESS" ]
+  then
+    echo "-------------------------------"
+    echo "Success for last run of "$jobrunid
+    echo "-------------------------------"
+  else
+    echo "-------------------------------"
+    echo "Failure for last run of "$jobrunid
+    echo "-------------------------------"
+    exit 1
+  fi
+}
 
-echo $runid > last-run.txt
-cat last-run.txt
\ No newline at end of file
+createAndRunJob
+waitJobExecution
\ No newline at end of file
diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/icon.png b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/icon.png
new file mode 100755
index 0000000000000000000000000000000000000000..3bee39abe714d30c0824dde3fbc10882ccbe1501
GIT binary patch
literal 10207
[10207 bytes of base85-encoded PNG image data omitted]

From e7cfdef8cbb8ca5f83c4e9cf322f29745454c217 Mon Sep 17 00:00:00 2001
From: Ricardo Serradas
Date: Mon, 22 Jul 2019 22:33:25 -0700
Subject: [PATCH 7/7] Added shelljs to package.json
---
 tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json
index dfdf0bf..47df04c 100644
--- a/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json
+++ b/tasks/ExecuteDatabricksJob/ExecuteDatabricksJobV1/package.json
@@ -28,6 +28,7 @@
   },
   "homepage": "https://github.com/microsoft/azdo-databricks#readme",
   "dependencies": {
+    "@types/shelljs": "^0.8.5",
     "azure-pipelines-task-lib": "^2.8.0"
   },
   "devDependencies": {