diff --git a/custom-words.txt b/custom-words.txt index 21df137e474d..c32f0dc4eb9b 100644 --- a/custom-words.txt +++ b/custom-words.txt @@ -1761,3 +1761,5 @@ userprincipalname sessionstate sessionhosts hostpool +sparkr +ambari diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkBatch.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkBatch.json new file mode 100644 index 000000000000..c142db23107b --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkBatch.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "batchId": 123 + }, + "responses": { + "200": { + "body": { + "msg": "deleted" + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkSessionJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkSessionJob.json new file mode 100644 index 000000000000..1de611517d3d --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkSessionJob.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "sessionId": 123 + }, + "responses": { + "200": { + "body": { + "msg": "deleted" + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkStatementJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkStatementJob.json new file mode 100644 index 000000000000..66961d597b2f --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkStatementJob.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 123, + "statementId": 123 + }, + "responses": { + "200": { + "body": { + "msg": "canceled" + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Get.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Get.json index d6b5d8d8977d..c246f56679d8 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Get.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Get.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "jobId": "job_1542085634998_0024", "fields": "*" }, @@ -62,9 +62,7 @@ "callback": null, "completed": "done", "userargs": { - "statusdir": "2018-11-13T05-49-52-4418fce1-e92e-4732-bc12-f2a3b8ef4432", "file": null, - "enablejobreconnect": null, "define": [], "enablelog": "false", "files": null, diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetAppState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetAppState.json index 7706db1f395a..50263f2566c5 100644 ---
a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetAppState.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetAppState.json @@ -1,7 +1,7 @@ { "parameters": { "appId": "application_1462754318084_0071", - "clusterDnsName": "cluster.azurehdinsight.net" + "endpoint": "cluster.azurehdinsight.net" }, "responses": { "200": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchJob.json new file mode 100644 index 000000000000..8834c36eba62 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchJob.json @@ -0,0 +1,31 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "batchId": 45 + }, + "responses": { + "200": { + "body": { + "id": 45, + "appId": null, + "appInfo": { + "driverLogUrl": null, + "sparkUiUrl": null + }, + "state": "starting", + "log": [ + "\t queue: default", + "\t start time: 1590386942572", + "\t final status: UNDEFINED", + "\t tracking URL: https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0005/", + "\t user: livy", + "20/05/25 06:09:02 INFO ShutdownHookManager: Shutdown hook called", + "20/05/25 06:09:02 INFO ShutdownHookManager: Deleting directory /tmp/spark-9ddb6d73-f204-44a4-83e8-afdbd4ce1a0b", + "20/05/25 06:09:02 INFO ShutdownHookManager: Deleting directory /tmp/spark-ef198a0d-de24-4da9-aeb5-c1b78c5fdd5c", + "\nstderr: ", + "\nYARN Diagnostics: " + ] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchLog.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchLog.json new file mode 100644 index 000000000000..19b0c392ee7e --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchLog.json @@ -0,0 +1,29 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "batchId": 45, + "from": 1, + "size": 10 + }, + "responses": { + "200": { + "body": { + "id": 45, + "from": 1, + "size": 10, + "log": [ + "SLF4J: Class path contains multiple SLF4J bindings.", + "SLF4J: Found binding in [jar:file:/usr/hdp/2.6.5.3015-8/spark2/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]", + "SLF4J: Found binding in [jar:file:/usr/hdp/2.6.5.3015-8/spark_llap/spark-llap-assembly-1.0.0.2.6.5.3015-8.jar!/org/slf4j/impl/StaticLoggerBinder.class]", + "SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.", + "SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]", + "Warning: Master yarn-cluster is deprecated since 2.0. Please use master \"yarn\" with specified deploy mode instead.", + "20/05/25 06:08:59 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable", + "Warning: Skip remote jar wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/microsoft-spark-2.3.x-0.6.0.jar.", + "20/05/25 06:08:59 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties", + "20/05/25 06:08:59 INFO WasbAzureIaasSink: Init starting." 
+ ] + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchState.json new file mode 100644 index 000000000000..c3444effea60 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchState.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "batchId": 45 + }, + "responses": { + "200": { + "body": { + "id": 45, + "state": "starting" + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionJob.json new file mode 100644 index 000000000000..5008f9c51d27 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionJob.json @@ -0,0 +1,34 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 34 + }, + "responses": { + "200": { + "body": { + "id": 34, + "appId": "application_1590286636717_0004", + "owner": null, + "proxyUser": null, + "state": "idle", + "kind": "spark", + "appInfo": { + "driverLogUrl": "http://wn2-shangw.4jhwm2oxfmoehnhvt2gwwtriqb.bx.internal.cloudapp.net:30060/node/containerlogs/container_e06_1590286636717_0004_01_000001/livy", + "sparkUiUrl": "https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0004/" + }, + "log": [ + "\t ApplicationMaster RPC port: -1", + "\t queue: default", + "\t start time: 1590384019312", + "\t final status: UNDEFINED", + "\t tracking URL: https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0004/", + "\t user: livy", + "20/05/25 05:20:19 INFO ShutdownHookManager: Shutdown hook called", + "20/05/25 05:20:19 INFO ShutdownHookManager: Deleting directory /tmp/spark-1557a190-880c-422d-a744-ce31d0fefa1d", + "20/05/25 05:20:19 INFO ShutdownHookManager: Deleting directory /tmp/spark-12bb0346-a2c9-4b9d-9f9b-feb91e30f554", + "\nYARN Diagnostics: " + ] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionLog.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionLog.json new file mode 100644 index 000000000000..d2846f9b62ae --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionLog.json @@ -0,0 +1,29 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 34, + "from": 1, + "size": 10 + }, + "responses": { + "200": { + "body": { + "id": 34, + "from": 1, + "total": 57, + "log": [ + "stdout: ", + "\nstderr: ", + "SLF4J: Class path contains multiple SLF4J bindings.", + "SLF4J: Found binding in [jar:file:/usr/hdp/2.6.5.3015-8/spark2/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]", + "SLF4J: Found binding in [jar:file:/usr/hdp/2.6.5.3015-8/spark_llap/spark-llap-assembly-1.0.0.2.6.5.3015-8.jar!/org/slf4j/impl/StaticLoggerBinder.class]", + "SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.", + "SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]", +
"Warning: Master yarn-cluster is deprecated since 2.0. Please use master \"yarn\" with specified deploy mode instead.", + "20/05/25 05:20:14 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable", + "20/05/25 05:20:15 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties" + ] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionState.json new file mode 100644 index 000000000000..3c73a94ce528 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionState.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 34 + }, + "responses": { + "200": { + "body": { + "id": 34, + "state": "idle" + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkStatementJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkStatementJob.json new file mode 100644 index 000000000000..1c45ad23b01c --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkStatementJob.json @@ -0,0 +1,24 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 34, + "statementId": 1 + }, + "responses": { + "200": { + "body": { + "id": 0, + "code": "1 + 1", + "state": "available", + "output": { + "status": "ok", + "execution_count": 0, + "data": { + "text/plain": "res0: Int = 2" + } + }, + "progress": 1.0 + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Kill.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Kill.json index a5574d18d08e..d0b67543a848 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Kill.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Kill.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "jobId": "job_1542085634998_0007" }, "responses": { @@ -62,7 +62,6 @@ "completed": null, "userargs": { "statusdir": "2018-11-13T05-35-01-6f79af0f-6157-494a-a03e-eabb190a0c10", - "enablejobreconnect": null, "arg": [ "10", "1000" @@ -73,11 +72,9 @@ "mapreduce.task.timeout=60000" ], "enablelog": "false", - "libjars": null, "files": null, "callback": null, - "jar": "/example/jars/hadoop-mapreduce-examples.jar", - "class": "pi" + "jar": "/example/jars/hadoop-mapreduce-examples.jar" }, "msg": null } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_List.json index b4f85d1dd7a7..d19c3bc9109e 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_List.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_List.json @@ -1,7 +1,7 @@ { 
"parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "fields": "*", "showall": "true" }, diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListAfterJobId.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListAfterJobId.json index c1c106808005..0fea23c34a31 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListAfterJobId.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListAfterJobId.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "jobId": "job_1542085634998_0029", "numrecords": 3, "fields": "*", diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkBatchJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkBatchJob.json new file mode 100644 index 000000000000..3c7db0140439 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkBatchJob.json @@ -0,0 +1,38 @@ +{ + "parameters": { + "endpoint": "clustername.azurehdinsight.net", + "from": 0, + "size": 1 + }, + "responses": { + "200": { + "body": { + "from": 0, + "total": 1, + "sessions": [ + { + "id": 44, + "state": "starting", + "appId": null, + "appInfo": { + "driverLogUrl": null, + "sparkUiUrl": null + }, + "log": [ + "20/05/25 04:59:18 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable", + "Warning: Skip remote jar wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/microsoft-spark-2.3.x-0.6.0.jar.", + "20/05/25 04:59:18 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties", + "20/05/25 04:59:18 INFO WasbAzureIaasSink: Init starting.", + "20/05/25 04:59:18 INFO AzureIaasSink: Init starting. 
Initializing MdsLogger.", + "20/05/25 04:59:18 INFO AzureIaasSink: Init completed.", + "20/05/25 04:59:18 INFO WasbAzureIaasSink: Init completed.", + "20/05/25 04:59:18 INFO MetricsSinkAdapter: Sink azurefs2 started", + "\nstderr: ", + "\nYARN Diagnostics: " + ] + } + ] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkSessionJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkSessionJob.json new file mode 100644 index 000000000000..30f7a88887a4 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkSessionJob.json @@ -0,0 +1,41 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "from": 0, + "size": 1 + }, + "responses": { + "200": { + "body": { + "from": 0, + "total": 1, + "sessions": [ + { + "id": 34, + "appId": "application_1590286636717_0004", + "owner": null, + "proxyUser": null, + "state": "idle", + "kind": "spark", + "appInfo": { + "driverLogUrl": "http://wn2-shangw.4jhwm2oxfmoehnhvt2gwwtriqb.bx.internal.cloudapp.net:30060/node/containerlogs/container_e06_1590286636717_0004_01_000001/livy", + "sparkUiUrl": "https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0004/" + }, + "log": [ + "\t ApplicationMaster RPC port: -1", + "\t queue: default", + "\t start time: 1590384019312", + "\t final status: UNDEFINED", + "\t tracking URL: https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0004/", + "\t user: livy", + "20/05/25 05:20:19 INFO ShutdownHookManager: Shutdown hook called", + "20/05/25 05:20:19 INFO ShutdownHookManager: Deleting directory /tmp/spark-1557a190-880c-422d-a744-ce31d0fefa1d", + "20/05/25 05:20:19 INFO ShutdownHookManager: Deleting directory /tmp/spark-12bb0346-a2c9-4b9d-9f9b-feb91e30f554", + "\nYARN Diagnostics: " + ] + } + ] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkStatementJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkStatementJob.json new file mode 100644 index 000000000000..3e447a4f4318 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkStatementJob.json @@ -0,0 +1,40 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 34 + }, + "responses": { + "200": { + "body": { + "statements": [ + { + "id": 0, + "code": "1 + 1", + "state": "available", + "output": { + "status": "ok", + "execution_count": 0, + "data": { + "text/plain": "res0: Int = 2" + } + }, + "progress": 1.0 + }, + { + "id": 1, + "code": "1 + 1", + "state": "available", + "output": { + "status": "ok", + "execution_count": 1, + "data": { + "text/plain": "res1: Int = 2" + } + }, + "progress": 1.0 + } + ] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitHiveJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitHiveJob.json index 314b5c13f31d..bcd62e40bfde 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitHiveJob.json +++ 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitHiveJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "execute=select%20querydwelltime%2B2%20from%20hivesampletable%20where%20clientid%20%3D%208&statusdir=2018-10-30T10-13-36-30513317-c398-4da0-a98c-57db43750f3a" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceJob.json index 5cf12f4e0d8f..9a13c808f75a 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "jar=%2Fexample%2Fjars%2Fhadoop-mapreduce-examples.jar&class=pi&arg=10&arg=1000&define=mapreduce.map.maxattempts%3D10&define=mapreduce.reduce.maxattempts%3D10&define=mapreduce.task.timeout%3D60000&statusdir=2018-10-30T10-12-29-595ef52d-583c-4b17-987b-675e4f8eea8f" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceStreamingJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceStreamingJob.json index b3cb16e542b2..506c4135ff63 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceStreamingJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceStreamingJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "input=%2Fexample%2Fdata%2Fgutenberg%2Fdavinci.txt&output=%2Fexample%2Fdata%2Fgutenberg%2Fwcount%2F3745c39c-0115-4735-a9eb-c57de9a29dcd&mapper=cat&reducer=wc&statusdir=2018-10-30T11-40-44-eb9dc5c1-99dd-4282-9d4a-a77c8ffb6b35" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitPigJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitPigJob.json index 95f0e96fafde..ad2da4c1f308 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitPigJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitPigJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": 
"execute=LOGS%20%3D%20LOAD%20%27wasb%3A%2F%2F%2Fexample%2Fdata%2Fsample.log%27%3BLEVELS%20%3D%20foreach%20LOGS%20generate%20REGEX_EXTRACT%28%240%2C%20%27%28TRACE%7CDEBUG%7CINFO%7CWARN%7CERROR%7CFATAL%29%27%2C%201%29%20%20as%20LOGLEVEL%3BFILTEREDLEVELS%20%3D%20FILTER%20LEVELS%20by%20LOGLEVEL%20is%20not%20null%3BGROUPEDLEVELS%20%3D%20GROUP%20FILTEREDLEVELS%20by%20LOGLEVEL%3BFREQUENCIES%20%3D%20foreach%20GROUPEDLEVELS%20generate%20group%20as%20LOGLEVEL%2C%20COUNT%28FILTEREDLEVELS.LOGLEVEL%29%20as%20COUNT%3BRESULT%20%3D%20order%20FREQUENCIES%20by%20COUNT%20desc%3BDUMP%20RESULT%3B&statusdir=2018-10-30T12-44-24-bd642975-9a89-4480-aea7-3ee1f34016ec" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkBatchJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkBatchJob.json new file mode 100644 index 000000000000..9090b5329746 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkBatchJob.json @@ -0,0 +1,47 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "sparkBatchJobRequest": { + "file": "wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/microsoft-spark-2.3.x-0.6.0.jar", + "proxyUser": "", + "className": "org.apache.spark.deploy.dotnet.DotnetRunner12", + "args": [ + "wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/wordcount.zip", + "WordCount", + "wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/user/sshroot/shakespeare.txt", + "wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/user/sshroot/result_livy.txt" + ], + "jars": [], + "pyFiles": [], + "files": [], + "driverMemory": "4g", + "driverCores": 4, + "executorMemory": "2g", + "executorCores": 2, + "numExecutors": 2, + "archives": [], + "queue": "default", + "name": "jobname", + "conf": null + } + }, + "responses": { + "201": { + "body": { + "id": 46, + "state": "starting", + "appId": null, + "appInfo": { + "driverLogUrl": null, + "sparkUiUrl": null + }, + "log": [ + "stdout: ", + "\nstderr: ", + "\nYARN Diagnostics: " + ] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkSessionJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkSessionJob.json new file mode 100644 index 000000000000..cc0c91872f3e --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkSessionJob.json @@ -0,0 +1,44 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "sparkSessionJobRequest": { + "kind": "spark", + "proxyUser": "", + "jars": [], + "pyFiles": [], + "files": [], + "driverMemory": "4g", + "driverCores": 4, + "executorMemory": "2g", + "executorCores": 2, + "numExecutors": 2, + "archives": [], + "queue": "default", + "name": "jobname", + "conf": null, + "heartbeatTimeoutInSecond": 0 + } + }, + "responses": { + "201": { + "body": { + "id": 34, + "appId": null, + "owner": null, + "proxyUser": null, + "kind": "spark", + "state": "starting", + "log": [ + "stdout: ", + "\nstderr: ", + "\nYARN Diagnostics: " + ], + "appInfo": { + "driverLogUrl": null, + "sparkUiUrl": null + } + } + } + } +} diff --git 
a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkStatementJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkStatementJob.json new file mode 100644 index 000000000000..0a288511d8aa --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkStatementJob.json @@ -0,0 +1,22 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "sessionId": 34, + "sparkStatementRequest": { + "code": "1 + 1", + "kind": "spark" + } + }, + "responses": { + "201": { + "body": { + "id": 0, + "code": "1 + 1", + "state": "waiting", + "output": null, + "progress": 0.0 + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSqoopJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSqoopJob.json index 2c26b24f605d..eddc3cb5cded 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSqoopJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSqoopJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "command=import%20--connect%20jdbc%3Asqlserver%3A%2F%2Fdongwwahdi.database.windows.net%3A1433%3Bdatabase%3DHdInsightJobTest%3Buser%3Ddongwwaadmin%3Bpassword%3DHDInsight123%21%3B%20--table%20dept%20--warehouse-dir%20%2Fuser%2Fadmin%2Fsqoop%2F25c8b9f9-dbc1-4096-aa42-45f2868ab782%20--hive-import%20-m%201%20--hive-table%20deptd3175062928d4170831ac17d2eefdaeb&statusdir=SqoopStatus" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json index 9faa6c4282c0..544f3e29c9c8 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json @@ -1,7 +1,7 @@ { "swagger": "2.0", "info": { - "title": "HDInsightJobManagementClient", + "title": "HDInsightJobClient", "description": "The HDInsight Job Client.", "version": "2018-11-01-preview", "x-ms-code-generation-settings": { @@ -9,10 +9,10 @@ } }, "x-ms-parameterized-host": { - "hostTemplate": "{clusterDnsName}", + "hostTemplate": "{endpoint}", "parameters": [ { - "$ref": "#/parameters/clusterDnsNameParameter" + "$ref": "#/parameters/endpointParameter" } ] }, @@ -911,13 +911,13 @@ } }, "parameters": { - "clusterDnsNameParameter": { - "name": "clusterDnsName", + "endpointParameter": { + "name": "endpoint", "in": "path", "required": true, "type": "string", "x-ms-skip-url-encoding": true, - "description": "The cluster dns name against which the job management is to be.", + "description": "The cluster endpoint, for example https://clustername.azurehdinsight.net.", "x-ms-parameter-location": "client" }, "UserNameParameter": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json new file mode 100644 index 000000000000..d5cbc84df867 --- 
/dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json @@ -0,0 +1,1121 @@ +{ + "swagger": "2.0", + "info": { + "title": "HDInsightJobClient", + "description": "The HDInsight Job Client.", + "version": "2018-11-01-preview", + "x-ms-code-generation-settings": { + "internalConstructors": true + } + }, + "x-ms-parameterized-host": { + "hostTemplate": "{endpoint}", + "parameters": [ + { + "$ref": "#/parameters/endpointParameter" + } + ] + }, + "schemes": [ + "https" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/livy/batches": { + "get": { + "tags": [ + "Job" + ], + "description": "List all spark batch jobs", + "operationId": "Job_ListSparkBatchJob", + "x-ms-examples": { + "List all spark batch jobs": { + "$ref": "./examples/HDI_Job_ListSparkBatchJob.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkBatchJobCollection" + } + } + } + }, + "post": { + "tags": [ + "Job" + ], + "description": "Create a new spark batch job.", + "operationId": "Job_SubmitSparkBatchJob", + "x-ms-examples": { + "Create a spark batch job": { + "$ref": "./examples/HDI_Job_SubmitSparkBatchJob.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sparkBatchJobRequest", + "in": "body", + "description": "Livy compatible batch job request payload.", + "required": true, + "schema": { + "$ref": "#/definitions/SparkBatchJobRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkBatchJob" + } + } + } + } + }, + "/livy/batches/{batchId}": { + "get": { + "tags": [ + "Job" + ], + "description": "Gets a single spark batch job.", + "operationId": "Job_GetSparkBatchJob", + "x-ms-examples": { + "Gets a single spark batch job": { + "$ref": "./examples/HDI_Job_GetSparkBatchJob.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkBatchJob" + } + } + } + }, + "delete": { + "tags": [ + "Job" + ], + "description": "Cancels a running spark batch job.", + "operationId": "Job_DeleteSparkBatch", + "x-ms-examples": { + "Cancels a running spark batch job": { + "$ref": "./examples/HDI_Job_DeleteSparkBatch.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + 
"$ref": "#/parameters/requestedByParameter" + }, + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobDeletedResult" + } + } + } + } + }, + "/livy/batches/{batchId}/log": { + "get": { + "tags": [ + "Job" + ], + "description": "Gets a single spark batch job logs.", + "operationId": "Job_GetSparkBatchLog", + "x-ms-examples": { + "Gets a single spark batch job logs": { + "$ref": "./examples/HDI_Job_GetSparkBatchLog.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobLog" + } + } + } + } + }, + "/livy/batches/{batchId}/state": { + "get": { + "tags": [ + "Job" + ], + "description": "Gets a single spark batch state.", + "operationId": "Job_GetSparkBatchState", + "x-ms-examples": { + "Gets a single spark batch state": { + "$ref": "./examples/HDI_Job_GetSparkBatchState.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobState" + } + } + } + } + }, + "/livy/sessions": { + "get": { + "tags": [ + "Job" + ], + "description": "List all spark sessions.", + "operationId": "Job_ListSparkSessionJob", + "x-ms-examples": { + "List all spark sessions": { + "$ref": "./examples/HDI_Job_ListSparkSessionJob.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkSessionCollection" + } + } + } + }, + "post": { + "tags": [ + "Job" + ], + "description": "Create a new spark session.", + "operationId": "Job_SubmitSparkSessionJob", + "x-ms-examples": { + "Create a new spark session": { + "$ref": "./examples/HDI_Job_SubmitSparkSessionJob.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sparkSessionJobRequest", + "in": "body", + "description": "Livy compatible session job request payload.", + "required": true, + "schema": { + "$ref": 
"#/definitions/SparkSessionJobRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkSessionJob" + } + } + } + } + }, + "/livy/sessions/{sessionId}": { + "get": { + "tags": [ + "Job" + ], + "description": "Gets a single spark session.", + "operationId": "Job_GetSparkSessionJob", + "x-ms-examples": { + "Gets a single spark session": { + "$ref": "./examples/HDI_Job_GetSparkSessionJob.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkSessionJob" + } + } + } + }, + "delete": { + "tags": [ + "Job" + ], + "description": "Cancels a running spark session.", + "operationId": "Job_DeleteSparkSessionJob", + "x-ms-examples": { + "Cancels a running spark session": { + "$ref": "./examples/HDI_Job_DeleteSparkSessionJob.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobDeletedResult" + } + } + } + } + }, + "/livy/sessions/{sessionId}/log": { + "get": { + "tags": [ + "Job" + ], + "description": "Gets a single spark session job logs.", + "operationId": "Job_GetSparkSessionLog", + "x-ms-examples": { + "Gets a single spark session job logs": { + "$ref": "./examples/HDI_Job_GetSparkSessionLog.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session job.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobLog" + } + } + } + } + }, + "/livy/sessions/{sessionId}/state": { + "get": { + "tags": [ + "Job" + ], + "description": "Gets a single spark session job state.", + "operationId": "Job_GetSparkSessionState", + "x-ms-examples": { + "Gets a single spark session job state": { + "$ref": "./examples/HDI_Job_GetSparkSessionState.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobState" + } + } + } + } + }, + "/livy/sessions/{sessionId}/statements": { + "get": { + "tags": [ + "Job" + ], + "description": "Gets a list of statements within a spark session.", + "operationId": "Job_ListSparkStatementJob", + "x-ms-examples": { + "Gets a list of statements within a spark 
session": { + "$ref": "./examples/HDI_Job_ListSparkStatementJob.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatementCollection" + } + } + } + }, + "post": { + "tags": [ + "Job" + ], + "description": "Create a statement within a spark session.", + "operationId": "Job_SubmitSparkStatementJob", + "x-ms-examples": { + "Create a statement within a spark session": { + "$ref": "./examples/HDI_Job_SubmitSparkStatementJob.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "sparkStatementRequest", + "in": "body", + "description": "Livy compatible batch job request payload.", + "required": true, + "schema": { + "$ref": "#/definitions/SparkStatementRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkStatement" + } + } + } + } + }, + "/livy/sessions/{sessionId}/statements/{statementId}": { + "get": { + "tags": [ + "Job" + ], + "description": "Gets a single statement within a spark session.", + "operationId": "Job_GetSparkStatementJob", + "x-ms-examples": { + "Gets a single statement within a spark session": { + "$ref": "./examples/HDI_Job_GetSparkStatementJob.json" + } + }, + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "statementId", + "in": "path", + "description": "Identifier for the statement.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatement" + } + } + } + } + }, + "/livy/sessions/{sessionId}/statements/{statementId}/cancel": { + "post": { + "tags": [ + "Job" + ], + "description": "Kill a statement within a session.", + "operationId": "Job_DeleteSparkStatementJob", + "x-ms-examples": { + "Kill a statement within a session": { + "$ref": "./examples/HDI_Job_DeleteSparkStatementJob.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "statementId", + "in": "path", + "description": "Identifier for the statement.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatementCancellationResult" + } + } + } + } + } + }, + "definitions": { + "SparkBatchJobCollection": { + "type": "object", + "properties": { + "from": { + "format": "int32", + "type": "integer" + }, + "total": { + "format": "int32", + "type": "integer" + }, + "sessions": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkBatchJob" + } + } + } + }, + 
"SparkBatchJob": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "appId": { + "type": "string" + }, + "appInfo": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "state": { + "type": "string" + }, + "log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "SparkBatchJobRequest": { + "type": "object", + "properties": { + "file": { + "type": "string" + }, + "proxyUser": { + "type": "string" + }, + "className": { + "type": "string" + }, + "args": { + "x-ms-client-name": "arguments", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "jars": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "pyFiles": { + "x-ms-client-name": "pythonFiles", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "files": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "driverMemory": { + "type": "string" + }, + "driverCores": { + "format": "int32", + "type": "integer" + }, + "executorMemory": { + "type": "string" + }, + "executorCores": { + "format": "int32", + "type": "integer" + }, + "numExecutors": { + "x-ms-client-name": "executorCount", + "format": "int32", + "type": "integer" + }, + "archives": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "queue": { + "type": "string" + }, + "name": { + "type": "string" + }, + "conf": { + "x-ms-client-name": "configuration", + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "SparkSessionCollection": { + "type": "object", + "properties": { + "from": { + "format": "int32", + "type": "integer" + }, + "total": { + "format": "int32", + "type": "integer" + }, + "sessions": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkSessionJob" + } + } + } + }, + "SparkSessionJob": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "appId": { + "type": "string" + }, + "owner": { + "type": "string" + }, + "proxyUser": { + "type": "string" + }, + "kind": { + "type": "string" + }, + "log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "state": { + "type": "string" + }, + "appInfo": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "SparkSessionJobRequest": { + "type": "object", + "properties": { + "kind": { + "type": "string", + "x-ms-enum": { + "name": "SessionJobKind", + "modelAsString": true + }, + "enum": [ + "spark", + "pyspark", + "sparkr", + "sql" + ] + }, + "proxyUser": { + "type": "string" + }, + "jars": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "pyFiles": { + "x-ms-client-name": "pythonFiles", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "files": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "driverMemory": { + "type": "string" + }, + "driverCores": { + "format": "int32", + "type": "integer" + }, + "executorMemory": { + "type": "string" + }, + "executorCores": { + "format": "int32", + "type": "integer" + }, + "numExecutors": { + "x-ms-client-name": "executorCount", + "format": "int32", + "type": "integer" + }, + "archives": { + "uniqueItems": false, + "type": "array", + "items": { + 
"type": "string" + } + }, + "queue": { + "type": "string" + }, + "name": { + "type": "string" + }, + "conf": { + "x-ms-client-name": "configuration", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "heartbeatTimeoutInSecond": { + "format": "int32", + "type": "integer" + } + } + }, + "SparkJobLog": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "from": { + "format": "int32", + "type": "integer" + }, + "size": { + "format": "int32", + "type": "integer" + }, + "total": { + "format": "int64", + "type": "integer" + }, + "log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "SparkJobState": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "state": { + "type": "string" + } + } + }, + "SparkStatementCollection": { + "type": "object", + "properties": { + "statements": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkStatement" + } + } + } + }, + "SparkStatement": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "code": { + "type": "string" + }, + "state": { + "type": "string" + }, + "output": { + "$ref": "#/definitions/SparkStatementOutput" + }, + "progress": { + "type": "number", + "format": "double" + } + } + }, + "SparkStatementOutput": { + "type": "object", + "properties": { + "status": { + "type": "string" + }, + "execution_count": { + "format": "int32", + "type": "integer" + }, + "data": { + "type": "object" + } + } + }, + "SparkStatementRequest": { + "type": "object", + "properties": { + "code": { + "type": "string" + }, + "kind": { + "type": "string" + } + } + }, + "SparkStatementCancellationResult": { + "type": "object", + "properties": { + "msg": { + "x-ms-client-name": "cancelMessage", + "type": "string" + } + } + }, + "SparkJobDeletedResult": { + "type": "object", + "properties": { + "msg": { + "x-ms-client-name": "deletedMessage", + "type": "string" + } + } + } + }, + "parameters": { + "requestedByParameter": { + "name": "X-Requested-By", + "x-ms-parameter-location": "method", + "x-ms-client-name": "requestedBy", + "description": "Add default value for X-Requested-By in header.", + "in": "header", + "required": false, + "default": "ambari", + "type": "string" + }, + "endpointParameter": { + "name": "endpoint", + "in": "path", + "required": true, + "type": "string", + "x-ms-skip-url-encoding": true, + "description": "The cluster endpoint, for example https://clustername.azurehdinsight.net.", + "x-ms-parameter-location": "client" + } + } +} diff --git a/specification/hdinsight/data-plane/readme.md b/specification/hdinsight/data-plane/readme.md index ffd1eb557e82..7116c5692305 100644 --- a/specification/hdinsight/data-plane/readme.md +++ b/specification/hdinsight/data-plane/readme.md @@ -36,8 +36,18 @@ These settings apply only when `--tag=package-2018-11-preview` is specified on t ``` yaml $(tag) == 'package-2018-11-preview' input-file: - Microsoft.HDInsight/preview/2018-11-01-preview/job.json +- Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json ``` +## Suppression +``` yaml +directive: + - suppress: DefinitionsPropertiesNamesCamelCase + reason: This would require a breaking change, and need to be consistent with the response from RP side. 
+ from: livySpark.json + where: + - $.definitions.SparkStatementOutput.properties.execution_count +``` --- # Code Generation @@ -146,6 +156,7 @@ require: $(this-folder)/../../../profiles/readme.md # all the input files across all versions input-file: - $(this-folder)/Microsoft.HDInsight/preview/2018-11-01-preview/job.json + - $(this-folder)/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json ```
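
To sanity-check the new `/livy/batches` surface end to end, here is a minimal sketch using Python's `requests` against the paths defined in livySpark.json. The cluster name, the `admin`/`password` credentials, and the application jar are placeholder assumptions, and the HDInsight gateway is assumed to use basic auth; per the spec, `X-Requested-By` is optional and defaults to `ambari`.

```python
# Minimal smoke-test sketch for the new /livy/batches operations.
# Cluster name, credentials, and the jar/class below are placeholders.
import time

import requests

ENDPOINT = "https://clustername.azurehdinsight.net"  # placeholder cluster
AUTH = ("admin", "password")                         # placeholder gateway credentials
HEADERS = {"X-Requested-By": "admin"}                # optional; spec default is "ambari"

# SparkBatchJobRequest payload; file and className are placeholders.
payload = {
    "file": "wasbs://container@account.blob.core.windows.net/app.jar",
    "className": "com.example.Main",
    "driverMemory": "4g",
    "executorMemory": "2g",
    "numExecutors": 2,
    "name": "jobname",
}

# POST /livy/batches -> 201 with a SparkBatchJob body (id, state, log, ...).
resp = requests.post(f"{ENDPOINT}/livy/batches", json=payload,
                     auth=AUTH, headers=HEADERS)
resp.raise_for_status()
batch_id = resp.json()["id"]

# Poll GET /livy/batches/{batchId}/state until the job settles.
state = "starting"
while state in ("starting", "running"):
    time.sleep(10)
    state = requests.get(f"{ENDPOINT}/livy/batches/{batch_id}/state",
                         auth=AUTH).json()["state"]

# GET /livy/batches/{batchId}/log, paged with from/size as in the examples.
log = requests.get(f"{ENDPOINT}/livy/batches/{batch_id}/log",
                   params={"from": 0, "size": 20}, auth=AUTH).json()
print(state)
print("\n".join(log["log"]))
```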
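The interactive flow behind the SparkSession and SparkStatement examples can be sketched the same way, under the same placeholder assumptions: create a session, wait for it to reach `idle`, run `1 + 1`, read the output, then delete the session.

```python
# Sketch of the session/statement flow from HDI_Job_SubmitSparkSessionJob.json
# and HDI_Job_SubmitSparkStatementJob.json; cluster and credentials are placeholders.
import time

import requests

ENDPOINT = "https://clustername.azurehdinsight.net"  # placeholder cluster
AUTH = ("admin", "password")                         # placeholder gateway credentials
HEADERS = {"X-Requested-By": "admin"}

# POST /livy/sessions -> 201 SparkSessionJob; kind is one of the
# SessionJobKind enum values (spark, pyspark, sparkr, sql).
session_id = requests.post(f"{ENDPOINT}/livy/sessions", json={"kind": "spark"},
                           auth=AUTH, headers=HEADERS).json()["id"]

# Poll GET /livy/sessions/{sessionId}/state until the session is idle.
while requests.get(f"{ENDPOINT}/livy/sessions/{session_id}/state",
                   auth=AUTH).json()["state"] != "idle":
    time.sleep(5)

# POST /livy/sessions/{sessionId}/statements -> 201 SparkStatement
# (initially in state "waiting", as in the submit example).
stmt = requests.post(f"{ENDPOINT}/livy/sessions/{session_id}/statements",
                     json={"code": "1 + 1", "kind": "spark"},
                     auth=AUTH, headers=HEADERS).json()

# Poll GET .../statements/{statementId} until the output is available.
while stmt["state"] != "available":
    time.sleep(2)
    stmt = requests.get(
        f"{ENDPOINT}/livy/sessions/{session_id}/statements/{stmt['id']}",
        auth=AUTH).json()
print(stmt["output"]["data"]["text/plain"])  # e.g. "res0: Int = 2"

# DELETE /livy/sessions/{sessionId} -> SparkJobDeletedResult ("deleted").
requests.delete(f"{ENDPOINT}/livy/sessions/{session_id}",
                auth=AUTH, headers=HEADERS)
```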
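For the pre-existing WebHCat operations in job.json (HDI_Job_Get, HDI_Job_Kill, and the submit examples), only the client-level parameter name changes from `clusterDnsName` to `endpoint`. A hedged sketch of a status poll, assuming the usual `/templeton/v1` base path and the same placeholder credentials; the `user.name`, `jobId`, and `fields` values mirror the HDI_Job_Get example.

```python
# Sketch of the renamed endpoint parameter in use for a WebHCat job lookup.
# The /templeton/v1 base path, credentials, and jobId are assumptions.
import requests

ENDPOINT = "https://cluster.azurehdinsight.net"  # was clusterDnsName, now endpoint
AUTH = ("admin", "password")                     # placeholder gateway credentials

job = requests.get(
    f"{ENDPOINT}/templeton/v1/jobs/job_1542085634998_0024",
    params={"user.name": "admin", "fields": "*"},
    auth=AUTH,
).json()
print(job)
```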