From dda77796321c4e6724112c53f93f65e3d2848c1a Mon Sep 17 00:00:00 2001 From: Shangwei Sun Date: Fri, 8 May 2020 19:35:52 +0800 Subject: [PATCH 1/6] Add livyspark api spec for hdi. --- .../examples/HDI_Job_Get.json | 4 +- .../examples/HDI_Job_GetAppState.json | 2 +- .../examples/HDI_Job_Kill.json | 7 +- .../examples/HDI_Job_List.json | 2 +- .../examples/HDI_Job_ListAfterJobId.json | 2 +- .../examples/HDI_Job_SubmitHiveJob.json | 2 +- .../examples/HDI_Job_SubmitMapReduceJob.json | 2 +- .../HDI_Job_SubmitMapReduceStreamingJob.json | 2 +- .../examples/HDI_Job_SubmitPigJob.json | 2 +- .../examples/HDI_Job_SubmitSqoopJob.json | 2 +- .../examples/HDI_SparkBatch_Create.json | 35 + .../examples/HDI_SparkBatch_Delete.json | 10 + .../examples/HDI_SparkBatch_Get.json | 19 + .../examples/HDI_SparkBatch_GetLogs.json | 18 + .../examples/HDI_SparkBatch_GetState.json | 14 + .../examples/HDI_SparkBatch_List.json | 17 + .../examples/HDI_SparkSession_Create.json | 37 + .../HDI_SparkSession_CreateStatement.json | 21 + .../examples/HDI_SparkSession_Delete.json | 10 + .../HDI_SparkSession_DeleteStatement.json | 14 + .../examples/HDI_SparkSession_Get.json | 20 + .../examples/HDI_SparkSession_GetLogs.json | 18 + .../examples/HDI_SparkSession_GetState.json | 14 + .../HDI_SparkSession_GetStatement.json | 17 + .../examples/HDI_SparkSession_List.json | 16 + .../HDI_SparkSession_ListStatements.json | 13 + .../preview/2018-11-01-preview/job.json | 10 +- .../preview/2018-11-01-preview/livySpark.json | 1128 +++++++++++++++++ specification/hdinsight/data-plane/readme.md | 11 + 29 files changed, 1448 insertions(+), 21 deletions(-) create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json create mode 100644 
specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_DeleteStatement.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json create mode 100644 specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json create mode 100644 
specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Get.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Get.json index d6b5d8d8977d..c246f56679d8 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Get.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Get.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "jobId": "job_1542085634998_0024", "fields": "*" }, @@ -62,9 +62,7 @@ "callback": null, "completed": "done", "userargs": { - "statusdir": "2018-11-13T05-49-52-4418fce1-e92e-4732-bc12-f2a3b8ef4432", "file": null, - "enablejobreconnect": null, "define": [], "enablelog": "false", "files": null, diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetAppState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetAppState.json index 7706db1f395a..50263f2566c5 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetAppState.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetAppState.json @@ -1,7 +1,7 @@ { "parameters": { "appId": "application_1462754318084_0071", - "clusterDnsName": "cluster.azurehdinsight.net" + "endpoint": "cluster.azurehdinsight.net" }, "responses": { "200": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Kill.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Kill.json 
index a5574d18d08e..d0b67543a848 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Kill.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_Kill.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "jobId": "job_1542085634998_0007" }, "responses": { @@ -62,7 +62,6 @@ "completed": null, "userargs": { "statusdir": "2018-11-13T05-35-01-6f79af0f-6157-494a-a03e-eabb190a0c10", - "enablejobreconnect": null, "arg": [ "10", "1000" @@ -73,11 +72,9 @@ "mapreduce.task.timeout=60000" ], "enablelog": "false", - "libjars": null, "files": null, "callback": null, - "jar": "/example/jars/hadoop-mapreduce-examples.jar", - "class": "pi" + "jar": "/example/jars/hadoop-mapreduce-examples.jar" }, "msg": null } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_List.json index b4f85d1dd7a7..d19c3bc9109e 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_List.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_List.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "fields": "*", "showall": "true" }, diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListAfterJobId.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListAfterJobId.json index c1c106808005..0fea23c34a31 100644 --- 
a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListAfterJobId.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListAfterJobId.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "jobId": "job_1542085634998_0029", "numrecords": 3, "fields": "*", diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitHiveJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitHiveJob.json index 314b5c13f31d..bcd62e40bfde 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitHiveJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitHiveJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "execute=select%20querydwelltime%2B2%20from%20hivesampletable%20where%20clientid%20%3D%208&statusdir=2018-10-30T10-13-36-30513317-c398-4da0-a98c-57db43750f3a" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceJob.json index 5cf12f4e0d8f..9a13c808f75a 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": 
"cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "jar=%2Fexample%2Fjars%2Fhadoop-mapreduce-examples.jar&class=pi&arg=10&arg=1000&define=mapreduce.map.maxattempts%3D10&define=mapreduce.reduce.maxattempts%3D10&define=mapreduce.task.timeout%3D60000&statusdir=2018-10-30T10-12-29-595ef52d-583c-4b17-987b-675e4f8eea8f" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceStreamingJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceStreamingJob.json index b3cb16e542b2..506c4135ff63 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceStreamingJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitMapReduceStreamingJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "input=%2Fexample%2Fdata%2Fgutenberg%2Fdavinci.txt&output=%2Fexample%2Fdata%2Fgutenberg%2Fwcount%2F3745c39c-0115-4735-a9eb-c57de9a29dcd&mapper=cat&reducer=wc&statusdir=2018-10-30T11-40-44-eb9dc5c1-99dd-4282-9d4a-a77c8ffb6b35" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitPigJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitPigJob.json index 95f0e96fafde..ad2da4c1f308 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitPigJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitPigJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": 
"cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "execute=LOGS%20%3D%20LOAD%20%27wasb%3A%2F%2F%2Fexample%2Fdata%2Fsample.log%27%3BLEVELS%20%3D%20foreach%20LOGS%20generate%20REGEX_EXTRACT%28%240%2C%20%27%28TRACE%7CDEBUG%7CINFO%7CWARN%7CERROR%7CFATAL%29%27%2C%201%29%20%20as%20LOGLEVEL%3BFILTEREDLEVELS%20%3D%20FILTER%20LEVELS%20by%20LOGLEVEL%20is%20not%20null%3BGROUPEDLEVELS%20%3D%20GROUP%20FILTEREDLEVELS%20by%20LOGLEVEL%3BFREQUENCIES%20%3D%20foreach%20GROUPEDLEVELS%20generate%20group%20as%20LOGLEVEL%2C%20COUNT%28FILTEREDLEVELS.LOGLEVEL%29%20as%20COUNT%3BRESULT%20%3D%20order%20FREQUENCIES%20by%20COUNT%20desc%3BDUMP%20RESULT%3B&statusdir=2018-10-30T12-44-24-bd642975-9a89-4480-aea7-3ee1f34016ec" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSqoopJob.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSqoopJob.json index 2c26b24f605d..eddc3cb5cded 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSqoopJob.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSqoopJob.json @@ -1,7 +1,7 @@ { "parameters": { "user.name": "admin", - "clusterDnsName": "cluster.azurehdinsight.net", + "endpoint": "cluster.azurehdinsight.net", "content": "command=import%20--connect%20jdbc%3Asqlserver%3A%2F%2Fdongwwahdi.database.windows.net%3A1433%3Bdatabase%3DHdInsightJobTest%3Buser%3Ddongwwaadmin%3Bpassword%3DHDInsight123%21%3B%20--table%20dept%20--warehouse-dir%20%2Fuser%2Fadmin%2Fsqoop%2F25c8b9f9-dbc1-4096-aa42-45f2868ab782%20--hive-import%20-m%201%20--hive-table%20deptd3175062928d4170831ac17d2eefdaeb&statusdir=SqoopStatus" }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json new file mode 100644 index 000000000000..d53cbdfb6743 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json @@ -0,0 +1,35 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "sparkBatchJobRequest": { + "file": "abfss://", + "proxyUser": "", + "className": "classname", + "args": [], + "jars": [], + "pyFiles": [], + "files": [], + "driverMemory": "4g", + "driverCores": 4, + "executorMemory": "2g", + "executorCores": 2, + "numExecutors": 2, + "archives": [], + "queue": "default", + "name": "jobname", + "conf": null + } + }, + "responses": { + "201": { + "body": { + "id": 1, + "appId": "fill in here", + "appInfo": null, + "state": "the state", + "log": [] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json new file mode 100644 index 000000000000..cf79a1c21e01 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json @@ -0,0 +1,10 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "batchId": 123 + }, + "responses": { + "200": {} + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json new file mode 100644 index 000000000000..7f8f01359892 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json
@@ -0,0 +1,19 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "from": 1, + "size": 10, + "batchId": 123 + }, + "responses": { + "200": { + "body": { + "id": 123, + "appId": "fill in here", + "appInfo": null, + "state": "the state", + "log": [] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json new file mode 100644 index 000000000000..fe7ed613127a --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json @@ -0,0 +1,18 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "batchId": 123, + "from": 1, + "size": 10 + }, + "responses": { + "200": { + "body": { + "id": 123, + "from": 1, + "size": 10, + "log": [] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json new file mode 100644 index 000000000000..fc6f75ffe83a --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "batchId": 123 + }, + "responses": { + "200": { + "body": { + "id": 123, + "state": "starting" + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json new file mode 100644 index 000000000000..9f48960d6771 --- /dev/null +++ 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json @@ -0,0 +1,17 @@ +{ + "parameters": { + "endpoint": "clustername.azurehdinsight.net", + "from": 0, + "size": 2, + "detailed": true + }, + "responses": { + "200": { + "body": { + "from": 0, + "total": 2, + "sessions": [] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json new file mode 100644 index 000000000000..1bc0cdb451ae --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json @@ -0,0 +1,37 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "sparkSessionJobRequest": { + "kind": "spark", + "proxyUser": "", + "jars": [], + "pyFiles": [], + "files": [], + "driverMemory": "4g", + "driverCores": 4, + "executorMemory": "2g", + "executorCores": 2, + "numExecutors": 2, + "archives": [], + "queue": "default", + "name": "jobname", + "conf": null, + "heartbeatTimeoutInSecond": 0 + } + }, + "responses": { + "201": { + "body": { + "id": 1, + "appId": "fill in here", + "owner": "admin", + "proxyUser": "", + "kind": "spark", + "state": "the state", + "log": [], + "appInfo": null + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json new file mode 100644 index 000000000000..b58a580649ff --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json @@ -0,0 +1,21 @@ +{ + "parameters": { + "endpoint": 
"cluster.azurehdinsight.net", + "X-Requested-By": "admin", + "sessionId": 123, + "sparkStatementRequest": { + "code": "fill in here", + "kind": "fill in here" + } + }, + "responses": { + "201": { + "body": { + "id": 123, + "code": "fill in here", + "state": "fill in here", + "output": null + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json new file mode 100644 index 000000000000..ac5c99911393 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json @@ -0,0 +1,10 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "x-Requested-By":"admin", + "sessionId": 123 + }, + "responses": { + "200": {} + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_DeleteStatement.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_DeleteStatement.json new file mode 100644 index 000000000000..66961d597b2f --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_DeleteStatement.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 123, + "statementId": 123 + }, + "responses": { + "200": { + "body": { + "msg": "canceled" + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json new file mode 100644 index 000000000000..bf2e6cc586b3 --- /dev/null +++ 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json @@ -0,0 +1,20 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 123 + }, + "responses": { + "200": { + "body": { + "id": 123, + "appId": "fill in here", + "owner": "admin", + "proxyUser": "", + "kind": "spark", + "state": "the state", + "log": [], + "appInfo": null + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json new file mode 100644 index 000000000000..0691124d3d49 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json @@ -0,0 +1,18 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 123, + "from": 1, + "size": 10 + }, + "responses": { + "200": { + "body": { + "id": 123, + "from": 1, + "size": 10, + "log": [] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json new file mode 100644 index 000000000000..47c967123405 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json @@ -0,0 +1,14 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 123 + }, + "responses": { + "200": { + "body": { + "id": 123, + "state": "starting" + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json new file mode 100644 index 000000000000..609211da02a8 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json @@ -0,0 +1,17 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 123, + "statementId": 123 + }, + "responses": { + "200": { + "body": { + "id": 123, + "code": "fill in here", + "state": "fill in here", + "output": null + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json new file mode 100644 index 000000000000..cd5d22315212 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json @@ -0,0 +1,16 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "from": 0, + "size": 2 + }, + "responses": { + "200": { + "body": { + "from": 0, + "total": 2, + "sessions": [] + } + } + } +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json new file mode 100644 index 000000000000..b7c1dc5acd02 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json @@ -0,0 +1,13 @@ +{ + "parameters": { + "endpoint": "cluster.azurehdinsight.net", + "sessionId": 123 + }, + "responses": { + "200": { + "body": { + "statements": [] + } + } + } +} diff --git 
a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json index 9faa6c4282c0..5fc691a66980 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json @@ -5,14 +5,14 @@ "description": "The HDInsight Job Client.", "version": "2018-11-01-preview", "x-ms-code-generation-settings": { - "internalConstructors": true + "internalConstructors": true } }, "x-ms-parameterized-host": { - "hostTemplate": "{clusterDnsName}", + "hostTemplate": "{endpoint}", "parameters": [ { - "$ref": "#/parameters/clusterDnsNameParameter" + "$ref": "#/parameters/endpointParameter" } ] }, @@ -911,8 +911,8 @@ } }, "parameters": { - "clusterDnsNameParameter": { - "name": "clusterDnsName", + "endpointParameter": { + "name": "endpoint", "in": "path", "required": true, "type": "string", diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json new file mode 100644 index 000000000000..22283886b5c2 --- /dev/null +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json @@ -0,0 +1,1128 @@ +{ + "swagger": "2.0", + "info": { + "title": "HDInsightJobManagementClient", + "description": "The HDInsight Job Client.", + "version": "2018-11-01-preview", + "x-ms-code-generation-settings": { + "internalConstructors": true + } + }, + "x-ms-parameterized-host": { + "hostTemplate": "{endpoint}", + "parameters": [ + { + "$ref": "#/parameters/endpointParameter" + } + ] + }, + "schemes": [ + "https" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": 
"https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/livy/batches": { + "get": { + "tags": [ + "SparkBatch" + ], + "description": "List all spark batch jobs", + "operationId": "SparkBatch_List", + "x-ms-examples": { + "List all spark batch jobs": { + "$ref": "./examples/HDI_SparkBatch_List.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkBatchJobCollection" + } + } + } + }, + "post": { + "tags": [ + "SparkBatch" + ], + "description": "Create a new spark batch job.", + "operationId": "SparkBatch_Create", + "x-ms-examples": { + "Create a spark batch job": { + "$ref": "./examples/HDI_SparkBatch_Create.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sparkBatchJobRequest", + "in": "body", + "description": "Livy compatible batch job request payload.", + "required": true, + "schema": { + "$ref": "#/definitions/SparkBatchJobRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkBatchJob" + } + } + } + } + }, + "/livy/batches/{batchId}": { + "get": { + "tags": [ + "SparkBatch" + ], + "description": "Gets a 
single spark batch job.", + "operationId": "SparkBatch_Get", + "x-ms-examples": { + "Gets a single spark batch job.": { + "$ref": "./examples/HDI_SparkBatch_Get.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkBatchJob" + } + } + } + }, + "delete": { + "tags": [ + "SparkBatch" + ], + "description": "Cancels a running spark batch job.", + "operationId": "SparkBatch_Delete", + "x-ms-examples": { + "Cancels a running spark batch job.": { + "$ref": "./examples/HDI_SparkBatch_Delete.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success" + } + } + } + }, + "/livy/batches/{batchId}/log": { + "get": { + "tags": [ + "SparkBatch" + ], + "description": "Gets a single spark batch job logs.", + "operationId": "SparkBatch_GetLogs", + "x-ms-examples": { + "Gets a single spark batch job logs": { + "$ref": "./examples/HDI_SparkBatch_GetLogs.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": 
"query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobLog" + } + } + } + } + }, + "/livy/batches/{batchId}/state": { + "get": { + "tags": [ + "SparkBatch" + ], + "description": "Gets a single spark batch state.", + "operationId": "SparkBatch_GetState", + "x-ms-examples": { + "Gets a single spark batch state.": { + "$ref": "./examples/HDI_SparkBatch_GetState.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobState" + } + } + } + } + }, + "/livy/sessions": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "List all spark sessions.", + "operationId": "SparkSession_List", + "x-ms-examples": { + "List all spark sessions.": { + "$ref": "./examples/HDI_SparkSession_List.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkSessionCollection" + } + } + } + }, + "post": { + "tags": [ + "SparkSession" + ], + 
"description": "Create a new spark session.", + "operationId": "SparkSession_Create", + "x-ms-examples": { + "Create a new spark session.": { + "$ref": "./examples/HDI_SparkSession_Create.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sparkSessionJobRequest", + "in": "body", + "description": "Livy compatible session job request payload.", + "required": true, + "schema": { + "$ref": "#/definitions/SparkSessionJobRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkSessionJob" + } + } + } + } + }, + "/livy/sessions/{sessionId}": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a single spark session.", + "operationId": "SparkSession_Get", + "x-ms-examples": { + "Gets a single spark session.": { + "$ref": "./examples/HDI_SparkSession_Get.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkSessionJob" + } + } + } + }, + "delete": { + "tags": [ + "SparkSession" + ], + "description": "Cancels a running spark session.", + "operationId": "SparkSession_Delete", + "x-ms-examples": { + "Cancels a running spark session.": { + "$ref": "./examples/HDI_SparkSession_Delete.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + 
"description": "Success" + } + } + } + }, + "/livy/sessions/{sessionId}/log": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a single spark session job logs.", + "operationId": "SparkSession_GetLogs", + "x-ms-examples": { + "Gets a single spark session job logs": { + "$ref": "./examples/HDI_SparkSession_GetLogs.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session job.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobLog" + } + } + } + } + }, + "/livy/sessions/{sessionId}/state": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a single spark session job state.", + "operationId": "SparkSession_GetState", + "x-ms-examples": { + "Gets a single spark session job state": { + "$ref": "./examples/HDI_SparkSession_GetState.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobState" + } + } + } + } + }, + "/livy/sessions/{sessionId}/statements": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a 
list of statements within a spark session.", + "operationId": "SparkSession_ListStatements", + "x-ms-examples": { + "Gets a list of statements within a spark session.": { + "$ref": "./examples/HDI_SparkSession_ListStatements.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatementCollection" + } + } + } + }, + "post": { + "tags": [ + "SparkSession" + ], + "description": "Create a statement within a spark session.", + "operationId": "SparkSession_CreateStatement", + "x-ms-examples": { + "Create a statement within a spark session.": { + "$ref": "./examples/HDI_SparkSession_CreateStatement.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "sparkStatementRequest", + "in": "body", + "description": "Livy compatible batch job request payload.", + "required": true, + "schema": { + "$ref": "#/definitions/SparkStatementRequest" + } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkStatement" + } + } + } + } + }, + "/livy/sessions/{sessionId}/statements/{statementId}": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a single statement within a spark session.", + "operationId": "SparkSession_GetStatement", + "x-ms-examples": { + "Gets a single statement within a spark session.": { + "$ref": "./examples/HDI_SparkSession_GetStatement.json" + } + }, + "consumes": [ + "application/json" + 
], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "statementId", + "in": "path", + "description": "Identifier for the statement.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatement" + } + } + } + } + }, + "/livy/sessions/{sessionId}/statements/{statementId}/cancel": { + "post": { + "tags": [ + "SparkSession" + ], + "description": "Kill a statement within a session.", + "operationId": "SparkSession_DeleteStatement", + "x-ms-examples": { + "Kill a statement within a session.": { + "$ref": "./examples/HDI_SparkSession_DeleteStatement.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "statementId", + "in": "path", + "description": "Identifier for the statement.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatementCancellationResult" + } + } + } + } + } + }, + "definitions": { + "SparkBatchJobCollection": { + "type": "object", + "properties": { + "from": { + "format": "int32", + "type": "integer" + }, + "total": { + "format": "int32", + "type": "integer" + }, + "sessions": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkBatchJob" + } + } + } + }, + "SparkBatchJob": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "appId": { + "type": "string" + }, + "appInfo": { 
+ "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "state": { + "type": "string" + }, + "log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "SparkBatchJobRequest": { + "type": "object", + "properties": { + "file": { + "type": "string" + }, + "proxyUser": { + "type": "string" + }, + "className": { + "type": "string" + }, + "args": { + "x-ms-client-name": "arguments", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "jars": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "pyFiles": { + "x-ms-client-name": "pythonFiles", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "files": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "driverMemory": { + "type": "string" + }, + "driverCores": { + "format": "int32", + "type": "integer" + }, + "executorMemory": { + "type": "string" + }, + "executorCores": { + "format": "int32", + "type": "integer" + }, + "numExecutors": { + "x-ms-client-name": "executorCount", + "format": "int32", + "type": "integer" + }, + "archives": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "queue": { + "type": "string" + }, + "name": { + "type": "string" + }, + "conf": { + "x-ms-client-name": "configuration", + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "SparkSessionCollection": { + "type": "object", + "properties": { + "from": { + "format": "int32", + "type": "integer" + }, + "total": { + "format": "int32", + "type": "integer" + }, + "sessions": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkSessionJob" + } + } + } + }, + "SparkSessionJob": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "appId": { + "type": "string" + }, 
+ "owner": { + "type": "string" + }, + "proxyUser": { + "type": "string" + }, + "kind": { + "type": "string" + }, + "log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "state": { + "type": "string" + }, + "appInfo": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "SparkSessionJobRequest": { + "type": "object", + "properties": { + "kind": { + "type": "string", + "x-ms-enum": { + "name": "SessionJobKind", + "modelAsString": false + }, + "enum": [ + "spark", + "pyspark", + "sparkr", + "sql" + ] + }, + "proxyUser": { + "type": "string" + }, + "jars": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "pyFiles": { + "x-ms-client-name": "pythonFiles", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "files": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "driverMemory": { + "type": "string" + }, + "driverCores": { + "format": "int32", + "type": "integer" + }, + "executorMemory": { + "type": "string" + }, + "executorCores": { + "format": "int32", + "type": "integer" + }, + "numExecutors": { + "x-ms-client-name": "executorCount", + "format": "int32", + "type": "integer" + }, + "archives": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "queue": { + "type": "string" + }, + "name": { + "type": "string" + }, + "conf": { + "x-ms-client-name": "configuration", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "heartbeatTimeoutInSecond": { + "format": "int32", + "type": "integer" + } + } + }, + "SparkJobLog": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "from": { + "format": "int32", + "type": "integer" + }, + "size": { + "format": "int32", + "type": "integer" + }, + "log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, 
+ "type": "array", + "items": { + "type": "string" + } + } + } + }, + "SparkJobState": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "state": { + "type": "string" + } + } + }, + "SparkStatementCollection": { + "type": "object", + "properties": { + "statements": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkStatement" + } + } + } + }, + "SparkStatement": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "code": { + "type": "string" + }, + "state": { + "type": "string" + }, + "output": { + "$ref": "#/definitions/SparkStatementOutput" + } + } + }, + "SparkStatementOutput": { + "type": "object", + "properties": { + "status": { + "type": "string" + }, + "execution_count": { + "format": "int32", + "type": "integer" + }, + "data": { + "type": "object" + } + } + }, + "SparkStatementRequest": { + "type": "object", + "properties": { + "code": { + "type": "string" + }, + "kind": { + "type": "string" + } + } + }, + "SparkStatementCancellationResult": { + "type": "object", + "properties": { + "msg": { + "x-ms-client-name": "cancelMessage", + "type": "string" + } + } + } + }, + "parameters": { + "requestedByParameter": { + "name": "X-Requested-By", + "x-ms-parameter-location": "client", + "x-ms-client-name": "requestedBy", + "description": "Add default vaule for X-Requested-By in header.", + "in": "header", + "required": false, + "default": "ambari", + "type": "string" + }, + "endpointParameter": { + "name": "endpoint", + "in": "path", + "required": true, + "type": "string", + "x-ms-skip-url-encoding": true, + "description": "The cluster dns name against which the job management is to be.", + "x-ms-parameter-location": "client" + } + } +} \ No newline at end of file diff --git a/specification/hdinsight/data-plane/readme.md b/specification/hdinsight/data-plane/readme.md index ffd1eb557e82..7116c5692305 100644 --- 
a/specification/hdinsight/data-plane/readme.md +++ b/specification/hdinsight/data-plane/readme.md @@ -36,8 +36,18 @@ These settings apply only when `--tag=package-2018-11-preview` is specified on t ``` yaml $(tag) == 'package-2018-11-preview' input-file: - Microsoft.HDInsight/preview/2018-11-01-preview/job.json +- Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json ``` +## Suppression +``` yaml +directive: + - suppress: DefinitionsPropertiesNamesCamelCase + reason: This would require a breaking change, and need to be consistent with the response from RP side. + from: livySpark.json + where: + - $.definitions.SparkStatementOutput.properties.execution_count +``` --- # Code Generation @@ -146,6 +156,7 @@ require: $(this-folder)/../../../profiles/readme.md # all the input files across all versions input-file: - $(this-folder)/Microsoft.HDInsight/preview/2018-11-01-preview/job.json + - $(this-folder)/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json ``` From e56b4bd16a42eefdc9b56d2d8423782dab80034b Mon Sep 17 00:00:00 2001 From: Shangwei Sun Date: Fri, 15 May 2020 17:17:40 +0800 Subject: [PATCH 2/6] Prettier check --- custom-words.txt | 2 + .../examples/HDI_SparkBatch_Create.json | 2 +- .../examples/HDI_SparkBatch_List.json | 2 +- .../examples/HDI_SparkSession_Create.json | 2 +- .../examples/HDI_SparkSession_Delete.json | 2 +- .../preview/2018-11-01-preview/job.json | 2 +- .../preview/2018-11-01-preview/livySpark.json | 2158 ++++++++--------- 7 files changed, 1086 insertions(+), 1084 deletions(-) diff --git a/custom-words.txt b/custom-words.txt index 21df137e474d..c32f0dc4eb9b 100644 --- a/custom-words.txt +++ b/custom-words.txt @@ -1761,3 +1761,5 @@ userprincipalname sessionstate sessionhosts hostpool +sparkr +ambari diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json index d53cbdfb6743..0a9a9656da45 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json @@ -32,4 +32,4 @@ } } } -} \ No newline at end of file +} diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json index 9f48960d6771..2f213841c7d0 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json @@ -1,6 +1,6 @@ { "parameters": { - "endpoint":"clustername.azurehdinsight.net", + "endpoint": "clustername.azurehdinsight.net", "from": 0, "size": 2, "detailed": true diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json index 1bc0cdb451ae..00876d23d0d9 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json @@ -17,7 +17,7 @@ "queue": "default", "name": "jobname", "conf": null, - "heartbeatTimeoutInSecond":0 + "heartbeatTimeoutInSecond": 0 } }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json index ac5c99911393..767fc13e311e 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json @@ -1,7 +1,7 @@ { "parameters": { "endpoint": "cluster.azurehdinsight.net", - "x-Requested-By":"admin", + "x-Requested-By": "admin", "sessionId": 123 }, "responses": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json index 5fc691a66980..7853a754c235 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json @@ -5,7 +5,7 @@ "description": "The HDInsight Job Client.", "version": "2018-11-01-preview", "x-ms-code-generation-settings": { - "internalConstructors": true + "internalConstructors": true } }, "x-ms-parameterized-host": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json index 22283886b5c2..adfdf3851b30 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json @@ -1,1128 +1,1128 @@ { - "swagger": "2.0", - "info": { - "title": "HDInsightJobManagementClient", - "description": "The HDInsight Job Client.", - "version": "2018-11-01-preview", - "x-ms-code-generation-settings": { - "internalConstructors": true - } - }, - "x-ms-parameterized-host": { - "hostTemplate": 
"{endpoint}", + "swagger": "2.0", + "info": { + "title": "HDInsightJobManagementClient", + "description": "The HDInsight Job Client.", + "version": "2018-11-01-preview", + "x-ms-code-generation-settings": { + "internalConstructors": true + } + }, + "x-ms-parameterized-host": { + "hostTemplate": "{endpoint}", + "parameters": [ + { + "$ref": "#/parameters/endpointParameter" + } + ] + }, + "schemes": [ + "https" + ], + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ], + "securityDefinitions": { + "azure_auth": { + "type": "oauth2", + "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", + "flow": "implicit", + "description": "Azure Active Directory OAuth2 Flow", + "scopes": { + "user_impersonation": "impersonate your user account" + } + } + }, + "paths": { + "/livy/batches": { + "get": { + "tags": [ + "SparkBatch" + ], + "description": "List all spark batch jobs", + "operationId": "SparkBatch_List", + "x-ms-examples": { + "List all spark batch jobs": { + "$ref": "./examples/HDI_SparkBatch_List.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], "parameters": [ - { - "$ref": "#/parameters/endpointParameter" + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkBatchJobCollection" } - ] - }, - "schemes": [ - "https" - ], - "security": [ - { - "azure_auth": [ - "user_impersonation" - ] + } } - ], - "securityDefinitions": { - "azure_auth": { - "type": "oauth2", - "authorizationUrl": 
"https://login.microsoftonline.com/common/oauth2/authorize", - "flow": "implicit", - "description": "Azure Active Directory OAuth2 Flow", - "scopes": { - "user_impersonation": "impersonate your user account" + }, + "post": { + "tags": [ + "SparkBatch" + ], + "description": "Create a new spark batch job.", + "operationId": "SparkBatch_Create", + "x-ms-examples": { + "Create a spark batch job": { + "$ref": "./examples/HDI_SparkBatch_Create.json" + } + }, + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sparkBatchJobRequest", + "in": "body", + "description": "Livy compatible batch job request payload.", + "required": true, + "schema": { + "$ref": "#/definitions/SparkBatchJobRequest" } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkBatchJob" + } + } } + } }, - "paths": { - "/livy/batches": { - "get": { - "tags": [ - "SparkBatch" - ], - "description": "List all spark batch jobs", - "operationId": "SparkBatch_List", - "x-ms-examples": { - "List all spark batch jobs": { - "$ref": "./examples/HDI_SparkBatch_List.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "from", - "in": "query", - "description": "Optional param specifying which index the list should begin from.", - "required": false, - "type": "integer", - "format": "int32" - }, - { - "name": "size", - "in": "query", - "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", - "required": false, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkBatchJobCollection" - } - } - } - }, - "post": { - "tags": [ - "SparkBatch" - ], - "description": "Create a new spark batch job.", - "operationId": 
"SparkBatch_Create", - "x-ms-examples": { - "Create a spark batch job": { - "$ref": "./examples/HDI_SparkBatch_Create.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "$ref": "#/parameters/requestedByParameter" - }, - { - "name": "sparkBatchJobRequest", - "in": "body", - "description": "Livy compatible batch job request payload.", - "required": true, - "schema": { - "$ref": "#/definitions/SparkBatchJobRequest" - } - } - ], - "responses": { - "201": { - "description": "Created", - "schema": { - "$ref": "#/definitions/SparkBatchJob" - } - } - } - } + "/livy/batches/{batchId}": { + "get": { + "tags": [ + "SparkBatch" + ], + "description": "Gets a single spark batch job.", + "operationId": "SparkBatch_Get", + "x-ms-examples": { + "Gets a single spark batch job.": { + "$ref": "./examples/HDI_SparkBatch_Get.json" + } }, - "/livy/batches/{batchId}": { - "get": { - "tags": [ - "SparkBatch" - ], - "description": "Gets a single spark batch job.", - "operationId": "SparkBatch_Get", - "x-ms-examples": { - "Gets a single spark batch job.": { - "$ref": "./examples/HDI_SparkBatch_Get.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "batchId", - "in": "path", - "description": "Identifier for the batch job.", - "required": true, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkBatchJob" - } - } - } - }, - "delete": { - "tags": [ - "SparkBatch" - ], - "description": "Cancels a running spark batch job.", - "operationId": "SparkBatch_Delete", - "x-ms-examples": { - "Cancels a running spark batch job.": { - "$ref": "./examples/HDI_SparkBatch_Delete.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "$ref": "#/parameters/requestedByParameter" - }, - { - "name": 
"batchId", - "in": "path", - "description": "Identifier for the batch job.", - "required": true, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success" - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkBatchJob" } + } + } + }, + "delete": { + "tags": [ + "SparkBatch" + ], + "description": "Cancels a running spark batch job.", + "operationId": "SparkBatch_Delete", + "x-ms-examples": { + "Cancels a running spark batch job.": { + "$ref": "./examples/HDI_SparkBatch_Delete.json" + } }, - "/livy/batches/{batchId}/log": { - "get": { - "tags": [ - "SparkBatch" - ], - "description": "Gets a single spark batch job logs.", - "operationId": "SparkBatch_GetLogs", - "x-ms-examples": { - "Gets a single spark batch job logs": { - "$ref": "./examples/HDI_SparkBatch_GetLogs.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "batchId", - "in": "path", - "description": "Identifier for the batch job.", - "required": true, - "type": "integer", - "format": "int32" - }, - { - "name": "from", - "in": "query", - "description": "Optional param specifying which index the list should begin from.", - "required": false, - "type": "integer", - "format": "int32" - }, - { - "name": "size", - "in": "query", - "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", - "required": false, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkJobLog" - } - } - } - } + "consumes": [ + 
"application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success" + } + } + } + }, + "/livy/batches/{batchId}/log": { + "get": { + "tags": [ + "SparkBatch" + ], + "description": "Gets a single spark batch job logs.", + "operationId": "SparkBatch_GetLogs", + "x-ms-examples": { + "Gets a single spark batch job logs": { + "$ref": "./examples/HDI_SparkBatch_GetLogs.json" + } }, - "/livy/batches/{batchId}/state": { - "get": { - "tags": [ - "SparkBatch" - ], - "description": "Gets a single spark batch state.", - "operationId": "SparkBatch_GetState", - "x-ms-examples": { - "Gets a single spark batch state.": { - "$ref": "./examples/HDI_SparkBatch_GetState.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "batchId", - "in": "path", - "description": "Identifier for the batch job.", - "required": true, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkJobState" - } - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": 
"integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobLog" } + } + } + } + }, + "/livy/batches/{batchId}/state": { + "get": { + "tags": [ + "SparkBatch" + ], + "description": "Gets a single spark batch state.", + "operationId": "SparkBatch_GetState", + "x-ms-examples": { + "Gets a single spark batch state.": { + "$ref": "./examples/HDI_SparkBatch_GetState.json" + } }, - "/livy/sessions": { - "get": { - "tags": [ - "SparkSession" - ], - "description": "List all spark sessions.", - "operationId": "SparkSession_List", - "x-ms-examples": { - "List all spark sessions.": { - "$ref": "./examples/HDI_SparkSession_List.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "from", - "in": "query", - "description": "Optional param specifying which index the list should begin from.", - "required": false, - "type": "integer", - "format": "int32" - }, - { - "name": "size", - "in": "query", - "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", - "required": false, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkSessionCollection" - } - } - } - }, - "post": { - "tags": [ - "SparkSession" - ], - "description": "Create a new spark session.", - "operationId": "SparkSession_Create", - "x-ms-examples": { - "Create a new spark session.": { - "$ref": "./examples/HDI_SparkSession_Create.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "$ref": "#/parameters/requestedByParameter" - }, - { - "name": "sparkSessionJobRequest", - "in": "body", - "description": "Livy compatible session job request payload.", - "required": true, - "schema": { - "$ref": "#/definitions/SparkSessionJobRequest" 
- } - } - ], - "responses": { - "201": { - "description": "Created", - "schema": { - "$ref": "#/definitions/SparkSessionJob" - } - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "batchId", + "in": "path", + "description": "Identifier for the batch job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobState" } + } + } + } + }, + "/livy/sessions": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "List all spark sessions.", + "operationId": "SparkSession_List", + "x-ms-examples": { + "List all spark sessions.": { + "$ref": "./examples/HDI_SparkSession_List.json" + } }, - "/livy/sessions/{sessionId}": { - "get": { - "tags": [ - "SparkSession" - ], - "description": "Gets a single spark session.", - "operationId": "SparkSession_Get", - "x-ms-examples": { - "Gets a single spark session.": { - "$ref": "./examples/HDI_SparkSession_Get.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "sessionId", - "in": "path", - "description": "Identifier for the session.", - "required": true, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkSessionJob" - } - } - } - }, - "delete": { - "tags": [ - "SparkSession" - ], - "description": "Cancels a running spark session.", - "operationId": "SparkSession_Delete", - "x-ms-examples": { - "Cancels a running spark session.": { - "$ref": "./examples/HDI_SparkSession_Delete.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "$ref": "#/parameters/requestedByParameter" - }, - { - "name": "sessionId", - "in": "path", - "description": "Identifier for the session.", - "required": true, - 
"type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success" - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkSessionCollection" } + } + } + }, + "post": { + "tags": [ + "SparkSession" + ], + "description": "Create a new spark session.", + "operationId": "SparkSession_Create", + "x-ms-examples": { + "Create a new spark session.": { + "$ref": "./examples/HDI_SparkSession_Create.json" + } }, - "/livy/sessions/{sessionId}/log": { - "get": { - "tags": [ - "SparkSession" - ], - "description": "Gets a single spark session job logs.", - "operationId": "SparkSession_GetLogs", - "x-ms-examples": { - "Gets a single spark session job logs": { - "$ref": "./examples/HDI_SparkSession_GetLogs.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "sessionId", - "in": "path", - "description": "Identifier for the session job.", - "required": true, - "type": "integer", - "format": "int32" - }, - { - "name": "from", - "in": "query", - "description": "Optional param specifying which index the list should begin from.", - "required": false, - "type": "integer", - "format": "int32" - }, - { - "name": "size", - "in": "query", - "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", - "required": false, - "type": "integer", - 
"format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkJobLog" - } - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sparkSessionJobRequest", + "in": "body", + "description": "Livy compatible session job request payload.", + "required": true, + "schema": { + "$ref": "#/definitions/SparkSessionJobRequest" } - }, - "/livy/sessions/{sessionId}/state": { - "get": { - "tags": [ - "SparkSession" - ], - "description": "Gets a single spark session job state.", - "operationId": "SparkSession_GetState", - "x-ms-examples": { - "Gets a single spark session job state": { - "$ref": "./examples/HDI_SparkSession_GetState.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "sessionId", - "in": "path", - "description": "Identifier for the session job.", - "required": true, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkJobState" - } - } - } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkSessionJob" } + } + } + } + }, + "/livy/sessions/{sessionId}": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a single spark session.", + "operationId": "SparkSession_Get", + "x-ms-examples": { + "Gets a single spark session.": { + "$ref": "./examples/HDI_SparkSession_Get.json" + } }, - "/livy/sessions/{sessionId}/statements": { - "get": { - "tags": [ - "SparkSession" - ], - "description": "Gets a list of statements within a spark session.", - "operationId": "SparkSession_ListStatements", - "x-ms-examples": { - "Gets a list of statements within a spark session.": { - "$ref": "./examples/HDI_SparkSession_ListStatements.json" - } - }, - 
"consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "sessionId", - "in": "path", - "description": "Identifier for the session.", - "required": true, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkStatementCollection" - } - } - } - }, - "post": { - "tags": [ - "SparkSession" - ], - "description": "Create a statement within a spark session.", - "operationId": "SparkSession_CreateStatement", - "x-ms-examples": { - "Create a statement within a spark session.": { - "$ref": "./examples/HDI_SparkSession_CreateStatement.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "$ref": "#/parameters/requestedByParameter" - }, - { - "name": "sessionId", - "in": "path", - "description": "Identifier for the session.", - "required": true, - "type": "integer", - "format": "int32" - }, - { - "name": "sparkStatementRequest", - "in": "body", - "description": "Livy compatible batch job request payload.", - "required": true, - "schema": { - "$ref": "#/definitions/SparkStatementRequest" - } - } - ], - "responses": { - "201": { - "description": "Created", - "schema": { - "$ref": "#/definitions/SparkStatement" - } - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkSessionJob" } + } + } + }, + "delete": { + "tags": [ + "SparkSession" + ], + "description": "Cancels a running spark session.", + "operationId": "SparkSession_Delete", + "x-ms-examples": { + "Cancels a running spark session.": { + "$ref": "./examples/HDI_SparkSession_Delete.json" + } }, 
- "/livy/sessions/{sessionId}/statements/{statementId}": { - "get": { - "tags": [ - "SparkSession" - ], - "description": "Gets a single statement within a spark session.", - "operationId": "SparkSession_GetStatement", - "x-ms-examples": { - "Gets a single statement within a spark session.": { - "$ref": "./examples/HDI_SparkSession_GetStatement.json" - } - }, - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "parameters": [ - { - "name": "sessionId", - "in": "path", - "description": "Identifier for the session.", - "required": true, - "type": "integer", - "format": "int32" - }, - { - "name": "statementId", - "in": "path", - "description": "Identifier for the statement.", - "required": true, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkStatement" - } - } - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success" + } + } + } + }, + "/livy/sessions/{sessionId}/log": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a single spark session job logs.", + "operationId": "SparkSession_GetLogs", + "x-ms-examples": { + "Gets a single spark session job logs": { + "$ref": "./examples/HDI_SparkSession_GetLogs.json" + } }, - "/livy/sessions/{sessionId}/statements/{statementId}/cancel": { - "post": { - "tags": [ - "SparkSession" - ], - "description": "Kill a statement within a session.", - "operationId": "SparkSession_DeleteStatement", - "x-ms-examples": { - "Kill a statement within a session.": { - "$ref": "./examples/HDI_SparkSession_DeleteStatement.json" - } - }, - "consumes": [ - "application/json" - ], 
- "produces": [ - "application/json" - ], - "parameters": [ - { - "$ref": "#/parameters/requestedByParameter" - }, - { - "name": "sessionId", - "in": "path", - "description": "Identifier for the session.", - "required": true, - "type": "integer", - "format": "int32" - }, - { - "name": "statementId", - "in": "path", - "description": "Identifier for the statement.", - "required": true, - "type": "integer", - "format": "int32" - } - ], - "responses": { - "200": { - "description": "Success", - "schema": { - "$ref": "#/definitions/SparkStatementCancellationResult" - } - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session job.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "from", + "in": "query", + "description": "Optional param specifying which index the list should begin from.", + "required": false, + "type": "integer", + "format": "int32" + }, + { + "name": "size", + "in": "query", + "description": "Optional param specifying the size of the returned list.\r\n By default it is 20 and that is the maximum.", + "required": false, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobLog" } + } } + } }, - "definitions": { - "SparkBatchJobCollection": { - "type": "object", - "properties": { - "from": { - "format": "int32", - "type": "integer" - }, - "total": { - "format": "int32", - "type": "integer" - }, - "sessions": { - "uniqueItems": false, - "type": "array", - "items": { - "$ref": "#/definitions/SparkBatchJob" - } - } - } + "/livy/sessions/{sessionId}/state": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a single spark session job state.", + "operationId": "SparkSession_GetState", + "x-ms-examples": { + "Gets a single spark session job state": { + "$ref": 
"./examples/HDI_SparkSession_GetState.json" + } }, - "SparkBatchJob": { - "type": "object", - "properties": { - "id": { - "format": "int32", - "type": "integer" - }, - "appId": { - "type": "string" - }, - "appInfo": { - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "state": { - "type": "string" - }, - "log": { - "x-ms-client-name": "logLines", - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session job.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobState" } + } + } + } + }, + "/livy/sessions/{sessionId}/statements": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a list of statements within a spark session.", + "operationId": "SparkSession_ListStatements", + "x-ms-examples": { + "Gets a list of statements within a spark session.": { + "$ref": "./examples/HDI_SparkSession_ListStatements.json" + } }, - "SparkBatchJobRequest": { - "type": "object", - "properties": { - "file": { - "type": "string" - }, - "proxyUser": { - "type": "string" - }, - "className": { - "type": "string" - }, - "args": { - "x-ms-client-name": "arguments", - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "jars": { - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "pyFiles": { - "x-ms-client-name": "pythonFiles", - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "files": { - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "driverMemory": { - "type": "string" - }, - "driverCores": { - "format": "int32", - "type": "integer" - }, - "executorMemory": { - "type": 
"string" - }, - "executorCores": { - "format": "int32", - "type": "integer" - }, - "numExecutors": { - "x-ms-client-name": "executorCount", - "format": "int32", - "type": "integer" - }, - "archives": { - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "queue": { - "type": "string" - }, - "name": { - "type": "string" - }, - "conf": { - "x-ms-client-name": "configuration", - "type": "object", - "additionalProperties": { - "type": "string" - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatementCollection" } + } + } + }, + "post": { + "tags": [ + "SparkSession" + ], + "description": "Create a statement within a spark session.", + "operationId": "SparkSession_CreateStatement", + "x-ms-examples": { + "Create a statement within a spark session.": { + "$ref": "./examples/HDI_SparkSession_CreateStatement.json" + } }, - "SparkSessionCollection": { - "type": "object", - "properties": { - "from": { - "format": "int32", - "type": "integer" - }, - "total": { - "format": "int32", - "type": "integer" - }, - "sessions": { - "uniqueItems": false, - "type": "array", - "items": { - "$ref": "#/definitions/SparkSessionJob" - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "sparkStatementRequest", + "in": "body", + "description": "Livy compatible batch job request payload.", + "required": true, + "schema": { + "$ref": 
"#/definitions/SparkStatementRequest" } - }, - "SparkSessionJob": { - "type": "object", - "properties": { - "id": { - "format": "int32", - "type": "integer" - }, - "appId": { - "type": "string" - }, - "owner": { - "type": "string" - }, - "proxyUser": { - "type": "string" - }, - "kind": { - "type": "string" - }, - "log": { - "x-ms-client-name": "logLines", - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "state": { - "type": "string" - }, - "appInfo": { - "type": "object", - "additionalProperties": { - "type": "string" - } - } + } + ], + "responses": { + "201": { + "description": "Created", + "schema": { + "$ref": "#/definitions/SparkStatement" } + } + } + } + }, + "/livy/sessions/{sessionId}/statements/{statementId}": { + "get": { + "tags": [ + "SparkSession" + ], + "description": "Gets a single statement within a spark session.", + "operationId": "SparkSession_GetStatement", + "x-ms-examples": { + "Gets a single statement within a spark session.": { + "$ref": "./examples/HDI_SparkSession_GetStatement.json" + } }, - "SparkSessionJobRequest": { - "type": "object", - "properties": { - "kind": { - "type": "string", - "x-ms-enum": { - "name": "SessionJobKind", - "modelAsString": false - }, - "enum": [ - "spark", - "pyspark", - "sparkr", - "sql" - ] - }, - "proxyUser": { - "type": "string" - }, - "jars": { - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "pyFiles": { - "x-ms-client-name": "pythonFiles", - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "files": { - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "driverMemory": { - "type": "string" - }, - "driverCores": { - "format": "int32", - "type": "integer" - }, - "executorMemory": { - "type": "string" - }, - "executorCores": { - "format": "int32", - "type": "integer" - }, - "numExecutors": { - "x-ms-client-name": "executorCount", - "format": "int32", - "type": "integer" 
- }, - "archives": { - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - }, - "queue": { - "type": "string" - }, - "name": { - "type": "string" - }, - "conf": { - "x-ms-client-name": "configuration", - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "heartbeatTimeoutInSecond": { - "format": "int32", - "type": "integer" - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "statementId", + "in": "path", + "description": "Identifier for the statement.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatement" } + } + } + } + }, + "/livy/sessions/{sessionId}/statements/{statementId}/cancel": { + "post": { + "tags": [ + "SparkSession" + ], + "description": "Kill a statement within a session.", + "operationId": "SparkSession_DeleteStatement", + "x-ms-examples": { + "Kill a statement within a session.": { + "$ref": "./examples/HDI_SparkSession_DeleteStatement.json" + } }, - "SparkJobLog": { - "type": "object", - "properties": { - "id": { - "format": "int32", - "type": "integer" - }, - "from": { - "format": "int32", - "type": "integer" - }, - "size": { - "format": "int32", - "type": "integer" - }, - "log": { - "x-ms-client-name": "logLines", - "uniqueItems": false, - "type": "array", - "items": { - "type": "string" - } - } + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "#/parameters/requestedByParameter" + }, + { + "name": "sessionId", + "in": "path", + "description": "Identifier for the session.", + "required": true, + "type": "integer", + "format": "int32" + }, + { + "name": "statementId", + "in": "path", 
+ "description": "Identifier for the statement.", + "required": true, + "type": "integer", + "format": "int32" + } + ], + "responses": { + "200": { + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkStatementCancellationResult" } + } + } + } + } + }, + "definitions": { + "SparkBatchJobCollection": { + "type": "object", + "properties": { + "from": { + "format": "int32", + "type": "integer" }, - "SparkJobState": { - "type": "object", - "properties": { - "id": { - "format": "int32", - "type": "integer" - }, - "state": { - "type": "string" - } - } + "total": { + "format": "int32", + "type": "integer" }, - "SparkStatementCollection": { - "type": "object", - "properties": { - "statements": { - "uniqueItems": false, - "type": "array", - "items": { - "$ref": "#/definitions/SparkStatement" - } - } - } + "sessions": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkBatchJob" + } + } + } + }, + "SparkBatchJob": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" }, - "SparkStatement": { - "type": "object", - "properties": { - "id": { - "format": "int32", - "type": "integer" - }, - "code": { - "type": "string" - }, - "state": { - "type": "string" - }, - "output": { - "$ref": "#/definitions/SparkStatementOutput" - } - } + "appId": { + "type": "string" }, - "SparkStatementOutput": { - "type": "object", - "properties": { - "status": { - "type": "string" - }, - "execution_count": { - "format": "int32", - "type": "integer" - }, - "data": { - "type": "object" - } - } + "appInfo": { + "type": "object", + "additionalProperties": { + "type": "string" + } }, - "SparkStatementRequest": { - "type": "object", - "properties": { - "code": { - "type": "string" - }, - "kind": { - "type": "string" - } - } + "state": { + "type": "string" }, - "SparkStatementCancellationResult": { - "type": "object", - "properties": { - "msg": { - "x-ms-client-name": "cancelMessage", - "type": "string" - } - } + 
"log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } } + } }, - "parameters": { - "requestedByParameter": { - "name": "X-Requested-By", - "x-ms-parameter-location": "client", - "x-ms-client-name": "requestedBy", - "description": "Add default vaule for X-Requested-By in header.", - "in": "header", - "required": false, - "default": "ambari", + "SparkBatchJobRequest": { + "type": "object", + "properties": { + "file": { + "type": "string" + }, + "proxyUser": { + "type": "string" + }, + "className": { + "type": "string" + }, + "args": { + "x-ms-client-name": "arguments", + "uniqueItems": false, + "type": "array", + "items": { "type": "string" + } }, - "endpointParameter": { - "name": "endpoint", - "in": "path", - "required": true, - "type": "string", - "x-ms-skip-url-encoding": true, - "description": "The cluster dns name against which the job management is to be.", - "x-ms-parameter-location": "client" + "jars": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "pyFiles": { + "x-ms-client-name": "pythonFiles", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "files": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "driverMemory": { + "type": "string" + }, + "driverCores": { + "format": "int32", + "type": "integer" + }, + "executorMemory": { + "type": "string" + }, + "executorCores": { + "format": "int32", + "type": "integer" + }, + "numExecutors": { + "x-ms-client-name": "executorCount", + "format": "int32", + "type": "integer" + }, + "archives": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "queue": { + "type": "string" + }, + "name": { + "type": "string" + }, + "conf": { + "x-ms-client-name": "configuration", + "type": "object", + "additionalProperties": { + "type": "string" + } } + } + }, + "SparkSessionCollection": { + "type": "object", + 
"properties": { + "from": { + "format": "int32", + "type": "integer" + }, + "total": { + "format": "int32", + "type": "integer" + }, + "sessions": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkSessionJob" + } + } + } + }, + "SparkSessionJob": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "appId": { + "type": "string" + }, + "owner": { + "type": "string" + }, + "proxyUser": { + "type": "string" + }, + "kind": { + "type": "string" + }, + "log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "state": { + "type": "string" + }, + "appInfo": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "SparkSessionJobRequest": { + "type": "object", + "properties": { + "kind": { + "type": "string", + "x-ms-enum": { + "name": "SessionJobKind", + "modelAsString": false + }, + "enum": [ + "spark", + "pyspark", + "sparkr", + "sql" + ] + }, + "proxyUser": { + "type": "string" + }, + "jars": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "pyFiles": { + "x-ms-client-name": "pythonFiles", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "files": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "driverMemory": { + "type": "string" + }, + "driverCores": { + "format": "int32", + "type": "integer" + }, + "executorMemory": { + "type": "string" + }, + "executorCores": { + "format": "int32", + "type": "integer" + }, + "numExecutors": { + "x-ms-client-name": "executorCount", + "format": "int32", + "type": "integer" + }, + "archives": { + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + }, + "queue": { + "type": "string" + }, + "name": { + "type": "string" + }, + "conf": { + "x-ms-client-name": "configuration", + "type": "object", + 
"additionalProperties": { + "type": "string" + } + }, + "heartbeatTimeoutInSecond": { + "format": "int32", + "type": "integer" + } + } + }, + "SparkJobLog": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "from": { + "format": "int32", + "type": "integer" + }, + "size": { + "format": "int32", + "type": "integer" + }, + "log": { + "x-ms-client-name": "logLines", + "uniqueItems": false, + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "SparkJobState": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "state": { + "type": "string" + } + } + }, + "SparkStatementCollection": { + "type": "object", + "properties": { + "statements": { + "uniqueItems": false, + "type": "array", + "items": { + "$ref": "#/definitions/SparkStatement" + } + } + } + }, + "SparkStatement": { + "type": "object", + "properties": { + "id": { + "format": "int32", + "type": "integer" + }, + "code": { + "type": "string" + }, + "state": { + "type": "string" + }, + "output": { + "$ref": "#/definitions/SparkStatementOutput" + } + } + }, + "SparkStatementOutput": { + "type": "object", + "properties": { + "status": { + "type": "string" + }, + "execution_count": { + "format": "int32", + "type": "integer" + }, + "data": { + "type": "object" + } + } + }, + "SparkStatementRequest": { + "type": "object", + "properties": { + "code": { + "type": "string" + }, + "kind": { + "type": "string" + } + } + }, + "SparkStatementCancellationResult": { + "type": "object", + "properties": { + "msg": { + "x-ms-client-name": "cancelMessage", + "type": "string" + } + } + } + }, + "parameters": { + "requestedByParameter": { + "name": "X-Requested-By", + "x-ms-parameter-location": "client", + "x-ms-client-name": "requestedBy", + "description": "Add default value for X-Requested-By in header.", + "in": "header", + "required": false, + "default": "ambari", + "type": "string" + }, + "endpointParameter": { + "name": 
"endpoint", + "in": "path", + "required": true, + "type": "string", + "x-ms-skip-url-encoding": true, + "description": "The cluster dns name against which the job management is to be.", + "x-ms-parameter-location": "client" } -} \ No newline at end of file + } +} From ab6745e968b93dc818ba98275f62721883f8aa65 Mon Sep 17 00:00:00 2001 From: Shangwei Sun Date: Mon, 18 May 2020 10:58:23 +0800 Subject: [PATCH 3/6] Update code style. --- .../preview/2018-11-01-preview/job.json | 2 +- .../preview/2018-11-01-preview/livySpark.json | 24 +++++++++---------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json index 7853a754c235..1c4266041ae4 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json @@ -917,7 +917,7 @@ "required": true, "type": "string", "x-ms-skip-url-encoding": true, - "description": "The cluster dns name against which the job management is to be.", + "description": "The endpoint against which the job management is to be.", "x-ms-parameter-location": "client" }, "UserNameParameter": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json index adfdf3851b30..9b220b789f0b 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json @@ -132,7 +132,7 @@ "description": "Gets a single spark batch job.", "operationId": "SparkBatch_Get", "x-ms-examples": { - "Gets a single spark batch job.": { + "Gets a single spark batch job": { 
"$ref": "./examples/HDI_SparkBatch_Get.json" } }, @@ -168,7 +168,7 @@ "description": "Cancels a running spark batch job.", "operationId": "SparkBatch_Delete", "x-ms-examples": { - "Cancels a running spark batch job.": { + "Cancels a running spark batch job": { "$ref": "./examples/HDI_SparkBatch_Delete.json" } }, @@ -260,7 +260,7 @@ "description": "Gets a single spark batch state.", "operationId": "SparkBatch_GetState", "x-ms-examples": { - "Gets a single spark batch state.": { + "Gets a single spark batch state": { "$ref": "./examples/HDI_SparkBatch_GetState.json" } }, @@ -298,7 +298,7 @@ "description": "List all spark sessions.", "operationId": "SparkSession_List", "x-ms-examples": { - "List all spark sessions.": { + "List all spark sessions": { "$ref": "./examples/HDI_SparkSession_List.json" } }, @@ -342,7 +342,7 @@ "description": "Create a new spark session.", "operationId": "SparkSession_Create", "x-ms-examples": { - "Create a new spark session.": { + "Create a new spark session": { "$ref": "./examples/HDI_SparkSession_Create.json" } }, @@ -384,7 +384,7 @@ "description": "Gets a single spark session.", "operationId": "SparkSession_Get", "x-ms-examples": { - "Gets a single spark session.": { + "Gets a single spark session": { "$ref": "./examples/HDI_SparkSession_Get.json" } }, @@ -420,7 +420,7 @@ "description": "Cancels a running spark session.", "operationId": "SparkSession_Delete", "x-ms-examples": { - "Cancels a running spark session.": { + "Cancels a running spark session": { "$ref": "./examples/HDI_SparkSession_Delete.json" } }, @@ -550,7 +550,7 @@ "description": "Gets a list of statements within a spark session.", "operationId": "SparkSession_ListStatements", "x-ms-examples": { - "Gets a list of statements within a spark session.": { + "Gets a list of statements within a spark session": { "$ref": "./examples/HDI_SparkSession_ListStatements.json" } }, @@ -586,7 +586,7 @@ "description": "Create a statement within a spark session.", "operationId": 
"SparkSession_CreateStatement", "x-ms-examples": { - "Create a statement within a spark session.": { + "Create a statement within a spark session": { "$ref": "./examples/HDI_SparkSession_CreateStatement.json" } }, @@ -636,7 +636,7 @@ "description": "Gets a single statement within a spark session.", "operationId": "SparkSession_GetStatement", "x-ms-examples": { - "Gets a single statement within a spark session.": { + "Gets a single statement within a spark session": { "$ref": "./examples/HDI_SparkSession_GetStatement.json" } }, @@ -682,7 +682,7 @@ "description": "Kill a statement within a session.", "operationId": "SparkSession_DeleteStatement", "x-ms-examples": { - "Kill a statement within a session.": { + "Kill a statement within a session": { "$ref": "./examples/HDI_SparkSession_DeleteStatement.json" } }, @@ -1121,7 +1121,7 @@ "required": true, "type": "string", "x-ms-skip-url-encoding": true, - "description": "The cluster dns name against which the job management is to be.", + "description": "The endpoint against which the job management is to be.", "x-ms-parameter-location": "client" } } From e405dc8d3280b101e28a98921ca9b83698d84110 Mon Sep 17 00:00:00 2001 From: Shangwei Sun Date: Mon, 25 May 2020 15:37:10 +0800 Subject: [PATCH 4/6] fix comments --- .../examples/HDI_SparkBatch_Create.json | 28 ++++-- .../examples/HDI_SparkBatch_Delete.json | 6 +- .../examples/HDI_SparkBatch_Get.json | 28 ++++-- .../examples/HDI_SparkBatch_GetLogs.json | 17 +++- .../examples/HDI_SparkBatch_GetState.json | 4 +- .../examples/HDI_SparkBatch_List.json | 29 +++++- .../examples/HDI_SparkSession_Create.json | 21 ++-- .../HDI_SparkSession_CreateStatement.json | 15 +-- .../examples/HDI_SparkSession_Delete.json | 6 +- .../examples/HDI_SparkSession_Get.json | 28 ++++-- .../examples/HDI_SparkSession_GetLogs.json | 21 +++- .../examples/HDI_SparkSession_GetState.json | 6 +- .../HDI_SparkSession_GetStatement.json | 19 ++-- .../examples/HDI_SparkSession_List.json | 31 +++++- 
.../HDI_SparkSession_ListStatements.json | 31 +++++- .../preview/2018-11-01-preview/job.json | 2 +- .../preview/2018-11-01-preview/livySpark.json | 95 +++++++++---------- 17 files changed, 268 insertions(+), 119 deletions(-) diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json index 0a9a9656da45..9090b5329746 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json @@ -3,10 +3,15 @@ "endpoint": "cluster.azurehdinsight.net", "X-Requested-By": "admin", "sparkBatchJobRequest": { - "file": "abfss://", + "file": "wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/microsoft-spark-2.3.x-0.6.0.jar", "proxyUser": "", - "className": "classname", - "args": [], + "className": "org.apache.spark.deploy.dotnet.DotnetRunner12", + "args": [ + "wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/wordcount.zip", + "WordCount", + "wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/user/sshroot/shakespeare.txt", + "wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/user/sshroot/result_livy.txt" + ], "jars": [], "pyFiles": [], "files": [], @@ -24,11 +29,18 @@ "responses": { "201": { "body": { - "id": 1, - "appId": "fill in here", - "appInfo": null, - "state": "the state", - "log": [] + "id": 46, + "state": "starting", + "appId": null, + "appInfo": { + "driverLogUrl": null, + "sparkUiUrl": null + }, + "log": [ + "stdout: ", + "\nstderr: ", + "\nYARN Diagnostics: " + ] } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json index cf79a1c21e01..9acc05bb98ec 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json @@ -5,6 +5,10 @@ "batchId": 123 }, "responses": { - "200": {} + "200": { + "body":{ + "msg": "deleted" + } + } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json index 7f8f01359892..8834c36eba62 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json @@ -1,18 +1,30 @@ { "parameters": { "endpoint": "cluster.azurehdinsight.net", - "from": 1, - "size": 10, - "batchId": 123 + "batchId": 45 }, "responses": { "200": { "body": { - "id": 123, - "appId": "fill in here", - "appInfo": null, - "state": "the state", - "log": [] + "id": 45, + "appId": null, + "appInfo": { + "driverLogUrl": null, + "sparkUiUrl": null + }, + "state": "starting", + "log": [ + "\t queue: default", + "\t start time: 1590386942572", + "\t final status: UNDEFINED", + "\t tracking URL: https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0005/", + "\t user: livy", + "20/05/25 06:09:02 INFO ShutdownHookManager: Shutdown hook called", + "20/05/25 06:09:02 INFO ShutdownHookManager: Deleting directory /tmp/spark-9ddb6d73-f204-44a4-83e8-afdbd4ce1a0b", + "20/05/25 06:09:02 INFO ShutdownHookManager: Deleting directory /tmp/spark-ef198a0d-de24-4da9-aeb5-c1b78c5fdd5c", + "\nstderr: ", + 
"\nYARN Diagnostics: " + ] } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json index fe7ed613127a..19b0c392ee7e 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json @@ -1,17 +1,28 @@ { "parameters": { "endpoint": "cluster.azurehdinsight.net", - "batchId": 123, + "batchId": 45, "from": 1, "size": 10 }, "responses": { "200": { "body": { - "id": 123, + "id": 45, "from": 1, "size": 10, - "log": [] + "log": [ + "SLF4J: Class path contains multiple SLF4J bindings.", + "SLF4J: Found binding in [jar:file:/usr/hdp/2.6.5.3015-8/spark2/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]", + "SLF4J: Found binding in [jar:file:/usr/hdp/2.6.5.3015-8/spark_llap/spark-llap-assembly-1.0.0.2.6.5.3015-8.jar!/org/slf4j/impl/StaticLoggerBinder.class]", + "SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.", + "SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]", + "Warning: Master yarn-cluster is deprecated since 2.0. Please use master \"yarn\" with specified deploy mode instead.", + "20/05/25 06:08:59 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable", + "Warning: Skip remote jar wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/microsoft-spark-2.3.x-0.6.0.jar.", + "20/05/25 06:08:59 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties", + "20/05/25 06:08:59 INFO WasbAzureIaasSink: Init starting." 
+ ] } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json index fc6f75ffe83a..c3444effea60 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json @@ -1,12 +1,12 @@ { "parameters": { "endpoint": "cluster.azurehdinsight.net", - "batchId": 123 + "batchId": 45 }, "responses": { "200": { "body": { - "id": 123, + "id": 45, "state": "starting" } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json index 2f213841c7d0..3c7db0140439 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json @@ -2,15 +2,36 @@ "parameters": { "endpoint": "clustername.azurehdinsight.net", "from": 0, - "size": 2, - "detailed": true + "size": 1 }, "responses": { "200": { "body": { "from": 0, - "total": 2, - "sessions": [] + "total": 1, + "sessions": [ + { + "id": 44, + "state": "starting", + "appId": null, + "appInfo": { + "driverLogUrl": null, + "sparkUiUrl": null + }, + "log": [ + "20/05/25 04:59:18 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... 
using builtin-java classes where applicable", + "Warning: Skip remote jar wasbs://containercmk002@shangweistorageaccount.blob.core.windows.net/microsoft-spark-2.3.x-0.6.0.jar.", + "20/05/25 04:59:18 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties", + "20/05/25 04:59:18 INFO WasbAzureIaasSink: Init starting.", + "20/05/25 04:59:18 INFO AzureIaasSink: Init starting. Initializing MdsLogger.", + "20/05/25 04:59:18 INFO AzureIaasSink: Init completed.", + "20/05/25 04:59:18 INFO WasbAzureIaasSink: Init completed.", + "20/05/25 04:59:18 INFO MetricsSinkAdapter: Sink azurefs2 started", + "\nstderr: ", + "\nYARN Diagnostics: " + ] + } + ] } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json index 00876d23d0d9..cc0c91872f3e 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json @@ -23,14 +23,21 @@ "responses": { "201": { "body": { - "id": 1, - "appId": "fill in here", - "owner": "admin", - "proxyUser": "", + "id": 34, + "appId": null, + "owner": null, + "proxyUser": null, "kind": "spark", - "state": "the state", - "log": [], - "appInfo": null + "state": "starting", + "log": [ + "stdout: ", + "\nstderr: ", + "\nYARN Diagnostics: " + ], + "appInfo": { + "driverLogUrl": null, + "sparkUiUrl": null + } } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json index b58a580649ff..0a288511d8aa 100644 --- 
a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json @@ -2,19 +2,20 @@ "parameters": { "endpoint": "cluster.azurehdinsight.net", "X-Requested-By": "admin", - "sessionId": 123, + "sessionId": 34, "sparkStatementRequest": { - "code": "fill in here", - "kind": "fill in here" + "code": "1 + 1", + "kind": "spark" } }, "responses": { "201": { "body": { - "id": 123, - "code": "fill in here", - "state": "fill in here", - "output": null + "id": 0, + "code": "1 + 1", + "state": "waiting", + "output": null, + "progress": 0.0 } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json index 767fc13e311e..6b7e9b63362f 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json @@ -5,6 +5,10 @@ "sessionId": 123 }, "responses": { - "200": {} + "200": { + "body":{ + "msg": "deleted" + } + } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json index bf2e6cc586b3..5008f9c51d27 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json @@ -6,14 +6,28 @@ "responses": { "200": { "body": { - "id": 
123, - "appId": "fill in here", - "owner": "admin", - "proxyUser": "", + "id": 34, + "appId": "application_1590286636717_0004", + "owner": null, + "proxyUser": null, + "state": "idle", "kind": "spark", - "state": "the state", - "log": [], - "appInfo": null + "appInfo": { + "driverLogUrl": "http://wn2-shangw.4jhwm2oxfmoehnhvt2gwwtriqb.bx.internal.cloudapp.net:30060/node/containerlogs/container_e06_1590286636717_0004_01_000001/livy", + "sparkUiUrl": "https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0004/" + }, + "log": [ + "\t ApplicationMaster RPC port: -1", + "\t queue: default", + "\t start time: 1590384019312", + "\t final status: UNDEFINED", + "\t tracking URL: https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0004/", + "\t user: livy", + "20/05/25 05:20:19 INFO ShutdownHookManager: Shutdown hook called", + "20/05/25 05:20:19 INFO ShutdownHookManager: Deleting directory /tmp/spark-1557a190-880c-422d-a744-ce31d0fefa1d", + "20/05/25 05:20:19 INFO ShutdownHookManager: Deleting directory /tmp/spark-12bb0346-a2c9-4b9d-9f9b-feb91e30f554", + "\nYARN Diagnostics: " + ] } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json index 0691124d3d49..d2846f9b62ae 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json @@ -1,17 +1,28 @@ { "parameters": { "endpoint": "cluster.azurehdinsight.net", - "sessionId": 123, + "sessionId": 34, "from": 1, "size": 10 }, "responses": { "200": { "body": { - "id": 123, - "from": 1, - "size": 10, - "log": [] + "id": 34, + "from": 0, + "total": 57, + "log": [ + 
"stdout: ", + "\nstderr: ", + "SLF4J: Class path contains multiple SLF4J bindings.", + "SLF4J: Found binding in [jar:file:/usr/hdp/2.6.5.3015-8/spark2/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]", + "SLF4J: Found binding in [jar:file:/usr/hdp/2.6.5.3015-8/spark_llap/spark-llap-assembly-1.0.0.2.6.5.3015-8.jar!/org/slf4j/impl/StaticLoggerBinder.class]", + "SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.", + "SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]", + "Warning: Master yarn-cluster is deprecated since 2.0. Please use master \"yarn\" with specified deploy mode instead.", + "20/05/25 05:20:14 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable", + "20/05/25 05:20:15 INFO MetricsConfig: loaded properties from hadoop-metrics2-azure-file-system.properties" + ] } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json index 47c967123405..3c73a94ce528 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json @@ -1,13 +1,13 @@ { "parameters": { "endpoint": "cluster.azurehdinsight.net", - "sessionId": 123 + "sessionId": 34 }, "responses": { "200": { "body": { - "id": 123, - "state": "starting" + "id": 34, + "state": "idle" } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json index 
609211da02a8..1c45ad23b01c 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json @@ -1,16 +1,23 @@ { "parameters": { "endpoint": "cluster.azurehdinsight.net", - "sessionId": 123, - "statementId": 123 + "sessionId": 34, + "statementId": 1 }, "responses": { "200": { "body": { - "id": 123, - "code": "fill in here", - "state": "fill in here", - "output": null + "id": 0, + "code": "1 + 1", + "state": "available", + "output": { + "status": "ok", + "execution_count": 0, + "data": { + "text/plain": "res0: Int = 2" + } + }, + "progress": 1.0 } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json index cd5d22315212..30f7a88887a4 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json @@ -2,14 +2,39 @@ "parameters": { "endpoint": "cluster.azurehdinsight.net", "from": 0, - "size": 2 + "size": 1 }, "responses": { "200": { "body": { "from": 0, - "total": 2, - "sessions": [] + "total": 1, + "sessions": [ + { + "id": 34, + "appId": "application_1590286636717_0004", + "owner": null, + "proxyUser": null, + "state": "idle", + "kind": "spark", + "appInfo": { + "driverLogUrl": "http://wn2-shangw.4jhwm2oxfmoehnhvt2gwwtriqb.bx.internal.cloudapp.net:30060/node/containerlogs/container_e06_1590286636717_0004_01_000001/livy", + "sparkUiUrl": "https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0004/" + }, + "log": [ + "\t ApplicationMaster RPC port: 
-1", + "\t queue: default", + "\t start time: 1590384019312", + "\t final status: UNDEFINED", + "\t tracking URL: https://shangwei-hdi-cmk-ps.azurehdinsight.net/yarnui/hn/proxy/application_1590286636717_0004/", + "\t user: livy", + "20/05/25 05:20:19 INFO ShutdownHookManager: Shutdown hook called", + "20/05/25 05:20:19 INFO ShutdownHookManager: Deleting directory /tmp/spark-1557a190-880c-422d-a744-ce31d0fefa1d", + "20/05/25 05:20:19 INFO ShutdownHookManager: Deleting directory /tmp/spark-12bb0346-a2c9-4b9d-9f9b-feb91e30f554", + "\nYARN Diagnostics: " + ] + } + ] } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json index b7c1dc5acd02..3e447a4f4318 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json @@ -1,12 +1,39 @@ { "parameters": { "endpoint": "cluster.azurehdinsight.net", - "sessionId": 123 + "sessionId": 34 }, "responses": { "200": { "body": { - "statements": [] + "statements": [ + { + "id": 0, + "code": "1 + 1", + "state": "available", + "output": { + "status": "ok", + "execution_count": 0, + "data": { + "text/plain": "res0: Int = 2" + } + }, + "progress": 1.0 + }, + { + "id": 1, + "code": "1 + 1", + "state": "available", + "output": { + "status": "ok", + "execution_count": 1, + "data": { + "text/plain": "res1: Int = 2" + } + }, + "progress": 1.0 + } + ] } } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json index 1c4266041ae4..e04e428c3634 100644 --- 
a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json @@ -917,7 +917,7 @@ "required": true, "type": "string", "x-ms-skip-url-encoding": true, - "description": "The endpoint against which the job management is to be.", + "description": "The cluster endpoint, for example https://clustername.azurehdinsight.net.", "x-ms-parameter-location": "client" }, "UserNameParameter": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json index 9b220b789f0b..59fe04a3fb30 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json @@ -44,15 +44,12 @@ "SparkBatch" ], "description": "List all spark batch jobs", - "operationId": "SparkBatch_List", + "operationId": "SparkBatches_List", "x-ms-examples": { "List all spark batch jobs": { "$ref": "./examples/HDI_SparkBatch_List.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -88,7 +85,7 @@ "SparkBatch" ], "description": "Create a new spark batch job.", - "operationId": "SparkBatch_Create", + "operationId": "SparkBatches_Create", "x-ms-examples": { "Create a spark batch job": { "$ref": "./examples/HDI_SparkBatch_Create.json" @@ -130,15 +127,12 @@ "SparkBatch" ], "description": "Gets a single spark batch job.", - "operationId": "SparkBatch_Get", + "operationId": "SparkBatches_Get", "x-ms-examples": { "Gets a single spark batch job": { "$ref": "./examples/HDI_SparkBatch_Get.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -166,7 +160,7 @@ "SparkBatch" ], "description": "Cancels a running spark batch job.", - 
"operationId": "SparkBatch_Delete", + "operationId": "SparkBatches_Delete", "x-ms-examples": { "Cancels a running spark batch job": { "$ref": "./examples/HDI_SparkBatch_Delete.json" @@ -193,7 +187,10 @@ ], "responses": { "200": { - "description": "Success" + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobDeletedResult" + } } } } @@ -204,15 +201,12 @@ "SparkBatch" ], "description": "Gets a single spark batch job logs.", - "operationId": "SparkBatch_GetLogs", + "operationId": "SparkBatches_GetLogs", "x-ms-examples": { "Gets a single spark batch job logs": { "$ref": "./examples/HDI_SparkBatch_GetLogs.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -258,15 +252,12 @@ "SparkBatch" ], "description": "Gets a single spark batch state.", - "operationId": "SparkBatch_GetState", + "operationId": "SparkBatches_GetState", "x-ms-examples": { "Gets a single spark batch state": { "$ref": "./examples/HDI_SparkBatch_GetState.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -296,15 +287,12 @@ "SparkSession" ], "description": "List all spark sessions.", - "operationId": "SparkSession_List", + "operationId": "SparkSessions_List", "x-ms-examples": { "List all spark sessions": { "$ref": "./examples/HDI_SparkSession_List.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -340,7 +328,7 @@ "SparkSession" ], "description": "Create a new spark session.", - "operationId": "SparkSession_Create", + "operationId": "SparkSessions_Create", "x-ms-examples": { "Create a new spark session": { "$ref": "./examples/HDI_SparkSession_Create.json" @@ -382,15 +370,12 @@ "SparkSession" ], "description": "Gets a single spark session.", - "operationId": "SparkSession_Get", + "operationId": "SparkSessions_Get", "x-ms-examples": { "Gets a single spark session": { "$ref": "./examples/HDI_SparkSession_Get.json" } }, - "consumes": [ - "application/json" - ], 
"produces": [ "application/json" ], @@ -418,7 +403,7 @@ "SparkSession" ], "description": "Cancels a running spark session.", - "operationId": "SparkSession_Delete", + "operationId": "SparkSessions_Delete", "x-ms-examples": { "Cancels a running spark session": { "$ref": "./examples/HDI_SparkSession_Delete.json" @@ -445,7 +430,10 @@ ], "responses": { "200": { - "description": "Success" + "description": "Success", + "schema": { + "$ref": "#/definitions/SparkJobDeletedResult" + } } } } @@ -456,15 +444,12 @@ "SparkSession" ], "description": "Gets a single spark session job logs.", - "operationId": "SparkSession_GetLogs", + "operationId": "SparkSessions_GetLogs", "x-ms-examples": { "Gets a single spark session job logs": { "$ref": "./examples/HDI_SparkSession_GetLogs.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -510,15 +495,12 @@ "SparkSession" ], "description": "Gets a single spark session job state.", - "operationId": "SparkSession_GetState", + "operationId": "SparkSessions_GetState", "x-ms-examples": { "Gets a single spark session job state": { "$ref": "./examples/HDI_SparkSession_GetState.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -548,15 +530,12 @@ "SparkSession" ], "description": "Gets a list of statements within a spark session.", - "operationId": "SparkSession_ListStatements", + "operationId": "SparkSessions_ListStatements", "x-ms-examples": { "Gets a list of statements within a spark session": { "$ref": "./examples/HDI_SparkSession_ListStatements.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -584,7 +563,7 @@ "SparkSession" ], "description": "Create a statement within a spark session.", - "operationId": "SparkSession_CreateStatement", + "operationId": "SparkSessions_CreateStatement", "x-ms-examples": { "Create a statement within a spark session": { "$ref": "./examples/HDI_SparkSession_CreateStatement.json" @@ -634,15 
+613,12 @@ "SparkSession" ], "description": "Gets a single statement within a spark session.", - "operationId": "SparkSession_GetStatement", + "operationId": "SparkSessions_GetStatement", "x-ms-examples": { "Gets a single statement within a spark session": { "$ref": "./examples/HDI_SparkSession_GetStatement.json" } }, - "consumes": [ - "application/json" - ], "produces": [ "application/json" ], @@ -680,7 +656,7 @@ "SparkSession" ], "description": "Kill a statement within a session.", - "operationId": "SparkSession_DeleteStatement", + "operationId": "SparkSessions_DeleteStatement", "x-ms-examples": { "Kill a statement within a session": { "$ref": "./examples/HDI_SparkSession_DeleteStatement.json" @@ -922,7 +898,7 @@ "type": "string", "x-ms-enum": { "name": "SessionJobKind", - "modelAsString": false + "modelAsString": true }, "enum": [ "spark", @@ -1016,6 +992,10 @@ "format": "int32", "type": "integer" }, + "total": { + "format": "int64", + "type": "integer" + }, "log": { "x-ms-client-name": "logLines", "uniqueItems": false, @@ -1065,6 +1045,10 @@ }, "output": { "$ref": "#/definitions/SparkStatementOutput" + }, + "progress": { + "type": "number", + "format": "double" } } }, @@ -1102,12 +1086,21 @@ "type": "string" } } + }, + "SparkJobDeletedResult": { + "type": "object", + "properties": { + "msg": { + "x-ms-client-name": "deletedMessage", + "type": "string" + } + } } }, "parameters": { "requestedByParameter": { "name": "X-Requested-By", - "x-ms-parameter-location": "client", + "x-ms-parameter-location": "method", "x-ms-client-name": "requestedBy", "description": "Add default value for X-Requested-By in header.", "in": "header", @@ -1121,7 +1114,7 @@ "required": true, "type": "string", "x-ms-skip-url-encoding": true, - "description": "The endpoint against which the job management is to be.", + "description": "The cluster endpoint, for example https://clustername.azurehdinsight.net.", "x-ms-parameter-location": "client" } } From 830b3072d532f25778a27e544b9a57809c21b9b2 
Mon Sep 17 00:00:00 2001 From: Shangwei Sun Date: Mon, 25 May 2020 15:44:44 +0800 Subject: [PATCH 5/6] prettier check --- .../2018-11-01-preview/examples/HDI_SparkBatch_Delete.json | 2 +- .../2018-11-01-preview/examples/HDI_SparkSession_Delete.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json index 9acc05bb98ec..c142db23107b 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json @@ -6,7 +6,7 @@ }, "responses": { "200": { - "body":{ + "body": { "msg": "deleted" } } diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json index 6b7e9b63362f..1de611517d3d 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json @@ -6,7 +6,7 @@ }, "responses": { "200": { - "body":{ + "body": { "msg": "deleted" } } From 1b645cbcbc97df8dec3a84aa1fb9c5e6e34ccbd9 Mon Sep 17 00:00:00 2001 From: Shangwei Sun Date: Fri, 29 May 2020 17:37:27 +0800 Subject: [PATCH 6/6] Update the code in order to unify the naming rules. 
--- ...ete.json => HDI_Job_DeleteSparkBatch.json} | 0 ...son => HDI_Job_DeleteSparkSessionJob.json} | 0 ...n => HDI_Job_DeleteSparkStatementJob.json} | 0 ...Get.json => HDI_Job_GetSparkBatchJob.json} | 0 ...ogs.json => HDI_Job_GetSparkBatchLog.json} | 0 ...e.json => HDI_Job_GetSparkBatchState.json} | 0 ...t.json => HDI_Job_GetSparkSessionJob.json} | 0 ...s.json => HDI_Job_GetSparkSessionLog.json} | 0 ...json => HDI_Job_GetSparkSessionState.json} | 0 ...json => HDI_Job_GetSparkStatementJob.json} | 0 ...st.json => HDI_Job_ListSparkBatchJob.json} | 0 ....json => HDI_Job_ListSparkSessionJob.json} | 0 ...son => HDI_Job_ListSparkStatementJob.json} | 0 ....json => HDI_Job_SubmitSparkBatchJob.json} | 0 ...son => HDI_Job_SubmitSparkSessionJob.json} | 0 ...n => HDI_Job_SubmitSparkStatementJob.json} | 0 .../preview/2018-11-01-preview/job.json | 2 +- .../preview/2018-11-01-preview/livySpark.json | 98 +++++++++---------- 18 files changed, 50 insertions(+), 50 deletions(-) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkBatch_Delete.json => HDI_Job_DeleteSparkBatch.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_Delete.json => HDI_Job_DeleteSparkSessionJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_DeleteStatement.json => HDI_Job_DeleteSparkStatementJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkBatch_Get.json => HDI_Job_GetSparkBatchJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkBatch_GetLogs.json => HDI_Job_GetSparkBatchLog.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkBatch_GetState.json => HDI_Job_GetSparkBatchState.json} 
(100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_Get.json => HDI_Job_GetSparkSessionJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_GetLogs.json => HDI_Job_GetSparkSessionLog.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_GetState.json => HDI_Job_GetSparkSessionState.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_GetStatement.json => HDI_Job_GetSparkStatementJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkBatch_List.json => HDI_Job_ListSparkBatchJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_List.json => HDI_Job_ListSparkSessionJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_ListStatements.json => HDI_Job_ListSparkStatementJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkBatch_Create.json => HDI_Job_SubmitSparkBatchJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_Create.json => HDI_Job_SubmitSparkSessionJob.json} (100%) rename specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/{HDI_SparkSession_CreateStatement.json => HDI_Job_SubmitSparkStatementJob.json} (100%) diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkBatch.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Delete.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkBatch.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkSessionJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Delete.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkSessionJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_DeleteStatement.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkStatementJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_DeleteStatement.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_DeleteSparkStatementJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Get.json rename to 
specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchLog.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetLogs.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchLog.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchState.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_GetState.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkBatchState.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Get.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionLog.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetLogs.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionLog.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionState.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetState.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkSessionState.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkStatementJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_GetStatement.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_GetSparkStatementJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkBatchJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_List.json rename to 
specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkBatchJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkSessionJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_List.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkSessionJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkStatementJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_ListStatements.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_ListSparkStatementJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkBatchJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkBatch_Create.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkBatchJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json 
b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkSessionJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_Create.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkSessionJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkStatementJob.json similarity index 100% rename from specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_SparkSession_CreateStatement.json rename to specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/examples/HDI_Job_SubmitSparkStatementJob.json diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json index e04e428c3634..544f3e29c9c8 100644 --- a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/job.json @@ -1,7 +1,7 @@ { "swagger": "2.0", "info": { - "title": "HDInsightJobManagementClient", + "title": "HDInsightJobClient", "description": "The HDInsight Job Client.", "version": "2018-11-01-preview", "x-ms-code-generation-settings": { diff --git a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json index 59fe04a3fb30..d5cbc84df867 100644 --- 
a/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json +++ b/specification/hdinsight/data-plane/Microsoft.HDInsight/preview/2018-11-01-preview/livySpark.json @@ -1,7 +1,7 @@ { "swagger": "2.0", "info": { - "title": "HDInsightJobManagementClient", + "title": "HDInsightJobClient", "description": "The HDInsight Job Client.", "version": "2018-11-01-preview", "x-ms-code-generation-settings": { @@ -41,13 +41,13 @@ "/livy/batches": { "get": { "tags": [ - "SparkBatch" + "Job" ], "description": "List all spark batch jobs", - "operationId": "SparkBatches_List", + "operationId": "Job_ListSparkBatchJob", "x-ms-examples": { "List all spark batch jobs": { - "$ref": "./examples/HDI_SparkBatch_List.json" + "$ref": "./examples/HDI_Job_ListSparkBatchJob.json" } }, "produces": [ @@ -82,13 +82,13 @@ }, "post": { "tags": [ - "SparkBatch" + "Job" ], "description": "Create a new spark batch job.", - "operationId": "SparkBatches_Create", + "operationId": "Job_SubmitSparkBatchJob", "x-ms-examples": { "Create a spark batch job": { - "$ref": "./examples/HDI_SparkBatch_Create.json" + "$ref": "./examples/HDI_Job_SubmitSparkBatchJob.json" } }, "consumes": [ @@ -124,13 +124,13 @@ "/livy/batches/{batchId}": { "get": { "tags": [ - "SparkBatch" + "Job" ], "description": "Gets a single spark batch job.", - "operationId": "SparkBatches_Get", + "operationId": "Job_GetSparkBatchJob", "x-ms-examples": { "Gets a single spark batch job": { - "$ref": "./examples/HDI_SparkBatch_Get.json" + "$ref": "./examples/HDI_Job_GetSparkBatchJob.json" } }, "produces": [ @@ -157,13 +157,13 @@ }, "delete": { "tags": [ - "SparkBatch" + "Job" ], "description": "Cancels a running spark batch job.", - "operationId": "SparkBatches_Delete", + "operationId": "Job_DeleteSparkBatch", "x-ms-examples": { "Cancels a running spark batch job": { - "$ref": "./examples/HDI_SparkBatch_Delete.json" + "$ref": "./examples/HDI_Job_DeleteSparkBatch.json" } }, "consumes": [ @@ -198,13 +198,13 @@ 
"/livy/batches/{batchId}/log": { "get": { "tags": [ - "SparkBatch" + "Job" ], "description": "Gets a single spark batch job logs.", - "operationId": "SparkBatches_GetLogs", + "operationId": "Job_GetSparkBatchLog", "x-ms-examples": { "Gets a single spark batch job logs": { - "$ref": "./examples/HDI_SparkBatch_GetLogs.json" + "$ref": "./examples/HDI_Job_GetSparkBatchLog.json" } }, "produces": [ @@ -249,13 +249,13 @@ "/livy/batches/{batchId}/state": { "get": { "tags": [ - "SparkBatch" + "Job" ], "description": "Gets a single spark batch state.", - "operationId": "SparkBatches_GetState", + "operationId": "Job_GetSparkBatchState", "x-ms-examples": { "Gets a single spark batch state": { - "$ref": "./examples/HDI_SparkBatch_GetState.json" + "$ref": "./examples/HDI_Job_GetSparkBatchState.json" } }, "produces": [ @@ -284,13 +284,13 @@ "/livy/sessions": { "get": { "tags": [ - "SparkSession" + "Job" ], "description": "List all spark sessions.", - "operationId": "SparkSessions_List", + "operationId": "Job_ListSparkSessionJob", "x-ms-examples": { "List all spark sessions": { - "$ref": "./examples/HDI_SparkSession_List.json" + "$ref": "./examples/HDI_Job_ListSparkSessionJob.json" } }, "produces": [ @@ -325,13 +325,13 @@ }, "post": { "tags": [ - "SparkSession" + "Job" ], "description": "Create a new spark session.", - "operationId": "SparkSessions_Create", + "operationId": "Job_SubmitSparkSessionJob", "x-ms-examples": { "Create a new spark session": { - "$ref": "./examples/HDI_SparkSession_Create.json" + "$ref": "./examples/HDI_Job_SubmitSparkSessionJob.json" } }, "consumes": [ @@ -367,13 +367,13 @@ "/livy/sessions/{sessionId}": { "get": { "tags": [ - "SparkSession" + "Job" ], "description": "Gets a single spark session.", - "operationId": "SparkSessions_Get", + "operationId": "Job_GetSparkSessionJob", "x-ms-examples": { "Gets a single spark session": { - "$ref": "./examples/HDI_SparkSession_Get.json" + "$ref": "./examples/HDI_Job_GetSparkSessionJob.json" } }, "produces": [ @@ 
-400,13 +400,13 @@ }, "delete": { "tags": [ - "SparkSession" + "Job" ], "description": "Cancels a running spark session.", - "operationId": "SparkSessions_Delete", + "operationId": "Job_DeleteSparkSessionJob", "x-ms-examples": { "Cancels a running spark session": { - "$ref": "./examples/HDI_SparkSession_Delete.json" + "$ref": "./examples/HDI_Job_DeleteSparkSessionJob.json" } }, "consumes": [ @@ -441,13 +441,13 @@ "/livy/sessions/{sessionId}/log": { "get": { "tags": [ - "SparkSession" + "Job" ], "description": "Gets a single spark session job logs.", - "operationId": "SparkSessions_GetLogs", + "operationId": "Job_GetSparkSessionLog", "x-ms-examples": { "Gets a single spark session job logs": { - "$ref": "./examples/HDI_SparkSession_GetLogs.json" + "$ref": "./examples/HDI_Job_GetSparkSessionLog.json" } }, "produces": [ @@ -492,13 +492,13 @@ "/livy/sessions/{sessionId}/state": { "get": { "tags": [ - "SparkSession" + "Job" ], "description": "Gets a single spark session job state.", - "operationId": "SparkSessions_GetState", + "operationId": "Job_GetSparkSessionState", "x-ms-examples": { "Gets a single spark session job state": { - "$ref": "./examples/HDI_SparkSession_GetState.json" + "$ref": "./examples/HDI_Job_GetSparkSessionState.json" } }, "produces": [ @@ -527,13 +527,13 @@ "/livy/sessions/{sessionId}/statements": { "get": { "tags": [ - "SparkSession" + "Job" ], "description": "Gets a list of statements within a spark session.", - "operationId": "SparkSessions_ListStatements", + "operationId": "Job_ListSparkStatementJob", "x-ms-examples": { "Gets a list of statements within a spark session": { - "$ref": "./examples/HDI_SparkSession_ListStatements.json" + "$ref": "./examples/HDI_Job_ListSparkStatementJob.json" } }, "produces": [ @@ -560,13 +560,13 @@ }, "post": { "tags": [ - "SparkSession" + "Job" ], "description": "Create a statement within a spark session.", - "operationId": "SparkSessions_CreateStatement", + "operationId": "Job_SubmitSparkStatementJob", 
"x-ms-examples": { "Create a statement within a spark session": { - "$ref": "./examples/HDI_SparkSession_CreateStatement.json" + "$ref": "./examples/HDI_Job_SubmitSparkStatementJob.json" } }, "consumes": [ @@ -610,13 +610,13 @@ "/livy/sessions/{sessionId}/statements/{statementId}": { "get": { "tags": [ - "SparkSession" + "Job" ], "description": "Gets a single statement within a spark session.", - "operationId": "SparkSessions_GetStatement", + "operationId": "Job_GetSparkStatementJob", "x-ms-examples": { "Gets a single statement within a spark session": { - "$ref": "./examples/HDI_SparkSession_GetStatement.json" + "$ref": "./examples/HDI_Job_GetSparkStatementJob.json" } }, "produces": [ @@ -653,13 +653,13 @@ "/livy/sessions/{sessionId}/statements/{statementId}/cancel": { "post": { "tags": [ - "SparkSession" + "Job" ], "description": "Kill a statement within a session.", - "operationId": "SparkSessions_DeleteStatement", + "operationId": "Job_DeleteSparkStatementJob", "x-ms-examples": { "Kill a statement within a session": { - "$ref": "./examples/HDI_SparkSession_DeleteStatement.json" + "$ref": "./examples/HDI_Job_DeleteSparkStatementJob.json" } }, "consumes": [