diff --git a/readme.az.common.md b/readme.az.common.md index e3a65d46a..d0b904618 100644 --- a/readme.az.common.md +++ b/readme.az.common.md @@ -1,6 +1,7 @@ # configuration for az common ``` yaml $(az) +extension-mode: experimental cli: naming: diff --git a/src/plugins/azgenerator/CodeModelAz.ts b/src/plugins/azgenerator/CodeModelAz.ts index d58c5bb32..510a650a2 100644 --- a/src/plugins/azgenerator/CodeModelAz.ts +++ b/src/plugins/azgenerator/CodeModelAz.ts @@ -65,6 +65,7 @@ export interface CodeModelAz Extension_TestScenario: any; Extension_ClientSubscriptionBound: boolean; Extension_ClientBaseUrlBound: boolean; + Extension_Mode: string; SelectFirstCommandGroup(): boolean; SelectNextCommandGroup(): boolean; @@ -73,6 +74,7 @@ export interface CodeModelAz CommandGroup_Name: string; CommandGroup_Help: string; CommandGroup_DefaultName: string; + CommandGroup_HasShowCommand: boolean; SelectFirstCommand(): boolean; SelectNextCommand(): boolean; diff --git a/src/plugins/azgenerator/CodeModelAzImpl.ts b/src/plugins/azgenerator/CodeModelAzImpl.ts index f7af58f89..f798eecfe 100644 --- a/src/plugins/azgenerator/CodeModelAzImpl.ts +++ b/src/plugins/azgenerator/CodeModelAzImpl.ts @@ -159,13 +159,12 @@ export class CodeModelCliImpl implements CodeModelAz { if (this.SelectFirstMethod()) { let id_groups = new Map(); id_groups = parseResourceId(this.Request.protocol.http.path); - + let hasName = false; if (this.SelectFirstMethodParameter()) { do { let parameters = this.MethodParameter; let defaultName = parameters.language['cli']['cliKey']; let defaultToMatch = '{' + defaultName + '}'; - if(!isNullOrUndefined(id_groups)) { for(let k of id_groups.entries()) { if(k[1] == defaultToMatch && defaultName != 'resourceGroupName') { @@ -178,20 +177,43 @@ export class CodeModelCliImpl implements CodeModelAz { } else { this.MethodParameter['RequiredByMethod'] = paramRequired.get(this.MethodParameter_Name) == paramTime ? true : false; } + if (this.MethodParameter_MapsTo == 'name') { + hasName = true; + } } while (this.SelectNextMethodParameter()); + if (hasName) { + this.Method['hasName'] = true; + } } while (this.SelectNextMethod()) { + let id_groups = new Map(); + id_groups = parseResourceId(this.Request.protocol.http.path); + let hasName = false; if (this.SelectFirstMethodParameter()) { do { let parameters = this.MethodParameter; + let defaultName = parameters.language['cli']['cliKey']; + let defaultToMatch = '{' + defaultName + '}'; + if(!isNullOrUndefined(id_groups)) { + for(let k of id_groups.entries()) { + if(k[1] == defaultToMatch && defaultName != 'resourceGroupName') { + this.MethodParameter.language['az']['id_part'] = k[0]; + } + } + } if (parameters.language['cli'].required) { this.MethodParameter['RequiredByMethod'] = true; } else { this.MethodParameter['RequiredByMethod'] = paramRequired.get(this.MethodParameter_Name) == paramTime ? 
true : false; } + if (this.MethodParameter_MapsTo == 'name') { + hasName = true; + } } while (this.SelectNextMethodParameter()); + if (hasName) { + this.Method['hasName'] = true; + } } - } } } while (this.SelectNextCommand()); @@ -403,7 +425,11 @@ export class CodeModelCliImpl implements CodeModelAz { public get Extension_Name() { return this.extensionName; } - + + public get Extension_Mode() { + return this.codeModel.info['extensionMode']; + } + public get Extension_NameUnderscored() { return this.extensionName.replace(/-/g, '_'); } @@ -488,6 +514,10 @@ export class CodeModelCliImpl implements CodeModelAz { return this.CommandGroup.$key || this.CommandGroup_Name; } + public get CommandGroup_HasShowCommand(): boolean { + return this.CommandGroup.language['az']['hasShowCommand']; + } + public get CommandGroup_DefaultName(): string { let eps = new EnglishPluralizationService(); return eps.singularize(this.CommandGroup.language['cli'].cliKey); diff --git a/src/plugins/azgenerator/TemplateAzureCliAzextMetadata.ts b/src/plugins/azgenerator/TemplateAzureCliAzextMetadata.ts index 8f41ea3c7..e6ac4b986 100644 --- a/src/plugins/azgenerator/TemplateAzureCliAzextMetadata.ts +++ b/src/plugins/azgenerator/TemplateAzureCliAzextMetadata.ts @@ -9,7 +9,12 @@ export function GenerateAzureCliAzextMetadata(model: CodeModelAz) : string[] { var output: string[] = []; output.push('{'); - output.push(' "azext.isExperimental": true,'); + if(model.Extension_Mode == 'experimental') { + output.push(' "azext.isExperimental": true,'); + } else if(model.Extension_Mode == 'preview') { + output.push(' "azext.isPreview": true,'); + } + output.push(' "azext.minCliCoreVersion": "2.3.1"'); output.push('}'); diff --git a/src/plugins/azgenerator/TemplateAzureCliCommands.ts b/src/plugins/azgenerator/TemplateAzureCliCommands.ts index 645dba4d6..f7af5d69f 100644 --- a/src/plugins/azgenerator/TemplateAzureCliCommands.ts +++ b/src/plugins/azgenerator/TemplateAzureCliCommands.ts @@ -9,6 +9,7 @@ import { ToMultiLine } from "../../utils/helper" import { isNullOrUndefined } from "util"; import { SchemaType } from "@azure-tools/codemodel"; +let showCommandFunctionName = undefined; export function GenerateAzureCliCommands(model: CodeModelAz): string[] { let header: HeaderGenerator = new HeaderGenerator(); @@ -37,13 +38,15 @@ export function GenerateAzureCliCommands(model: CodeModelAz): string[] { output.push(" client_factory=" + cf_name + ")"); let groupinfos = model.CommandGroup_Name.split(' '); let extraInfo = ""; - if(groupinfos.length == 2) { + if(groupinfos.length == 2 && model.Extension_Mode == 'experimental') { extraInfo = ", is_experimental=True"; + } else if(groupinfos.length == 2 && model.Extension_Mode == 'preview') { + extraInfo = ", is_preview=True"; } ToMultiLine(" with self.command_group('" + model.CommandGroup_Name + "', " + model.Extension_NameUnderscored + "_" + model.GetModuleOperationName() + ", client_factory=" + cf_name + extraInfo + ") as g:", output); let needWait = false; do { - if (model.Command_IsLongRun) { + if (model.Command_IsLongRun && model.CommandGroup_HasShowCommand) { needWait = true; } output = output.concat(getCommandBody(model)); @@ -53,7 +56,7 @@ export function GenerateAzureCliCommands(model: CodeModelAz): string[] { } while (model.SelectNextCommand()); if (needWait) { - output.push(" g.wait_command('wait')"); + output.push(" g.custom_wait_command('wait', '" + showCommandFunctionName + "')"); } } } while (model.SelectNextCommandGroup()); @@ -76,7 +79,7 @@ function getCommandBody(model: CodeModelAz, 
needUpdate: boolean = false) { let functionName = model.Command_FunctionName; let methodName = model.Command_MethodName; let endStr = ")"; - if (model.Command_IsLongRun) { + if (model.Command_IsLongRun && model.CommandGroup_HasShowCommand) { endStr = ", supports_no_wait=True" + endStr; } if (methodName != "show") { @@ -113,6 +116,7 @@ function getCommandBody(model: CodeModelAz, needUpdate: boolean = false) { } } else { + showCommandFunctionName = functionName; ToMultiLine(" g.custom_show_command('" + methodName + "', '" + functionName + "'" + endStr, output); } return output; diff --git a/src/plugins/azgenerator/TemplateAzureCliCustom.ts b/src/plugins/azgenerator/TemplateAzureCliCustom.ts index 0a839eb7d..348ed23d8 100644 --- a/src/plugins/azgenerator/TemplateAzureCliCustom.ts +++ b/src/plugins/azgenerator/TemplateAzureCliCustom.ts @@ -28,6 +28,10 @@ export function GenerateAzureCliCustom(model: CodeModelAz): string[] { header.addFromImport("knack.util", ["CLIError"]); } + if(required['nowait']) { + header.addFromImport("azure.cli.core.util", ["sdk_no_wait"]); + } + let output = []; output = output.concat(body); output.push(""); @@ -60,9 +64,9 @@ function GenerateBody(model: CodeModelAz, required: any): string[] { needGeneric = true; } let needUpdate = model.Command_CanSplit; - output = output.concat(GetCommandBody(model, required, false, originalOperation, false)); + output = output.concat(GetCommandBody(model, required, false, originalOperation, false, genericParameter)); if (needUpdate) { - output = output.concat(GetCommandBody(model, required, needUpdate, originalOperation, needGeneric)); + output = output.concat(GetCommandBody(model, required, needUpdate, originalOperation, needGeneric, genericParameter)); } } while (model.SelectNextCommand()); @@ -153,7 +157,7 @@ function ConstructValuation(isGeneric: boolean, prefix: string, classNames: stri } -function GetSingleCommandDef(model: CodeModelAz, originalOperation: Operation, needUpdate: boolean = false, needGeneric: boolean = false) { +function GetSingleCommandDef(model: CodeModelAz, required: any, originalOperation: Operation, needUpdate: boolean = false, needGeneric: boolean = false, genericParameter: Parameter = null) { let output: string[] = []; let updatedMethodName: string = model.Command_FunctionName; @@ -170,9 +174,13 @@ function GetSingleCommandDef(model: CodeModelAz, originalOperation: Operation, n output.push(call); let allParam: Map = new Map(); + let hasLongRun = false; if (model.SelectFirstMethod()) { do { - + if(model.Method_IsLongRun && model.CommandGroup_HasShowCommand) { + required['nowait'] = true; + hasLongRun = true; + } if (model.SelectFirstMethodParameter()) { do { if (model.MethodParameter_IsFlattened) { @@ -182,6 +190,9 @@ function GetSingleCommandDef(model: CodeModelAz, originalOperation: Operation, n continue; } + if(needUpdate && !isNullOrUndefined(genericParameter) && model.MethodParameter_MapsTo == model.Parameter_MapsTo(genericParameter)) { + continue; + } if (model.MethodParameter_IsList && !model.MethodParameter_IsListOfSimple) { if (model.Parameter_IsPolyOfSimple(model.MethodParameter)) { continue; } @@ -215,6 +226,10 @@ function GetSingleCommandDef(model: CodeModelAz, originalOperation: Operation, n if (model.MethodParameter_Type == SchemaType.Constant) { continue; } + + if(needUpdate && !isNullOrUndefined(genericParameter) && model.MethodParameter_MapsTo == model.Parameter_MapsTo(genericParameter)) { + continue; + } if (model.MethodParameter_IsList && !model.MethodParameter_IsListOfSimple) { if
(model.Parameter_IsPolyOfSimple(model.MethodParameter)) { @@ -239,11 +254,15 @@ function GetSingleCommandDef(model: CodeModelAz, originalOperation: Operation, n } while (model.SelectNextMethod()); } + if(hasLongRun) { + output[output.length - 1] += ","; + output.push(indent + "no_wait=False"); + } output[output.length - 1] += "):"; return output; } -function GetSingleCommandBody(model: CodeModelAz, required, originalOperation: Operation = null, needGeneric: boolean = false) { +function GetSingleCommandBody(model: CodeModelAz, required, originalOperation: Operation = null, needGeneric: boolean = false, genericParameter: Parameter = null, needUpdate: boolean = false) { let originalParameters = null; if (!isNullOrUndefined(originalOperation)) { originalParameters = originalOperation.parameters; @@ -251,11 +270,10 @@ function GetSingleCommandBody(model: CodeModelAz, required, originalOperation: O originalParameters = originalParameters.concat(originalOperation.requests[0].parameters); } } - + let output: string[] = []; let output_body: string[] = [] let output_method_call: string[] = []; - if (model.SelectFirstMethod()) { // create body transformation for methods that support it let methodName: string = model.Command_MethodName; @@ -265,6 +283,9 @@ function GetSingleCommandBody(model: CodeModelAz, required, originalOperation: O do { if (model.SelectFirstMethodParameter()) { do { + if(needUpdate && !isNullOrUndefined(genericParameter) && model.MethodParameter_MapsTo == model.Parameter_MapsTo(genericParameter)) { + continue; + } if (model.MethodParameter_IsList && !model.MethodParameter_IsListOfSimple && !model.MethodParameter_IsSimpleArray) { if (model.Parameter_IsPolyOfSimple(model.MethodParameter)) { let baseParam = model.MethodParameter; @@ -379,27 +400,36 @@ function GetSingleCommandBody(model: CodeModelAz, required, originalOperation: O return output; } -function GetCommandBody(model: CodeModelAz, required: boolean, needUpdate: boolean = false, originalOperation: Operation = null, needGeneric: boolean = false) { +function GetCommandBody(model: CodeModelAz, required: any, needUpdate: boolean = false, originalOperation: Operation = null, needGeneric: boolean = false, genericParameter: Parameter = null) { // create, delete, list, show, update let output: string[] = []; output.push(""); output.push(""); - output = output.concat(GetSingleCommandDef(model, originalOperation, needUpdate, needGeneric)); - output = output.concat(GetSingleCommandBody(model, required, originalOperation, needGeneric)) + output = output.concat(GetSingleCommandDef(model, required, originalOperation, needUpdate, needGeneric, genericParameter)); + output = output.concat(GetSingleCommandBody(model, required, originalOperation, needGeneric, genericParameter, needUpdate)) return output; } function GetPolyMethodCall(model: CodeModelAz, prefix: any, originalOperation: Operation, originalParameters: Parameter[]): string[] { let methodCall: string = prefix + "return "; //methodCall += "client." + mode.GetModuleOperationName() +"." + ctx.Methods[methodIdx].Name + "("; + let indent = ""; let methodName = originalOperation.language['python'].name; - if (model.Method_IsLongRun) { + if (model.Method_IsLongRun && model.CommandGroup_HasShowCommand) { methodName = "begin_" + methodName; + methodCall += "sdk_no_wait("; + indent = " ".repeat(methodCall.length); + methodCall += "no_wait," + "\n" + indent + "client." + methodName; + + } else { + if(model.Method_IsLongRun) { + methodName = "begin_" + methodName; + } + methodCall += "client." 
+ methodName + "("; + indent = " ".repeat(methodCall.length); } - methodCall += "client." + methodName + "("; - - let indent = " ".repeat(methodCall.length); + let cnt = 0; for (let param of originalParameters) { if (param.flattened) { @@ -444,12 +474,21 @@ function GetMethodCall(model: CodeModelAz, prefix: any): string[] { let methodCall: string = prefix + "return "; //methodCall += "client." + mode.GetModuleOperationName() +"." + ctx.Methods[methodIdx].Name + "("; let methodName = model.Method_Name; - if (model.Method_IsLongRun) { + let indent = ""; + if (model.Method_IsLongRun && model.CommandGroup_HasShowCommand) { methodName = "begin_" + methodName; + methodCall += "sdk_no_wait("; + indent = " ".repeat(methodCall.length); + methodCall += "no_wait," + "\n" + indent + "client." + methodName; + } else { + if(model.Method_IsLongRun) { + methodName = "begin_" + methodName; + } + methodCall += "client." + methodName + "("; + indent = " ".repeat(methodCall.length); } - methodCall += "client." + methodName + "("; + - let indent = " ".repeat(methodCall.length); if (model.SelectFirstMethodParameter(true)) { do { let param = model.MethodParameter; diff --git a/src/plugins/azgenerator/TemplateAzureCliHelp.ts b/src/plugins/azgenerator/TemplateAzureCliHelp.ts index 786bbfb0c..9e34666d0 100644 --- a/src/plugins/azgenerator/TemplateAzureCliHelp.ts +++ b/src/plugins/azgenerator/TemplateAzureCliHelp.ts @@ -4,8 +4,10 @@ *--------------------------------------------------------------------------------------------*/ import { CodeModelAz } from "./CodeModelAz" +import { SchemaType, Parameter, Schema } from "@azure-tools/codemodel"; import { HeaderGenerator } from "./Header"; -import { ToMultiLine, ToJsonString } from "../../utils/helper" +import { EscapeString, ToCamelCase, Capitalize, ToMultiLine, ToJsonString } from "../../utils/helper" +import { isNullOrUndefined, isArray } from "util"; const maxShortSummary = 119 let showExampleStr = ""; @@ -38,7 +40,7 @@ export function GenerateAzureCliHelp(model: CodeModelAz): string[] { allSubGroup.set(subCommandGroupName, true); output = output.concat(generateCommandGroupHelp(model, subCommandGroupName)); } - if(model.Command_IsLongRun) { + if(model.Command_IsLongRun && model.CommandGroup_HasShowCommand) { hasWait = true; let waitParam = ""; if (allSupportWaited.indexOf(model.Command_MethodName) < 0) { @@ -104,7 +106,6 @@ function generateCommandGroupHelp(model: CodeModelAz, subCommandGroupName = "") output.push("helps['" + model.CommandGroup_Name + "'] = \"\"\""); } output.push(" type: group"); - //output.push(" short-summary: " + model.CommandGroup_Help); let shortSummary = " short-summary: " + model.CommandGroup_Help; if(subCommandGroupName != "") { shortSummary = shortSummary + " sub group " + subCommandGroupName.split(" ").pop(); @@ -114,6 +115,126 @@ function generateCommandGroupHelp(model: CodeModelAz, subCommandGroupName = "") return output; } +function addParameterHelp(output: string[], model: CodeModelAz) { + let parameter_output = [" parameters:"]; + + let originalOperation = model.Method_GetOriginalOperation; + let baseParam = null; + if (model.SelectFirstMethodParameter()) { + do { + if (model.MethodParameter_IsFlattened) { + continue; + } + if (model.MethodParameter_Type == SchemaType.Constant || model.MethodParameter['readOnly']) { + continue; + } + let parameterName = model.MethodParameter_MapsTo; + if (!isNullOrUndefined(originalOperation) && model.MethodParameter['targetProperty']?.['isDiscriminator']) { + continue; + } + + let parameterAlias: 
string[] = []; + if (parameterName.endsWith('name') && parameterName.replace(/_name$|_/g, '') == model.CommandGroup_DefaultName.toLowerCase()) { + parameterAlias.push('name'); + parameterAlias.push('n'); + } + if (!isNullOrUndefined(model.MethodParameter?.language?.['cli']?.['alias'])) { + let alias = model.MethodParameter?.language?.['cli']?.['alias']; + + if (typeof alias === "string") { + parameterAlias.push(alias); + } + if (isArray(alias)) { + parameterAlias = parameterAlias.concat(alias); + } + } + if (parameterAlias.length == 0) parameterAlias.push(parameterName); + parameterAlias = parameterAlias.map((alias) => { + return '--' + alias.replace(/'/g, '').replace(/_/g, '-'); + }); + + if (model.MethodParameter_IsList && model.MethodParameter_IsListOfSimple && !model.MethodParameter_IsSimpleArray) { + if (model.Parameter_IsPolyOfSimple(model.MethodParameter)) { + baseParam = model.MethodParameter; + continue; + } + let action_output: string[] = []; + ToMultiLine(` - name: ${parameterAlias.join(' ')}`, action_output, 119, true); + if (model.MethodParameter_Description && model.MethodParameter_Description.trim().length > 0) { + ToMultiLine(` short-summary: ${model.MethodParameter_Description.trim()}`.replace(/\r?\n|\r/g, ''), action_output, 119, true); + } + let options: Parameter[] = []; + if (!isNullOrUndefined(model.Schema_ActionName(model.MethodParameter.schema))) { + if (baseParam && model.MethodParameter['polyBaseParam'] == baseParam) { + let keyToMatch = baseParam.schema?.['discriminator']?.property?.language['python']?.name; + let valueToMatch = model.MethodParameter.schema?.['discriminatorValue']; + options = GetActionOptions(model, model.MethodParameter, keyToMatch, valueToMatch); + } + else { + options = GetActionOptions(model, model.MethodParameter); + } + } + if (options.length > 0) { + action_output.push(` long-summary: |`); + ToMultiLine([" Usage:", parameterAlias[0]].concat(options.map(p => `${model.Parameter_NameAz(p)}=XX`)).join(" "), action_output, 119, true); + action_output.push(""); + for (let p of options) { + let pDesc = model.Parameter_Description(p); + if (!pDesc || pDesc.trim().length <= 0) continue; + let line = ` ${model.Parameter_NameAz(p)}: `; + if (p.required) line += "Required. ";
+ line += model.Parameter_Description(p).trim().replace(/\r?\n|\r/g, ''); + ToMultiLine(line, action_output, 119, true); + } + if (model.Schema_Type(model.MethodParameter.schema) == SchemaType.Array) { + action_output.push(""); + ToMultiLine(` Multiple actions can be specified by using more than one ${parameterAlias[0]} argument.`, action_output, 119, true); + } + parameter_output = parameter_output.concat(action_output); + } + } + } while (model.SelectNextMethodParameter()); + } + + if (parameter_output.length>1) { + return output.concat(parameter_output); + } + else { + return output; + } +} + + +function GetActionOptions(model: CodeModelAz, param: Parameter, keyToMatch: string = null, valueToMatch: string = null): Parameter[] { + let options: Parameter[] = []; + + if (model.Schema_Type(param.schema) != SchemaType.Object && model.Schema_Type(param.schema) != SchemaType.Array) { + return options; + } + if (model.EnterSubMethodParameters()) { + if (model.SelectFirstMethodParameter()) { + do { + if (model.SubMethodParameter['readOnly']) { + continue; + } + if (model.SubMethodParameter['schema']?.type == SchemaType.Constant) { + continue; + } + if (!isNullOrUndefined(keyToMatch) && !isNullOrUndefined(valueToMatch) && model.Parameter_NamePython(model.SubMethodParameter) == keyToMatch) { + continue; + } + if (model.SubMethodParameter) { + options.push(model.SubMethodParameter); + } + } while (model.SelectNextMethodParameter()); + } + } + model.ExitSubMethodParameters(); + return options; +} + function generateCommandHelp(model: CodeModelAz, needUpdate: boolean = false) { // create, delete, list, show, update //let method: string = methods[mi]; @@ -130,9 +251,9 @@ function generateCommandHelp(model: CodeModelAz, needUpdate: boolean = false) { // there will be just one method for create, update, delete, show, etc. // there may be a few list methods, so let's just take description from the first one.
// as we can't use all of them - // output.push(" short-summary: " + model.Command_Help); let shortSummary = " short-summary: " + model.Command_Help; ToMultiLine(shortSummary, output, 119, true); + output = addParameterHelp(output, model); let examplesStarted: boolean = false; diff --git a/src/plugins/azgenerator/TemplateAzureCliParams.ts b/src/plugins/azgenerator/TemplateAzureCliParams.ts index b9dd580f2..b7b6303b3 100644 --- a/src/plugins/azgenerator/TemplateAzureCliParams.ts +++ b/src/plugins/azgenerator/TemplateAzureCliParams.ts @@ -30,10 +30,19 @@ export function GenerateAzureCliParams(model: CodeModelAz): string[] { if (model.SelectFirstCommandGroup()) { do { //let methods: string[] = model.CommandGroup_Commands; - + let needWait = false; + let show_output = []; if (model.SelectFirstCommand()) { do { - output_args = output_args.concat(getCommandBody(model)); + if (model.Command_IsLongRun && model.CommandGroup_HasShowCommand) { + needWait = true; + } + let command_output = getCommandBody(model); + if (model.Command_MethodName == "show") { + show_output = command_output; + } + + output_args = output_args.concat(command_output); let originalOperation = model.Command_GetOriginalOperation; let genericParam = model.Command_GenericSetterParameter(model.Command); if(!isNullOrUndefined(originalOperation)) { @@ -49,6 +58,10 @@ export function GenerateAzureCliParams(model: CodeModelAz): string[] { } } while (model.SelectNextCommand()); + if (needWait && show_output.length > 1) { + show_output[1] = show_output[1].replace(/ show'/g, " wait'"); + output_args = output_args.concat(show_output); + } } } while (model.SelectNextCommandGroup()); } @@ -174,8 +187,10 @@ function getCommandBody(model: CodeModelAz, needUpdate: boolean = false, needGen } else if (parameterName.endsWith('name') && parameterName.replace(/_name$|_/g, '') == model.CommandGroup_DefaultName.toLowerCase() || !isNullOrUndefined(model.MethodParameter?.language?.['cli']?.['alias'])) { argument = " c.argument('" + parameterName + "'"; let aliases: string[] = []; - aliases.push('name'); - aliases.push('n'); + if(!model.Method['hasName']) { + aliases.push('name'); + aliases.push('n'); + } if(!isNullOrUndefined(model.MethodParameter?.language?.['cli']?.['alias'])) { let alias = model.MethodParameter?.language?.['cli']?.['alias']; @@ -285,19 +300,7 @@ function getCommandBody(model: CodeModelAz, needUpdate: boolean = false, needGen } } if (options.length>0) { - if (options.length>1) { - argument += " Expect value: KEY1=VALUE1 KEY2=VALUE2 ... , available KEYs are:"; - for (let i =0; i<options.length; i++) { [...] diff --git a/src/plugins/merger.ts b/src/plugins/merger.ts [...] const session = await startSession<CodeModel>(host, {}, codeModelSchema); const plugin = await new Merger(session); + plugin.codeModel.info['extensionMode'] = extensionMode; const result = await plugin.process(); host.WriteFile('azmerger-temp-output.yaml', serialize(result)); } catch (E) { diff --git a/src/plugins/modifiers.ts b/src/plugins/modifiers.ts index bc316bc82..a0919de15 100644 --- a/src/plugins/modifiers.ts +++ b/src/plugins/modifiers.ts @@ -177,10 +177,13 @@ export class Modifiers { this.session.message({Channel:Channel.Warning, Text: "Trying to change the extension-name of a single command is not allowed!\n if you want to change the whole extension-name you can change the configuration in readme.az.md \n"}); continue; } + let newAzName = newCommandArr.length > 2?
newCommandArr.slice(2, newCommandArr.length).join(" "): newCommandArr[newCommandArr.length - 1]; + this.session.message({Channel: Channel.Warning, Text: " newAzName:" + newAzName}); newCommandArr.pop(); - let newGroup = newCommandArr.join(' '); + let newGroup = newCommandArr.length >= 2? newCommandArr[0] + " " + newCommandArr[1]: newCommandArr.join(" "); + oldCommandArr.pop(); - let oldGroup = oldCommandArr.join(' '); + let oldGroup = oldCommandArr.length >= 2? oldCommandArr[0] + " " + oldCommandArr[1]: oldCommandArr.join(" "); if (oldGroup != newGroup) { // if there's only one command in the operationGroup it's okay to change the group name if(operationGroup.operations.length == 1) { @@ -208,6 +211,10 @@ export class Modifiers { } } operation.language["az"]["command"] = newCommand; + if(newCommandArr.length > 2) { + operation.language["az"]["name"] = newAzName; + operation.language["az"]["subCommandGroup"] = newCommandArr.join(" "); + } } operation.language["az"]["description"] = commandDescriptionReplacer? commandDescriptionReplacer: operation.language["az"]["description"]; } diff --git a/src/test/resources/attestation/attestation-az-modifier.yaml b/src/test/resources/attestation/attestation-az-modifier.yaml index 568f45a77..fc961eef1 100644 --- a/src/test/resources/attestation/attestation-az-modifier.yaml +++ b/src/test/resources/attestation/attestation-az-modifier.yaml @@ -3028,6 +3028,7 @@ operationGroups: name: AttestationProvider description: '' command: attestation attestation-provider + hasShowCommand: true hasUpdate: true cli: name: AttestationProvider diff --git a/src/test/resources/attestation/attestation-az-namer.yaml b/src/test/resources/attestation/attestation-az-namer.yaml index 568f45a77..fc961eef1 100644 --- a/src/test/resources/attestation/attestation-az-namer.yaml +++ b/src/test/resources/attestation/attestation-az-namer.yaml @@ -3028,6 +3028,7 @@ operationGroups: name: AttestationProvider description: '' command: attestation attestation-provider + hasShowCommand: true hasUpdate: true cli: name: AttestationProvider diff --git a/src/test/resources/managed-network/managed-network-az-modifier.yaml b/src/test/resources/managed-network/managed-network-az-modifier.yaml index 6a5cca11e..3c057a5de 100644 --- a/src/test/resources/managed-network/managed-network-az-modifier.yaml +++ b/src/test/resources/managed-network/managed-network-az-modifier.yaml @@ -3585,6 +3585,7 @@ operationGroups: name: ManagedNetwork description: '' command: managed-network managed-network + hasShowCommand: true hasUpdate: true cli: name: ManagedNetwork @@ -4131,6 +4132,7 @@ operationGroups: name: ScopeAssignment description: '' command: managed-network scope-assignment + hasShowCommand: true cli: name: ScopeAssignment description: '' @@ -4929,6 +4931,7 @@ operationGroups: name: ManagedNetworkGroup description: '' command: managed-network managed-network-group + hasShowCommand: true cli: name: ManagedNetworkGroup description: '' @@ -5650,6 +5653,7 @@ operationGroups: name: ManagedNetworkPeeringPolicy description: '' command: managed-network managed-network-peering-policy + hasShowCommand: true cli: name: ManagedNetworkPeeringPolicy description: '' diff --git a/src/test/resources/managed-network/managed-network-az-namer.yaml b/src/test/resources/managed-network/managed-network-az-namer.yaml index 1f5852a96..b45f40a51 100644 --- a/src/test/resources/managed-network/managed-network-az-namer.yaml +++ b/src/test/resources/managed-network/managed-network-az-namer.yaml @@ -3585,6 +3585,7 @@ operationGroups: name: ManagedNetwork
description: '' command: managed-network managed-network + hasShowCommand: true hasUpdate: true cli: name: ManagedNetwork @@ -4131,6 +4132,7 @@ operationGroups: name: ScopeAssignment description: '' command: managed-network scope-assignment + hasShowCommand: true cli: name: ScopeAssignment description: '' @@ -4929,6 +4931,7 @@ operationGroups: name: ManagedNetworkGroup description: '' command: managed-network managed-network-group + hasShowCommand: true cli: name: ManagedNetworkGroup description: '' @@ -5649,6 +5652,7 @@ operationGroups: name: ManagedNetworkPeeringPolicy description: '' command: managed-network managed-network-peering-policy + hasShowCommand: true cli: name: ManagedNetworkPeeringPolicy description: '' diff --git a/src/test/scenarios/attestation/configuration/readme.az.md b/src/test/scenarios/attestation/configuration/readme.az.md index cbe7132e2..20bbe0ea3 100644 --- a/src/test/scenarios/attestation/configuration/readme.az.md +++ b/src/test/scenarios/attestation/configuration/readme.az.md @@ -14,7 +14,7 @@ directive: - where: command: attestation attestation-provider list set: - command: attestation attestation-provider list-attestation + command: attestation attestation-provider provider list - where: command: attestation operation list set: diff --git a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/_help.py b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/_help.py index 6909ffc5a..c31b62237 100644 --- a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/_help.py +++ b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/_help.py @@ -20,6 +20,47 @@ helps['attestation create-provider'] = """ type: command short-summary: Creates or updates the Attestation Provider. + parameters: + - name: --policy-signing-certificates-keys + short-summary: The value of the "keys" parameter is an array of JWK values. Bydefault, the order of the JWK va\ +lues within the array does not implyan order of preference among them, although applications of JWK Setscan choose to a\ +ssign a meaning to the order for their purposes, ifdesired. + long-summary: | + Usage: --policy-signing-certificates-keys alg=XX crv=XX d=XX dp=XX dq=XX e=XX k=XX kid=XX kty=XX n=XX p=XX \ +q=XX qi=XX use=XX x=XX x5-c=XX y=XX + + alg: Required. The "alg" (algorithm) parameter identifies the algorithm intended foruse with the key. The \ +values used should either be registered in theIANA "JSON Web Signature and Encryption Algorithms" registryestablished b\ +y [JWA] or be a value that contains a Collision-Resistant Name. + crv: The "crv" (curve) parameter identifies the curve type + d: RSA private exponent or ECC private key + dp: RSA Private Key Parameter + dq: RSA Private Key Parameter + e: RSA public exponent, in Base64 + k: Symmetric key + kid: Required. The "kid" (key ID) parameter is used to match a specific key. Thisis used, for instance, to\ + choose among a set of keys within a JWK Setduring key rollover. The structure of the "kid" value isunspecified. When\ + "kid" values are used within a JWK Set, differentkeys within the JWK Set SHOULD use distinct "kid" values. (Oneexampl\ +e in which different keys might use the same "kid" value is ifthey have different "kty" (key type) values but are consi\ +dered to beequivalent alternatives by the application using them.) The "kid"value is a case-sensitive string. + kty: Required. 
The "kty" (key type) parameter identifies the cryptographic algorithmfamily used with the ke\ +y, such as "RSA" or "EC". "kty" values shouldeither be registered in the IANA "JSON Web Key Types" registryestablished \ +by [JWA] or be a value that contains a Collision-Resistant Name. The "kty" value is a case-sensitive string. + n: RSA modulus, in Base64 + p: RSA secret prime + q: RSA secret prime, with p < q + qi: RSA Private Key Parameter + use: Required. Use ("public key use") identifies the intended use ofthe public key. The "use" parameter is \ +employed to indicate whethera public key is used for encrypting data or verifying the signatureon data. Values are comm\ +only "sig" (signature) or "enc" (encryption). + x: X coordinate for the Elliptic Curve point + x5-c: The "x5c" (X.509 certificate chain) parameter contains a chain of oneor more PKIX certificates [RFC52\ +80]. The certificate chain isrepresented as a JSON array of certificate value strings. Eachstring in the array is a b\ +ase64-encoded (Section 4 of [RFC4648] --not base64url-encoded) DER [ITU.X690.1994] PKIX certificate value.The PKIX cert\ +ificate containing the key value MUST be the firstcertificate. + y: Y coordinate for the Elliptic Curve point + + Multiple actions can be specified by using more than one --policy-signing-certificates-keys argument. examples: - name: AttestationProviders_Create text: |- @@ -41,6 +82,20 @@ short-summary: attestation attestation-provider """ +helps['attestation attestation-provider provider'] = """ + type: group + short-summary: attestation attestation-provider sub group provider +""" + +helps['attestation attestation-provider provider list'] = """ + type: command + short-summary: Returns a list of attestation providers in a subscription. + examples: + - name: AttestationProviders_ListByResourceGroup + text: |- + az attestation attestation-provider provider list --resource-group "testrg1" +""" + helps['attestation attestation-provider show'] = """ type: command short-summary: Get the status of Attestation Provider. @@ -70,12 +125,3 @@ az attestation attestation-provider delete --provider-name "myattestationprovider" --resource-group "sam\ ple-resource-group" """ - -helps['attestation attestation-provider list-attestation'] = """ - type: command - short-summary: Returns a list of attestation providers in a subscription. - examples: - - name: AttestationProviders_ListByResourceGroup - text: |- - az attestation attestation-provider list-attestation --resource-group "testrg1" -""" diff --git a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/_params.py b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/_params.py index cd1b5218a..bacbe05ec 100644 --- a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/_params.py +++ b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/_params.py @@ -32,13 +32,14 @@ def load_arguments(self, _): c.argument('policy_signing_certificates_keys', action=AddPolicySigningCertificatesKeys, nargs='+', help='The va' 'lue of the "keys" parameter is an array of JWK values. By default, the order of the JWK values wit' 'hin the array does not imply an order of preference among them, although applications of JWK Sets c' - 'an choose to assign a meaning to the order for their purposes, if desired. Expect value: KEY1=VALUE' - '1 KEY2=VALUE2 ... 
, available KEYs are: alg, crv, d, dp, dq, e, k, kid, kty, n, p, q, qi, use, x, x' - '5-c, y.') + 'an choose to assign a meaning to the order for their purposes, if desired.') with self.argument_context('attestation list-operation') as c: pass + with self.argument_context('attestation attestation-provider provider list') as c: + c.argument('resource_group_name', resource_group_name_type) + with self.argument_context('attestation attestation-provider show') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('provider_name', help='Name of the attestation service instance', id_part='name') @@ -51,6 +52,3 @@ def load_arguments(self, _): with self.argument_context('attestation attestation-provider delete') as c: c.argument('resource_group_name', resource_group_name_type) c.argument('provider_name', help='Name of the attestation service', id_part='name') - - with self.argument_context('attestation attestation-provider list-attestation') as c: - c.argument('resource_group_name', resource_group_name_type) diff --git a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/commands.py b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/commands.py index 8397f6cbc..fe20eea04 100644 --- a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/commands.py +++ b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/commands.py @@ -29,7 +29,7 @@ def load_command_table(self, _): client_factory=cf_attestation_provider) with self.command_group('attestation attestation-provider', attestation_attestation_provider, client_factory=cf_attestation_provider, is_experimental=True) as g: + g.custom_command('provider list', 'attestation_attestation_provider_provider_list') g.custom_show_command('show', 'attestation_attestation_provider_show') g.custom_command('update', 'attestation_attestation_provider_update') g.custom_command('delete', 'attestation_attestation_provider_delete') - g.custom_command('list-attestation', 'attestation_attestation_provider_list_attestation') diff --git a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/custom.py b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/custom.py index ae07242e2..1b5288dad 100644 --- a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/custom.py +++ b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/generated/custom.py @@ -29,6 +29,13 @@ def attestation_list_operation(cmd, client): return client.list() +def attestation_attestation_provider_provider_list(cmd, client, + resource_group_name=None): + if resource_group_name: + return client.list_by_resource_group(resource_group_name=resource_group_name) + return client.list() + + def attestation_attestation_provider_show(cmd, client, resource_group_name, provider_name): @@ -50,10 +57,3 @@ def attestation_attestation_provider_delete(cmd, client, provider_name): return client.delete(resource_group_name=resource_group_name, provider_name=provider_name) - - -def attestation_attestation_provider_list_attestation(cmd, client, - resource_group_name=None): - if resource_group_name: - return client.list_by_resource_group(resource_group_name=resource_group_name) - return client.list() diff --git a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/tests/latest/test_attestation_scenario.py 
b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/tests/latest/test_attestation_scenario.py index 937a2f6d4..e764e1032 100644 --- a/src/test/scenarios/attestation/output/src/attestation/azext_attestation/tests/latest/test_attestation_scenario.py +++ b/src/test/scenarios/attestation/output/src/attestation/azext_attestation/tests/latest/test_attestation_scenario.py @@ -55,7 +55,7 @@ def mytest(test, rg, rg_2, rg_3): # EXAMPLE: AttestationProviders_List @try_manual def step_attestationproviders_list(test, rg, rg_2, rg_3): - test.cmd('az attestation attestation-provider list-attestation ' + test.cmd('az attestation attestation-provider provider list ' '-g ""', checks=[]) @@ -63,7 +63,7 @@ def step_attestationproviders_list(test, rg, rg_2, rg_3): # EXAMPLE: AttestationProviders_ListByResourceGroup @try_manual def step_attestationproviders_listbyresourcegroup(test, rg, rg_2, rg_3): - test.cmd('az attestation attestation-provider list-attestation ' + test.cmd('az attestation attestation-provider provider list ' '--resource-group "{rg_2}"', checks=[]) diff --git a/src/test/scenarios/attestation/output/src/attestation/report.md b/src/test/scenarios/attestation/output/src/attestation/report.md index 48bd14832..a96c5cf57 100644 --- a/src/test/scenarios/attestation/output/src/attestation/report.md +++ b/src/test/scenarios/attestation/output/src/attestation/report.md @@ -8,9 +8,9 @@ delete a attestation attestation-provider. |------|----|-----------|----------|--------------| |**--resource-group-name**|string|The name of the resource group. The name is case insensitive.|resource_group_name| |**--provider-name**|string|Name of the attestation service|provider_name| -### attestation attestation-provider list-attestation +### attestation attestation-provider provider list -list-attestation a attestation attestation-provider. +provider list a attestation attestation-provider. 
|Option|Type|Description|Path (SDK)|Path (swagger)| |------|----|-----------|----------|--------------| diff --git a/src/test/scenarios/datafactory/configuration/readme.az.md b/src/test/scenarios/datafactory/configuration/readme.az.md index 9c02d8dda..1fe5d301f 100644 --- a/src/test/scenarios/datafactory/configuration/readme.az.md +++ b/src/test/scenarios/datafactory/configuration/readme.az.md @@ -12,18 +12,26 @@ python-sdk-output-folder: "$(az-output-folder)/azext_datafactory/vendored_sdks/d directive: - where: - group: datafactory factory + group: datafactory factory set: - group: datafactory - + group: datafactory + - where: + command: datafactory integration-runtime create-linked-integration-runtime + set: + command: datafactory integration-runtime linked-integration-runtime create cli: cli-directive: # directive on operationGroup - where: + group: datafactory parameter: factoryName alias: - name - n - + - where: + group: IntegrationRuntimes + op: CreateOrUpdate + param: properties + poly-resource: true ``` diff --git a/src/test/scenarios/datafactory/input/datafactory.json b/src/test/scenarios/datafactory/input/datafactory.json index 26e45d4e4..785a370b3 100644 --- a/src/test/scenarios/datafactory/input/datafactory.json +++ b/src/test/scenarios/datafactory/input/datafactory.json @@ -461,7 +461,1264 @@ } } - } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers": { + "get": { + "tags": [ + "triggers" + ], + "operationId": "Triggers_ListByFactory", + "x-ms-examples": { + "Triggers_ListByFactory": { + "$ref": "./examples/Triggers_ListByFactory.json" + } + }, + "description": "Lists triggers.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/TriggerListResponse" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers": { + "post": { + "tags": [ + "triggers" + ], + "operationId": "Triggers_QueryByFactory", + "x-ms-examples": { + "Triggers_QueryByFactory": { + "$ref": "./examples/Triggers_QueryByFactory.json" + } + }, + "description": "Query triggers.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "filterParameters", + "description": "Parameters to filter the triggers.", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/TriggerFilterParameters" + } + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/TriggerQueryResponse" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}": { + "put": { + "tags": [ + "triggers" + ], + 
"operationId": "Triggers_CreateOrUpdate", + "x-ms-examples": { + "Triggers_Create": { + "$ref": "./examples/Triggers_Create.json" + }, + "Triggers_Update": { + "$ref": "./examples/Triggers_Update.json" + } + }, + "description": "Creates or updates a trigger.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/triggerName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "If-Match", + "in": "header", + "required": false, + "type": "string", + "description": "ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update." + }, + { + "name": "trigger", + "description": "Trigger resource definition.", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/TriggerResource" + } + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/TriggerResource" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "get": { + "tags": [ + "trigger" + ], + "operationId": "Triggers_Get", + "x-ms-examples": { + "Triggers_Get": { + "$ref": "./examples/Triggers_Get.json" + } + }, + "description": "Gets a trigger.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/triggerName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "If-None-Match", + "in": "header", + "required": false, + "type": "string", + "description": "ETag of the trigger entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned." + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/TriggerResource" + } + }, + "304": { + "description": "Not modified." + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "delete": { + "tags": [ + "triggers" + ], + "operationId": "Triggers_Delete", + "x-ms-examples": { + "Triggers_Delete": { + "$ref": "./examples/Triggers_Delete.json" + } + }, + "description": "Deletes a trigger.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/triggerName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK." + }, + "204": { + "description": "No Content." 
+ }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents": { + "post": { + "tags": [ + "triggers" + ], + "operationId": "Triggers_SubscribeToEvents", + "x-ms-examples": { + "Triggers_SubscribeToEvents": { + "$ref": "./examples/Triggers_SubscribeToEvents.json" + } + }, + "description": "Subscribe event trigger to events.", + "x-ms-long-running-operation": true, + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/triggerName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "Trigger is subscribed to events.", + "schema": { + "$ref": "#/definitions/TriggerSubscriptionOperationStatus" + } + }, + "202": { + "description": "Accepted." + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus": { + "post": { + "tags": [ + "triggers" + ], + "operationId": "Triggers_GetEventSubscriptionStatus", + "x-ms-examples": { + "Triggers_GetEventSubscriptionStatus": { + "$ref": "./examples/Triggers_GetEventSubscriptionStatus.json" + } + }, + "description": "Get a trigger's event subscription status.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/triggerName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "Trigger event subscription state.", + "schema": { + "$ref": "#/definitions/TriggerSubscriptionOperationStatus" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents": { + "post": { + "tags": [ + "triggers" + ], + "operationId": "Triggers_UnsubscribeFromEvents", + "x-ms-examples": { + "Triggers_UnsubscribeFromEvents": { + "$ref": "./examples/Triggers_UnsubscribeFromEvents.json" + } + }, + "description": "Unsubscribe event trigger from events.", + "x-ms-long-running-operation": true, + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/triggerName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "Trigger is unsubscribed from events.", + "schema": { + "$ref": "#/definitions/TriggerSubscriptionOperationStatus" + } + }, + "202": { + "description": "Accepted." 
+ }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start": { + "post": { + "tags": [ + "triggers" + ], + "operationId": "Triggers_Start", + "x-ms-examples": { + "Triggers_Start": { + "$ref": "./examples/Triggers_Start.json" + } + }, + "description": "Starts a trigger.", + "x-ms-long-running-operation": true, + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/triggerName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "Trigger has been started successfully." + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop": { + "post": { + "tags": [ + "triggers" + ], + "operationId": "Triggers_Stop", + "x-ms-examples": { + "Triggers_Stop": { + "$ref": "./examples/Triggers_Stop.json" + } + }, + "description": "Stops a trigger.", + "x-ms-long-running-operation": true, + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/triggerName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "Trigger has been stopped successfully." 
+ }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes": { + "get": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_ListByFactory", + "x-ms-examples": { + "IntegrationRuntimes_ListByFactory": { + "$ref": "./examples/IntegrationRuntimes_ListByFactory.json" + } + }, + "description": "Lists integration runtimes.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/IntegrationRuntimeListResponse" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + }, + "x-ms-pageable": { + "nextLinkName": "nextLink" + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}": { + "put": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_CreateOrUpdate", + "x-ms-examples": { + "IntegrationRuntimes_Create": { + "$ref": "./examples/IntegrationRuntimes_Create.json" + } + }, + "description": "Creates or updates an integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "If-Match", + "in": "header", + "required": false, + "type": "string", + "description": "ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update." + }, + { + "name": "integrationRuntime", + "description": "Integration runtime resource definition.", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/IntegrationRuntimeResource" + } + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/IntegrationRuntimeResource" + } + }, + "default": { + "description": "An error response received from PUT integration runtime operation.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "get": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_Get", + "x-ms-examples": { + "IntegrationRuntimes_Get": { + "$ref": "./examples/IntegrationRuntimes_Get.json" + } + }, + "description": "Gets an integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "If-None-Match", + "in": "header", + "required": false, + "type": "string", + "description": "ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned." 
+ } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/IntegrationRuntimeResource" + } + }, + "304": { + "description": "Not modified." + }, + "default": { + "description": "An error response received from GET integration runtime operation.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "patch": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_Update", + "x-ms-examples": { + "IntegrationRuntimes_Update": { + "$ref": "./examples/IntegrationRuntimes_Update.json" + } + }, + "description": "Updates an integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "updateIntegrationRuntimeRequest", + "description": "The parameters for updating an integration runtime.", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/UpdateIntegrationRuntimeRequest" + } + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/IntegrationRuntimeResource" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + }, + "delete": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_Delete", + "x-ms-examples": { + "IntegrationRuntimes_Delete": { + "$ref": "./examples/IntegrationRuntimes_Delete.json" + } + }, + "description": "Deletes an integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK." + }, + "204": { + "description": "No Content." 
+ }, + "default": { + "description": "An error response received from DELETE integration runtime operation.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_GetStatus", + "x-ms-examples": { + "IntegrationRuntimes_GetStatus": { + "$ref": "./examples/IntegrationRuntimes_GetStatus.json" + } + }, + "description": "Gets detailed status information for an integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/IntegrationRuntimeStatusResponse" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_GetConnectionInfo", + "x-ms-examples": { + "IntegrationRuntimes_GetConnectionInfo": { + "$ref": "./examples/IntegrationRuntimes_GetConnectionInfo.json" + } + }, + "description": "Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "./definitions/IntegrationRuntimeConnectionInfo" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_RegenerateAuthKey", + "x-ms-examples": { + "IntegrationRuntimes_RegenerateAuthKey": { + "$ref": "./examples/IntegrationRuntimes_RegenerateAuthKey.json" + } + }, + "description": "Regenerates the authentication key for an integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "regenerateKeyParameters", + "description": "The parameters for regenerating integration runtime authentication key.", + "in": "body", + "required": true, + "schema": { + "$ref": "./definitions/IntegrationRuntimeRegenerateKeyParameters" + } + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": 
"./definitions/IntegrationRuntimeAuthKeys" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_ListAuthKeys", + "x-ms-examples": { + "IntegrationRuntimes_ListAuthKeys": { + "$ref": "./examples/IntegrationRuntimes_ListAuthKeys.json" + } + }, + "description": "Retrieves the authentication keys for an integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "./definitions/IntegrationRuntimeAuthKeys" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_Start", + "x-ms-examples": { + "IntegrationRuntimes_Start": { + "$ref": "./examples/IntegrationRuntimes_Start.json" + } + }, + "description": "Starts a ManagedReserved type integration runtime.", + "x-ms-long-running-operation": true, + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/IntegrationRuntimeStatusResponse" + } + }, + "202": { + "description": "Accepted." + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_Stop", + "x-ms-examples": { + "IntegrationRuntimes_Stop": { + "$ref": "./examples/IntegrationRuntimes_Stop.json" + } + }, + "description": "Stops a ManagedReserved type integration runtime.", + "x-ms-long-running-operation": true, + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK." + }, + "202": { + "description": "Accepted." 
+ }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_SyncCredentials", + "x-ms-examples": { + "IntegrationRuntimes_SyncCredentials": { + "$ref": "./examples/IntegrationRuntimes_SyncCredentials.json" + } + }, + "description": "Force the integration runtime to synchronize credentials across integration runtime nodes, and this will override the credentials across all worker nodes with those available on the dispatcher node. If you already have the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime node than using this API directly.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK." + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_GetMonitoringData", + "x-ms-examples": { + "IntegrationRuntimes_GetMonitoringData": { + "$ref": "./examples/IntegrationRuntimes_GetMonitoringData.json" + } + }, + "description": "Get the integration runtime monitoring data, which includes the monitor data for all the nodes under this integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "./definitions/IntegrationRuntimeMonitoringData" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_Upgrade", + "x-ms-examples": { + "IntegrationRuntimes_Upgrade": { + "$ref": "./examples/IntegrationRuntimes_Upgrade.json" + } + }, + "description": "Upgrade self-hosted integration runtime to latest version if availability.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + } + ], + "responses": { + "200": { + "description": "OK." 
+ }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_RemoveLinks", + "x-ms-examples": { + "IntegrationRuntimes_Upgrade": { + "$ref": "./examples/IntegrationRuntimes_RemoveLinks.json" + } + }, + "description": "Remove all linked integration runtimes under specific data factory in a self-hosted integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "linkedIntegrationRuntimeRequest", + "description": "The data factory name for the linked integration runtime.", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/LinkedIntegrationRuntimeRequest" + } + } + ], + "responses": { + "200": { + "description": "OK." + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime": { + "post": { + "tags": [ + "integrationRuntimes" + ], + "operationId": "IntegrationRuntimes_CreateLinkedIntegrationRuntime", + "x-ms-examples": { + "IntegrationRuntimes_CreateLinkedIntegrationRuntime": { + "$ref": "./examples/IntegrationRuntimes_CreateLinkedIntegrationRuntime.json" + } + }, + "description": "Create a linked integration runtime entry in a shared integration runtime.", + "parameters": [ + { + "$ref": "#/parameters/subscriptionId" + }, + { + "$ref": "#/parameters/resourceGroupName" + }, + { + "$ref": "#/parameters/factoryName" + }, + { + "$ref": "#/parameters/integrationRuntimeName" + }, + { + "$ref": "#/parameters/api-version" + }, + { + "name": "createLinkedIntegrationRuntimeRequest", + "description": "The linked integration runtime properties.", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/CreateLinkedIntegrationRuntimeRequest" + } + } + ], + "responses": { + "200": { + "description": "OK.", + "schema": { + "$ref": "#/definitions/IntegrationRuntimeStatusResponse" + } + }, + "default": { + "description": "An error response received from the Azure Data Factory service.", + "schema": { + "$ref": "#/definitions/CloudError" + } + } + } + } + } }, "definitions": { "Resource": { @@ -860,30 +2117,2443 @@ "required": [ "name" ] - } - }, - "parameters": { - "subscriptionId": { - "name": "subscriptionId", - "description": "The subscription identifier.", - "in": "path", - "required": true, - "type": "string" - }, - "resourceGroupName": { - "name": "resourceGroupName", - "description": "The resource group name.", - "in": "path", - "required": true, - "type": "string", - "pattern": "^[-\\w\\._\\(\\)]+$", - "minLength": 1, - "maxLength": 90, - "x-ms-parameter-location": "method" }, - "factoryName": { - "name": "factoryName", - "description": "The factory name.", + "TriggerQueryResponse": { + "description": "A query of triggers.", 
+ "type": "object", + "required": [ + "value" + ], + "properties": { + "value": { + "type": "array", + "description": "List of triggers.", + "items": { + "$ref": "#/definitions/TriggerResource" + } + }, + "continuationToken": { + "description": "The continuation token for getting the next page of results, if any remaining results exist, null otherwise.", + "type": "string" + } + } + }, + "TriggerListResponse": { + "description": "A list of trigger resources.", + "type": "object", + "required": [ + "value" + ], + "properties": { + "value": { + "type": "array", + "description": "List of triggers.", + "items": { + "$ref": "#/definitions/TriggerResource" + } + }, + "nextLink": { + "description": "The link to the next page of results, if any remaining results exist.", + "type": "string" + } + } + }, + "TriggerSubscriptionOperationStatus": { + "description": "Defines the response of a trigger subscription operation.", + "type": "object", + "properties": { + "triggerName": { + "description": "Trigger name.", + "type": "string", + "readOnly": true + }, + "status": { + "type": "string", + "enum": [ + "Enabled", + "Provisioning", + "Deprovisioning", + "Disabled", + "Unknown" + ], + "x-ms-enum": { + "name": "EventSubscriptionStatus", + "modelAsString": true + }, + "description": "Event Subscription Status.", + "readOnly": true + } + } + }, + "SubResource": { + "description": "Azure Data Factory nested resource, which belongs to a factory.", + "properties": { + "id": { + "type": "string", + "description": "The resource identifier.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "The resource name.", + "readOnly": true + }, + "type": { + "type": "string", + "description": "The resource type.", + "readOnly": true + }, + "etag": { + "type": "string", + "description": "Etag identifies change in the resource.", + "readOnly": true + } + }, + "x-ms-azure-resource": true + }, + "TriggerResource": { + "description": "Trigger resource type.", + "allOf": [ + { + "$ref": "#/definitions/SubResource" + } + ], + "properties": { + "properties": { + "$ref": "./definitions/Trigger", + "description": "Properties of the trigger." + } + }, + "required": [ + "properties" + ] + }, + "Trigger": { + "description": "Azure data factory nested object which contains information about creating pipeline run", + "type": "object", + "discriminator": "type", + "properties": { + "type": { + "type": "string", + "description": "Trigger type." + }, + "description": { + "description": "Trigger description.", + "type": "string" + }, + "runtimeState": { + "$ref": "#/definitions/TriggerRuntimeState", + "description": "Indicates if trigger is running or not. Updated when Start/Stop APIs are called on the Trigger.", + "readOnly": true + }, + "annotations": { + "description": "List of tags that can be used for describing the trigger.", + "type": "array", + "items": { + "type": "object" + } + } + }, + "additionalProperties": { + "type": "object" + }, + "required": [ + "type" + ] + }, + "TriggerFilterParameters": { + "description": "Query parameters for triggers.", + "type": "object", + "properties": { + "continuationToken": { + "description": "The continuation token for getting the next page of results. 
Null for first page.", + "type": "string" + }, + "parentTriggerName": { + "description": "The name of the parent TumblingWindowTrigger to get the child rerun triggers", + "type": "string" + } + } + }, + "TriggerRuntimeState": { + "type": "string", + "description": "Enumerates possible state of Triggers.", + "enum": [ + "Started", + "Stopped", + "Disabled" + ], + "x-ms-enum": { + "name": "TriggerRuntimeState", + "modelAsString": true + } + }, + "MultiplePipelineTrigger": { + "x-ms-discriminator-value": "MultiplePipelineTrigger", + "description": "Base class for all triggers that support one to many model for trigger to pipeline.", + "allOf": [ + { + "$ref": "#/definitions/Trigger" + } + ], + "properties": { + "pipelines": { + "type": "array", + "items": { + "$ref": "./definitions/TriggerPipelineReference" + }, + "description": "Pipelines that need to be started." + } + } + }, + "ScheduleTrigger": { + "description": "Trigger that creates pipeline runs periodically, on schedule.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/MultiplePipelineTrigger" + } + ], + "properties": { + "typeProperties": { + "description": "Schedule Trigger properties.", + "x-ms-client-flatten": true, + "properties": { + "recurrence": { + "$ref": "#/definitions/ScheduleTriggerRecurrence", + "description": "Recurrence schedule configuration." + } + }, + "required": [ + "recurrence" + ] + } + }, + "required": [ + "typeProperties" + ] + }, + "ScheduleTriggerRecurrence": { + "type": "object", + "properties": { + "frequency": { + "$ref": "#/definitions/RecurrenceFrequency", + "description": "The frequency." + }, + "interval": { + "type": "integer", + "format": "int32", + "description": "The interval." + }, + "startTime": { + "type": "string", + "format": "date-time", + "description": "The start time." + }, + "endTime": { + "type": "string", + "format": "date-time", + "description": "The end time." + }, + "timeZone": { + "type": "string", + "description": "The time zone." + }, + "schedule": { + "$ref": "#/definitions/RecurrenceSchedule", + "description": "The recurrence schedule." + } + }, + "additionalProperties": { + "type": "object" + }, + "description": "The workflow trigger recurrence." + }, + "RecurrenceFrequency": { + "description": "Enumerates possible frequency option for the schedule trigger.", + "type": "string", + "enum": [ + "NotSpecified", + "Minute", + "Hour", + "Day", + "Week", + "Month", + "Year" + ], + "x-ms-enum": { + "name": "RecurrenceFrequency", + "modelAsString": true + } + }, + "RecurrenceSchedule": { + "type": "object", + "properties": { + "minutes": { + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "description": "The minutes." + }, + "hours": { + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "description": "The hours." + }, + "weekDays": { + "type": "array", + "items": { + "type": "string", + "enum": [ + "Sunday", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday" + ], + "x-ms-enum": { + "name": "DaysOfWeek", + "modelAsString": false + } + }, + "description": "The days of the week." + }, + "monthDays": { + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "description": "The month days." + }, + "monthlyOccurrences": { + "type": "array", + "items": { + "$ref": "#/definitions/RecurrenceScheduleOccurrence" + }, + "description": "The monthly occurrences." + } + }, + "additionalProperties": { + "type": "object" + }, + "description": "The recurrence schedule." 
+ }, + "RecurrenceScheduleOccurrence": { + "type": "object", + "properties": { + "day": { + "$ref": "#/definitions/DayOfWeek", + "description": "The day of the week." + }, + "occurrence": { + "type": "integer", + "format": "int32", + "description": "The occurrence." + } + }, + "additionalProperties": { + "type": "object" + }, + "description": "The recurrence schedule occurrence." + }, + "DayOfWeek": { + "type": "string", + "enum": [ + "Sunday", + "Monday", + "Tuesday", + "Wednesday", + "Thursday", + "Friday", + "Saturday" + ], + "x-ms-enum": { + "name": "DayOfWeek", + "modelAsString": false + }, + "description": "The days of the week." + }, + "BlobTrigger": { + "description": "Trigger that runs every time the selected Blob container changes.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/MultiplePipelineTrigger" + } + ], + "properties": { + "typeProperties": { + "description": "Blob Trigger properties.", + "x-ms-client-flatten": true, + "properties": { + "folderPath": { + "description": "The path of the container/folder that will trigger the pipeline.", + "type": "string" + }, + "maxConcurrency": { + "description": "The max number of parallel files to handle when it is triggered.", + "type": "integer" + }, + "linkedService": { + "description": "The Azure Storage linked service reference.", + "$ref": "#/definitions/LinkedServiceReference" + } + }, + "required": [ + "folderPath", + "maxConcurrency", + "linkedService" + ] + } + }, + "required": [ + "typeProperties" + ] + }, + "BlobEventsTrigger": { + "description": "Trigger that runs every time a Blob event occurs.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/MultiplePipelineTrigger" + } + ], + "properties": { + "typeProperties": { + "description": "Blob Events Trigger properties.", + "x-ms-client-flatten": true, + "properties": { + "blobPathBeginsWith": { + "description": "The blob path must begin with the pattern provided for trigger to fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the december folder under the records container. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith.", + "type": "string" + }, + "blobPathEndsWith": { + "description": "The blob path must end with the pattern provided for trigger to fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith.", + "type": "string" + }, + "ignoreEmptyBlobs": { + "description": "If set to true, blobs with zero bytes will be ignored.", + "type": "boolean" + }, + "events": { + "description": "The type of events that cause this trigger to fire.", + "$ref": "#/definitions/BlobEventTypes" + }, + "scope": { + "description": "The ARM resource ID of the Storage Account.", + "type": "string" + } + }, + "required": [ + "events", + "scope" + ] + } + }, + "required": [ + "typeProperties" + ] + }, + "BlobEventTypes": { + "type": "array", + "items": { + "type": "string", + "enum": [ + "Microsoft.Storage.BlobCreated", + "Microsoft.Storage.BlobDeleted" + ], + "x-ms-enum": { + "name": "BlobEventTypes", + "modelAsString": true + } + }, + "description": "Blob event types." 
+ }, + "TumblingWindowTrigger": { + "description": "Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past).", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/Trigger" + } + ], + "properties": { + "pipeline": { + "$ref": "#/definitions/TriggerPipelineReference", + "description": "Pipeline for which runs are created when an event is fired for trigger window that is ready." + }, + "typeProperties": { + "description": "Tumbling Window Trigger properties.", + "x-ms-client-flatten": true, + "properties": { + "frequency": { + "$ref": "#/definitions/TumblingWindowFrequency", + "description": "The frequency of the time windows." + }, + "interval": { + "type": "integer", + "format": "int32", + "description": "The interval of the time windows. The minimum interval allowed is 15 Minutes." + }, + "startTime": { + "type": "string", + "format": "date-time", + "description": "The start time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported." + }, + "endTime": { + "type": "string", + "format": "date-time", + "description": "The end time for the time period for the trigger during which events are fired for windows that are ready. Only UTC time is currently supported." + }, + "delay": { + "type": "object", + "description": "Specifies how long the trigger waits past due time before triggering a new run. It doesn't alter window start and end time. The default is 0. Type: string (or Expression with resultType string), pattern: ((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))." + }, + "maxConcurrency": { + "description": "The max number of parallel time windows (ready for execution) for which a new run is triggered.", + "type": "integer", + "minimum": 1, + "maximum": 50 + }, + "retryPolicy": { + "$ref": "#/definitions/RetryPolicy", + "description": "Retry policy that will be applied for failed pipeline runs." + }, + "dependsOn": { + "type": "array", + "description": "Triggers that this trigger depends on. Only tumbling window triggers are supported.", + "items": { + "$ref": "#/definitions/DependencyReference" + } + } + }, + "required": [ + "frequency", + "interval", + "startTime", + "maxConcurrency" + ] + } + }, + "required": [ + "pipeline", + "typeProperties" + ] + }, + "TumblingWindowFrequency": { + "description": "Enumerates possible frequency option for the tumbling window trigger.", + "type": "string", + "enum": [ + "Minute", + "Hour" + ], + "x-ms-enum": { + "name": "TumblingWindowFrequency", + "modelAsString": true + } + }, + "RetryPolicy": { + "description": "Execution policy for an activity.", + "properties": { + "count": { + "type": "object", + "description": "Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with resultType integer), minimum: 0." + }, + "intervalInSeconds": { + "description": "Interval between retries in seconds. 
Default is 30.", + "type": "integer", + "minimum": 30, + "maximum": 86400 + } + } + }, + "TriggerReference": { + "description": "Trigger reference type.", + "type": "object", + "properties": { + "type": { + "description": "Trigger reference type.", + "type": "string", + "enum": [ + "TriggerReference" + ] + }, + "referenceName": { + "description": "Reference trigger name.", + "type": "string" + } + }, + "required": [ + "type", + "referenceName" + ] + }, + "DependencyReference": { + "description": "Referenced dependency.", + "type": "object", + "discriminator": "type", + "properties": { + "type": { + "description": "The type of dependency reference.", + "type": "string" + } + }, + "required": [ + "type" + ] + }, + "TriggerDependencyReference": { + "description": "Trigger referenced dependency.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/DependencyReference" + } + ], + "properties": { + "referenceTrigger": { + "description": "Referenced trigger.", + "$ref": "#/definitions/TriggerReference" + } + }, + "required": [ + "referenceTrigger" + ] + }, + "TumblingWindowTriggerDependencyReference": { + "description": "Referenced tumbling window trigger dependency.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/TriggerDependencyReference" + } + ], + "properties": { + "offset": { + "description": "Timespan applied to the start time of a tumbling window when evaluating dependency.", + "type": "string", + "pattern": "-?((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))", + "minLength": 8, + "maxLength": 15 + }, + "size": { + "description": "The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used.", + "type": "string", + "pattern": "((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))", + "minLength": 8, + "maxLength": 15 + } + } + }, + "SelfDependencyTumblingWindowTriggerReference": { + "description": "Self referenced tumbling window trigger dependency.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/DependencyReference" + } + ], + "properties": { + "offset": { + "description": "Timespan applied to the start time of a tumbling window when evaluating dependency.", + "type": "string", + "pattern": "-((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))", + "minLength": 8, + "maxLength": 15 + }, + "size": { + "description": "The size of the window when evaluating the dependency. If undefined the frequency of the tumbling window will be used.", + "type": "string", + "pattern": "((\\d+)\\.)?(\\d\\d):(60|([0-5][0-9])):(60|([0-5][0-9]))", + "minLength": 8, + "maxLength": 15 + } + }, + "required": [ + "offset" + ] + }, + "RerunTumblingWindowTrigger": { + "description": "Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/Trigger" + } + ], + "properties": { + "typeProperties": { + "description": "Rerun Trigger properties.", + "x-ms-client-flatten": true, + "properties": { + "parentTrigger": { + "type": "object", + "description": "The parent trigger reference." + }, + "requestedStartTime": { + "type": "string", + "format": "date-time", + "description": "The start time for the time period for which restatement is initiated. Only UTC time is currently supported." + }, + "requestedEndTime": { + "type": "string", + "format": "date-time", + "description": "The end time for the time period for which restatement is initiated. 
Only UTC time is currently supported." + }, + "rerunConcurrency": { + "description": "The max number of parallel time windows (ready for execution) for which a rerun is triggered.", + "type": "integer", + "minimum": 1, + "maximum": 50 + } + }, + "required": [ + "parentTrigger", + "requestedStartTime", + "requestedEndTime", + "rerunConcurrency" + ] + } + }, + "required": [ + "typeProperties" + ] + }, + "ChainingTrigger": { + "description": "Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/Trigger" + } + ], + "properties": { + "pipeline": { + "$ref": "./definitions/TriggerPipelineReference", + "description": "Pipeline for which runs are created when all upstream pipelines complete successfully." + }, + "typeProperties": { + "description": "Chaining Trigger properties.", + "x-ms-client-flatten": true, + "properties": { + "dependsOn": { + "type": "array", + "items": { + "$ref": "./definitions/PipelineReference" + }, + "description": "Upstream Pipelines." + }, + "runDimension": { + "description": "Run Dimension property that needs to be emitted by upstream pipelines.", + "type": "string" + } + }, + "required": [ + "runDimension", + "dependsOn" + ] + } + }, + "required": [ + "pipeline", + "typeProperties" + ] + }, + "IntegrationRuntimeListResponse": { + "description": "A list of integration runtime resources.", + "type": "object", + "required": [ + "value" + ], + "properties": { + "value": { + "type": "array", + "description": "List of integration runtimes.", + "items": { + "$ref": "#/definitions/IntegrationRuntimeResource" + } + }, + "nextLink": { + "description": "The link to the next page of results, if any remaining results exist.", + "type": "string" + } + } + }, + "IntegrationRuntimeReference": { + "description": "Integration runtime reference type.", + "properties": { + "type": { + "type": "string", + "description": "Type of integration runtime.", + "enum": [ + "IntegrationRuntimeReference" + ] + }, + "referenceName": { + "type": "string", + "description": "Reference integration runtime name." + }, + "parameters": { + "$ref": "./definitions/ParameterValueSpecification", + "description": "Arguments for integration runtime." + } + }, + "required": [ + "type", + "referenceName" + ] + }, + "ParameterValueSpecification": { + "description": "An object mapping parameter names to argument values.", + "type": "object", + "additionalProperties": { + "type": "object" + } + }, + "IntegrationRuntimeResource": { + "description": "Integration runtime resource type.", + "allOf": [ + { + "$ref": "#/definitions/SubResource" + } + ], + "properties": { + "properties": { + "$ref": "./definitions/IntegrationRuntime", + "description": "Integration runtime properties." + } + }, + "required": [ + "properties" + ] + }, + "IntegrationRuntimeStatusResponse": { + "description": "Integration runtime status response.", + "properties": { + "name": { + "type": "string", + "description": "The integration runtime name.", + "readOnly": true + }, + "properties": { + "$ref": "./definitions/IntegrationRuntimeStatus", + "description": "Integration runtime properties." 
+ } + }, + "required": [ + "properties" + ] + }, + "IntegrationRuntimeStatusListResponse": { + "description": "A list of integration runtime status.", + "type": "object", + "required": [ + "value" + ], + "properties": { + "value": { + "type": "array", + "description": "List of integration runtime status.", + "items": { + "$ref": "#/definitions/IntegrationRuntimeStatusResponse" + } + }, + "nextLink": { + "description": "The link to the next page of results, if any remaining results exist.", + "type": "string" + } + } + }, + "UpdateIntegrationRuntimeRequest": { + "description": "Update integration runtime request.", + "type": "object", + "properties": { + "autoUpdate": { + "description": "Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.", + "$ref": "#/definitions/IntegrationRuntimeAutoUpdate" + }, + "updateDelayOffset": { + "description": "The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen at that time.", + "type": "string" + } + } + }, + "IntegrationRuntime": { + "description": "Azure Data Factory nested object which serves as a compute resource for activities.", + "type": "object", + "discriminator": "type", + "properties": { + "type": { + "description": "Type of integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeType" + }, + "description": { + "description": "Integration runtime description.", + "type": "string" + } + }, + "additionalProperties": { + "type": "object" + }, + "required": [ + "type" + ] + }, + "IntegrationRuntimeType": { + "description": "The type of integration runtime.", + "type": "string", + "enum": [ + "Managed", + "SelfHosted" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeType", + "modelAsString": true + } + }, + "ManagedIntegrationRuntime": { + "x-ms-discriminator-value": "Managed", + "description": "Managed integration runtime, including managed elastic and managed dedicated integration runtimes.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/IntegrationRuntime" + } + ], + "properties": { + "state": { + "description": "Integration runtime state, only valid for managed dedicated integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeState", + "readOnly": true + }, + "typeProperties": { + "description": "Managed integration runtime properties.", + "x-ms-client-flatten": true, + "$ref": "#/definitions/ManagedIntegrationRuntimeTypeProperties" + } + }, + "required": [ + "typeProperties" + ] + }, + "ManagedIntegrationRuntimeTypeProperties": { + "description": "Managed integration runtime type properties.", + "type": "object", + "properties": { + "computeProperties": { + "description": "The compute resource for managed integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeComputeProperties" + }, + "ssisProperties": { + "description": "SSIS properties for managed integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeSsisProperties" + } + } + }, + "IntegrationRuntimeComputeProperties": { + "description": "The compute resource properties for managed integration runtime.", + "type": "object", + "properties": { + "location": { + "description": "The location for managed integration runtime. 
The supported regions can be found at https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities", + "type": "string" + }, + "nodeSize": { + "description": "The node size requirement for the managed integration runtime.", + "type": "string" + }, + "numberOfNodes": { + "description": "The required number of nodes for managed integration runtime.", + "type": "integer", + "minimum": 1 + }, + "maxParallelExecutionsPerNode": { + "description": "Maximum parallel executions count per node for managed integration runtime.", + "type": "integer", + "minimum": 1 + }, + "dataFlowProperties": { + "description": "Data flow properties for managed integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeDataFlowProperties" + }, + "vNetProperties": { + "description": "VNet properties for managed integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeVNetProperties" + } + }, + "additionalProperties": { + "type": "object" + } + }, + "IntegrationRuntimeDataFlowProperties": { + "description": "Data flow properties for managed integration runtime.", + "type": "object", + "properties": { + "computeType": { + "description": "Compute type of the cluster which will execute data flow job.", + "type": "string", + "enum": [ + "General", + "MemoryOptimized", + "ComputeOptimized" + ], + "x-ms-enum": { + "name": "DataFlowComputeType", + "modelAsString": true + } + }, + "coreCount": { + "description": "Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.", + "type": "integer" + }, + "timeToLive": { + "description": "Time to live (in minutes) setting of the cluster which will execute data flow job.", + "type": "integer", + "minimum": 0 + } + }, + "additionalProperties": { + "type": "object" + } + }, + "IntegrationRuntimeVNetProperties": { + "description": "VNet properties for managed integration runtime.", + "type": "object", + "properties": { + "vNetId": { + "description": "The ID of the VNet that this integration runtime will join.", + "type": "string" + }, + "subnet": { + "description": "The name of the subnet this integration runtime will join.", + "type": "string" + }, + "publicIPs": { + "description": "Resource IDs of the public IP addresses that this integration runtime will use.", + "type": "array", + "items": { + "type": "string", + "description": "The ID of the public IP address." 
+ } + } + }, + "additionalProperties": { + "type": "object" + } + }, + "IntegrationRuntimeSsisProperties": { + "description": "SSIS properties for managed integration runtime.", + "type": "object", + "properties": { + "catalogInfo": { + "description": "Catalog information for managed dedicated integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeSsisCatalogInfo" + }, + "licenseType": { + "description": "License type for bringing your own license scenario.", + "type": "string", + "enum": [ + "BasePrice", + "LicenseIncluded" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeLicenseType", + "modelAsString": true + } + }, + "customSetupScriptProperties": { + "description": "Custom setup script properties for a managed dedicated integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeCustomSetupScriptProperties" + }, + "dataProxyProperties": { + "description": "Data proxy properties for a managed dedicated integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeDataProxyProperties" + }, + "edition": { + "description": "The edition for the SSIS Integration Runtime.", + "type": "string", + "enum": [ + "Standard", + "Enterprise" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeEdition", + "modelAsString": true + } + }, + "expressCustomSetupProperties": { + "description": "Custom setup without script properties for an SSIS integration runtime.", + "type": "array", + "items": { + "$ref": "#/definitions/CustomSetupBase" + } + }, + "packageStores": { + "description": "Package stores for the SSIS Integration Runtime.", + "type": "array", + "items": { + "$ref": "#/definitions/PackageStore" + } + } + }, + "additionalProperties": { + "type": "object" + } + }, + "SecureString": { + "x-ms-discriminator-value": "SecureString", + "description": "Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List API calls.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/SecretBase" + } + ], + "properties": { + "value": { + "type": "string", + "description": "Value of secure string." + } + }, + "required": [ + "value" + ] + }, + "SecretBase": { + "description": "The base definition of a secret type.", + "discriminator": "type", + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "Type of the secret." + } + }, + "required": [ + "type" + ] + }, + "IntegrationRuntimeSsisCatalogInfo": { + "description": "Catalog information for managed dedicated integration runtime.", + "type": "object", + "properties": { + "catalogServerEndpoint": { + "description": "The catalog database server URL.", + "type": "string" + }, + "catalogAdminUserName": { + "description": "The administrator user name of catalog database.", + "type": "string", + "minLength": 1, + "maxLength": 128 + }, + "catalogAdminPassword": { + "description": "The password of the administrator user account of the catalog database.", + "$ref": "#/definitions/SecureString" + }, + "catalogPricingTier": { + "description": "The pricing tier for the catalog database. 
The valid values can be found at https://azure.microsoft.com/en-us/pricing/details/sql-database/", + "type": "string", + "enum": [ + "Basic", + "Standard", + "Premium", + "PremiumRS" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeSsisCatalogPricingTier", + "modelAsString": true + } + } + }, + "additionalProperties": { + "type": "object" + } + }, + "IntegrationRuntimeCustomSetupScriptProperties": { + "description": "Custom setup script properties for a managed dedicated integration runtime.", + "type": "object", + "properties": { + "blobContainerUri": { + "description": "The URI of the Azure blob container that contains the custom setup script.", + "type": "string" + }, + "sasToken": { + "description": "The SAS token of the Azure blob container.", + "$ref": "#/definitions/SecureString" + } + } + }, + "IntegrationRuntimeDataProxyProperties": { + "description": "Data proxy properties for a managed dedicated integration runtime.", + "type": "object", + "properties": { + "connectVia": { + "description": "The self-hosted integration runtime reference.", + "$ref": "#/definitions/EntityReference" + }, + "stagingLinkedService": { + "description": "The staging linked service reference.", + "$ref": "#/definitions/EntityReference" + }, + "path": { + "description": "The path to contain the staged data in the Blob storage.", + "type": "string" + } + } + }, + "PackageStore": { + "description": "Package store for the SSIS integration runtime.", + "type": "object", + "properties": { + "name": { + "description": "The name of the package store.", + "type": "string" + }, + "packageStoreLinkedService": { + "description": "The package store linked service reference.", + "$ref": "#/definitions/EntityReference" + } + }, + "required": [ + "name", + "packageStoreLinkedService" + ] + }, + "CustomSetupBase": { + "description": "The base definition of the custom setup.", + "type": "object", + "discriminator": "type", + "properties": { + "type": { + "description": "The type of custom setup.", + "type": "string" + } + }, + "required": [ + "type" + ] + }, + "CmdkeySetup": { + "description": "The custom setup of running cmdkey commands.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/CustomSetupBase" + } + ], + "properties": { + "typeProperties": { + "description": "Cmdkey command custom setup type properties.", + "x-ms-client-flatten": true, + "$ref": "#/definitions/CmdkeySetupTypeProperties" + } + }, + "required": [ + "typeProperties" + ] + }, + "CmdkeySetupTypeProperties": { + "description": "Cmdkey command custom setup type properties.", + "type": "object", + "properties": { + "targetName": { + "description": "The server name of data source access.", + "type": "object" + }, + "userName": { + "description": "The user name of data source access.", + "type": "object" + }, + "password": { + "description": "The password of data source access.", + "$ref": "#/definitions/SecretBase" + } + }, + "required": [ + "targetName", + "userName", + "password" + ] + }, + "EnvironmentVariableSetup": { + "description": "The custom setup of setting environment variable.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/CustomSetupBase" + } + ], + "properties": { + "typeProperties": { + "description": "Add environment variable type properties.", + "x-ms-client-flatten": true, + "$ref": "#/definitions/EnvironmentVariableSetupTypeProperties" + } + }, + "required": [ + "typeProperties" + ] + }, + "EnvironmentVariableSetupTypeProperties": { + "description": "Environment variable custom setup type 
properties.", + "type": "object", + "properties": { + "variableName": { + "description": "The name of the environment variable.", + "type": "string" + }, + "variableValue": { + "description": "The value of the environment variable.", + "type": "string" + } + }, + "required": [ + "variableName", + "variableValue" + ] + }, + "ComponentSetup": { + "description": "The custom setup of installing 3rd party components.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/CustomSetupBase" + } + ], + "properties": { + "typeProperties": { + "description": "Install 3rd party component type properties.", + "x-ms-client-flatten": true, + "$ref": "#/definitions/LicensedComponentSetupTypeProperties" + } + }, + "required": [ + "typeProperties" + ] + }, + "LicensedComponentSetupTypeProperties": { + "description": "Installation of licensed component setup type properties.", + "type": "object", + "properties": { + "componentName": { + "description": "The name of the 3rd party component.", + "type": "string" + }, + "licenseKey": { + "description": "The license key to activate the component.", + "$ref": "./definitions/SecretBase" + } + }, + "required": [ + "componentName" + ] + }, + "EntityReference": { + "description": "The entity reference.", + "type": "object", + "properties": { + "type": { + "description": "The type of this referenced entity.", + "type": "string", + "enum": [ + "IntegrationRuntimeReference", + "LinkedServiceReference" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeEntityReferenceType", + "modelAsString": true + } + }, + "referenceName": { + "description": "The name of this referenced entity.", + "type": "string" + } + } + }, + "SelfHostedIntegrationRuntime": { + "x-ms-discriminator-value": "SelfHosted", + "description": "Self-hosted integration runtime.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/IntegrationRuntime" + } + ], + "properties": { + "typeProperties": { + "description": "When this property is not null, means this is a linked integration runtime. The property is used to access original integration runtime.", + "x-ms-client-flatten": true, + "$ref": "#/definitions/SelfHostedIntegrationRuntimeTypeProperties" + } + } + }, + "SelfHostedIntegrationRuntimeTypeProperties": { + "description": "The self-hosted integration runtime properties.", + "type": "object", + "properties": { + "linkedInfo": { + "$ref": "#/definitions/LinkedIntegrationRuntimeType" + } + } + }, + "LinkedIntegrationRuntimeType": { + "description": "The base definition of a linked integration runtime.", + "discriminator": "authorizationType", + "type": "object", + "properties": { + "authorizationType": { + "type": "string", + "description": "The authorization type for integration runtime sharing." 
+ } + }, + "required": [ + "authorizationType" + ] + }, + "LinkedIntegrationRuntimeKeyAuthorization": { + "x-ms-discriminator-value": "Key", + "description": "The key authorization type integration runtime.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/LinkedIntegrationRuntimeType" + } + ], + "properties": { + "key": { + "description": "The key used for authorization.", + "$ref": "#/definitions/SecureString" + } + }, + "required": [ + "key" + ] + }, + "LinkedIntegrationRuntimeRbacAuthorization": { + "x-ms-discriminator-value": "RBAC", + "description": "The role based access control (RBAC) authorization type integration runtime.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/LinkedIntegrationRuntimeType" + } + ], + "properties": { + "resourceId": { + "description": "The resource identifier of the integration runtime to be shared.", + "type": "string" + } + }, + "required": [ + "resourceId" + ] + }, + "IntegrationRuntimeStatus": { + "description": "Integration runtime status.", + "type": "object", + "discriminator": "type", + "properties": { + "type": { + "description": "Type of integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeType" + }, + "dataFactoryName": { + "description": "The name of the data factory to which the integration runtime belongs.", + "type": "string", + "readOnly": true + }, + "state": { + "description": "The state of integration runtime.", + "$ref": "#/definitions/IntegrationRuntimeState", + "readOnly": true + } + }, + "additionalProperties": { + "type": "object" + }, + "required": [ + "type" + ] + }, + "IntegrationRuntimeState": { + "description": "The state of integration runtime.", + "type": "string", + "readOnly": true, + "enum": [ + "Initial", + "Stopped", + "Started", + "Starting", + "Stopping", + "NeedRegistration", + "Online", + "Limited", + "Offline", + "AccessDenied" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeState", + "modelAsString": true + } + }, + "ManagedIntegrationRuntimeStatus": { + "x-ms-discriminator-value": "Managed", + "description": "Managed integration runtime status.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/IntegrationRuntimeStatus" + } + ], + "properties": { + "typeProperties": { + "description": "Managed integration runtime status type properties.", + "x-ms-client-flatten": true, + "$ref": "#/definitions/ManagedIntegrationRuntimeStatusTypeProperties" + } + }, + "required": [ + "typeProperties" + ] + }, + "ManagedIntegrationRuntimeStatusTypeProperties": { + "description": "Managed integration runtime status type properties.", + "type": "object", + "properties": { + "createTime": { + "description": "The time at which the integration runtime was created, in ISO8601 format.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "nodes": { + "description": "The list of nodes for managed integration runtime.", + "type": "array", + "items": { + "$ref": "#/definitions/ManagedIntegrationRuntimeNode" + }, + "readOnly": true + }, + "otherErrors": { + "description": "The errors that occurred on this integration runtime.", + "type": "array", + "items": { + "$ref": "#/definitions/ManagedIntegrationRuntimeError" + }, + "readOnly": true + }, + "lastOperation": { + "description": "The last operation result that occurred on this integration runtime.", + "$ref": "#/definitions/ManagedIntegrationRuntimeOperationResult", + "readOnly": true + } + } + }, + "ManagedIntegrationRuntimeOperationResult": { + "description": "Properties of managed integration runtime operation 
result.", + "properties": { + "type": { + "description": "The operation type. Could be start or stop.", + "type": "string", + "readOnly": true + }, + "startTime": { + "description": "The start time of the operation.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "result": { + "description": "The operation result.", + "type": "string", + "readOnly": true + }, + "errorCode": { + "description": "The error code.", + "type": "string", + "readOnly": true + }, + "parameters": { + "description": "Managed integration runtime error parameters.", + "type": "array", + "items": { + "type": "string", + "description": "Error message parameters." + }, + "readOnly": true + }, + "activityId": { + "description": "The activity id for the operation request.", + "type": "string", + "readOnly": true + } + }, + "additionalProperties": { + "type": "object" + } + }, + "ManagedIntegrationRuntimeNode": { + "description": "Properties of integration runtime node.", + "properties": { + "nodeId": { + "description": "The managed integration runtime node id.", + "type": "string", + "readOnly": true + }, + "status": { + "description": "The managed integration runtime node status.", + "type": "string", + "readOnly": true, + "enum": [ + "Starting", + "Available", + "Recycling", + "Unavailable" + ], + "x-ms-enum": { + "name": "ManagedIntegrationRuntimeNodeStatus", + "modelAsString": true + } + }, + "errors": { + "description": "The errors that occurred on this integration runtime node.", + "type": "array", + "items": { + "$ref": "#/definitions/ManagedIntegrationRuntimeError" + } + } + }, + "additionalProperties": { + "type": "object" + } + }, + "ManagedIntegrationRuntimeError": { + "description": "Error definition for managed integration runtime.", + "properties": { + "time": { + "description": "The time when the error occurred.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "code": { + "description": "Error code.", + "type": "string", + "readOnly": true + }, + "parameters": { + "description": "Managed integration runtime error parameters.", + "type": "array", + "items": { + "type": "string", + "description": "Error message parameters." 
+ }, + "readOnly": true + }, + "message": { + "description": "Error message.", + "type": "string", + "readOnly": true + } + }, + "additionalProperties": { + "type": "object" + } + }, + "SelfHostedIntegrationRuntimeStatus": { + "x-ms-discriminator-value": "SelfHosted", + "description": "Self-hosted integration runtime status.", + "type": "object", + "allOf": [ + { + "$ref": "#/definitions/IntegrationRuntimeStatus" + } + ], + "properties": { + "typeProperties": { + "description": "Self-hosted integration runtime status type properties.", + "x-ms-client-flatten": true, + "$ref": "#/definitions/SelfHostedIntegrationRuntimeStatusTypeProperties" + } + }, + "required": [ + "typeProperties" + ] + }, + "SelfHostedIntegrationRuntimeStatusTypeProperties": { + "description": "Self-hosted integration runtime status type properties.", + "type": "object", + "properties": { + "createTime": { + "description": "The time at which the integration runtime was created, in ISO8601 format.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "taskQueueId": { + "description": "The task queue id of the integration runtime.", + "type": "string", + "readOnly": true + }, + "internalChannelEncryption": { + "description": "It is used to set the encryption mode for node-node communication channel (when more than 2 self-hosted integration runtime nodes exist).", + "type": "string", + "readOnly": true, + "enum": [ + "NotSet", + "SslEncrypted", + "NotEncrypted" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeInternalChannelEncryptionMode", + "modelAsString": true + } + }, + "version": { + "description": "Version of the integration runtime.", + "type": "string", + "readOnly": true + }, + "nodes": { + "description": "The list of nodes for this integration runtime.", + "type": "array", + "items": { + "$ref": "#/definitions/SelfHostedIntegrationRuntimeNode" + } + }, + "scheduledUpdateDate": { + "description": "The date at which the integration runtime will be scheduled to update, in ISO8601 format.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "updateDelayOffset": { + "description": "The time in the date scheduled by service to update the integration runtime, e.g., PT03H is 3 hours", + "type": "string", + "readOnly": true + }, + "localTimeZoneOffset": { + "description": "The local time zone offset in hours.", + "type": "string", + "readOnly": true + }, + "capabilities": { + "description": "Object with additional information about integration runtime capabilities.", + "type": "object", + "readOnly": true, + "additionalProperties": { + "type": "string" + } + }, + "serviceUrls": { + "description": "The URLs for the services used in integration runtime backend service.", + "type": "array", + "items": { + "description": "The service URL", + "type": "string" + }, + "readOnly": true + }, + "autoUpdate": { + "description": "Whether Self-hosted integration runtime auto update has been turned on.", + "$ref": "#/definitions/IntegrationRuntimeAutoUpdate", + "readOnly": true + }, + "versionStatus": { + "description": "Status of the integration runtime version.", + "type": "string", + "readOnly": true + }, + "links": { + "description": "The list of linked integration runtimes that are created to share with this integration runtime.", + "type": "array", + "items": { + "$ref": "#/definitions/LinkedIntegrationRuntime" + } + }, + "pushedVersion": { + "description": "The version that the integration runtime is going to update to.", + "type": "string", + "readOnly": true + }, + "latestVersion": { + 
"description": "The latest version on download center.", + "type": "string", + "readOnly": true + }, + "autoUpdateETA": { + "description": "The estimated time when the self-hosted integration runtime will be updated.", + "type": "string", + "format": "date-time", + "readOnly": true + } + } + }, + "IntegrationRuntimeAutoUpdate": { + "description": "The state of integration runtime auto update.", + "type": "string", + "enum": [ + "On", + "Off" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeAutoUpdate", + "modelAsString": true + } + }, + "LinkedIntegrationRuntime": { + "description": "The linked integration runtime information.", + "type": "object", + "properties": { + "name": { + "description": "The name of the linked integration runtime.", + "type": "string", + "readOnly": true + }, + "subscriptionId": { + "description": "The subscription ID for which the linked integration runtime belong to.", + "type": "string", + "readOnly": true + }, + "dataFactoryName": { + "description": "The name of the data factory for which the linked integration runtime belong to.", + "type": "string", + "readOnly": true + }, + "dataFactoryLocation": { + "description": "The location of the data factory for which the linked integration runtime belong to.", + "type": "string", + "readOnly": true + }, + "createTime": { + "description": "The creating time of the linked integration runtime.", + "type": "string", + "format": "date-time", + "readOnly": true + } + } + }, + "SelfHostedIntegrationRuntimeNode": { + "description": "Properties of Self-hosted integration runtime node.", + "properties": { + "nodeName": { + "description": "Name of the integration runtime node.", + "type": "string", + "readOnly": true + }, + "machineName": { + "description": "Machine name of the integration runtime node.", + "type": "string", + "readOnly": true + }, + "hostServiceUri": { + "description": "URI for the host machine of the integration runtime.", + "type": "string", + "readOnly": true + }, + "status": { + "description": "Status of the integration runtime node.", + "type": "string", + "readOnly": true, + "enum": [ + "NeedRegistration", + "Online", + "Limited", + "Offline", + "Upgrading", + "Initializing", + "InitializeFailed" + ], + "x-ms-enum": { + "name": "SelfHostedIntegrationRuntimeNodeStatus", + "modelAsString": true + } + }, + "capabilities": { + "description": "The integration runtime capabilities dictionary", + "type": "object", + "readOnly": true, + "additionalProperties": { + "type": "string" + } + }, + "versionStatus": { + "description": "Status of the integration runtime node version.", + "type": "string", + "readOnly": true + }, + "version": { + "description": "Version of the integration runtime node.", + "type": "string", + "readOnly": true + }, + "registerTime": { + "description": "The time at which the integration runtime node was registered in ISO8601 format.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "lastConnectTime": { + "description": "The most recent time at which the integration runtime was connected in ISO8601 format.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "expiryTime": { + "description": "The time at which the integration runtime will expire in ISO8601 format.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "lastStartTime": { + "description": "The time the node last started up.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "lastStopTime": { + "description": "The integration runtime node last 
stop time.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "lastUpdateResult": { + "description": "The result of the last integration runtime node update.", + "type": "string", + "enum": [ + "None", + "Succeed", + "Fail" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeUpdateResult", + "modelAsString": true + }, + "readOnly": true + }, + "lastStartUpdateTime": { + "description": "The last time for the integration runtime node update start.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "lastEndUpdateTime": { + "description": "The last time for the integration runtime node update end.", + "type": "string", + "format": "date-time", + "readOnly": true + }, + "isActiveDispatcher": { + "description": "Indicates whether this node is the active dispatcher for integration runtime requests.", + "type": "boolean", + "readOnly": true + }, + "concurrentJobsLimit": { + "description": "Maximum concurrent jobs on the integration runtime node.", + "type": "integer", + "readOnly": true + }, + "maxConcurrentJobs": { + "description": "The maximum concurrent jobs in this integration runtime.", + "type": "integer", + "readOnly": true + } + }, + "additionalProperties": { + "type": "object" + } + }, + "IntegrationRuntimeConnectionInfo": { + "description": "Connection information for encrypting the on-premises data source credentials.", + "type": "object", + "properties": { + "serviceToken": { + "description": "The token generated in service. Callers use this token to authenticate to integration runtime.", + "type": "string", + "readOnly": true + }, + "identityCertThumbprint": { + "description": "The integration runtime SSL certificate thumbprint. Click-Once application uses it to do server validation.", + "type": "string", + "readOnly": true + }, + "hostServiceUri": { + "description": "The on-premises integration runtime host URL.", + "type": "string", + "readOnly": true + }, + "version": { + "description": "The integration runtime version.", + "type": "string", + "readOnly": true + }, + "publicKey": { + "description": "The public key for encrypting a credential when transferring the credential to the integration runtime.", + "type": "string", + "readOnly": true + }, + "isIdentityCertExprired": { + "description": "Whether the identity certificate is expired.", + "type": "boolean", + "readOnly": true + } + }, + "additionalProperties": { + "type": "object" + } + }, + "IntegrationRuntimeRegenerateKeyParameters": { + "description": "Parameters to regenerate the authentication key.", + "type": "object", + "properties": { + "keyName": { + "description": "The name of the authentication key to regenerate.", + "type": "string", + "enum": [ + "authKey1", + "authKey2" + ], + "x-ms-enum": { + "name": "IntegrationRuntimeAuthKeyName", + "modelAsString": true + } + } + } + }, + "IntegrationRuntimeAuthKeys": { + "description": "The integration runtime authentication keys.", + "type": "object", + "properties": { + "authKey1": { + "type": "string", + "description": "The primary integration runtime authentication key." + }, + "authKey2": { + "type": "string", + "description": "The secondary integration runtime authentication key." + } + } + }, + "IntegrationRuntimeMonitoringData": { + "description": "Get monitoring data response.", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Integration runtime name." 
+ }, + "nodes": { + "type": "array", + "description": "Integration runtime node monitoring data.", + "items": { + "$ref": "#/definitions/IntegrationRuntimeNodeMonitoringData" + } + } + } + }, + "IntegrationRuntimeNodeMonitoringData": { + "description": "Monitoring data for integration runtime node.", + "type": "object", + "properties": { + "nodeName": { + "description": "Name of the integration runtime node.", + "type": "string", + "readOnly": true + }, + "availableMemoryInMB": { + "description": "Available memory (MB) on the integration runtime node.", + "type": "integer", + "readOnly": true + }, + "cpuUtilization": { + "description": "CPU percentage on the integration runtime node.", + "type": "integer", + "readOnly": true + }, + "concurrentJobsLimit": { + "description": "Maximum concurrent jobs on the integration runtime node.", + "type": "integer", + "readOnly": true + }, + "concurrentJobsRunning": { + "description": "The number of jobs currently running on the integration runtime node.", + "type": "integer", + "readOnly": true + }, + "maxConcurrentJobs": { + "description": "The maximum concurrent jobs in this integration runtime.", + "type": "integer", + "readOnly": true + }, + "sentBytes": { + "description": "Sent bytes on the integration runtime node.", + "type": "number", + "readOnly": true + }, + "receivedBytes": { + "description": "Received bytes on the integration runtime node.", + "type": "number", + "readOnly": true + } + }, + "additionalProperties": { + "type": "object" + } + }, + "IntegrationRuntimeNodeIpAddress": { + "type": "object", + "description": "The IP address of self-hosted integration runtime node.", + "properties": { + "ipAddress": { + "description": "The IP address of self-hosted integration runtime node.", + "type": "string", + "readOnly": true + } + } + }, + "SsisObjectMetadataListResponse": { + "type": "object", + "description": "A list of SSIS object metadata.", + "properties": { + "value": { + "type": "array", + "description": "List of SSIS object metadata.", + "items": { + "$ref": "#/definitions/SsisObjectMetadata" + } + }, + "nextLink": { + "type": "string", + "description": "The link to the next page of results, if any remaining results exist." + } + } + }, + "SsisObjectMetadata": { + "type": "object", + "description": "SSIS object metadata.", + "discriminator": "type", + "properties": { + "type": { + "description": "Type of metadata.", + "$ref": "#/definitions/SsisObjectMetadataType" + }, + "id": { + "type": "integer", + "format": "int64", + "description": "Metadata id." + }, + "name": { + "type": "string", + "description": "Metadata name." + }, + "description": { + "type": "string", + "description": "Metadata description." + } + }, + "required": [ + "type" + ] + }, + "SsisObjectMetadataType": { + "description": "The type of SSIS object metadata.", + "type": "string", + "enum": [ + "Folder", + "Project", + "Package", + "Environment" + ], + "x-ms-enum": { + "name": "SsisObjectMetadataType", + "modelAsString": true + } + }, + "SsisFolder": { + "x-ms-discriminator-value": "Folder", + "type": "object", + "description": "Ssis folder.", + "allOf": [ + { + "$ref": "#/definitions/SsisObjectMetadata" + } + ] + }, + "SsisProject": { + "x-ms-discriminator-value": "Project", + "type": "object", + "description": "Ssis project.", + "allOf": [ + { + "$ref": "#/definitions/SsisObjectMetadata" + } + ], + "properties": { + "folderId": { + "type": "integer", + "format": "int64", + "description": "Folder id which contains project." 
+ }, + "version": { + "type": "integer", + "format": "int64", + "description": "Project version." + }, + "environmentRefs": { + "type": "array", + "description": "Environment reference in project", + "items": { + "$ref": "#/definitions/SsisEnvironmentReference" + } + }, + "parameters": { + "type": "array", + "description": "Parameters in project", + "items": { + "$ref": "#/definitions/SsisParameter" + } + } + } + }, + "SsisPackage": { + "x-ms-discriminator-value": "Package", + "type": "object", + "description": "Ssis Package.", + "allOf": [ + { + "$ref": "#/definitions/SsisObjectMetadata" + } + ], + "properties": { + "folderId": { + "type": "integer", + "format": "int64", + "description": "Folder id which contains package." + }, + "projectVersion": { + "type": "integer", + "format": "int64", + "description": "Project version which contains package." + }, + "projectId": { + "type": "integer", + "format": "int64", + "description": "Project id which contains package." + }, + "parameters": { + "type": "array", + "description": "Parameters in package", + "items": { + "$ref": "#/definitions/SsisParameter" + } + } + } + }, + "SsisEnvironment": { + "x-ms-discriminator-value": "Environment", + "type": "object", + "description": "Ssis environment.", + "allOf": [ + { + "$ref": "#/definitions/SsisObjectMetadata" + } + ], + "properties": { + "folderId": { + "type": "integer", + "format": "int64", + "description": "Folder id which contains environment." + }, + "variables": { + "type": "array", + "description": "Variable in environment", + "items": { + "$ref": "#/definitions/SsisVariable" + } + } + } + }, + "SsisParameter": { + "type": "object", + "description": "Ssis parameter.", + "properties": { + "id": { + "type": "integer", + "format": "int64", + "description": "Parameter id." + }, + "name": { + "type": "string", + "description": "Parameter name." + }, + "description": { + "type": "string", + "description": "Parameter description." + }, + "dataType": { + "type": "string", + "description": "Parameter type." + }, + "required": { + "type": "boolean", + "description": "Whether parameter is required." + }, + "sensitive": { + "type": "boolean", + "description": "Whether parameter is sensitive." + }, + "designDefaultValue": { + "type": "string", + "description": "Design default value of parameter." + }, + "defaultValue": { + "type": "string", + "description": "Default value of parameter." + }, + "sensitiveDefaultValue": { + "type": "string", + "description": "Default sensitive value of parameter." + }, + "valueType": { + "type": "string", + "description": "Parameter value type." + }, + "valueSet": { + "type": "boolean", + "description": "Parameter value set." + }, + "variable": { + "type": "string", + "description": "Parameter reference variable." + } + } + }, + "SsisVariable": { + "type": "object", + "description": "Ssis variable.", + "properties": { + "id": { + "type": "integer", + "format": "int64", + "description": "Variable id." + }, + "name": { + "type": "string", + "description": "Variable name." + }, + "description": { + "type": "string", + "description": "Variable description." + }, + "dataType": { + "type": "string", + "description": "Variable type." + }, + "sensitive": { + "type": "boolean", + "description": "Whether variable is sensitive." + }, + "value": { + "type": "string", + "description": "Variable value." + }, + "sensitiveValue": { + "type": "string", + "description": "Variable sensitive value." 
+ } + } + }, + "SsisEnvironmentReference": { + "type": "object", + "description": "Ssis environment reference.", + "properties": { + "id": { + "type": "integer", + "format": "int64", + "description": "Environment reference id." + }, + "environmentFolderName": { + "type": "string", + "description": "Environment folder name." + }, + "environmentName": { + "type": "string", + "description": "Environment name." + }, + "referenceType": { + "type": "string", + "description": "Reference type" + } + } + }, + "LinkedIntegrationRuntimeRequest": { + "description": "Data factory name for linked integration runtime request.", + "type": "object", + "properties": { + "factoryName": { + "description": "The data factory name for linked integration runtime.", + "type": "string", + "x-ms-client-name": "linkedFactoryName" + } + }, + "required": [ + "factoryName" + ] + }, + "CreateLinkedIntegrationRuntimeRequest": { + "description": "The linked integration runtime information.", + "type": "object", + "properties": { + "name": { + "description": "The name of the linked integration runtime.", + "type": "string" + }, + "subscriptionId": { + "description": "The ID of the subscription that the linked integration runtime belongs to.", + "type": "string" + }, + "dataFactoryName": { + "description": "The name of the data factory that the linked integration runtime belongs to.", + "type": "string" + }, + "dataFactoryLocation": { + "description": "The location of the data factory that the linked integration runtime belongs to.", + "type": "string" + } + } + } + }, + "parameters": { + "subscriptionId": { + "name": "subscriptionId", + "description": "The subscription identifier.", + "in": "path", + "required": true, + "type": "string" + }, + "resourceGroupName": { + "name": "resourceGroupName", + "description": "The resource group name.", + "in": "path", + "required": true, + "type": "string", + "pattern": "^[-\\w\\._\\(\\)]+$", + "minLength": 1, + "maxLength": 90, + "x-ms-parameter-location": "method" + }, + "factoryName": { + "name": "factoryName", + "description": "The factory name.", "in": "path", "required": true, "type": "string", @@ -906,6 +4576,28 @@ "in": "query", "required": true, "type": "string" + }, + "triggerName": { + "name": "triggerName", + "description": "The trigger name.", + "in": "path", + "required": true, + "type": "string", + "pattern": "^[A-Za-z0-9_][^<>*#.%&:\\\\+?/]*$", + "minLength": 1, + "maxLength": 260, + "x-ms-parameter-location": "method" + }, + "integrationRuntimeName": { + "name": "integrationRuntimeName", + "description": "The integration runtime name.", + "in": "path", + "required": true, + "type": "string", + "pattern": "^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$", + "minLength": 3, + "maxLength": 63, + "x-ms-parameter-location": "method" } } } diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Create.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Create.json new file mode 100644 index 000000000..97e23db02 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Create.json @@ -0,0 +1,37 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "ifMatch": null, + "integrationRuntime": { + "properties": { + "type": "SelfHosted", + "description": "A selfhosted integration runtime" + } + }, + "api-version": "2018-06-01" + }, + "responses": { + 
"200": { + "headers": { + "Date": "Wed, 13 Jun 2018 22:20:47 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-writes": "1197", + "x-ms-request-id": "125c07fa-e39a-4541-885f-5019631a5ecd", + "x-ms-correlation-request-id": "125c07fa-e39a-4541-885f-5019631a5ecd" + }, + "body": { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/integrationruntimes/exampleIntegrationRuntime", + "name": "exampleIntegrationRuntime", + "type": "Microsoft.DataFactory/factories/integrationruntimes", + "properties": { + "type": "SelfHosted", + "description": "A selfhosted integration runtime" + }, + "etag": "000046c4-0000-0000-0000-5b2198bf0000" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_CreateLinkedIntegrationRuntime.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_CreateLinkedIntegrationRuntime.json new file mode 100644 index 000000000..bfafc36f7 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_CreateLinkedIntegrationRuntime.json @@ -0,0 +1,90 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "createLinkedIntegrationRuntimeRequest": { + "name": "bfa92911-9fb6-4fbe-8f23-beae87bc1c83", + "subscriptionId": "061774c7-4b5a-4159-a55b-365581830283", + "dataFactoryName": "e9955d6d-56ea-4be3-841c-52a12c1a9981", + "dataFactoryLocation": "West US" + }, + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Fri, 17 Aug 2018 06:31:03 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-writes": "1199", + "x-ms-request-id": "f9a67067-62f3-43ce-b891-2ccb3de4a15f", + "x-ms-correlation-request-id": "f9a67067-62f3-43ce-b891-2ccb3de4a15f" + }, + "body": { + "name": "exampleIntegrationRuntime", + "properties": { + "dataFactoryName": "exampleFactoryName", + "state": "Online", + "type": "SelfHosted", + "typeProperties": { + "autoUpdate": "On", + "taskQueueId": "823da112-f2d9-426b-a0d8-5f361b94f72a", + "version": "3.8.6743.6", + "nodes": [ + { + "nodeName": "Node_1", + "machineName": "YANZHANG-DT", + "hostServiceUri": "https://yanzhang-dt.fareast.corp.microsoft.com:8050/HostServiceRemote.svc/", + "status": "Online", + "capabilities": { + "serviceBusConnected": "True", + "httpsPortEnabled": "True", + "credentialInSync": "True", + "connectedToResourceManager": "True", + "nodeEnabled": "True" + }, + "versionStatus": "UpToDate", + "version": "3.8.6743.6", + "registerTime": "2018-08-17T03:44:55.8012825Z", + "lastConnectTime": "2018-08-17T06:30:46.6262976Z", + "lastStartTime": "2018-08-17T03:45:30.8499851Z", + "lastUpdateResult": "None", + "isActiveDispatcher": true, + "maxConcurrentJobs": 20 + } + ], + "scheduledUpdateDate": "2018-08-20T00:00:00Z", + "updateDelayOffset": "PT19H", + "localTimeZoneOffset": "PT8H", + "serviceUrls": [ + "wu.frontend.int.clouddatahub-int.net", + "*.servicebus.windows.net" + ], + "links": [ + { + "name": "bfa92911-9fb6-4fbe-8f23-beae87bc1c83", + "subscriptionId": "061774c7-4b5a-4159-a55b-365581830283", + "dataFactoryName": "e9955d6d-56ea-4be3-841c-52a12c1a9981", + "dataFactoryLocation": "West US", + "createTime": "2018-08-17T06:31:04.0617928Z" + } + ], + "versionStatus": "UpdateAvailable", + "capabilities": { + 
"serviceBusConnected": "True", + "httpsPortEnabled": "True", + "credentialInSync": "True", + "connectedToResourceManager": "True", + "nodeEnabled": "True" + }, + "pushedVersion": "3.9.6774.1", + "latestVersion": "3.9.6774.1", + "autoUpdateETA": "2018-08-20T19:00:00Z", + "createTime": "2018-08-17T03:43:25.7055573Z" + } + } + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Delete.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Delete.json new file mode 100644 index 000000000..742aefa60 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Delete.json @@ -0,0 +1,13 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "200": {}, + "204": {} + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Get.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Get.json new file mode 100644 index 000000000..55fe8b7f9 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Get.json @@ -0,0 +1,32 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "ifNoneMatch": "15003c4f-0000-0200-0000-5cbe090b0000", + "api-version": "2018-06-01" + }, + "responses": { + "304": {}, + "200": { + "headers": { + "Date": "Mon, 22 Apr 2019 18:33:47 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "11997", + "x-ms-request-id": "ad824326-e577-4a47-a092-2e621c4d59d9", + "x-ms-correlation-request-id": "ad824326-e577-4a47-a092-2e621c4d59d9" + }, + "body": { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/integrationruntimes/exampleIntegrationRuntime", + "name": "exampleIntegrationRuntime", + "type": "Microsoft.DataFactory/factories/integrationruntimes", + "properties": { + "type": "SelfHosted", + "description": "A selfhosted integration runtime" + }, + "etag": "15003c4f-0000-0200-0000-5cbe090b0000" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetConnectionInfo.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetConnectionInfo.json new file mode 100644 index 000000000..85dcb8cce --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetConnectionInfo.json @@ -0,0 +1,28 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Thu, 14 Jun 2018 14:53:21 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "14998", + "x-ms-request-id": "05774db5-e79b-4224-9097-46714b92fbd8", + "x-ms-correlation-request-id": "05774db5-e79b-4224-9097-46714b92fbd8" + }, + "body": { + "version": "3.8.6730.2", + "publicKey": "**********", + "serviceToken": "**********", + "identityCertThumbprint": "**********", + "hostServiceUri": 
"https://yanzhang-dt.fareast.corp.microsoft.com:8050/HostServiceRemote.svc/", + "isIdentityCertExprired": false + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetMonitoringData.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetMonitoringData.json new file mode 100644 index 000000000..a31a69d5f --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetMonitoringData.json @@ -0,0 +1,34 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Thu, 14 Jun 2018 15:27:44 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "14999", + "x-ms-request-id": "39277952-edbe-4336-ae94-f6f42b50b5f7", + "x-ms-correlation-request-id": "39277952-edbe-4336-ae94-f6f42b50b5f7" + }, + "body": { + "name": "exampleIntegrationRuntime", + "nodes": [ + { + "nodeName": "Node_1", + "availableMemoryInMB": 16740, + "cpuUtilization": 15, + "concurrentJobsLimit": 28, + "concurrentJobsRunning": 0, + "sentBytes": 2.6474916934967041, + "receivedBytes": 6.7314233779907227 + } + ] + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetStatus.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetStatus.json new file mode 100644 index 000000000..e7263d9c6 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_GetStatus.json @@ -0,0 +1,71 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Thu, 14 Jun 2018 14:53:22 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "14997", + "x-ms-request-id": "19efe790-074f-4241-ae94-0422935d0f46", + "x-ms-correlation-request-id": "19efe790-074f-4241-ae94-0422935d0f46" + }, + "body": { + "name": "exampleIntegrationRuntime", + "properties": { + "state": "Online", + "type": "SelfHosted", + "typeProperties": { + "autoUpdate": "Off", + "taskQueueId": "1a6296ab-423c-4346-9bcc-85a78c2c0582", + "version": "3.8.6730.2", + "nodes": [ + { + "nodeName": "Node_1", + "machineName": "YANZHANG-DT", + "hostServiceUri": "https://yanzhang-dt.fareast.corp.microsoft.com:8050/HostServiceRemote.svc/", + "status": "Online", + "capabilities": { + "serviceBusConnected": "True", + "httpsPortEnabled": "True", + "credentialInSync": "True", + "connectedToResourceManager": "True", + "nodeEnabled": "True" + }, + "versionStatus": "UpToDate", + "version": "3.8.6730.2", + "registerTime": "2018-06-14T14:51:44.9237069Z", + "lastConnectTime": "2018-06-14T14:52:59.8933313Z", + "lastStartTime": "2018-06-14T14:52:59.8933313Z", + "lastUpdateResult": "None", + "isActiveDispatcher": true, + "maxConcurrentJobs": 56 + } + ], + "updateDelayOffset": "PT3H", + "localTimeZoneOffset": "PT8H", + "serviceUrls": [ + "wu.frontend.int.clouddatahub-int.net", + "*.servicebus.windows.net" + ], + "versionStatus": "UpToDate", + "capabilities": { + "serviceBusConnected": "True", + "httpsPortEnabled": "True", + "credentialInSync": "True", + 
"connectedToResourceManager": "True", + "nodeEnabled": "True" + }, + "latestVersion": "3.7.6711.1", + "createTime": "2018-06-14T09:17:45.1839685Z" + } + } + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_ListAuthKeys.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_ListAuthKeys.json new file mode 100644 index 000000000..9d143cf73 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_ListAuthKeys.json @@ -0,0 +1,24 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Thu, 14 Jun 2018 14:51:00 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "14999", + "x-ms-request-id": "2ea25426-9d1d-49f3-88b6-fb853eb02cb9", + "x-ms-correlation-request-id": "2ea25426-9d1d-49f3-88b6-fb853eb02cb9" + }, + "body": { + "authKey1": "**********", + "authKey2": "**********" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_ListByFactory.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_ListByFactory.json new file mode 100644 index 000000000..716811d7f --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_ListByFactory.json @@ -0,0 +1,34 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "ifNoneMatch": null, + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Wed, 13 Jun 2018 21:33:05 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "14909", + "x-ms-request-id": "f632df92-1140-4e19-9e19-0b1c8cbe393b", + "x-ms-correlation-request-id": "f632df92-1140-4e19-9e19-0b1c8cbe393b" + }, + "body": { + "value": [ + { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/integrationruntimes/exampleIntegrationRuntime", + "name": "exampleIntegrationRuntime", + "type": "Microsoft.DataFactory/factories/integrationruntimes", + "properties": { + "type": "SelfHosted", + "description": "A selfhosted integration runtime" + }, + "etag": "0400f1a1-0000-0000-0000-5b2188640000" + } + ] + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_RegenerateAuthKey.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_RegenerateAuthKey.json new file mode 100644 index 000000000..81c80e555 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_RegenerateAuthKey.json @@ -0,0 +1,26 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "regenerateKeyParameters": { + "keyName": "authKey2" + }, + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Thu, 14 Jun 2018 14:50:41 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-writes": "1198", + "x-ms-request-id": "25af6d34-52c6-40a4-a882-16052af85c99", + "x-ms-correlation-request-id": 
"25af6d34-52c6-40a4-a882-16052af85c99" + }, + "body": { + "authKey2": "**********" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_RemoveLinks.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_RemoveLinks.json new file mode 100644 index 000000000..33c1f7eda --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_RemoveLinks.json @@ -0,0 +1,15 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "linkedIntegrationRuntimeRequest": { + "factoryName": "exampleFactoryName-linked" + }, + "api-version": "2018-06-01" + }, + "responses": { + "200": {} + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Start.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Start.json new file mode 100644 index 000000000..81bcbb87f --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Start.json @@ -0,0 +1,34 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleManagedIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "202": {}, + "200": { + "headers": { + "Date": "Wed, 13 Jun 2018 21:33:00 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "14912", + "x-ms-request-id": "2af47f9f-5625-4b01-a3a5-bccb576a4677", + "x-ms-correlation-request-id": "2af47f9f-5625-4b01-a3a5-bccb576a4677" + }, + "body": { + "name": "exampleManagedIntegrationRuntime", + "properties": { + "dataFactoryName": "exampleFactoryName", + "type": "Managed", + "state": "Started", + "typeProperties": { + "nodes": [], + "otherErrors": [], + "createTime": "2018-06-13T21:11:01.8695494Z" + } + } + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Stop.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Stop.json new file mode 100644 index 000000000..5ae0f3f8f --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Stop.json @@ -0,0 +1,13 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleManagedIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "202": {}, + "200": {} + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_SyncCredentials.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_SyncCredentials.json new file mode 100644 index 000000000..cdd180d2e --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_SyncCredentials.json @@ -0,0 +1,12 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "200": {} + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Update.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Update.json new file mode 100644 index 
000000000..ebac05865 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Update.json @@ -0,0 +1,34 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "updateIntegrationRuntimeRequest": { + "autoUpdate": "Off", + "updateDelayOffset": "\"PT3H\"" + }, + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Wed, 13 Jun 2018 21:33:04 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-writes": "1192", + "x-ms-request-id": "eb1c35f8-4b37-4d08-b5dd-d6a0ad8b182d", + "x-ms-correlation-request-id": "eb1c35f8-4b37-4d08-b5dd-d6a0ad8b182d" + }, + "body": { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/integrationruntimes/exampleIntegrationRuntime", + "name": "exampleIntegrationRuntime", + "type": "Microsoft.DataFactory/factories/integrationruntimes", + "properties": { + "type": "SelfHosted", + "description": "A selfhosted integration runtime" + }, + "etag": "0400f1a1-0000-0000-0000-5b2188640000" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Upgrade.json b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Upgrade.json new file mode 100644 index 000000000..cdd180d2e --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/IntegrationRuntimes_Upgrade.json @@ -0,0 +1,12 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "integrationRuntimeName": "exampleIntegrationRuntime", + "api-version": "2018-06-01" + }, + "responses": { + "200": {} + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_Create.json b/src/test/scenarios/datafactory/input/examples/Triggers_Create.json new file mode 100644 index 000000000..d5dd06221 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_Create.json @@ -0,0 +1,80 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "ifMatch": null, + "trigger": { + "properties": { + "type": "ScheduleTrigger", + "typeProperties": { + "recurrence": { + "frequency": "Minute", + "interval": 4, + "startTime": "2018-06-16T00:39:13.8441801Z", + "endTime": "2018-06-16T00:55:13.8441801Z", + "timeZone": "UTC" + } + }, + "pipelines": [ + { + "pipelineReference": { + "referenceName": "examplePipeline", + "type": "PipelineReference" + }, + "parameters": { + "OutputBlobNameList": [ + "exampleoutput.csv" + ] + } + } + ] + } + }, + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Sat, 16 Jun 2018 00:40:14 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-writes": "1186", + "x-ms-request-id": "373f1a49-685d-4c07-8857-0fcf1bcaffcb", + "x-ms-correlation-request-id": "373f1a49-685d-4c07-8857-0fcf1bcaffcb" + }, + "body": { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger", + "name": "exampleTrigger", + "type": 
"Microsoft.DataFactory/factories/triggers", + "properties": { + "type": "ScheduleTrigger", + "typeProperties": { + "recurrence": { + "frequency": "Minute", + "interval": 4, + "startTime": "2018-06-16T00:39:13.8441801Z", + "endTime": "2018-06-16T00:55:13.8441801Z", + "timeZone": "UTC" + } + }, + "pipelines": [ + { + "pipelineReference": { + "referenceName": "examplePipeline", + "type": "PipelineReference" + }, + "parameters": { + "OutputBlobNameList": [ + "exampleoutput.csv" + ] + } + } + ], + "runtimeState": "Stopped" + }, + "etag": "0a008ad4-0000-0000-0000-5b245c6e0000" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_Delete.json b/src/test/scenarios/datafactory/input/examples/Triggers_Delete.json new file mode 100644 index 000000000..e7d4a03ea --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_Delete.json @@ -0,0 +1,13 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "api-version": "2018-06-01" + }, + "responses": { + "200": {}, + "204": {} + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_Get.json b/src/test/scenarios/datafactory/input/examples/Triggers_Get.json new file mode 100644 index 000000000..446697f3a --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_Get.json @@ -0,0 +1,54 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "ifNoneMatch": "1500544f-0000-0200-0000-5cbe09100000", + "api-version": "2018-06-01" + }, + "responses": { + "304": {}, + "200": { + "headers": { + "Date": "Mon, 22 Apr 2019 18:33:52 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "11989", + "x-ms-request-id": "8ad1759f-24c1-46a5-aef5-edd267e18870", + "x-ms-correlation-request-id": "8ad1759f-24c1-46a5-aef5-edd267e18870" + }, + "body": { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger", + "name": "exampleTrigger", + "type": "Microsoft.DataFactory/factories/triggers", + "properties": { + "type": "ScheduleTrigger", + "typeProperties": { + "recurrence": { + "frequency": "Minute", + "interval": 4, + "startTime": "2019-04-22T18:32:52.527912Z", + "endTime": "2019-04-22T18:48:52.5281747Z", + "timeZone": "UTC" + } + }, + "pipelines": [ + { + "pipelineReference": { + "referenceName": "examplePipeline", + "type": "PipelineReference" + }, + "parameters": { + "OutputBlobNameList": [ + "exampleoutput.csv" + ] + } + } + ], + "runtimeState": "Stopped" + }, + "etag": "1500544f-0000-0200-0000-5cbe09100000" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_GetEventSubscriptionStatus.json b/src/test/scenarios/datafactory/input/examples/Triggers_GetEventSubscriptionStatus.json new file mode 100644 index 000000000..5efc8eebb --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_GetEventSubscriptionStatus.json @@ -0,0 +1,17 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "api-version": "2018-06-01" + }, + "responses": { + "200": { + 
"body": { + "triggerName": "exampleTrigger", + "status": "Enabled" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_ListByFactory.json b/src/test/scenarios/datafactory/input/examples/Triggers_ListByFactory.json new file mode 100644 index 000000000..783f84c01 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_ListByFactory.json @@ -0,0 +1,56 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Sat, 16 Jun 2018 00:40:20 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "14986", + "x-ms-request-id": "e474f8f8-b34f-4536-b059-ca740e6b44c3", + "x-ms-correlation-request-id": "e474f8f8-b34f-4536-b059-ca740e6b44c3" + }, + "body": { + "value": [ + { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger", + "name": "exampleTrigger", + "type": "Microsoft.DataFactory/factories/triggers", + "properties": { + "type": "ScheduleTrigger", + "typeProperties": { + "recurrence": { + "frequency": "Minute", + "interval": 4, + "startTime": "2018-06-16T00:39:14.905167Z", + "endTime": "2018-06-16T00:55:14.905167Z", + "timeZone": "UTC" + } + }, + "pipelines": [ + { + "pipelineReference": { + "referenceName": "examplePipeline", + "type": "PipelineReference" + }, + "parameters": { + "OutputBlobNameList": [ + "exampleoutput.csv" + ] + } + } + ], + "description": "Example description", + "runtimeState": "Started" + }, + "etag": "0a008ed4-0000-0000-0000-5b245c740000" + } + ] + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_QueryByFactory.json b/src/test/scenarios/datafactory/input/examples/Triggers_QueryByFactory.json new file mode 100644 index 000000000..7a266ba3b --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_QueryByFactory.json @@ -0,0 +1,42 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "api-version": "2018-06-01", + "filterParameters": { + "parentTriggerName": "exampleTrigger" + } + }, + "responses": { + "200": { + "headers": { + "Date": "Sat, 16 Jun 2018 00:40:20 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-reads": "14986", + "x-ms-request-id": "e474f8f8-b34f-4536-b059-ca740e6b44c3", + "x-ms-correlation-request-id": "e474f8f8-b34f-4536-b059-ca740e6b44c3" + }, + "body": { + "value": [ + { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleRerunTrigger", + "name": "exampleRerunTrigger", + "type": "Microsoft.DataFactory/factories/triggers", + "properties": { + "type": "RerunTumblingWindowTrigger", + "typeProperties": { + "parentTrigger": "exampleTrigger", + "requestedStartTime": "2018-06-16T00:39:14.905167Z", + "requestedEndTime": "2018-06-16T00:55:14.905167Z", + "rerunConcurrency": 4 + }, + "description": "Example description" + } + } + ] + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_Start.json b/src/test/scenarios/datafactory/input/examples/Triggers_Start.json new file mode 
100644 index 000000000..558c3f6b3 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_Start.json @@ -0,0 +1,12 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "api-version": "2018-06-01" + }, + "responses": { + "200": {} + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_Stop.json b/src/test/scenarios/datafactory/input/examples/Triggers_Stop.json new file mode 100644 index 000000000..558c3f6b3 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_Stop.json @@ -0,0 +1,12 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "api-version": "2018-06-01" + }, + "responses": { + "200": {} + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_SubscribeToEvents.json b/src/test/scenarios/datafactory/input/examples/Triggers_SubscribeToEvents.json new file mode 100644 index 000000000..82d16f4b2 --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_SubscribeToEvents.json @@ -0,0 +1,22 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "api-version": "2018-06-01" + }, + "responses": { + "202": { + "headers": { + "Location": "https://management.azure.com/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger/getEventSubscriptionStatus?api-version=2018-06-01" + } + }, + "200": { + "body": { + "triggerName": "exampleTrigger", + "status": "Enabled" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_UnsubscribeFromEvents.json b/src/test/scenarios/datafactory/input/examples/Triggers_UnsubscribeFromEvents.json new file mode 100644 index 000000000..55cb7979d --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_UnsubscribeFromEvents.json @@ -0,0 +1,22 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "api-version": "2018-06-01" + }, + "responses": { + "202": { + "headers": { + "Location": "https://management.azure.com/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger/getEventSubscriptionStatus?api-version=2018-06-01" + } + }, + "200": { + "body": { + "triggerName": "exampleTrigger", + "status": "Disabled" + } + } + } +} diff --git a/src/test/scenarios/datafactory/input/examples/Triggers_Update.json b/src/test/scenarios/datafactory/input/examples/Triggers_Update.json new file mode 100644 index 000000000..be49edc3d --- /dev/null +++ b/src/test/scenarios/datafactory/input/examples/Triggers_Update.json @@ -0,0 +1,82 @@ +{ + "parameters": { + "subscriptionId": "12345678-1234-1234-1234-12345678abc", + "resourceGroupName": "exampleResourceGroup", + "factoryName": "exampleFactoryName", + "triggerName": "exampleTrigger", + "ifMatch": null, + "trigger": { + "properties": { + "type": "ScheduleTrigger", + 
"typeProperties": { + "recurrence": { + "frequency": "Minute", + "interval": 4, + "startTime": "2018-06-16T00:39:14.905167Z", + "endTime": "2018-06-16T00:55:14.905167Z", + "timeZone": "UTC" + } + }, + "pipelines": [ + { + "pipelineReference": { + "referenceName": "examplePipeline", + "type": "PipelineReference" + }, + "parameters": { + "OutputBlobNameList": [ + "exampleoutput.csv" + ] + } + } + ], + "description": "Example description" + } + }, + "api-version": "2018-06-01" + }, + "responses": { + "200": { + "headers": { + "Date": "Sat, 16 Jun 2018 00:40:15 GMT", + "X-Content-Type-Options": "nosniff", + "x-ms-ratelimit-remaining-subscription-writes": "1185", + "x-ms-request-id": "d5ccf096-0618-4b26-9829-db77e4c391c7", + "x-ms-correlation-request-id": "d5ccf096-0618-4b26-9829-db77e4c391c7" + }, + "body": { + "id": "/subscriptions/12345678-1234-1234-1234-12345678abc/resourceGroups/exampleResourceGroup/providers/Microsoft.DataFactory/factories/exampleFactoryName/triggers/exampleTrigger", + "name": "exampleTrigger", + "type": "Microsoft.DataFactory/factories/triggers", + "properties": { + "type": "ScheduleTrigger", + "typeProperties": { + "recurrence": { + "frequency": "Minute", + "interval": 4, + "startTime": "2018-06-16T00:39:14.905167Z", + "endTime": "2018-06-16T00:55:14.905167Z", + "timeZone": "UTC" + } + }, + "pipelines": [ + { + "pipelineReference": { + "referenceName": "examplePipeline", + "type": "PipelineReference" + }, + "parameters": { + "OutputBlobNameList": [ + "exampleoutput.csv" + ] + } + } + ], + "description": "Example description", + "runtimeState": "Stopped" + }, + "etag": "0a008dd4-0000-0000-0000-5b245c6f0000" + } + } + } +} diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_client_factory.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_client_factory.py index 57b5a91b4..467281d5d 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_client_factory.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_client_factory.py @@ -17,3 +17,11 @@ def cf_datafactory(cli_ctx, *_): def cf_factory(cli_ctx, *_): return cf_datafactory(cli_ctx).factory + + +def cf_trigger(cli_ctx, *_): + return cf_datafactory(cli_ctx).trigger + + +def cf_integration_runtime(cli_ctx, *_): + return cf_datafactory(cli_ctx).integration_runtime diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_help.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_help.py index 3955bda92..96709f5b0 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_help.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_help.py @@ -38,6 +38,41 @@ helps['datafactory create'] = """ type: command short-summary: Creates or updates a factory. + parameters: + - name: --factory-vsts-configuration + short-summary: Factory's VSTS repo information. + long-summary: | + Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX repository-name=XX\ + collaboration-branch=XX root-folder=XX last-commit-id=XX + + project-name: Required. VSTS project name. + tenant-id: VSTS tenant id. + type: Required. Type of repo configuration. + account-name: Required. Account name. + repository-name: Required. Repository name. + collaboration-branch: Required. Collaboration branch. + root-folder: Required. 
Root folder. + last-commit-id: Last commit id. + - name: --factory-git-hub-configuration + short-summary: Factory's GitHub repo information. + long-summary: | + Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX collaboratio\ +n-branch=XX root-folder=XX last-commit-id=XX + + host-name: GitHub Enterprise host name. For example: https://github.mydomain.com + type: Required. Type of repo configuration. + account-name: Required. Account name. + repository-name: Required. Repository name. + collaboration-branch: Required. Collaboration branch. + root-folder: Required. Root folder. + last-commit-id: Last commit id. + - name: --fake-identity + short-summary: This is only for az test. + long-summary: | + Usage: --fake-identity name=XX zones-inside=XX + + name: Required. .. + zones-inside: sample of simple array examples: - name: Factories_CreateOrUpdate text: |- @@ -67,6 +102,34 @@ helps['datafactory configure-factory-repo'] = """ type: command short-summary: Updates a factory's repo information. + parameters: + - name: --factory-vsts-configuration + short-summary: Factory's VSTS repo information. + long-summary: | + Usage: --factory-vsts-configuration project-name=XX tenant-id=XX type=XX account-name=XX repository-name=XX\ + collaboration-branch=XX root-folder=XX last-commit-id=XX + + project-name: Required. VSTS project name. + tenant-id: VSTS tenant id. + type: Required. Type of repo configuration. + account-name: Required. Account name. + repository-name: Required. Repository name. + collaboration-branch: Required. Collaboration branch. + root-folder: Required. Root folder. + last-commit-id: Last commit id. + - name: --factory-git-hub-configuration + short-summary: Factory's GitHub repo information. + long-summary: | + Usage: --factory-git-hub-configuration host-name=XX type=XX account-name=XX repository-name=XX collaboratio\ +n-branch=XX root-folder=XX last-commit-id=XX + + host-name: GitHub Enterprise host name. For example: https://github.mydomain.com + type: Required. Type of repo configuration. + account-name: Required. Account name. + repository-name: Required. Repository name. + collaboration-branch: Required. Collaboration branch. + root-folder: Required. Root folder. + last-commit-id: Last commit id. examples: - name: Factories_ConfigureFactoryRepo text: |- @@ -96,3 +159,342 @@ az datafactory get-git-hub-access-token --name "exampleFactoryName" --git-hub-access-code "some" --git-h\ ub-access-token-base-url "some" --git-hub-client-id "some" --resource-group "exampleResourceGroup" """ + +helps['datafactory trigger'] = """ + type: group + short-summary: datafactory trigger +""" + +helps['datafactory trigger list'] = """ + type: command + short-summary: Lists triggers. + examples: + - name: Triggers_ListByFactory + text: |- + az datafactory trigger list --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" +""" + +helps['datafactory trigger show'] = """ + type: command + short-summary: Gets a trigger. + examples: + - name: Triggers_Get + text: |- + az datafactory trigger show --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" +""" + +helps['datafactory trigger create'] = """ + type: command + short-summary: Creates or updates a trigger. 
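The escaped `--properties` strings in the generated examples that follow are single-line JSON with shell quoting applied, which makes them hard to read. As a readability aid, here is a minimal sketch (standard library only; the payload is copied verbatim from the Triggers_Create fixture earlier in this diff) of building the same ScheduleTrigger body and serializing it before handing it to the CLI:

```python
import json

# ScheduleTrigger payload from the Triggers_Create fixture. json.dumps
# produces the single-line string the generated examples pass to
# --properties; the backslash escaping seen in _help.py is shell quoting.
trigger_properties = {
    "type": "ScheduleTrigger",
    "typeProperties": {
        "recurrence": {
            "frequency": "Minute",
            "interval": 4,
            "startTime": "2018-06-16T00:39:13.8441801Z",
            "endTime": "2018-06-16T00:55:13.8441801Z",
            "timeZone": "UTC",
        }
    },
    "pipelines": [
        {
            "pipelineReference": {
                "referenceName": "examplePipeline",
                "type": "PipelineReference",
            },
            "parameters": {"OutputBlobNameList": ["exampleoutput.csv"]},
        }
    ],
}

print(json.dumps(trigger_properties))
```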
+ examples: + - name: Triggers_Create + text: |- + az datafactory trigger create --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup\ +" --properties "{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlobNameList\\":[\\"exam\ +pleoutput.csv\\"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\\",\\"referenceName\\":\\"examplePipeline\\\ +"}}],\\"typeProperties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\ +\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}" --name "exampleTri\ +gger" + - name: Triggers_Update + text: |- + az datafactory trigger create --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup\ +" --properties "{\\"type\\":\\"ScheduleTrigger\\",\\"description\\":\\"Example description\\",\\"pipelines\\":[{\\"para\ +meters\\":{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\ +\\",\\"referenceName\\":\\"examplePipeline\\"}}],\\"typeProperties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00\ +:55:14.905167Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:14.905167Z\\",\\"ti\ +meZone\\":\\"UTC\\"}}}" --name "exampleTrigger" +""" + +helps['datafactory trigger update'] = """ + type: command + short-summary: Creates or updates a trigger. + examples: + - name: Triggers_Create + text: |- + az datafactory trigger update --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup\ +" --properties "{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{\\"parameters\\":{\\"OutputBlobNameList\\":[\\"exam\ +pleoutput.csv\\"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\\",\\"referenceName\\":\\"examplePipeline\\\ +"}}],\\"typeProperties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\ +\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}" --name "exampleTri\ +gger" + - name: Triggers_Update + text: |- + az datafactory trigger update --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup\ +" --properties "{\\"type\\":\\"ScheduleTrigger\\",\\"description\\":\\"Example description\\",\\"pipelines\\":[{\\"para\ +meters\\":{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]},\\"pipelineReference\\":{\\"type\\":\\"PipelineReference\ +\\",\\"referenceName\\":\\"examplePipeline\\"}}],\\"typeProperties\\":{\\"recurrence\\":{\\"endTime\\":\\"2018-06-16T00\ +:55:14.905167Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39:14.905167Z\\",\\"ti\ +meZone\\":\\"UTC\\"}}}" --name "exampleTrigger" +""" + +helps['datafactory trigger delete'] = """ + type: command + short-summary: Deletes a trigger. + examples: + - name: Triggers_Delete + text: |- + az datafactory trigger delete --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup\ +" --name "exampleTrigger" +""" + +helps['datafactory trigger get-event-subscription-status'] = """ + type: command + short-summary: Get a trigger's event subscription status. + examples: + - name: Triggers_GetEventSubscriptionStatus + text: |- + az datafactory trigger get-event-subscription-status --factory-name "exampleFactoryName" --resource-grou\ +p "exampleResourceGroup" --name "exampleTrigger" +""" + +helps['datafactory trigger query-by-factory'] = """ + type: command + short-summary: Query triggers. 
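Judging from the Triggers_QueryByFactory fixture earlier in this diff, the `--parent-trigger-name` filter selects rerun triggers whose `typeProperties.parentTrigger` names the given trigger. A hypothetical client-side equivalent, only to make the filter semantics concrete (the helper name and the local filtering are illustrative; the real command queries the service):

```python
def filter_rerun_triggers(triggers, parent_trigger_name):
    """Illustrative only: mimics the parentTriggerName filter locally.

    `triggers` is a list of trigger resources shaped like the fixture's
    response body (each with a "properties" object).
    """
    return [
        t for t in triggers
        if t["properties"]["type"] == "RerunTumblingWindowTrigger"
        and t["properties"]["typeProperties"].get("parentTrigger") == parent_trigger_name
    ]
```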
+ examples: + - name: Triggers_QueryByFactory + text: |- + az datafactory trigger query-by-factory --factory-name "exampleFactoryName" --parent-trigger-name "examp\ +leTrigger" --resource-group "exampleResourceGroup" +""" + +helps['datafactory trigger start'] = """ + type: command + short-summary: Starts a trigger. + examples: + - name: Triggers_Start + text: |- + az datafactory trigger start --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup"\ + --name "exampleTrigger" +""" + +helps['datafactory trigger stop'] = """ + type: command + short-summary: Stops a trigger. + examples: + - name: Triggers_Stop + text: |- + az datafactory trigger stop --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" +""" + +helps['datafactory trigger subscribe-to-event'] = """ + type: command + short-summary: Subscribe event trigger to events. + examples: + - name: Triggers_SubscribeToEvents + text: |- + az datafactory trigger subscribe-to-event --factory-name "exampleFactoryName" --resource-group "exampleR\ +esourceGroup" --name "exampleTrigger" +""" + +helps['datafactory trigger unsubscribe-from-event'] = """ + type: command + short-summary: Unsubscribe event trigger from events. + examples: + - name: Triggers_UnsubscribeFromEvents + text: |- + az datafactory trigger unsubscribe-from-event --factory-name "exampleFactoryName" --resource-group "exam\ +pleResourceGroup" --name "exampleTrigger" +""" + +helps['datafactory trigger wait'] = """ + type: command + short-summary: Place the CLI in a waiting state until a condition of the datafactory trigger is met. + examples: + - name: Pause executing next line of CLI script until the datafactory trigger is successfully created. + text: |- + az datafactory trigger wait --factory-name "exampleFactoryName" --resource-group "exampleResourceGroup" \ +--name "exampleTrigger" --created +""" + +helps['datafactory integration-runtime'] = """ + type: group + short-summary: datafactory integration-runtime +""" + +helps['datafactory integration-runtime list'] = """ + type: command + short-summary: Lists integration runtimes. + examples: + - name: IntegrationRuntimes_ListByFactory + text: |- + az datafactory integration-runtime list --factory-name "exampleFactoryName" --resource-group "exampleRes\ +ourceGroup" +""" + +helps['datafactory integration-runtime show'] = """ + type: command + short-summary: Gets an integration runtime. + examples: + - name: IntegrationRuntimes_Get + text: |- + az datafactory integration-runtime show --factory-name "exampleFactoryName" --name "exampleIntegrationRu\ +ntime" --resource-group "exampleResourceGroup" +""" + +helps['datafactory integration-runtime create'] = """ + type: command + short-summary: Creates or updates an integration runtime. + examples: + - name: IntegrationRuntimes_Create + text: |- + az datafactory integration-runtime create --factory-name "exampleFactoryName" --properties "{\\"type\\":\ +\\"SelfHosted\\",\\"description\\":\\"A selfhosted integration runtime\\"}" --name "exampleIntegrationRuntime" --resour\ +ce-group "exampleResourceGroup" +""" + +helps['datafactory integration-runtime linked-integration-runtime'] = """ + type: group + short-summary: datafactory integration-runtime sub group linked-integration-runtime +""" + +helps['datafactory integration-runtime linked-integration-runtime create'] = """ + type: command + short-summary: Create a linked integration runtime entry in a shared integration runtime. 
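+    # Note: --name, --subscription-id, --data-factory-name and
+    # --data-factory-location describe the linked factory's side of the share;
+    # --factory-name and --resource-group still address the factory that owns
+    # the shared integration runtime.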
+ examples: + - name: IntegrationRuntimes_CreateLinkedIntegrationRuntime + text: |- + az datafactory integration-runtime linked-integration-runtime create --name "bfa92911-9fb6-4fbe-8f23-bea\ +e87bc1c83" --data-factory-location "West US" --data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" --subscription-\ +id "061774c7-4b5a-4159-a55b-365581830283" --factory-name "exampleFactoryName" --integration-runtime-name "exampleIntegr\ +ationRuntime" --resource-group "exampleResourceGroup" --subscription-id "12345678-1234-1234-1234-12345678abc" +""" + +helps['datafactory integration-runtime update'] = """ + type: command + short-summary: Updates an integration runtime. + examples: + - name: IntegrationRuntimes_Update + text: |- + az datafactory integration-runtime update --factory-name "exampleFactoryName" --name "exampleIntegration\ +Runtime" --resource-group "exampleResourceGroup" --auto-update "Off" --update-delay-offset "\\"PT3H\\"" +""" + +helps['datafactory integration-runtime delete'] = """ + type: command + short-summary: Deletes an integration runtime. + examples: + - name: IntegrationRuntimes_Delete + text: |- + az datafactory integration-runtime delete --factory-name "exampleFactoryName" --name "exampleIntegration\ +Runtime" --resource-group "exampleResourceGroup" +""" + +helps['datafactory integration-runtime get-connection-info'] = """ + type: command + short-summary: Gets the on-premises integration runtime connection information for encrypting the on-premises data \ +source credentials. + examples: + - name: IntegrationRuntimes_GetConnectionInfo + text: |- + az datafactory integration-runtime get-connection-info --factory-name "exampleFactoryName" --name "examp\ +leIntegrationRuntime" --resource-group "exampleResourceGroup" +""" + +helps['datafactory integration-runtime get-monitoring-data'] = """ + type: command + short-summary: Get the integration runtime monitoring data, which includes the monitor data for all the nodes under\ + this integration runtime. + examples: + - name: IntegrationRuntimes_GetMonitoringData + text: |- + az datafactory integration-runtime get-monitoring-data --factory-name "exampleFactoryName" --name "examp\ +leIntegrationRuntime" --resource-group "exampleResourceGroup" +""" + +helps['datafactory integration-runtime get-status'] = """ + type: command + short-summary: Gets detailed status information for an integration runtime. + examples: + - name: IntegrationRuntimes_GetStatus + text: |- + az datafactory integration-runtime get-status --factory-name "exampleFactoryName" --name "exampleIntegra\ +tionRuntime" --resource-group "exampleResourceGroup" +""" + +helps['datafactory integration-runtime list-auth-key'] = """ + type: command + short-summary: Retrieves the authentication keys for an integration runtime. + examples: + - name: IntegrationRuntimes_ListAuthKeys + text: |- + az datafactory integration-runtime list-auth-key --factory-name "exampleFactoryName" --name "exampleInte\ +grationRuntime" --resource-group "exampleResourceGroup" +""" + +helps['datafactory integration-runtime regenerate-auth-key'] = """ + type: command + short-summary: Regenerates the authentication key for an integration runtime. 
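+    # Note: --regenerate-key-parameters takes a JSON document (json-string or
+    # @json-file); the example below regenerates "authKey2".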
+    examples:
+      - name: IntegrationRuntimes_RegenerateAuthKey
+        text: |-
+            az datafactory integration-runtime regenerate-auth-key --factory-name "exampleFactoryName" --name "examp\
leIntegrationRuntime" --regenerate-key-parameters "{\\"keyName\\":\\"authKey2\\"}" --resource-group "exampleResourceGro\
up"
+"""
+
+helps['datafactory integration-runtime remove-link'] = """
+    type: command
+    short-summary: Remove all linked integration runtimes under a specific data factory in a self-hosted integration \
runtime.
+    examples:
+      - name: IntegrationRuntimes_Upgrade
+        text: |-
+            az datafactory integration-runtime remove-link --factory-name "exampleFactoryName" --name "exampleIntegr\
ationRuntime" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory integration-runtime start'] = """
+    type: command
+    short-summary: Starts a ManagedReserved type integration runtime.
+    examples:
+      - name: IntegrationRuntimes_Start
+        text: |-
+            az datafactory integration-runtime start --factory-name "exampleFactoryName" --name "exampleManagedInteg\
rationRuntime" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory integration-runtime stop'] = """
+    type: command
+    short-summary: Stops a ManagedReserved type integration runtime.
+    examples:
+      - name: IntegrationRuntimes_Stop
+        text: |-
+            az datafactory integration-runtime stop --factory-name "exampleFactoryName" --name "exampleManagedIntegr\
ationRuntime" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory integration-runtime sync-credentials'] = """
+    type: command
+    short-summary: Force the integration runtime to synchronize credentials across integration runtime nodes. This w\
ill override the credentials across all worker nodes with those available on the dispatcher node. If you already have \
the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime no\
de rather than using this API directly.
+    examples:
+      - name: IntegrationRuntimes_SyncCredentials
+        text: |-
+            az datafactory integration-runtime sync-credentials --factory-name "exampleFactoryName" --name "exampleI\
ntegrationRuntime" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory integration-runtime upgrade'] = """
+    type: command
+    short-summary: Upgrade the self-hosted integration runtime to the latest version, if one is available.
+    examples:
+      - name: IntegrationRuntimes_Upgrade
+        text: |-
+            az datafactory integration-runtime upgrade --factory-name "exampleFactoryName" --name "exampleIntegratio\
nRuntime" --resource-group "exampleResourceGroup"
+"""
+
+helps['datafactory integration-runtime wait'] = """
+    type: command
+    short-summary: Place the CLI in a waiting state until a condition of the datafactory integration-runtime is met.
+    examples:
+      - name: Pause executing next line of CLI script until the datafactory integration-runtime is successfully created\
+ text: |- + az datafactory integration-runtime wait --factory-name "exampleFactoryName" --name "exampleIntegrationRu\ +ntime" --resource-group "exampleResourceGroup" --created +""" diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_params.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_params.py index 1b71a6647..c490f5665 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_params.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/_params.py @@ -44,15 +44,10 @@ def load_arguments(self, _): validator=get_default_location_from_resource_group) c.argument('tags', tags_type) c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS r' - 'epo information. Expect value: KEY1=VALUE1 KEY2=VALUE2 ... , available KEYs are: project-name, tena' - 'nt-id, account-name, repository-name, collaboration-branch, root-folder, last-commit-id.', - arg_group='RepoConfiguration') + 'epo information.', arg_group='RepoConfiguration') c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s G' - 'itHub repo information. Expect value: KEY1=VALUE1 KEY2=VALUE2 ... , available KEYs are: host-name, ' - 'account-name, repository-name, collaboration-branch, root-folder, last-commit-id.', arg_group='Repo' - 'Configuration') - c.argument('fake_identity', action=AddFakeIdentity, nargs='+', help='This is only for az test. Expect value: KE' - 'Y1=VALUE1 KEY2=VALUE2 ... , available KEYs are: name, zones-inside.') + 'itHub repo information.', arg_group='RepoConfiguration') + c.argument('fake_identity', action=AddFakeIdentity, nargs='+', help='This is only for az test.') c.argument('zones', nargs='+', help='This is only for az test.') with self.argument_context('datafactory update') as c: @@ -68,13 +63,9 @@ def load_arguments(self, _): c.argument('location_id', help='The location identifier.', id_part='name') c.argument('factory_resource_id', help='The factory resource id.') c.argument('factory_vsts_configuration', action=AddFactoryVstsConfiguration, nargs='+', help='Factory\'s VSTS r' - 'epo information. Expect value: KEY1=VALUE1 KEY2=VALUE2 ... , available KEYs are: project-name, tena' - 'nt-id, account-name, repository-name, collaboration-branch, root-folder, last-commit-id.', - arg_group='RepoConfiguration') + 'epo information.', arg_group='RepoConfiguration') c.argument('factory_git_hub_configuration', action=AddFactoryGitHubConfiguration, nargs='+', help='Factory\'s G' - 'itHub repo information. Expect value: KEY1=VALUE1 KEY2=VALUE2 ... 
, available KEYs are: host-name, ' - 'account-name, repository-name, collaboration-branch, root-folder, last-commit-id.', arg_group='Repo' - 'Configuration') + 'itHub repo information.', arg_group='RepoConfiguration') with self.argument_context('datafactory get-data-plane-access') as c: c.argument('resource_group_name', resource_group_name_type) @@ -95,3 +86,198 @@ def load_arguments(self, _): c.argument('git_hub_access_code', help='GitHub access code.') c.argument('git_hub_client_id', help='GitHub application client ID.') c.argument('git_hub_access_token_base_url', help='GitHub access token base URL.') + + with self.argument_context('datafactory trigger list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.') + + with self.argument_context('datafactory trigger show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('if_none_match', help='ETag of the trigger entity. Should only be specified for get. If the ETag mat' + 'ches the existing entity tag, or if * was provided, then no content will be returned.') + + with self.argument_context('datafactory trigger create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.') + c.argument('if_match', help='ETag of the trigger entity. Should only be specified for update, for which it sho' + 'uld match existing entity or can be * for unconditional update.') + c.argument('properties', arg_type=CLIArgumentType(options_list=['--properties'], help='Properties of the trigge' + 'r. Expected value: json-string/@json-file.')) + + with self.argument_context('datafactory trigger update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('if_match', help='ETag of the trigger entity. Should only be specified for update, for which it sho' + 'uld match existing entity or can be * for unconditional update.') + c.argument('properties', arg_type=CLIArgumentType(options_list=['--properties'], help='Properties of the trigge' + 'r. Expected value: json-string/@json-file.')) + + with self.argument_context('datafactory trigger delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + + with self.argument_context('datafactory trigger get-event-subscription-status') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + + with self.argument_context('datafactory trigger query-by-factory') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('continuation_token', help='The continuation token for getting the next page of results. 
Null for fi' + 'rst page.') + c.argument('parent_trigger_name', help='The name of the parent TumblingWindowTrigger to get the child rerun tri' + 'ggers') + + with self.argument_context('datafactory trigger start') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + + with self.argument_context('datafactory trigger stop') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + + with self.argument_context('datafactory trigger subscribe-to-event') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + + with self.argument_context('datafactory trigger unsubscribe-from-event') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + + with self.argument_context('datafactory trigger wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('trigger_name', options_list=['--name', '-n'], help='The trigger name.', id_part='child_name_1') + c.argument('if_none_match', help='ETag of the trigger entity. Should only be specified for get. If the ETag mat' + 'ches the existing entity tag, or if * was provided, then no content will be returned.') + + with self.argument_context('datafactory integration-runtime list') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.') + + with self.argument_context('datafactory integration-runtime show') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + c.argument('if_none_match', help='ETag of the integration runtime entity. Should only be specified for get. If ' + 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') + + with self.argument_context('datafactory integration-runtime create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.') + c.argument('if_match', help='ETag of the integration runtime entity. Should only be specified for update, for w' + 'hich it should match existing entity or can be * for unconditional update.') + c.argument('properties', arg_type=CLIArgumentType(options_list=['--properties'], help='Integration runtime prop' + 'erties. 
Expected value: json-string/@json-file.')) + + with self.argument_context('datafactory integration-runtime linked-integration-runtime create') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.') + c.argument('integration_runtime_name', help='The integration runtime name.') + c.argument('name', help='The name of the linked integration runtime.') + c.argument('subscription_id', + help='The ID of the subscription that the linked integration runtime belongs to.') + c.argument('data_factory_name', help='The name of the data factory that the linked integration runtime belongs ' + 'to.') + c.argument('data_factory_location', help='The location of the data factory that the linked integration runtime ' + 'belongs to.') + + with self.argument_context('datafactory integration-runtime update') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + c.argument('auto_update', arg_type=CLIArgumentType(options_list=['--auto-update'], help='Enables or disables th' + 'e auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?' + 'linkid=854189. Expected value: json-string/@json-file.')) + c.argument('update_delay_offset', help='The time offset (in hours) in the day, e.g., PT03H is 3 hours. The inte' + 'gration runtime auto update will happen on that time.') + + with self.argument_context('datafactory integration-runtime delete') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + + with self.argument_context('datafactory integration-runtime get-connection-info') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + + with self.argument_context('datafactory integration-runtime get-monitoring-data') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + + with self.argument_context('datafactory integration-runtime get-status') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + + with self.argument_context('datafactory integration-runtime list-auth-key') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.') + + with self.argument_context('datafactory integration-runtime regenerate-auth-key') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + 
c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + c.argument('regenerate_key_parameters', arg_type=CLIArgumentType(options_list=['--regenerate-key-parameters'], + help='The parameters for regenerating integration runtime authentication key. Expected value: json-s' + 'tring/@json-file.')) + + with self.argument_context('datafactory integration-runtime remove-link') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + c.argument('linked_factory_name', help='The data factory name for linked integration runtime.') + + with self.argument_context('datafactory integration-runtime start') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + + with self.argument_context('datafactory integration-runtime stop') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + + with self.argument_context('datafactory integration-runtime sync-credentials') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + + with self.argument_context('datafactory integration-runtime upgrade') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + + with self.argument_context('datafactory integration-runtime wait') as c: + c.argument('resource_group_name', resource_group_name_type) + c.argument('factory_name', help='The factory name.', id_part='name') + c.argument('integration_runtime_name', options_list=['--name', '-n'], help='The integration runtime name.', + id_part='child_name_1') + c.argument('if_none_match', help='ETag of the integration runtime entity. Should only be specified for get. 
If ' + 'the ETag matches the existing entity tag, or if * was provided, then no content will be returned.') diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/commands.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/commands.py index 88be5a042..9e817806a 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/commands.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/commands.py @@ -27,3 +27,51 @@ def load_command_table(self, _): g.custom_command('configure-factory-repo', 'datafactory_configure_factory_repo') g.custom_command('get-data-plane-access', 'datafactory_get_data_plane_access') g.custom_command('get-git-hub-access-token', 'datafactory_get_git_hub_access_token') + + from azext_datafactory.generated._client_factory import cf_trigger + datafactory_trigger = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._trigger_operations#TriggerOperations.{' + '}', + client_factory=cf_trigger) + with self.command_group('datafactory trigger', datafactory_trigger, client_factory=cf_trigger, + is_experimental=True) as g: + g.custom_command('list', 'datafactory_trigger_list') + g.custom_show_command('show', 'datafactory_trigger_show') + g.custom_command('create', 'datafactory_trigger_create') + g.generic_update_command('update', setter_arg_name = 'properties', custom_func_name = 'datafactory_trigger_upda' + 'te') + g.custom_command('delete', 'datafactory_trigger_delete') + g.custom_command('get-event-subscription-status', 'datafactory_trigger_get_event_subscription_status') + g.custom_command('query-by-factory', 'datafactory_trigger_query_by_factory') + g.custom_command('start', 'datafactory_trigger_start', supports_no_wait=True) + g.custom_command('stop', 'datafactory_trigger_stop', supports_no_wait=True) + g.custom_command('subscribe-to-event', 'datafactory_trigger_subscribe_to_event', supports_no_wait=True) + g.custom_command('unsubscribe-from-event', 'datafactory_trigger_unsubscribe_from_event', + supports_no_wait=True) + g.custom_wait_command('wait', 'datafactory_trigger_show') + + from azext_datafactory.generated._client_factory import cf_integration_runtime + datafactory_integration_runtime = CliCommandType( + operations_tmpl='azext_datafactory.vendored_sdks.datafactory.operations._integration_runtime_operations#Integra' + 'tionRuntimeOperations.{}', + client_factory=cf_integration_runtime) + with self.command_group('datafactory integration-runtime', datafactory_integration_runtime, + client_factory=cf_integration_runtime, is_experimental=True) as g: + g.custom_command('list', 'datafactory_integration_runtime_list') + g.custom_show_command('show', 'datafactory_integration_runtime_show') + g.custom_command('create', 'datafactory_integration_runtime_create') + g.custom_command('linked-integration-runtime create', 'datafactory_integration_runtime_linked_integration_runti' + 'me_create') + g.custom_command('update', 'datafactory_integration_runtime_update') + g.custom_command('delete', 'datafactory_integration_runtime_delete') + g.custom_command('get-connection-info', 'datafactory_integration_runtime_get_connection_info') + g.custom_command('get-monitoring-data', 'datafactory_integration_runtime_get_monitoring_data') + g.custom_command('get-status', 'datafactory_integration_runtime_get_status') + g.custom_command('list-auth-key', 'datafactory_integration_runtime_list_auth_key') + 
g.custom_command('regenerate-auth-key', 'datafactory_integration_runtime_regenerate_auth_key') + g.custom_command('remove-link', 'datafactory_integration_runtime_remove_link') + g.custom_command('start', 'datafactory_integration_runtime_start', supports_no_wait=True) + g.custom_command('stop', 'datafactory_integration_runtime_stop', supports_no_wait=True) + g.custom_command('sync-credentials', 'datafactory_integration_runtime_sync_credentials') + g.custom_command('upgrade', 'datafactory_integration_runtime_upgrade') + g.custom_wait_command('wait', 'datafactory_integration_runtime_show') diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/custom.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/custom.py index 3a7339253..93343fccf 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/custom.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/generated/custom.py @@ -9,7 +9,9 @@ # -------------------------------------------------------------------------- # pylint: disable=too-many-lines +import json from knack.util import CLIError +from azure.cli.core.util import sdk_no_wait def datafactory_list(cmd, client, @@ -121,3 +123,297 @@ def datafactory_get_git_hub_access_token(cmd, client, git_hub_access_code=git_hub_access_code, git_hub_client_id=git_hub_client_id, git_hub_access_token_base_url=git_hub_access_token_base_url) + + +def datafactory_trigger_list(cmd, client, + resource_group_name, + factory_name): + return client.list_by_factory(resource_group_name=resource_group_name, + factory_name=factory_name) + + +def datafactory_trigger_show(cmd, client, + resource_group_name, + factory_name, + trigger_name, + if_none_match=None): + return client.get(resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + if_none_match=if_none_match) + + +def datafactory_trigger_create(cmd, client, + resource_group_name, + factory_name, + trigger_name, + properties, + if_match=None): + if isinstance(properties, str): + properties = json.loads(properties) + return client.create_or_update(resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + if_match=if_match, + properties=properties) + + +def datafactory_trigger_update(instance, cmd, + resource_group_name, + factory_name, + trigger_name, + if_match=None): + return instance + + +def datafactory_trigger_delete(cmd, client, + resource_group_name, + factory_name, + trigger_name): + return client.delete(resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name) + + +def datafactory_trigger_get_event_subscription_status(cmd, client, + resource_group_name, + factory_name, + trigger_name): + return client.get_event_subscription_status(resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name) + + +def datafactory_trigger_query_by_factory(cmd, client, + resource_group_name, + factory_name, + continuation_token=None, + parent_trigger_name=None): + return client.query_by_factory(resource_group_name=resource_group_name, + factory_name=factory_name, + continuation_token=continuation_token, + parent_trigger_name=parent_trigger_name) + + +def datafactory_trigger_start(cmd, client, + resource_group_name, + factory_name, + trigger_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_start, + resource_group_name=resource_group_name, + 
factory_name=factory_name, + trigger_name=trigger_name) + + +def datafactory_trigger_stop(cmd, client, + resource_group_name, + factory_name, + trigger_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_stop, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name) + + +def datafactory_trigger_subscribe_to_event(cmd, client, + resource_group_name, + factory_name, + trigger_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_subscribe_to_event, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name) + + +def datafactory_trigger_unsubscribe_from_event(cmd, client, + resource_group_name, + factory_name, + trigger_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_unsubscribe_from_event, + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name) + + +def datafactory_integration_runtime_list(cmd, client, + resource_group_name, + factory_name): + return client.list_by_factory(resource_group_name=resource_group_name, + factory_name=factory_name) + + +def datafactory_integration_runtime_show(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name, + if_none_match=None): + return client.get(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + if_none_match=if_none_match) + + +def datafactory_integration_runtime_create(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name, + properties, + if_match=None): + if isinstance(properties, str): + properties = json.loads(properties) + return client.create_or_update(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + if_match=if_match, + properties=properties) + + +def datafactory_integration_runtime_linked_integration_runtime_create(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name, + name=None, + subscription_id=None, + data_factory_name=None, + data_factory_location=None): + return client.create_linked_integration_runtime(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + name=name, + subscription_id=subscription_id, + data_factory_name=data_factory_name, + data_factory_location=data_factory_location) + + +def datafactory_integration_runtime_update(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name, + auto_update=None, + update_delay_offset=None): + if isinstance(auto_update, str): + auto_update = json.loads(auto_update) + return client.update(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + auto_update=auto_update, + update_delay_offset=update_delay_offset) + + +def datafactory_integration_runtime_delete(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name): + return client.delete(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + +def datafactory_integration_runtime_get_connection_info(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name): + return client.get_connection_info(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + +def 
datafactory_integration_runtime_get_monitoring_data(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name): + return client.get_monitoring_data(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + +def datafactory_integration_runtime_get_status(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name): + return client.get_status(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + +def datafactory_integration_runtime_list_auth_key(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name): + return client.list_auth_key(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + +def datafactory_integration_runtime_regenerate_auth_key(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name, + regenerate_key_parameters): + if isinstance(regenerate_key_parameters, str): + regenerate_key_parameters = json.loads(regenerate_key_parameters) + return client.regenerate_auth_key(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + regenerate_key_parameters=regenerate_key_parameters) + + +def datafactory_integration_runtime_remove_link(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name, + linked_factory_name): + return client.remove_link(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + linked_factory_name=linked_factory_name) + + +def datafactory_integration_runtime_start(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_start, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + +def datafactory_integration_runtime_stop(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name, + no_wait=False): + return sdk_no_wait(no_wait, + client.begin_stop, + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + +def datafactory_integration_runtime_sync_credentials(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name): + return client.sync_credentials(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) + + +def datafactory_integration_runtime_upgrade(cmd, client, + resource_group_name, + factory_name, + integration_runtime_name): + return client.upgrade(resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name) diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py index eb6857957..39dbc4b11 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/tests/latest/test_datafactory_scenario.py @@ -32,15 +32,6 @@ def step__factories_put_factories_createorupdate(test, rg): checks=[]) -# EXAMPLE: 
/Factories/get/Factories_Get -@try_manual -def step__factories_get_factories_get(test, rg): - test.cmd('az datafactory show ' - '--name "{exampleFactoryName}" ' - '--resource-group "{rg}"', - checks=[]) - - # EXAMPLE: /Factories/get/Factories_List @try_manual def step__factories_get_factories_list(test, rg): @@ -105,6 +96,302 @@ def step__factories_patch_factories_update(test, rg): checks=[]) +# EXAMPLE: /Factories/get/Factories_Get +@try_manual +def step__factories_get_factories_get(test, rg): + test.cmd('az datafactory show ' + '--name "{exampleFactoryName}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/put/IntegrationRuntimes_Create +@try_manual +def step__integrationruntimes_put_integrationruntimes_create(test, rg): + test.cmd('az datafactory integration-runtime create ' + '--factory-name "{exampleFactoryName}" ' + '--properties "{{\\"type\\":\\"SelfHosted\\",\\"description\\":\\"A selfhosted integration runtime\\"}}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_ListByFactory +@try_manual +def step__integrationruntimes_get_integrationruntimes_listbyfactory(test, rg): + test.cmd('az datafactory integration-runtime list ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_CreateLinkedIntegrationRuntime +@try_manual +def step__integrationruntimes_post_integrationruntimes_createlinkedintegrationruntime(test, rg): + test.cmd('az datafactory integration-runtime linked-integration-runtime create ' + '--name "bfa92911-9fb6-4fbe-8f23-beae87bc1c83" ' + '--data-factory-location "West US" ' + '--data-factory-name "e9955d6d-56ea-4be3-841c-52a12c1a9981" ' + '--subscription-id "061774c7-4b5a-4159-a55b-365581830283" ' + '--factory-name "{exampleFactoryName}" ' + '--integration-runtime-name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}" ' + '--subscription-id "12345678-1234-1234-1234-12345678abc"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetConnectionInfo +@try_manual +def step__integrationruntimes_post_integrationruntimes_getconnectioninfo(test, rg): + test.cmd('az datafactory integration-runtime get-connection-info ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetMonitoringData +@try_manual +def step__integrationruntimes_post_integrationruntimes_getmonitoringdata(test, rg): + test.cmd('az datafactory integration-runtime get-monitoring-data ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_GetStatus +@try_manual +def step__integrationruntimes_post_integrationruntimes_getstatus(test, rg): + test.cmd('az datafactory integration-runtime get-status ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_ListAuthKeys +@try_manual +def step__integrationruntimes_post_integrationruntimes_listauthkeys(test, rg): + test.cmd('az datafactory integration-runtime list-auth-key ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: 
/IntegrationRuntimes/post/IntegrationRuntimes_RegenerateAuthKey +@try_manual +def step__integrationruntimes_post_integrationruntimes_regenerateauthkey(test, rg): + test.cmd('az datafactory integration-runtime regenerate-auth-key ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--regenerate-key-parameters "{{\\"keyName\\":\\"authKey2\\"}}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Start +@try_manual +def step__integrationruntimes_post_integrationruntimes_start(test, rg): + test.cmd('az datafactory integration-runtime start ' + '--factory-name "{exampleFactoryName}" ' + '--name "{IntegrationRuntimes_2}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /Triggers/put/Triggers_Create +@try_manual +def step__triggers_put_triggers_create(test, rg): + test.cmd('az datafactory trigger create ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--properties "{{\\"type\\":\\"ScheduleTrigger\\",\\"pipelines\\":[{{\\"parameters\\":{{\\"OutputBlobNameL' + 'ist\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{\\"type\\":\\"PipelineReference\\",\\"refere' + 'nceName\\":\\"examplePipeline\\"}}}}],\\"typeProperties\\":{{\\"recurrence\\":{{\\"endTime\\":\\"2018-06-' + '16T00:55:13.8441801Z\\",\\"frequency\\":\\"Minute\\",\\"interval\\":4,\\"startTime\\":\\"2018-06-16T00:39' + ':13.8441801Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_SyncCredentials +@try_manual +def step__integrationruntimes_post_integrationruntimes_synccredentials(test, rg): + test.cmd('az datafactory integration-runtime sync-credentials ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Upgrade +@try_manual +def step__integrationruntimes_post_integrationruntimes_upgrade(test, rg): + test.cmd('az datafactory integration-runtime remove-link ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Upgrade +@try_manual +def step__integrationruntimes_post_integrationruntimes_upgrade(test, rg): + test.cmd('az datafactory integration-runtime remove-link ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/patch/IntegrationRuntimes_Update +@try_manual +def step__integrationruntimes_patch_integrationruntimes_update(test, rg): + test.cmd('az datafactory integration-runtime update ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}" ' + '--auto-update "Off" ' + '--update-delay-offset "\\"PT3H\\""', + checks=[]) + + +# EXAMPLE: /Triggers/put/Triggers_Update +@try_manual +def step__triggers_put_triggers_update(test, rg): + test.cmd('az datafactory trigger create ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--properties "{{\\"type\\":\\"ScheduleTrigger\\",\\"description\\":\\"Example description\\",\\"pipelines' + '\\":[{{\\"parameters\\":{{\\"OutputBlobNameList\\":[\\"exampleoutput.csv\\"]}},\\"pipelineReference\\":{{' + '\\"type\\":\\"PipelineReference\\",\\"referenceName\\":\\"examplePipeline\\"}}}}],\\"typeProperties\\":{{' + 
'\\"recurrence\\":{{\\"endTime\\":\\"2018-06-16T00:55:14.905167Z\\",\\"frequency\\":\\"Minute\\",\\"interv' + 'al\\":4,\\"startTime\\":\\"2018-06-16T00:39:14.905167Z\\",\\"timeZone\\":\\"UTC\\"}}}}}}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /Triggers/get/Triggers_Get +@try_manual +def step__triggers_get_triggers_get(test, rg): + test.cmd('az datafactory trigger show ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /Triggers/get/Triggers_ListByFactory +@try_manual +def step__triggers_get_triggers_listbyfactory(test, rg): + test.cmd('az datafactory trigger list ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /Triggers/post/Triggers_GetEventSubscriptionStatus +@try_manual +def step__triggers_post_triggers_geteventsubscriptionstatus(test, rg): + test.cmd('az datafactory trigger get-event-subscription-status ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /Triggers/post/Triggers_QueryByFactory +@try_manual +def step__triggers_post_triggers_querybyfactory(test, rg): + test.cmd('az datafactory trigger query-by-factory ' + '--factory-name "{exampleFactoryName}" ' + '--parent-trigger-name "exampleTrigger" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /Triggers/post/Triggers_Start +@try_manual +def step__triggers_post_triggers_start(test, rg): + test.cmd('az datafactory trigger start ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /Triggers/post/Triggers_Stop +@try_manual +def step__triggers_post_triggers_stop(test, rg): + test.cmd('az datafactory trigger stop ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /Triggers/post/Triggers_SubscribeToEvents +@try_manual +def step__triggers_post_triggers_subscribetoevents(test, rg): + test.cmd('az datafactory trigger subscribe-to-event ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /Triggers/post/Triggers_UnsubscribeFromEvents +@try_manual +def step__triggers_post_triggers_unsubscribefromevents(test, rg): + test.cmd('az datafactory trigger unsubscribe-from-event ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /Triggers/delete/Triggers_Delete +@try_manual +def step__triggers_delete_triggers_delete(test, rg): + test.cmd('az datafactory trigger delete ' + '--factory-name "{exampleFactoryName}" ' + '--resource-group "{rg}" ' + '--name "{exampleTrigger}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/get/IntegrationRuntimes_Get +@try_manual +def step__integrationruntimes_get_integrationruntimes_get(test, rg): + test.cmd('az datafactory integration-runtime show ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: /IntegrationRuntimes/post/IntegrationRuntimes_Stop +@try_manual +def step__integrationruntimes_post_integrationruntimes_stop(test, rg): + test.cmd('az datafactory integration-runtime stop ' + '--factory-name "{exampleFactoryName}" ' + '--name "{IntegrationRuntimes_2}" ' + '--resource-group "{rg}"', + checks=[]) + + +# EXAMPLE: 
/IntegrationRuntimes/delete/IntegrationRuntimes_Delete +@try_manual +def step__integrationruntimes_delete_integrationruntimes_delete(test, rg): + test.cmd('az datafactory integration-runtime delete ' + '--factory-name "{exampleFactoryName}" ' + '--name "{exampleIntegrationRuntime}" ' + '--resource-group "{rg}"', + checks=[]) + + # EXAMPLE: /Factories/delete/Factories_Delete @try_manual def step__factories_delete_factories_delete(test, rg): @@ -123,13 +410,40 @@ def cleanup(test, rg): def call_scenario(test, rg): setup(test, rg) step__factories_put_factories_createorupdate(test, rg) - step__factories_get_factories_get(test, rg) step__factories_get_factories_list(test, rg) step__factories_get_factories_listbyresourcegroup(test, rg) step__factories_post_factories_configurefactoryrepo(test, rg) step__factories_post_factories_getdataplaneaccess(test, rg) step__factories_post_factories_getgithubaccesstoken(test, rg) step__factories_patch_factories_update(test, rg) + step__factories_get_factories_get(test, rg) + step__integrationruntimes_put_integrationruntimes_create(test, rg) + step__integrationruntimes_get_integrationruntimes_listbyfactory(test, rg) + step__integrationruntimes_post_integrationruntimes_createlinkedintegrationruntime(test, rg) + step__integrationruntimes_post_integrationruntimes_getconnectioninfo(test, rg) + step__integrationruntimes_post_integrationruntimes_getmonitoringdata(test, rg) + step__integrationruntimes_post_integrationruntimes_getstatus(test, rg) + step__integrationruntimes_post_integrationruntimes_listauthkeys(test, rg) + step__integrationruntimes_post_integrationruntimes_regenerateauthkey(test, rg) + step__integrationruntimes_post_integrationruntimes_start(test, rg) + step__triggers_put_triggers_create(test, rg) + step__integrationruntimes_post_integrationruntimes_synccredentials(test, rg) + step__integrationruntimes_post_integrationruntimes_upgrade(test, rg) + step__integrationruntimes_post_integrationruntimes_upgrade(test, rg) + step__integrationruntimes_patch_integrationruntimes_update(test, rg) + step__triggers_put_triggers_update(test, rg) + step__triggers_get_triggers_get(test, rg) + step__triggers_get_triggers_listbyfactory(test, rg) + step__triggers_post_triggers_geteventsubscriptionstatus(test, rg) + step__triggers_post_triggers_querybyfactory(test, rg) + step__triggers_post_triggers_start(test, rg) + step__triggers_post_triggers_stop(test, rg) + step__triggers_post_triggers_subscribetoevents(test, rg) + step__triggers_post_triggers_unsubscribefromevents(test, rg) + step__triggers_delete_triggers_delete(test, rg) + step__integrationruntimes_get_integrationruntimes_get(test, rg) + step__integrationruntimes_post_integrationruntimes_stop(test, rg) + step__integrationruntimes_delete_integrationruntimes_delete(test, rg) step__factories_delete_factories_delete(test, rg) cleanup(test, rg) @@ -146,6 +460,9 @@ def test_datafactory(self, rg): self.kwargs.update({ 'exampleFactoryName': 'exampleFactoryName', + 'exampleTrigger': 'exampleTrigger', + 'exampleIntegrationRuntime': 'exampleIntegrationRuntime', + 'IntegrationRuntimes_2': 'exampleManagedIntegrationRuntime', }) call_scenario(self, rg) diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_dfaz_management_client.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_dfaz_management_client.py index 423b0df10..c815114f4 100644 --- 
a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_dfaz_management_client.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/_dfaz_management_client.py @@ -17,6 +17,8 @@ from ._configuration import DFAZManagementClientConfiguration from .operations import FactoryOperations +from .operations import TriggerOperations +from .operations import IntegrationRuntimeOperations from . import models @@ -25,6 +27,10 @@ class DFAZManagementClient(object): :ivar factory: FactoryOperations operations :vartype factory: azure.mgmt.datafactory.operations.FactoryOperations + :ivar trigger: TriggerOperations operations + :vartype trigger: azure.mgmt.datafactory.operations.TriggerOperations + :ivar integration_runtime: IntegrationRuntimeOperations operations + :vartype integration_runtime: azure.mgmt.datafactory.operations.IntegrationRuntimeOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials.TokenCredential :param subscription_id: The subscription identifier. @@ -52,6 +58,10 @@ def __init__( self.factory = FactoryOperations( self._client, self._config, self._serialize, self._deserialize) + self.trigger = TriggerOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtime = IntegrationRuntimeOperations( + self._client, self._config, self._serialize, self._deserialize) def close(self): # type: () -> None diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_dfaz_management_client_async.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_dfaz_management_client_async.py index f372e6248..fd731f545 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_dfaz_management_client_async.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/_dfaz_management_client_async.py @@ -13,6 +13,8 @@ from ._configuration_async import DFAZManagementClientConfiguration from .operations_async import FactoryOperations +from .operations_async import TriggerOperations +from .operations_async import IntegrationRuntimeOperations from .. import models @@ -21,6 +23,10 @@ class DFAZManagementClient(object): :ivar factory: FactoryOperations operations :vartype factory: azure.mgmt.datafactory.aio.operations_async.FactoryOperations + :ivar trigger: TriggerOperations operations + :vartype trigger: azure.mgmt.datafactory.aio.operations_async.TriggerOperations + :ivar integration_runtime: IntegrationRuntimeOperations operations + :vartype integration_runtime: azure.mgmt.datafactory.aio.operations_async.IntegrationRuntimeOperations :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param subscription_id: The subscription identifier. 
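Both regenerated clients now expose the new operation groups as attributes alongside `factory`. A minimal sketch of driving the sync client directly, assuming the vendored package re-exports `DFAZManagementClient` and that `azure-identity` supplies the `TokenCredential` (neither import is part of this diff):

```python
# Hypothetical smoke test for the regenerated vendored SDK; the import paths
# and the credential type are assumptions, not part of this diff.
from azure.identity import DefaultAzureCredential
from azext_datafactory.vendored_sdks.datafactory import DFAZManagementClient

client = DFAZManagementClient(
    credential=DefaultAzureCredential(),
    subscription_id='12345678-1234-1234-1234-12345678abc',
)
# The new operation groups hang off the client exactly like `factory` does.
for runtime in client.integration_runtime.list_by_factory(
        resource_group_name='exampleResourceGroup',
        factory_name='exampleFactoryName'):
    print(runtime.name)
client.close()
```

In the extension itself the client is never built by hand; `commands.py` wires each group to the `cf_trigger` and `cf_integration_runtime` factories from `_client_factory.py` instead.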
@@ -47,6 +53,10 @@ def __init__( self.factory = FactoryOperations( self._client, self._config, self._serialize, self._deserialize) + self.trigger = TriggerOperations( + self._client, self._config, self._serialize, self._deserialize) + self.integration_runtime = IntegrationRuntimeOperations( + self._client, self._config, self._serialize, self._deserialize) async def close(self) -> None: await self._client.close() diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py index 46ac50300..ec41f6669 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/__init__.py @@ -7,7 +7,11 @@ # -------------------------------------------------------------------------- from ._factory_operations_async import FactoryOperations +from ._trigger_operations_async import TriggerOperations +from ._integration_runtime_operations_async import IntegrationRuntimeOperations __all__ = [ 'FactoryOperations', + 'TriggerOperations', + 'IntegrationRuntimeOperations', ] diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py new file mode 100644 index 000000000..5f0c7d951 --- /dev/null +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_integration_runtime_operations_async.py @@ -0,0 +1,1156 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimeOperations: + """IntegrationRuntimeOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. 
+ :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.IntegrationRuntimeListResponse"]: + """Lists integration runtimes. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of IntegrationRuntimeListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore + + async def create_or_update( + self, + 
resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + properties: object, + if_match: Optional[str] = None, + **kwargs + ) -> "models.IntegrationRuntimeResource": + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param properties: Integration runtime properties. + :type properties: object + :param if_match: ETag of the integration runtime entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _integration_runtime = models.IntegrationRuntimeResource(properties=properties) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_integration_runtime, 'IntegrationRuntimeResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + 
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.IntegrationRuntimeResource": + """Gets an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. + If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + async def 
update( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + auto_update: Optional[object] = None, + update_delay_offset: Optional[str] = None, + **kwargs + ) -> "models.IntegrationRuntimeResource": + """Updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param auto_update: Enables or disables the auto-update feature of the self-hosted integration + runtime. See https://go.microsoft.com/fwlink/?linkid=854189. + :type auto_update: object + :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The + integration runtime auto-update will happen at that time. + :type update_delay_offset: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + +
if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> None: + """Deletes an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + async def get_status( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeStatusResponse": + """Gets detailed status information for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get_status.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + + async def get_connection_info( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> object: + """Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: object or the result of cls(response) + :rtype: object + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[object] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get_connection_info.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('object', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore + + async def regenerate_auth_key( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + regenerate_key_parameters: object, + **kwargs + ) -> object: + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. 
+ :type regenerate_key_parameters: object + :keyword callable cls: A custom type or function that will be passed the direct response + :return: object or the result of cls(response) + :rtype: object + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[object] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.regenerate_auth_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(regenerate_key_parameters, 'object') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('object', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore + + async def list_auth_key( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> object: + """Retrieves the authentication keys for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: object or the result of cls(response) + :rtype: object + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[object] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.list_auth_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('object', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore + + async def _start_initial( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeStatusResponse": + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + 
query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + async def start( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> "models.IntegrationRuntimeStatusResponse": + """Starts a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: IntegrationRuntimeStatusResponse + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = await self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + return await async_poller(self._client, raw_result, get_long_running_output, polling_method) + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + async def _stop_initial( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + async def stop( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> 
None: + """Stops a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = await self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + return await async_poller(self._client, raw_result, get_long_running_output, polling_method) + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + async def sync_credentials( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> None: + """Force the integration runtime to synchronize credentials across integration runtime nodes, and this will override the credentials across all worker nodes with those available on the dispatcher node. If you already have the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime node rather than using this API directly. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name.
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.sync_credentials.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore + + async def get_monitoring_data( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> object: + """Get the integration runtime monitoring data, which includes the monitor data for all the nodes under this integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: object or the result of cls(response) + :rtype: object + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[object] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get_monitoring_data.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('object', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'} # type: ignore + + async def upgrade( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + **kwargs + ) -> None: + """Upgrade self-hosted integration runtime to latest version if available. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name.
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.upgrade.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'} # type: ignore + + async def remove_link( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + linked_factory_name: str, + **kwargs + ) -> None: + """Remove all linked integration runtimes under a specific data factory in a self-hosted integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param linked_factory_name: The data factory name for linked integration runtime.
+ :type linked_factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.remove_link.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore + + async def create_linked_integration_runtime( + self, + resource_group_name: str, + factory_name: str, + integration_runtime_name: str, + name: Optional[str] = None, + subscription_id: Optional[str] = None, + data_factory_name: Optional[str] = None, + data_factory_location: Optional[str] = None, + **kwargs + ) -> "models.IntegrationRuntimeStatusResponse": + """Create a linked integration runtime entry in a shared integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param name: The name of the linked integration runtime. 
+ :type name: str + :param subscription_id: The ID of the subscription that the linked integration runtime belongs + to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked integration runtime + belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the linked integration + runtime belongs to. + :type data_factory_location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_linked_integration_runtime.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: 
ignore diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py new file mode 100644 index 000000000..29b673b79 --- /dev/null +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/aio/operations_async/_trigger_operations_async.py @@ -0,0 +1,830 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union +import warnings + +from azure.core.async_paging import AsyncItemPaged, AsyncList +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest +from azure.core.polling import AsyncNoPolling, AsyncPollingMethod, async_poller +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling + +from ... import models + +T = TypeVar('T') +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] + +class TriggerOperations: + """TriggerOperations async operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer) -> None: + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name: str, + factory_name: str, + **kwargs + ) -> AsyncIterable["models.TriggerListResponse"]: + """Lists triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of TriggerListResponse or the result of cls(response) + :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.datafactory.models.TriggerListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + async def extract_data(pipeline_response): + deserialized = self._deserialize('TriggerListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, AsyncList(list_of_elem) + + async def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return AsyncItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore + + async def query_by_factory( + self, + resource_group_name: str, + factory_name: str, + continuation_token: Optional[str] = None, + parent_trigger_name: Optional[str] = None, + **kwargs + ) -> "models.TriggerQueryResponse": + """Query triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param continuation_token: The continuation token for getting the next page of results. Null + for first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun + triggers. 
+ :type parent_trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token, parent_trigger_name=parent_trigger_name) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_filter_parameters, 'TriggerFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore + + async def create_or_update( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + properties: object, + if_match: Optional[str] = None, + **kwargs + ) -> "models.TriggerResource": + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param properties: Properties of the trigger. + :type properties: object + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. 
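The `if_match` parameter documented here is the hook for optimistic concurrency. A hedged read-modify-write sketch (client wiring and the ETag attribute name on the returned resource model are assumptions):

```python
async def update_description(client, rg, factory, name, description):
    current = await client.trigger.get(rg, factory, name)
    current.properties.description = description
    # Conditional PUT: the service rejects the write with 412 if the trigger
    # changed since it was read; pass '*' to update unconditionally.
    return await client.trigger.create_or_update(
        rg, factory, name, current.properties,
        if_match=current.etag,  # attribute name assumed from the model shape
    )
```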
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _trigger = models.TriggerResource(properties=properties) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_trigger, 'TriggerResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + async def get( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + if_none_match: Optional[str] = None, + **kwargs + ) -> "models.TriggerResource": + """Gets a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag + matches the existing entity tag, or if * was provided, then no content will be returned. 
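Symmetrically, `get` honors `If-None-Match`: on a 304 the method skips deserialization and returns `None` (note the `[200, 304]` status check below), which supports a simple cache-refresh pattern (sketch; names assumed as above):

```python
async def refresh_if_changed(client, rg, factory, name, cached):
    fresh = await client.trigger.get(
        rg, factory, name,
        if_none_match=cached.etag,  # ETag attribute name assumed
    )
    # None means 304 Not Modified: keep the cached copy.
    return cached if fresh is None else fresh
```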
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + async def delete( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> None: + """Deletes a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. 
+ :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + async def _subscribe_to_event_initial( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> "models.TriggerSubscriptionOperationStatus": + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._subscribe_to_event_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' 
+ + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + async def subscribe_to_event( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> "models.TriggerSubscriptionOperationStatus": + """Subscribe event trigger to events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: TriggerSubscriptionOperationStatus + :rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = await self._subscribe_to_event_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + return await async_poller(self._client, raw_result, get_long_running_output, polling_method) + subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + async def get_event_subscription_status( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> "models.TriggerSubscriptionOperationStatus": + """Get a trigger's event subscription status. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. 
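A typical event-trigger flow pairs `subscribe_to_event` above with the status probe defined here. Sketch (the `status` field on `TriggerSubscriptionOperationStatus` is assumed from the model name; that model's body is outside this hunk):

```python
async def ensure_subscribed(client, rg, factory, name) -> bool:
    # The LRO wrapper resolves once the subscription operation completes.
    await client.trigger.subscribe_to_event(rg, factory, name)
    status = await client.trigger.get_event_subscription_status(rg, factory, name)
    # EventSubscriptionStatus derives from (str, Enum), so comparing against
    # the wire value works directly.
    return status.status == "Enabled"
```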
+ :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerSubscriptionOperationStatus or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get_event_subscription_status.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore + + async def _unsubscribe_from_event_initial( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> "models.TriggerSubscriptionOperationStatus": + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, 
pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + async def unsubscribe_from_event( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> "models.TriggerSubscriptionOperationStatus": + """Unsubscribe event trigger from events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
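The polling keywords documented above apply to every long-running wrapper in this file: `polling` selects the strategy and `polling_interval` the delay. Sketch of the options (names assumed as elsewhere):

```python
async def unsubscribe(client, rg, factory, name):
    # Default: AsyncARMPolling, sleeping `polling_interval` between polls
    # unless the service sends a Retry-After header. Alternatives:
    #   polling=False                -> AsyncNoPolling; resolve after the
    #                                   initial 200/202 response
    #   polling=<AsyncPollingMethod> -> a custom strategy instance
    #   polling_interval=5           -> override the default delay
    return await client.trigger.unsubscribe_from_event(rg, factory, name)
```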
+ :return: TriggerSubscriptionOperationStatus + :rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = await self._unsubscribe_from_event_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + return await async_poller(self._client, raw_result, get_long_running_output, polling_method) + unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + async def _start_initial( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + async def start( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> None: + """Starts a trigger. 
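`start` here and the matching `stop` further down resolve to `None` once the operation finishes, so bouncing a trigger is just two awaits (sketch):

```python
async def restart_trigger(client, rg, factory, name):
    # Each await blocks until the corresponding LRO reaches a terminal state.
    await client.trigger.stop(rg, factory, name)
    await client.trigger.start(rg, factory, name)
```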
+ + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = await self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + return await async_poller(self._client, raw_result, get_long_running_output, polling_method) + start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + async def _stop_initial( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> None: + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': 
'/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore + + async def stop( + self, + resource_group_name: str, + factory_name: str, + trigger_name: str, + **kwargs + ) -> None: + """Stops a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: None + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = await self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = AsyncNoPolling() + else: polling_method = polling + return await async_poller(self._client, raw_result, get_long_running_output, polling_method) + stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py index a0649797b..6aab9e028 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/__init__.py @@ -8,7 +8,17 @@ try: from ._models_py3 import AccessPolicyResponse + from ._models_py3 import BlobEventsTrigger + from ._models_py3 import BlobTrigger + from ._models_py3 import ChainingTrigger from ._models_py3 import CloudError + from ._models_py3 import CmdkeySetup + from ._models_py3 import ComponentSetup + from ._models_py3 import CreateLinkedIntegrationRuntimeRequest + from ._models_py3 import CustomSetupBase + from ._models_py3 import DependencyReference + from ._models_py3 import EntityReference + from ._models_py3 import EnvironmentVariableSetup from ._models_py3 import Factory from ._models_py3 import FactoryGitHubConfiguration from ._models_py3 import FactoryIdentity @@ -20,11 +30,86 @@ from ._models_py3 import FakeFactoryIdentity from ._models_py3 import GitHubAccessTokenRequest from ._models_py3 import GitHubAccessTokenResponse + from ._models_py3 import IntegrationRuntime + from ._models_py3 import IntegrationRuntimeAuthKeys + from ._models_py3 import IntegrationRuntimeComputeProperties + from 
._models_py3 import IntegrationRuntimeConnectionInfo + from ._models_py3 import IntegrationRuntimeCustomSetupScriptProperties + from ._models_py3 import IntegrationRuntimeDataFlowProperties + from ._models_py3 import IntegrationRuntimeDataProxyProperties + from ._models_py3 import IntegrationRuntimeListResponse + from ._models_py3 import IntegrationRuntimeMonitoringData + from ._models_py3 import IntegrationRuntimeNodeIpAddress + from ._models_py3 import IntegrationRuntimeNodeMonitoringData + from ._models_py3 import IntegrationRuntimeReference + from ._models_py3 import IntegrationRuntimeRegenerateKeyParameters + from ._models_py3 import IntegrationRuntimeResource + from ._models_py3 import IntegrationRuntimeSsisCatalogInfo + from ._models_py3 import IntegrationRuntimeSsisProperties + from ._models_py3 import IntegrationRuntimeStatus + from ._models_py3 import IntegrationRuntimeStatusListResponse + from ._models_py3 import IntegrationRuntimeStatusResponse + from ._models_py3 import IntegrationRuntimeVNetProperties + from ._models_py3 import LinkedIntegrationRuntime + from ._models_py3 import LinkedIntegrationRuntimeKeyAuthorization + from ._models_py3 import LinkedIntegrationRuntimeRbacAuthorization + from ._models_py3 import LinkedIntegrationRuntimeRequest + from ._models_py3 import LinkedIntegrationRuntimeType + from ._models_py3 import ManagedIntegrationRuntime + from ._models_py3 import ManagedIntegrationRuntimeError + from ._models_py3 import ManagedIntegrationRuntimeNode + from ._models_py3 import ManagedIntegrationRuntimeOperationResult + from ._models_py3 import ManagedIntegrationRuntimeStatus + from ._models_py3 import MultiplePipelineTrigger + from ._models_py3 import PackageStore + from ._models_py3 import RecurrenceSchedule + from ._models_py3 import RecurrenceScheduleOccurrence + from ._models_py3 import RerunTumblingWindowTrigger from ._models_py3 import Resource + from ._models_py3 import RetryPolicy + from ._models_py3 import ScheduleTrigger + from ._models_py3 import ScheduleTriggerRecurrence + from ._models_py3 import SecretBase + from ._models_py3 import SecureString + from ._models_py3 import SelfDependencyTumblingWindowTriggerReference + from ._models_py3 import SelfHostedIntegrationRuntime + from ._models_py3 import SelfHostedIntegrationRuntimeNode + from ._models_py3 import SelfHostedIntegrationRuntimeStatus + from ._models_py3 import SsisEnvironment + from ._models_py3 import SsisEnvironmentReference + from ._models_py3 import SsisFolder + from ._models_py3 import SsisObjectMetadata + from ._models_py3 import SsisObjectMetadataListResponse + from ._models_py3 import SsisPackage + from ._models_py3 import SsisParameter + from ._models_py3 import SsisProject + from ._models_py3 import SsisVariable + from ._models_py3 import SubResource + from ._models_py3 import Trigger + from ._models_py3 import TriggerDependencyReference + from ._models_py3 import TriggerFilterParameters + from ._models_py3 import TriggerListResponse + from ._models_py3 import TriggerQueryResponse + from ._models_py3 import TriggerReference + from ._models_py3 import TriggerResource + from ._models_py3 import TriggerSubscriptionOperationStatus + from ._models_py3 import TumblingWindowTrigger + from ._models_py3 import TumblingWindowTriggerDependencyReference + from ._models_py3 import UpdateIntegrationRuntimeRequest from ._models_py3 import UserAccessPolicy except (SyntaxError, ImportError): from ._models import AccessPolicyResponse # type: ignore + from ._models import BlobEventsTrigger # type: 
ignore + from ._models import BlobTrigger # type: ignore + from ._models import ChainingTrigger # type: ignore from ._models import CloudError # type: ignore + from ._models import CmdkeySetup # type: ignore + from ._models import ComponentSetup # type: ignore + from ._models import CreateLinkedIntegrationRuntimeRequest # type: ignore + from ._models import CustomSetupBase # type: ignore + from ._models import DependencyReference # type: ignore + from ._models import EntityReference # type: ignore + from ._models import EnvironmentVariableSetup # type: ignore from ._models import Factory # type: ignore from ._models import FactoryGitHubConfiguration # type: ignore from ._models import FactoryIdentity # type: ignore @@ -36,12 +121,111 @@ from ._models import FakeFactoryIdentity # type: ignore from ._models import GitHubAccessTokenRequest # type: ignore from ._models import GitHubAccessTokenResponse # type: ignore + from ._models import IntegrationRuntime # type: ignore + from ._models import IntegrationRuntimeAuthKeys # type: ignore + from ._models import IntegrationRuntimeComputeProperties # type: ignore + from ._models import IntegrationRuntimeConnectionInfo # type: ignore + from ._models import IntegrationRuntimeCustomSetupScriptProperties # type: ignore + from ._models import IntegrationRuntimeDataFlowProperties # type: ignore + from ._models import IntegrationRuntimeDataProxyProperties # type: ignore + from ._models import IntegrationRuntimeListResponse # type: ignore + from ._models import IntegrationRuntimeMonitoringData # type: ignore + from ._models import IntegrationRuntimeNodeIpAddress # type: ignore + from ._models import IntegrationRuntimeNodeMonitoringData # type: ignore + from ._models import IntegrationRuntimeReference # type: ignore + from ._models import IntegrationRuntimeRegenerateKeyParameters # type: ignore + from ._models import IntegrationRuntimeResource # type: ignore + from ._models import IntegrationRuntimeSsisCatalogInfo # type: ignore + from ._models import IntegrationRuntimeSsisProperties # type: ignore + from ._models import IntegrationRuntimeStatus # type: ignore + from ._models import IntegrationRuntimeStatusListResponse # type: ignore + from ._models import IntegrationRuntimeStatusResponse # type: ignore + from ._models import IntegrationRuntimeVNetProperties # type: ignore + from ._models import LinkedIntegrationRuntime # type: ignore + from ._models import LinkedIntegrationRuntimeKeyAuthorization # type: ignore + from ._models import LinkedIntegrationRuntimeRbacAuthorization # type: ignore + from ._models import LinkedIntegrationRuntimeRequest # type: ignore + from ._models import LinkedIntegrationRuntimeType # type: ignore + from ._models import ManagedIntegrationRuntime # type: ignore + from ._models import ManagedIntegrationRuntimeError # type: ignore + from ._models import ManagedIntegrationRuntimeNode # type: ignore + from ._models import ManagedIntegrationRuntimeOperationResult # type: ignore + from ._models import ManagedIntegrationRuntimeStatus # type: ignore + from ._models import MultiplePipelineTrigger # type: ignore + from ._models import PackageStore # type: ignore + from ._models import RecurrenceSchedule # type: ignore + from ._models import RecurrenceScheduleOccurrence # type: ignore + from ._models import RerunTumblingWindowTrigger # type: ignore from ._models import Resource # type: ignore + from ._models import RetryPolicy # type: ignore + from ._models import ScheduleTrigger # type: ignore + from ._models import 
ScheduleTriggerRecurrence # type: ignore + from ._models import SecretBase # type: ignore + from ._models import SecureString # type: ignore + from ._models import SelfDependencyTumblingWindowTriggerReference # type: ignore + from ._models import SelfHostedIntegrationRuntime # type: ignore + from ._models import SelfHostedIntegrationRuntimeNode # type: ignore + from ._models import SelfHostedIntegrationRuntimeStatus # type: ignore + from ._models import SsisEnvironment # type: ignore + from ._models import SsisEnvironmentReference # type: ignore + from ._models import SsisFolder # type: ignore + from ._models import SsisObjectMetadata # type: ignore + from ._models import SsisObjectMetadataListResponse # type: ignore + from ._models import SsisPackage # type: ignore + from ._models import SsisParameter # type: ignore + from ._models import SsisProject # type: ignore + from ._models import SsisVariable # type: ignore + from ._models import SubResource # type: ignore + from ._models import Trigger # type: ignore + from ._models import TriggerDependencyReference # type: ignore + from ._models import TriggerFilterParameters # type: ignore + from ._models import TriggerListResponse # type: ignore + from ._models import TriggerQueryResponse # type: ignore + from ._models import TriggerReference # type: ignore + from ._models import TriggerResource # type: ignore + from ._models import TriggerSubscriptionOperationStatus # type: ignore + from ._models import TumblingWindowTrigger # type: ignore + from ._models import TumblingWindowTriggerDependencyReference # type: ignore + from ._models import UpdateIntegrationRuntimeRequest # type: ignore from ._models import UserAccessPolicy # type: ignore +from ._dfaz_management_client_enums import ( + BlobEventTypes, + DataFlowComputeType, + DayOfWeek, + DaysOfWeek, + EventSubscriptionStatus, + IntegrationRuntimeAuthKeyName, + IntegrationRuntimeAutoUpdate, + IntegrationRuntimeEdition, + IntegrationRuntimeEntityReferenceType, + IntegrationRuntimeInternalChannelEncryptionMode, + IntegrationRuntimeLicenseType, + IntegrationRuntimeSsisCatalogPricingTier, + IntegrationRuntimeState, + IntegrationRuntimeType, + IntegrationRuntimeUpdateResult, + ManagedIntegrationRuntimeNodeStatus, + RecurrenceFrequency, + SelfHostedIntegrationRuntimeNodeStatus, + SsisObjectMetadataType, + TriggerRuntimeState, + TumblingWindowFrequency, +) + __all__ = [ 'AccessPolicyResponse', + 'BlobEventsTrigger', + 'BlobTrigger', + 'ChainingTrigger', 'CloudError', + 'CmdkeySetup', + 'ComponentSetup', + 'CreateLinkedIntegrationRuntimeRequest', + 'CustomSetupBase', + 'DependencyReference', + 'EntityReference', + 'EnvironmentVariableSetup', 'Factory', 'FactoryGitHubConfiguration', 'FactoryIdentity', @@ -53,6 +237,92 @@ 'FakeFactoryIdentity', 'GitHubAccessTokenRequest', 'GitHubAccessTokenResponse', + 'IntegrationRuntime', + 'IntegrationRuntimeAuthKeys', + 'IntegrationRuntimeComputeProperties', + 'IntegrationRuntimeConnectionInfo', + 'IntegrationRuntimeCustomSetupScriptProperties', + 'IntegrationRuntimeDataFlowProperties', + 'IntegrationRuntimeDataProxyProperties', + 'IntegrationRuntimeListResponse', + 'IntegrationRuntimeMonitoringData', + 'IntegrationRuntimeNodeIpAddress', + 'IntegrationRuntimeNodeMonitoringData', + 'IntegrationRuntimeReference', + 'IntegrationRuntimeRegenerateKeyParameters', + 'IntegrationRuntimeResource', + 'IntegrationRuntimeSsisCatalogInfo', + 'IntegrationRuntimeSsisProperties', + 'IntegrationRuntimeStatus', + 'IntegrationRuntimeStatusListResponse', + 
'IntegrationRuntimeStatusResponse', + 'IntegrationRuntimeVNetProperties', + 'LinkedIntegrationRuntime', + 'LinkedIntegrationRuntimeKeyAuthorization', + 'LinkedIntegrationRuntimeRbacAuthorization', + 'LinkedIntegrationRuntimeRequest', + 'LinkedIntegrationRuntimeType', + 'ManagedIntegrationRuntime', + 'ManagedIntegrationRuntimeError', + 'ManagedIntegrationRuntimeNode', + 'ManagedIntegrationRuntimeOperationResult', + 'ManagedIntegrationRuntimeStatus', + 'MultiplePipelineTrigger', + 'PackageStore', + 'RecurrenceSchedule', + 'RecurrenceScheduleOccurrence', + 'RerunTumblingWindowTrigger', 'Resource', + 'RetryPolicy', + 'ScheduleTrigger', + 'ScheduleTriggerRecurrence', + 'SecretBase', + 'SecureString', + 'SelfDependencyTumblingWindowTriggerReference', + 'SelfHostedIntegrationRuntime', + 'SelfHostedIntegrationRuntimeNode', + 'SelfHostedIntegrationRuntimeStatus', + 'SsisEnvironment', + 'SsisEnvironmentReference', + 'SsisFolder', + 'SsisObjectMetadata', + 'SsisObjectMetadataListResponse', + 'SsisPackage', + 'SsisParameter', + 'SsisProject', + 'SsisVariable', + 'SubResource', + 'Trigger', + 'TriggerDependencyReference', + 'TriggerFilterParameters', + 'TriggerListResponse', + 'TriggerQueryResponse', + 'TriggerReference', + 'TriggerResource', + 'TriggerSubscriptionOperationStatus', + 'TumblingWindowTrigger', + 'TumblingWindowTriggerDependencyReference', + 'UpdateIntegrationRuntimeRequest', 'UserAccessPolicy', + 'BlobEventTypes', + 'DataFlowComputeType', + 'DayOfWeek', + 'DaysOfWeek', + 'EventSubscriptionStatus', + 'IntegrationRuntimeAuthKeyName', + 'IntegrationRuntimeAutoUpdate', + 'IntegrationRuntimeEdition', + 'IntegrationRuntimeEntityReferenceType', + 'IntegrationRuntimeInternalChannelEncryptionMode', + 'IntegrationRuntimeLicenseType', + 'IntegrationRuntimeSsisCatalogPricingTier', + 'IntegrationRuntimeState', + 'IntegrationRuntimeType', + 'IntegrationRuntimeUpdateResult', + 'ManagedIntegrationRuntimeNodeStatus', + 'RecurrenceFrequency', + 'SelfHostedIntegrationRuntimeNodeStatus', + 'SsisObjectMetadataType', + 'TriggerRuntimeState', + 'TumblingWindowFrequency', ] diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_dfaz_management_client_enums.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_dfaz_management_client_enums.py new file mode 100644 index 000000000..7aa38fb1a --- /dev/null +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_dfaz_management_client_enums.py @@ -0,0 +1,195 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum + +class BlobEventTypes(str, Enum): + + microsoft_storage_blob_created = "Microsoft.Storage.BlobCreated" + microsoft_storage_blob_deleted = "Microsoft.Storage.BlobDeleted" + +class DataFlowComputeType(str, Enum): + """Compute type of the cluster which will execute data flow job. + """ + + general = "General" + memory_optimized = "MemoryOptimized" + compute_optimized = "ComputeOptimized" + +class DayOfWeek(str, Enum): + """The days of the week. 
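Every class in this new enums file derives from `(str, Enum)`, so members compare equal to their wire-format strings and serialize without special casing. A self-contained sketch of the pattern (mirroring `TriggerRuntimeState` below rather than importing from the vendored path):

```python
from enum import Enum

class TriggerRuntimeState(str, Enum):
    """Mirror of the generated enum, for illustration only."""
    started = "Started"
    stopped = "Stopped"
    disabled = "Disabled"

assert TriggerRuntimeState.started == "Started"                       # plain-string comparison
assert TriggerRuntimeState("Stopped") is TriggerRuntimeState.stopped  # wire round-trip
```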
+ """ + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + +class DaysOfWeek(str, Enum): + + sunday = "Sunday" + monday = "Monday" + tuesday = "Tuesday" + wednesday = "Wednesday" + thursday = "Thursday" + friday = "Friday" + saturday = "Saturday" + +class EventSubscriptionStatus(str, Enum): + """Event Subscription Status. + """ + + enabled = "Enabled" + provisioning = "Provisioning" + deprovisioning = "Deprovisioning" + disabled = "Disabled" + unknown = "Unknown" + +class IntegrationRuntimeAuthKeyName(str, Enum): + """The name of the authentication key to regenerate. + """ + + auth_key1 = "authKey1" + auth_key2 = "authKey2" + +class IntegrationRuntimeAutoUpdate(str, Enum): + """The state of integration runtime auto update. + """ + + on = "On" + off = "Off" + +class IntegrationRuntimeEdition(str, Enum): + """The edition for the SSIS Integration Runtime + """ + + standard = "Standard" + enterprise = "Enterprise" + +class IntegrationRuntimeEntityReferenceType(str, Enum): + """The type of this referenced entity. + """ + + integration_runtime_reference = "IntegrationRuntimeReference" + linked_service_reference = "LinkedServiceReference" + +class IntegrationRuntimeInternalChannelEncryptionMode(str, Enum): + """It is used to set the encryption mode for node-node communication channel (when more than 2 + self-hosted integration runtime nodes exist). + """ + + not_set = "NotSet" + ssl_encrypted = "SslEncrypted" + not_encrypted = "NotEncrypted" + +class IntegrationRuntimeLicenseType(str, Enum): + """License type for bringing your own license scenario. + """ + + base_price = "BasePrice" + license_included = "LicenseIncluded" + +class IntegrationRuntimeSsisCatalogPricingTier(str, Enum): + """The pricing tier for the catalog database. The valid values could be found in + https://azure.microsoft.com/en-us/pricing/details/sql-database/ + """ + + basic = "Basic" + standard = "Standard" + premium = "Premium" + premium_rs = "PremiumRS" + +class IntegrationRuntimeState(str, Enum): + """The state of integration runtime. + """ + + initial = "Initial" + stopped = "Stopped" + started = "Started" + starting = "Starting" + stopping = "Stopping" + need_registration = "NeedRegistration" + online = "Online" + limited = "Limited" + offline = "Offline" + access_denied = "AccessDenied" + +class IntegrationRuntimeType(str, Enum): + """The type of integration runtime. + """ + + managed = "Managed" + self_hosted = "SelfHosted" + +class IntegrationRuntimeUpdateResult(str, Enum): + """The result of the last integration runtime node update. + """ + + none = "None" + succeed = "Succeed" + fail = "Fail" + +class ManagedIntegrationRuntimeNodeStatus(str, Enum): + """The managed integration runtime node status. + """ + + starting = "Starting" + available = "Available" + recycling = "Recycling" + unavailable = "Unavailable" + +class RecurrenceFrequency(str, Enum): + """Enumerates possible frequency option for the schedule trigger. + """ + + not_specified = "NotSpecified" + minute = "Minute" + hour = "Hour" + day = "Day" + week = "Week" + month = "Month" + year = "Year" + +class SelfHostedIntegrationRuntimeNodeStatus(str, Enum): + """Status of the integration runtime node. 
+ """ + + need_registration = "NeedRegistration" + online = "Online" + limited = "Limited" + offline = "Offline" + upgrading = "Upgrading" + initializing = "Initializing" + initialize_failed = "InitializeFailed" + +class SsisObjectMetadataType(str, Enum): + """The type of SSIS object metadata. + """ + + folder = "Folder" + project = "Project" + package = "Package" + environment = "Environment" + +class TriggerRuntimeState(str, Enum): + """Enumerates possible state of Triggers. + """ + + started = "Started" + stopped = "Stopped" + disabled = "Disabled" + +class TumblingWindowFrequency(str, Enum): + """Enumerates possible frequency option for the tumbling window trigger. + """ + + minute = "Minute" + hour = "Hour" diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py index 44563797e..8ad7b4a3e 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models.py @@ -36,6 +36,304 @@ def __init__( self.data_plane_url = kwargs.get('data_plane_url', None) +class Trigger(msrest.serialization.Model): + """Azure data factory nested object which contains information about creating pipeline run. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. 
+ :type annotations: list[object] + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ChainingTrigger': 'ChainingTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger'} + } + + def __init__( + self, + **kwargs + ): + super(Trigger, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'Trigger' + self.description = kwargs.get('description', None) + self.runtime_state = None + self.annotations = kwargs.get('annotations', None) + + +class MultiplePipelineTrigger(Trigger): + """Base class for all triggers that support one to many model for trigger to pipeline. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[object] + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} + } + + def __init__( + self, + **kwargs + ): + super(MultiplePipelineTrigger, self).__init__(**kwargs) + self.type = 'MultiplePipelineTrigger' + self.pipelines = kwargs.get('pipelines', None) + + +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. 
+ :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[object] + :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to + fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the + december folder under the records container. At least one of these must be provided: + blobPathBeginsWith, blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to + fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a + december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :type ignore_empty_blobs: bool + :param events: Required. The type of events that cause this trigger to fire. + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobEventsTrigger, self).__init__(**kwargs) + self.type = 'BlobEventsTrigger' + self.blob_path_begins_with = kwargs.get('blob_path_begins_with', None) + self.blob_path_ends_with = kwargs.get('blob_path_ends_with', None) + self.ignore_empty_blobs = kwargs.get('ignore_empty_blobs', None) + self.events = kwargs['events'] + self.scope = kwargs['scope'] + + +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. 
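`Trigger` and `MultiplePipelineTrigger` above both carry a `_subtype_map`, so msrest can dispatch on the `type` discriminator recursively: asking for a generic `Trigger` yields the concrete subclass. Deserialization sketch (the model-dictionary idiom is the standard generated-client pattern; the import path is abbreviated and assumed):

```python
from msrest import Deserializer
from azext_datafactory.vendored_sdks.datafactory import models  # path assumed

client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
deserialize = Deserializer(client_models)

payload = {
    "type": "BlobEventsTrigger",  # not in Trigger's own map; found by
                                  # flattening MultiplePipelineTrigger's map
    "typeProperties": {
        "events": ["Microsoft.Storage.BlobCreated"],
        "scope": "/subscriptions/.../storageAccounts/mystorage",  # placeholder
    },
}
trigger = deserialize("Trigger", payload)
assert isinstance(trigger, models.BlobEventsTrigger)
```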
Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[object] + :param folder_path: Required. The path of the container/folder that will trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to handle when it is + triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service reference. + :type linked_service: object + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(BlobTrigger, self).__init__(**kwargs) + self.type = 'BlobTrigger' + self.folder_path = kwargs['folder_path'] + self.max_concurrency = kwargs['max_concurrency'] + self.linked_service = kwargs['linked_service'] + + +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines + complete successfully. + :type pipeline: object + :param depends_on: Required. Upstream Pipelines. + :type depends_on: list[object] + :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream + pipelines. 
+ :type run_dimension: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'object'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[object]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ChainingTrigger, self).__init__(**kwargs) + self.type = 'ChainingTrigger' + self.pipeline = kwargs['pipeline'] + self.depends_on = kwargs['depends_on'] + self.run_dimension = kwargs['run_dimension'] + + class CloudError(msrest.serialization.Model): """The object that defines the structure of an Azure Data Factory error response. @@ -74,284 +372,3089 @@ def __init__( self.details = kwargs.get('details', None) -class Resource(msrest.serialization.Model): - """Azure Data Factory top-level resource. +class CustomSetupBase(msrest.serialization.Model): + """The base definition of the custom setup. - Variables are only populated by the server, and will be ignored when sending a request. + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CmdkeySetup, ComponentSetup, EnvironmentVariableSetup. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + 'type': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CmdkeySetup': 'CmdkeySetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup'} } def __init__( self, **kwargs ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None + super(CustomSetupBase, self).__init__(**kwargs) self.type = None - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.e_tag = None -class Factory(Resource): - """Factory resource type. +class CmdkeySetup(CustomSetupBase): + """The custom setup of running cmdkey commands. - Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. 
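Note the asymmetry in these `__init__` bodies: optional parameters go through `kwargs.get(..., None)` while required ones are indexed directly, so a missing required argument surfaces as a `KeyError` at construction time. A small sketch (the pipeline-reference dicts are illustrative placeholders for the `object`-typed parameters):

```python
from azure.mgmt.datafactory.models import ChainingTrigger

trigger = ChainingTrigger(
    pipeline={'referenceName': 'Downstream', 'type': 'PipelineReference'},
    depends_on=[{'referenceName': 'Upstream', 'type': 'PipelineReference'}],
    run_dimension='runDate',
)
assert trigger.type == 'ChainingTrigger'  # discriminator fixed by __init__

try:
    ChainingTrigger(pipeline={}, depends_on=[])  # run_dimension omitted
except KeyError as missing:
    print('required parameter not supplied:', missing)
```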
- :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str - :param additional_properties: Unmatched properties from the message are deserialized to this - collection. - :type additional_properties: dict[str, object] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :ivar provisioning_state: Factory provisioning state, example Succeeded. - :vartype provisioning_state: str - :ivar create_time: Time the factory was created in ISO8601 format. - :vartype create_time: ~datetime.datetime - :ivar version: Version of the factory. - :vartype version: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - :param fake_identity: This is only for az test. - :type fake_identity: ~azure.mgmt.datafactory.models.FakeFactoryIdentity - :param zones: This is only for az test. - :type zones: list[str] + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + :param target_name: Required. The server name of data source access. + :type target_name: object + :param user_name: Required. The user name of data source access. + :type user_name: object + :param password: Required. The password of data source access. + :type password: object """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, + 'type': {'required': True}, + 'target_name': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, - 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - 'fake_identity': {'key': 'properties.fakeIdentity', 'type': 'FakeFactoryIdentity'}, - 'zones': {'key': 'properties.zones', 'type': '[str]'}, + 'target_name': {'key': 'typeProperties.targetName', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(CmdkeySetup, self).__init__(**kwargs) + self.type = 'CmdkeySetup' + self.target_name = kwargs['target_name'] + self.user_name = kwargs['user_name'] + self.password = kwargs['password'] + + +class ComponentSetup(CustomSetupBase): + """The custom setup of installing 3rd party components. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + :param component_name: Required. 
The name of the 3rd party component. + :type component_name: str + :param license_key: The license key to activate the component. + :type license_key: object + """ + + _validation = { + 'type': {'required': True}, + 'component_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'component_name': {'key': 'typeProperties.componentName', 'type': 'str'}, + 'license_key': {'key': 'typeProperties.licenseKey', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(ComponentSetup, self).__init__(**kwargs) + self.type = 'ComponentSetup' + self.component_name = kwargs['component_name'] + self.license_key = kwargs.get('license_key', None) + + +class CreateLinkedIntegrationRuntimeRequest(msrest.serialization.Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked integration runtime belongs + to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked integration runtime + belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the linked integration + runtime belongs to. + :type data_factory_location: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.subscription_id = kwargs.get('subscription_id', None) + self.data_factory_name = kwargs.get('data_factory_name', None) + self.data_factory_location = kwargs.get('data_factory_location', None) + + +class DependencyReference(msrest.serialization.Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__( + self, + **kwargs + ): + super(DependencyReference, self).__init__(**kwargs) + self.type = None + + +class EntityReference(msrest.serialization.Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + "IntegrationRuntimeReference", "LinkedServiceReference". + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. 
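The dotted keys in `_attribute_map` (e.g. `typeProperties.componentName`) tell msrest to flatten and unflatten nested JSON, so the flat Python attributes round-trip to the nested wire shape. A sketch of the serialized form, assuming msrest's `Model.serialize()`:

```python
from azure.mgmt.datafactory.models import ComponentSetup

setup = ComponentSetup(component_name='MyComponent', license_key='XXXX')
print(setup.serialize())
# {'type': 'ComponentSetup',
#  'typeProperties': {'componentName': 'MyComponent', 'licenseKey': 'XXXX'}}
```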
+ :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EntityReference, self).__init__(**kwargs) + self.type = kwargs.get('type', None) + self.reference_name = kwargs.get('reference_name', None) + + +class EnvironmentVariableSetup(CustomSetupBase): + """The custom setup of setting environment variable. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + :param variable_name: Required. The name of the environment variable. + :type variable_name: str + :param variable_value: Required. The value of the environment variable. + :type variable_value: str + """ + + _validation = { + 'type': {'required': True}, + 'variable_name': {'required': True}, + 'variable_value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'variable_value': {'key': 'typeProperties.variableValue', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(EnvironmentVariableSetup, self).__init__(**kwargs) + self.type = 'EnvironmentVariableSetup' + self.variable_name = kwargs['variable_name'] + self.variable_value = kwargs['variable_value'] + + +class Resource(msrest.serialization.Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: A set of tags. The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = kwargs.get('location', None) + self.tags = kwargs.get('tags', None) + self.e_tag = None + + +class Factory(Resource): + """Factory resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: A set of tags. The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param identity: Managed service identity of the factory. 
+ :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar provisioning_state: Factory provisioning state, example Succeeded. + :vartype provisioning_state: str + :ivar create_time: Time the factory was created in ISO8601 format. + :vartype create_time: ~datetime.datetime + :ivar version: Version of the factory. + :vartype version: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :param fake_identity: This is only for az test. + :type fake_identity: ~azure.mgmt.datafactory.models.FakeFactoryIdentity + :param zones: This is only for az test. + :type zones: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + 'fake_identity': {'key': 'properties.fakeIdentity', 'type': 'FakeFactoryIdentity'}, + 'zones': {'key': 'properties.zones', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(Factory, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.identity = kwargs.get('identity', None) + self.provisioning_state = None + self.create_time = None + self.version = None + self.repo_configuration = kwargs.get('repo_configuration', None) + self.fake_identity = kwargs.get('fake_identity', None) + self.zones = kwargs.get('zones', None) + + +class FactoryRepoConfiguration(msrest.serialization.Model): + """Factory's git repo information. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FactoryGitHubConfiguration, FactoryVstsConfiguration. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of repo configuration.Constant filled by server. + :type type: str + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. 
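Attributes marked `'readonly': True` in `_validation` (such as `provisioning_state`, `create_time` and `version` on `Factory`) are initialized to `None`, only ever populated from service responses, and dropped again on serialization. A sketch:

```python
from azure.mgmt.datafactory.models import Factory, FactoryIdentity

factory = Factory(location='eastus', tags={'env': 'test'}, identity=FactoryIdentity())
print(factory.provisioning_state)  # None until the service fills it in
print(factory.serialize())         # read-only keys (id, eTag, ...) are omitted
```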
+ :type last_commit_id: str + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration', 'FactoryVSTSConfiguration': 'FactoryVstsConfiguration'} + } + + def __init__( + self, + **kwargs + ): + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.type = None + self.account_name = kwargs['account_name'] + self.repository_name = kwargs['repository_name'] + self.collaboration_branch = kwargs['collaboration_branch'] + self.root_folder = kwargs['root_folder'] + self.last_commit_id = kwargs.get('last_commit_id', None) + + +class FactoryGitHubConfiguration(FactoryRepoConfiguration): + """Factory's GitHub repo information. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of repo configuration.Constant filled by server. + :type type: str + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. + :type host_name: str + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FactoryGitHubConfiguration, self).__init__(**kwargs) + self.type = 'FactoryGitHubConfiguration' + self.host_name = kwargs.get('host_name', None) + + +class FactoryIdentity(msrest.serialization.Model): + """Identity properties of the factory resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. Currently the only supported type is 'SystemAssigned'. + Default value: "SystemAssigned". + :vartype type: str + :ivar principal_id: The principal id of the identity. + :vartype principal_id: str + :ivar tenant_id: The client tenant id of the identity. 
+ :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__( + self, + **kwargs + ): + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + + +class FactoryListResponse(msrest.serialization.Model): + """A list of factory resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of factories. + :type value: list[~azure.mgmt.datafactory.models.Factory] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Factory]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FactoryListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class FactoryRepoUpdate(msrest.serialization.Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__( + self, + **kwargs + ): + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = kwargs.get('factory_resource_id', None) + self.repo_configuration = kwargs.get('repo_configuration', None) + + +class FactoryUpdateParameters(msrest.serialization.Model): + """Parameters for updating a factory resource. + + :param tags: A set of tags. The resource tags. + :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__( + self, + **kwargs + ): + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = kwargs.get('tags', None) + self.identity = kwargs.get('identity', None) + + +class FactoryVstsConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of repo configuration.Constant filled by server. + :type type: str + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param project_name: Required. VSTS project name. + :type project_name: str + :param tenant_id: VSTS tenant id. 
+ :type tenant_id: str + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + 'project_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'project_name': {'key': 'projectName', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(FactoryVstsConfiguration, self).__init__(**kwargs) + self.type = 'FactoryVSTSConfiguration' + self.project_name = kwargs['project_name'] + self.tenant_id = kwargs.get('tenant_id', None) + + +class FakeFactoryIdentity(msrest.serialization.Model): + """This is only for az test. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. .. + :type name: str + :param zones_inside: sample of simple array. + :type zones_inside: list[str] + """ + + _validation = { + 'name': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'zones_inside': {'key': 'zonesInside', 'type': '[str]'}, + } + + def __init__( + self, + **kwargs + ): + super(FakeFactoryIdentity, self).__init__(**kwargs) + self.name = kwargs['name'] + self.zones_inside = kwargs.get('zones_inside', None) + + +class GitHubAccessTokenRequest(msrest.serialization.Model): + """Get GitHub access token request definition. + + All required parameters must be populated in order to send to Azure. + + :param git_hub_access_code: Required. GitHub access code. + :type git_hub_access_code: str + :param git_hub_client_id: GitHub application client ID. + :type git_hub_client_id: str + :param git_hub_access_token_base_url: Required. GitHub access token base URL. + :type git_hub_access_token_base_url: str + """ + + _validation = { + 'git_hub_access_code': {'required': True}, + 'git_hub_access_token_base_url': {'required': True}, + } + + _attribute_map = { + 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, + 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, + 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(GitHubAccessTokenRequest, self).__init__(**kwargs) + self.git_hub_access_code = kwargs['git_hub_access_code'] + self.git_hub_client_id = kwargs.get('git_hub_client_id', None) + self.git_hub_access_token_base_url = kwargs['git_hub_access_token_base_url'] + + +class GitHubAccessTokenResponse(msrest.serialization.Model): + """Get GitHub access token response definition. + + :param git_hub_access_token: GitHub access token. + :type git_hub_access_token: str + """ + + _attribute_map = { + 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(GitHubAccessTokenResponse, self).__init__(**kwargs) + self.git_hub_access_token = kwargs.get('git_hub_access_token', None) + + +class IntegrationRuntime(msrest.serialization.Model): + """Azure Data Factory nested object which serves as a compute resource for activities. + + You probably want to use the sub-classes and not this class directly. 
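Notice that the discriminator value `'FactoryVSTSConfiguration'` deliberately differs in casing from the Python class name `FactoryVstsConfiguration`; `_subtype_map` carries that wire-to-class mapping, and `__init__` pins the wire casing on the instance. A sketch:

```python
from azure.mgmt.datafactory.models import FactoryVstsConfiguration

repo = FactoryVstsConfiguration(
    account_name='contoso',
    repository_name='adf-pipelines',
    collaboration_branch='main',
    root_folder='/',
    project_name='data-platform',
)
print(repo.serialize()['type'])  # 'FactoryVSTSConfiguration', not the class name
```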
Known + sub-classes are: ManagedIntegrationRuntime, SelfHostedIntegrationRuntime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Managed': 'ManagedIntegrationRuntime', 'SelfHosted': 'SelfHostedIntegrationRuntime'} + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntime, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'IntegrationRuntime' + self.description = kwargs.get('description', None) + + +class IntegrationRuntimeAuthKeys(msrest.serialization.Model): + """The integration runtime authentication keys. + + :param auth_key1: The primary integration runtime authentication key. + :type auth_key1: str + :param auth_key2: The secondary integration runtime authentication key. + :type auth_key2: str + """ + + _attribute_map = { + 'auth_key1': {'key': 'authKey1', 'type': 'str'}, + 'auth_key2': {'key': 'authKey2', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs) + self.auth_key1 = kwargs.get('auth_key1', None) + self.auth_key2 = kwargs.get('auth_key2', None) + + +class IntegrationRuntimeComputeProperties(msrest.serialization.Model): + """The compute resource properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param location: The location for managed integration runtime. The supported regions could be + found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement- + activities. + :type location: str + :param node_size: The node size requirement to managed integration runtime. + :type node_size: str + :param number_of_nodes: The required number of nodes for managed integration runtime. + :type number_of_nodes: int + :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed + integration runtime. + :type max_parallel_executions_per_node: int + :param data_flow_properties: Data flow properties for managed integration runtime. + :type data_flow_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties + :param v_net_properties: VNet properties for managed integration runtime. 
+ :type v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties + """ + + _validation = { + 'number_of_nodes': {'minimum': 1}, + 'max_parallel_executions_per_node': {'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'location': {'key': 'location', 'type': 'str'}, + 'node_size': {'key': 'nodeSize', 'type': 'str'}, + 'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'}, + 'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'}, + 'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'}, + 'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.location = kwargs.get('location', None) + self.node_size = kwargs.get('node_size', None) + self.number_of_nodes = kwargs.get('number_of_nodes', None) + self.max_parallel_executions_per_node = kwargs.get('max_parallel_executions_per_node', None) + self.data_flow_properties = kwargs.get('data_flow_properties', None) + self.v_net_properties = kwargs.get('v_net_properties', None) + + +class IntegrationRuntimeConnectionInfo(msrest.serialization.Model): + """Connection information for encrypting the on-premises data source credentials. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar service_token: The token generated in service. Callers use this token to authenticate to + integration runtime. + :vartype service_token: str + :ivar identity_cert_thumbprint: The integration runtime SSL certificate thumbprint. Click-Once + application uses it to do server validation. + :vartype identity_cert_thumbprint: str + :ivar host_service_uri: The on-premises integration runtime host URL. + :vartype host_service_uri: str + :ivar version: The integration runtime version. + :vartype version: str + :ivar public_key: The public key for encrypting a credential when transferring the credential + to the integration runtime. + :vartype public_key: str + :ivar is_identity_cert_exprired: Whether the identity certificate is expired. 
+ :vartype is_identity_cert_exprired: bool + """ + + _validation = { + 'service_token': {'readonly': True}, + 'identity_cert_thumbprint': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'version': {'readonly': True}, + 'public_key': {'readonly': True}, + 'is_identity_cert_exprired': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'service_token': {'key': 'serviceToken', 'type': 'str'}, + 'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'public_key': {'key': 'publicKey', 'type': 'str'}, + 'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.service_token = None + self.identity_cert_thumbprint = None + self.host_service_uri = None + self.version = None + self.public_key = None + self.is_identity_cert_exprired = None + + +class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model): + """Custom setup script properties for a managed dedicated integration runtime. + + :param blob_container_uri: The URI of the Azure blob container that contains the custom setup + script. + :type blob_container_uri: str + :param sas_token: The SAS token of the Azure blob container. + :type sas_token: object + """ + + _attribute_map = { + 'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'}, + 'sas_token': {'key': 'sasToken', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs) + self.blob_container_uri = kwargs.get('blob_container_uri', None) + self.sas_token = kwargs.get('sas_token', None) + + +class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model): + """Data flow properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param compute_type: Compute type of the cluster which will execute data flow job. Possible + values include: "General", "MemoryOptimized", "ComputeOptimized". + :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType + :param core_count: Core count of the cluster which will execute data flow job. Supported values + are: 8, 16, 32, 48, 80, 144 and 272. + :type core_count: int + :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data + flow job. 
+ :type time_to_live: int + """ + + _validation = { + 'time_to_live': {'minimum': 0}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.compute_type = kwargs.get('compute_type', None) + self.core_count = kwargs.get('core_count', None) + self.time_to_live = kwargs.get('time_to_live', None) + + +class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = kwargs.get('connect_via', None) + self.staging_linked_service = kwargs.get('staging_linked_service', None) + self.path = kwargs.get('path', None) + + +class IntegrationRuntimeListResponse(msrest.serialization.Model): + """A list of integration runtime resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtimes. + :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) + + +class IntegrationRuntimeMonitoringData(msrest.serialization.Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. + :type nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = kwargs.get('name', None) + self.nodes = kwargs.get('nodes', None) + + +class IntegrationRuntimeNodeIpAddress(msrest.serialization.Model): + """The IP address of self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. 
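Constraints such as `{'minimum': 0}` on `time_to_live` or `{'minimum': 1}` on `number_of_nodes` are not enforced on attribute assignment; they are checked by msrest's client-side validation. A sketch, assuming msrest's `Model.validate()` helper:

```python
from azure.mgmt.datafactory.models import IntegrationRuntimeDataFlowProperties

props = IntegrationRuntimeDataFlowProperties(
    compute_type='General',
    core_count=8,
    time_to_live=-5,  # violates the {'minimum': 0} rule in _validation
)
errors = props.validate()
print([str(e) for e in errors])  # one ValidationError for time_to_live
```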
+ :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None + + +class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): + """Monitoring data for integration runtime node. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. + :vartype available_memory_in_mb: int + :ivar cpu_utilization: CPU percentage on the integration runtime node. + :vartype cpu_utilization: int + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration runtime node. + :vartype concurrent_jobs_limit: int + :ivar concurrent_jobs_running: The number of jobs currently running on the integration runtime + node. + :vartype concurrent_jobs_running: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration runtime. + :vartype max_concurrent_jobs: int + :ivar sent_bytes: Sent bytes on the integration runtime node. + :vartype sent_bytes: float + :ivar received_bytes: Received bytes on the integration runtime node. + :vartype received_bytes: float + """ + + _validation = { + 'node_name': {'readonly': True}, + 'available_memory_in_mb': {'readonly': True}, + 'cpu_utilization': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'concurrent_jobs_running': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + 'sent_bytes': {'readonly': True}, + 'received_bytes': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, + 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, + 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.node_name = None + self.available_memory_in_mb = None + self.cpu_utilization = None + self.concurrent_jobs_limit = None + self.concurrent_jobs_running = None + self.max_concurrent_jobs = None + self.sent_bytes = None + self.received_bytes = None + + +class IntegrationRuntimeReference(msrest.serialization.Model): + """Integration runtime reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Type of integration runtime. Default value: + "IntegrationRuntimeReference". + :vartype type: str + :param reference_name: Required. Reference integration runtime name. 
+ :type reference_name: str + :param parameters: Arguments for integration runtime. + :type parameters: object + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'object'}, + } + + type = "IntegrationRuntimeReference" + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.reference_name = kwargs['reference_name'] + self.parameters = kwargs.get('parameters', None) + + +class IntegrationRuntimeRegenerateKeyParameters(msrest.serialization.Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. Possible values include: + "authKey1", "authKey2". + :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = kwargs.get('key_name', None) + + +class SubResource(msrest.serialization.Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class IntegrationRuntimeResource(SubResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Integration runtime properties. + :type properties: object + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] + + +class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): + """Catalog information for managed dedicated integration runtime. 
+ + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user account of the catalog + database. + :type catalog_admin_password: object + :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could + be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values + include: "Basic", "Standard", "Premium", "PremiumRS". + :type catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'object'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_server_endpoint = kwargs.get('catalog_server_endpoint', None) + self.catalog_admin_user_name = kwargs.get('catalog_admin_user_name', None) + self.catalog_admin_password = kwargs.get('catalog_admin_password', None) + self.catalog_pricing_tier = kwargs.get('catalog_pricing_tier', None) + + +class IntegrationRuntimeSsisProperties(msrest.serialization.Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration runtime. + :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. Possible values + include: "BasePrice", "LicenseIncluded". + :type license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for a managed dedicated + integration runtime. + :type custom_setup_script_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed dedicated integration + runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. Possible values include: + "Standard", "Enterprise". + :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + :param express_custom_setup_properties: Custom setup without script properties for a SSIS + integration runtime. + :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] + :param package_stores: Package stores for the SSIS Integration Runtime. 
+ :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'}, + 'license_type': {'key': 'licenseType', 'type': 'str'}, + 'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'}, + 'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'}, + 'edition': {'key': 'edition', 'type': 'str'}, + 'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'}, + 'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.catalog_info = kwargs.get('catalog_info', None) + self.license_type = kwargs.get('license_type', None) + self.custom_setup_script_properties = kwargs.get('custom_setup_script_properties', None) + self.data_proxy_properties = kwargs.get('data_proxy_properties', None) + self.edition = kwargs.get('edition', None) + self.express_custom_setup_properties = kwargs.get('express_custom_setup_properties', None) + self.package_stores = kwargs.get('package_stores', None) + + +class IntegrationRuntimeStatus(msrest.serialization.Model): + """Integration runtime status. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ManagedIntegrationRuntimeStatus, SelfHostedIntegrationRuntimeStatus. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :ivar data_factory_name: The data factory name which the integration runtime belong to. + :vartype data_factory_name: str + :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", + "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", + "AccessDenied". + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState + """ + + _validation = { + 'type': {'required': True}, + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Managed': 'ManagedIntegrationRuntimeStatus', 'SelfHosted': 'SelfHostedIntegrationRuntimeStatus'} + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeStatus, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.type = 'IntegrationRuntimeStatus' + self.data_factory_name = None + self.state = None + + +class IntegrationRuntimeStatusListResponse(msrest.serialization.Model): + """A list of integration runtime status. 
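`express_custom_setup_properties` is typed as `[CustomSetupBase]`, so any mix of the `CmdkeySetup`/`ComponentSetup`/`EnvironmentVariableSetup` subclasses defined earlier can be passed, and each element is serialized with its own discriminator. A sketch (the secret payload is an illustrative placeholder):

```python
from azure.mgmt.datafactory.models import (
    CmdkeySetup,
    ComponentSetup,
    IntegrationRuntimeSsisProperties,
)

ssis = IntegrationRuntimeSsisProperties(
    edition='Standard',
    express_custom_setup_properties=[
        CmdkeySetup(
            target_name='sqlserver01',
            user_name='ssis-svc',
            password={'type': 'SecureString', 'value': '***'},  # illustrative
        ),
        ComponentSetup(component_name='MyComponent'),
    ],
)
body = ssis.serialize()
print([item['type'] for item in body['expressCustomSetupProperties']])
# ['CmdkeySetup', 'ComponentSetup']
```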
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. List of integration runtime status.
+    :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]
+    :param next_link: The link to the next page of results, if any remaining results exist.
+    :type next_link: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs)
+        self.value = kwargs['value']
+        self.next_link = kwargs.get('next_link', None)
+
+
+class IntegrationRuntimeStatusResponse(msrest.serialization.Model):
+    """Integration runtime status response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar name: The integration runtime name.
+    :vartype name: str
+    :param properties: Required. Integration runtime properties.
+    :type properties: object
+    """
+
+    _validation = {
+        'name': {'readonly': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs)
+        self.name = None
+        self.properties = kwargs['properties']
+
+
+class IntegrationRuntimeVNetProperties(msrest.serialization.Model):
+    """VNet properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param v_net_id: The ID of the VNet that this integration runtime will join.
+    :type v_net_id: str
+    :param subnet: The name of the subnet this integration runtime will join.
+    :type subnet: str
+    :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will
+     use.
+    :type public_i_ps: list[str]
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'v_net_id': {'key': 'vNetId', 'type': 'str'},
+        'subnet': {'key': 'subnet', 'type': 'str'},
+        'public_i_ps': {'key': 'publicIPs', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs)
+        self.additional_properties = kwargs.get('additional_properties', None)
+        self.v_net_id = kwargs.get('v_net_id', None)
+        self.subnet = kwargs.get('subnet', None)
+        self.public_i_ps = kwargs.get('public_i_ps', None)
+
+
+class LinkedIntegrationRuntime(msrest.serialization.Model):
+    """The linked integration runtime information.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar name: The name of the linked integration runtime.
+    :vartype name: str
+    :ivar subscription_id: The ID of the subscription that the linked integration runtime belongs
+     to.
+    :vartype subscription_id: str
+    :ivar data_factory_name: The name of the data factory that the linked integration runtime
+     belongs to.
+    :vartype data_factory_name: str
+    :ivar data_factory_location: The location of the data factory that the linked integration
+     runtime belongs to.
+    :vartype data_factory_location: str
+    :ivar create_time: The time when the linked integration runtime was created.
+class LinkedIntegrationRuntime(msrest.serialization.Model):
+ """The linked integration runtime information.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ :ivar name: The name of the linked integration runtime.
+ :vartype name: str
+ :ivar subscription_id: The subscription ID to which the linked integration runtime belongs.
+ :vartype subscription_id: str
+ :ivar data_factory_name: The name of the data factory to which the linked integration runtime
+ belongs.
+ :vartype data_factory_name: str
+ :ivar data_factory_location: The location of the data factory to which the linked integration
+ runtime belongs.
+ :vartype data_factory_location: str
+ :ivar create_time: The creation time of the linked integration runtime.
+ :vartype create_time: ~datetime.datetime
+ """
+
+ _validation = {
+ 'name': {'readonly': True},
+ 'subscription_id': {'readonly': True},
+ 'data_factory_name': {'readonly': True},
+ 'data_factory_location': {'readonly': True},
+ 'create_time': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'name': {'key': 'name', 'type': 'str'},
+ 'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+ 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+ 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
+ 'create_time': {'key': 'createTime', 'type': 'iso-8601'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LinkedIntegrationRuntime, self).__init__(**kwargs)
+ self.name = None
+ self.subscription_id = None
+ self.data_factory_name = None
+ self.data_factory_location = None
+ self.create_time = None
+
+
+class LinkedIntegrationRuntimeType(msrest.serialization.Model):
+ """The base definition of a linked integration runtime.
+
+ You probably want to use the sub-classes and not this class directly. Known
+ sub-classes are: LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param authorization_type: Required. The authorization type for integration runtime
+ sharing. Constant filled by server.
+ :type authorization_type: str
+ """
+
+ _validation = {
+ 'authorization_type': {'required': True},
+ }
+
+ _attribute_map = {
+ 'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+ }
+
+ _subtype_map = {
+ 'authorization_type': {'Key': 'LinkedIntegrationRuntimeKeyAuthorization', 'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization'}
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LinkedIntegrationRuntimeType, self).__init__(**kwargs)
+ self.authorization_type = None
+
+
+class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
+ """The key authorization type integration runtime.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param authorization_type: Required. The authorization type for integration runtime
+ sharing. Constant filled by server.
+ :type authorization_type: str
+ :param key: Required. The key used for authorization.
+ :type key: object
+ """
+
+ _validation = {
+ 'authorization_type': {'required': True},
+ 'key': {'required': True},
+ }
+
+ _attribute_map = {
+ 'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+ 'key': {'key': 'key', 'type': 'object'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs)
+ self.authorization_type = 'Key'
+ self.key = kwargs['key']
+
+
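Note the constructor pattern for required fields: __init__ reads them with kwargs['...'], so omitting one raises KeyError at construction time, and the discriminator is pinned by the class rather than taken from the caller. A small sketch (editor's illustration, not part of the generated diff; the secret value is hypothetical):

# Sketch: 'key' is mandatory and 'authorization_type' is fixed to 'Key'.
from azure.mgmt.datafactory import models

auth = models.LinkedIntegrationRuntimeKeyAuthorization(
    key=models.SecureString(value='example-authorization-key'),
)
assert auth.authorization_type == 'Key'
try:
    models.LinkedIntegrationRuntimeKeyAuthorization()  # no 'key' supplied
except KeyError:
    pass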
+class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType):
+ """The role based access control (RBAC) authorization type integration runtime.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param authorization_type: Required. The authorization type for integration runtime
+ sharing. Constant filled by server.
+ :type authorization_type: str
+ :param resource_id: Required. The resource identifier of the integration runtime to be shared.
+ :type resource_id: str
+ """
+
+ _validation = {
+ 'authorization_type': {'required': True},
+ 'resource_id': {'required': True},
+ }
+
+ _attribute_map = {
+ 'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+ 'resource_id': {'key': 'resourceId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs)
+ self.authorization_type = 'RBAC'
+ self.resource_id = kwargs['resource_id']
+
+
+class LinkedIntegrationRuntimeRequest(msrest.serialization.Model):
+ """Data factory name for linked integration runtime request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param linked_factory_name: Required. The data factory name for linked integration runtime.
+ :type linked_factory_name: str
+ """
+
+ _validation = {
+ 'linked_factory_name': {'required': True},
+ }
+
+ _attribute_map = {
+ 'linked_factory_name': {'key': 'factoryName', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs)
+ self.linked_factory_name = kwargs['linked_factory_name']
+
+
+class ManagedIntegrationRuntime(IntegrationRuntime):
+ """Managed integration runtime, including managed elastic and managed dedicated integration runtimes.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of integration runtime. Constant filled by server. Possible values
+ include: "Managed", "SelfHosted".
+ :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType
+ :param description: Integration runtime description.
+ :type description: str
+ :ivar state: Integration runtime state, only valid for managed dedicated integration runtime.
+ Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping",
+ "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied".
+ :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :param compute_properties: The compute resource for managed integration runtime.
+ :type compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties
+ :param ssis_properties: SSIS properties for managed integration runtime.
+ :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties
+ """
+
+ _validation = {
+ 'type': {'required': True},
+ 'state': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'description': {'key': 'description', 'type': 'str'},
+ 'state': {'key': 'state', 'type': 'str'},
+ 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'},
+ 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ManagedIntegrationRuntime, self).__init__(**kwargs)
+ self.type = 'Managed'
+ self.state = None
+ self.compute_properties = kwargs.get('compute_properties', None)
+ self.ssis_properties = kwargs.get('ssis_properties', None)
+
+
+class ManagedIntegrationRuntimeError(msrest.serialization.Model):
+ """Error definition for managed integration runtime.
+ + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar time: The time when the error occurred. + :vartype time: ~datetime.datetime + :ivar code: Error code. + :vartype code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar message: Error message. + :vartype message: str + """ + + _validation = { + 'time': {'readonly': True}, + 'code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'code': {'key': 'code', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.time = None + self.code = None + self.parameters = None + self.message = None + + +class ManagedIntegrationRuntimeNode(msrest.serialization.Model): + """Properties of integration runtime node. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar node_id: The managed integration runtime node id. + :vartype node_id: str + :ivar status: The managed integration runtime node status. Possible values include: "Starting", + "Available", "Recycling", "Unavailable". + :vartype status: str or ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus + :param errors: The errors that occurred on this integration runtime node. + :type errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + """ + + _validation = { + 'node_id': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_id': {'key': 'nodeId', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.node_id = None + self.status = None + self.errors = kwargs.get('errors', None) + + +class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): + """Properties of managed integration runtime operation result. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar type: The operation type. Could be start or stop. + :vartype type: str + :ivar start_time: The start time of the operation. + :vartype start_time: ~datetime.datetime + :ivar result: The operation result. + :vartype result: str + :ivar error_code: The error code. + :vartype error_code: str + :ivar parameters: Managed integration runtime error parameters. 
+ :vartype parameters: list[str]
+ :ivar activity_id: The activity id for the operation request.
+ :vartype activity_id: str
+ """
+
+ _validation = {
+ 'type': {'readonly': True},
+ 'start_time': {'readonly': True},
+ 'result': {'readonly': True},
+ 'error_code': {'readonly': True},
+ 'parameters': {'readonly': True},
+ 'activity_id': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'type': {'key': 'type', 'type': 'str'},
+ 'start_time': {'key': 'startTime', 'type': 'iso-8601'},
+ 'result': {'key': 'result', 'type': 'str'},
+ 'error_code': {'key': 'errorCode', 'type': 'str'},
+ 'parameters': {'key': 'parameters', 'type': '[str]'},
+ 'activity_id': {'key': 'activityId', 'type': 'str'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.type = None
+ self.start_time = None
+ self.result = None
+ self.error_code = None
+ self.parameters = None
+ self.activity_id = None
+
+
+class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+ """Managed integration runtime status.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of integration runtime. Constant filled by server. Possible values
+ include: "Managed", "SelfHosted".
+ :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType
+ :ivar data_factory_name: The data factory name to which the integration runtime belongs.
+ :vartype data_factory_name: str
+ :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped",
+ "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline",
+ "AccessDenied".
+ :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :ivar create_time: The time at which the integration runtime was created, in ISO8601 format.
+ :vartype create_time: ~datetime.datetime
+ :ivar nodes: The list of nodes for managed integration runtime.
+ :vartype nodes: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
+ :ivar other_errors: The errors that occurred on this integration runtime.
+ :vartype other_errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+ :ivar last_operation: The last operation result that occurred on this integration runtime.
+ :vartype last_operation: + ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult + """ + + _validation = { + 'type': {'required': True}, + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'nodes': {'readonly': True}, + 'other_errors': {'readonly': True}, + 'last_operation': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'}, + 'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'}, + 'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'}, + } + + def __init__( + self, + **kwargs + ): + super(ManagedIntegrationRuntimeStatus, self).__init__(**kwargs) + self.type = 'Managed' + self.create_time = None + self.nodes = None + self.other_errors = None + self.last_operation = None + + +class PackageStore(msrest.serialization.Model): + """Package store for the SSIS integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param name: Required. The name of the package store. + :type name: str + :param package_store_linked_service: Required. The package store linked service reference. + :type package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference + """ + + _validation = { + 'name': {'required': True}, + 'package_store_linked_service': {'required': True}, + } + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'package_store_linked_service': {'key': 'packageStoreLinkedService', 'type': 'EntityReference'}, + } + + def __init__( + self, + **kwargs + ): + super(PackageStore, self).__init__(**kwargs) + self.name = kwargs['name'] + self.package_store_linked_service = kwargs['package_store_linked_service'] + + +class RecurrenceSchedule(msrest.serialization.Model): + """The recurrence schedule. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param minutes: The minutes. + :type minutes: list[int] + :param hours: The hours. + :type hours: list[int] + :param week_days: The days of the week. + :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek] + :param month_days: The month days. + :type month_days: list[int] + :param monthly_occurrences: The monthly occurrences. 
+ :type monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence] + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'minutes': {'key': 'minutes', 'type': '[int]'}, + 'hours': {'key': 'hours', 'type': '[int]'}, + 'week_days': {'key': 'weekDays', 'type': '[str]'}, + 'month_days': {'key': 'monthDays', 'type': '[int]'}, + 'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'}, + } + + def __init__( + self, + **kwargs + ): + super(RecurrenceSchedule, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.minutes = kwargs.get('minutes', None) + self.hours = kwargs.get('hours', None) + self.week_days = kwargs.get('week_days', None) + self.month_days = kwargs.get('month_days', None) + self.monthly_occurrences = kwargs.get('monthly_occurrences', None) + + +class RecurrenceScheduleOccurrence(msrest.serialization.Model): + """The recurrence schedule occurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday", + "Wednesday", "Thursday", "Friday", "Saturday". + :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek + :param occurrence: The occurrence. + :type occurrence: int + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'day': {'key': 'day', 'type': 'str'}, + 'occurrence': {'key': 'occurrence', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(RecurrenceScheduleOccurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.day = kwargs.get('day', None) + self.occurrence = kwargs.get('occurrence', None) + + +class RerunTumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline reruns for all fixed time interval windows from a requested start time to requested end time. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param parent_trigger: Required. The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period for which restatement + is initiated. Only UTC time is currently supported. + :type requested_start_time: ~datetime.datetime + :param requested_end_time: Required. The end time for the time period for which restatement is + initiated. Only UTC time is currently supported. + :type requested_end_time: ~datetime.datetime + :param rerun_concurrency: Required. 
The max number of parallel time windows (ready for + execution) for which a rerun is triggered. + :type rerun_concurrency: int + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'parent_trigger': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(RerunTumblingWindowTrigger, self).__init__(**kwargs) + self.type = 'RerunTumblingWindowTrigger' + self.parent_trigger = kwargs['parent_trigger'] + self.requested_start_time = kwargs['requested_start_time'] + self.requested_end_time = kwargs['requested_end_time'] + self.rerun_concurrency = kwargs['rerun_concurrency'] + + +class RetryPolicy(msrest.serialization.Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__( + self, + **kwargs + ): + super(RetryPolicy, self).__init__(**kwargs) + self.count = kwargs.get('count', None) + self.interval_in_seconds = kwargs.get('interval_in_seconds', None) + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[object] + :param recurrence: Required. Recurrence schedule configuration. 
+ :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__( + self, + **kwargs + ): + super(ScheduleTrigger, self).__init__(**kwargs) + self.type = 'ScheduleTrigger' + self.recurrence = kwargs['recurrence'] + + +class ScheduleTriggerRecurrence(msrest.serialization.Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + "Day", "Week", "Month", "Year". + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: ~datetime.datetime + :param end_time: The end time. + :type end_time: ~datetime.datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. + :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__( + self, + **kwargs + ): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = kwargs.get('additional_properties', None) + self.frequency = kwargs.get('frequency', None) + self.interval = kwargs.get('interval', None) + self.start_time = kwargs.get('start_time', None) + self.end_time = kwargs.get('end_time', None) + self.time_zone = kwargs.get('time_zone', None) + self.schedule = kwargs.get('schedule', None) + + +class SecretBase(msrest.serialization.Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecureString. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of the secret.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SecureString': 'SecureString'} + } + + def __init__( + self, + **kwargs + ): + super(SecretBase, self).__init__(**kwargs) + self.type = None + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of the secret.Constant filled by server. 
+ :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SecureString, self).__init__(**kwargs) + self.type = 'SecureString' + self.value = kwargs['value'] + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling window when + evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. + :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'-((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.type = 'SelfDependencyTumblingWindowTriggerReference' + self.offset = kwargs['offset'] + self.size = kwargs.get('size', None) + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :param linked_info: The base definition of a linked integration runtime. + :type linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__( + self, + **kwargs + ): + super(SelfHostedIntegrationRuntime, self).__init__(**kwargs) + self.type = 'SelfHosted' + self.linked_info = kwargs.get('linked_info', None) + + +class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. 
+ :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. + :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values include: + "NeedRegistration", "Online", "Limited", "Offline", "Upgrading", "Initializing", + "InitializeFailed". + :vartype status: str or ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary. + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. + :vartype version: str + :ivar register_time: The time at which the integration runtime node was registered in ISO8601 + format. + :vartype register_time: ~datetime.datetime + :ivar last_connect_time: The most recent time at which the integration runtime was connected in + ISO8601 format. + :vartype last_connect_time: ~datetime.datetime + :ivar expiry_time: The time at which the integration runtime will expire in ISO8601 format. + :vartype expiry_time: ~datetime.datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: ~datetime.datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: ~datetime.datetime + :ivar last_update_result: The result of the last integration runtime node update. Possible + values include: "None", "Succeed", "Fail". + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime node update start. + :vartype last_start_update_time: ~datetime.datetime + :ivar last_end_update_time: The last time for the integration runtime node update end. + :vartype last_end_update_time: ~datetime.datetime + :ivar is_active_dispatcher: Indicates whether this node is the active dispatcher for + integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration runtime. 
+ :vartype max_concurrent_jobs: int
+ """
+
+ _validation = {
+ 'node_name': {'readonly': True},
+ 'machine_name': {'readonly': True},
+ 'host_service_uri': {'readonly': True},
+ 'status': {'readonly': True},
+ 'capabilities': {'readonly': True},
+ 'version_status': {'readonly': True},
+ 'version': {'readonly': True},
+ 'register_time': {'readonly': True},
+ 'last_connect_time': {'readonly': True},
+ 'expiry_time': {'readonly': True},
+ 'last_start_time': {'readonly': True},
+ 'last_stop_time': {'readonly': True},
+ 'last_update_result': {'readonly': True},
+ 'last_start_update_time': {'readonly': True},
+ 'last_end_update_time': {'readonly': True},
+ 'is_active_dispatcher': {'readonly': True},
+ 'concurrent_jobs_limit': {'readonly': True},
+ 'max_concurrent_jobs': {'readonly': True},
+ }
+
+ _attribute_map = {
+ 'additional_properties': {'key': '', 'type': '{object}'},
+ 'node_name': {'key': 'nodeName', 'type': 'str'},
+ 'machine_name': {'key': 'machineName', 'type': 'str'},
+ 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'},
+ 'status': {'key': 'status', 'type': 'str'},
+ 'capabilities': {'key': 'capabilities', 'type': '{str}'},
+ 'version_status': {'key': 'versionStatus', 'type': 'str'},
+ 'version': {'key': 'version', 'type': 'str'},
+ 'register_time': {'key': 'registerTime', 'type': 'iso-8601'},
+ 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'},
+ 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'},
+ 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'},
+ 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'},
+ 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'},
+ 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'},
+ 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'},
+ 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'},
+ 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'},
+ 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'},
+ }
+
+ def __init__(
+ self,
+ **kwargs
+ ):
+ super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs)
+ self.additional_properties = kwargs.get('additional_properties', None)
+ self.node_name = None
+ self.machine_name = None
+ self.host_service_uri = None
+ self.status = None
+ self.capabilities = None
+ self.version_status = None
+ self.version = None
+ self.register_time = None
+ self.last_connect_time = None
+ self.expiry_time = None
+ self.last_start_time = None
+ self.last_stop_time = None
+ self.last_update_result = None
+ self.last_start_update_time = None
+ self.last_end_update_time = None
+ self.is_active_dispatcher = None
+ self.concurrent_jobs_limit = None
+ self.max_concurrent_jobs = None
+
+
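Every field of SelfHostedIntegrationRuntimeNode is read-only, so locally constructed instances start out empty; values appear only when a service response is deserialized, and serialize() drops them again unless explicitly told to keep read-only data. A short sketch (editor's illustration, not part of the generated diff):

# Sketch: read-only attributes round-trip only with keep_readonly=True.
from azure.mgmt.datafactory import models

node = models.SelfHostedIntegrationRuntimeNode.deserialize({
    'nodeName': 'Node_1',
    'status': 'Online',
    'concurrentJobsLimit': 4,
})
assert node.node_name == 'Node_1'
assert node.serialize() == {}  # read-only values omitted by default
assert node.serialize(keep_readonly=True)['nodeName'] == 'Node_1'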
+class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus):
+ """Self-hosted integration runtime status.
+
+ Variables are only populated by the server, and will be ignored when sending a request.
+
+ All required parameters must be populated in order to send to Azure.
+
+ :param additional_properties: Unmatched properties from the message are deserialized to this
+ collection.
+ :type additional_properties: dict[str, object]
+ :param type: Required. Type of integration runtime. Constant filled by server. Possible values
+ include: "Managed", "SelfHosted".
+ :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType
+ :ivar data_factory_name: The data factory name to which the integration runtime belongs.
+ :vartype data_factory_name: str
+ :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped",
+ "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline",
+ "AccessDenied".
+ :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+ :ivar create_time: The time at which the integration runtime was created, in ISO8601 format.
+ :vartype create_time: ~datetime.datetime
+ :ivar task_queue_id: The task queue id of the integration runtime.
+ :vartype task_queue_id: str
+ :ivar internal_channel_encryption: The encryption mode used for the node-to-node communication
+ channel (when more than two self-hosted integration runtime nodes exist). Possible
+ values include: "NotSet", "SslEncrypted", "NotEncrypted".
+ :vartype internal_channel_encryption: str or
+ ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode
+ :ivar version: Version of the integration runtime.
+ :vartype version: str
+ :param nodes: The list of nodes for this integration runtime.
+ :type nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode]
+ :ivar scheduled_update_date: The date at which the integration runtime will be scheduled to
+ update, in ISO8601 format.
+ :vartype scheduled_update_date: ~datetime.datetime
+ :ivar update_delay_offset: The time of day at which the service is scheduled to update the
+ integration runtime, e.g., PT03H is 3 hours.
+ :vartype update_delay_offset: str
+ :ivar local_time_zone_offset: The local time zone offset in hours.
+ :vartype local_time_zone_offset: str
+ :ivar capabilities: Object with additional information about integration runtime capabilities.
+ :vartype capabilities: dict[str, str]
+ :ivar service_urls: The URLs of the services used by the integration runtime backend service.
+ :vartype service_urls: list[str]
+ :ivar auto_update: Whether auto-update has been turned on for the self-hosted integration
+ runtime. Possible values include: "On", "Off".
+ :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate
+ :ivar version_status: Status of the integration runtime version.
+ :vartype version_status: str
+ :param links: The list of linked integration runtimes that are created to share with this
+ integration runtime.
+ :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime]
+ :ivar pushed_version: The version that the integration runtime is going to update to.
+ :vartype pushed_version: str
+ :ivar latest_version: The latest version available from the download center.
+ :vartype latest_version: str
+ :ivar auto_update_eta: The estimated time when the self-hosted integration runtime will be
+ updated.
+ :vartype auto_update_eta: ~datetime.datetime + """ + + _validation = { + 'type': {'required': True}, + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'task_queue_id': {'readonly': True}, + 'internal_channel_encryption': {'readonly': True}, + 'version': {'readonly': True}, + 'scheduled_update_date': {'readonly': True}, + 'update_delay_offset': {'readonly': True}, + 'local_time_zone_offset': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'service_urls': {'readonly': True}, + 'auto_update': {'readonly': True}, + 'version_status': {'readonly': True}, + 'pushed_version': {'readonly': True}, + 'latest_version': {'readonly': True}, + 'auto_update_eta': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, + 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, + 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, + 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, + 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, + 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, + 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, + 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, + 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, + 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, + 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, + 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, + 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, + } + + def __init__( + self, + **kwargs + ): + super(SelfHostedIntegrationRuntimeStatus, self).__init__(**kwargs) + self.type = 'SelfHosted' + self.create_time = None + self.task_queue_id = None + self.internal_channel_encryption = None + self.version = None + self.nodes = kwargs.get('nodes', None) + self.scheduled_update_date = None + self.update_delay_offset = None + self.local_time_zone_offset = None + self.capabilities = None + self.service_urls = None + self.auto_update = None + self.version_status = None + self.links = kwargs.get('links', None) + self.pushed_version = None + self.latest_version = None + self.auto_update_eta = None + + +class SsisObjectMetadata(msrest.serialization.Model): + """SSIS object metadata. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisFolder, SsisPackage, SsisProject. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. 
+ :type name: str + :param description: Metadata description. + :type description: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Folder': 'SsisFolder', 'Package': 'SsisPackage', 'Project': 'SsisProject'} + } + + def __init__( + self, + **kwargs + ): + super(SsisObjectMetadata, self).__init__(**kwargs) + self.type = None + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param folder_id: Folder id which contains environment. + :type folder_id: long + :param variables: Variable in environment. + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__( + self, + **kwargs + ): + super(SsisEnvironment, self).__init__(**kwargs) + self.type = 'Environment' + self.folder_id = kwargs.get('folder_id', None) + self.variables = kwargs.get('variables', None) + + +class SsisEnvironmentReference(msrest.serialization.Model): + """Ssis environment reference. + + :param id: Environment reference id. + :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. + :type environment_name: str + :param reference_type: Reference type. + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.environment_folder_name = kwargs.get('environment_folder_name', None) + self.environment_name = kwargs.get('environment_name', None) + self.reference_type = kwargs.get('reference_type', None) + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. 
+ :type description: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SsisFolder, self).__init__(**kwargs) + self.type = 'Folder' + + +class SsisObjectMetadataListResponse(msrest.serialization.Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = kwargs.get('value', None) + self.next_link = kwargs.get('next_link', None) + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. + :type project_id: long + :param parameters: Parameters in package. + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__( + self, + **kwargs + ): + super(SsisPackage, self).__init__(**kwargs) + self.type = 'Package' + self.folder_id = kwargs.get('folder_id', None) + self.project_version = kwargs.get('project_version', None) + self.project_id = kwargs.get('project_id', None) + self.parameters = kwargs.get('parameters', None) + + +class SsisParameter(msrest.serialization.Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. 
+ :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. + :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SsisParameter, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.required = kwargs.get('required', None) + self.sensitive = kwargs.get('sensitive', None) + self.design_default_value = kwargs.get('design_default_value', None) + self.default_value = kwargs.get('default_value', None) + self.sensitive_default_value = kwargs.get('sensitive_default_value', None) + self.value_type = kwargs.get('value_type', None) + self.value_set = kwargs.get('value_set', None) + self.variable = kwargs.get('variable', None) + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. + :type version: long + :param environment_refs: Environment reference in project. + :type environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project. + :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__( + self, + **kwargs + ): + super(SsisProject, self).__init__(**kwargs) + self.type = 'Project' + self.folder_id = kwargs.get('folder_id', None) + self.version = kwargs.get('version', None) + self.environment_refs = kwargs.get('environment_refs', None) + self.parameters = kwargs.get('parameters', None) + + +class SsisVariable(msrest.serialization.Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. 
+ :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. + :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, } def __init__( self, **kwargs ): - super(Factory, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.identity = kwargs.get('identity', None) - self.provisioning_state = None - self.create_time = None - self.version = None - self.repo_configuration = kwargs.get('repo_configuration', None) - self.fake_identity = kwargs.get('fake_identity', None) - self.zones = kwargs.get('zones', None) + super(SsisVariable, self).__init__(**kwargs) + self.id = kwargs.get('id', None) + self.name = kwargs.get('name', None) + self.description = kwargs.get('description', None) + self.data_type = kwargs.get('data_type', None) + self.sensitive = kwargs.get('sensitive', None) + self.value = kwargs.get('value', None) + self.sensitive_value = kwargs.get('sensitive_value', None) -class FactoryRepoConfiguration(msrest.serialization.Model): - """Factory's git repo information. +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryGitHubConfiguration, FactoryVstsConfiguration. + sub-classes are: TumblingWindowTriggerDependencyReference. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. + :param type: Required. The type of dependency reference.Constant filled by server. :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str + :param reference_trigger: Required. Referenced trigger. 
+ :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, + 'reference_trigger': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, } _subtype_map = { - 'type': {'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration', 'FactoryVSTSConfiguration': 'FactoryVstsConfiguration'} + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} } def __init__( self, **kwargs ): - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.type = None - self.account_name = kwargs['account_name'] - self.repository_name = kwargs['repository_name'] - self.collaboration_branch = kwargs['collaboration_branch'] - self.root_folder = kwargs['root_folder'] - self.last_commit_id = kwargs.get('last_commit_id', None) + super(TriggerDependencyReference, self).__init__(**kwargs) + self.type = 'TriggerDependencyReference' + self.reference_trigger = kwargs['reference_trigger'] -class FactoryGitHubConfiguration(FactoryRepoConfiguration): - """Factory's GitHub repo information. - - All required parameters must be populated in order to send to Azure. +class TriggerFilterParameters(msrest.serialization.Model): + """Query parameters for triggers. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. - :type host_name: str + :param continuation_token: The continuation token for getting the next page of results. Null + for first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun + triggers. 
+ :type parent_trigger_name: str """ - _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - } - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, } def __init__( self, **kwargs ): - super(FactoryGitHubConfiguration, self).__init__(**kwargs) - self.type = 'FactoryGitHubConfiguration' - self.host_name = kwargs.get('host_name', None) + super(TriggerFilterParameters, self).__init__(**kwargs) + self.continuation_token = kwargs.get('continuation_token', None) + self.parent_trigger_name = kwargs.get('parent_trigger_name', None) -class FactoryIdentity(msrest.serialization.Model): - """Identity properties of the factory resource. - - Variables are only populated by the server, and will be ignored when sending a request. +class TriggerListResponse(msrest.serialization.Model): + """A list of trigger resources. All required parameters must be populated in order to send to Azure. - :ivar type: Required. The identity type. Currently the only supported type is 'SystemAssigned'. - Default value: "SystemAssigned". - :vartype type: str - :ivar principal_id: The principal id of the identity. - :vartype principal_id: str - :ivar tenant_id: The client tenant id of the identity. - :vartype tenant_id: str + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str """ _validation = { - 'type': {'required': True, 'constant': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, + 'value': {'required': True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, } - type = "SystemAssigned" - def __init__( self, **kwargs ): - super(FactoryIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None + super(TriggerListResponse, self).__init__(**kwargs) + self.value = kwargs['value'] + self.next_link = kwargs.get('next_link', None) -class FactoryListResponse(msrest.serialization.Model): - """A list of factory resources. +class TriggerQueryResponse(msrest.serialization.Model): + """A query of triggers. All required parameters must be populated in order to send to Azure. - :param value: Required. List of factories. - :type value: list[~azure.mgmt.datafactory.models.Factory] - :param next_link: The link to the next page of results, if any remaining results exist. - :type next_link: str + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param continuation_token: The continuation token for getting the next page of results, if any + remaining results exist, null otherwise. 
+ :type continuation_token: str """ _validation = { @@ -359,198 +3462,282 @@ class FactoryListResponse(msrest.serialization.Model): } _attribute_map = { - 'value': {'key': 'value', 'type': '[Factory]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } def __init__( self, **kwargs ): - super(FactoryListResponse, self).__init__(**kwargs) + super(TriggerQueryResponse, self).__init__(**kwargs) self.value = kwargs['value'] - self.next_link = kwargs.get('next_link', None) + self.continuation_token = kwargs.get('continuation_token', None) -class FactoryRepoUpdate(msrest.serialization.Model): - """Factory's git repo information. +class TriggerReference(msrest.serialization.Model): + """Trigger reference type. - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: "TriggerReference". + :vartype type: str + :param reference_name: Required. Reference trigger name. + :type reference_name: str """ + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, } + type = "TriggerReference" + def __init__( self, **kwargs ): - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = kwargs.get('factory_resource_id', None) - self.repo_configuration = kwargs.get('repo_configuration', None) + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = kwargs['reference_name'] -class FactoryUpdateParameters(msrest.serialization.Model): - """Parameters for updating a factory resource. +class TriggerResource(SubResource): + """Trigger resource type. - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the trigger. 
+ :type properties: object """ + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, } def __init__( self, **kwargs ): - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.identity = kwargs.get('identity', None) + super(TriggerResource, self).__init__(**kwargs) + self.properties = kwargs['properties'] -class FactoryVstsConfiguration(FactoryRepoConfiguration): - """Factory's VSTS repo information. +class TriggerSubscriptionOperationStatus(msrest.serialization.Model): + """Defines the response of a trigger subscription operation. - All required parameters must be populated in order to send to Azure. + Variables are only populated by the server, and will be ignored when sending a request. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. - :type tenant_id: str + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", + "Deprovisioning", "Disabled", "Unknown". + :vartype status: str or ~azure.mgmt.datafactory.models.EventSubscriptionStatus """ _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'project_name': {'required': True}, + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'project_name': {'key': 'projectName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, } def __init__( self, **kwargs ): - super(FactoryVstsConfiguration, self).__init__(**kwargs) - self.type = 'FactoryVSTSConfiguration' - self.project_name = kwargs['project_name'] - self.tenant_id = kwargs.get('tenant_id', None) + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None -class FakeFactoryIdentity(msrest.serialization.Model): - """This is only for az test. 
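A quick aside on the `readonly` markers that recur in the `_validation` maps above (for example `trigger_name` and `status` on `TriggerSubscriptionOperationStatus`): msrest populates such fields when deserializing a response but drops them again when serializing a request body. A minimal, self-contained sketch, an editor's illustration depending only on msrest and mirroring the shape of `TriggerSubscriptionOperationStatus`:

```python
# Illustrative sketch (editor's addition, not part of the generated diff):
# how msrest honours the 'readonly' markers used in the _validation maps.
import msrest.serialization


class OperationStatus(msrest.serialization.Model):
    """Minimal stand-in with one server-populated field."""

    _validation = {
        'trigger_name': {'readonly': True},
    }

    _attribute_map = {
        'trigger_name': {'key': 'triggerName', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(OperationStatus, self).__init__(**kwargs)
        self.trigger_name = None  # the server fills this in


# Readonly attributes are populated when deserializing a response...
status = OperationStatus.deserialize({'triggerName': 'MyTrigger'})
print(status.trigger_name)   # -> MyTrigger
# ...but omitted again when serializing a request body.
print(status.serialize())    # -> {}
```

This is also why the generated `__init__` bodies assign `None` to readonly attributes instead of reading them from `kwargs`.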
+class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param name: Required. .. - :type name: str - :param zones_inside: sample of simple array. - :type zones_inside: list[str] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when an event is fired for + trigger window that is ready. + :type pipeline: object + :param frequency: Required. The frequency of the time windows. Possible values include: + "Minute", "Hour". + :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 + Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the trigger during which + events are fired for windows that are ready. Only UTC time is currently supported. + :type start_time: ~datetime.datetime + :param end_time: The end time for the time period for the trigger during which events are fired + for windows that are ready. Only UTC time is currently supported. + :type end_time: ~datetime.datetime + :param delay: Specifies how long the trigger waits past due time before triggering new run. It + doesn't alter window start and end time. The default is 0. Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline runs. + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + supported. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'zones_inside': {'key': 'zonesInside', 'type': '[str]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'object'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } def __init__( self, **kwargs ): - super(FakeFactoryIdentity, self).__init__(**kwargs) - self.name = kwargs['name'] - self.zones_inside = kwargs.get('zones_inside', None) - - -class GitHubAccessTokenRequest(msrest.serialization.Model): - """Get GitHub access token request definition. + super(TumblingWindowTrigger, self).__init__(**kwargs) + self.type = 'TumblingWindowTrigger' + self.pipeline = kwargs['pipeline'] + self.frequency = kwargs['frequency'] + self.interval = kwargs['interval'] + self.start_time = kwargs['start_time'] + self.end_time = kwargs.get('end_time', None) + self.delay = kwargs.get('delay', None) + self.max_concurrency = kwargs['max_concurrency'] + self.retry_policy = kwargs.get('retry_policy', None) + self.depends_on = kwargs.get('depends_on', None) + + +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. All required parameters must be populated in order to send to Azure. - :param git_hub_access_code: Required. GitHub access code. - :type git_hub_access_code: str - :param git_hub_client_id: GitHub application client ID. - :type git_hub_client_id: str - :param git_hub_access_token_base_url: Required. GitHub access token base URL. - :type git_hub_access_token_base_url: str + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. 
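With `TumblingWindowTrigger` now fully defined above, a usage sketch may help. The import path is an assumption (these models ship vendored inside the generated extension), and `_models` is named explicitly to pick the kwargs-based variant from this hunk rather than its `_models_py3` twin in the next one:

```python
# Editor's sketch (assumed import path) of using the kwargs-based model above.
import datetime

from azext_datafactory.vendored_sdks.datafactory.models._models import (
    TumblingWindowTrigger,
)

trigger = TumblingWindowTrigger(
    pipeline={'pipelineReference': {'referenceName': 'MyPipeline',
                                    'type': 'PipelineReference'}},
    frequency='Hour',
    interval=24,
    start_time=datetime.datetime(2020, 6, 1),
    max_concurrency=10,   # _validation declares bounds of 1..50
)

# Keys declared as 'typeProperties.*' in _attribute_map are nested on the
# wire: frequency/interval/startTime/maxConcurrency all end up under a
# single 'typeProperties' object in the serialized body.
print(trigger.serialize())

# Required parameters are read with kwargs['...'], so leaving one out fails
# immediately with a KeyError instead of producing a bad request body.
try:
    TumblingWindowTrigger(frequency='Hour')
except KeyError as missing:
    print('missing required argument:', missing)
```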
+ :type size: str """ _validation = { - 'git_hub_access_code': {'required': True}, - 'git_hub_access_token_base_url': {'required': True}, + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } _attribute_map = { - 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, - 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, - 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, } def __init__( self, **kwargs ): - super(GitHubAccessTokenRequest, self).__init__(**kwargs) - self.git_hub_access_code = kwargs['git_hub_access_code'] - self.git_hub_client_id = kwargs.get('git_hub_client_id', None) - self.git_hub_access_token_base_url = kwargs['git_hub_access_token_base_url'] + super(TumblingWindowTriggerDependencyReference, self).__init__(**kwargs) + self.type = 'TumblingWindowTriggerDependencyReference' + self.offset = kwargs.get('offset', None) + self.size = kwargs.get('size', None) -class GitHubAccessTokenResponse(msrest.serialization.Model): - """Get GitHub access token response definition. +class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): + """Update integration runtime request. - :param git_hub_access_token: GitHub access token. - :type git_hub_access_token: str + :param auto_update: Enables or disables the auto-update feature of the self-hosted integration + runtime. See https://go.microsoft.com/fwlink/?linkid=854189. + :type auto_update: object + :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The + integration runtime auto update will happen on that time. + :type update_delay_offset: str """ _attribute_map = { - 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + 'auto_update': {'key': 'autoUpdate', 'type': 'object'}, + 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, } def __init__( self, **kwargs ): - super(GitHubAccessTokenResponse, self).__init__(**kwargs) - self.git_hub_access_token = kwargs.get('git_hub_access_token', None) + super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) + self.auto_update = kwargs.get('auto_update', None) + self.update_delay_offset = kwargs.get('update_delay_offset', None) class UserAccessPolicy(msrest.serialization.Model): diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py index fd6115758..d98f9c0ee 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/models/_models_py3.py @@ -6,10 +6,13 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
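The second half of the diff regenerates the Python-3-only twin of the same models file. The py3 variants declare keyword-only, type-annotated parameters (note the bare `*` in the signatures below), so a missing required value surfaces as a `TypeError` at the call site rather than a `KeyError` inside `__init__`, and type checkers see real annotations. A small sketch using `CloudError` from further down this hunk; the import path is an assumption:

```python
# Editor's sketch: the _models_py3 variant of CloudError takes keyword-only
# parameters, and its 'error.*' _attribute_map keys nest under 'error'.
from azext_datafactory.vendored_sdks.datafactory.models._models_py3 import (
    CloudError,
)

err = CloudError(code='BadRequest', message='Invalid trigger definition')
# -> {'error': {'code': 'BadRequest', 'message': 'Invalid trigger definition'}}
print(err.serialize())

try:
    CloudError(code='BadRequest')   # 'message' is required
except TypeError as exc:
    print(exc)
```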
# -------------------------------------------------------------------------- -from typing import Dict, List, Optional +import datetime +from typing import Dict, List, Optional, Union import msrest.serialization +from ._dfaz_management_client_enums import * + class AccessPolicyResponse(msrest.serialization.Model): """Get Data Plane read only token response definition. @@ -42,349 +45,3708 @@ def __init__( self.data_plane_url = data_plane_url -class CloudError(msrest.serialization.Model): - """The object that defines the structure of an Azure Data Factory error response. +class Trigger(msrest.serialization.Model): + """Azure data factory nested object which contains information about creating pipeline run. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: ChainingTrigger, MultiplePipelineTrigger, RerunTumblingWindowTrigger, TumblingWindowTrigger. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param code: Required. Error code. - :type code: str - :param message: Required. Error message. - :type message: str - :param target: Property name/path in request associated with error. - :type target: str - :param details: Array with additional error details. - :type details: list[~azure.mgmt.datafactory.models.CloudError] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. 
+ :type annotations: list[object] """ _validation = { - 'code': {'required': True}, - 'message': {'required': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { - 'code': {'key': 'error.code', 'type': 'str'}, - 'message': {'key': 'error.message', 'type': 'str'}, - 'target': {'key': 'error.target', 'type': 'str'}, - 'details': {'key': 'error.details', 'type': '[CloudError]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'ChainingTrigger': 'ChainingTrigger', 'MultiplePipelineTrigger': 'MultiplePipelineTrigger', 'RerunTumblingWindowTrigger': 'RerunTumblingWindowTrigger', 'TumblingWindowTrigger': 'TumblingWindowTrigger'} } def __init__( self, *, - code: str, - message: str, - target: Optional[str] = None, - details: Optional[List["CloudError"]] = None, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, **kwargs ): - super(CloudError, self).__init__(**kwargs) - self.code = code - self.message = message - self.target = target - self.details = details + super(Trigger, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type: str = 'Trigger' + self.description = description + self.runtime_state = None + self.annotations = annotations -class Resource(msrest.serialization.Model): - """Azure Data Factory top-level resource. +class MultiplePipelineTrigger(Trigger): + """Base class for all triggers that support one to many model for trigger to pipeline. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: BlobEventsTrigger, BlobTrigger, ScheduleTrigger. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. 
+ :type pipelines: list[object] """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + } + + _subtype_map = { + 'type': {'BlobEventsTrigger': 'BlobEventsTrigger', 'BlobTrigger': 'BlobTrigger', 'ScheduleTrigger': 'ScheduleTrigger'} } def __init__( self, *, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List[object]] = None, **kwargs ): - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.location = location - self.tags = tags - self.e_tag = None + super(MultiplePipelineTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type: str = 'MultiplePipelineTrigger' + self.pipelines = pipelines -class Factory(Resource): - """Factory resource type. +class BlobEventsTrigger(MultiplePipelineTrigger): + """Trigger that runs every time a Blob event occurs. Variables are only populated by the server, and will be ignored when sending a request. - :ivar id: The resource identifier. - :vartype id: str - :ivar name: The resource name. - :vartype name: str - :ivar type: The resource type. - :vartype type: str - :param location: The resource location. - :type location: str - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :ivar e_tag: Etag identifies change in the resource. - :vartype e_tag: str + All required parameters must be populated in order to send to Azure. + :param additional_properties: Unmatched properties from the message are deserialized to this collection. :type additional_properties: dict[str, object] - :param identity: Managed service identity of the factory. - :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity - :ivar provisioning_state: Factory provisioning state, example Succeeded. - :vartype provisioning_state: str - :ivar create_time: Time the factory was created in ISO8601 format. - :vartype create_time: ~datetime.datetime - :ivar version: Version of the factory. - :vartype version: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration - :param fake_identity: This is only for az test. - :type fake_identity: ~azure.mgmt.datafactory.models.FakeFactoryIdentity - :param zones: This is only for az test. - :type zones: list[str] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. 
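The `_subtype_map` entries above are what let msrest resolve the wire `type` discriminator to a concrete class, and the lookup chains across levels (`Trigger` to `MultiplePipelineTrigger` to `BlobEventsTrigger`). A sketch, under the assumption that the vendored models package re-exports these classes:

```python
# Editor's sketch: deserializing against the base class returns the concrete
# subclass named by the 'type' discriminator in the payload.
from azext_datafactory.vendored_sdks.datafactory.models import (
    BlobEventsTrigger,
    Trigger,
)

payload = {
    'type': 'BlobEventsTrigger',
    'typeProperties': {
        'events': ['Microsoft.Storage.BlobCreated'],
        'scope': ('/subscriptions/00000000-0000-0000-0000-000000000000'
                  '/resourceGroups/myRg/providers/Microsoft.Storage'
                  '/storageAccounts/myAccount'),
    },
}

trigger = Trigger.deserialize(payload)
assert isinstance(trigger, BlobEventsTrigger)
print(trigger.events, trigger.scope)
```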
Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[object] + :param blob_path_begins_with: The blob path must begin with the pattern provided for trigger to + fire. For example, '/records/blobs/december/' will only fire the trigger for blobs in the + december folder under the records container. At least one of these must be provided: + blobPathBeginsWith, blobPathEndsWith. + :type blob_path_begins_with: str + :param blob_path_ends_with: The blob path must end with the pattern provided for trigger to + fire. For example, 'december/boxes.csv' will only fire the trigger for blobs named boxes in a + december folder. At least one of these must be provided: blobPathBeginsWith, blobPathEndsWith. + :type blob_path_ends_with: str + :param ignore_empty_blobs: If set to true, blobs with zero bytes will be ignored. + :type ignore_empty_blobs: bool + :param events: Required. The type of events that cause this trigger to fire. + :type events: list[str or ~azure.mgmt.datafactory.models.BlobEventTypes] + :param scope: Required. The ARM resource ID of the Storage Account. + :type scope: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'e_tag': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'create_time': {'readonly': True}, - 'version': {'readonly': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'events': {'required': True}, + 'scope': {'required': True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'e_tag': {'key': 'eTag', 'type': 'str'}, 'additional_properties': {'key': '', 'type': '{object}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, - 'version': {'key': 'properties.version', 'type': 'str'}, - 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, - 'fake_identity': {'key': 'properties.fakeIdentity', 'type': 'FakeFactoryIdentity'}, - 'zones': {'key': 'properties.zones', 'type': '[str]'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'blob_path_begins_with': {'key': 'typeProperties.blobPathBeginsWith', 'type': 'str'}, + 'blob_path_ends_with': {'key': 'typeProperties.blobPathEndsWith', 'type': 'str'}, + 'ignore_empty_blobs': {'key': 'typeProperties.ignoreEmptyBlobs', 'type': 'bool'}, + 'events': {'key': 'typeProperties.events', 'type': '[str]'}, + 'scope': {'key': 'typeProperties.scope', 'type': 'str'}, } def __init__( self, *, - location: Optional[str] = None, - tags: Optional[Dict[str, str]] = None, + events: List[Union[str, "BlobEventTypes"]], + scope: str, additional_properties: Optional[Dict[str, object]] = None, - identity: Optional["FactoryIdentity"] = None, - 
repo_configuration: Optional["FactoryRepoConfiguration"] = None, - fake_identity: Optional["FakeFactoryIdentity"] = None, - zones: Optional[List[str]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List[object]] = None, + blob_path_begins_with: Optional[str] = None, + blob_path_ends_with: Optional[str] = None, + ignore_empty_blobs: Optional[bool] = None, **kwargs ): - super(Factory, self).__init__(location=location, tags=tags, **kwargs) - self.additional_properties = additional_properties - self.identity = identity - self.provisioning_state = None - self.create_time = None - self.version = None - self.repo_configuration = repo_configuration - self.fake_identity = fake_identity - self.zones = zones + super(BlobEventsTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type: str = 'BlobEventsTrigger' + self.blob_path_begins_with = blob_path_begins_with + self.blob_path_ends_with = blob_path_ends_with + self.ignore_empty_blobs = ignore_empty_blobs + self.events = events + self.scope = scope -class FactoryRepoConfiguration(msrest.serialization.Model): - """Factory's git repo information. +class BlobTrigger(MultiplePipelineTrigger): + """Trigger that runs every time the selected Blob container changes. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FactoryGitHubConfiguration, FactoryVstsConfiguration. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[object] + :param folder_path: Required. The path of the container/folder that will trigger the pipeline. + :type folder_path: str + :param max_concurrency: Required. The max number of parallel files to handle when it is + triggered. + :type max_concurrency: int + :param linked_service: Required. The Azure Storage linked service reference. 
+ :type linked_service: object """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, + 'runtime_state': {'readonly': True}, + 'folder_path': {'required': True}, + 'max_concurrency': {'required': True}, + 'linked_service': {'required': True}, } _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'folder_path': {'key': 'typeProperties.folderPath', 'type': 'str'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'linked_service': {'key': 'typeProperties.linkedService', 'type': 'object'}, } - _subtype_map = { - 'type': {'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration', 'FactoryVSTSConfiguration': 'FactoryVstsConfiguration'} + def __init__( + self, + *, + folder_path: str, + max_concurrency: int, + linked_service: object, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List[object]] = None, + **kwargs + ): + super(BlobTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type: str = 'BlobTrigger' + self.folder_path = folder_path + self.max_concurrency = max_concurrency + self.linked_service = linked_service + + +class ChainingTrigger(Trigger): + """Trigger that allows the referenced pipeline to depend on other pipeline runs based on runDimension Name/Value pairs. Upstream pipelines should declare the same runDimension Name and their runs should have the values for those runDimensions. The referenced pipeline run would be triggered if the values for the runDimension match for all upstream pipeline runs. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when all upstream pipelines + complete successfully. + :type pipeline: object + :param depends_on: Required. Upstream Pipelines. 
+ :type depends_on: list[object] + :param run_dimension: Required. Run Dimension property that needs to be emitted by upstream + pipelines. + :type run_dimension: str + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'depends_on': {'required': True}, + 'run_dimension': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'object'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[object]'}, + 'run_dimension': {'key': 'typeProperties.runDimension', 'type': 'str'}, } def __init__( self, *, - account_name: str, - repository_name: str, - collaboration_branch: str, - root_folder: str, - last_commit_id: Optional[str] = None, + pipeline: object, + depends_on: List[object], + run_dimension: str, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, **kwargs ): - super(FactoryRepoConfiguration, self).__init__(**kwargs) - self.type: Optional[str] = None - self.account_name = account_name - self.repository_name = repository_name - self.collaboration_branch = collaboration_branch - self.root_folder = root_folder - self.last_commit_id = last_commit_id + super(ChainingTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type: str = 'ChainingTrigger' + self.pipeline = pipeline + self.depends_on = depends_on + self.run_dimension = run_dimension -class FactoryGitHubConfiguration(FactoryRepoConfiguration): - """Factory's GitHub repo information. +class CloudError(msrest.serialization.Model): + """The object that defines the structure of an Azure Data Factory error response. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. - :type host_name: str + :param code: Required. Error code. + :type code: str + :param message: Required. Error message. + :type message: str + :param target: Property name/path in request associated with error. + :type target: str + :param details: Array with additional error details. 
+ :type details: list[~azure.mgmt.datafactory.models.CloudError] + """ + + _validation = { + 'code': {'required': True}, + 'message': {'required': True}, + } + + _attribute_map = { + 'code': {'key': 'error.code', 'type': 'str'}, + 'message': {'key': 'error.message', 'type': 'str'}, + 'target': {'key': 'error.target', 'type': 'str'}, + 'details': {'key': 'error.details', 'type': '[CloudError]'}, + } + + def __init__( + self, + *, + code: str, + message: str, + target: Optional[str] = None, + details: Optional[List["CloudError"]] = None, + **kwargs + ): + super(CloudError, self).__init__(**kwargs) + self.code = code + self.message = message + self.target = target + self.details = details + + +class CustomSetupBase(msrest.serialization.Model): + """The base definition of the custom setup. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: CmdkeySetup, ComponentSetup, EnvironmentVariableSetup. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'CmdkeySetup': 'CmdkeySetup', 'ComponentSetup': 'ComponentSetup', 'EnvironmentVariableSetup': 'EnvironmentVariableSetup'} + } + + def __init__( + self, + **kwargs + ): + super(CustomSetupBase, self).__init__(**kwargs) + self.type: Optional[str] = None + + +class CmdkeySetup(CustomSetupBase): + """The custom setup of running cmdkey commands. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + :param target_name: Required. The server name of data source access. + :type target_name: object + :param user_name: Required. The user name of data source access. + :type user_name: object + :param password: Required. The password of data source access. + :type password: object + """ + + _validation = { + 'type': {'required': True}, + 'target_name': {'required': True}, + 'user_name': {'required': True}, + 'password': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'target_name': {'key': 'typeProperties.targetName', 'type': 'object'}, + 'user_name': {'key': 'typeProperties.userName', 'type': 'object'}, + 'password': {'key': 'typeProperties.password', 'type': 'object'}, + } + + def __init__( + self, + *, + target_name: object, + user_name: object, + password: object, + **kwargs + ): + super(CmdkeySetup, self).__init__(**kwargs) + self.type: str = 'CmdkeySetup' + self.target_name = target_name + self.user_name = user_name + self.password = password + + +class ComponentSetup(CustomSetupBase): + """The custom setup of installing 3rd party components. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + :param component_name: Required. The name of the 3rd party component. + :type component_name: str + :param license_key: The license key to activate the component. 
+ :type license_key: object + """ + + _validation = { + 'type': {'required': True}, + 'component_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'component_name': {'key': 'typeProperties.componentName', 'type': 'str'}, + 'license_key': {'key': 'typeProperties.licenseKey', 'type': 'object'}, + } + + def __init__( + self, + *, + component_name: str, + license_key: Optional[object] = None, + **kwargs + ): + super(ComponentSetup, self).__init__(**kwargs) + self.type: str = 'ComponentSetup' + self.component_name = component_name + self.license_key = license_key + + +class CreateLinkedIntegrationRuntimeRequest(msrest.serialization.Model): + """The linked integration runtime information. + + :param name: The name of the linked integration runtime. + :type name: str + :param subscription_id: The ID of the subscription that the linked integration runtime belongs + to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked integration runtime + belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the linked integration + runtime belongs to. + :type data_factory_location: str + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + subscription_id: Optional[str] = None, + data_factory_name: Optional[str] = None, + data_factory_location: Optional[str] = None, + **kwargs + ): + super(CreateLinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.name = name + self.subscription_id = subscription_id + self.data_factory_name = data_factory_name + self.data_factory_location = data_factory_location + + +class DependencyReference(msrest.serialization.Model): + """Referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SelfDependencyTumblingWindowTriggerReference, TriggerDependencyReference. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SelfDependencyTumblingWindowTriggerReference': 'SelfDependencyTumblingWindowTriggerReference', 'TriggerDependencyReference': 'TriggerDependencyReference'} + } + + def __init__( + self, + **kwargs + ): + super(DependencyReference, self).__init__(**kwargs) + self.type: Optional[str] = None + + +class EntityReference(msrest.serialization.Model): + """The entity reference. + + :param type: The type of this referenced entity. Possible values include: + "IntegrationRuntimeReference", "LinkedServiceReference". + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEntityReferenceType + :param reference_name: The name of this referenced entity. 
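Parameters documented as `str or ~azure.mgmt.datafactory.models.<Enum>`, like `type` on `EntityReference` just below, accept either the enum member or its literal string value, and both spellings serialize identically. A sketch; the import path and the snake_case enum member name are assumptions based on the generator's usual conventions:

```python
# Editor's sketch: enum member or raw string, same wire payload.
from azext_datafactory.vendored_sdks.datafactory.models import (
    EntityReference,
    IntegrationRuntimeEntityReferenceType,  # member name assumed snake_case
)

by_enum = EntityReference(
    type=IntegrationRuntimeEntityReferenceType.linked_service_reference,
    reference_name='MyLinkedService',
)
by_string = EntityReference(type='LinkedServiceReference',
                            reference_name='MyLinkedService')

assert by_enum.serialize() == by_string.serialize()
```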
+ :type reference_name: str + """ + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + } + + def __init__( + self, + *, + type: Optional[Union[str, "IntegrationRuntimeEntityReferenceType"]] = None, + reference_name: Optional[str] = None, + **kwargs + ): + super(EntityReference, self).__init__(**kwargs) + self.type = type + self.reference_name = reference_name + + +class EnvironmentVariableSetup(CustomSetupBase): + """The custom setup of setting environment variable. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of custom setup.Constant filled by server. + :type type: str + :param variable_name: Required. The name of the environment variable. + :type variable_name: str + :param variable_value: Required. The value of the environment variable. + :type variable_value: str + """ + + _validation = { + 'type': {'required': True}, + 'variable_name': {'required': True}, + 'variable_value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'variable_name': {'key': 'typeProperties.variableName', 'type': 'str'}, + 'variable_value': {'key': 'typeProperties.variableValue', 'type': 'str'}, + } + + def __init__( + self, + *, + variable_name: str, + variable_value: str, + **kwargs + ): + super(EnvironmentVariableSetup, self).__init__(**kwargs) + self.type: str = 'EnvironmentVariableSetup' + self.variable_name = variable_name + self.variable_value = variable_value + + +class Resource(msrest.serialization.Model): + """Azure Data Factory top-level resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: A set of tags. The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + **kwargs + ): + super(Resource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.location = location + self.tags = tags + self.e_tag = None + + +class Factory(Resource): + """Factory resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :param location: The resource location. + :type location: str + :param tags: A set of tags. The resource tags. + :type tags: dict[str, str] + :ivar e_tag: Etag identifies change in the resource. + :vartype e_tag: str + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. 
+ :type additional_properties: dict[str, object] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + :ivar provisioning_state: Factory provisioning state, example Succeeded. + :vartype provisioning_state: str + :ivar create_time: Time the factory was created in ISO8601 format. + :vartype create_time: ~datetime.datetime + :ivar version: Version of the factory. + :vartype version: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + :param fake_identity: This is only for az test. + :type fake_identity: ~azure.mgmt.datafactory.models.FakeFactoryIdentity + :param zones: This is only for az test. + :type zones: list[str] + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'e_tag': {'readonly': True}, + 'provisioning_state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'version': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'location': {'key': 'location', 'type': 'str'}, + 'tags': {'key': 'tags', 'type': '{str}'}, + 'e_tag': {'key': 'eTag', 'type': 'str'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + 'create_time': {'key': 'properties.createTime', 'type': 'iso-8601'}, + 'version': {'key': 'properties.version', 'type': 'str'}, + 'repo_configuration': {'key': 'properties.repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + 'fake_identity': {'key': 'properties.fakeIdentity', 'type': 'FakeFactoryIdentity'}, + 'zones': {'key': 'properties.zones', 'type': '[str]'}, + } + + def __init__( + self, + *, + location: Optional[str] = None, + tags: Optional[Dict[str, str]] = None, + additional_properties: Optional[Dict[str, object]] = None, + identity: Optional["FactoryIdentity"] = None, + repo_configuration: Optional["FactoryRepoConfiguration"] = None, + fake_identity: Optional["FakeFactoryIdentity"] = None, + zones: Optional[List[str]] = None, + **kwargs + ): + super(Factory, self).__init__(location=location, tags=tags, **kwargs) + self.additional_properties = additional_properties + self.identity = identity + self.provisioning_state = None + self.create_time = None + self.version = None + self.repo_configuration = repo_configuration + self.fake_identity = fake_identity + self.zones = zones + + +class FactoryRepoConfiguration(msrest.serialization.Model): + """Factory's git repo information. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: FactoryGitHubConfiguration, FactoryVstsConfiguration. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of repo configuration.Constant filled by server. + :type type: str + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. 
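The `{'key': '', 'type': '{object}'}` entry in `Factory`'s `_attribute_map` above is msrest's catch-all: payload keys that match no declared attribute are collected into `additional_properties` on deserialization and written back on serialization, so unknown service fields round-trip instead of being silently dropped. A sketch (import path assumed):

```python
# Editor's sketch: the additional-properties bucket in action.
from azext_datafactory.vendored_sdks.datafactory.models import Factory

factory = Factory.deserialize({
    'location': 'eastus',
    'futureProperty': 42,   # hypothetical key unknown to the model
})
print(factory.additional_properties)   # {'futureProperty': 42}
print(factory.serialize())             # emits 'futureProperty' again
```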
+ :type last_commit_id: str + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'FactoryGitHubConfiguration': 'FactoryGitHubConfiguration', 'FactoryVSTSConfiguration': 'FactoryVstsConfiguration'} + } + + def __init__( + self, + *, + account_name: str, + repository_name: str, + collaboration_branch: str, + root_folder: str, + last_commit_id: Optional[str] = None, + **kwargs + ): + super(FactoryRepoConfiguration, self).__init__(**kwargs) + self.type: Optional[str] = None + self.account_name = account_name + self.repository_name = repository_name + self.collaboration_branch = collaboration_branch + self.root_folder = root_folder + self.last_commit_id = last_commit_id + + +class FactoryGitHubConfiguration(FactoryRepoConfiguration): + """Factory's GitHub repo information. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of repo configuration.Constant filled by server. + :type type: str + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. + :type repository_name: str + :param collaboration_branch: Required. Collaboration branch. + :type collaboration_branch: str + :param root_folder: Required. Root folder. + :type root_folder: str + :param last_commit_id: Last commit id. + :type last_commit_id: str + :param host_name: GitHub Enterprise host name. For example: https://github.mydomain.com. + :type host_name: str + """ + + _validation = { + 'type': {'required': True}, + 'account_name': {'required': True}, + 'repository_name': {'required': True}, + 'collaboration_branch': {'required': True}, + 'root_folder': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'account_name': {'key': 'accountName', 'type': 'str'}, + 'repository_name': {'key': 'repositoryName', 'type': 'str'}, + 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, + 'root_folder': {'key': 'rootFolder', 'type': 'str'}, + 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, + 'host_name': {'key': 'hostName', 'type': 'str'}, + } + + def __init__( + self, + *, + account_name: str, + repository_name: str, + collaboration_branch: str, + root_folder: str, + last_commit_id: Optional[str] = None, + host_name: Optional[str] = None, + **kwargs + ): + super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) + self.type: str = 'FactoryGitHubConfiguration' + self.host_name = host_name + + +class FactoryIdentity(msrest.serialization.Model): + """Identity properties of the factory resource. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. The identity type. 
Currently the only supported type is 'SystemAssigned'. + Default value: "SystemAssigned". + :vartype type: str + :ivar principal_id: The principal id of the identity. + :vartype principal_id: str + :ivar tenant_id: The client tenant id of the identity. + :vartype tenant_id: str + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'principal_id': {'readonly': True}, + 'tenant_id': {'readonly': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'principal_id': {'key': 'principalId', 'type': 'str'}, + 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + } + + type = "SystemAssigned" + + def __init__( + self, + **kwargs + ): + super(FactoryIdentity, self).__init__(**kwargs) + self.principal_id = None + self.tenant_id = None + + +class FactoryListResponse(msrest.serialization.Model): + """A list of factory resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of factories. + :type value: list[~azure.mgmt.datafactory.models.Factory] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[Factory]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["Factory"], + next_link: Optional[str] = None, + **kwargs + ): + super(FactoryListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class FactoryRepoUpdate(msrest.serialization.Model): + """Factory's git repo information. + + :param factory_resource_id: The factory resource id. + :type factory_resource_id: str + :param repo_configuration: Git repo information of the factory. + :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + """ + + _attribute_map = { + 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, + 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + } + + def __init__( + self, + *, + factory_resource_id: Optional[str] = None, + repo_configuration: Optional["FactoryRepoConfiguration"] = None, + **kwargs + ): + super(FactoryRepoUpdate, self).__init__(**kwargs) + self.factory_resource_id = factory_resource_id + self.repo_configuration = repo_configuration + + +class FactoryUpdateParameters(msrest.serialization.Model): + """Parameters for updating a factory resource. + + :param tags: A set of tags. The resource tags. + :type tags: dict[str, str] + :param identity: Managed service identity of the factory. + :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + """ + + _attribute_map = { + 'tags': {'key': 'tags', 'type': '{str}'}, + 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + } + + def __init__( + self, + *, + tags: Optional[Dict[str, str]] = None, + identity: Optional["FactoryIdentity"] = None, + **kwargs + ): + super(FactoryUpdateParameters, self).__init__(**kwargs) + self.tags = tags + self.identity = identity + + +class FactoryVstsConfiguration(FactoryRepoConfiguration): + """Factory's VSTS repo information. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of repo configuration.Constant filled by server. + :type type: str + :param account_name: Required. Account name. + :type account_name: str + :param repository_name: Required. Repository name. 
+    :type repository_name: str
+    :param collaboration_branch: Required. Collaboration branch.
+    :type collaboration_branch: str
+    :param root_folder: Required. Root folder.
+    :type root_folder: str
+    :param last_commit_id: Last commit id.
+    :type last_commit_id: str
+    :param project_name: Required. VSTS project name.
+    :type project_name: str
+    :param tenant_id: VSTS tenant id.
+    :type tenant_id: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'account_name': {'required': True},
+        'repository_name': {'required': True},
+        'collaboration_branch': {'required': True},
+        'root_folder': {'required': True},
+        'project_name': {'required': True},
+    }
+
+    _attribute_map = {
+        'type': {'key': 'type', 'type': 'str'},
+        'account_name': {'key': 'accountName', 'type': 'str'},
+        'repository_name': {'key': 'repositoryName', 'type': 'str'},
+        'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'},
+        'root_folder': {'key': 'rootFolder', 'type': 'str'},
+        'last_commit_id': {'key': 'lastCommitId', 'type': 'str'},
+        'project_name': {'key': 'projectName', 'type': 'str'},
+        'tenant_id': {'key': 'tenantId', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        account_name: str,
+        repository_name: str,
+        collaboration_branch: str,
+        root_folder: str,
+        project_name: str,
+        last_commit_id: Optional[str] = None,
+        tenant_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(FactoryVstsConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs)
+        self.type: str = 'FactoryVSTSConfiguration'
+        self.project_name = project_name
+        self.tenant_id = tenant_id
+
+
+class FakeFactoryIdentity(msrest.serialization.Model):
+    """This is only for az test.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param name: Required.
+    :type name: str
+    :param zones_inside: Sample of a simple array.
+    :type zones_inside: list[str]
+    """
+
+    _validation = {
+        'name': {'required': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'zones_inside': {'key': 'zonesInside', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        zones_inside: Optional[List[str]] = None,
+        **kwargs
+    ):
+        super(FakeFactoryIdentity, self).__init__(**kwargs)
+        self.name = name
+        self.zones_inside = zones_inside
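For reference, a minimal sketch of how the polymorphic repo-configuration models above behave at runtime. It assumes the generated package imports as `azure.mgmt.datafactory.models`; the account and repository values are placeholders:

```python
from azure.mgmt.datafactory.models import (
    FactoryGitHubConfiguration,
    FactoryRepoConfiguration,
)

github_repo = FactoryGitHubConfiguration(
    account_name='fakeAccount',       # placeholder values
    repository_name='fakeRepo',
    collaboration_branch='master',
    root_folder='/',
    host_name='https://github.mydomain.com',
)

# serialize() emits the wire names from _attribute_map, including the constant
# 'type' discriminator that _subtype_map keys on.
body = github_repo.serialize()
assert body['type'] == 'FactoryGitHubConfiguration'

# Deserializing through the base class resolves the subclass via _subtype_map.
roundtrip = FactoryRepoConfiguration.deserialize(body)
assert isinstance(roundtrip, FactoryGitHubConfiguration)
```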
+
+
+class GitHubAccessTokenRequest(msrest.serialization.Model):
+    """Get GitHub access token request definition.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param git_hub_access_code: Required. GitHub access code.
+    :type git_hub_access_code: str
+    :param git_hub_client_id: GitHub application client ID.
+    :type git_hub_client_id: str
+    :param git_hub_access_token_base_url: Required. GitHub access token base URL.
+    :type git_hub_access_token_base_url: str
+    """
+
+    _validation = {
+        'git_hub_access_code': {'required': True},
+        'git_hub_access_token_base_url': {'required': True},
+    }
+
+    _attribute_map = {
+        'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'},
+        'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'},
+        'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        git_hub_access_code: str,
+        git_hub_access_token_base_url: str,
+        git_hub_client_id: Optional[str] = None,
+        **kwargs
+    ):
+        super(GitHubAccessTokenRequest, self).__init__(**kwargs)
+        self.git_hub_access_code = git_hub_access_code
+        self.git_hub_client_id = git_hub_client_id
+        self.git_hub_access_token_base_url = git_hub_access_token_base_url
+
+
+class GitHubAccessTokenResponse(msrest.serialization.Model):
+    """Get GitHub access token response definition.
+
+    :param git_hub_access_token: GitHub access token.
+    :type git_hub_access_token: str
+    """
+
+    _attribute_map = {
+        'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        git_hub_access_token: Optional[str] = None,
+        **kwargs
+    ):
+        super(GitHubAccessTokenResponse, self).__init__(**kwargs)
+        self.git_hub_access_token = git_hub_access_token
+
+
+class IntegrationRuntime(msrest.serialization.Model):
+    """Azure Data Factory nested object which serves as a compute resource for activities.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: ManagedIntegrationRuntime, SelfHostedIntegrationRuntime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of integration runtime.Constant filled by server. Possible values
+     include: "Managed", "SelfHosted".
+    :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType
+    :param description: Integration runtime description.
+    :type description: str
+    """
+
+    _validation = {
+        'type': {'required': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'description': {'key': 'description', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'Managed': 'ManagedIntegrationRuntime', 'SelfHosted': 'SelfHostedIntegrationRuntime'}
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        description: Optional[str] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntime, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.type: str = 'IntegrationRuntime'
+        self.description = description
+
+
+class IntegrationRuntimeAuthKeys(msrest.serialization.Model):
+    """The integration runtime authentication keys.
+
+    :param auth_key1: The primary integration runtime authentication key.
+    :type auth_key1: str
+    :param auth_key2: The secondary integration runtime authentication key.
+    :type auth_key2: str
+    """
+
+    _attribute_map = {
+        'auth_key1': {'key': 'authKey1', 'type': 'str'},
+        'auth_key2': {'key': 'authKey2', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        auth_key1: Optional[str] = None,
+        auth_key2: Optional[str] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntimeAuthKeys, self).__init__(**kwargs)
+        self.auth_key1 = auth_key1
+        self.auth_key2 = auth_key2
+
+
+class IntegrationRuntimeComputeProperties(msrest.serialization.Model):
+    """The compute resource properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param location: The location for managed integration runtime. The supported regions could be
+     found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities.
+    :type location: str
+    :param node_size: The node size requirement for managed integration runtime.
+    :type node_size: str
+    :param number_of_nodes: The required number of nodes for managed integration runtime.
+    :type number_of_nodes: int
+    :param max_parallel_executions_per_node: Maximum parallel executions count per node for managed
+     integration runtime.
+    :type max_parallel_executions_per_node: int
+    :param data_flow_properties: Data flow properties for managed integration runtime.
+    :type data_flow_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeDataFlowProperties
+    :param v_net_properties: VNet properties for managed integration runtime.
+    :type v_net_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeVNetProperties
+    """
+
+    _validation = {
+        'number_of_nodes': {'minimum': 1},
+        'max_parallel_executions_per_node': {'minimum': 1},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'location': {'key': 'location', 'type': 'str'},
+        'node_size': {'key': 'nodeSize', 'type': 'str'},
+        'number_of_nodes': {'key': 'numberOfNodes', 'type': 'int'},
+        'max_parallel_executions_per_node': {'key': 'maxParallelExecutionsPerNode', 'type': 'int'},
+        'data_flow_properties': {'key': 'dataFlowProperties', 'type': 'IntegrationRuntimeDataFlowProperties'},
+        'v_net_properties': {'key': 'vNetProperties', 'type': 'IntegrationRuntimeVNetProperties'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        location: Optional[str] = None,
+        node_size: Optional[str] = None,
+        number_of_nodes: Optional[int] = None,
+        max_parallel_executions_per_node: Optional[int] = None,
+        data_flow_properties: Optional["IntegrationRuntimeDataFlowProperties"] = None,
+        v_net_properties: Optional["IntegrationRuntimeVNetProperties"] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntimeComputeProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.location = location
+        self.node_size = node_size
+        self.number_of_nodes = number_of_nodes
+        self.max_parallel_executions_per_node = max_parallel_executions_per_node
+        self.data_flow_properties = data_flow_properties
+        self.v_net_properties = v_net_properties
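The `_validation` table above is enforced client-side by msrest, so constraint violations surface before a request is sent. A minimal sketch, assuming the generated `azure.mgmt.datafactory.models` package (values are placeholders):

```python
from azure.mgmt.datafactory.models import IntegrationRuntimeComputeProperties

compute = IntegrationRuntimeComputeProperties(
    location='eastus',
    node_size='Standard_D8_v3',
    number_of_nodes=0,  # violates the {'minimum': 1} rule in _validation
    max_parallel_executions_per_node=4,
)

# validate() returns the accumulated ValidationErrors instead of raising,
# so callers can report all constraint violations at once.
errors = compute.validate()
assert errors  # one entry, for the number_of_nodes minimum
```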
+
+
+class IntegrationRuntimeConnectionInfo(msrest.serialization.Model):
+    """Connection information for encrypting the on-premises data source credentials.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :ivar service_token: The token generated in service. Callers use this token to authenticate to
+     integration runtime.
+    :vartype service_token: str
+    :ivar identity_cert_thumbprint: The integration runtime SSL certificate thumbprint. Click-Once
+     application uses it to do server validation.
+    :vartype identity_cert_thumbprint: str
+    :ivar host_service_uri: The on-premises integration runtime host URL.
+    :vartype host_service_uri: str
+    :ivar version: The integration runtime version.
+    :vartype version: str
+    :ivar public_key: The public key for encrypting a credential when transferring the credential
+     to the integration runtime.
+    :vartype public_key: str
+    :ivar is_identity_cert_exprired: Whether the identity certificate is expired.
+    :vartype is_identity_cert_exprired: bool
+    """
+
+    _validation = {
+        'service_token': {'readonly': True},
+        'identity_cert_thumbprint': {'readonly': True},
+        'host_service_uri': {'readonly': True},
+        'version': {'readonly': True},
+        'public_key': {'readonly': True},
+        'is_identity_cert_exprired': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'service_token': {'key': 'serviceToken', 'type': 'str'},
+        'identity_cert_thumbprint': {'key': 'identityCertThumbprint', 'type': 'str'},
+        'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'},
+        'version': {'key': 'version', 'type': 'str'},
+        'public_key': {'key': 'publicKey', 'type': 'str'},
+        'is_identity_cert_exprired': {'key': 'isIdentityCertExprired', 'type': 'bool'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntimeConnectionInfo, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.service_token = None
+        self.identity_cert_thumbprint = None
+        self.host_service_uri = None
+        self.version = None
+        self.public_key = None
+        self.is_identity_cert_exprired = None
+
+
+class IntegrationRuntimeCustomSetupScriptProperties(msrest.serialization.Model):
+    """Custom setup script properties for a managed dedicated integration runtime.
+
+    :param blob_container_uri: The URI of the Azure blob container that contains the custom setup
+     script.
+    :type blob_container_uri: str
+    :param sas_token: The SAS token of the Azure blob container.
+    :type sas_token: object
+    """
+
+    _attribute_map = {
+        'blob_container_uri': {'key': 'blobContainerUri', 'type': 'str'},
+        'sas_token': {'key': 'sasToken', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        blob_container_uri: Optional[str] = None,
+        sas_token: Optional[object] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntimeCustomSetupScriptProperties, self).__init__(**kwargs)
+        self.blob_container_uri = blob_container_uri
+        self.sas_token = sas_token
+
+
+class IntegrationRuntimeDataFlowProperties(msrest.serialization.Model):
+    """Data flow properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param compute_type: Compute type of the cluster which will execute data flow job. Possible
+     values include: "General", "MemoryOptimized", "ComputeOptimized".
+    :type compute_type: str or ~azure.mgmt.datafactory.models.DataFlowComputeType
+    :param core_count: Core count of the cluster which will execute data flow job. Supported values
+     are: 8, 16, 32, 48, 80, 144 and 272.
+ :type core_count: int + :param time_to_live: Time to live (in minutes) setting of the cluster which will execute data + flow job. + :type time_to_live: int + """ + + _validation = { + 'time_to_live': {'minimum': 0}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'compute_type': {'key': 'computeType', 'type': 'str'}, + 'core_count': {'key': 'coreCount', 'type': 'int'}, + 'time_to_live': {'key': 'timeToLive', 'type': 'int'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + compute_type: Optional[Union[str, "DataFlowComputeType"]] = None, + core_count: Optional[int] = None, + time_to_live: Optional[int] = None, + **kwargs + ): + super(IntegrationRuntimeDataFlowProperties, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.compute_type = compute_type + self.core_count = core_count + self.time_to_live = time_to_live + + +class IntegrationRuntimeDataProxyProperties(msrest.serialization.Model): + """Data proxy properties for a managed dedicated integration runtime. + + :param connect_via: The self-hosted integration runtime reference. + :type connect_via: ~azure.mgmt.datafactory.models.EntityReference + :param staging_linked_service: The staging linked service reference. + :type staging_linked_service: ~azure.mgmt.datafactory.models.EntityReference + :param path: The path to contain the staged data in the Blob storage. + :type path: str + """ + + _attribute_map = { + 'connect_via': {'key': 'connectVia', 'type': 'EntityReference'}, + 'staging_linked_service': {'key': 'stagingLinkedService', 'type': 'EntityReference'}, + 'path': {'key': 'path', 'type': 'str'}, + } + + def __init__( + self, + *, + connect_via: Optional["EntityReference"] = None, + staging_linked_service: Optional["EntityReference"] = None, + path: Optional[str] = None, + **kwargs + ): + super(IntegrationRuntimeDataProxyProperties, self).__init__(**kwargs) + self.connect_via = connect_via + self.staging_linked_service = staging_linked_service + self.path = path + + +class IntegrationRuntimeListResponse(msrest.serialization.Model): + """A list of integration runtime resources. + + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of integration runtimes. + :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeResource] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _validation = { + 'value': {'required': True}, + } + + _attribute_map = { + 'value': {'key': 'value', 'type': '[IntegrationRuntimeResource]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: List["IntegrationRuntimeResource"], + next_link: Optional[str] = None, + **kwargs + ): + super(IntegrationRuntimeListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class IntegrationRuntimeMonitoringData(msrest.serialization.Model): + """Get monitoring data response. + + :param name: Integration runtime name. + :type name: str + :param nodes: Integration runtime node monitoring data. 
+ :type nodes: list[~azure.mgmt.datafactory.models.IntegrationRuntimeNodeMonitoringData] + """ + + _attribute_map = { + 'name': {'key': 'name', 'type': 'str'}, + 'nodes': {'key': 'nodes', 'type': '[IntegrationRuntimeNodeMonitoringData]'}, + } + + def __init__( + self, + *, + name: Optional[str] = None, + nodes: Optional[List["IntegrationRuntimeNodeMonitoringData"]] = None, + **kwargs + ): + super(IntegrationRuntimeMonitoringData, self).__init__(**kwargs) + self.name = name + self.nodes = nodes + + +class IntegrationRuntimeNodeIpAddress(msrest.serialization.Model): + """The IP address of self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar ip_address: The IP address of self-hosted integration runtime node. + :vartype ip_address: str + """ + + _validation = { + 'ip_address': {'readonly': True}, + } + + _attribute_map = { + 'ip_address': {'key': 'ipAddress', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(IntegrationRuntimeNodeIpAddress, self).__init__(**kwargs) + self.ip_address = None + + +class IntegrationRuntimeNodeMonitoringData(msrest.serialization.Model): + """Monitoring data for integration runtime node. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar available_memory_in_mb: Available memory (MB) on the integration runtime node. + :vartype available_memory_in_mb: int + :ivar cpu_utilization: CPU percentage on the integration runtime node. + :vartype cpu_utilization: int + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration runtime node. + :vartype concurrent_jobs_limit: int + :ivar concurrent_jobs_running: The number of jobs currently running on the integration runtime + node. + :vartype concurrent_jobs_running: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration runtime. + :vartype max_concurrent_jobs: int + :ivar sent_bytes: Sent bytes on the integration runtime node. + :vartype sent_bytes: float + :ivar received_bytes: Received bytes on the integration runtime node. 
+ :vartype received_bytes: float + """ + + _validation = { + 'node_name': {'readonly': True}, + 'available_memory_in_mb': {'readonly': True}, + 'cpu_utilization': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'concurrent_jobs_running': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + 'sent_bytes': {'readonly': True}, + 'received_bytes': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'available_memory_in_mb': {'key': 'availableMemoryInMB', 'type': 'int'}, + 'cpu_utilization': {'key': 'cpuUtilization', 'type': 'int'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 'type': 'int'}, + 'concurrent_jobs_running': {'key': 'concurrentJobsRunning', 'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + 'sent_bytes': {'key': 'sentBytes', 'type': 'float'}, + 'received_bytes': {'key': 'receivedBytes', 'type': 'float'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(IntegrationRuntimeNodeMonitoringData, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_name = None + self.available_memory_in_mb = None + self.cpu_utilization = None + self.concurrent_jobs_limit = None + self.concurrent_jobs_running = None + self.max_concurrent_jobs = None + self.sent_bytes = None + self.received_bytes = None + + +class IntegrationRuntimeReference(msrest.serialization.Model): + """Integration runtime reference type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Type of integration runtime. Default value: + "IntegrationRuntimeReference". + :vartype type: str + :param reference_name: Required. Reference integration runtime name. + :type reference_name: str + :param parameters: Arguments for integration runtime. + :type parameters: object + """ + + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': 'object'}, + } + + type = "IntegrationRuntimeReference" + + def __init__( + self, + *, + reference_name: str, + parameters: Optional[object] = None, + **kwargs + ): + super(IntegrationRuntimeReference, self).__init__(**kwargs) + self.reference_name = reference_name + self.parameters = parameters + + +class IntegrationRuntimeRegenerateKeyParameters(msrest.serialization.Model): + """Parameters to regenerate the authentication key. + + :param key_name: The name of the authentication key to regenerate. Possible values include: + "authKey1", "authKey2". + :type key_name: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAuthKeyName + """ + + _attribute_map = { + 'key_name': {'key': 'keyName', 'type': 'str'}, + } + + def __init__( + self, + *, + key_name: Optional[Union[str, "IntegrationRuntimeAuthKeyName"]] = None, + **kwargs + ): + super(IntegrationRuntimeRegenerateKeyParameters, self).__init__(**kwargs) + self.key_name = key_name + + +class SubResource(msrest.serialization.Model): + """Azure Data Factory nested resource, which belongs to a factory. + + Variables are only populated by the server, and will be ignored when sending a request. 
+ + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(SubResource, self).__init__(**kwargs) + self.id = None + self.name = None + self.type = None + self.etag = None + + +class IntegrationRuntimeResource(SubResource): + """Integration runtime resource type. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Integration runtime properties. + :type properties: object + """ + + _validation = { + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, + } + + _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, + } + + def __init__( + self, + *, + properties: object, + **kwargs + ): + super(IntegrationRuntimeResource, self).__init__(**kwargs) + self.properties = properties + + +class IntegrationRuntimeSsisCatalogInfo(msrest.serialization.Model): + """Catalog information for managed dedicated integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_server_endpoint: The catalog database server URL. + :type catalog_server_endpoint: str + :param catalog_admin_user_name: The administrator user name of catalog database. + :type catalog_admin_user_name: str + :param catalog_admin_password: The password of the administrator user account of the catalog + database. + :type catalog_admin_password: object + :param catalog_pricing_tier: The pricing tier for the catalog database. The valid values could + be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/. Possible values + include: "Basic", "Standard", "Premium", "PremiumRS". 
+ :type catalog_pricing_tier: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogPricingTier + """ + + _validation = { + 'catalog_admin_user_name': {'max_length': 128, 'min_length': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'catalog_server_endpoint': {'key': 'catalogServerEndpoint', 'type': 'str'}, + 'catalog_admin_user_name': {'key': 'catalogAdminUserName', 'type': 'str'}, + 'catalog_admin_password': {'key': 'catalogAdminPassword', 'type': 'object'}, + 'catalog_pricing_tier': {'key': 'catalogPricingTier', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + catalog_server_endpoint: Optional[str] = None, + catalog_admin_user_name: Optional[str] = None, + catalog_admin_password: Optional[object] = None, + catalog_pricing_tier: Optional[Union[str, "IntegrationRuntimeSsisCatalogPricingTier"]] = None, + **kwargs + ): + super(IntegrationRuntimeSsisCatalogInfo, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.catalog_server_endpoint = catalog_server_endpoint + self.catalog_admin_user_name = catalog_admin_user_name + self.catalog_admin_password = catalog_admin_password + self.catalog_pricing_tier = catalog_pricing_tier + + +class IntegrationRuntimeSsisProperties(msrest.serialization.Model): + """SSIS properties for managed integration runtime. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param catalog_info: Catalog information for managed dedicated integration runtime. + :type catalog_info: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisCatalogInfo + :param license_type: License type for bringing your own license scenario. Possible values + include: "BasePrice", "LicenseIncluded". + :type license_type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeLicenseType + :param custom_setup_script_properties: Custom setup script properties for a managed dedicated + integration runtime. + :type custom_setup_script_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeCustomSetupScriptProperties + :param data_proxy_properties: Data proxy properties for a managed dedicated integration + runtime. + :type data_proxy_properties: + ~azure.mgmt.datafactory.models.IntegrationRuntimeDataProxyProperties + :param edition: The edition for the SSIS Integration Runtime. Possible values include: + "Standard", "Enterprise". + :type edition: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeEdition + :param express_custom_setup_properties: Custom setup without script properties for a SSIS + integration runtime. + :type express_custom_setup_properties: list[~azure.mgmt.datafactory.models.CustomSetupBase] + :param package_stores: Package stores for the SSIS Integration Runtime. 
+    :type package_stores: list[~azure.mgmt.datafactory.models.PackageStore]
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'catalog_info': {'key': 'catalogInfo', 'type': 'IntegrationRuntimeSsisCatalogInfo'},
+        'license_type': {'key': 'licenseType', 'type': 'str'},
+        'custom_setup_script_properties': {'key': 'customSetupScriptProperties', 'type': 'IntegrationRuntimeCustomSetupScriptProperties'},
+        'data_proxy_properties': {'key': 'dataProxyProperties', 'type': 'IntegrationRuntimeDataProxyProperties'},
+        'edition': {'key': 'edition', 'type': 'str'},
+        'express_custom_setup_properties': {'key': 'expressCustomSetupProperties', 'type': '[CustomSetupBase]'},
+        'package_stores': {'key': 'packageStores', 'type': '[PackageStore]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        catalog_info: Optional["IntegrationRuntimeSsisCatalogInfo"] = None,
+        license_type: Optional[Union[str, "IntegrationRuntimeLicenseType"]] = None,
+        custom_setup_script_properties: Optional["IntegrationRuntimeCustomSetupScriptProperties"] = None,
+        data_proxy_properties: Optional["IntegrationRuntimeDataProxyProperties"] = None,
+        edition: Optional[Union[str, "IntegrationRuntimeEdition"]] = None,
+        express_custom_setup_properties: Optional[List["CustomSetupBase"]] = None,
+        package_stores: Optional[List["PackageStore"]] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntimeSsisProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.catalog_info = catalog_info
+        self.license_type = license_type
+        self.custom_setup_script_properties = custom_setup_script_properties
+        self.data_proxy_properties = data_proxy_properties
+        self.edition = edition
+        self.express_custom_setup_properties = express_custom_setup_properties
+        self.package_stores = package_stores
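A minimal sketch of composing the SSIS-related models above, assuming the generated `azure.mgmt.datafactory.models` package; endpoint and credential values are placeholders:

```python
from azure.mgmt.datafactory.models import (
    IntegrationRuntimeSsisCatalogInfo,
    IntegrationRuntimeSsisProperties,
)

catalog = IntegrationRuntimeSsisCatalogInfo(
    catalog_server_endpoint='fakeserver.database.windows.net',
    catalog_admin_user_name='fakeAdmin',
    catalog_admin_password='<placeholder secret>',  # wire type is an open 'object'
    catalog_pricing_tier='Basic',
)

ssis = IntegrationRuntimeSsisProperties(
    catalog_info=catalog,
    license_type='LicenseIncluded',
    edition='Standard',
)

# Keys such as 'typeProperties.ssisProperties' in ManagedIntegrationRuntime
# (below) are flattened paths; msrest expands them into nested JSON on the wire.
```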
+
+
+class IntegrationRuntimeStatus(msrest.serialization.Model):
+    """Integration runtime status.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: ManagedIntegrationRuntimeStatus, SelfHostedIntegrationRuntimeStatus.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param type: Required. Type of integration runtime.Constant filled by server. Possible values
+     include: "Managed", "SelfHosted".
+    :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType
+    :ivar data_factory_name: The data factory name to which the integration runtime belongs.
+    :vartype data_factory_name: str
+    :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped",
+     "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline",
+     "AccessDenied".
+    :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'data_factory_name': {'readonly': True},
+        'state': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'state': {'key': 'state', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'type': {'Managed': 'ManagedIntegrationRuntimeStatus', 'SelfHosted': 'SelfHostedIntegrationRuntimeStatus'}
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntimeStatus, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.type: str = 'IntegrationRuntimeStatus'
+        self.data_factory_name = None
+        self.state = None
+
+
+class IntegrationRuntimeStatusListResponse(msrest.serialization.Model):
+    """A list of integration runtime statuses.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param value: Required. List of integration runtime statuses.
+    :type value: list[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse]
+    :param next_link: The link to the next page of results, if any remaining results exist.
+    :type next_link: str
+    """
+
+    _validation = {
+        'value': {'required': True},
+    }
+
+    _attribute_map = {
+        'value': {'key': 'value', 'type': '[IntegrationRuntimeStatusResponse]'},
+        'next_link': {'key': 'nextLink', 'type': 'str'},
+    }
+
+    def __init__(
+        self,
+        *,
+        value: List["IntegrationRuntimeStatusResponse"],
+        next_link: Optional[str] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntimeStatusListResponse, self).__init__(**kwargs)
+        self.value = value
+        self.next_link = next_link
+
+
+class IntegrationRuntimeStatusResponse(msrest.serialization.Model):
+    """Integration runtime status response.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :ivar name: The integration runtime name.
+    :vartype name: str
+    :param properties: Required. Integration runtime properties.
+    :type properties: object
+    """
+
+    _validation = {
+        'name': {'readonly': True},
+        'properties': {'required': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'properties': {'key': 'properties', 'type': 'object'},
+    }
+
+    def __init__(
+        self,
+        *,
+        properties: object,
+        **kwargs
+    ):
+        super(IntegrationRuntimeStatusResponse, self).__init__(**kwargs)
+        self.name = None
+        self.properties = properties
+
+
+class IntegrationRuntimeVNetProperties(msrest.serialization.Model):
+    """VNet properties for managed integration runtime.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param v_net_id: The ID of the VNet that this integration runtime will join.
+    :type v_net_id: str
+    :param subnet: The name of the subnet this integration runtime will join.
+    :type subnet: str
+    :param public_i_ps: Resource IDs of the public IP addresses that this integration runtime will
+     use.
+    :type public_i_ps: list[str]
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'v_net_id': {'key': 'vNetId', 'type': 'str'},
+        'subnet': {'key': 'subnet', 'type': 'str'},
+        'public_i_ps': {'key': 'publicIPs', 'type': '[str]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        v_net_id: Optional[str] = None,
+        subnet: Optional[str] = None,
+        public_i_ps: Optional[List[str]] = None,
+        **kwargs
+    ):
+        super(IntegrationRuntimeVNetProperties, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.v_net_id = v_net_id
+        self.subnet = subnet
+        self.public_i_ps = public_i_ps
+
+
+class LinkedIntegrationRuntime(msrest.serialization.Model):
+    """The linked integration runtime information.
+
+    Variables are only populated by the server, and will be ignored when sending a request.
+
+    :ivar name: The name of the linked integration runtime.
+    :vartype name: str
+    :ivar subscription_id: The subscription ID to which the linked integration runtime belongs.
+    :vartype subscription_id: str
+    :ivar data_factory_name: The name of the data factory to which the linked integration runtime
+     belongs.
+    :vartype data_factory_name: str
+    :ivar data_factory_location: The location of the data factory to which the linked integration
+     runtime belongs.
+    :vartype data_factory_location: str
+    :ivar create_time: The creation time of the linked integration runtime.
+    :vartype create_time: ~datetime.datetime
+    """
+
+    _validation = {
+        'name': {'readonly': True},
+        'subscription_id': {'readonly': True},
+        'data_factory_name': {'readonly': True},
+        'data_factory_location': {'readonly': True},
+        'create_time': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'subscription_id': {'key': 'subscriptionId', 'type': 'str'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'data_factory_location': {'key': 'dataFactoryLocation', 'type': 'str'},
+        'create_time': {'key': 'createTime', 'type': 'iso-8601'},
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedIntegrationRuntime, self).__init__(**kwargs)
+        self.name = None
+        self.subscription_id = None
+        self.data_factory_name = None
+        self.data_factory_location = None
+        self.create_time = None
+
+
+class LinkedIntegrationRuntimeType(msrest.serialization.Model):
+    """The base definition of a linked integration runtime.
+
+    You probably want to use the sub-classes and not this class directly. Known
+    sub-classes are: LinkedIntegrationRuntimeKeyAuthorization, LinkedIntegrationRuntimeRbacAuthorization.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param authorization_type: Required. The authorization type for integration runtime
+     sharing.Constant filled by server.
+    :type authorization_type: str
+    """
+
+    _validation = {
+        'authorization_type': {'required': True},
+    }
+
+    _attribute_map = {
+        'authorization_type': {'key': 'authorizationType', 'type': 'str'},
+    }
+
+    _subtype_map = {
+        'authorization_type': {'Key': 'LinkedIntegrationRuntimeKeyAuthorization', 'RBAC': 'LinkedIntegrationRuntimeRbacAuthorization'}
+    }
+
+    def __init__(
+        self,
+        **kwargs
+    ):
+        super(LinkedIntegrationRuntimeType, self).__init__(**kwargs)
+        self.authorization_type: Optional[str] = None
+
+
+class LinkedIntegrationRuntimeKeyAuthorization(LinkedIntegrationRuntimeType):
+    """The key authorization type integration runtime.
+ + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param key: Required. The key used for authorization. + :type key: object + """ + + _validation = { + 'authorization_type': {'required': True}, + 'key': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'key': {'key': 'key', 'type': 'object'}, + } + + def __init__( + self, + *, + key: object, + **kwargs + ): + super(LinkedIntegrationRuntimeKeyAuthorization, self).__init__(**kwargs) + self.authorization_type: str = 'Key' + self.key = key + + +class LinkedIntegrationRuntimeRbacAuthorization(LinkedIntegrationRuntimeType): + """The role based access control (RBAC) authorization type integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param authorization_type: Required. The authorization type for integration runtime + sharing.Constant filled by server. + :type authorization_type: str + :param resource_id: Required. The resource identifier of the integration runtime to be shared. + :type resource_id: str + """ + + _validation = { + 'authorization_type': {'required': True}, + 'resource_id': {'required': True}, + } + + _attribute_map = { + 'authorization_type': {'key': 'authorizationType', 'type': 'str'}, + 'resource_id': {'key': 'resourceId', 'type': 'str'}, + } + + def __init__( + self, + *, + resource_id: str, + **kwargs + ): + super(LinkedIntegrationRuntimeRbacAuthorization, self).__init__(**kwargs) + self.authorization_type: str = 'RBAC' + self.resource_id = resource_id + + +class LinkedIntegrationRuntimeRequest(msrest.serialization.Model): + """Data factory name for linked integration runtime request. + + All required parameters must be populated in order to send to Azure. + + :param linked_factory_name: Required. The data factory name for linked integration runtime. + :type linked_factory_name: str + """ + + _validation = { + 'linked_factory_name': {'required': True}, + } + + _attribute_map = { + 'linked_factory_name': {'key': 'factoryName', 'type': 'str'}, + } + + def __init__( + self, + *, + linked_factory_name: str, + **kwargs + ): + super(LinkedIntegrationRuntimeRequest, self).__init__(**kwargs) + self.linked_factory_name = linked_factory_name + + +class ManagedIntegrationRuntime(IntegrationRuntime): + """Managed integration runtime, including managed elastic and managed dedicated integration runtimes. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :ivar state: Integration runtime state, only valid for managed dedicated integration runtime. + Possible values include: "Initial", "Stopped", "Started", "Starting", "Stopping", + "NeedRegistration", "Online", "Limited", "Offline", "AccessDenied". 
+ :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :param compute_properties: The compute resource for managed integration runtime. + :type compute_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeComputeProperties + :param ssis_properties: SSIS properties for managed integration runtime. + :type ssis_properties: ~azure.mgmt.datafactory.models.IntegrationRuntimeSsisProperties + """ + + _validation = { + 'type': {'required': True}, + 'state': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'compute_properties': {'key': 'typeProperties.computeProperties', 'type': 'IntegrationRuntimeComputeProperties'}, + 'ssis_properties': {'key': 'typeProperties.ssisProperties', 'type': 'IntegrationRuntimeSsisProperties'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + compute_properties: Optional["IntegrationRuntimeComputeProperties"] = None, + ssis_properties: Optional["IntegrationRuntimeSsisProperties"] = None, + **kwargs + ): + super(ManagedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.type: str = 'Managed' + self.state = None + self.compute_properties = compute_properties + self.ssis_properties = ssis_properties + + +class ManagedIntegrationRuntimeError(msrest.serialization.Model): + """Error definition for managed integration runtime. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar time: The time when the error occurred. + :vartype time: ~datetime.datetime + :ivar code: Error code. + :vartype code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar message: Error message. + :vartype message: str + """ + + _validation = { + 'time': {'readonly': True}, + 'code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'message': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'time': {'key': 'time', 'type': 'iso-8601'}, + 'code': {'key': 'code', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'message': {'key': 'message', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ManagedIntegrationRuntimeError, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.time = None + self.code = None + self.parameters = None + self.message = None + + +class ManagedIntegrationRuntimeNode(msrest.serialization.Model): + """Properties of integration runtime node. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar node_id: The managed integration runtime node id. + :vartype node_id: str + :ivar status: The managed integration runtime node status. Possible values include: "Starting", + "Available", "Recycling", "Unavailable". 
+ :vartype status: str or ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNodeStatus + :param errors: The errors that occurred on this integration runtime node. + :type errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError] + """ + + _validation = { + 'node_id': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_id': {'key': 'nodeId', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'errors': {'key': 'errors', 'type': '[ManagedIntegrationRuntimeError]'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + errors: Optional[List["ManagedIntegrationRuntimeError"]] = None, + **kwargs + ): + super(ManagedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_id = None + self.status = None + self.errors = errors + + +class ManagedIntegrationRuntimeOperationResult(msrest.serialization.Model): + """Properties of managed integration runtime operation result. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar type: The operation type. Could be start or stop. + :vartype type: str + :ivar start_time: The start time of the operation. + :vartype start_time: ~datetime.datetime + :ivar result: The operation result. + :vartype result: str + :ivar error_code: The error code. + :vartype error_code: str + :ivar parameters: Managed integration runtime error parameters. + :vartype parameters: list[str] + :ivar activity_id: The activity id for the operation request. + :vartype activity_id: str + """ + + _validation = { + 'type': {'readonly': True}, + 'start_time': {'readonly': True}, + 'result': {'readonly': True}, + 'error_code': {'readonly': True}, + 'parameters': {'readonly': True}, + 'activity_id': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'result': {'key': 'result', 'type': 'str'}, + 'error_code': {'key': 'errorCode', 'type': 'str'}, + 'parameters': {'key': 'parameters', 'type': '[str]'}, + 'activity_id': {'key': 'activityId', 'type': 'str'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(ManagedIntegrationRuntimeOperationResult, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.type = None + self.start_time = None + self.result = None + self.error_code = None + self.parameters = None + self.activity_id = None + + +class ManagedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Managed integration runtime status. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". 
+    :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType
+    :ivar data_factory_name: The data factory name to which the integration runtime belongs.
+    :vartype data_factory_name: str
+    :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped",
+     "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline",
+     "AccessDenied".
+    :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState
+    :ivar create_time: The time at which the integration runtime was created, in ISO8601 format.
+    :vartype create_time: ~datetime.datetime
+    :ivar nodes: The list of nodes for managed integration runtime.
+    :vartype nodes: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeNode]
+    :ivar other_errors: The errors that occurred on this integration runtime.
+    :vartype other_errors: list[~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeError]
+    :ivar last_operation: The last operation result that occurred on this integration runtime.
+    :vartype last_operation:
+     ~azure.mgmt.datafactory.models.ManagedIntegrationRuntimeOperationResult
+    """
+
+    _validation = {
+        'type': {'required': True},
+        'data_factory_name': {'readonly': True},
+        'state': {'readonly': True},
+        'create_time': {'readonly': True},
+        'nodes': {'readonly': True},
+        'other_errors': {'readonly': True},
+        'last_operation': {'readonly': True},
+    }
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'type': {'key': 'type', 'type': 'str'},
+        'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'},
+        'state': {'key': 'state', 'type': 'str'},
+        'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'},
+        'nodes': {'key': 'typeProperties.nodes', 'type': '[ManagedIntegrationRuntimeNode]'},
+        'other_errors': {'key': 'typeProperties.otherErrors', 'type': '[ManagedIntegrationRuntimeError]'},
+        'last_operation': {'key': 'typeProperties.lastOperation', 'type': 'ManagedIntegrationRuntimeOperationResult'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        **kwargs
+    ):
+        super(ManagedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs)
+        self.type: str = 'Managed'
+        self.create_time = None
+        self.nodes = None
+        self.other_errors = None
+        self.last_operation = None
+
+
+class PackageStore(msrest.serialization.Model):
+    """Package store for the SSIS integration runtime.
+
+    All required parameters must be populated in order to send to Azure.
+
+    :param name: Required. The name of the package store.
+    :type name: str
+    :param package_store_linked_service: Required. The package store linked service reference.
+    :type package_store_linked_service: ~azure.mgmt.datafactory.models.EntityReference
+    """
+
+    _validation = {
+        'name': {'required': True},
+        'package_store_linked_service': {'required': True},
+    }
+
+    _attribute_map = {
+        'name': {'key': 'name', 'type': 'str'},
+        'package_store_linked_service': {'key': 'packageStoreLinkedService', 'type': 'EntityReference'},
+    }
+
+    def __init__(
+        self,
+        *,
+        name: str,
+        package_store_linked_service: "EntityReference",
+        **kwargs
+    ):
+        super(PackageStore, self).__init__(**kwargs)
+        self.name = name
+        self.package_store_linked_service = package_store_linked_service
+
+
+class RecurrenceSchedule(msrest.serialization.Model):
+    """The recurrence schedule.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param minutes: The minutes.
+    :type minutes: list[int]
+    :param hours: The hours.
+    :type hours: list[int]
+    :param week_days: The days of the week.
+    :type week_days: list[str or ~azure.mgmt.datafactory.models.DaysOfWeek]
+    :param month_days: The month days.
+    :type month_days: list[int]
+    :param monthly_occurrences: The monthly occurrences.
+    :type monthly_occurrences: list[~azure.mgmt.datafactory.models.RecurrenceScheduleOccurrence]
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'minutes': {'key': 'minutes', 'type': '[int]'},
+        'hours': {'key': 'hours', 'type': '[int]'},
+        'week_days': {'key': 'weekDays', 'type': '[str]'},
+        'month_days': {'key': 'monthDays', 'type': '[int]'},
+        'monthly_occurrences': {'key': 'monthlyOccurrences', 'type': '[RecurrenceScheduleOccurrence]'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        minutes: Optional[List[int]] = None,
+        hours: Optional[List[int]] = None,
+        week_days: Optional[List[Union[str, "DaysOfWeek"]]] = None,
+        month_days: Optional[List[int]] = None,
+        monthly_occurrences: Optional[List["RecurrenceScheduleOccurrence"]] = None,
+        **kwargs
+    ):
+        super(RecurrenceSchedule, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.minutes = minutes
+        self.hours = hours
+        self.week_days = week_days
+        self.month_days = month_days
+        self.monthly_occurrences = monthly_occurrences
+
+
+class RecurrenceScheduleOccurrence(msrest.serialization.Model):
+    """The recurrence schedule occurrence.
+
+    :param additional_properties: Unmatched properties from the message are deserialized to this
+     collection.
+    :type additional_properties: dict[str, object]
+    :param day: The day of the week. Possible values include: "Sunday", "Monday", "Tuesday",
+     "Wednesday", "Thursday", "Friday", "Saturday".
+    :type day: str or ~azure.mgmt.datafactory.models.DayOfWeek
+    :param occurrence: The occurrence.
+    :type occurrence: int
+    """
+
+    _attribute_map = {
+        'additional_properties': {'key': '', 'type': '{object}'},
+        'day': {'key': 'day', 'type': 'str'},
+        'occurrence': {'key': 'occurrence', 'type': 'int'},
+    }
+
+    def __init__(
+        self,
+        *,
+        additional_properties: Optional[Dict[str, object]] = None,
+        day: Optional[Union[str, "DayOfWeek"]] = None,
+        occurrence: Optional[int] = None,
+        **kwargs
+    ):
+        super(RecurrenceScheduleOccurrence, self).__init__(**kwargs)
+        self.additional_properties = additional_properties
+        self.day = day
+        self.occurrence = occurrence
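A minimal sketch of the two recurrence models above, assuming the generated `azure.mgmt.datafactory.models` package: a schedule that fires at 08:30 on the first Monday of each month.

```python
from azure.mgmt.datafactory.models import (
    RecurrenceSchedule,
    RecurrenceScheduleOccurrence,
)

schedule = RecurrenceSchedule(
    minutes=[30],
    hours=[8],
    monthly_occurrences=[
        RecurrenceScheduleOccurrence(day='Monday', occurrence=1),
    ],
)

# serialize() drops unset fields and uses the camelCase keys from
# _attribute_map, e.g. roughly:
# {'minutes': [30], 'hours': [8],
#  'monthlyOccurrences': [{'day': 'Monday', 'occurrence': 1}]}
print(schedule.serialize())
```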
+ :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param parent_trigger: Required. The parent trigger reference. + :type parent_trigger: object + :param requested_start_time: Required. The start time for the time period for which restatement + is initiated. Only UTC time is currently supported. + :type requested_start_time: ~datetime.datetime + :param requested_end_time: Required. The end time for the time period for which restatement is + initiated. Only UTC time is currently supported. + :type requested_end_time: ~datetime.datetime + :param rerun_concurrency: Required. The max number of parallel time windows (ready for + execution) for which a rerun is triggered. + :type rerun_concurrency: int + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'parent_trigger': {'required': True}, + 'requested_start_time': {'required': True}, + 'requested_end_time': {'required': True}, + 'rerun_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'parent_trigger': {'key': 'typeProperties.parentTrigger', 'type': 'object'}, + 'requested_start_time': {'key': 'typeProperties.requestedStartTime', 'type': 'iso-8601'}, + 'requested_end_time': {'key': 'typeProperties.requestedEndTime', 'type': 'iso-8601'}, + 'rerun_concurrency': {'key': 'typeProperties.rerunConcurrency', 'type': 'int'}, + } + + def __init__( + self, + *, + parent_trigger: object, + requested_start_time: datetime.datetime, + requested_end_time: datetime.datetime, + rerun_concurrency: int, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + **kwargs + ): + super(RerunTumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type: str = 'RerunTumblingWindowTrigger' + self.parent_trigger = parent_trigger + self.requested_start_time = requested_start_time + self.requested_end_time = requested_end_time + self.rerun_concurrency = rerun_concurrency + + +class RetryPolicy(msrest.serialization.Model): + """Execution policy for an activity. + + :param count: Maximum ordinary retry attempts. Default is 0. Type: integer (or Expression with + resultType integer), minimum: 0. + :type count: object + :param interval_in_seconds: Interval between retries in seconds. Default is 30. + :type interval_in_seconds: int + """ + + _validation = { + 'interval_in_seconds': {'maximum': 86400, 'minimum': 30}, + } + + _attribute_map = { + 'count': {'key': 'count', 'type': 'object'}, + 'interval_in_seconds': {'key': 'intervalInSeconds', 'type': 'int'}, + } + + def __init__( + self, + *, + count: Optional[object] = None, + interval_in_seconds: Optional[int] = None, + **kwargs + ): + super(RetryPolicy, self).__init__(**kwargs) + self.count = count + self.interval_in_seconds = interval_in_seconds + + +class ScheduleTrigger(MultiplePipelineTrigger): + """Trigger that creates pipeline runs periodically, on schedule. 
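
RerunTumblingWindowTrigger and RetryPolicy above are plain client-side models; a minimal sketch using only the keyword arguments shown in their __init__ signatures. Since parent_trigger is typed as a bare object in this vendored SDK, the shape of the reference dict below is an assumption:

``` python
import datetime

from azure.mgmt.datafactory import models

# Rerun every ready window in the requested span; rerun_concurrency is
# checked against the 1-50 range declared in the _validation map above.
rerun = models.RerunTumblingWindowTrigger(
    parent_trigger={"referenceName": "MyTumblingWindowTrigger", "type": "TriggerReference"},
    requested_start_time=datetime.datetime(2020, 6, 1, tzinfo=datetime.timezone.utc),
    requested_end_time=datetime.datetime(2020, 6, 2, tzinfo=datetime.timezone.utc),
    rerun_concurrency=10,
)
```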
+ + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. + :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipelines: Pipelines that need to be started. + :type pipelines: list[object] + :param recurrence: Required. Recurrence schedule configuration. + :type recurrence: ~azure.mgmt.datafactory.models.ScheduleTriggerRecurrence + """ + + _validation = { + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'recurrence': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipelines': {'key': 'pipelines', 'type': '[object]'}, + 'recurrence': {'key': 'typeProperties.recurrence', 'type': 'ScheduleTriggerRecurrence'}, + } + + def __init__( + self, + *, + recurrence: "ScheduleTriggerRecurrence", + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + pipelines: Optional[List[object]] = None, + **kwargs + ): + super(ScheduleTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, pipelines=pipelines, **kwargs) + self.type: str = 'ScheduleTrigger' + self.recurrence = recurrence + + +class ScheduleTriggerRecurrence(msrest.serialization.Model): + """The workflow trigger recurrence. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param frequency: The frequency. Possible values include: "NotSpecified", "Minute", "Hour", + "Day", "Week", "Month", "Year". + :type frequency: str or ~azure.mgmt.datafactory.models.RecurrenceFrequency + :param interval: The interval. + :type interval: int + :param start_time: The start time. + :type start_time: ~datetime.datetime + :param end_time: The end time. + :type end_time: ~datetime.datetime + :param time_zone: The time zone. + :type time_zone: str + :param schedule: The recurrence schedule. 
+ :type schedule: ~azure.mgmt.datafactory.models.RecurrenceSchedule + """ + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'frequency': {'key': 'frequency', 'type': 'str'}, + 'interval': {'key': 'interval', 'type': 'int'}, + 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, + 'time_zone': {'key': 'timeZone', 'type': 'str'}, + 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + frequency: Optional[Union[str, "RecurrenceFrequency"]] = None, + interval: Optional[int] = None, + start_time: Optional[datetime.datetime] = None, + end_time: Optional[datetime.datetime] = None, + time_zone: Optional[str] = None, + schedule: Optional["RecurrenceSchedule"] = None, + **kwargs + ): + super(ScheduleTriggerRecurrence, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.time_zone = time_zone + self.schedule = schedule + + +class SecretBase(msrest.serialization.Model): + """The base definition of a secret type. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SecureString. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of the secret.Constant filled by server. + :type type: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'SecureString': 'SecureString'} + } + + def __init__( + self, + **kwargs + ): + super(SecretBase, self).__init__(**kwargs) + self.type: Optional[str] = None + + +class SecureString(SecretBase): + """Azure Data Factory secure string definition. The string value will be masked with asterisks '*' during Get or List API calls. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of the secret.Constant filled by server. + :type type: str + :param value: Required. Value of secure string. + :type value: str + """ + + _validation = { + 'type': {'required': True}, + 'value': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'value': {'key': 'value', 'type': 'str'}, + } + + def __init__( + self, + *, + value: str, + **kwargs + ): + super(SecureString, self).__init__(**kwargs) + self.type: str = 'SecureString' + self.value = value + + +class SelfDependencyTumblingWindowTriggerReference(DependencyReference): + """Self referenced tumbling window trigger dependency. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param offset: Required. Timespan applied to the start time of a tumbling window when + evaluating dependency. + :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. 
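
The three schedule models above compose: RecurrenceSchedule narrows the slots, ScheduleTriggerRecurrence carries the frequency and window, and ScheduleTrigger ties the recurrence to pipelines. A sketch using the constructors shown, with the pipeline-reference dict shape assumed because pipelines is typed as list[object] here:

``` python
import datetime

from azure.mgmt.datafactory import models

# Fire at 09:30 every Monday, starting June 2020.
recurrence = models.ScheduleTriggerRecurrence(
    frequency="Week",
    interval=1,
    start_time=datetime.datetime(2020, 6, 1, tzinfo=datetime.timezone.utc),
    time_zone="UTC",
    schedule=models.RecurrenceSchedule(hours=[9], minutes=[30], week_days=["Monday"]),
)

trigger = models.ScheduleTrigger(
    recurrence=recurrence,
    pipelines=[{"pipelineReference": {"referenceName": "myPipeline", "type": "PipelineReference"}}],
)
```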
+ :type size: str + """ + + _validation = { + 'type': {'required': True}, + 'offset': {'required': True, 'max_length': 15, 'min_length': 8, 'pattern': r'-((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, + } + + def __init__( + self, + *, + offset: str, + size: Optional[str] = None, + **kwargs + ): + super(SelfDependencyTumblingWindowTriggerReference, self).__init__(**kwargs) + self.type: str = 'SelfDependencyTumblingWindowTriggerReference' + self.offset = offset + self.size = size + + +class SelfHostedIntegrationRuntime(IntegrationRuntime): + """Self-hosted integration runtime. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :param description: Integration runtime description. + :type description: str + :param linked_info: The base definition of a linked integration runtime. + :type linked_info: ~azure.mgmt.datafactory.models.LinkedIntegrationRuntimeType + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'linked_info': {'key': 'typeProperties.linkedInfo', 'type': 'LinkedIntegrationRuntimeType'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + linked_info: Optional["LinkedIntegrationRuntimeType"] = None, + **kwargs + ): + super(SelfHostedIntegrationRuntime, self).__init__(additional_properties=additional_properties, description=description, **kwargs) + self.type: str = 'SelfHosted' + self.linked_info = linked_info + + +class SelfHostedIntegrationRuntimeNode(msrest.serialization.Model): + """Properties of Self-hosted integration runtime node. + + Variables are only populated by the server, and will be ignored when sending a request. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :ivar node_name: Name of the integration runtime node. + :vartype node_name: str + :ivar machine_name: Machine name of the integration runtime node. + :vartype machine_name: str + :ivar host_service_uri: URI for the host machine of the integration runtime. + :vartype host_service_uri: str + :ivar status: Status of the integration runtime node. Possible values include: + "NeedRegistration", "Online", "Limited", "Offline", "Upgrading", "Initializing", + "InitializeFailed". + :vartype status: str or ~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNodeStatus + :ivar capabilities: The integration runtime capabilities dictionary. + :vartype capabilities: dict[str, str] + :ivar version_status: Status of the integration runtime node version. + :vartype version_status: str + :ivar version: Version of the integration runtime node. 
+ :vartype version: str + :ivar register_time: The time at which the integration runtime node was registered in ISO8601 + format. + :vartype register_time: ~datetime.datetime + :ivar last_connect_time: The most recent time at which the integration runtime was connected in + ISO8601 format. + :vartype last_connect_time: ~datetime.datetime + :ivar expiry_time: The time at which the integration runtime will expire in ISO8601 format. + :vartype expiry_time: ~datetime.datetime + :ivar last_start_time: The time the node last started up. + :vartype last_start_time: ~datetime.datetime + :ivar last_stop_time: The integration runtime node last stop time. + :vartype last_stop_time: ~datetime.datetime + :ivar last_update_result: The result of the last integration runtime node update. Possible + values include: "None", "Succeed", "Fail". + :vartype last_update_result: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeUpdateResult + :ivar last_start_update_time: The last time for the integration runtime node update start. + :vartype last_start_update_time: ~datetime.datetime + :ivar last_end_update_time: The last time for the integration runtime node update end. + :vartype last_end_update_time: ~datetime.datetime + :ivar is_active_dispatcher: Indicates whether this node is the active dispatcher for + integration runtime requests. + :vartype is_active_dispatcher: bool + :ivar concurrent_jobs_limit: Maximum concurrent jobs on the integration runtime node. + :vartype concurrent_jobs_limit: int + :ivar max_concurrent_jobs: The maximum concurrent jobs in this integration runtime. + :vartype max_concurrent_jobs: int + """ + + _validation = { + 'node_name': {'readonly': True}, + 'machine_name': {'readonly': True}, + 'host_service_uri': {'readonly': True}, + 'status': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'version_status': {'readonly': True}, + 'version': {'readonly': True}, + 'register_time': {'readonly': True}, + 'last_connect_time': {'readonly': True}, + 'expiry_time': {'readonly': True}, + 'last_start_time': {'readonly': True}, + 'last_stop_time': {'readonly': True}, + 'last_update_result': {'readonly': True}, + 'last_start_update_time': {'readonly': True}, + 'last_end_update_time': {'readonly': True}, + 'is_active_dispatcher': {'readonly': True}, + 'concurrent_jobs_limit': {'readonly': True}, + 'max_concurrent_jobs': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'node_name': {'key': 'nodeName', 'type': 'str'}, + 'machine_name': {'key': 'machineName', 'type': 'str'}, + 'host_service_uri': {'key': 'hostServiceUri', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + 'capabilities': {'key': 'capabilities', 'type': '{str}'}, + 'version_status': {'key': 'versionStatus', 'type': 'str'}, + 'version': {'key': 'version', 'type': 'str'}, + 'register_time': {'key': 'registerTime', 'type': 'iso-8601'}, + 'last_connect_time': {'key': 'lastConnectTime', 'type': 'iso-8601'}, + 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, + 'last_start_time': {'key': 'lastStartTime', 'type': 'iso-8601'}, + 'last_stop_time': {'key': 'lastStopTime', 'type': 'iso-8601'}, + 'last_update_result': {'key': 'lastUpdateResult', 'type': 'str'}, + 'last_start_update_time': {'key': 'lastStartUpdateTime', 'type': 'iso-8601'}, + 'last_end_update_time': {'key': 'lastEndUpdateTime', 'type': 'iso-8601'}, + 'is_active_dispatcher': {'key': 'isActiveDispatcher', 'type': 'bool'}, + 'concurrent_jobs_limit': {'key': 'concurrentJobsLimit', 
'type': 'int'}, + 'max_concurrent_jobs': {'key': 'maxConcurrentJobs', 'type': 'int'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + **kwargs + ): + super(SelfHostedIntegrationRuntimeNode, self).__init__(**kwargs) + self.additional_properties = additional_properties + self.node_name = None + self.machine_name = None + self.host_service_uri = None + self.status = None + self.capabilities = None + self.version_status = None + self.version = None + self.register_time = None + self.last_connect_time = None + self.expiry_time = None + self.last_start_time = None + self.last_stop_time = None + self.last_update_result = None + self.last_start_update_time = None + self.last_end_update_time = None + self.is_active_dispatcher = None + self.concurrent_jobs_limit = None + self.max_concurrent_jobs = None + + +class SelfHostedIntegrationRuntimeStatus(IntegrationRuntimeStatus): + """Self-hosted integration runtime status. + + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Type of integration runtime.Constant filled by server. Possible values + include: "Managed", "SelfHosted". + :type type: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeType + :ivar data_factory_name: The data factory name which the integration runtime belong to. + :vartype data_factory_name: str + :ivar state: The state of integration runtime. Possible values include: "Initial", "Stopped", + "Started", "Starting", "Stopping", "NeedRegistration", "Online", "Limited", "Offline", + "AccessDenied". + :vartype state: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeState + :ivar create_time: The time at which the integration runtime was created, in ISO8601 format. + :vartype create_time: ~datetime.datetime + :ivar task_queue_id: The task queue id of the integration runtime. + :vartype task_queue_id: str + :ivar internal_channel_encryption: It is used to set the encryption mode for node-node + communication channel (when more than 2 self-hosted integration runtime nodes exist). Possible + values include: "NotSet", "SslEncrypted", "NotEncrypted". + :vartype internal_channel_encryption: str or + ~azure.mgmt.datafactory.models.IntegrationRuntimeInternalChannelEncryptionMode + :ivar version: Version of the integration runtime. + :vartype version: str + :param nodes: The list of nodes for this integration runtime. + :type nodes: list[~azure.mgmt.datafactory.models.SelfHostedIntegrationRuntimeNode] + :ivar scheduled_update_date: The date at which the integration runtime will be scheduled to + update, in ISO8601 format. + :vartype scheduled_update_date: ~datetime.datetime + :ivar update_delay_offset: The time in the date scheduled by service to update the integration + runtime, e.g., PT03H is 3 hours. + :vartype update_delay_offset: str + :ivar local_time_zone_offset: The local time zone offset in hours. + :vartype local_time_zone_offset: str + :ivar capabilities: Object with additional information about integration runtime capabilities. + :vartype capabilities: dict[str, str] + :ivar service_urls: The URLs for the services used in integration runtime backend service. 
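
The dotted keys in the _attribute_map entries above ('typeProperties.nodes', 'typeProperties.createTime', and so on) drive msrest's flattening: those attributes are nested under typeProperties in the wire payload and lifted back out on deserialization. A small sketch using the standard msrest Model.serialize():

``` python
from azure.mgmt.datafactory import models

runtime = models.SelfHostedIntegrationRuntime(description="on-prem IR")
body = runtime.serialize()
# Plain keys stay at the top level:
#   {'type': 'SelfHosted', 'description': 'on-prem IR'}
# whereas linked_info, mapped to 'typeProperties.linkedInfo', would be
# emitted as {'typeProperties': {'linkedInfo': {...}}} when set.
```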
+ :vartype service_urls: list[str] + :ivar auto_update: Whether Self-hosted integration runtime auto update has been turned on. + Possible values include: "On", "Off". + :vartype auto_update: str or ~azure.mgmt.datafactory.models.IntegrationRuntimeAutoUpdate + :ivar version_status: Status of the integration runtime version. + :vartype version_status: str + :param links: The list of linked integration runtimes that are created to share with this + integration runtime. + :type links: list[~azure.mgmt.datafactory.models.LinkedIntegrationRuntime] + :ivar pushed_version: The version that the integration runtime is going to update to. + :vartype pushed_version: str + :ivar latest_version: The latest version on download center. + :vartype latest_version: str + :ivar auto_update_eta: The estimated time when the self-hosted integration runtime will be + updated. + :vartype auto_update_eta: ~datetime.datetime + """ + + _validation = { + 'type': {'required': True}, + 'data_factory_name': {'readonly': True}, + 'state': {'readonly': True}, + 'create_time': {'readonly': True}, + 'task_queue_id': {'readonly': True}, + 'internal_channel_encryption': {'readonly': True}, + 'version': {'readonly': True}, + 'scheduled_update_date': {'readonly': True}, + 'update_delay_offset': {'readonly': True}, + 'local_time_zone_offset': {'readonly': True}, + 'capabilities': {'readonly': True}, + 'service_urls': {'readonly': True}, + 'auto_update': {'readonly': True}, + 'version_status': {'readonly': True}, + 'pushed_version': {'readonly': True}, + 'latest_version': {'readonly': True}, + 'auto_update_eta': {'readonly': True}, + } + + _attribute_map = { + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'data_factory_name': {'key': 'dataFactoryName', 'type': 'str'}, + 'state': {'key': 'state', 'type': 'str'}, + 'create_time': {'key': 'typeProperties.createTime', 'type': 'iso-8601'}, + 'task_queue_id': {'key': 'typeProperties.taskQueueId', 'type': 'str'}, + 'internal_channel_encryption': {'key': 'typeProperties.internalChannelEncryption', 'type': 'str'}, + 'version': {'key': 'typeProperties.version', 'type': 'str'}, + 'nodes': {'key': 'typeProperties.nodes', 'type': '[SelfHostedIntegrationRuntimeNode]'}, + 'scheduled_update_date': {'key': 'typeProperties.scheduledUpdateDate', 'type': 'iso-8601'}, + 'update_delay_offset': {'key': 'typeProperties.updateDelayOffset', 'type': 'str'}, + 'local_time_zone_offset': {'key': 'typeProperties.localTimeZoneOffset', 'type': 'str'}, + 'capabilities': {'key': 'typeProperties.capabilities', 'type': '{str}'}, + 'service_urls': {'key': 'typeProperties.serviceUrls', 'type': '[str]'}, + 'auto_update': {'key': 'typeProperties.autoUpdate', 'type': 'str'}, + 'version_status': {'key': 'typeProperties.versionStatus', 'type': 'str'}, + 'links': {'key': 'typeProperties.links', 'type': '[LinkedIntegrationRuntime]'}, + 'pushed_version': {'key': 'typeProperties.pushedVersion', 'type': 'str'}, + 'latest_version': {'key': 'typeProperties.latestVersion', 'type': 'str'}, + 'auto_update_eta': {'key': 'typeProperties.autoUpdateETA', 'type': 'iso-8601'}, + } + + def __init__( + self, + *, + additional_properties: Optional[Dict[str, object]] = None, + nodes: Optional[List["SelfHostedIntegrationRuntimeNode"]] = None, + links: Optional[List["LinkedIntegrationRuntime"]] = None, + **kwargs + ): + super(SelfHostedIntegrationRuntimeStatus, self).__init__(additional_properties=additional_properties, **kwargs) + self.type: str = 'SelfHosted' + self.create_time = None + 
self.task_queue_id = None + self.internal_channel_encryption = None + self.version = None + self.nodes = nodes + self.scheduled_update_date = None + self.update_delay_offset = None + self.local_time_zone_offset = None + self.capabilities = None + self.service_urls = None + self.auto_update = None + self.version_status = None + self.links = links + self.pushed_version = None + self.latest_version = None + self.auto_update_eta = None + + +class SsisObjectMetadata(msrest.serialization.Model): + """SSIS object metadata. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: SsisEnvironment, SsisFolder, SsisPackage, SsisProject. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + _subtype_map = { + 'type': {'Environment': 'SsisEnvironment', 'Folder': 'SsisFolder', 'Package': 'SsisPackage', 'Project': 'SsisProject'} + } + + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(SsisObjectMetadata, self).__init__(**kwargs) + self.type: Optional[str] = None + self.id = id + self.name = name + self.description = description + + +class SsisEnvironment(SsisObjectMetadata): + """Ssis environment. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param folder_id: Folder id which contains environment. + :type folder_id: long + :param variables: Variable in environment. + :type variables: list[~azure.mgmt.datafactory.models.SsisVariable] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'variables': {'key': 'variables', 'type': '[SsisVariable]'}, + } + + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + description: Optional[str] = None, + folder_id: Optional[int] = None, + variables: Optional[List["SsisVariable"]] = None, + **kwargs + ): + super(SsisEnvironment, self).__init__(id=id, name=name, description=description, **kwargs) + self.type: str = 'Environment' + self.folder_id = folder_id + self.variables = variables + + +class SsisEnvironmentReference(msrest.serialization.Model): + """Ssis environment reference. + + :param id: Environment reference id. 
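
The _subtype_map on SsisObjectMetadata above (like the one on SecretBase earlier) is what lets msrest deserialize a payload into the concrete subclass by matching the 'type' discriminator. A sketch with the standard Model.deserialize() classmethod:

``` python
from azure.mgmt.datafactory import models

raw = {"type": "Folder", "id": 1, "name": "ssis"}
obj = models.SsisObjectMetadata.deserialize(raw)
assert isinstance(obj, models.SsisFolder)  # dispatched via _subtype_map
```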
+ :type id: long + :param environment_folder_name: Environment folder name. + :type environment_folder_name: str + :param environment_name: Environment name. + :type environment_name: str + :param reference_type: Reference type. + :type reference_type: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'environment_folder_name': {'key': 'environmentFolderName', 'type': 'str'}, + 'environment_name': {'key': 'environmentName', 'type': 'str'}, + 'reference_type': {'key': 'referenceType', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[int] = None, + environment_folder_name: Optional[str] = None, + environment_name: Optional[str] = None, + reference_type: Optional[str] = None, + **kwargs + ): + super(SsisEnvironmentReference, self).__init__(**kwargs) + self.id = id + self.environment_folder_name = environment_folder_name + self.environment_name = environment_name + self.reference_type = reference_type + + +class SsisFolder(SsisObjectMetadata): + """Ssis folder. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + description: Optional[str] = None, + **kwargs + ): + super(SsisFolder, self).__init__(id=id, name=name, description=description, **kwargs) + self.type: str = 'Folder' + + +class SsisObjectMetadataListResponse(msrest.serialization.Model): + """A list of SSIS object metadata. + + :param value: List of SSIS object metadata. + :type value: list[~azure.mgmt.datafactory.models.SsisObjectMetadata] + :param next_link: The link to the next page of results, if any remaining results exist. + :type next_link: str + """ + + _attribute_map = { + 'value': {'key': 'value', 'type': '[SsisObjectMetadata]'}, + 'next_link': {'key': 'nextLink', 'type': 'str'}, + } + + def __init__( + self, + *, + value: Optional[List["SsisObjectMetadata"]] = None, + next_link: Optional[str] = None, + **kwargs + ): + super(SsisObjectMetadataListResponse, self).__init__(**kwargs) + self.value = value + self.next_link = next_link + + +class SsisPackage(SsisObjectMetadata): + """Ssis Package. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param folder_id: Folder id which contains package. + :type folder_id: long + :param project_version: Project version which contains package. + :type project_version: long + :param project_id: Project id which contains package. + :type project_id: long + :param parameters: Parameters in package. 
+ :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] + """ + + _validation = { + 'type': {'required': True}, + } + + _attribute_map = { + 'type': {'key': 'type', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'project_version': {'key': 'projectVersion', 'type': 'long'}, + 'project_id': {'key': 'projectId', 'type': 'long'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, + } + + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + description: Optional[str] = None, + folder_id: Optional[int] = None, + project_version: Optional[int] = None, + project_id: Optional[int] = None, + parameters: Optional[List["SsisParameter"]] = None, + **kwargs + ): + super(SsisPackage, self).__init__(id=id, name=name, description=description, **kwargs) + self.type: str = 'Package' + self.folder_id = folder_id + self.project_version = project_version + self.project_id = project_id + self.parameters = parameters + + +class SsisParameter(msrest.serialization.Model): + """Ssis parameter. + + :param id: Parameter id. + :type id: long + :param name: Parameter name. + :type name: str + :param description: Parameter description. + :type description: str + :param data_type: Parameter type. + :type data_type: str + :param required: Whether parameter is required. + :type required: bool + :param sensitive: Whether parameter is sensitive. + :type sensitive: bool + :param design_default_value: Design default value of parameter. + :type design_default_value: str + :param default_value: Default value of parameter. + :type default_value: str + :param sensitive_default_value: Default sensitive value of parameter. + :type sensitive_default_value: str + :param value_type: Parameter value type. + :type value_type: str + :param value_set: Parameter value set. + :type value_set: bool + :param variable: Parameter reference variable. 
+ :type variable: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'required': {'key': 'required', 'type': 'bool'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'design_default_value': {'key': 'designDefaultValue', 'type': 'str'}, + 'default_value': {'key': 'defaultValue', 'type': 'str'}, + 'sensitive_default_value': {'key': 'sensitiveDefaultValue', 'type': 'str'}, + 'value_type': {'key': 'valueType', 'type': 'str'}, + 'value_set': {'key': 'valueSet', 'type': 'bool'}, + 'variable': {'key': 'variable', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + description: Optional[str] = None, + data_type: Optional[str] = None, + required: Optional[bool] = None, + sensitive: Optional[bool] = None, + design_default_value: Optional[str] = None, + default_value: Optional[str] = None, + sensitive_default_value: Optional[str] = None, + value_type: Optional[str] = None, + value_set: Optional[bool] = None, + variable: Optional[str] = None, + **kwargs + ): + super(SsisParameter, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.required = required + self.sensitive = sensitive + self.design_default_value = design_default_value + self.default_value = default_value + self.sensitive_default_value = sensitive_default_value + self.value_type = value_type + self.value_set = value_set + self.variable = variable + + +class SsisProject(SsisObjectMetadata): + """Ssis project. + + All required parameters must be populated in order to send to Azure. + + :param type: Required. Type of metadata.Constant filled by server. Possible values include: + "Folder", "Project", "Package", "Environment". + :type type: str or ~azure.mgmt.datafactory.models.SsisObjectMetadataType + :param id: Metadata id. + :type id: long + :param name: Metadata name. + :type name: str + :param description: Metadata description. + :type description: str + :param folder_id: Folder id which contains project. + :type folder_id: long + :param version: Project version. + :type version: long + :param environment_refs: Environment reference in project. + :type environment_refs: list[~azure.mgmt.datafactory.models.SsisEnvironmentReference] + :param parameters: Parameters in project. 
+ :type parameters: list[~azure.mgmt.datafactory.models.SsisParameter] """ _validation = { 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'folder_id': {'key': 'folderId', 'type': 'long'}, + 'version': {'key': 'version', 'type': 'long'}, + 'environment_refs': {'key': 'environmentRefs', 'type': '[SsisEnvironmentReference]'}, + 'parameters': {'key': 'parameters', 'type': '[SsisParameter]'}, } def __init__( self, *, - account_name: str, - repository_name: str, - collaboration_branch: str, - root_folder: str, - last_commit_id: Optional[str] = None, - host_name: Optional[str] = None, + id: Optional[int] = None, + name: Optional[str] = None, + description: Optional[str] = None, + folder_id: Optional[int] = None, + version: Optional[int] = None, + environment_refs: Optional[List["SsisEnvironmentReference"]] = None, + parameters: Optional[List["SsisParameter"]] = None, **kwargs ): - super(FactoryGitHubConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.type: str = 'FactoryGitHubConfiguration' - self.host_name = host_name + super(SsisProject, self).__init__(id=id, name=name, description=description, **kwargs) + self.type: str = 'Project' + self.folder_id = folder_id + self.version = version + self.environment_refs = environment_refs + self.parameters = parameters -class FactoryIdentity(msrest.serialization.Model): - """Identity properties of the factory resource. +class SsisVariable(msrest.serialization.Model): + """Ssis variable. + + :param id: Variable id. + :type id: long + :param name: Variable name. + :type name: str + :param description: Variable description. + :type description: str + :param data_type: Variable type. + :type data_type: str + :param sensitive: Whether variable is sensitive. + :type sensitive: bool + :param value: Variable value. + :type value: str + :param sensitive_value: Variable sensitive value. 
+ :type sensitive_value: str + """ + + _attribute_map = { + 'id': {'key': 'id', 'type': 'long'}, + 'name': {'key': 'name', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'data_type': {'key': 'dataType', 'type': 'str'}, + 'sensitive': {'key': 'sensitive', 'type': 'bool'}, + 'value': {'key': 'value', 'type': 'str'}, + 'sensitive_value': {'key': 'sensitiveValue', 'type': 'str'}, + } + + def __init__( + self, + *, + id: Optional[int] = None, + name: Optional[str] = None, + description: Optional[str] = None, + data_type: Optional[str] = None, + sensitive: Optional[bool] = None, + value: Optional[str] = None, + sensitive_value: Optional[str] = None, + **kwargs + ): + super(SsisVariable, self).__init__(**kwargs) + self.id = id + self.name = name + self.description = description + self.data_type = data_type + self.sensitive = sensitive + self.value = value + self.sensitive_value = sensitive_value - Variables are only populated by the server, and will be ignored when sending a request. + +class TriggerDependencyReference(DependencyReference): + """Trigger referenced dependency. + + You probably want to use the sub-classes and not this class directly. Known + sub-classes are: TumblingWindowTriggerDependencyReference. All required parameters must be populated in order to send to Azure. - :ivar type: Required. The identity type. Currently the only supported type is 'SystemAssigned'. - Default value: "SystemAssigned". - :vartype type: str - :ivar principal_id: The principal id of the identity. - :vartype principal_id: str - :ivar tenant_id: The client tenant id of the identity. - :vartype tenant_id: str + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference """ _validation = { - 'type': {'required': True, 'constant': True}, - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, + 'type': {'required': True}, + 'reference_trigger': {'required': True}, } _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, } - type = "SystemAssigned" + _subtype_map = { + 'type': {'TumblingWindowTriggerDependencyReference': 'TumblingWindowTriggerDependencyReference'} + } def __init__( self, + *, + reference_trigger: "TriggerReference", **kwargs ): - super(FactoryIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None + super(TriggerDependencyReference, self).__init__(**kwargs) + self.type: str = 'TriggerDependencyReference' + self.reference_trigger = reference_trigger -class FactoryListResponse(msrest.serialization.Model): - """A list of factory resources. +class TriggerFilterParameters(msrest.serialization.Model): + """Query parameters for triggers. + + :param continuation_token: The continuation token for getting the next page of results. Null + for first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun + triggers. 
+ :type parent_trigger_name: str + """ + + _attribute_map = { + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, + 'parent_trigger_name': {'key': 'parentTriggerName', 'type': 'str'}, + } + + def __init__( + self, + *, + continuation_token: Optional[str] = None, + parent_trigger_name: Optional[str] = None, + **kwargs + ): + super(TriggerFilterParameters, self).__init__(**kwargs) + self.continuation_token = continuation_token + self.parent_trigger_name = parent_trigger_name + + +class TriggerListResponse(msrest.serialization.Model): + """A list of trigger resources. All required parameters must be populated in order to send to Azure. - :param value: Required. List of factories. - :type value: list[~azure.mgmt.datafactory.models.Factory] + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] :param next_link: The link to the next page of results, if any remaining results exist. :type next_link: str """ @@ -394,224 +3756,342 @@ class FactoryListResponse(msrest.serialization.Model): } _attribute_map = { - 'value': {'key': 'value', 'type': '[Factory]'}, + 'value': {'key': 'value', 'type': '[TriggerResource]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } def __init__( self, *, - value: List["Factory"], + value: List["TriggerResource"], next_link: Optional[str] = None, **kwargs ): - super(FactoryListResponse, self).__init__(**kwargs) + super(TriggerListResponse, self).__init__(**kwargs) self.value = value self.next_link = next_link -class FactoryRepoUpdate(msrest.serialization.Model): - """Factory's git repo information. +class TriggerQueryResponse(msrest.serialization.Model): + """A query of triggers. - :param factory_resource_id: The factory resource id. - :type factory_resource_id: str - :param repo_configuration: Git repo information of the factory. - :type repo_configuration: ~azure.mgmt.datafactory.models.FactoryRepoConfiguration + All required parameters must be populated in order to send to Azure. + + :param value: Required. List of triggers. + :type value: list[~azure.mgmt.datafactory.models.TriggerResource] + :param continuation_token: The continuation token for getting the next page of results, if any + remaining results exist, null otherwise. + :type continuation_token: str """ + _validation = { + 'value': {'required': True}, + } + _attribute_map = { - 'factory_resource_id': {'key': 'factoryResourceId', 'type': 'str'}, - 'repo_configuration': {'key': 'repoConfiguration', 'type': 'FactoryRepoConfiguration'}, + 'value': {'key': 'value', 'type': '[TriggerResource]'}, + 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, } def __init__( self, *, - factory_resource_id: Optional[str] = None, - repo_configuration: Optional["FactoryRepoConfiguration"] = None, + value: List["TriggerResource"], + continuation_token: Optional[str] = None, **kwargs ): - super(FactoryRepoUpdate, self).__init__(**kwargs) - self.factory_resource_id = factory_resource_id - self.repo_configuration = repo_configuration + super(TriggerQueryResponse, self).__init__(**kwargs) + self.value = value + self.continuation_token = continuation_token -class FactoryUpdateParameters(msrest.serialization.Model): - """Parameters for updating a factory resource. +class TriggerReference(msrest.serialization.Model): + """Trigger reference type. - :param tags: A set of tags. The resource tags. - :type tags: dict[str, str] - :param identity: Managed service identity of the factory. 
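
TriggerFilterParameters and TriggerQueryResponse above form a continuation-token paging pair. A hedged sketch of driving them in a loop; 'client' stands for an instantiated management client, and the query_by_factory operation name is an assumption rather than code shown in this hunk:

``` python
from azure.mgmt.datafactory import models

token = None
while True:
    page = client.trigger.query_by_factory(
        resource_group_name="rg",
        factory_name="df",
        filter_parameters=models.TriggerFilterParameters(continuation_token=token),
    )
    for resource in page.value:
        print(resource.name)
    token = page.continuation_token
    if not token:  # a null token means no further pages remain
        break
```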
- :type identity: ~azure.mgmt.datafactory.models.FactoryIdentity + Variables are only populated by the server, and will be ignored when sending a request. + + All required parameters must be populated in order to send to Azure. + + :ivar type: Required. Trigger reference type. Default value: "TriggerReference". + :vartype type: str + :param reference_name: Required. Reference trigger name. + :type reference_name: str """ + _validation = { + 'type': {'required': True, 'constant': True}, + 'reference_name': {'required': True}, + } + _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'FactoryIdentity'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_name': {'key': 'referenceName', 'type': 'str'}, } + type = "TriggerReference" + def __init__( self, *, - tags: Optional[Dict[str, str]] = None, - identity: Optional["FactoryIdentity"] = None, + reference_name: str, **kwargs ): - super(FactoryUpdateParameters, self).__init__(**kwargs) - self.tags = tags - self.identity = identity + super(TriggerReference, self).__init__(**kwargs) + self.reference_name = reference_name -class FactoryVstsConfiguration(FactoryRepoConfiguration): - """Factory's VSTS repo information. +class TriggerResource(SubResource): + """Trigger resource type. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param type: Required. Type of repo configuration.Constant filled by server. - :type type: str - :param account_name: Required. Account name. - :type account_name: str - :param repository_name: Required. Repository name. - :type repository_name: str - :param collaboration_branch: Required. Collaboration branch. - :type collaboration_branch: str - :param root_folder: Required. Root folder. - :type root_folder: str - :param last_commit_id: Last commit id. - :type last_commit_id: str - :param project_name: Required. VSTS project name. - :type project_name: str - :param tenant_id: VSTS tenant id. - :type tenant_id: str + :ivar id: The resource identifier. + :vartype id: str + :ivar name: The resource name. + :vartype name: str + :ivar type: The resource type. + :vartype type: str + :ivar etag: Etag identifies change in the resource. + :vartype etag: str + :param properties: Required. Properties of the trigger. 
+ :type properties: object """ _validation = { - 'type': {'required': True}, - 'account_name': {'required': True}, - 'repository_name': {'required': True}, - 'collaboration_branch': {'required': True}, - 'root_folder': {'required': True}, - 'project_name': {'required': True}, + 'id': {'readonly': True}, + 'name': {'readonly': True}, + 'type': {'readonly': True}, + 'etag': {'readonly': True}, + 'properties': {'required': True}, } _attribute_map = { + 'id': {'key': 'id', 'type': 'str'}, + 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'repository_name': {'key': 'repositoryName', 'type': 'str'}, - 'collaboration_branch': {'key': 'collaborationBranch', 'type': 'str'}, - 'root_folder': {'key': 'rootFolder', 'type': 'str'}, - 'last_commit_id': {'key': 'lastCommitId', 'type': 'str'}, - 'project_name': {'key': 'projectName', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + 'etag': {'key': 'etag', 'type': 'str'}, + 'properties': {'key': 'properties', 'type': 'object'}, } def __init__( self, *, - account_name: str, - repository_name: str, - collaboration_branch: str, - root_folder: str, - project_name: str, - last_commit_id: Optional[str] = None, - tenant_id: Optional[str] = None, + properties: object, **kwargs ): - super(FactoryVstsConfiguration, self).__init__(account_name=account_name, repository_name=repository_name, collaboration_branch=collaboration_branch, root_folder=root_folder, last_commit_id=last_commit_id, **kwargs) - self.type: str = 'FactoryVSTSConfiguration' - self.project_name = project_name - self.tenant_id = tenant_id + super(TriggerResource, self).__init__(**kwargs) + self.properties = properties -class FakeFactoryIdentity(msrest.serialization.Model): - """This is only for az test. +class TriggerSubscriptionOperationStatus(msrest.serialization.Model): + """Defines the response of a trigger subscription operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar trigger_name: Trigger name. + :vartype trigger_name: str + :ivar status: Event Subscription Status. Possible values include: "Enabled", "Provisioning", + "Deprovisioning", "Disabled", "Unknown". + :vartype status: str or ~azure.mgmt.datafactory.models.EventSubscriptionStatus + """ + + _validation = { + 'trigger_name': {'readonly': True}, + 'status': {'readonly': True}, + } + + _attribute_map = { + 'trigger_name': {'key': 'triggerName', 'type': 'str'}, + 'status': {'key': 'status', 'type': 'str'}, + } + + def __init__( + self, + **kwargs + ): + super(TriggerSubscriptionOperationStatus, self).__init__(**kwargs) + self.trigger_name = None + self.status = None + + +class TumblingWindowTrigger(Trigger): + """Trigger that schedules pipeline runs for all fixed time interval windows from a start time without gaps and also supports backfill scenarios (when start time is in the past). + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :param name: Required. .. - :type name: str - :param zones_inside: sample of simple array. - :type zones_inside: list[str] + :param additional_properties: Unmatched properties from the message are deserialized to this + collection. + :type additional_properties: dict[str, object] + :param type: Required. Trigger type.Constant filled by server. + :type type: str + :param description: Trigger description. 
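
TriggerResource above is the ARM envelope: only 'properties' is writable, while id, name, type, and etag are read-only and filled in by the service. A one-line sketch wrapping the ScheduleTrigger built in the earlier sketch for use in a create-or-update call:

``` python
from azure.mgmt.datafactory import models

resource = models.TriggerResource(properties=trigger)  # 'trigger' from the ScheduleTrigger sketch
```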
+ :type description: str + :ivar runtime_state: Indicates if trigger is running or not. Updated when Start/Stop APIs are + called on the Trigger. Possible values include: "Started", "Stopped", "Disabled". + :vartype runtime_state: str or ~azure.mgmt.datafactory.models.TriggerRuntimeState + :param annotations: List of tags that can be used for describing the trigger. + :type annotations: list[object] + :param pipeline: Required. Pipeline for which runs are created when an event is fired for + trigger window that is ready. + :type pipeline: object + :param frequency: Required. The frequency of the time windows. Possible values include: + "Minute", "Hour". + :type frequency: str or ~azure.mgmt.datafactory.models.TumblingWindowFrequency + :param interval: Required. The interval of the time windows. The minimum interval allowed is 15 + Minutes. + :type interval: int + :param start_time: Required. The start time for the time period for the trigger during which + events are fired for windows that are ready. Only UTC time is currently supported. + :type start_time: ~datetime.datetime + :param end_time: The end time for the time period for the trigger during which events are fired + for windows that are ready. Only UTC time is currently supported. + :type end_time: ~datetime.datetime + :param delay: Specifies how long the trigger waits past due time before triggering new run. It + doesn't alter window start and end time. The default is 0. Type: string (or Expression with + resultType string), pattern: ((\d+).)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9])). + :type delay: object + :param max_concurrency: Required. The max number of parallel time windows (ready for execution) + for which a new run is triggered. + :type max_concurrency: int + :param retry_policy: Retry policy that will be applied for failed pipeline runs. + :type retry_policy: ~azure.mgmt.datafactory.models.RetryPolicy + :param depends_on: Triggers that this trigger depends on. Only tumbling window triggers are + supported. 
+ :type depends_on: list[~azure.mgmt.datafactory.models.DependencyReference] """ _validation = { - 'name': {'required': True}, + 'type': {'required': True}, + 'runtime_state': {'readonly': True}, + 'pipeline': {'required': True}, + 'frequency': {'required': True}, + 'interval': {'required': True}, + 'start_time': {'required': True}, + 'max_concurrency': {'required': True, 'maximum': 50, 'minimum': 1}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'zones_inside': {'key': 'zonesInside', 'type': '[str]'}, + 'additional_properties': {'key': '', 'type': '{object}'}, + 'type': {'key': 'type', 'type': 'str'}, + 'description': {'key': 'description', 'type': 'str'}, + 'runtime_state': {'key': 'runtimeState', 'type': 'str'}, + 'annotations': {'key': 'annotations', 'type': '[object]'}, + 'pipeline': {'key': 'pipeline', 'type': 'object'}, + 'frequency': {'key': 'typeProperties.frequency', 'type': 'str'}, + 'interval': {'key': 'typeProperties.interval', 'type': 'int'}, + 'start_time': {'key': 'typeProperties.startTime', 'type': 'iso-8601'}, + 'end_time': {'key': 'typeProperties.endTime', 'type': 'iso-8601'}, + 'delay': {'key': 'typeProperties.delay', 'type': 'object'}, + 'max_concurrency': {'key': 'typeProperties.maxConcurrency', 'type': 'int'}, + 'retry_policy': {'key': 'typeProperties.retryPolicy', 'type': 'RetryPolicy'}, + 'depends_on': {'key': 'typeProperties.dependsOn', 'type': '[DependencyReference]'}, } def __init__( self, *, - name: str, - zones_inside: Optional[List[str]] = None, + pipeline: object, + frequency: Union[str, "TumblingWindowFrequency"], + interval: int, + start_time: datetime.datetime, + max_concurrency: int, + additional_properties: Optional[Dict[str, object]] = None, + description: Optional[str] = None, + annotations: Optional[List[object]] = None, + end_time: Optional[datetime.datetime] = None, + delay: Optional[object] = None, + retry_policy: Optional["RetryPolicy"] = None, + depends_on: Optional[List["DependencyReference"]] = None, **kwargs ): - super(FakeFactoryIdentity, self).__init__(**kwargs) - self.name = name - self.zones_inside = zones_inside + super(TumblingWindowTrigger, self).__init__(additional_properties=additional_properties, description=description, annotations=annotations, **kwargs) + self.type: str = 'TumblingWindowTrigger' + self.pipeline = pipeline + self.frequency = frequency + self.interval = interval + self.start_time = start_time + self.end_time = end_time + self.delay = delay + self.max_concurrency = max_concurrency + self.retry_policy = retry_policy + self.depends_on = depends_on -class GitHubAccessTokenRequest(msrest.serialization.Model): - """Get GitHub access token request definition. +class TumblingWindowTriggerDependencyReference(TriggerDependencyReference): + """Referenced tumbling window trigger dependency. All required parameters must be populated in order to send to Azure. - :param git_hub_access_code: Required. GitHub access code. - :type git_hub_access_code: str - :param git_hub_client_id: GitHub application client ID. - :type git_hub_client_id: str - :param git_hub_access_token_base_url: Required. GitHub access token base URL. - :type git_hub_access_token_base_url: str + :param type: Required. The type of dependency reference.Constant filled by server. + :type type: str + :param reference_trigger: Required. Referenced trigger. + :type reference_trigger: ~azure.mgmt.datafactory.models.TriggerReference + :param offset: Timespan applied to the start time of a tumbling window when evaluating + dependency. 
+ :type offset: str + :param size: The size of the window when evaluating the dependency. If undefined the frequency + of the tumbling window will be used. + :type size: str """ _validation = { - 'git_hub_access_code': {'required': True}, - 'git_hub_access_token_base_url': {'required': True}, + 'type': {'required': True}, + 'reference_trigger': {'required': True}, + 'offset': {'max_length': 15, 'min_length': 8, 'pattern': r'-?((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, + 'size': {'max_length': 15, 'min_length': 8, 'pattern': r'((\d+)\.)?(\d\d):(60|([0-5][0-9])):(60|([0-5][0-9]))'}, } _attribute_map = { - 'git_hub_access_code': {'key': 'gitHubAccessCode', 'type': 'str'}, - 'git_hub_client_id': {'key': 'gitHubClientId', 'type': 'str'}, - 'git_hub_access_token_base_url': {'key': 'gitHubAccessTokenBaseUrl', 'type': 'str'}, + 'type': {'key': 'type', 'type': 'str'}, + 'reference_trigger': {'key': 'referenceTrigger', 'type': 'TriggerReference'}, + 'offset': {'key': 'offset', 'type': 'str'}, + 'size': {'key': 'size', 'type': 'str'}, } def __init__( self, *, - git_hub_access_code: str, - git_hub_access_token_base_url: str, - git_hub_client_id: Optional[str] = None, + reference_trigger: "TriggerReference", + offset: Optional[str] = None, + size: Optional[str] = None, **kwargs ): - super(GitHubAccessTokenRequest, self).__init__(**kwargs) - self.git_hub_access_code = git_hub_access_code - self.git_hub_client_id = git_hub_client_id - self.git_hub_access_token_base_url = git_hub_access_token_base_url + super(TumblingWindowTriggerDependencyReference, self).__init__(reference_trigger=reference_trigger, **kwargs) + self.type: str = 'TumblingWindowTriggerDependencyReference' + self.offset = offset + self.size = size -class GitHubAccessTokenResponse(msrest.serialization.Model): - """Get GitHub access token response definition. +class UpdateIntegrationRuntimeRequest(msrest.serialization.Model): + """Update integration runtime request. - :param git_hub_access_token: GitHub access token. - :type git_hub_access_token: str + :param auto_update: Enables or disables the auto-update feature of the self-hosted integration + runtime. See https://go.microsoft.com/fwlink/?linkid=854189. + :type auto_update: object + :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The + integration runtime auto update will happen on that time. 
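
TumblingWindowTrigger and TumblingWindowTriggerDependencyReference above combine as sketched below; every keyword argument mirrors the __init__ signatures shown, with only the pipeline-reference dict shape assumed (pipeline is typed as a bare object here). The offset and size strings must satisfy the pattern and length rules in the _validation maps:

``` python
import datetime

from azure.mgmt.datafactory import models

tumbling = models.TumblingWindowTrigger(
    pipeline={"pipelineReference": {"referenceName": "myPipeline", "type": "PipelineReference"}},
    frequency="Hour",
    interval=1,
    start_time=datetime.datetime(2020, 6, 1, tzinfo=datetime.timezone.utc),
    max_concurrency=10,  # validated to the 1-50 range
    retry_policy=models.RetryPolicy(count=3, interval_in_seconds=60),
    depends_on=[
        models.TumblingWindowTriggerDependencyReference(
            reference_trigger=models.TriggerReference(reference_name="UpstreamTrigger"),
            offset="-02:00:00",  # matches the -?((\d+)\.)?(\d\d):MM:SS pattern
            size="02:00:00",
        )
    ],
)
```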
+ :type update_delay_offset: str """ _attribute_map = { - 'git_hub_access_token': {'key': 'gitHubAccessToken', 'type': 'str'}, + 'auto_update': {'key': 'autoUpdate', 'type': 'object'}, + 'update_delay_offset': {'key': 'updateDelayOffset', 'type': 'str'}, } def __init__( self, *, - git_hub_access_token: Optional[str] = None, + auto_update: Optional[object] = None, + update_delay_offset: Optional[str] = None, **kwargs ): - super(GitHubAccessTokenResponse, self).__init__(**kwargs) - self.git_hub_access_token = git_hub_access_token + super(UpdateIntegrationRuntimeRequest, self).__init__(**kwargs) + self.auto_update = auto_update + self.update_delay_offset = update_delay_offset class UserAccessPolicy(msrest.serialization.Model): diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py index 77faa5f29..c59809b9a 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/__init__.py @@ -7,7 +7,11 @@ # -------------------------------------------------------------------------- from ._factory_operations import FactoryOperations +from ._trigger_operations import TriggerOperations +from ._integration_runtime_operations import IntegrationRuntimeOperations __all__ = [ 'FactoryOperations', + 'TriggerOperations', + 'IntegrationRuntimeOperations', ] diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py new file mode 100644 index 000000000..51273f9cd --- /dev/null +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_integration_runtime_operations.py @@ -0,0 +1,1178 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class IntegrationRuntimeOperations(object): + """IntegrationRuntimeOperations operations. 
+ + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.IntegrationRuntimeListResponse"] + """Lists integration runtimes. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of IntegrationRuntimeListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.IntegrationRuntimeListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeListResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('IntegrationRuntimeListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + 
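+    # Note: the pager above yields deserialized IntegrationRuntimeResource
+    # objects one at a time. A minimal consumption sketch follows; the client
+    # class, its attribute name, and the credential wiring are illustrative
+    # assumptions, not part of this generated file:
+    #
+    #     from azure.identity import DefaultAzureCredential
+    #     from azure.mgmt.datafactory import DataFactoryManagementClient
+    #
+    #     client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")
+    #     for runtime in client.integration_runtime.list_by_factory("myRg", "myFactory"):
+    #         print(runtime.name)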
list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + properties, # type: object + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeResource" + """Creates or updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param properties: Integration runtime properties. + :type properties: object + :param if_match: ETag of the integration runtime entity. Should only be specified for update, + for which it should match existing entity or can be * for unconditional update. + :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _integration_runtime = models.IntegrationRuntimeResource(properties=properties) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_integration_runtime, 'IntegrationRuntimeResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, 
response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeResource" + """Gets an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param if_none_match: ETag of the integration runtime entity. Should only be specified for get. + If the ETag matches the existing entity tag, or if * was provided, then no content will be + returned. + :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeResource', 
pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + def update( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + auto_update=None, # type: Optional[object] + update_delay_offset=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeResource" + """Updates an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param auto_update: Enables or disables the auto-update feature of the self-hosted integration + runtime. See https://go.microsoft.com/fwlink/?linkid=854189. + :type auto_update: object + :param update_delay_offset: The time offset (in hours) in the day, e.g., PT03H is 3 hours. The + integration runtime auto update will happen on that time. + :type update_delay_offset: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _update_integration_runtime_request = models.UpdateIntegrationRuntimeRequest(auto_update=auto_update, update_delay_offset=update_delay_offset) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_update_integration_runtime_request, 'UpdateIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = 
self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}'} # type: ignore + + def get_status( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> "models.IntegrationRuntimeStatusResponse" + """Gets detailed status information for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get_status.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getStatus'} # type: ignore + + def get_connection_info( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> object + """Gets the on-premises integration runtime connection information for encrypting the on-premises data source credentials. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: object or the result of cls(response) + :rtype: object + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[object] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get_connection_info.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('object', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_connection_info.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/getConnectionInfo'} # type: ignore + + def regenerate_auth_key( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + regenerate_key_parameters, # type: object + **kwargs # type: Any + ): + # type: (...) -> object + """Regenerates the authentication key for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param regenerate_key_parameters: The parameters for regenerating integration runtime + authentication key. 
+ :type regenerate_key_parameters: object + :keyword callable cls: A custom type or function that will be passed the direct response + :return: object or the result of cls(response) + :rtype: object + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[object] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.regenerate_auth_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(regenerate_key_parameters, 'object') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('object', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + regenerate_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/regenerateAuthKey'} # type: ignore + + def list_auth_key( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> object + """Retrieves the authentication keys for an integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: object or the result of cls(response) + :rtype: object + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[object] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.list_auth_key.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('object', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + list_auth_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/listAuthKeys'} # type: ignore + + def _start_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> "models.IntegrationRuntimeStatusResponse" + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + def begin_start( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller + """Starts a ManagedReserved type integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns IntegrationRuntimeStatusResponse + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + integration_runtime_name=integration_runtime_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/start'} # type: ignore + + def _stop_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'} # type: ignore + + def begin_stop( + self, + resource_group_name, 
# type: str
+        factory_name,  # type: str
+        integration_runtime_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> LROPoller
+        """Stops a ManagedReserved type integration runtime.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param integration_runtime_name: The integration runtime name.
+        :type integration_runtime_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword polling: True for ARMPolling, False for no polling, or a
+         polling object for personal polling strategy
+        :paramtype polling: bool or ~azure.core.polling.PollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
+        :return: An instance of LROPoller that returns None
+        :rtype: ~azure.core.polling.LROPoller[None]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        lro_delay = kwargs.pop(
+            'polling_interval',
+            self._config.polling_interval
+        )
+        raw_result = self._stop_initial(
+            resource_group_name=resource_group_name,
+            factory_name=factory_name,
+            integration_runtime_name=integration_runtime_name,
+            cls=lambda x,y,z: x,
+            **kwargs
+        )
+
+        def get_long_running_output(pipeline_response):
+            if cls:
+                return cls(pipeline_response, None, {})
+
+        if polling is True: polling_method = ARMPolling(lro_delay, **kwargs)
+        elif polling is False: polling_method = NoPolling()
+        else: polling_method = polling
+        return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
+    begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/stop'}  # type: ignore
+
+    def sync_credentials(
+        self,
+        resource_group_name,  # type: str
+        factory_name,  # type: str
+        integration_runtime_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Force the integration runtime to synchronize credentials across integration runtime nodes, and this will override the credentials across all worker nodes with those available on the dispatcher node. If you already have the latest credential backup file, you should manually import it (preferred) on any self-hosted integration runtime node rather than using this API directly.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param integration_runtime_name: The integration runtime name.
+ :type integration_runtime_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.sync_credentials.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + sync_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/syncCredentials'} # type: ignore + + def get_monitoring_data( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> object + """Get the integration runtime monitoring data, which includes the monitor data for all the nodes under this integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. 
+        :type integration_runtime_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: object or the result of cls(response)
+        :rtype: object
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[object]
+        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2018-06-01"
+
+        # Construct URL
+        url = self.get_monitoring_data.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+            'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+        header_parameters['Accept'] = 'application/json'
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        deserialized = self._deserialize('object', pipeline_response)
+
+        if cls:
+            return cls(pipeline_response, deserialized, {})
+
+        return deserialized
+    get_monitoring_data.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/monitoringData'}  # type: ignore
+
+    def upgrade(
+        self,
+        resource_group_name,  # type: str
+        factory_name,  # type: str
+        integration_runtime_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Upgrade the self-hosted integration runtime to the latest version if a newer version is available.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param integration_runtime_name: The integration runtime name.
+        :type integration_runtime_name: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :return: None or the result of cls(response)
+        :rtype: None
+        :raises: ~azure.core.exceptions.HttpResponseError
+        """
+        cls = kwargs.pop('cls', None)  # type: ClsType[None]
+        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
+        error_map.update(kwargs.pop('error_map', {}))
+        api_version = "2018-06-01"
+
+        # Construct URL
+        url = self.upgrade.metadata['url']  # type: ignore
+        path_format_arguments = {
+            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
+            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
+            'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+            'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'),
+        }
+        url = self._client.format_url(url, **path_format_arguments)
+
+        # Construct parameters
+        query_parameters = {}  # type: Dict[str, Any]
+        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
+
+        # Construct headers
+        header_parameters = {}  # type: Dict[str, Any]
+
+        # Construct and send request
+        request = self._client.post(url, query_parameters, header_parameters)
+        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
+        response = pipeline_response.http_response
+
+        if response.status_code not in [200]:
+            map_error(status_code=response.status_code, response=response, error_map=error_map)
+            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
+
+        if cls:
+            return cls(pipeline_response, None, {})
+
+    upgrade.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/upgrade'}  # type: ignore
+
+    def remove_link(
+        self,
+        resource_group_name,  # type: str
+        factory_name,  # type: str
+        integration_runtime_name,  # type: str
+        linked_factory_name,  # type: str
+        **kwargs  # type: Any
+    ):
+        # type: (...) -> None
+        """Remove all linked integration runtimes under a specific data factory in a self-hosted integration runtime.
+
+        :param resource_group_name: The resource group name.
+        :type resource_group_name: str
+        :param factory_name: The factory name.
+        :type factory_name: str
+        :param integration_runtime_name: The integration runtime name.
+        :type integration_runtime_name: str
+        :param linked_factory_name: The data factory name for linked integration runtime.
+ :type linked_factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _linked_integration_runtime_request = models.LinkedIntegrationRuntimeRequest(linked_factory_name=linked_factory_name) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.remove_link.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_linked_integration_runtime_request, 'LinkedIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + remove_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/removeLinks'} # type: ignore + + def create_linked_integration_runtime( + self, + resource_group_name, # type: str + factory_name, # type: str + integration_runtime_name, # type: str + name=None, # type: Optional[str] + subscription_id=None, # type: Optional[str] + data_factory_name=None, # type: Optional[str] + data_factory_location=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.IntegrationRuntimeStatusResponse" + """Create a linked integration runtime entry in a shared integration runtime. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param integration_runtime_name: The integration runtime name. + :type integration_runtime_name: str + :param name: The name of the linked integration runtime. 
+ :type name: str + :param subscription_id: The ID of the subscription that the linked integration runtime belongs + to. + :type subscription_id: str + :param data_factory_name: The name of the data factory that the linked integration runtime + belongs to. + :type data_factory_name: str + :param data_factory_location: The location of the data factory that the linked integration + runtime belongs to. + :type data_factory_location: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: IntegrationRuntimeStatusResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.IntegrationRuntimeStatusResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationRuntimeStatusResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _create_linked_integration_runtime_request = models.CreateLinkedIntegrationRuntimeRequest(name=name, subscription_id=subscription_id, data_factory_name=data_factory_name, data_factory_location=data_factory_location) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_linked_integration_runtime.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'integrationRuntimeName': self._serialize.url("integration_runtime_name", integration_runtime_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_create_linked_integration_runtime_request, 'CreateLinkedIntegrationRuntimeRequest') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('IntegrationRuntimeStatusResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_linked_integration_runtime.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/integrationRuntimes/{integrationRuntimeName}/linkedIntegrationRuntime'} # type: 
ignore diff --git a/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py new file mode 100644 index 000000000..c135de638 --- /dev/null +++ b/src/test/scenarios/datafactory/output/src/datafactory/azext_datafactory/vendored_sdks/datafactory/operations/_trigger_operations.py @@ -0,0 +1,848 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- +from typing import TYPE_CHECKING +import warnings + +from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.paging import ItemPaged +from azure.core.pipeline import PipelineResponse +from azure.core.pipeline.transport import HttpRequest, HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod +from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling + +from .. import models + +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union + + T = TypeVar('T') + ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] + +class TriggerOperations(object): + """TriggerOperations operations. + + You should not instantiate this class directly. Instead, you should create a Client instance that + instantiates it for you and attaches it as an attribute. + + :ivar models: Alias to model classes used in this operation group. + :type models: ~azure.mgmt.datafactory.models + :param client: Client for service requests. + :param config: Configuration of service client. + :param serializer: An object model serializer. + :param deserializer: An object model deserializer. + """ + + models = models + + def __init__(self, client, config, serializer, deserializer): + self._client = client + self._serialize = serializer + self._deserialize = deserializer + self._config = config + + def list_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> Iterable["models.TriggerListResponse"] + """Lists triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. 
+ :type factory_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: An iterator like instance of TriggerListResponse or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.datafactory.models.TriggerListResponse] + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerListResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + def prepare_request(next_link=None): + if not next_link: + # Construct URL + url = self.list_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + else: + url = next_link + query_parameters = {} # type: Dict[str, Any] + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + return request + + def extract_data(pipeline_response): + deserialized = self._deserialize('TriggerListResponse', pipeline_response) + list_of_elem = deserialized.value + if cls: + list_of_elem = cls(list_of_elem) + return deserialized.next_link or None, iter(list_of_elem) + + def get_next(next_link=None): + request = prepare_request(next_link) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + return pipeline_response + + return ItemPaged( + get_next, extract_data + ) + list_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers'} # type: ignore + + def query_by_factory( + self, + resource_group_name, # type: str + factory_name, # type: str + continuation_token=None, # type: Optional[str] + parent_trigger_name=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.TriggerQueryResponse" + """Query triggers. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param continuation_token: The continuation token for getting the next page of results. Null + for first page. + :type continuation_token: str + :param parent_trigger_name: The name of the parent TumblingWindowTrigger to get the child rerun + triggers. 
+ :type parent_trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerQueryResponse or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerQueryResponse + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerQueryResponse"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _filter_parameters = models.TriggerFilterParameters(continuation_token=continuation_token, parent_trigger_name=parent_trigger_name) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.query_by_factory.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_filter_parameters, 'TriggerFilterParameters') + body_content_kwargs['content'] = body_content + request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerQueryResponse', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + query_by_factory.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/querytriggers'} # type: ignore + + def create_or_update( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + properties, # type: object + if_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.TriggerResource" + """Creates or updates a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param properties: Properties of the trigger. + :type properties: object + :param if_match: ETag of the trigger entity. Should only be specified for update, for which it + should match existing entity or can be * for unconditional update. 
+ :type if_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerResource + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + + _trigger = models.TriggerResource(properties=properties) + api_version = "2018-06-01" + content_type = kwargs.pop("content_type", "application/json") + + # Construct URL + url = self.create_or_update.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_match is not None: + header_parameters['If-Match'] = self._serialize.header("if_match", if_match, 'str') + header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + body_content_kwargs = {} # type: Dict[str, Any] + body_content = self._serialize.body(_trigger, 'TriggerResource') + body_content_kwargs['content'] = body_content + request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) + + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + def get( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + if_none_match=None, # type: Optional[str] + **kwargs # type: Any + ): + # type: (...) -> "models.TriggerResource" + """Gets a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :param if_none_match: ETag of the trigger entity. Should only be specified for get. If the ETag + matches the existing entity tag, or if * was provided, then no content will be returned. 
+ :type if_none_match: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerResource or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerResource or None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerResource"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + if if_none_match is not None: + header_parameters['If-None-Match'] = self._serialize.header("if_none_match", if_none_match, 'str') + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.get(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 304]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerResource', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + def delete( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + """Deletes a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. 
+ :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.delete.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.delete(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 204]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}'} # type: ignore + + def _subscribe_to_event_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> "models.TriggerSubscriptionOperationStatus" + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._subscribe_to_event_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _subscribe_to_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + def begin_subscribe_to_event( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller + """Subscribe event trigger to events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns TriggerSubscriptionOperationStatus + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = self._subscribe_to_event_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_subscribe_to_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/subscribeToEvents'} # type: ignore + + def get_event_subscription_status( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> "models.TriggerSubscriptionOperationStatus" + """Get a trigger's event subscription status. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. 
+ :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: TriggerSubscriptionOperationStatus or the result of cls(response) + :rtype: ~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus + :raises: ~azure.core.exceptions.HttpResponseError + """ + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self.get_event_subscription_status.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + get_event_subscription_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/getEventSubscriptionStatus'} # type: ignore + + def _unsubscribe_from_event_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> "models.TriggerSubscriptionOperationStatus" + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._unsubscribe_from_event_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + header_parameters['Accept'] = 'application/json' + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200, 202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + deserialized = None + if response.status_code == 200: + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + + return deserialized + _unsubscribe_from_event_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + def begin_unsubscribe_from_event( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller + """Unsubscribe event trigger from events. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns TriggerSubscriptionOperationStatus + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.datafactory.models.TriggerSubscriptionOperationStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType["models.TriggerSubscriptionOperationStatus"] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = self._unsubscribe_from_event_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + deserialized = self._deserialize('TriggerSubscriptionOperationStatus', pipeline_response) + + if cls: + return cls(pipeline_response, deserialized, {}) + return deserialized + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_unsubscribe_from_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/unsubscribeFromEvents'} # type: ignore + + def _start_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._start_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _start_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + def begin_start( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, 
# type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller + """Starts a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. + :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = self._start_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_start.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/start'} # type: ignore + + def _stop_initial( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) 
-> None + cls = kwargs.pop('cls', None) # type: ClsType[None] + error_map = {404: ResourceNotFoundError, 409: ResourceExistsError} + error_map.update(kwargs.pop('error_map', {})) + api_version = "2018-06-01" + + # Construct URL + url = self._stop_initial.metadata['url'] # type: ignore + path_format_arguments = { + 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), + 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'), + 'factoryName': self._serialize.url("factory_name", factory_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]+(?:-[A-Za-z0-9]+)*$'), + 'triggerName': self._serialize.url("trigger_name", trigger_name, 'str', max_length=260, min_length=1, pattern=r'^[A-Za-z0-9_][^<>*#.%&:\\+?/]*$'), + } + url = self._client.format_url(url, **path_format_arguments) + + # Construct parameters + query_parameters = {} # type: Dict[str, Any] + query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') + + # Construct headers + header_parameters = {} # type: Dict[str, Any] + + # Construct and send request + request = self._client.post(url, query_parameters, header_parameters) + pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) + response = pipeline_response.http_response + + if response.status_code not in [200]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + raise HttpResponseError(response=response, error_format=ARMErrorFormat) + + if cls: + return cls(pipeline_response, None, {}) + + _stop_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore + + def begin_stop( + self, + resource_group_name, # type: str + factory_name, # type: str + trigger_name, # type: str + **kwargs # type: Any + ): + # type: (...) -> LROPoller + """Stops a trigger. + + :param resource_group_name: The resource group name. + :type resource_group_name: str + :param factory_name: The factory name. + :type factory_name: str + :param trigger_name: The trigger name. + :type trigger_name: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword polling: True for ARMPolling, False for no polling, or a + polling object for personal polling strategy + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
+ :return: An instance of LROPoller that returns None + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] + cls = kwargs.pop('cls', None) # type: ClsType[None] + lro_delay = kwargs.pop( + 'polling_interval', + self._config.polling_interval + ) + raw_result = self._stop_initial( + resource_group_name=resource_group_name, + factory_name=factory_name, + trigger_name=trigger_name, + cls=lambda x,y,z: x, + **kwargs + ) + + def get_long_running_output(pipeline_response): + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) + elif polling is False: polling_method = NoPolling() + else: polling_method = polling + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + begin_stop.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DataFactory/factories/{factoryName}/triggers/{triggerName}/stop'} # type: ignore diff --git a/src/test/scenarios/datafactory/output/src/datafactory/report.md b/src/test/scenarios/datafactory/output/src/datafactory/report.md index 4df10f3d9..61e7c10ff 100644 --- a/src/test/scenarios/datafactory/output/src/datafactory/report.md +++ b/src/test/scenarios/datafactory/output/src/datafactory/report.md @@ -57,6 +57,160 @@ get-git-hub-access-token a datafactory. |**--git-hub-access-code**|string|GitHub access code.|git_hub_access_code| |**--git-hub-access-token-base-url**|string|GitHub access token base URL.|git_hub_access_token_base_url| |**--git-hub-client-id**|string|GitHub application client ID.|git_hub_client_id| +### datafactory integration-runtime create + +create a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +|**--properties**|any|Integration runtime properties.|properties| +|**--if-match**|string|ETag of the integration runtime entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match| +### datafactory integration-runtime delete + +delete a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime get-connection-info + +get-connection-info a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime get-monitoring-data + +get-monitoring-data a datafactory integration-runtime. 
+ +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime get-status + +get-status a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime linked-integration-runtime create + +linked-integration-runtime create a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +|**--name**|string|The name of the linked integration runtime.|name| +|**--subscription-id**|string|The ID of the subscription that the linked integration runtime belongs to.|subscription_id| +|**--data-factory-name**|string|The name of the data factory that the linked integration runtime belongs to.|data_factory_name| +|**--data-factory-location**|string|The location of the data factory that the linked integration runtime belongs to.|data_factory_location| +### datafactory integration-runtime list + +list a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +### datafactory integration-runtime list-auth-key + +list-auth-key a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime regenerate-auth-key + +regenerate-auth-key a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +|**--regenerate-key-parameters**|any|The parameters for regenerating integration runtime authentication key.|regenerate_key_parameters| +### datafactory integration-runtime remove-link + +remove-link a datafactory integration-runtime. 
+ +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +|**--linked-factory-name**|string|The data factory name for linked integration runtime.|linked_factory_name| +### datafactory integration-runtime show + +show a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +|**--if-none-match**|string|ETag of the integration runtime entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match| +### datafactory integration-runtime start + +start a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime stop + +stop a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime sync-credentials + +sync-credentials a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +### datafactory integration-runtime update + +update a datafactory integration-runtime. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| +|**--auto-update**|any|Enables or disables the auto-update feature of the self-hosted integration runtime. See https://go.microsoft.com/fwlink/?linkid=854189.|auto_update| +|**--update-delay-offset**|string|The time offset (in hours) in the day, e.g., PT03H is 3 hours. The integration runtime auto update will happen on that time.|update_delay_offset| +### datafactory integration-runtime upgrade + +upgrade a datafactory integration-runtime. 
+ +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--integration-runtime-name**|string|The integration runtime name.|integration_runtime_name| ### datafactory list list a datafactory. @@ -73,6 +227,110 @@ show a datafactory. |**--resource-group-name**|string|The resource group name.|resource_group_name| |**--factory-name**|string|The factory name.|factory_name| |**--if-none-match**|string|ETag of the factory entity. Should only be specified for get. If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match| +### datafactory trigger create + +create a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +|**--properties**|any|Properties of the trigger.|properties| +|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match| +### datafactory trigger delete + +delete a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +### datafactory trigger get-event-subscription-status + +get-event-subscription-status a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +### datafactory trigger list + +list a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +### datafactory trigger query-by-factory + +query-by-factory a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--continuation-token**|string|The continuation token for getting the next page of results. Null for first page.|continuation_token| +|**--parent-trigger-name**|string|The name of the parent TumblingWindowTrigger to get the child rerun triggers|parent_trigger_name| +### datafactory trigger show + +show a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +|**--if-none-match**|string|ETag of the trigger entity. Should only be specified for get. 
If the ETag matches the existing entity tag, or if * was provided, then no content will be returned.|if_none_match| +### datafactory trigger start + +start a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +### datafactory trigger stop + +stop a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +### datafactory trigger subscribe-to-event + +subscribe-to-event a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +### datafactory trigger unsubscribe-from-event + +unsubscribe-from-event a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +### datafactory trigger update + +update a datafactory trigger. + +|Option|Type|Description|Path (SDK)|Path (swagger)| +|------|----|-----------|----------|--------------| +|**--resource-group-name**|string|The resource group name.|resource_group_name| +|**--factory-name**|string|The factory name.|factory_name| +|**--trigger-name**|string|The trigger name.|trigger_name| +|**--properties**|any|Properties of the trigger.|properties| +|**--if-match**|string|ETag of the trigger entity. Should only be specified for update, for which it should match existing entity or can be * for unconditional update.|if_match| ### datafactory update update a datafactory.
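The report above pairs each generated az CLI command with the SDK method it calls. As a quick orientation aid, here is a minimal sketch of how the track-2 trigger operations generated in _trigger_operations.py would be driven directly through the SDK. The DataFactoryManagementClient entry point and DefaultAzureCredential are assumptions based on the conventional azure-mgmt-datafactory layout; they are not part of this diff, and only the method signatures shown in the generated file are taken from it.

```python
# Minimal usage sketch for the generated track-2 TriggerOperations class.
# The client entry point and credential type below are assumptions based on
# the usual azure-mgmt-datafactory layout; they are not defined in this diff.
from azure.identity import DefaultAzureCredential
from azure.mgmt.datafactory import DataFactoryManagementClient  # assumed entry point

client = DataFactoryManagementClient(DefaultAzureCredential(), "<subscription-id>")

# create_or_update wraps the bare `properties` payload in models.TriggerResource
# before serializing it, exactly as the generated method body shows above.
client.triggers.create_or_update(
    resource_group_name="myResourceGroup",
    factory_name="myFactory",
    trigger_name="myTrigger",
    properties={"type": "ScheduleTrigger"},  # placeholder trigger definition
)

# begin_start returns an azure.core.polling.LROPoller driven by ARMPolling;
# result() blocks until ARM reports the long-running operation as finished
# (it yields None because _start_initial deserializes no response body).
client.triggers.begin_start(
    resource_group_name="myResourceGroup",
    factory_name="myFactory",
    trigger_name="myTrigger",
).result()

# list_by_factory returns an ItemPaged iterator whose extract_data callback
# follows deserialized.next_link until the service stops paging.
for trigger in client.triggers.list_by_factory("myResourceGroup", "myFactory"):
    print(trigger.name)
```

The same LROPoller pattern applies to begin_stop, begin_subscribe_to_event, and begin_unsubscribe_from_event; per the generated bodies above, only the latter two deserialize a TriggerSubscriptionOperationStatus from the final response.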
diff --git a/src/test/scenarios/managed-network/configuration/readme.az.md b/src/test/scenarios/managed-network/configuration/readme.az.md index a353c2699..31cf3c78a 100644 --- a/src/test/scenarios/managed-network/configuration/readme.az.md +++ b/src/test/scenarios/managed-network/configuration/readme.az.md @@ -11,6 +11,7 @@ az: az-output-folder: $(azure-cli-extension-folder)/src/managed-network python-sdk-output-folder: $(az-output-folder)/azext_managed_network/vendored_sdks/managednetwork +extension-mode: preview directive: - where: diff --git a/src/test/scenarios/managed-network/input/managedNetwork.json b/src/test/scenarios/managed-network/input/managedNetwork.json index 8571277bc..6adb1580e 100644 --- a/src/test/scenarios/managed-network/input/managedNetwork.json +++ b/src/test/scenarios/managed-network/input/managedNetwork.json @@ -39,7 +39,7 @@ "tags": [ "ManagedNetworks" ], - "operationId": "ManagedNetworks_Get", + "operationId": "ManagedNetworks_GetModify", "description": "The Get ManagedNetworks operation gets a Managed Network Resource, specified by the resource group and Managed Network name", "parameters": [ { diff --git a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/azext_metadata.json b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/azext_metadata.json index 7b56fb1e1..07a24290a 100644 --- a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/azext_metadata.json +++ b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/azext_metadata.json @@ -1,4 +1,4 @@ { - "azext.isExperimental": true, + "azext.isPreview": true, "azext.minCliCoreVersion": "2.3.1" } \ No newline at end of file diff --git a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/_help.py b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/_help.py index 37ed80cc0..e8bea713c 100644 --- a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/_help.py +++ b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/_help.py @@ -27,16 +27,6 @@ az managed-network mn list --resource-group "myResourceGroup" """ -helps['managed-network mn show'] = """ - type: command - short-summary: The Get ManagedNetworks operation gets a Managed Network Resource, specified by the resource group a\ -nd Managed Network name - examples: - - name: Get Managed Network - text: |- - az managed-network mn show --name "myManagedNetwork" --resource-group "myResourceGroup" -""" - helps['managed-network mn create'] = """ type: command short-summary: The Put ManagedNetworks operation creates/updates a Managed Network Resource, specified by resource \ @@ -74,16 +64,14 @@ az managed-network mn delete --name "myManagedNetwork" --resource-group "myResourceGroup" """ -helps['managed-network mn wait'] = """ +helps['managed-network mn get-modify'] = """ type: command - short-summary: Place the CLI in a waiting state until a condition of the managed-network mn is met. + short-summary: The Get ManagedNetworks operation gets a Managed Network Resource, specified by the resource group a\ +nd Managed Network name examples: - - name: Pause executing next line of CLI script until the managed-network mn is successfully updated. 
- text: |- - az managed-network mn wait --name "myManagedNetwork" --resource-group "myResourceGroup" --updated - - name: Pause executing next line of CLI script until the managed-network mn is successfully deleted. + - name: Get Managed Network text: |- - az managed-network mn wait --name "myManagedNetwork" --resource-group "myResourceGroup" --deleted + az managed-network mn get-modify --name "myManagedNetwork" --resource-group "myResourceGroup" """ helps['managed-network mn scope-assignment'] = """ @@ -172,6 +160,31 @@ helps['managed-network mn group create'] = """ type: command short-summary: The Put ManagedNetworkGroups operation creates or updates a Managed Network Group resource + parameters: + - name: --subscriptions + short-summary: The collection of subscriptions covered by the Managed Network + long-summary: | + Usage: --subscriptions id=XX + + id: Resource Id + + Multiple actions can be specified by using more than one --subscriptions argument. + - name: --virtual-networks + short-summary: The collection of virtual nets covered by the Managed Network + long-summary: | + Usage: --virtual-networks id=XX + + id: Resource Id + + Multiple actions can be specified by using more than one --virtual-networks argument. + - name: --subnets + short-summary: The collection of subnets covered by the Managed Network + long-summary: | + Usage: --subnets id=XX + + id: Resource Id + + Multiple actions can be specified by using more than one --subnets argument. examples: - name: Create/Update Managed Network Group text: |- @@ -185,6 +198,31 @@ helps['managed-network mn group update'] = """ type: command short-summary: The Put ManagedNetworkGroups operation creates or updates a Managed Network Group resource + parameters: + - name: --subscriptions + short-summary: The collection of subscriptions covered by the Managed Network + long-summary: | + Usage: --subscriptions id=XX + + id: Resource Id + + Multiple actions can be specified by using more than one --subscriptions argument. + - name: --virtual-networks + short-summary: The collection of virtual nets covered by the Managed Network + long-summary: | + Usage: --virtual-networks id=XX + + id: Resource Id + + Multiple actions can be specified by using more than one --virtual-networks argument. + - name: --subnets + short-summary: The collection of subnets covered by the Managed Network + long-summary: | + Usage: --subnets id=XX + + id: Resource Id + + Multiple actions can be specified by using more than one --subnets argument. examples: - name: Create/Update Managed Network Group text: |- @@ -255,6 +293,29 @@ helps['managed-network managed-network-peering-policy hub-and-spoke-topology create'] = """ type: command short-summary: The Put ManagedNetworkPeeringPolicies operation creates/updates a new Managed Network Peering Policy + parameters: + - name: --hub + short-summary: Gets or sets the hub virtual network ID + long-summary: | + Usage: --hub id=XX + + id: Resource Id + - name: --spokes + short-summary: Gets or sets the spokes group IDs + long-summary: | + Usage: --spokes id=XX + + id: Resource Id + + Multiple actions can be specified by using more than one --spokes argument. + - name: --mesh + short-summary: Gets or sets the mesh group IDs + long-summary: | + Usage: --mesh id=XX + + id: Resource Id + + Multiple actions can be specified by using more than one --mesh argument. 
     examples:
       - name: Create/Update Managed Network Peering Policy
         text: |-
@@ -268,6 +329,29 @@ helps['managed-network managed-network-peering-policy hub-and-spoke-topology update'] = """
     type: command
     short-summary: The Put ManagedNetworkPeeringPolicies operation creates/updates a new Managed Network Peering Policy
+    parameters:
+      - name: --hub
+        short-summary: Gets or sets the hub virtual network ID
+        long-summary: |
+            Usage: --hub id=XX
+
+            id: Resource Id
+      - name: --spokes
+        short-summary: Gets or sets the spokes group IDs
+        long-summary: |
+            Usage: --spokes id=XX
+
+            id: Resource Id
+
+            Multiple actions can be specified by using more than one --spokes argument.
+      - name: --mesh
+        short-summary: Gets or sets the mesh group IDs
+        long-summary: |
+            Usage: --mesh id=XX
+
+            id: Resource Id
+
+            Multiple actions can be specified by using more than one --mesh argument.
     examples:
       - name: Create/Update Managed Network Peering Policy
         text: |-
@@ -286,6 +370,29 @@ helps['managed-network managed-network-peering-policy mesh-topology create'] = """
     type: command
     short-summary: The Put ManagedNetworkPeeringPolicies operation creates/updates a new Managed Network Peering Policy
+    parameters:
+      - name: --hub
+        short-summary: Gets or sets the hub virtual network ID
+        long-summary: |
+            Usage: --hub id=XX
+
+            id: Resource Id
+      - name: --spokes
+        short-summary: Gets or sets the spokes group IDs
+        long-summary: |
+            Usage: --spokes id=XX
+
+            id: Resource Id
+
+            Multiple actions can be specified by using more than one --spokes argument.
+      - name: --mesh
+        short-summary: Gets or sets the mesh group IDs
+        long-summary: |
+            Usage: --mesh id=XX
+
+            id: Resource Id
+
+            Multiple actions can be specified by using more than one --mesh argument.
     examples:
       - name: Create/Update Managed Network Peering Policy
         text: |-
@@ -299,6 +406,29 @@ helps['managed-network managed-network-peering-policy mesh-topology update'] = """
     type: command
     short-summary: The Put ManagedNetworkPeeringPolicies operation creates/updates a new Managed Network Peering Policy
+    parameters:
+      - name: --hub
+        short-summary: Gets or sets the hub virtual network ID
+        long-summary: |
+            Usage: --hub id=XX
+
+            id: Resource Id
+      - name: --spokes
+        short-summary: Gets or sets the spokes group IDs
+        long-summary: |
+            Usage: --spokes id=XX
+
+            id: Resource Id
+
+            Multiple actions can be specified by using more than one --spokes argument.
+      - name: --mesh
+        short-summary: Gets or sets the mesh group IDs
+        long-summary: |
+            Usage: --mesh id=XX
+
+            id: Resource Id
+
+            Multiple actions can be specified by using more than one --mesh argument.
     examples:
       - name: Create/Update Managed Network Peering Policy
         text: |-
diff --git a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/_params.py b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/_params.py
index 43d3d26e9..5153cd845 100644
--- a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/_params.py
+++ b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/_params.py
@@ -36,11 +36,6 @@ def load_arguments(self, _):
                    'evious response contains a nextLink element, the value of the nextLink element will include a skipt'
                    'oken parameter that specifies a starting point to use for subsequent calls.')
 
-    with self.argument_context('managed-network mn show') as c:
-        c.argument('resource_group_name', resource_group_name_type)
-        c.argument('managed_network_name', options_list=['--name', '-n'], help='The name of the Managed Network.',
-                   id_part='name')
-
     with self.argument_context('managed-network mn create') as c:
         c.argument('resource_group_name', resource_group_name_type)
         c.argument('managed_network_name', options_list=['--name', '-n'], help='The name of the Managed Network.')
@@ -61,6 +56,11 @@ def load_arguments(self, _):
         c.argument('managed_network_name', options_list=['--name', '-n'], help='The name of the Managed Network.',
                    id_part='name')
 
+    with self.argument_context('managed-network mn get-modify') as c:
+        c.argument('resource_group_name', resource_group_name_type)
+        c.argument('managed_network_name', options_list=['--name', '-n'], help='The name of the Managed Network.',
+                   id_part='name')
+
     with self.argument_context('managed-network mn scope-assignment list') as c:
         c.argument('scope', help='The base resource of the scope assignment.')
 
@@ -120,11 +120,11 @@ def load_arguments(self, _):
         c.argument('management_groups', arg_type=CLIArgumentType(options_list=['--management-groups'], help='The collec'
                    'tion of management groups covered by the Managed Network Expected value: json-string/@json-file.'))
         c.argument('subscriptions', action=AddSubscriptions, nargs='+', help='The collection of subscriptions covered b'
-                   'y the Managed Network Expect value: id=xx.')
+                   'y the Managed Network')
         c.argument('virtual_networks', action=AddVirtualNetworks, nargs='+', help='The collection of virtual nets cover'
-                   'ed by the Managed Network Expect value: id=xx.')
+                   'ed by the Managed Network')
         c.argument('subnets', action=AddSubnets, nargs='+', help='The collection of subnets covered by the Managed Net'
-                   'work Expect value: id=xx.')
+                   'work')
 
     with self.argument_context('managed-network mn group update') as c:
         c.argument('resource_group_name', resource_group_name_type)
@@ -135,17 +135,22 @@ def load_arguments(self, _):
         c.argument('management_groups', arg_type=CLIArgumentType(options_list=['--management-groups'], help='The collec'
                    'tion of management groups covered by the Managed Network Expected value: json-string/@json-file.'))
         c.argument('subscriptions', action=AddSubscriptions, nargs='+', help='The collection of subscriptions covered b'
-                   'y the Managed Network Expect value: id=xx.')
+                   'y the Managed Network')
        c.argument('virtual_networks', action=AddVirtualNetworks, nargs='+', help='The collection of virtual nets cover'
-                   'ed by the Managed Network Expect value: id=xx.')
+                   'ed by the Managed Network')
         c.argument('subnets', action=AddSubnets, nargs='+', help='The collection of subnets covered by the Managed Net'
-                   'work Expect value: id=xx.')
+                   'work')
 
     with self.argument_context('managed-network mn group delete') as c:
         c.argument('resource_group_name', resource_group_name_type)
         c.argument('managed_network_name', help='The name of the Managed Network.', id_part='name')
         c.argument('group_name', help='The name of the Managed Network Group.', id_part='child_name_1')
 
+    with self.argument_context('managed-network mn group wait') as c:
+        c.argument('resource_group_name', resource_group_name_type)
+        c.argument('managed_network_name', help='The name of the Managed Network.', id_part='name')
+        c.argument('group_name', help='The name of the Managed Network Group.', id_part='child_name_1')
+
     with self.argument_context('managed-network managed-network-peering-policy list') as c:
         c.argument('resource_group_name', resource_group_name_type)
         c.argument('managed_network_name', help='The name of the Managed Network.')
@@ -165,11 +170,9 @@ def load_arguments(self, _):
         c.argument('policy_name', help='The name of the Managed Network Peering Policy.')
         c.argument('location', arg_type=get_location_type(self.cli_ctx),
                    validator=get_default_location_from_resource_group)
-        c.argument('hub', action=AddHub, nargs='+',
-                   help='Gets or sets the hub virtual network ID Expect value: id=xx.')
-        c.argument('spokes', action=AddSpokes, nargs='+',
-                   help='Gets or sets the spokes group IDs Expect value: id=xx.')
-        c.argument('mesh', action=AddMesh, nargs='+', help='Gets or sets the mesh group IDs Expect value: id=xx.')
+        c.argument('hub', action=AddHub, nargs='+', help='Gets or sets the hub virtual network ID')
+        c.argument('spokes', action=AddSpokes, nargs='+', help='Gets or sets the spokes group IDs')
+        c.argument('mesh', action=AddMesh, nargs='+', help='Gets or sets the mesh group IDs')
 
     with self.argument_context('managed-network managed-network-peering-policy hub-and-spoke-topology update') as c:
         c.argument('resource_group_name', resource_group_name_type)
@@ -177,11 +180,9 @@ def load_arguments(self, _):
         c.argument('policy_name', help='The name of the Managed Network Peering Policy.', id_part='child_name_1')
         c.argument('location', arg_type=get_location_type(self.cli_ctx),
                    validator=get_default_location_from_resource_group)
-        c.argument('hub', action=AddHub, nargs='+',
-                   help='Gets or sets the hub virtual network ID Expect value: id=xx.')
-        c.argument('spokes', action=AddSpokes, nargs='+',
-                   help='Gets or sets the spokes group IDs Expect value: id=xx.')
-        c.argument('mesh', action=AddMesh, nargs='+', help='Gets or sets the mesh group IDs Expect value: id=xx.')
+        c.argument('hub', action=AddHub, nargs='+', help='Gets or sets the hub virtual network ID')
+        c.argument('spokes', action=AddSpokes, nargs='+', help='Gets or sets the spokes group IDs')
+        c.argument('mesh', action=AddMesh, nargs='+', help='Gets or sets the mesh group IDs')
         c.ignore('managed_network_peering_policy_name', 'properties')
 
     with self.argument_context('managed-network managed-network-peering-policy mesh-topology create') as c:
@@ -190,11 +191,9 @@ def load_arguments(self, _):
         c.argument('policy_name', help='The name of the Managed Network Peering Policy.')
         c.argument('location', arg_type=get_location_type(self.cli_ctx),
                    validator=get_default_location_from_resource_group)
-        c.argument('hub', action=AddHub, nargs='+',
-                   help='Gets or sets the hub virtual network ID Expect value: id=xx.')
-        c.argument('spokes', action=AddSpokes, nargs='+',
-                   help='Gets or sets the spokes group IDs Expect value: id=xx.')
-        c.argument('mesh', action=AddMesh, nargs='+', help='Gets or sets the mesh group IDs Expect value: id=xx.')
+        c.argument('hub', action=AddHub, nargs='+', help='Gets or sets the hub virtual network ID')
+        c.argument('spokes', action=AddSpokes, nargs='+', help='Gets or sets the spokes group IDs')
+        c.argument('mesh', action=AddMesh, nargs='+', help='Gets or sets the mesh group IDs')
 
     with self.argument_context('managed-network managed-network-peering-policy mesh-topology update') as c:
         c.argument('resource_group_name', resource_group_name_type)
@@ -202,14 +201,17 @@ def load_arguments(self, _):
         c.argument('policy_name', help='The name of the Managed Network Peering Policy.', id_part='child_name_1')
         c.argument('location', arg_type=get_location_type(self.cli_ctx),
                    validator=get_default_location_from_resource_group)
-        c.argument('hub', action=AddHub, nargs='+',
-                   help='Gets or sets the hub virtual network ID Expect value: id=xx.')
-        c.argument('spokes', action=AddSpokes, nargs='+',
-                   help='Gets or sets the spokes group IDs Expect value: id=xx.')
-        c.argument('mesh', action=AddMesh, nargs='+', help='Gets or sets the mesh group IDs Expect value: id=xx.')
+        c.argument('hub', action=AddHub, nargs='+', help='Gets or sets the hub virtual network ID')
+        c.argument('spokes', action=AddSpokes, nargs='+', help='Gets or sets the spokes group IDs')
+        c.argument('mesh', action=AddMesh, nargs='+', help='Gets or sets the mesh group IDs')
         c.ignore('managed_network_peering_policy_name', 'properties')
 
     with self.argument_context('managed-network managed-network-peering-policy delete') as c:
         c.argument('resource_group_name', resource_group_name_type)
         c.argument('managed_network_name', help='The name of the Managed Network.', id_part='name')
         c.argument('policy_name', help='The name of the Managed Network Peering Policy.', id_part='child_name_1')
+
+    with self.argument_context('managed-network managed-network-peering-policy wait') as c:
+        c.argument('resource_group_name', resource_group_name_type)
+        c.argument('managed_network_name', help='The name of the Managed Network.', id_part='name')
+        c.argument('policy_name', help='The name of the Managed Network Peering Policy.', id_part='child_name_1')
diff --git a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/commands.py b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/commands.py
index 981039751..e879297b2 100644
--- a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/commands.py
+++ b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/commands.py
@@ -18,14 +18,12 @@ def load_command_table(self, _):
         operations_tmpl='azext_managed_network.vendored_sdks.managednetwork.operations._managed_network_operations#Mana'
                         'gedNetworkOperations.{}',
         client_factory=cf_mn)
-    with self.command_group('managed-network mn', managed_network_mn, client_factory=cf_mn,
-                            is_experimental=True) as g:
+    with self.command_group('managed-network mn', managed_network_mn, client_factory=cf_mn, is_preview=True) as g:
         g.custom_command('list', 'managed_network_mn_list')
-        g.custom_show_command('show', 'managed_network_mn_show')
         g.custom_command('create', 'managed_network_mn_create')
-        g.custom_command('update', 'managed_network_mn_update', supports_no_wait=True)
-        g.custom_command('delete', 'managed_network_mn_delete', supports_no_wait=True)
-        g.wait_command('wait')
+        g.custom_command('update', 'managed_network_mn_update')
+        g.custom_command('delete', 'managed_network_mn_delete')
+        g.custom_command('get-modify', 'managed_network_mn_get_modify')
 
     from azext_managed_network.generated._client_factory import cf_scope_assignment
     managed_network_scope_assignment = CliCommandType(
@@ -52,7 +50,7 @@ def load_command_table(self, _):
         g.custom_command('create', 'managed_network_mn_group_create', supports_no_wait=True)
         g.custom_command('update', 'managed_network_mn_group_update', supports_no_wait=True)
         g.custom_command('delete', 'managed_network_mn_group_delete', supports_no_wait=True)
-        g.wait_command('wait')
+        g.custom_wait_command('wait', 'managed_network_mn_group_show')
 
     from azext_managed_network.generated._client_factory import cf_managed_network_peering_policy
     managed_network_managed_network_peering_policy = CliCommandType(
@@ -61,7 +59,7 @@ def load_command_table(self, _):
         client_factory=cf_managed_network_peering_policy)
     with self.command_group('managed-network managed-network-peering-policy',
                             managed_network_managed_network_peering_policy,
-                            client_factory=cf_managed_network_peering_policy, is_experimental=True) as g:
+                            client_factory=cf_managed_network_peering_policy, is_preview=True) as g:
         g.custom_command('list', 'managed_network_managed_network_peering_policy_list')
         g.custom_show_command('show', 'managed_network_managed_network_peering_policy_show')
         g.custom_command('hub-and-spoke-topology create', 'managed_network_managed_network_peering_policy_hub_and_spoke'
@@ -75,4 +73,4 @@ def load_command_table(self, _):
                          '_update', custom_func_name = 'managed_network_managed_network_peering_policy_mesh_top'
                          'ology_update', supports_no_wait=True)
         g.custom_command('delete', 'managed_network_managed_network_peering_policy_delete', supports_no_wait=True)
-        g.wait_command('wait')
+        g.custom_wait_command('wait', 'managed_network_managed_network_peering_policy_show')
diff --git a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/custom.py b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/custom.py
index a0ca55993..dac335d28 100644
--- a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/custom.py
+++ b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/generated/custom.py
@@ -10,6 +10,7 @@
 # pylint: disable=too-many-lines
 
 import json
+from azure.cli.core.util import sdk_no_wait
 
 
 def managed_network_mn_list(cmd, client,
@@ -24,13 +25,6 @@ def managed_network_mn_list(cmd, client,
                        skiptoken=skiptoken)
 
 
-def managed_network_mn_show(cmd, client,
-                            resource_group_name,
-                            managed_network_name):
-    return client.get(resource_group_name=resource_group_name,
-                      managed_network_name=managed_network_name)
-
-
 def managed_network_mn_create(cmd, client,
                               resource_group_name,
                               managed_network_name,
@@ -62,6 +56,13 @@ def managed_network_mn_delete(cmd, client,
                               managed_network_name=managed_network_name)
 
 
+def managed_network_mn_get_modify(cmd, client,
+                                  resource_group_name,
+                                  managed_network_name):
+    return client.get_modify(resource_group_name=resource_group_name,
+                             managed_network_name=managed_network_name)
+
+
 def managed_network_mn_scope_assignment_list(cmd, client,
                                              scope):
     return client.list(scope=scope)
@@ -131,17 +132,20 @@ def managed_network_mn_group_create(cmd, client,
                                     management_groups=None,
                                     subscriptions=None,
                                     virtual_networks=None,
-                                    subnets=None):
+                                    subnets=None,
+                                    no_wait=False):
     if isinstance(management_groups, str):
         management_groups = json.loads(management_groups)
-    return client.begin_create_or_update(resource_group_name=resource_group_name,
-                                         managed_network_name=managed_network_name,
-                                         managed_network_group_name=group_name,
-                                         location=location,
-                                         management_groups=management_groups,
-                                         subscriptions=subscriptions,
-                                         virtual_networks=virtual_networks,
-                                         subnets=subnets)
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       managed_network_name=managed_network_name,
+                       managed_network_group_name=group_name,
+                       location=location,
+                       management_groups=management_groups,
+                       subscriptions=subscriptions,
+                       virtual_networks=virtual_networks,
+                       subnets=subnets)
 
 
 def managed_network_mn_group_update(cmd, client,
@@ -152,26 +156,32 @@ def managed_network_mn_group_update(cmd, client,
                                     management_groups=None,
                                     subscriptions=None,
                                     virtual_networks=None,
-                                    subnets=None):
+                                    subnets=None,
+                                    no_wait=False):
     if isinstance(management_groups, str):
         management_groups = json.loads(management_groups)
-    return client.begin_create_or_update(resource_group_name=resource_group_name,
-                                         managed_network_name=managed_network_name,
-                                         managed_network_group_name=group_name,
-                                         location=location,
-                                         management_groups=management_groups,
-                                         subscriptions=subscriptions,
-                                         virtual_networks=virtual_networks,
-                                         subnets=subnets)
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       managed_network_name=managed_network_name,
+                       managed_network_group_name=group_name,
+                       location=location,
+                       management_groups=management_groups,
+                       subscriptions=subscriptions,
+                       virtual_networks=virtual_networks,
+                       subnets=subnets)
 
 
 def managed_network_mn_group_delete(cmd, client,
                                     resource_group_name,
                                     managed_network_name,
-                                    group_name):
-    return client.begin_delete(resource_group_name=resource_group_name,
-                               managed_network_name=managed_network_name,
-                               managed_network_group_name=group_name)
+                                    group_name,
+                                    no_wait=False):
+    return sdk_no_wait(no_wait,
+                       client.begin_delete,
+                       resource_group_name=resource_group_name,
+                       managed_network_name=managed_network_name,
+                       managed_network_group_name=group_name)
 
 
 def managed_network_managed_network_peering_policy_list(cmd, client,
@@ -201,17 +211,20 @@ def managed_network_managed_network_peering_policy_hub_and_spoke_topology_create
                                                                                  location,
                                                                                  hub=None,
                                                                                  spokes=None,
-                                                                                 mesh=None):
+                                                                                 mesh=None,
+                                                                                 no_wait=False):
     properties = {}
     properties['type'] = 'HubAndSpokeTopology'
     properties['hub'] = hub
     properties['spokes'] = spokes
     properties['mesh'] = mesh
-    return client.begin_create_or_update(resource_group_name=resource_group_name,
-                                         managed_network_name=managed_network_name,
-                                         managed_network_peering_policy_name=policy_name,
-                                         location=location,
-                                         properties=properties)
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       managed_network_name=managed_network_name,
+                       managed_network_peering_policy_name=policy_name,
+                       location=location,
+                       properties=properties)
 
 
 def managed_network_managed_network_peering_policy_hub_and_spoke_topology_update(instance, cmd,
@@ -221,7 +234,8 @@ def managed_network_managed_network_peering_policy_hub_and_spoke_topology_update
                                                                                  location,
                                                                                  hub=None,
                                                                                  spokes=None,
-                                                                                 mesh=None):
+                                                                                 mesh=None,
+                                                                                 no_wait=False):
     instance.type = 'HubAndSpokeTopology'
     instance.hub = hub
     instance.spokes = spokes
@@ -236,17 +250,20 @@ def managed_network_managed_network_peering_policy_mesh_topology_create(cmd, cli
                                                                         location,
                                                                         hub=None,
                                                                         spokes=None,
-                                                                        mesh=None):
+                                                                        mesh=None,
+                                                                        no_wait=False):
     properties = {}
     properties['type'] = 'MeshTopology'
     properties['hub'] = hub
     properties['spokes'] = spokes
     properties['mesh'] = mesh
-    return client.begin_create_or_update(resource_group_name=resource_group_name,
-                                         managed_network_name=managed_network_name,
-                                         managed_network_peering_policy_name=policy_name,
-                                         location=location,
-                                         properties=properties)
+    return sdk_no_wait(no_wait,
+                       client.begin_create_or_update,
+                       resource_group_name=resource_group_name,
+                       managed_network_name=managed_network_name,
+                       managed_network_peering_policy_name=policy_name,
+                       location=location,
+                       properties=properties)
 
 
 def managed_network_managed_network_peering_policy_mesh_topology_update(instance, cmd,
@@ -256,7 +273,8 @@ def managed_network_managed_network_peering_policy_mesh_topology_update(instance
                                                                         location,
                                                                         hub=None,
                                                                         spokes=None,
-                                                                        mesh=None):
+                                                                        mesh=None,
+                                                                        no_wait=False):
     instance.type = 'MeshTopology'
     instance.hub = hub
     instance.spokes = spokes
@@ -267,7 +285,10 @@ def managed_network_managed_network_peering_policy_mesh_topology_update(instance
 def managed_network_managed_network_peering_policy_delete(cmd, client,
                                                           resource_group_name,
                                                           managed_network_name,
-                                                          policy_name):
-    return client.begin_delete(resource_group_name=resource_group_name,
-                               managed_network_name=managed_network_name,
-                               managed_network_peering_policy_name=policy_name)
+                                                          policy_name,
+                                                          no_wait=False):
+    return sdk_no_wait(no_wait,
+                       client.begin_delete,
+                       resource_group_name=resource_group_name,
+                       managed_network_name=managed_network_name,
+                       managed_network_peering_policy_name=policy_name)
diff --git a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/tests/latest/test_managed_network_scenario.py b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/tests/latest/test_managed_network_scenario.py
index 9a61ad866..d865bd3c2 100644
--- a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/tests/latest/test_managed_network_scenario.py
+++ b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/tests/latest/test_managed_network_scenario.py
@@ -91,7 +91,7 @@ def step_managednetworkpeeringpoliciesput(test, rg):
 # EXAMPLE: ManagedNetworksGet
 @try_manual
 def step_managednetworksget(test, rg):
-    test.cmd('az managed-network mn show '
+    test.cmd('az managed-network mn get-modify '
             '--name "{myManagedNetwork}" '
             '--resource-group "{rg}"',
             checks=[])
diff --git a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/vendored_sdks/managednetwork/aio/operations_async/_managed_network_operations_async.py b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/vendored_sdks/managednetwork/aio/operations_async/_managed_network_operations_async.py
index 4078b196a..9880c083c 100644
--- a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/vendored_sdks/managednetwork/aio/operations_async/_managed_network_operations_async.py
+++ b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/vendored_sdks/managednetwork/aio/operations_async/_managed_network_operations_async.py
@@ -43,7 +43,7 @@ def __init__(self, client, config, serializer, deserializer) -> None:
         self._deserialize = deserializer
         self._config = config
 
-    async def get(
+    async def get_modify(
         self,
         resource_group_name: str,
         managed_network_name: str,
@@ -66,7 +66,7 @@ async def get(
         api_version = "2019-06-01-preview"
 
         # Construct URL
-        url = self.get.metadata['url']  # type: ignore
+        url = self.get_modify.metadata['url']  # type: ignore
         path_format_arguments = {
             'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
             'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -98,7 +98,7 @@ async def get(
             return cls(pipeline_response, deserialized, {})
 
         return deserialized
-    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedNetwork/managedNetworks/{managedNetworkName}'}  # type: ignore
+    get_modify.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedNetwork/managedNetworks/{managedNetworkName}'}  # type: ignore
 
     async def create_or_update(
         self,
diff --git a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/vendored_sdks/managednetwork/operations/_managed_network_operations.py b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/vendored_sdks/managednetwork/operations/_managed_network_operations.py
index da0bf2956..eaeda9700 100644
--- a/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/vendored_sdks/managednetwork/operations/_managed_network_operations.py
+++ b/src/test/scenarios/managed-network/output/src/managed-network/azext_managed_network/vendored_sdks/managednetwork/operations/_managed_network_operations.py
@@ -47,7 +47,7 @@ def __init__(self, client, config, serializer, deserializer):
         self._deserialize = deserializer
         self._config = config
 
-    def get(
+    def get_modify(
         self,
         resource_group_name,  # type: str
         managed_network_name,  # type: str
@@ -71,7 +71,7 @@ def get(
         api_version = "2019-06-01-preview"
 
         # Construct URL
-        url = self.get.metadata['url']  # type: ignore
+        url = self.get_modify.metadata['url']  # type: ignore
         path_format_arguments = {
             'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
             'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
@@ -103,7 +103,7 @@ def get(
             return cls(pipeline_response, deserialized, {})
 
         return deserialized
-    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedNetwork/managedNetworks/{managedNetworkName}'}  # type: ignore
+    get_modify.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedNetwork/managedNetworks/{managedNetworkName}'}  # type: ignore
 
     def create_or_update(
         self,
diff --git a/src/test/scenarios/managed-network/output/src/managed-network/report.md b/src/test/scenarios/managed-network/output/src/managed-network/report.md
index 40540e02a..ecba15b69 100644
--- a/src/test/scenarios/managed-network/output/src/managed-network/report.md
+++ b/src/test/scenarios/managed-network/output/src/managed-network/report.md
@@ -95,6 +95,14 @@ create a managed-network mn.
 delete a managed-network mn.
 
+|Option|Type|Description|Path (SDK)|Path (swagger)|
+|------|----|-----------|----------|--------------|
+|**--resource-group-name**|string|The name of the resource group.|resource_group_name|
+|**--managed-network-name**|string|The name of the Managed Network.|managed_network_name|
+### managed-network mn get-modify
+
+get-modify a managed-network mn.
+
 |Option|Type|Description|Path (SDK)|Path (swagger)|
 |------|----|-----------|----------|--------------|
 |**--resource-group-name**|string|The name of the resource group.|resource_group_name|
 |**--managed-network-name**|string|The name of the Managed Network.|managed_network_name|
@@ -207,14 +215,6 @@ create a managed-network mn scope-assignment.
 |**--scope-assignment-name**|string|The name of the scope assignment to create.|scope_assignment_name|
 |**--location**|string|The geo-location where the resource lives|location|
 |**--assigned-managed-network**|string|The managed network ID with scope will be assigned to.|assigned_managed_network|
-### managed-network mn show
-
-show a managed-network mn.
-
-|Option|Type|Description|Path (SDK)|Path (swagger)|
-|------|----|-----------|----------|--------------|
-|**--resource-group-name**|string|The name of the resource group.|resource_group_name|
-|**--managed-network-name**|string|The name of the Managed Network.|managed_network_name|
 ### managed-network mn update
 
 update a managed-network mn.
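
Note: the regenerated files above all follow one long-running-operation pattern, reduced to a minimal sketch below. This is illustrative only, not generator output; the function name `example_mn_group_delete` is hypothetical, while `sdk_no_wait` (from `azure.cli.core.util`) and `custom_wait_command` are the real azure-cli APIs the generated code switches to. With `supports_no_wait=True` on the command, passing `--no-wait` makes `sdk_no_wait` inject `polling=False` into the SDK call, so the `begin_*` operation returns without being polled to completion; the group-level `wait` command lets the user block later, and because it resolves the resource state through a show function, the generator now emits `g.custom_wait_command('wait', '<custom show function>')` and only for command groups that still expose a show-style command.

```python
# A minimal sketch of the pattern in the regenerated custom.py; the function
# name is hypothetical, sdk_no_wait is the real helper from azure-cli-core.
from azure.cli.core.util import sdk_no_wait


def example_mn_group_delete(client, resource_group_name, managed_network_name,
                            group_name, no_wait=False):
    # When --no-wait is passed, sdk_no_wait adds polling=False to the kwargs,
    # so the SDK's begin_delete returns immediately instead of polling the
    # long-running operation; otherwise begin_delete is called unchanged and
    # the CLI waits on the returned LROPoller.
    return sdk_no_wait(no_wait,
                       client.begin_delete,
                       resource_group_name=resource_group_name,
                       managed_network_name=managed_network_name,
                       managed_network_group_name=group_name)
```

A user would then run the delete with `--no-wait` and later block on completion with `az managed-network mn group wait --deleted ...`, which polls through the `managed_network_mn_group_show` custom function named in the generated commands.py.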