Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[TA] CodeGen based on 3.1-preview.3 #17177

Closed
wants to merge 2 commits into from
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -215,8 +215,9 @@ private Mono<Response<AnalyzeSentimentResultCollection>> getAnalyzedSentimentRes
}
return service.sentimentWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
modelVersion, includeStatistics, includeOpinionMining, StringIndexType.UTF16CODE_UNIT,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
modelVersion, includeStatistics, includeOpinionMining, StringIndexType.UTF16CODE_UNIT
)
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Analyzed sentiment for a batch of documents - {}", response))
.doOnError(error -> logger.warning("Failed to analyze sentiment - {}", error))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -150,15 +150,15 @@ private Response<DetectLanguageResultCollection> toTextAnalyticsResultDocumentRe
private Mono<Response<DetectLanguageResultCollection>> getDetectedLanguageResponse(
Iterable<DetectLanguageInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.languagesWithResponseAsync(
new LanguageBatchInput().setDocuments(toLanguageInput(documents)),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Detected languages for a batch of documents - {}",
response.getValue()))
.doOnError(error -> logger.warning("Failed to detect language - {}", error))
.map(this::toTextAnalyticsResultDocumentResponse)
.onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable));
new LanguageBatchInput().setDocuments(toLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics())
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Detected languages for a batch of documents - {}",
response.getValue()))
.doOnError(error -> logger.warning("Failed to detect language - {}", error))
.map(this::toTextAnalyticsResultDocumentResponse)
.onErrorMap(throwable -> mapToHttpResponseExceptionIfExist(throwable));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -182,9 +182,9 @@ private Mono<Response<ExtractKeyPhrasesResultCollection>> getExtractedKeyPhrases
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.keyPhrasesWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
options == null ? null : options.isIncludeStatistics())
.doOnSubscribe(ignoredValue -> logger.info("A batch of document - {}", documents.toString()))
.doOnSuccess(response -> logger.info("A batch of key phrases output - {}", response.getValue()))
.doOnError(error -> logger.warning("Failed to extract key phrases - {}", error))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -187,10 +187,10 @@ private Mono<Response<RecognizeEntitiesResultCollection>> getRecognizedEntitiesR
Iterable<TextDocumentInput> documents, TextAnalyticsRequestOptions options, Context context) {
return service.entitiesRecognitionGeneralWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
StringIndexType.UTF16CODE_UNIT,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
StringIndexType.UTF16CODE_UNIT)
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Recognized entities for a batch of documents- {}",
response.getValue()))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -201,10 +201,10 @@ private IterableStream<LinkedEntity> mapLinkedEntity(
Context context) {
return service.entitiesLinkingWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
options == null ? null : options.getModelVersion(),
options == null ? null : options.isIncludeStatistics(),
StringIndexType.UTF16CODE_UNIT,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
StringIndexType.UTF16CODE_UNIT)
.doOnSubscribe(ignoredValue -> logger.info("A batch of documents - {}", documents.toString()))
.doOnSuccess(response -> logger.info("Recognized linked entities for a batch of documents - {}",
response.getValue()))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import com.azure.ai.textanalytics.implementation.models.DocumentError;
import com.azure.ai.textanalytics.implementation.models.EntitiesResult;
import com.azure.ai.textanalytics.implementation.models.MultiLanguageBatchInput;
import com.azure.ai.textanalytics.implementation.models.PiiEntitiesResult;
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

service renamed it to PiiResult

import com.azure.ai.textanalytics.implementation.models.PiiResult;
import com.azure.ai.textanalytics.implementation.models.StringIndexType;
import com.azure.ai.textanalytics.implementation.models.WarningCodeValue;
import com.azure.ai.textanalytics.models.EntityCategory;
Expand Down Expand Up @@ -145,8 +145,8 @@ Mono<Response<RecognizePiiEntitiesResultCollection>> recognizePiiEntitiesBatchWi
* @return A {@link Response} that contains {@link RecognizePiiEntitiesResultCollection}.
*/
private Response<RecognizePiiEntitiesResultCollection> toRecognizePiiEntitiesResultCollectionResponse(
final Response<PiiEntitiesResult> response) {
final PiiEntitiesResult piiEntitiesResult = response.getValue();
final Response<PiiResult> response) {
final PiiResult piiEntitiesResult = response.getValue();
// List of documents results
final List<RecognizePiiEntitiesResult> recognizeEntitiesResults = new ArrayList<>();
piiEntitiesResult.getDocuments().forEach(documentEntities -> {
Expand Down Expand Up @@ -211,8 +211,11 @@ private Mono<Response<RecognizePiiEntitiesResultCollection>> getRecognizePiiEnti
}
return service.entitiesRecognitionPiiWithResponseAsync(
new MultiLanguageBatchInput().setDocuments(toMultiLanguageInput(documents)),
modelVersion, includeStatistics, domainFilter, StringIndexType.UTF16CODE_UNIT,
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE))
context.addData(AZ_TRACING_NAMESPACE_KEY, COGNITIVE_TRACING_NAMESPACE_VALUE),
modelVersion,
includeStatistics,
domainFilter,
StringIndexType.UTF16CODE_UNIT)
.doOnSubscribe(ignoredValue -> logger.info(
"Start recognizing Personally Identifiable Information entities for a batch of documents."))
.doOnSuccess(response -> logger.info(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.ai.textanalytics.implementation;

import com.azure.core.annotation.ServiceClientBuilder;
import com.azure.core.http.HttpPipeline;
import com.azure.core.http.HttpPipelineBuilder;
import com.azure.core.http.policy.CookiePolicy;
import com.azure.core.http.policy.RetryPolicy;
import com.azure.core.http.policy.UserAgentPolicy;

/**
 * A builder for creating a new instance of the TextAnalyticsClient type.
 */
@ServiceClientBuilder(serviceClients = {TextAnalyticsClientImpl.class})
public final class TextAnalyticsClientBuilder {
    /*
     * Supported Cognitive Services endpoints (protocol and hostname, for
     * example: https://westus.api.cognitive.microsoft.com).
     */
    private String endpoint;

    /*
     * The HTTP pipeline to send requests through
     */
    private HttpPipeline pipeline;

    /**
     * Sets Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus.api.cognitive.microsoft.com).
     *
     * @param endpoint the endpoint value.
     * @return the TextAnalyticsClientBuilder.
     */
    public TextAnalyticsClientBuilder endpoint(String endpoint) {
        this.endpoint = endpoint;
        return this;
    }

    /**
     * Sets The HTTP pipeline to send requests through.
     *
     * @param pipeline the pipeline value.
     * @return the TextAnalyticsClientBuilder.
     */
    public TextAnalyticsClientBuilder pipeline(HttpPipeline pipeline) {
        this.pipeline = pipeline;
        return this;
    }

    /**
     * Builds an instance of TextAnalyticsClientImpl with the provided parameters.
     *
     * @return an instance of TextAnalyticsClientImpl.
     */
    public TextAnalyticsClientImpl buildClient() {
        // Lazily fall back to a default pipeline when the caller never supplied one.
        if (this.pipeline == null) {
            this.pipeline = new HttpPipelineBuilder()
                .policies(new UserAgentPolicy(), new RetryPolicy(), new CookiePolicy())
                .build();
        }
        final TextAnalyticsClientImpl builtClient = new TextAnalyticsClientImpl(this.pipeline);
        builtClient.setEndpoint(this.endpoint);
        return builtClient;
    }
}
Loading