Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Hid QQ Parquet from public API #21152

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion sdk/storage/azure-storage-blob/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

## 12.12.0-beta.1 (Unreleased)
- Added support for the 2020-08-04 service version.
- Added support to specify Parquet Input Serialization when querying a blob.

## 12.11.0 (2021-04-29)
- Fixed a bug where large files would hang when the upload method was called.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
import com.azure.storage.blob.models.BlobQueryDelimitedSerialization;
import com.azure.storage.blob.models.BlobQueryError;
import com.azure.storage.blob.models.BlobQueryJsonSerialization;
import com.azure.storage.blob.models.BlobQueryParquetSerialization;
import com.azure.storage.blob.models.BlobQueryProgress;
import com.azure.storage.blob.models.BlobQuerySerialization;
import com.azure.storage.internal.avro.implementation.AvroConstants;
Expand Down Expand Up @@ -231,12 +230,13 @@ public static QuerySerialization transformInputSerialization(BlobQuerySerializat
generatedFormat.setJsonTextConfiguration(transformJson(
(BlobQueryJsonSerialization) userSerialization));

} else if (userSerialization instanceof BlobQueryParquetSerialization) {
// TODO (gapra): uncomment when parquet is released
/*} else if (userSerialization instanceof BlobQueryParquetSerialization) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we get a TODO for why this is commented out?


generatedFormat.setType(QueryFormatType.PARQUET);
generatedFormat.setParquetTextConfiguration(transformParquet(
(BlobQueryParquetSerialization) userSerialization));

*/
} else {
throw logger.logExceptionAsError(new IllegalArgumentException(
"Please see values of valid input serialization in the documentation "
Expand Down Expand Up @@ -323,12 +323,13 @@ private static JsonTextConfiguration transformJson(BlobQueryJsonSerialization js
* @param parquetSerialization {@link BlobQueryParquetSerialization}
* @return an {@link Object} placeholder for the parquet text configuration
*/
private static Object transformParquet(BlobQueryParquetSerialization parquetSerialization) {
// TODO (gapra): uncomment when parquet is released
/*private static Object transformParquet(BlobQueryParquetSerialization parquetSerialization) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TODO

if (parquetSerialization == null) {
return null;
}
return new Object();
}
}*/

/**
* Transforms a BlobQueryArrowSerialization into a ArrowConfiguration.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@
/**
* Defines the input parquet serialization for a blob quick query request.
*/
public class BlobQueryParquetSerialization implements BlobQuerySerialization { }
// TODO (gapra): public when parquet is released
class BlobQueryParquetSerialization implements BlobQuerySerialization { }
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,13 @@
// Licensed under the MIT License.

package com.azure.storage.blob.models;

// TODO (gapra): add parquet to inputs when parquet is released
/**
* Defines the input and output serialization for a blob quick query request.
* either {@link BlobQueryJsonSerialization}, {@link BlobQueryDelimitedSerialization},
* {@link BlobQueryArrowSerialization}, or {@link BlobQueryParquetSerialization}.
* either {@link BlobQueryJsonSerialization}, {@link BlobQueryDelimitedSerialization}, or
* {@link BlobQueryArrowSerialization}.
* <p>
* Note: {@link BlobQueryParquetSerialization} can only be used as an input and
* {@link BlobQueryArrowSerialization} can only be used as an output.
* Note: {@link BlobQueryArrowSerialization} can only be used as an output.
* </p>
*/
public interface BlobQuerySerialization {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import reactor.core.Exceptions
import spock.lang.Requires
import spock.lang.Retry
import spock.lang.Unroll
import spock.lang.Ignore

import java.util.function.Consumer

Expand Down Expand Up @@ -292,6 +293,7 @@ class BlobBaseAPITest extends APISpec {
}

@Unroll
@Ignore /* TODO: Unignore when parquet is officially supported. */
def "Query Input parquet"() {
setup:
String fileName = "parquet.parquet"
Expand Down Expand Up @@ -693,6 +695,7 @@ class BlobBaseAPITest extends APISpec {
thrown(IllegalArgumentException)
}

@Ignore /* TODO: Unignore when parquet is officially supported. */
def "Query parquet output IA"() {
setup:
def outSer = new BlobQueryParquetSerialization()
Expand Down
1 change: 0 additions & 1 deletion sdk/storage/azure-storage-file-datalake/CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

## 12.6.0-beta.1 (Unreleased)
- Added support for the 2020-08-04 service version.
- Added support to specify Parquet Input Serialization when querying a file.
- Added support to undelete a file or directory
- Added support to list deletedPaths
- Added support to get/set service properties
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import com.azure.storage.blob.models.BlobQueryError;
import com.azure.storage.blob.models.BlobQueryHeaders;
import com.azure.storage.blob.models.BlobQueryJsonSerialization;
import com.azure.storage.blob.models.BlobQueryParquetSerialization;
import com.azure.storage.blob.models.BlobQueryProgress;
import com.azure.storage.blob.models.BlobQueryResponse;
import com.azure.storage.blob.models.BlobQuerySerialization;
Expand Down Expand Up @@ -61,7 +60,6 @@
import com.azure.storage.file.datalake.models.FileQueryError;
import com.azure.storage.file.datalake.models.FileQueryHeaders;
import com.azure.storage.file.datalake.models.FileQueryJsonSerialization;
import com.azure.storage.file.datalake.models.FileQueryParquetSerialization;
import com.azure.storage.file.datalake.models.FileQueryProgress;
import com.azure.storage.file.datalake.models.FileQueryResponse;
import com.azure.storage.file.datalake.models.FileQuerySerialization;
Expand Down Expand Up @@ -446,15 +444,16 @@ static BlobQuerySerialization toBlobQuerySerialization(FileQuerySerialization se
} else if (ser instanceof FileQueryArrowSerialization) {
FileQueryArrowSerialization arrSer = (FileQueryArrowSerialization) ser;
return new BlobQueryArrowSerialization().setSchema(toBlobQueryArrowSchema(arrSer.getSchema()));
} else if (ser instanceof FileQueryParquetSerialization) {
return new BlobQueryParquetSerialization();
// TODO (gapra): uncomment when parquet is released
/*} else if (ser instanceof FileQueryParquetSerialization) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

TODO

return new BlobQueryParquetSerialization(); */
} else {
throw new IllegalArgumentException(
String.format("'serialization' must be one of %s, %s, %s or %s",
String.format("'serialization' must be one of %s, %s, or %s",
FileQueryJsonSerialization.class.getSimpleName(),
FileQueryDelimitedSerialization.class.getSimpleName(),
FileQueryArrowSerialization.class.getSimpleName(),
FileQueryParquetSerialization.class.getSimpleName()));
FileQueryArrowSerialization.class.getSimpleName()
/*FileQueryParquetSerialization.class.getSimpleName()*/));
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,5 @@
/**
* Defines the input parquet serialization for a file quick query request.
*/
public class FileQueryParquetSerialization implements FileQuerySerialization { }
// TODO (gapra): public when parquet is released
class FileQueryParquetSerialization implements FileQuerySerialization { }
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,13 @@

package com.azure.storage.file.datalake.models;

// TODO (gapra): add parquet to inputs when parquet is released
/**
* Defines the input and output serialization for a file quick query request.
* either {@link FileQueryJsonSerialization}, {@link FileQueryDelimitedSerialization},
* {@link FileQueryArrowSerialization}, or {@link FileQueryParquetSerialization}.
or {@link FileQueryArrowSerialization}.
* <p>
* Note: {@link FileQueryParquetSerialization} can only be used as an input and
* {@link FileQueryArrowSerialization} can only be used as an output.
* Note: {@link FileQueryArrowSerialization} can only be used as an output.
* </p>
*/
public interface FileQuerySerialization {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3157,6 +3157,7 @@ class FileAPITest extends APISpec {
}

@Unroll
@Ignore /* TODO: Unignore when parquet is officially supported. */
def "Query Input parquet"() {
setup:
String fileName = "parquet.parquet"
Expand Down Expand Up @@ -3527,6 +3528,7 @@ class FileAPITest extends APISpec {
thrown(IllegalArgumentException)
}

@Ignore /* TODO: Unignore when parquet is officially supported. */
def "Query parquet output IA"() {
setup:
def outSer = new FileQueryParquetSerialization()
Expand Down