diff --git a/graphql/server/src/main/java/io/helidon/graphql/server/ExecutionContext.java b/graphql/server/src/main/java/io/helidon/graphql/server/ExecutionContext.java
index 45b986e6f69..d0b939c2037 100644
--- a/graphql/server/src/main/java/io/helidon/graphql/server/ExecutionContext.java
+++ b/graphql/server/src/main/java/io/helidon/graphql/server/ExecutionContext.java
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2020 Oracle and/or its affiliates.
+ * Copyright (c) 2020, 2021 Oracle and/or its affiliates.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/graphql/server/src/main/java/io/helidon/graphql/server/ExecutionContextImpl.java b/graphql/server/src/main/java/io/helidon/graphql/server/ExecutionContextImpl.java
index cab5f933274..8adb0218e03 100644
--- a/graphql/server/src/main/java/io/helidon/graphql/server/ExecutionContextImpl.java
+++ b/graphql/server/src/main/java/io/helidon/graphql/server/ExecutionContextImpl.java
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2020 Oracle and/or its affiliates.
+ * Copyright (c) 2020, 2021 Oracle and/or its affiliates.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/graphql/server/src/main/java/io/helidon/graphql/server/InvocationHandlerImpl.java b/graphql/server/src/main/java/io/helidon/graphql/server/InvocationHandlerImpl.java
index 5674d38d56e..ba8a81b37ec 100644
--- a/graphql/server/src/main/java/io/helidon/graphql/server/InvocationHandlerImpl.java
+++ b/graphql/server/src/main/java/io/helidon/graphql/server/InvocationHandlerImpl.java
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2020 Oracle and/or its affiliates.
+ * Copyright (c) 2020, 2021 Oracle and/or its affiliates.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/DataFetcherUtils.java b/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/DataFetcherUtils.java
index ee14d1e4ffb..16d2097b28b 100644
--- a/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/DataFetcherUtils.java
+++ b/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/DataFetcherUtils.java
@@ -20,6 +20,7 @@
 import java.lang.reflect.Constructor;
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
+import java.lang.reflect.Parameter;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.text.NumberFormat;
@@ -120,28 +121,37 @@ static DataFetcher newMethodDataFetcher(Schema schema, Class clazz, Me
             }
         }
 
-        if (args.length > 0) {
+        ExecutionContext executionContext;
+        // check for a single DataFetchingEnvironment parameter as args will be zero
+        Parameter[] parameters = method.getParameters();
+        if (parameters.length == 1 && parameters[0].getType().equals(DataFetchingEnvironment.class)) {
+            listArgumentValues.add(environment);
+        } else if (args.length > 0) {
             for (SchemaArgument argument : args) {
-                // ensure a Map is not used as an input type
-                Class originalType = argument.originalType();
-                if (originalType != null && Map.class.isAssignableFrom(originalType)) {
-                    ensureRuntimeException(LOGGER, MAP_MESSAGE);
-                }
+                if (argument.isDataFetchingEnvironment()) {
+                    listArgumentValues.add(environment);
+                } else {
+                    // ensure a Map is not used as an input type
+                    Class originalType = argument.originalType();
+                    if (originalType != null && Map.class.isAssignableFrom(originalType)) {
+                        ensureRuntimeException(LOGGER, MAP_MESSAGE);
+                    }
 
-                if (argument.isArrayReturnType() && argument.arrayLevels() > 1
-                        && SchemaGeneratorHelper.isPrimitiveArray(argument.originalType())) {
-                    throw new GraphQlConfigurationException("This implementation does not currently support "
-                                                                    + "multi-level primitive arrays as arguments. Please use "
-                                                                    + "List or Collection of Object equivalent. E.g. "
-                                                                    + "In place of method(int [][] value) use "
-                                                                    + " method(List<List<Integer>> value)");
-                }
+                    if (argument.isArrayReturnType() && argument.arrayLevels() > 1
+                            && SchemaGeneratorHelper.isPrimitiveArray(argument.originalType())) {
+                        throw new GraphQlConfigurationException("This implementation does not currently support "
+                                                                        + "multi-level primitive arrays as arguments. Please use "
+                                                                        + "List or Collection of Object equivalent. E.g. "
+                                                                        + "In place of method(int [][] value) use "
+                                                                        + " method(List<List<Integer>> value)");
+                    }
 
-                listArgumentValues.add(generateArgumentValue(schema, argument.argumentType(),
-                                                             argument.originalType(),
-                                                             argument.originalArrayType(),
-                                                             environment.getArgument(argument.argumentName()),
-                                                             argument.format()));
+                    listArgumentValues.add(generateArgumentValue(schema, argument.argumentType(),
+                                                                 argument.originalType(),
+                                                                 argument.originalArrayType(),
+                                                                 environment.getArgument(argument.argumentName()),
+                                                                 argument.format()));
+                }
             }
         }
diff --git a/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaArgument.java b/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaArgument.java
index 0f7cb1eafb1..7e50a43bf0b 100644
--- a/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaArgument.java
+++ b/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaArgument.java
@@ -19,6 +19,8 @@
 import java.util.Arrays;
 import java.util.Objects;
 
+import graphql.schema.DataFetchingEnvironment;
+
 /**
  * The representation of a GraphQL Argument or Parameter.
  */
@@ -78,6 +80,11 @@ class SchemaArgument extends AbstractDescriptiveElement implements ElementGenera
      */
     private Class originalArrayType;
 
+    /**
+     * Indicates if the argument type is the {@link DataFetchingEnvironment} and must be ignored in schema generation.
+     */
+    private boolean isDataFetchingEnvironment;
+
     /**
      * Construct a {@link SchemaArgument}.
      *
@@ -95,6 +102,7 @@ private SchemaArgument(Builder builder) {
         this.arrayLevels = builder.arrayLevels;
         this.isArrayReturnTypeMandatory = builder.isArrayReturnTypeMandatory;
         this.originalArrayType = builder.originalArrayType;
+        this.isDataFetchingEnvironment = builder.isDataFetchingEnvironment;
         description(builder.description);
     }
 
@@ -299,6 +307,15 @@ public void arrayReturnTypeMandatory(boolean arrayReturnTypeMandatory) {
         isArrayReturnTypeMandatory = arrayReturnTypeMandatory;
     }
 
+    /**
+     * Indicates if the argument type is the {@link DataFetchingEnvironment} and must be ignored in schema generation.
+     *
+     * @return true if the argument type is the {@link DataFetchingEnvironment}
+     */
+    public boolean isDataFetchingEnvironment() {
+        return isDataFetchingEnvironment;
+    }
+
     /**
      * Sets the original array type.
      *
@@ -329,6 +346,7 @@ public String toString() {
                 + ", isReturnTypeMandatory=" + isArrayReturnTypeMandatory
                 + ", isArrayReturnType=" + isArrayReturnType
                 + ", originalArrayType=" + originalArrayType
+                + ", isDataFetchingEnvironment=" + isDataFetchingEnvironment
                 + ", arrayLevels=" + arrayLevels
                 + ", format=" + Arrays.toString(format)
                 + ", description='" + description() + '\'' + '}';
@@ -353,13 +371,14 @@ public boolean equals(Object o) {
                 && Arrays.equals(format, schemaArgument.format)
                 && Objects.equals(sourceArgument, schemaArgument.sourceArgument)
                 && Objects.equals(originalArrayType, schemaArgument.originalArrayType)
+                && Objects.equals(isDataFetchingEnvironment, schemaArgument.isDataFetchingEnvironment)
                 && Objects.equals(description(), schemaArgument.description())
                 && Objects.equals(defaultValue, schemaArgument.defaultValue);
     }
 
     @Override
     public int hashCode() {
-        return Objects.hash(super.hashCode(), argumentName, argumentType, sourceArgument,
+        return Objects.hash(super.hashCode(), argumentName, argumentType, sourceArgument, isDataFetchingEnvironment,
                             isMandatory, defaultValue, description(), originalType, format, originalArrayType);
     }
 
@@ -380,6 +399,7 @@ public static class Builder implements io.helidon.common.Builder
         private int arrayLevels;
         private boolean isArrayReturnTypeMandatory;
         private Class originalArrayType;
+        private boolean isDataFetchingEnvironment;
 
         /**
          * Set the argument name.
@@ -506,7 +526,8 @@ public Builder arrayReturnTypeMandatory(boolean isArrayReturnTypeMandatory) {
 
         /**
          * Set the original array inner type if it is array type.
-         * @param originalArrayType the original array inner type if it is array type
+         *
+         * @param originalArrayType the original array inner type if it is array type
          * @return updated builder instance
          */
         public Builder originalArrayType(Class originalArrayType) {
@@ -514,6 +535,17 @@ public Builder originalArrayType(Class originalArrayType) {
             return this;
         }
 
+        /**
+         * Set if the argument type is the {@link DataFetchingEnvironment} and must be ignored in schema generation.
+         *
+         * @param isDataFetchingEnvironment if the argument type is the {@link DataFetchingEnvironment}
+         * @return updated builder instance
+         */
+        public Builder dataFetchingEnvironment(boolean isDataFetchingEnvironment) {
+            this.isDataFetchingEnvironment = isDataFetchingEnvironment;
+            return this;
+        }
+
         /**
          * Build the instance from this builder.
          *
diff --git a/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaFieldDefinition.java b/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaFieldDefinition.java
index 099c3208ad7..81b108450fb 100644
--- a/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaFieldDefinition.java
+++ b/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaFieldDefinition.java
@@ -142,10 +142,15 @@ public String getSchemaAsString() {
         StringBuilder sb = new StringBuilder(getSchemaElementDescription(format()))
                 .append(name());
 
-        if (listSchemaArguments.size() > 0) {
+        // determine if there are any arguments that are DataFetcherEnvironment as they should
+        // not be included as standard types
+        boolean hasSchemaArguments = listSchemaArguments.stream().anyMatch(a -> !a.isDataFetchingEnvironment());
+
+        if (hasSchemaArguments) {
             sb.append(OPEN_PARENTHESES)
               .append(NEWLINE)
               .append(listSchemaArguments.stream()
+                              .filter(a -> !a.isDataFetchingEnvironment())
                               .map(SchemaArgument::getSchemaAsString)
                               .collect(Collectors.joining(COMMA_SPACE + NEWLINE)));
             sb.append(NEWLINE).append(CLOSE_PARENTHESES);
diff --git a/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaGenerator.java b/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaGenerator.java
index 015696256df..757c3d64470 100644
--- a/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaGenerator.java
+++ b/microprofile/graphql/server/src/main/java/io/helidon/microprofile/graphql/server/SchemaGenerator.java
@@ -46,6 +46,7 @@
 import graphql.schema.DataFetcher;
 import graphql.schema.DataFetcherFactories;
+import graphql.schema.DataFetchingEnvironment;
 import graphql.schema.GraphQLScalarType;
 import graphql.schema.PropertyDataFetcher;
 import org.eclipse.microprofile.graphql.Description;
@@ -515,11 +516,9 @@ private void processGraphQLApiAnnotations(SchemaType rootQueryType,
                                                Class clazz)
             throws IntrospectionException, ClassNotFoundException {
 
-        for (Map.Entry entry
-                : retrieveAllAnnotatedBeanMethods(clazz).entrySet()) {
+        for (Map.Entry entry : retrieveAllAnnotatedBeanMethods(clazz).entrySet()) {
             DiscoveredMethod discoveredMethod = entry.getValue();
             Method method = discoveredMethod.method();
-
             SchemaFieldDefinition fd = null;
 
             // only include discovered methods in the original type where either the source is null
@@ -552,7 +551,7 @@
                         a.argumentType(typeName);
                         String returnType = a.argumentType();
 
-                        if (originalTypeName.equals(returnType) && !ID.equals(returnType)) {
+                        if (originalTypeName.equals(returnType) && !ID.equals(returnType) && !a.isDataFetchingEnvironment()) {
                             // type name has not changed, so this must be either a Scalar, Enum or a Type
                             // Note: Interfaces are not currently supported as InputTypes in 1.0 of the Specification
                             // if is Scalar or enum then add to unresolved types and they will be dealt with
@@ -1244,6 +1243,7 @@ private void processMethodParameters(Method method, DiscoveredMethod discoveredM
                         .defaultValue(argumentDefaultValue)
                         .originalType(paramType)
                         .description(getDescription(parameter.getAnnotation(Description.class)))
+                        .dataFetchingEnvironment(paramType.equals(DataFetchingEnvironment.class))
                         .build();
 
                 String[] argumentFormat = getFormattingAnnotation(parameter);
diff --git a/microprofile/graphql/server/src/test/java/io/helidon/microprofile/graphql/server/SchemaArgumentTest.java b/microprofile/graphql/server/src/test/java/io/helidon/microprofile/graphql/server/SchemaArgumentTest.java
index e29820b006a..755b73c9b99 100644
--- a/microprofile/graphql/server/src/test/java/io/helidon/microprofile/graphql/server/SchemaArgumentTest.java
+++ b/microprofile/graphql/server/src/test/java/io/helidon/microprofile/graphql/server/SchemaArgumentTest.java
@@ -120,6 +120,17 @@ public void testSchemaGenerationWithArrays() {
         assertThat(schemaArgument.getSchemaAsString(), is("name: [String!]"));
     }
 
+    @Test
+    public void testSchemaArgumentGenerationWithDataFetchingEnvironment() {
+        SchemaArgument schemaArgument = SchemaArgument.builder()
+                .argumentName("test")
+                .dataFetchingEnvironment(true)
+                .argumentType("String")
+                .build();
+
+        assertThat(schemaArgument.isDataFetchingEnvironment(), is(true));
+    }
+
     @Test
     public void testSchemaGeneration() {
         SchemaArgument schemaArgument = SchemaArgument.builder()
diff --git a/microprofile/graphql/server/src/test/java/io/helidon/microprofile/graphql/server/test/queries/DataFetchingEnvironmentQueriesAndMutations.java b/microprofile/graphql/server/src/test/java/io/helidon/microprofile/graphql/server/test/queries/DataFetchingEnvironmentQueriesAndMutations.java
new file mode 100644
index 00000000000..6ad9f9cccd0
--- /dev/null
+++ b/microprofile/graphql/server/src/test/java/io/helidon/microprofile/graphql/server/test/queries/DataFetchingEnvironmentQueriesAndMutations.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (c) 2020, 2021 Oracle and/or its affiliates.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.helidon.microprofile.graphql.server.test.queries;
+
+import graphql.schema.DataFetchingEnvironment;
+
+import javax.enterprise.context.ApplicationScoped;
+
+import org.eclipse.microprofile.graphql.GraphQLApi;
+import org.eclipse.microprofile.graphql.Name;
+import org.eclipse.microprofile.graphql.Query;
+
+/**
+ * Class that holds queries and mutations using {@link DataFetchingEnvironment}.
+ */
+@GraphQLApi
+@ApplicationScoped
+public class DataFetchingEnvironmentQueriesAndMutations {
+
+    public DataFetchingEnvironmentQueriesAndMutations() {
+    }
+
+    @Query
+    public String testNoArgs(DataFetchingEnvironment env) {
+        return env.getField().getName();
+    }
+
+    @Query
+    public String testWithArgs(@Name("name") String name, DataFetchingEnvironment env) {
+        return name + env.getField().getName();
+    }
+
+    @Query
+    public String testWithArgs2(@Name("name1") String name1, DataFetchingEnvironment env, @Name("name2") String name2) {
+        return name1 + name2 + env.getField().getName();
+    }
+}
diff --git a/tests/integration/mp-graphql/src/test/java/io/helidon/microprofile/graphql/server/DataFetchingEnvironmentIT.java b/tests/integration/mp-graphql/src/test/java/io/helidon/microprofile/graphql/server/DataFetchingEnvironmentIT.java
new file mode 100644
index 00000000000..b8607b5e52b
--- /dev/null
+++ b/tests/integration/mp-graphql/src/test/java/io/helidon/microprofile/graphql/server/DataFetchingEnvironmentIT.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (c) 2020, 2021 Oracle and/or its affiliates.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.helidon.microprofile.graphql.server;
+
+import java.util.Map;
+
+import javax.inject.Inject;
+
+import static org.hamcrest.CoreMatchers.is;
+import static org.hamcrest.CoreMatchers.notNullValue;
+import static org.hamcrest.MatcherAssert.assertThat;
+
+import io.helidon.graphql.server.InvocationHandler;
+import io.helidon.microprofile.graphql.server.test.queries.DataFetchingEnvironmentQueriesAndMutations;
+import io.helidon.microprofile.tests.junit5.AddBean;
+
+import org.junit.jupiter.api.Test;
+
+/**
+ * Tests for {@link graphql.schema.DataFetchingEnvironment} injection.
+ */
+@AddBean(DataFetchingEnvironmentQueriesAndMutations.class)
+class DataFetchingEnvironmentIT extends AbstractGraphQlCdiIT {
+
+    @Inject
+    DataFetchingEnvironmentIT(GraphQlCdiExtension graphQlCdiExtension) {
+        super(graphQlCdiExtension);
+    }
+
+    @Test
+    @SuppressWarnings("unchecked")
+    public void testWithNoArgs() throws Exception {
+        setupIndex(indexFileName, DataFetchingEnvironmentQueriesAndMutations.class);
+        InvocationHandler executionContext = createInvocationHandler();
+        String query = "query { testNoArgs }";
+        Map mapResults = getAndAssertResult(executionContext.execute(query));
+        assertThat(mapResults, is(notNullValue()));
+        String results = (String) mapResults.get("testNoArgs");
+        assertThat(results, is("testNoArgs"));
+    }
+
+    @Test
+    @SuppressWarnings("unchecked")
+    public void testWithArgs() throws Exception {
+        setupIndex(indexFileName, DataFetchingEnvironmentQueriesAndMutations.class);
+        InvocationHandler executionContext = createInvocationHandler();
+
+        String query = "query { testWithArgs(name: \"Tim\") }";
+        Map mapResults = getAndAssertResult(executionContext.execute(query));
+        assertThat(mapResults, is(notNullValue()));
+        String results = (String) mapResults.get("testWithArgs");
+        assertThat(results, is("Tim" + "testWithArgs"));
+    }
+
+    @Test
+    @SuppressWarnings("unchecked")
+    public void testWithArgs2() throws Exception {
+        setupIndex(indexFileName, DataFetchingEnvironmentQueriesAndMutations.class);
+        InvocationHandler executionContext = createInvocationHandler();
+
+        String query = "query { testWithArgs2(name1: \"Tim\", name2: \"Tim\") }";
+        Map mapResults = getAndAssertResult(executionContext.execute(query));
+        assertThat(mapResults, is(notNullValue()));
+        String results = (String) mapResults.get("testWithArgs2");
+        assertThat(results, is("TimTim" + "testWithArgs2"));
+    }
+}
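
For illustration only (not part of the diff above): with this change, a @GraphQLApi method can declare a graphql-java DataFetchingEnvironment parameter anywhere in its signature; the data fetcher injects the current environment at that position and the schema generator omits the parameter from the generated schema. The following is a minimal application-level sketch; the CustomerQueries class and its package are hypothetical, and only env.getField() and env.getArguments() are standard graphql-java API.

package com.example.graphql;                        // hypothetical example package, not part of this change

import javax.enterprise.context.ApplicationScoped;

import graphql.schema.DataFetchingEnvironment;

import org.eclipse.microprofile.graphql.GraphQLApi;
import org.eclipse.microprofile.graphql.Name;
import org.eclipse.microprofile.graphql.Query;

/**
 * Hypothetical application bean showing an injected DataFetchingEnvironment
 * next to a regular GraphQL argument. Only the "name" argument appears in the
 * generated schema; the environment parameter is filtered out by the
 * SchemaFieldDefinition/SchemaGenerator changes above.
 */
@GraphQLApi
@ApplicationScoped
public class CustomerQueries {

    @Query
    public String describeCustomer(@Name("name") String name, DataFetchingEnvironment env) {
        // env.getField() identifies the queried field and env.getArguments()
        // exposes the raw argument map supplied by the client (graphql-java API).
        return "field=" + env.getField().getName()
                + ", name=" + name
                + ", argumentCount=" + env.getArguments().size();
    }
}

A request such as { describeCustomer(name: "Tim") } would resolve against this method with the environment supplied automatically, mirroring the behaviour exercised by DataFetchingEnvironmentIT above.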