AVRO-1764: YAML loader to add metadata properties on RecordSchema #61

Closed
wants to merge 1 commit into from
5 changes: 5 additions & 0 deletions lang/java/avro/pom.xml
@@ -169,6 +169,11 @@
<artifactId>joda-time</artifactId>
<optional>true</optional>
</dependency>
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.16</version>
</dependency>
</dependencies>

</project>
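
For context (not part of the change itself), a minimal sketch of what the new snakeyaml dependency is used for: loading a YAML document into plain java.util.Map collections. The Yaml class and its load(InputStream) method are real SnakeYAML 1.x API; the class name and YAML content below are illustrative only.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.Map;

import org.yaml.snakeyaml.Yaml;

public class SnakeYamlLoadSketch {
  public static void main(String[] args) {
    // SnakeYAML parses a YAML mapping into a java.util.Map, which is what
    // the metadata loader added to Schema.java below relies on.
    InputStream in = new ByteArrayInputStream("owner: data-team\nversion: 2\n".getBytes());
    Map<?, ?> metadata = (Map<?, ?>) new Yaml().load(in);
    System.out.println(metadata.get("owner"));   // data-team
    System.out.println(metadata.get("version")); // 2, parsed as an Integer
  }
}
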
48 changes: 48 additions & 0 deletions lang/java/avro/src/main/java/org/apache/avro/Schema.java
@@ -22,6 +22,8 @@
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
@@ -43,6 +45,7 @@
import org.codehaus.jackson.JsonGenerator;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.node.DoubleNode;
import org.yaml.snakeyaml.Yaml;

/** An abstract data type.
* <p>A schema may be one of:
@@ -82,6 +85,7 @@ public abstract class Schema extends JsonProperties {
static final ObjectMapper MAPPER = new ObjectMapper(FACTORY);

private static final int NO_HASHCODE = Integer.MIN_VALUE;
public static String metadataDirectory = "";

static {
FACTORY.enable(JsonParser.Feature.ALLOW_COMMENTS);
@@ -283,6 +287,16 @@ public boolean isError() {
throw new AvroRuntimeException("Not a record: "+this);
}

/** If this is a record, returns its metadata, or null if none was loaded. */
public Map getMetadata() {
throw new AvroRuntimeException("Not a record: "+this);
}

/** If this is a record, sets the metadata for this record. */
public void setMetadata(Map metadata) {
throw new AvroRuntimeException("Not a record: "+this);
}

/** If this is an array, returns its element type. */
public Schema getElementType() {
throw new AvroRuntimeException("Not an array: "+this);
Expand Down Expand Up @@ -605,6 +619,8 @@ private static class RecordSchema extends NamedSchema {
private List<Field> fields;
private Map<String, Field> fieldMap;
private final boolean isError;
private Map metadata;

public RecordSchema(Name name, String doc, boolean isError) {
super(Type.RECORD, name, doc);
this.isError = isError;
@@ -656,6 +672,18 @@ public void setFields(List<Field> fields) {
this.fields = ff.lock();
this.hashCode = NO_HASHCODE;
}

@Override
public Map getMetadata() {
return metadata;
}

@Override
public void setMetadata(Map metadata) {
this.metadata = metadata;
}

public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof RecordSchema)) return false;
@@ -1249,6 +1277,7 @@ static Schema parse(JsonNode schema, Names names) {
} else if (type.equals("record") || type.equals("error")) { // record
List<Field> fields = new ArrayList<Field>();
result = new RecordSchema(name, doc, type.equals("error"));
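// Attach any <recordName>.yml metadata found under Schema.metadataDirectory to this record.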
result.setMetadata(getMetadataFromRecord(name.name));
if (name != null) names.add(result);
JsonNode fieldsNode = schema.get("fields");
if (fieldsNode == null || !fieldsNode.isArray())
@@ -1343,6 +1372,25 @@ static Schema parse(JsonNode schema, Names names) {
}
}

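/**
 * Loads <recordName>.yml from {@link #metadataDirectory} with SnakeYAML, or returns null
 * when no such file exists.
 */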
private static Map getMetadataFromRecord(String fileName) {
Map map = null;
String nameWithExtension = fileName + ".yml";
File metadataFileFromRecord = new File(Schema.metadataDirectory + "/" + nameWithExtension);

if (metadataFileFromRecord.exists()) {
try {
FileInputStream metadataInputStream = new FileInputStream(metadataFileFromRecord);
Yaml yaml = new Yaml();
map = (Map) yaml.load(metadataInputStream);
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
}
}

return map;
}

private static Set<String> parseAliases(JsonNode node) {
JsonNode aliasesNode = node.get("aliases");
if (aliasesNode == null)
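
For reference, a hedged usage sketch of the new Schema API added above, mirroring what the unit test below does: point Schema.metadataDirectory at the directory holding the <RecordName>.yml files, parse the schema as usual, and read the loaded metadata from getMetadata(). The class name is illustrative; the paths are the ones used by the test.

import java.io.File;
import java.io.IOException;
import java.util.Map;

import org.apache.avro.Schema;

public class MetadataUsageSketch {
  public static void main(String[] args) throws IOException {
    // Directory that holds one <RecordName>.yml file per record.
    Schema.metadataDirectory = "src/test/resources/record-metadata";

    // Parsing is unchanged; the metadata is attached to the RecordSchema as a side effect.
    Schema schema = new Schema.Parser().parse(new File("src/test/resources/SchemaBuilder.avsc"));

    // Null when no matching .yml file exists for the record.
    Map metadata = schema.getMetadata();
    System.out.println(metadata);
  }
}
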
11 changes: 11 additions & 0 deletions lang/java/avro/src/test/java/org/apache/avro/TestSchema.java
@@ -22,6 +22,8 @@
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

@@ -99,5 +101,14 @@ public void testSchemaWithNullFields() {
Schema.createRecord("foobar", null, null, false, null);
}

@Test
public void testRecordMetadataLoad() throws IOException {
Schema.metadataDirectory = "src/test/resources/record-metadata";
File file = new File("src/test/resources/SchemaBuilder.avsc");
Schema schema = new Schema.Parser().parse(file);

assertNotNull(schema);
assertNotNull(schema.getMetadata());
}

}
@@ -0,0 +1,3 @@
persistence:
db:
database: 'foobar'
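
Given this YAML (presumably added under src/test/resources/record-metadata and named after the record defined in SchemaBuilder.avsc, which is what the loader looks for), the value returned by getMetadata() would be a map of nested maps. A rough sketch of reading it, continuing the parsing sketch shown earlier; the casts reflect how SnakeYAML represents nested mappings:

// Continuing the earlier sketch: 'schema' is the parsed record whose
// <RecordName>.yml contains the YAML above.
Map metadata = schema.getMetadata();
Map persistence = (Map) metadata.get("persistence");
Map db = (Map) persistence.get("db");
String database = (String) db.get("database"); // "foobar"
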
@@ -18,18 +18,17 @@

package org.apache.avro.mojo;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;

import org.apache.avro.compiler.specific.SpecificCompiler;

import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.model.fileset.FileSet;
import org.apache.maven.shared.model.fileset.util.FileSetManager;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;

/**
* Base for Avro Compiler Mojos.
*/
@@ -114,6 +113,13 @@ public abstract class AbstractAvroMojo extends AbstractMojo {
*/
protected String templateDirectory = "/org/apache/avro/compiler/specific/templates/java/classic/";

/**
* The directory that contains the YAML files to be loaded as the metadata attribute of a RecordSchema.
*
* @parameter property="metadataDirectory"
*/
protected String metadataDirectory = "";

/**
* Determines whether or not to create setters for the fields of the record.
* The default is to create setters.
@@ -18,14 +18,13 @@

package org.apache.avro.mojo;

import org.apache.avro.Schema;
import org.apache.avro.compiler.specific.SpecificCompiler;
import org.apache.avro.generic.GenericData.StringType;

import java.io.File;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.compiler.specific.SpecificCompiler;

/**
* Generate Java classes from Avro schema files (.avsc)
*
@@ -61,18 +60,8 @@ public class SchemaMojo extends AbstractAvroMojo {
@Override
protected void doCompile(String filename, File sourceDirectory, File outputDirectory) throws IOException {
File src = new File(sourceDirectory, filename);
Schema schema;
Schema schema = parseSrc(src);

// This is necessary to maintain backward-compatibility. If there are
// no imported files then isolate the schemas from each other, otherwise
// allow them to share a single schema so resuse and sharing of schema
// is possible.
if (imports == null) {
schema = new Schema.Parser().parse(src);
} else {
schema = schemaParser.parse(src);
}

SpecificCompiler compiler = new SpecificCompiler(schema);
compiler.setTemplateDir(templateDirectory);
compiler.setStringType(StringType.valueOf(stringType));
@@ -91,4 +80,21 @@ protected String[] getIncludes() {
protected String[] getTestIncludes() {
return testIncludes;
}

private Schema parseSrc(File src) throws IOException {
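// Make the metadata directory configured on the mojo visible to the schema parser.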
Schema.metadataDirectory = this.metadataDirectory;
Schema schema;

// This is necessary to maintain backward-compatibility. If there are
// no imported files then isolate the schemas from each other, otherwise
// allow them to share a single schema so reuse and sharing of schema
// is possible.
if (imports == null) {
schema = new Schema.Parser().parse(src);
} else {
schema = schemaParser.parse(src);
}

return schema;
}
}