Skip to content

Commit

Permalink
revert unnecessary configs
Browse files Browse the repository at this point in the history
  • Loading branch information
modithah committed Oct 25, 2023
1 parent f0e3f4d commit 217577c
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 29 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -347,7 +347,7 @@ public class VenicePushJob implements AutoCloseable {
public static final String VALUE_SCHEMA_ID_PROP = "value.schema.id";
public static final String DERIVED_SCHEMA_ID_PROP = "derived.schema.id";
public static final String TOPIC_PROP = "venice.kafka.topic";
public static final String HADOOP_PREFIX = "hadoop-conf.";
protected static final String HADOOP_PREFIX = "hadoop-conf.";
protected static final String HADOOP_VALIDATE_SCHEMA_AND_BUILD_DICT_PREFIX = "hadoop-dict-build-conf.";
public static final String SSL_PREFIX = "ssl";

Expand Down Expand Up @@ -948,7 +948,6 @@ public void run() {
"The store {} is discovered in Venice cluster {}",
pushJobSetting.storeName,
pushJobSetting.clusterName);
HadoopUtils.setHadoopConfigurationFromProperties(jobConf, props);

if (pushJobSetting.isSourceKafka) {
initKIFRepushDetails();
Expand Down Expand Up @@ -3081,6 +3080,10 @@ protected void setupDefaultJobConf(

for (String key: props.keySet()) {
String lowerCase = key.toLowerCase();
if (lowerCase.startsWith(HADOOP_PREFIX)) {
String overrideKey = key.substring(HADOOP_PREFIX.length());
conf.set(overrideKey, props.getString(key));
}
for (String prefix: passThroughPrefixList) {
if (lowerCase.startsWith(prefix)) {
conf.set(key, props.getString(key));
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
package com.linkedin.venice.hadoop.utils;

import static com.linkedin.venice.hadoop.VenicePushJob.HADOOP_PREFIX;

import com.linkedin.venice.utils.VeniceProperties;
import java.io.IOException;
import java.util.Properties;
Expand Down Expand Up @@ -54,17 +52,6 @@ public static void cleanUpHDFSPath(String path, boolean recursive) {
}
}

public static void setHadoopConfigurationFromProperties(Configuration conf, VeniceProperties props) {
for (String key: props.keySet()) {
String lowerCase = key.toLowerCase();
if (lowerCase.startsWith(HADOOP_PREFIX)) {
String overrideKey = key.substring(HADOOP_PREFIX.length());
conf.set(overrideKey, props.getString(key));
LOGGER.info("Hadoop configuration {} is overwritten by {}", overrideKey, key);
}
}
}

  // Static utility class: private constructor prevents instantiation.
  private HadoopUtils() {
  }
}
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
package com.linkedin.venice.hadoop.utils;

import com.linkedin.venice.utils.VeniceProperties;
import java.io.IOException;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
Expand Down Expand Up @@ -42,16 +40,4 @@ public void testCleanUpHDFSPath() throws IOException {
// validate the path
Assert.assertFalse(fs.exists(p));
}

@Test
public void testSetHadoopConfigurationFromProperties() {
Configuration conf = new Configuration();
Properties innerProps = new Properties();
innerProps.setProperty("non.valid.key", "shouldn't exist");
innerProps.setProperty("hadoop-conf.fs.s3a.access.key", "s3-key");
VeniceProperties props = new VeniceProperties(innerProps);
HadoopUtils.setHadoopConfigurationFromProperties(conf, props);
Assert.assertEquals(conf.get("fs.s3a.access.key"), "s3-key");
Assert.assertNull(conf.get("non.valid.key"));
}
}

0 comments on commit 217577c

Please sign in to comment.