Skip to content

Commit

Permalink
DataSegmentPusher: Add allowed hadoop property prefixes. (apache#4562)
Browse files Browse the repository at this point in the history
* DataSegmentPusher: Add allowed hadoop property prefixes.

* Fix dots.
  • Loading branch information
gianm authored and b-slim committed Jul 18, 2017
1 parent ae86323 commit 441ee56
Show file tree
Hide file tree
Showing 7 changed files with 48 additions and 7 deletions.
11 changes: 11 additions & 0 deletions api/src/main/java/io/druid/segment/loading/DataSegmentPusher.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,8 @@
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.Collections;
import java.util.List;
import java.util.Map;

public interface DataSegmentPusher
Expand All @@ -45,6 +47,15 @@ default String makeIndexPathName(DataSegment dataSegment, String indexName) {
return StringUtils.format("./%s/%s", getStorageDir(dataSegment), indexName);
}

/**
 * Returns the property prefixes that this pusher wants appended to the "allowedHadoopPrefix" configuration
 * before properties are handed to Hadoop jobs. Each entry is a prefix of the form "druid.xxx", which matches
 * both the exact property "druid.xxx" and everything under "druid.xxx.*".
 */
default List<String> getAllowedPropertyPrefixesForHadoop()
{
  // By default a pusher contributes no extra prefixes.
  final List<String> noPrefixes = Collections.emptyList();
  return noPrefixes;
}

// Note: storage directory structure format = .../dataSource/interval/version/partitionNumber/
// If above format is ever changed, make sure to change it appropriately in other places
// e.g. HDFSDataSegmentKiller uses this information to clean the version, interval and dataSource directories
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;
import com.microsoft.azure.storage.StorageException;
Expand All @@ -37,6 +38,7 @@
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;

Expand Down Expand Up @@ -73,6 +75,12 @@ public String getPathForHadoop()
return null;
}

@Override
public List<String> getAllowedPropertyPrefixesForHadoop()
{
  // Let Azure-specific system properties ("druid.azure" and "druid.azure.*") flow through to Hadoop jobs.
  final List<String> azurePrefixes = ImmutableList.of("druid.azure");
  return azurePrefixes;
}

public File createSegmentDescriptorFile(final ObjectMapper jsonMapper, final DataSegment segment) throws
IOException
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Strings;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;
import io.druid.java.util.common.CompressionUtils;
Expand All @@ -38,6 +39,7 @@
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;

public class GoogleDataSegmentPusher implements DataSegmentPusher
Expand Down Expand Up @@ -75,6 +77,12 @@ public String getPathForHadoop()
return StringUtils.format("gs://%s/%s", config.getBucket(), config.getPrefix());
}

@Override
public List<String> getAllowedPropertyPrefixesForHadoop()
{
  // Let Google-specific system properties ("druid.google" and "druid.google.*") flow through to Hadoop jobs.
  final List<String> googlePrefixes = ImmutableList.of("druid.google");
  return googlePrefixes;
}

public File createDescriptorFile(final ObjectMapper jsonMapper, final DataSegment segment)
throws IOException
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;
import com.metamx.emitter.EmittingLogger;
Expand All @@ -38,6 +39,7 @@
import java.io.IOException;
import java.net.URI;
import java.nio.file.Files;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;

Expand Down Expand Up @@ -79,6 +81,12 @@ public String getPathForHadoop(String dataSource)
return getPathForHadoop();
}

@Override
public List<String> getAllowedPropertyPrefixesForHadoop()
{
  // Let S3-specific system properties ("druid.s3" and "druid.s3.*") flow through to Hadoop jobs.
  final List<String> s3Prefixes = ImmutableList.of("druid.s3");
  return s3Prefixes;
}

@Override
public DataSegment push(final File indexFilesDir, final DataSegment inSegment) throws IOException
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
Expand Down Expand Up @@ -259,7 +260,13 @@ public ShardSpec apply(HadoopyShardSpec input)

}
this.rollupGran = spec.getDataSchema().getGranularitySpec().getQueryGranularity();
this.allowedHadoopPrefix = spec.getTuningConfig().getAllowedHadoopPrefix();

// User-specified list plus our additional bonus list.
this.allowedHadoopPrefix = new ArrayList<>();
this.allowedHadoopPrefix.add("druid.storage");
this.allowedHadoopPrefix.add("druid.javascript");
this.allowedHadoopPrefix.addAll(DATA_SEGMENT_PUSHER.getAllowedPropertyPrefixesForHadoop());
this.allowedHadoopPrefix.addAll(spec.getTuningConfig().getUserAllowedHadoopPrefix());
}

@JsonProperty(value = "spec")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -137,9 +137,7 @@ public HadoopTuningConfig(
this.forceExtendableShardSpecs = forceExtendableShardSpecs;
Preconditions.checkArgument(this.numBackgroundPersistThreads >= 0, "Not support persistBackgroundCount < 0");
this.useExplicitVersion = useExplicitVersion;
this.allowedHadoopPrefix = allowedHadoopPrefix == null
? ImmutableList.of("druid.storage.", "druid.javascript.")
: allowedHadoopPrefix;
this.allowedHadoopPrefix = allowedHadoopPrefix == null ? ImmutableList.of() : allowedHadoopPrefix;
}

@JsonProperty
Expand Down Expand Up @@ -323,9 +321,10 @@ public HadoopTuningConfig withShardSpecs(Map<Long, List<HadoopyShardSpec>> specs
);
}

@JsonProperty
public List<String> getAllowedHadoopPrefix()
@JsonProperty("allowedHadoopPrefix")
public List<String> getUserAllowedHadoopPrefix()
{
  // Only the prefixes the user explicitly configured. The built-in prefixes ("druid.storage",
  // "druid.javascript") and pusher-provided prefixes are appended by HadoopDruidIndexerConfig.
  return this.allowedHadoopPrefix;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -313,7 +313,7 @@ public static void injectDruidProperties(Configuration configuration, List<Strin

for (String propName : System.getProperties().stringPropertyNames()) {
for (String prefix : listOfAllowedPrefix) {
if (propName.startsWith(prefix)) {
if (propName.equals(prefix) || propName.startsWith(prefix + ".")) {
mapJavaOpts = StringUtils.format("%s -D%s=%s", mapJavaOpts, propName, System.getProperty(propName));
reduceJavaOpts = StringUtils.format("%s -D%s=%s", reduceJavaOpts, propName, System.getProperty(propName));
break;
Expand Down

0 comments on commit 441ee56

Please sign in to comment.