Avoid using Guava in DataSegmentPushers because of incompatibilities (apache#4391)

* Avoid using Guava in DataSegmentPushers because of Hadoop incompatibilities

* Clarify comments
leventov authored and drcrallen committed Jun 12, 2017
1 parent 5285eb9 commit c121845
Showing 4 changed files with 17 additions and 19 deletions.
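
All four files get the same treatment: descriptor JSON that was written through a raw FileOutputStream or through Guava's Files.copy/ByteStreams helpers is now written with java.nio.file.Files.write, which has been in the JDK since Java 7 and so cannot disappear when a different Guava version (for example, the Guava 11.0.2 that Hadoop 2.x bundles) wins on the classpath. A minimal before/after sketch of the substitution, using a literal byte array in place of the serialized DataSegment:

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class DescriptorWriteSketch
{
  public static void main(String[] args) throws IOException
  {
    File descFile = File.createTempFile("descriptor", ".json");
    byte[] bytes = "{\"example\": true}".getBytes(StandardCharsets.UTF_8);

    // Before: a hand-rolled stream (the Guava variants are equivalent).
    try (FileOutputStream stream = new FileOutputStream(descFile)) {
      stream.write(bytes);
    }

    // After: a single JDK call. Files.write opens with CREATE +
    // TRUNCATE_EXISTING + WRITE by default, so the semantics match the
    // stream version it replaces, and the file is closed even on failure.
    Files.write(descFile.toPath(), bytes);
  }
}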

CloudFilesDataSegmentPusher.java

@@ -23,7 +23,6 @@
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
 import com.google.inject.Inject;
-
 import io.druid.java.util.common.CompressionUtils;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.segment.SegmentUtils;
@@ -32,9 +31,9 @@
 import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi;

 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.URI;
+import java.nio.file.Files;
 import java.util.Map;
 import java.util.concurrent.Callable;

@@ -102,9 +101,9 @@ public DataSegment call() throws Exception
             log.info("Pushing %s.", segmentData.getPath());
             objectApi.put(segmentData);

-            try (FileOutputStream stream = new FileOutputStream(descFile)) {
-              stream.write(jsonMapper.writeValueAsBytes(inSegment));
-            }
+            // Avoid using Guava in DataSegmentPushers because they might be used with very diverse Guava versions in
+            // runtime, and because Guava deletes methods over time, that causes incompatibilities.
+            Files.write(descFile.toPath(), jsonMapper.writeValueAsBytes(inSegment));
             CloudFilesObject descriptorData = new CloudFilesObject(
                 segmentPath, descFile,
                 objectApi.getRegion(), objectApi.getContainer()

GoogleDataSegmentPusher.java

@@ -34,9 +34,9 @@

 import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.URI;
+import java.nio.file.Files;
 import java.util.Map;

 public class GoogleDataSegmentPusher implements DataSegmentPusher
@@ -78,10 +78,9 @@ public File createDescriptorFile(final ObjectMapper jsonMapper, final DataSegment segment)
       throws IOException
   {
     File descriptorFile = File.createTempFile("descriptor", ".json");
-    try (FileOutputStream stream = new FileOutputStream(descriptorFile)) {
-      stream.write(jsonMapper.writeValueAsBytes(segment));
-    }
-
+    // Avoid using Guava in DataSegmentPushers because they might be used with very diverse Guava versions in
+    // runtime, and because Guava deletes methods over time, that causes incompatibilities.
+    Files.write(descriptorFile.toPath(), jsonMapper.writeValueAsBytes(segment));
     return descriptorFile;
   }

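
As a usage note, the descriptor written above is plain Jackson output, so the same mapper can read it back. A self-contained sketch, with a Map standing in for the real DataSegment (hypothetical field names, not Druid's):

import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;

public class DescriptorRoundTrip
{
  public static void main(String[] args) throws IOException
  {
    ObjectMapper jsonMapper = new ObjectMapper();

    Map<String, String> segment = new HashMap<>();
    segment.put("dataSource", "example"); // stand-in fields only
    segment.put("version", "v1");

    File descriptorFile = File.createTempFile("descriptor", ".json");
    Files.write(descriptorFile.toPath(), jsonMapper.writeValueAsBytes(segment));

    Map<?, ?> readBack = jsonMapper.readValue(descriptorFile, Map.class);
    System.out.println(readBack); // {dataSource=example, version=v1}
  }
}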

S3DataSegmentPusher.java

@@ -22,11 +22,8 @@
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Throwables;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.io.ByteStreams;
-import com.google.common.io.Files;
 import com.google.inject.Inject;
 import com.metamx.emitter.EmittingLogger;
-
 import io.druid.java.util.common.CompressionUtils;
 import io.druid.segment.SegmentUtils;
 import io.druid.segment.loading.DataSegmentPusher;
@@ -39,6 +36,7 @@
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
+import java.nio.file.Files;
 import java.util.Map;
 import java.util.concurrent.Callable;

@@ -116,7 +114,9 @@ public DataSegment call() throws Exception
                 .withBinaryVersion(SegmentUtils.getVersionFromDir(indexFilesDir));

             File descriptorFile = File.createTempFile("druid", "descriptor.json");
-            Files.copy(ByteStreams.newInputStreamSupplier(jsonMapper.writeValueAsBytes(outSegment)), descriptorFile);
+            // Avoid using Guava in DataSegmentPushers because they might be used with very diverse Guava versions in
+            // runtime, and because Guava deletes methods over time, that causes incompatibilities.
+            Files.write(descriptorFile.toPath(), jsonMapper.writeValueAsBytes(outSegment));
             S3Object descriptorObject = new S3Object(descriptorFile);
             descriptorObject.setBucketName(outputBucket);
             descriptorObject.setKey(s3DescriptorPath);
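
The S3 change removes the most fragile of the Guava calls: ByteStreams.newInputStreamSupplier was deprecated and later deleted from Guava, so a pusher compiled against a recent Guava but run against the older copy on a Hadoop classpath can fail at runtime with NoSuchMethodError. A hypothetical diagnostic for that situation is to ask the JVM which jar actually supplies a Guava class:

import com.google.common.io.ByteStreams;

public class GuavaOriginCheck
{
  public static void main(String[] args)
  {
    // Prints the jar that ByteStreams was loaded from; if a Hadoop lib
    // directory shows up here, Hadoop's bundled Guava is shadowing the
    // one the application ships.
    System.out.println(
        ByteStreams.class.getProtectionDomain().getCodeSource().getLocation()
    );
  }
}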

LocalDataSegmentPusher.java

@@ -21,10 +21,7 @@

 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.io.ByteStreams;
-import com.google.common.io.Files;
 import com.google.inject.Inject;
-
 import io.druid.java.util.common.CompressionUtils;
 import io.druid.java.util.common.logger.Logger;
 import io.druid.segment.SegmentUtils;
@@ -35,6 +32,7 @@
 import java.io.IOException;
 import java.net.URI;
 import java.nio.file.FileAlreadyExistsException;
+import java.nio.file.Files;
 import java.util.Map;
 import java.util.UUID;

@@ -110,7 +108,7 @@ public DataSegment push(File dataSegmentFile, DataSegment segment) throws IOException
     // will be failed and will read the descriptor.json created by current push operation directly
     FileUtils.forceMkdir(outDir.getParentFile());
     try {
-      java.nio.file.Files.move(tmpOutDir.toPath(), outDir.toPath());
+      Files.move(tmpOutDir.toPath(), outDir.toPath());
     }
     catch (FileAlreadyExistsException e) {
       log.warn("Push destination directory[%s] exists, ignore this message if replication is configured.", outDir);
@@ -143,7 +141,9 @@ private DataSegment createDescriptorFile(DataSegment segment, File outDir) throws IOException
   {
     File descriptorFile = new File(outDir, "descriptor.json");
     log.info("Creating descriptor file at[%s]", descriptorFile);
-    Files.copy(ByteStreams.newInputStreamSupplier(jsonMapper.writeValueAsBytes(segment)), descriptorFile);
+    // Avoid using Guava in DataSegmentPushers because they might be used with very diverse Guava versions in
+    // runtime, and because Guava deletes methods over time, that causes incompatibilities.
+    Files.write(descriptorFile.toPath(), jsonMapper.writeValueAsBytes(segment));
     return segment;
   }
 }
