Skip to content

Commit

Permalink
PARQUET-318: Remove unnecessary object mapper
Browse files · Browse the repository at this point in the history
Author: Nezih Yigitbasi <[email protected]>

Closes apache#227 from nezihyigitbasi/318 and squashes the following commits:

b8e4ca9 [Nezih Yigitbasi] Remove unnecessary object mapper
  • Loading branch information
nezihyigitbasi authored and rdblue committed Dec 16, 2015
1 parent fa7588c commit 367fe13
Show file tree
Hide file tree
Showing 2 changed files with 21 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectWriter;
import org.codehaus.jackson.map.SerializationConfig.Feature;

/**
Expand All @@ -38,19 +39,15 @@
*/
public class ParquetMetadata {

private static ObjectMapper objectMapper = new ObjectMapper();
private static ObjectMapper prettyObjectMapper = new ObjectMapper();
static {
prettyObjectMapper.configure(Feature.INDENT_OUTPUT, true);
}
private static final ObjectMapper objectMapper = new ObjectMapper();

/**
*
* @param parquetMetaData
* @return the json representation
*/
public static String toJSON(ParquetMetadata parquetMetaData) {
  // Delegate to the shared serializer with pretty-printing disabled.
  // (Removed stale duplicate "return toJSON(parquetMetaData, objectMapper);"
  // left over from the pre-refactor version — it made the second return unreachable.)
  return toJSON(parquetMetaData, false);
}

/**
Expand All @@ -59,13 +56,17 @@ public static String toJSON(ParquetMetadata parquetMetaData) {
* @return the pretty printed json representation
*/
public static String toPrettyJSON(ParquetMetadata parquetMetaData) {
  // Delegate to the shared serializer with pretty-printing enabled.
  // (Removed stale duplicate "return toJSON(parquetMetaData, prettyObjectMapper);"
  // from the pre-refactor version — it made the second return unreachable.)
  return toJSON(parquetMetaData, true);
}

private static String toJSON(ParquetMetadata parquetMetaData, ObjectMapper mapper) {
private static String toJSON(ParquetMetadata parquetMetaData, boolean isPrettyPrint) {
StringWriter stringWriter = new StringWriter();
try {
mapper.writeValue(stringWriter, parquetMetaData);
if (isPrettyPrint) {
objectMapper.writerWithDefaultPrettyPrinter().writeValue(stringWriter, parquetMetaData);
} else {
objectMapper.writeValue(stringWriter, parquetMetaData);
}
} catch (JsonGenerationException e) {
throw new RuntimeException(e);
} catch (JsonMappingException e) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,7 @@ public void randomTestFilterMetaData() {
}

@Test
public void testNullFieldMetadataDebugLogging() throws NoSuchFieldException, IllegalAccessException, IOException {
public void testNullFieldMetadataDebugLogging() {
MessageType schema = parseMessageType("message test { optional binary some_null_field; }");
org.apache.parquet.hadoop.metadata.FileMetaData fileMetaData = new org.apache.parquet.hadoop.metadata.FileMetaData(schema, new HashMap<String, String>(), null);
List<BlockMetaData> blockMetaDataList = new ArrayList<BlockMetaData>();
Expand All @@ -273,6 +273,16 @@ public void testNullFieldMetadataDebugLogging() throws NoSuchFieldException, Ill
ParquetMetadata.toJSON(metadata);
}

@Test
public void testMetadataToJson() {
  // Both serializers must cope with metadata whose fields are all null.
  ParquetMetadata metadata = new ParquetMetadata(null, null);

  // Compact form: single line, no whitespace between tokens.
  String compact = ParquetMetadata.toJSON(metadata);
  assertEquals("{\"fileMetaData\":null,\"blocks\":null}", compact);

  // Pretty form: one field per line, indented, with spaced colons.
  String expectedPretty = "{\n"
      + " \"fileMetaData\" : null,\n"
      + " \"blocks\" : null\n"
      + "}";
  assertEquals(expectedPretty, ParquetMetadata.toPrettyJSON(metadata));
}

private ColumnChunkMetaData createColumnChunkMetaData() {
Set<org.apache.parquet.column.Encoding> e = new HashSet<org.apache.parquet.column.Encoding>();
PrimitiveTypeName t = PrimitiveTypeName.BINARY;
Expand Down

0 comments on commit 367fe13

Please sign in to comment.