Skip to content

Commit

Permalink
Merge pull request mongodb#41 from rjurney/master
Browse files Browse the repository at this point in the history
Removed excessive logging for each record written to MongoDB
  • Loading branch information
Brendan W. McAdams committed Apr 3, 2012
2 parents 3ba6296 + 67c3444 commit 4c61394
Showing 1 changed file with 0 additions and 10 deletions.
10 changes: 0 additions & 10 deletions pig/src/main/java/com/mongodb/hadoop/pig/MongoStorage.java
Original file line number Diff line number Diff line change
Expand Up @@ -62,12 +62,10 @@ public void storeStatistics( ResourceStatistics stats, String location, Job job
public void putNext( Tuple tuple ) throws IOException{
final Configuration config = _recordWriter.getContext().getConfiguration();
final List<String> schema = Arrays.asList( config.get( PIG_OUTPUT_SCHEMA ).split( "," ) );
log.info( "Stored Schema: " + schema );
final BasicDBObjectBuilder builder = BasicDBObjectBuilder.start();

ResourceFieldSchema[] fields = this.schema.getFields();
for (int i = 0; i < fields.length; i++) {
log.info( "I: " + i + " tuple: " + tuple );
writeField(builder, fields[i], tuple.get(i));
}
_recordWriter.write( null, builder.get() );
Expand Down Expand Up @@ -113,7 +111,6 @@ private void writeField(BasicDBObjectBuilder builder,

// Given a TUPLE, create a Map so BSONEncoder will eat it
case DataType.TUPLE:
log.info( "In TUPLE!" );
if (s == null) {
throw new IOException("Schemas must be fully specified to use "
+ "this storage function. No schema found for field " +
Expand All @@ -129,7 +126,6 @@ private void writeField(BasicDBObjectBuilder builder,

// Given a BAG, create an Array so BSONEncoder will eat it.
case DataType.BAG:
log.info( "In BAG!" );
if (s == null) {
throw new IOException("Schemas must be fully specified to use "
+ "this storage function. No schema found for field " +
Expand Down Expand Up @@ -163,7 +159,6 @@ private void writeField(BasicDBObjectBuilder builder,
}
}


public void prepareToWrite( RecordWriter writer ) throws IOException{

_recordWriter = (MongoRecordWriter) writer;
Expand All @@ -190,21 +185,18 @@ public void prepareToWrite( RecordWriter writer ) throws IOException{
}
}


public OutputFormat getOutputFormat() throws IOException{
    // Hand Pig a fresh Mongo-backed OutputFormat; one is created per call.
    final MongoOutputFormat fmt = new MongoOutputFormat();
    log.info( "OutputFormat... " + fmt );
    return fmt;
}


public String relToAbsPathForStoreLocation( String location, org.apache.hadoop.fs.Path curDir ) throws IOException{
    // Deliberately a no-op override: the base-class implementation would
    // rewrite the location relative to curDir, mangling the MongoDB URI.
    final String msg = "Converting path: " + location + "(curDir: " + curDir + ")";
    log.info( msg );
    return location;
}


public void setStoreLocation( String location, Job job ) throws IOException{
final Configuration config = job.getConfiguration();
log.info( "Store Location Config: " + config + " For URI: " + location );
Expand All @@ -217,12 +209,10 @@ public void setStoreLocation( String location, Job job ) throws IOException{
config.set( PIG_OUTPUT_SCHEMA, properties.getProperty( PIG_OUTPUT_SCHEMA_UDF_CONTEXT ) );
}


public void setStoreFuncUDFContextSignature( String signature ){
    // Remember the signature Pig assigns this store instance so other
    // methods can look up per-instance UDF context properties under it.
    this._udfContextSignature = signature;
}

String _udfContextSignature = null;
MongoRecordWriter _recordWriter = null;

}

0 comments on commit 4c61394

Please sign in to comment.