Skip to content

Commit

Permalink
Remove lame inner Counter class and use standard util one. Bit of code cleanup.
Browse files Browse the repository at this point in the history
  • Loading branch information
manning authored and Stanford NLP committed Nov 25, 2015
1 parent 8605985 commit aa7336f
Show file tree
Hide file tree
Showing 8 changed files with 24 additions and 92 deletions.
5 changes: 0 additions & 5 deletions doc/corenlp/pom-full.xml
Original file line number Diff line number Diff line change
Expand Up @@ -60,11 +60,6 @@
<artifactId>javax.json-api</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.12</version>
</dependency>
</dependencies>
<build>
<sourceDirectory>src</sourceDirectory>
Expand Down
65 changes: 6 additions & 59 deletions src/edu/stanford/nlp/dcoref/SieveCoreferenceSystem.java
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,6 @@
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.HashSet;
import java.util.TreeSet;
import java.util.logging.FileHandler;
import java.util.logging.Level;
Expand All @@ -62,7 +61,6 @@
import edu.stanford.nlp.dcoref.ScorerBCubed.BCubedType;
import edu.stanford.nlp.dcoref.sievepasses.DeterministicCorefSieve;
import edu.stanford.nlp.dcoref.sievepasses.ExactStringMatch;
import edu.stanford.nlp.hcoref.data.*;
import edu.stanford.nlp.io.IOUtils;
import edu.stanford.nlp.io.RuntimeIOException;
import edu.stanford.nlp.io.StringOutputStream;
Expand All @@ -72,7 +70,12 @@
import edu.stanford.nlp.stats.ClassicCounter;
import edu.stanford.nlp.stats.Counter;
import edu.stanford.nlp.trees.Tree;
import edu.stanford.nlp.util.*;
import edu.stanford.nlp.util.CoreMap;
import edu.stanford.nlp.util.Generics;
import edu.stanford.nlp.util.IntTuple;
import edu.stanford.nlp.util.Pair;
import edu.stanford.nlp.util.StringUtils;
import edu.stanford.nlp.util.SystemUtils;
import edu.stanford.nlp.util.logging.NewlineLogFormatter;


Expand Down Expand Up @@ -854,62 +857,6 @@ public Map<Integer, CorefChain> coref(Document document) throws Exception {
return result;
}

/**
 * Runs the multi-pass sieve coreference system on {@code document} and returns the
 * resulting chains converted to the hybrid (hcoref) representation.
 *
 * @param document the dcoref Document to resolve; its corefClusters are populated as a side effect
 * @return map from cluster id to the hcoref-format CorefChain for that cluster
 * @throws Exception if any sieve pass fails
 */
public Map<Integer, edu.stanford.nlp.hcoref.data.CorefChain> corefReturnHybridOutput(Document document) throws Exception {

  // Apply each deterministic sieve pass in order; currentSieve tracks the active pass.
  for (int pass = 0; pass < sieves.length; pass++) {
    currentSieve = pass;
    coreference(document, sieves[pass]);
  }

  // Post-processing (e.g., removing singletons, appositions for CoNLL output).
  if ((!Constants.USE_GOLD_MENTIONS && doPostProcessing) || replicateCoNLL) {
    postProcessing(document);
  }

  // Convert every dcoref cluster into an hcoref CorefChain.
  Map<Integer, edu.stanford.nlp.hcoref.data.CorefChain> chains = Generics.newHashMap();

  for (CorefCluster cluster : document.corefClusters.values()) {
    // All mentions are filed under a single dummy (0,0) position key,
    // mirroring the shape the hcoref CorefChain constructor expects.
    IntPair dummyKey = new IntPair(0, 0);
    Set<edu.stanford.nlp.hcoref.data.CorefChain.CorefMention> mentionSet = new HashSet<>();
    Map<IntPair, Set<edu.stanford.nlp.hcoref.data.CorefChain.CorefMention>> mentionsByPosition = Generics.newHashMap();
    mentionsByPosition.put(dummyKey, mentionSet);

    // Track the most representative mention (and its hybrid twin) for the chain head.
    Mention best = null;
    edu.stanford.nlp.hcoref.data.CorefChain.CorefMention bestHybrid = null;

    for (Mention m : cluster.getCorefMentions()) {
      // Bridge dcoref -> hcoref: first wrap as a dcoref CorefMention, then copy
      // its fields into the hcoref CorefMention, mapping enums by name.
      IntTuple where = document.positions.get(m);
      CorefMention dm = new CorefMention(m, where);
      edu.stanford.nlp.hcoref.data.CorefChain.CorefMention hm =
          new edu.stanford.nlp.hcoref.data.CorefChain.CorefMention(
              edu.stanford.nlp.hcoref.data.Dictionaries.MentionType.valueOf(dm.mentionType.name()),
              edu.stanford.nlp.hcoref.data.Dictionaries.Number.valueOf(dm.number.name()),
              edu.stanford.nlp.hcoref.data.Dictionaries.Gender.valueOf(dm.gender.name()),
              edu.stanford.nlp.hcoref.data.Dictionaries.Animacy.valueOf(dm.animacy.name()),
              dm.startIndex,
              dm.endIndex,
              dm.headIndex,
              dm.corefClusterID,
              dm.mentionID,
              dm.sentNum,
              dm.position,
              dm.mentionSpan);
      mentionSet.add(hm);
      if (m.moreRepresentativeThan(best)) {
        best = m;
        bestHybrid = hm;
      }
    }

    chains.put(cluster.clusterID,
        new edu.stanford.nlp.hcoref.data.CorefChain(cluster.clusterID, mentionsByPosition, bestHybrid));
  }

  return chains;
}

/**
* Do coreference resolution using one sieve pass.
*
Expand Down
14 changes: 7 additions & 7 deletions src/edu/stanford/nlp/hcoref/CorefDocMaker.java
Original file line number Diff line number Diff line change
Expand Up @@ -162,21 +162,21 @@ public Document makeDocument(Annotation anno) throws Exception {
public Document makeDocument(InputDoc input) throws Exception {
if (input == null) return null;
Annotation anno = input.annotation;

if (Boolean.parseBoolean(props.getProperty("coref.useMarkedDiscourse", "false"))) {
anno.set(CoreAnnotations.UseMarkedDiscourseAnnotation.class, true);
}

// add missing annotation
if (addMissingAnnotations) {
addMissingAnnotation(anno);
}

if (Boolean.parseBoolean(props.getProperty("hcoref.useMarkedDiscourse", "false"))) {
anno.set(CoreAnnotations.UseMarkedDiscourseAnnotation.class, true);
}

// remove nested NP with same headword except newswire document for chinese

//if(input.conllDoc != null && CorefProperties.getLanguage(props)==Locale.CHINESE){
//CorefProperties.setRemoveNested(props, !input.conllDoc.documentID.contains("nw"));
//}
if(input.conllDoc != null && CorefProperties.getLanguage(props)==Locale.CHINESE){
CorefProperties.setRemoveNested(props, !input.conllDoc.documentID.contains("nw"));
}

// each sentence should have a CorefCoreAnnotations.CorefMentionsAnnotation.class which maps to List<Mention>
// this is set by the mentions annotator
Expand Down
2 changes: 1 addition & 1 deletion src/edu/stanford/nlp/naturalli/OpenIEDemo.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ public class OpenIEDemo {
public static void main(String[] args) throws Exception {
// Create the Stanford CoreNLP pipeline
Properties props = new Properties();
props.setProperty("annotators", "tokenize,ssplit,pos,lemma,depparse,natlog,openie");
props.setProperty("annotators", "tokenize,ssplit,pos,depparse,natlog,openie");
StanfordCoreNLP pipeline = new StanfordCoreNLP(props);

// Annotate an example document.
Expand Down
17 changes: 3 additions & 14 deletions src/edu/stanford/nlp/pipeline/DeterministicCorefAnnotator.java
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@
import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.dcoref.CorefCoreAnnotations;
import edu.stanford.nlp.hcoref.*;
import edu.stanford.nlp.hcoref.data.*;
import edu.stanford.nlp.semgraph.SemanticGraph;
import edu.stanford.nlp.semgraph.SemanticGraphCoreAnnotations;
import edu.stanford.nlp.semgraph.SemanticGraphFactory;
Expand Down Expand Up @@ -54,15 +52,12 @@ public class DeterministicCorefAnnotator implements Annotator {

private final boolean allowReparsing;

private final boolean outputHybridVersion;

public DeterministicCorefAnnotator(Properties props) {
try {
corefSystem = new SieveCoreferenceSystem(props);
mentionExtractor = new MentionExtractor(corefSystem.dictionaries(), corefSystem.semantics());
OLD_FORMAT = Boolean.parseBoolean(props.getProperty("oldCorefFormat", "false"));
allowReparsing = PropertiesUtils.getBool(props, Constants.ALLOW_REPARSING_PROP, Constants.ALLOW_REPARSING);
outputHybridVersion = Boolean.parseBoolean(props.getProperty("dcoref.outputHybridVersion", "false"));
} catch (Exception e) {
System.err.println("ERROR: cannot create DeterministicCorefAnnotator!");
e.printStackTrace();
Expand Down Expand Up @@ -131,17 +126,11 @@ public void annotate(Annotation annotation) {
}
}

if (outputHybridVersion) {
Map<Integer, edu.stanford.nlp.hcoref.data.CorefChain> result = corefSystem.corefReturnHybridOutput(document);
annotation.set(edu.stanford.nlp.hcoref.CorefCoreAnnotations.CorefChainAnnotation.class, result);
} else {
Map<Integer, CorefChain> result = corefSystem.coref(document);
annotation.set(CorefCoreAnnotations.CorefChainAnnotation.class, result);
}
Map<Integer, CorefChain> result = corefSystem.coref(document);
annotation.set(CorefCoreAnnotations.CorefChainAnnotation.class, result);

if(OLD_FORMAT) {
Map<Integer, CorefChain> oldResult = corefSystem.coref(document);
addObsoleteCoreferenceAnnotations(annotation, orderedMentions, oldResult);
addObsoleteCoreferenceAnnotations(annotation, orderedMentions, result);
}
} catch (RuntimeException e) {
throw e;
Expand Down
5 changes: 3 additions & 2 deletions src/edu/stanford/nlp/pipeline/JSONOutputter.java
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
package edu.stanford.nlp.pipeline;

import edu.stanford.nlp.hcoref.data.CorefChain;
import edu.stanford.nlp.hcoref.CorefCoreAnnotations;
import edu.stanford.nlp.dcoref.CorefChain;
import edu.stanford.nlp.dcoref.CorefCoreAnnotations;


import edu.stanford.nlp.ie.machinereading.structure.Span;
import edu.stanford.nlp.ie.util.RelationTriple;
Expand Down
4 changes: 2 additions & 2 deletions src/edu/stanford/nlp/pipeline/TextOutputter.java
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@
import java.util.List;
import java.util.Map;

import edu.stanford.nlp.hcoref.data.CorefChain;
import edu.stanford.nlp.hcoref.CorefCoreAnnotations;
import edu.stanford.nlp.dcoref.CorefChain;
import edu.stanford.nlp.dcoref.CorefCoreAnnotations;
import edu.stanford.nlp.ie.machinereading.structure.EntityMention;
import edu.stanford.nlp.ie.machinereading.structure.MachineReadingAnnotations;
import edu.stanford.nlp.ie.machinereading.structure.RelationMention;
Expand Down
4 changes: 2 additions & 2 deletions src/edu/stanford/nlp/pipeline/XMLOutputter.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
import java.util.List;
import java.util.Map;

import edu.stanford.nlp.hcoref.data.CorefChain;
import edu.stanford.nlp.hcoref.CorefCoreAnnotations;
import edu.stanford.nlp.dcoref.CorefChain;
import edu.stanford.nlp.dcoref.CorefCoreAnnotations;
import edu.stanford.nlp.ie.machinereading.structure.EntityMention;
import edu.stanford.nlp.ie.machinereading.structure.ExtractionObject;
import edu.stanford.nlp.ie.machinereading.structure.MachineReadingAnnotations;
Expand Down

0 comments on commit aa7336f

Please sign in to comment.