
edu.stanford.nlp.patterns.surface.PatternsForEachTokenInMemory Maven / Gradle / Ivy


Stanford CoreNLP provides a set of natural language analysis tools that take raw English text and give the base forms of words, their parts of speech, and whether they are names of companies, people, etc.; normalize dates, times, and numeric quantities; mark up the structure of sentences in terms of phrases and word dependencies; and indicate which noun phrases refer to the same entities. It provides the foundational building blocks for higher level text understanding applications.
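For orientation, the following is a minimal sketch of how a CoreNLP pipeline is typically driven from Java, assuming the CoreNLP models jar is on the classpath; the annotator list and the sample sentence are illustrative only:

import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.pipeline.Annotation;
import edu.stanford.nlp.pipeline.StanfordCoreNLP;
import edu.stanford.nlp.util.CoreMap;

import java.util.Properties;

public class CoreNLPExample {
  public static void main(String[] args) {
    // Configure a pipeline with tokenization, sentence splitting, POS tagging,
    // lemmatization, and named entity recognition.
    Properties props = new Properties();
    props.setProperty("annotators", "tokenize,ssplit,pos,lemma,ner");
    StanfordCoreNLP pipeline = new StanfordCoreNLP(props);

    Annotation document = new Annotation("Stanford University was founded in 1885.");
    pipeline.annotate(document);

    // Print each token with its lemma, part-of-speech tag, and NER label.
    for (CoreMap sentence : document.get(CoreAnnotations.SentencesAnnotation.class)) {
      for (CoreLabel token : sentence.get(CoreAnnotations.TokensAnnotation.class)) {
        System.out.printf("%s\t%s\t%s\t%s%n",
            token.word(),
            token.get(CoreAnnotations.LemmaAnnotation.class),
            token.tag(),
            token.ner());
      }
    }
  }
}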

package edu.stanford.nlp.patterns.surface;

import edu.stanford.nlp.io.IOUtils;
import edu.stanford.nlp.patterns.Pattern;
import edu.stanford.nlp.util.ArgumentParser;
import edu.stanford.nlp.util.logging.Redwood;

import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Created by sonalg on 10/22/14.
 */
public class PatternsForEachTokenInMemory extends PatternsForEachToken {
  public static ConcurrentHashMap<String, Map<Integer, Set<Pattern>>> patternsForEachToken = null;

  public PatternsForEachTokenInMemory(Properties props, Map<String, Map<Integer, Set<Pattern>>> pats) {
    ArgumentParser.fillOptions(this, props);

    //TODO: make this atomic
    if(patternsForEachToken == null)
      patternsForEachToken = new ConcurrentHashMap<>();

    if (pats != null)
      addPatterns(pats);
  }

  public PatternsForEachTokenInMemory(Properties props)  {
    this(props, null);
  }

  @Override
  public void addPatterns(String sentId, Map<Integer, Set<Pattern>> patterns) {
    if (!patternsForEachToken.containsKey(sentId))
      patternsForEachToken.put(sentId, new ConcurrentHashMap<>());
    patternsForEachToken.get(sentId).putAll(patterns);
  }

  @Override
  public void addPatterns(Map<String, Map<Integer, Set<Pattern>>> pats) {
    for (Map.Entry<String, Map<Integer, Set<Pattern>>> en : pats.entrySet()) {
      addPatterns(en.getKey(), en.getValue());
    }
  }

  @Override
  public Map<Integer, Set<Pattern>> getPatternsForAllTokens(String sentId) {
    return (Map<Integer, Set<Pattern>>) (patternsForEachToken.containsKey(sentId) ? patternsForEachToken.get(sentId) : Collections.emptyMap());
  }

  @Override
  public void setupSearch() {
    //nothing to do
  }

//  @Override
//  public ConcurrentHashIndex readPatternIndex(String dir) throws IOException, ClassNotFoundException {
//    return IOUtils.readObjectFromFile(dir+"/patternshashindex.ser");
//  }
//
//  @Override
//  public void savePatternIndex(ConcurrentHashIndex index, String dir) throws IOException {
//    if(dir != null){
//    writePatternsIfInMemory(dir+"/allpatterns.ser");
//    IOUtils.writeObjectToFile(index, dir+"/patternshashindex.ser");
//    }
//  }

  @Override
  public Map<String, Map<Integer, Set<Pattern>>> getPatternsForAllTokens(Collection<String> sampledSentIds) {
    Map<String, Map<Integer, Set<Pattern>>> pats = new HashMap<>();
    for (String s : sampledSentIds) {
      pats.put(s, getPatternsForAllTokens(s));
    }
    return pats;
  }

  @Override
  public void close() {
    //nothing to do
  }

  @Override
  public void load(String allPatternsDir) {
    try {
      addPatterns(IOUtils.readObjectFromFile(allPatternsDir + "/allpatterns.ser"));
    } catch (IOException | ClassNotFoundException e) {
      throw new RuntimeException(e);
    }
  }

  @Override
  public boolean save(String dir) {
    try {
      IOUtils.ensureDir(new File(dir));
      String f = dir+"/allpatterns.ser";
      IOUtils.writeObjectToFile(this.patternsForEachToken, f);
      Redwood.log(Redwood.DBG, "Saving the patterns to " + f);
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    return true;
  }

  @Override
  public void createIndexIfUsingDBAndNotExists() {
    //nothing to do
    return;
  }

  public boolean containsSentId(String sentId) {
    return this.patternsForEachToken.containsKey(sentId);
  }

  @Override
  public int size() {
    return this.patternsForEachToken.size();
  }
}
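
For orientation, here is a minimal sketch of how the in-memory store above might be exercised directly. In SPIED the per-token pattern maps are produced by the pattern-generation machinery; the sentence id "sent-0", the empty pattern set, and the /tmp directory below are purely illustrative.

import edu.stanford.nlp.patterns.Pattern;
import edu.stanford.nlp.patterns.surface.PatternsForEachTokenInMemory;

import java.util.*;

public class PatternsStoreExample {
  public static void main(String[] args) {
    // An empty Properties object is enough here; fillOptions simply finds no flags to set.
    PatternsForEachTokenInMemory store = new PatternsForEachTokenInMemory(new Properties());

    // Patterns are keyed by token index within a sentence; the set would normally hold
    // surface patterns extracted around that token.
    Map<Integer, Set<Pattern>> patsForSentence = new HashMap<>();
    patsForSentence.put(0, new HashSet<>());
    store.addPatterns("sent-0", patsForSentence);

    System.out.println(store.containsSentId("sent-0"));                    // true
    System.out.println(store.size());                                      // 1 sentence stored
    System.out.println(store.getPatternsForAllTokens("sent-0").keySet());  // [0]

    // The whole map can be serialized to dir/allpatterns.ser and read back later.
    store.save("/tmp/allpatterns");
    store.load("/tmp/allpatterns");
  }
}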



