add server_id
J38 authored and Stanford NLP committed Jan 17, 2017
1 parent df09e11 commit 1fbac4c
Showing 1 changed file with 48 additions and 16 deletions.
src/edu/stanford/nlp/pipeline/StanfordCoreNLPServer.java (48 additions, 16 deletions)
@@ -50,6 +50,8 @@
public class StanfordCoreNLPServer implements Runnable {

protected HttpServer server;
@ArgumentParser.Option(name="server_id", gloss="a name for this server")
protected String serverID = null; // currently not used
@ArgumentParser.Option(name="port", gloss="The port to run the server on")
protected int serverPort = 9000;
@ArgumentParser.Option(name="status_port", gloss="The port to serve the status check endpoints on. If different from the server port, this will run in a separate thread.")
@@ -94,27 +96,49 @@ public class StanfordCoreNLPServer implements Runnable {
*/
private final ExecutorService corenlpExecutor;


/**
* Create a new Stanford CoreNLP Server.
* @param props Properties for configuring the server (e.g., server_id)
* @param port The port to host the server from.
* @param timeout The timeout (in milliseconds) for each command.
* @param strict If true, conform more strictly to the HTTP spec (e.g., for character encoding).
* @throws IOException Thrown from the underlying socket implementation.
*/
public StanfordCoreNLPServer(int port, int timeout, boolean strict) throws IOException {
this();
public StanfordCoreNLPServer(Properties props, int port, int timeout, boolean strict) throws IOException {
this(props);
this.serverPort = port;
this.timeoutMilliseconds = timeout;
this.strict = strict;
}

/**
* Create a new Stanford CoreNLP Server.
* @param port The port to host the server from.
* @param timeout The timeout (in milliseconds) for each command.
* @param strict If true, conform more strictly to the HTTP spec (e.g., for character encoding).
* @throws IOException Thrown from the underlying socket implementation.
*/
public StanfordCoreNLPServer(int port, int timeout, boolean strict) throws IOException {
this(null, port, timeout, strict);
}

/**
* Create a new Stanford CoreNLP Server, with the default parameters
*
* @throws IOException Thrown if we could not write the shutdown key to a file.
*/
public StanfordCoreNLPServer() throws IOException {
this(null);
}

/**
* Create a new Stanford CoreNLP Server with the default parameters,
* configured by the passed-in properties (e.g., server_id).
*
* @throws IOException Thrown if we could not write the shutdown key to a file.
*/
public StanfordCoreNLPServer(Properties props) throws IOException {
// check if englishSR.ser.gz can be found (standard models jar doesn't have this)
String defaultParserPath;
ClassLoader classLoader = getClass().getClassLoader();
@@ -132,16 +156,16 @@ public StanfordCoreNLPServer() throws IOException {
log("https://stanfordnlp.github.io/CoreNLP/download.html");
}
this.defaultProps = PropertiesUtils.asProperties(
"annotators", defaultAnnotators, // Run these annotators by default
"mention.type", "dep", // Use dependency trees with coref by default
"coref.mode", "statistical", // Use the new coref
"coref.language", "en", // We're English by default
"inputFormat", "text", // By default, treat the POST data like text
"outputFormat", "json", // By default, return in JSON -- this is a server, after all.
"prettyPrint", "false", // Don't bother pretty-printing
"parse.model", defaultParserPath, // SR scales linearly with sentence length. Good for a server!
"parse.binaryTrees", "true", // needed for the Sentiment annotator
"openie.strip_entailments", "true"); // these are large to serialize, so ignore them
"annotators", defaultAnnotators, // Run these annotators by default
"mention.type", "dep", // Use dependency trees with coref by default
"coref.mode", "statistical", // Use the new coref
"coref.language", "en", // We're English by default
"inputFormat", "text", // By default, treat the POST data like text
"outputFormat", "json", // By default, return in JSON -- this is a server, after all.
"prettyPrint", "false", // Don't bother pretty-printing
"parse.model", defaultParserPath, // SR scales linearly with sentence length. Good for a server!
"parse.binaryTrees", "true", // needed for the Sentiment annotator
"openie.strip_entailments", "true"); // these are large to serialize, so ignore them

// overwrite all default properties with provided server properties
// for instance you might want to provide a default ner model
@@ -153,9 +177,15 @@ public StanfordCoreNLPServer() throws IOException {
this.serverExecutor = Executors.newFixedThreadPool(ArgumentParser.threads);
this.corenlpExecutor = Executors.newFixedThreadPool(ArgumentParser.threads);

// Generate and write a shutdown key
// Generate and write a shutdown key, using the optional server_id from the passed-in properties;
// this way, if multiple servers are running, each can be shut down via its own key file
String shutdownKeyFileName;
if (props != null && props.getProperty("server_id") != null)
shutdownKeyFileName = "corenlp.shutdown."+props.getProperty("server_id");
else
shutdownKeyFileName = "corenlp.shutdown";
String tmpDir = System.getProperty("java.io.tmpdir");
File tmpFile = new File(tmpDir + File.separator + "corenlp.shutdown");
File tmpFile = new File(tmpDir + File.separator + shutdownKeyFileName);
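// e.g., <java.io.tmpdir>/corenlp.shutdown.nlp1 when server_id=nlp1 is given (illustrative id),
// or <java.io.tmpdir>/corenlp.shutdown otherwise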
tmpFile.deleteOnExit();
if (tmpFile.exists()) {
if (!tmpFile.delete()) {
@@ -1247,7 +1277,9 @@ public static void main(String[] args) throws IOException {

// Fill arguments
ArgumentParser.fillOptions(StanfordCoreNLPServer.class, args);
StanfordCoreNLPServer server = new StanfordCoreNLPServer(); // must come after filling global options
// get server properties from the command line; currently the only property used is server_id
Properties serverProperties = StringUtils.argsToProperties(args);
StanfordCoreNLPServer server = new StanfordCoreNLPServer(serverProperties); // must come after filling global options
ArgumentParser.fillOptions(server, args);
log(" Threads: " + ArgumentParser.threads);

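Usage sketch (not part of this commit): a minimal example of how the new Properties-based constructor might be called, assuming the default port 9000, an illustrative timeout of 15000 ms, and the hypothetical server id "nlp1":

  Properties props = new Properties();
  props.setProperty("server_id", "nlp1");                   // names this server's shutdown key file
  StanfordCoreNLPServer server =
      new StanfordCoreNLPServer(props, 9000, 15000, false); // port, timeout (ms), strict
  new Thread(server).start();                               // StanfordCoreNLPServer implements Runnable;
                                                            // starting it this way is an assumption of this sketch

From the command line, the same effect should presumably come from passing -server_id nlp1 along with the usual server arguments, since main() converts the argument list to Properties via StringUtils.argsToProperties.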
