List of usage examples for java.util.StringTokenizer.nextToken()
public String nextToken()
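Before the project examples, here is a minimal, self-contained sketch of the basic nextToken() contract (the class name and input string are invented for illustration): each call returns the next token, so it is normally guarded by hasMoreTokens(), because calling it with no tokens left throws NoSuchElementException.

import java.util.StringTokenizer;

public class NextTokenDemo {
    public static void main(String[] args) {
        // Split on commas; the default delimiter set would be whitespace.
        StringTokenizer st = new StringTokenizer("alpha,beta,gamma", ",");
        while (st.hasMoreTokens()) {
            // nextToken() returns the next token, or throws
            // NoSuchElementException when no tokens remain.
            String token = st.nextToken();
            System.out.println(token);
        }
    }
}

Run as-is, this prints alpha, beta and gamma on separate lines. The examples below show the same hasMoreTokens()/nextToken() loop in real projects.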
From source file:com.liusoft.dlog4j.search.SearchProxy.java
/**
 * @param params
 * @return
 * @throws Exception
 */
public static List search(SearchParameter params) throws Exception {
    if (params == null)
        return null;
    SearchEnabled searching = (SearchEnabled) params.getSearchObject().newInstance();
    StringBuffer path = new StringBuffer(_baseIndexPath);
    path.append(searching.name());
    File f = new File(path.toString());
    if (!f.exists())
        return null;
    IndexSearcher searcher = new IndexSearcher(path.toString());
    BooleanQuery comboQuery = new BooleanQuery();
    int _query_count = 0;
    StringTokenizer st = new StringTokenizer(params.getSearchKey());
    while (st.hasMoreElements()) {
        String q = st.nextToken();
        String[] indexFields = searching.getIndexFields();
        for (int i = 0; i < indexFields.length; i++) {
            QueryParser qp = new QueryParser(indexFields[i], analyzer);
            try {
                Query subjectQuery = qp.parse(q);
                comboQuery.add(subjectQuery, BooleanClause.Occur.SHOULD);
                _query_count++;
            } catch (Exception e) {
                log.error("Add query parameter failed. key=" + q, e);
            }
        }
    }
    if (_query_count == 0)
        return null;
    MultiFilter multiFilter = null;
    HashMap conds = params.getConditions();
    if (conds != null) {
        Iterator keys = conds.keySet().iterator();
        while (keys.hasNext()) {
            if (multiFilter == null)
                multiFilter = new MultiFilter(0);
            String key = (String) keys.next();
            multiFilter.add(new FieldFilter(key, conds.get(key).toString()));
        }
    }
    /*
     * Creates a sort, possibly in reverse,
     * by terms in the given field with the type of term values explicitly given.
     */
    SortField[] s_fields = new SortField[2];
    s_fields[0] = SortField.FIELD_SCORE;
    s_fields[1] = new SortField(searching.getKeywordField(), SortField.INT, true);
    Sort sort = new Sort(s_fields);
    Hits hits = searcher.search(comboQuery, multiFilter, sort);
    int numResults = hits.length();
    //System.out.println(numResults + " found............................");
    int result_count = Math.min(numResults, MAX_RESULT_COUNT);
    List results = new ArrayList(result_count);
    for (int i = 0; i < result_count; i++) {
        Document doc = (Document) hits.doc(i);
        Object result = params.getSearchObject().newInstance();
        Enumeration fields = doc.fields();
        while (fields.hasMoreElements()) {
            Field field = (Field) fields.nextElement();
            //System.out.println(field.name()+" -- "+field.stringValue());
            if (CLASSNAME_FIELD.equals(field.name()))
                continue;
            if (!field.isStored())
                continue;
            //System.out.println("=========== begin to mapping ============");
            // String --> anything
            Class fieldType = getNestedPropertyType(result, field.name());
            //System.out.println(field.name()+", class = " + fieldType.getName());
            Object fieldValue = null;
            if (fieldType.equals(Date.class))
                fieldValue = new Date(Long.parseLong(field.stringValue()));
            else
                fieldValue = ConvertUtils.convert(field.stringValue(), fieldType);
            //System.out.println(fieldValue+", class = " + fieldValue.getClass().getName());
            setNestedProperty(result, field.name(), fieldValue);
        }
        results.add(result);
    }
    return results;
}
From source file:amqp.spring.camel.component.SpringAMQPConsumer.java
protected static Map<String, Object> parseKeyValues(String routingKey) {
    StringTokenizer tokenizer = new StringTokenizer(routingKey, "&|");
    Map<String, Object> pairs = new HashMap<String, Object>();
    while (tokenizer.hasMoreTokens()) {
        String token = tokenizer.nextToken();
        String[] keyValue = token.split("=");
        if (keyValue.length != 2)
            throw new IllegalArgumentException(
                    "Couldn't parse key/value pair [" + token + "] out of string: " + routingKey);
        pairs.put(keyValue[0], keyValue[1]);
    }
    return pairs;
}
From source file:edu.stanford.muse.index.NEROld.java
public static void readLocationNamesToSuppress() {
    String suppress_file = "suppress.locations.txt.gz";
    try {
        InputStream is = new GZIPInputStream(NER.class.getClassLoader().getResourceAsStream(suppress_file));
        LineNumberReader lnr = new LineNumberReader(new InputStreamReader(is, "UTF-8"));
        while (true) {
            String line = lnr.readLine();
            if (line == null)
                break;
            StringTokenizer st = new StringTokenizer(line);
            if (st.hasMoreTokens()) {
                String s = st.nextToken();
                if (!s.startsWith("#"))
                    locationsToSuppress.add(s.toLowerCase());
            }
        }
    } catch (Exception e) {
        log.warn("Error: unable to read " + suppress_file);
        Util.print_exception(e);
    }
    log.info(locationsToSuppress.size() + " names to suppress as locations");
}
From source file:net.pms.encoders.AviSynthFFmpeg.java
public static File getAVSScript(String filename, DLNAMediaSubtitle subTrack, int fromFrame, int toFrame,
        String frameRateRatio, String frameRateNumber, PmsConfiguration configuration) throws IOException {
    String onlyFileName = filename.substring(1 + filename.lastIndexOf('\\'));
    File file = new File(configuration.getTempFolder(), "pms-avs-" + onlyFileName + ".avs");
    try (PrintWriter pw = new PrintWriter(new FileOutputStream(file))) {
        String numerator;
        String denominator;
        if (frameRateRatio != null && frameRateNumber != null) {
            if (frameRateRatio.equals(frameRateNumber)) {
                // No ratio was available
                numerator = frameRateRatio;
                denominator = "1";
            } else {
                String[] frameRateNumDen = frameRateRatio.split("/");
                numerator = frameRateNumDen[0];
                denominator = "1001";
            }
        } else {
            // No framerate was given so we should try the most common one
            numerator = "24000";
            denominator = "1001";
            frameRateNumber = "23.976";
        }
        String assumeFPS = ".AssumeFPS(" + numerator + "," + denominator + ")";
        String directShowFPS = "";
        if (!"0".equals(frameRateNumber)) {
            directShowFPS = ", fps=" + frameRateNumber;
        }
        String convertfps = "";
        if (configuration.getFfmpegAvisynthConvertFps()) {
            convertfps = ", convertfps=true";
        }
        File f = new File(filename);
        if (f.exists()) {
            filename = ProcessUtil.getShortFileNameIfWideChars(filename);
        }
        String movieLine = "DirectShowSource(\"" + filename + "\"" + directShowFPS + convertfps + ")" + assumeFPS;
        String mtLine1 = "";
        String mtLine2 = "";
        String interframeLines = null;
        String interframePath = configuration.getInterFramePath();
        int Cores = 1;
        if (configuration.isFfmpegAviSynthMultithreading()) {
            Cores = configuration.getNumberOfCpuCores();
            // Goes at the start of the file to initiate multithreading
            mtLine1 = "SetMemoryMax(512)\nSetMTMode(3," + Cores + ")\n";
            // Goes after the input line to make multithreading more efficient
            mtLine2 = "SetMTMode(2)";
        }
        // True Motion
        if (configuration.getFfmpegAvisynthInterFrame()) {
            String GPU = "";
            movieLine += ".ConvertToYV12()";
            // Enable GPU to assist with CPU
            if (configuration.getFfmpegAvisynthInterFrameGPU() && interframegpu.isEnabled()) {
                GPU = ", GPU=true";
            }
            interframeLines = "\n" + "PluginPath = \"" + interframePath + "\"\n"
                    + "LoadPlugin(PluginPath+\"svpflow1.dll\")\n"
                    + "LoadPlugin(PluginPath+\"svpflow2.dll\")\n"
                    + "Import(PluginPath+\"InterFrame2.avsi\")\n"
                    + "InterFrame(Cores=" + Cores + GPU + ", Preset=\"Faster\")\n";
        }
        String subLine = null;
        if (subTrack != null && configuration.isAutoloadExternalSubtitles() && !configuration.isDisableSubtitles()) {
            if (subTrack.getExternalFile() != null) {
                LOGGER.info("AviSynth script: Using subtitle track: " + subTrack);
                String function = "TextSub";
                if (subTrack.getType() == SubtitleType.VOBSUB) {
                    function = "VobSub";
                }
                subLine = function + "(\""
                        + ProcessUtil.getShortFileNameIfWideChars(subTrack.getExternalFile().getAbsolutePath())
                        + "\")";
            }
        }
        ArrayList<String> lines = new ArrayList<>();
        lines.add(mtLine1);
        boolean fullyManaged = false;
        String script = "<movie>\n<sub>\n";
        StringTokenizer st = new StringTokenizer(script, PMS.AVS_SEPARATOR);
        while (st.hasMoreTokens()) {
            String line = st.nextToken();
            if (line.contains("<movie") || line.contains("<sub")) {
                fullyManaged = true;
            }
            lines.add(line);
        }
        if (configuration.getFfmpegAvisynthInterFrame()) {
            lines.add(mtLine2);
            lines.add(interframeLines);
        }
        if (fullyManaged) {
            for (String s : lines) {
                if (s.contains("<moviefilename>")) {
                    s = s.replace("<moviefilename>", filename);
                }
                s = s.replace("<movie>", movieLine);
                s = s.replace("<sub>", subLine != null ? subLine : "#");
                pw.println(s);
            }
        } else {
            pw.println(movieLine);
            if (subLine != null) {
                pw.println(subLine);
            }
            pw.println("clip");
        }
    }
    file.deleteOnExit();
    return file;
}
From source file:com.cisco.dvbu.ps.common.scriptutil.ScriptUtil.java
/**
 * Create a command file for UNIX or Windows from name value pairs strings constructed based on passed in xml content
 * @param xmlFilePath xml File Path
 * @param nodeName String return name values for passed in node name and node value. * is treated as all nodes
 * @param nodeValue String return name values for passed in node name and node value. * is treated as all node values
 * @param options additional options to return the node name or attributes such as "-noid -noattributes"
 * @param commandOutputFile The fully qualified path of the command output file
 * @param commandHeader The command file header such as #!/bin/bash
 * @param commandType The type of command file [UNIX|WINDOWS] (use export for UNIX, use set for WINDOWS)
 *        UNIX:    export name="value"
 *        WINDOWS: set name=value
 *
 * usage createCommandFileFromXML xmlFilePath * * "-noid -noattributes" /Users/rthummal/mywork/clients/ps/CisDeployTool/resources/abc.sh #!/bin/bash UNIX
 * usage createCommandFileFromXML xmlFilePath hostname localhost "" C:\opt\pdtool/abc.bat "echo off" WINDOWS
 * usage createCommandFileFromXML xmlFilePath hostname * "-noid" /opt/pdtool/abc.sh #!/bin/bash UNIX
 */
public static void createCommandFileFromXML(String xmlFilePath, String nodeName, String nodeValue,
        String options, String commandOutputFile, String commandHeader, String commandType) {
    boolean win = false;
    String cmdPrefix = "export ";
    if (commandType != null && commandType.equals("WINDOWS")) {
        win = true;
        cmdPrefix = "set ";
    }
    String nameValuePairs = XMLUtils.getNameValuePairsFromXML(xmlFilePath, null, null, nodeName, nodeValue,
            options);
    if (nameValuePairs != null) {
        StringBuffer sb = new StringBuffer();
        sb.append(commandHeader + "\n");
        StringTokenizer st = new StringTokenizer(nameValuePairs, "|");
        while (st.hasMoreTokens()) {
            sb.append(cmdPrefix);
            String nameValuePair = st.nextToken();
            if (!win) {
                nameValuePair = nameValuePair.replace("=", "=\"");
                nameValuePair += "\"";
            }
            sb.append(nameValuePair + "\n");
        }
        try {
            Writer out = new OutputStreamWriter(new FileOutputStream(commandOutputFile));
            out.write(sb.toString());
            out.flush();
        } catch (FileNotFoundException e) {
            logger.error("Could not write to command file " + commandOutputFile, e);
            throw new ValidationException(e.getMessage(), e);
        } catch (IOException e) {
            logger.error("Could not write to command file " + commandOutputFile, e);
            throw new ValidationException(e.getMessage(), e);
        }
    }
}
From source file:keel.Algorithms.Neural_Networks.NNEP_Clas.KEELWrapperClas.java
/**
 * <p>
 * Configure the execution of the algorithm.
 *
 * @param jobFilename Name of the KEEL file with properties of the
 * execution
 * </p>
 */
@SuppressWarnings("unchecked")
private static void configureJob(String jobFilename) {
    Properties props = new Properties();
    try {
        InputStream paramsFile = new FileInputStream(jobFilename);
        props.load(paramsFile);
        paramsFile.close();
    } catch (IOException ioe) {
        ioe.printStackTrace();
        System.exit(0);
    }

    // Files training and test
    String trainFile;
    String testFile;
    StringTokenizer tokenizer = new StringTokenizer(props.getProperty("inputData"));
    tokenizer.nextToken();
    trainFile = tokenizer.nextToken();
    trainFile = trainFile.substring(1, trainFile.length() - 1);
    testFile = tokenizer.nextToken();
    testFile = testFile.substring(1, testFile.length() - 1);

    // Classification or Regression ??
    byte[] schema = null;
    try {
        schema = readSchema(trainFile);
    } catch (IOException e) {
        e.printStackTrace();
    } catch (DatasetException e) {
        e.printStackTrace();
    }

    // Auxiliary algorithm configuration
    XMLConfiguration algConf = new XMLConfiguration();
    algConf.setRootElementName("algorithm");
    algConf.addProperty("population-size", 1000);
    algConf.addProperty("max-of-generations", Integer.parseInt(props.getProperty("Generations")));
    algConf.addProperty("creation-ratio", 10.0);
    algConf.addProperty("percentage-second-mutator", 10);
    algConf.addProperty("max-generations-without-improving-mean", 20);
    algConf.addProperty("max-generations-without-improving-best", 20);
    algConf.addProperty("fitness-difference", 0.0000001);
    algConf.addProperty("species[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.NeuralNetIndividualSpecies");
    algConf.addProperty("species.neural-net-type",
            "keel.Algorithms.Neural_Networks.NNEP_Clas.neuralnet.NeuralNetClassifier");
    if (props.getProperty("Transfer").equals("Product_Unit")) {
        algConf.addProperty("species.hidden-layer[@type]",
                "keel.Algorithms.Neural_Networks.NNEP_Common.neuralnet.ExpLayer");
        algConf.addProperty("species.hidden-layer[@biased]", false);
        algConf.addProperty("evaluator[@log-input-data]", true);
    } else {
        algConf.addProperty("species.hidden-layer[@type]",
                "keel.Algorithms.Neural_Networks.NNEP_Common.neuralnet.SigmLayer");
        algConf.addProperty("species.hidden-layer[@biased]", true);
    }
    int neurons = Integer.parseInt(props.getProperty("Hidden_nodes"));
    algConf.addProperty("species.hidden-layer.minimum-number-of-neurons",
            (neurons / 3) != 0 ? (neurons / 3) : 1);
    algConf.addProperty("species.hidden-layer.initial-maximum-number-of-neurons",
            (neurons / 2) != 0 ? (neurons / 2) : 1);
    algConf.addProperty("species.hidden-layer.maximum-number-of-neurons", neurons);
    algConf.addProperty("species.hidden-layer.initiator-of-links",
            "keel.Algorithms.Neural_Networks.NNEP_Common.initiators.RandomInitiator");
    algConf.addProperty("species.hidden-layer.weight-range[@type]", "net.sf.jclec.util.range.Interval");
    algConf.addProperty("species.hidden-layer.weight-range[@closure]", "closed-closed");
    algConf.addProperty("species.hidden-layer.weight-range[@left]", -5.0);
    algConf.addProperty("species.hidden-layer.weight-range[@right]", 5.0);
    algConf.addProperty("species.output-layer[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.neuralnet.LinearLayer");
    algConf.addProperty("species.output-layer[@biased]", true);
    algConf.addProperty("species.output-layer.initiator-of-links",
            "keel.Algorithms.Neural_Networks.NNEP_Common.initiators.RandomInitiator");
    algConf.addProperty("species.output-layer.weight-range[@type]", "net.sf.jclec.util.range.Interval");
    algConf.addProperty("species.output-layer.weight-range[@closure]", "closed-closed");
    algConf.addProperty("species.output-layer.weight-range[@left]", -5.0);
    algConf.addProperty("species.output-layer.weight-range[@right]", 5.0);
    algConf.addProperty("evaluator[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Clas.problem.classification.softmax.SoftmaxClassificationProblemEvaluator");
    algConf.addProperty("evaluator[@normalize-data]", true);
    algConf.addProperty("evaluator.error-function",
            "keel.Algorithms.Neural_Networks.NNEP_Clas.problem.errorfunctions.LogisticErrorFunction");
    algConf.addProperty("evaluator.input-interval[@closure]", "closed-closed");
    if (props.getProperty("Transfer").equals("Product_Unit")) {
        algConf.addProperty("evaluator.input-interval[@left]", 1.0);
        algConf.addProperty("evaluator.input-interval[@right]", 2.0);
    } else {
        algConf.addProperty("evaluator.input-interval[@left]", 0.1);
        algConf.addProperty("evaluator.input-interval[@right]", 0.9);
    }
    algConf.addProperty("evaluator.output-interval[@closure]", "closed-closed");
    algConf.addProperty("evaluator.output-interval[@left]", 0.0);
    algConf.addProperty("evaluator.output-interval[@right]", 1.0);
    algConf.addProperty("provider[@type]", "keel.Algorithms.Neural_Networks.NNEP_Common.NeuralNetCreator");
    algConf.addProperty("mutator1[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.mutators.structural.StructuralMutator");
    algConf.addProperty("mutator1.temperature-exponent[@value]", 1.0);
    algConf.addProperty("mutator1.significative-weigth[@value]", 0.0000001);
    algConf.addProperty("mutator1.neuron-ranges.added[@min]", 1);
    algConf.addProperty("mutator1.neuron-ranges.added[@max]", 2);
    algConf.addProperty("mutator1.neuron-ranges.deleted[@min]", 1);
    algConf.addProperty("mutator1.neuron-ranges.deleted[@max]", 2);
    algConf.addProperty("mutator1.links-ranges[@relative]", true);
    algConf.addProperty("mutator1.links-ranges.percentages[@hidden]", 30);
    algConf.addProperty("mutator1.links-ranges.percentages[@output]", 5);
    algConf.addProperty("mutator2[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.mutators.parametric.ParametricSRMutator");
    algConf.addProperty("mutator2.temperature-exponent[@value]", 0.0);
    algConf.addProperty("mutator2.amplitude[@value]", 5.0);
    algConf.addProperty("mutator2.fitness-difference[@value]", 0.0000001);
    algConf.addProperty("mutator2.initial-alpha-values[@input]", 0.5);
    algConf.addProperty("mutator2.initial-alpha-values[@output]", 1.0);
    algConf.addProperty("rand-gen-factory[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.util.random.RanNnepFactory");
    algConf.addProperty("rand-gen-factory[@seed]", Integer.parseInt(props.getProperty("seed")));

    // Neural Net Algorithm
    algorithm = new CCRElitistNeuralNetAlgorithm();
    algorithm.configure(algConf);

    // Read data
    ProblemEvaluator evaluator = (ProblemEvaluator) algorithm.getEvaluator();
    evaluator.readData(schema, new KeelDataSet(trainFile), new KeelDataSet(testFile));
    ((NeuralNetIndividualSpecies) algorithm.getSpecies()).setNOfInputs(evaluator.getTrainData().getNofinputs());
    ((NeuralNetIndividualSpecies) algorithm.getSpecies())
            .setNOfOutputs(evaluator.getTrainData().getNofoutputs() - 1);

    // Read output files
    tokenizer = new StringTokenizer(props.getProperty("outputData"));
    String trainResultFile = tokenizer.nextToken();
    trainResultFile = trainResultFile.substring(1, trainResultFile.length() - 1);
    consoleReporter.setTrainResultFile(trainResultFile);
    String testResultFile = tokenizer.nextToken();
    testResultFile = testResultFile.substring(1, testResultFile.length() - 1);
    consoleReporter.setTestResultFile(testResultFile);
    String bestModelResultFile = tokenizer.nextToken();
    bestModelResultFile = bestModelResultFile.substring(1, bestModelResultFile.length() - 1);
    consoleReporter.setBestModelResultFile(bestModelResultFile);
    listeners.add(consoleReporter);
}
From source file:hepple.postag.POSTagger.java
/**
 * Reads one input file and creates the structure needed by the tagger
 * for input.
 */
@SuppressWarnings("unused")
private static List<List<String>> readInput(String file) throws IOException {
    BufferedReader reader = null;
    try {
        reader = new BufferedReader(new FileReader(file));
        String line = reader.readLine();
        List<List<String>> result = new ArrayList<List<String>>();
        while (line != null) {
            StringTokenizer tokens = new StringTokenizer(line);
            List<String> sentence = new ArrayList<String>();
            while (tokens.hasMoreTokens())
                sentence.add(tokens.nextToken());
            result.add(sentence);
            line = reader.readLine();
        } //while(line != null)
        return result;
    } finally {
        IOUtils.closeQuietly(reader);
    }
}
From source file:keel.Algorithms.Neural_Networks.NNEP_Regr.KEELWrapperRegr.java
/**
 * <p>
 * Configure the execution of the algorithm.
 * </p>
 * @param jobFilename Name of the KEEL file with properties of the
 * execution
 */
@SuppressWarnings("unchecked")
private static void configureJob(String jobFilename) {
    Properties props = new Properties();
    try {
        InputStream paramsFile = new FileInputStream(jobFilename);
        props.load(paramsFile);
        paramsFile.close();
    } catch (IOException ioe) {
        ioe.printStackTrace();
        System.exit(0);
    }

    // Files training and test
    String trainFile;
    String testFile;
    StringTokenizer tokenizer = new StringTokenizer(props.getProperty("inputData"));
    tokenizer.nextToken();
    trainFile = tokenizer.nextToken();
    trainFile = trainFile.substring(1, trainFile.length() - 1);
    testFile = tokenizer.nextToken();
    testFile = testFile.substring(1, testFile.length() - 1);

    // Classification or Regression ??
    byte[] schema = null;
    try {
        schema = readSchema(trainFile);
    } catch (IOException e) {
        e.printStackTrace();
    } catch (DatasetException e) {
        e.printStackTrace();
    }

    // Auxiliary algorithm configuration
    XMLConfiguration algConf = new XMLConfiguration();
    algConf.setRootElementName("algorithm");
    algConf.addProperty("population-size", 1000);
    algConf.addProperty("max-of-generations", Integer.parseInt(props.getProperty("Generations")));
    algConf.addProperty("creation-ratio", 10.0);
    algConf.addProperty("percentage-second-mutator", 10);
    algConf.addProperty("max-generations-without-improving-mean", 20);
    algConf.addProperty("max-generations-without-improving-best", 20);
    algConf.addProperty("fitness-difference", 0.0000001);
    algConf.addProperty("species[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.NeuralNetIndividualSpecies");
    algConf.addProperty("species.neural-net-type",
            "keel.Algorithms.Neural_Networks.NNEP_Regr.neuralnet.NeuralNetRegressor");
    if (props.getProperty("Transfer").equals("Product_Unit")) {
        algConf.addProperty("species.hidden-layer[@type]",
                "keel.Algorithms.Neural_Networks.NNEP_Common.neuralnet.ExpLayer");
        algConf.addProperty("species.hidden-layer[@biased]", false);
        algConf.addProperty("evaluator[@log-input-data]", true);
    } else {
        algConf.addProperty("species.hidden-layer[@type]",
                "keel.Algorithms.Neural_Networks.NNEP_Common.neuralnet.SigmLayer");
        algConf.addProperty("species.hidden-layer[@biased]", true);
    }
    int neurons = Integer.parseInt(props.getProperty("Hidden_nodes"));
    algConf.addProperty("species.hidden-layer.minimum-number-of-neurons",
            (neurons / 3) != 0 ? (neurons / 3) : 1);
    algConf.addProperty("species.hidden-layer.initial-maximum-number-of-neurons",
            (neurons / 2) != 0 ? (neurons / 2) : 1);
    algConf.addProperty("species.hidden-layer.maximum-number-of-neurons", neurons);
    algConf.addProperty("species.hidden-layer.initiator-of-links",
            "keel.Algorithms.Neural_Networks.NNEP_Common.initiators.RandomInitiator");
    algConf.addProperty("species.hidden-layer.weight-range[@type]", "net.sf.jclec.util.range.Interval");
    algConf.addProperty("species.hidden-layer.weight-range[@closure]", "closed-closed");
    algConf.addProperty("species.hidden-layer.weight-range[@left]", -5.0);
    algConf.addProperty("species.hidden-layer.weight-range[@right]", 5.0);
    algConf.addProperty("species.output-layer[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.neuralnet.LinearLayer");
    algConf.addProperty("species.output-layer[@biased]", true);
    algConf.addProperty("species.output-layer.initiator-of-links",
            "keel.Algorithms.Neural_Networks.NNEP_Common.initiators.RandomInitiator");
    algConf.addProperty("species.output-layer.weight-range[@type]", "net.sf.jclec.util.range.Interval");
    algConf.addProperty("species.output-layer.weight-range[@closure]", "closed-closed");
    algConf.addProperty("species.output-layer.weight-range[@left]", -5.0);
    algConf.addProperty("species.output-layer.weight-range[@right]", 5.0);
    algConf.addProperty("evaluator[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Regr.problem.regression.RegressionProblemEvaluator");
    algConf.addProperty("evaluator[@normalize-data]", true);
    algConf.addProperty("evaluator.error-function",
            "keel.Algorithms.Neural_Networks.NNEP_Regr.problem.errorfunctions.MSEErrorFunction");
    algConf.addProperty("evaluator.input-interval[@closure]", "closed-closed");
    if (props.getProperty("Transfer").equals("Product_Unit")) {
        algConf.addProperty("evaluator.input-interval[@left]", 1.0);
        algConf.addProperty("evaluator.input-interval[@right]", 2.0);
    } else {
        algConf.addProperty("evaluator.input-interval[@left]", 0.1);
        algConf.addProperty("evaluator.input-interval[@right]", 0.9);
    }
    algConf.addProperty("evaluator.output-interval[@closure]", "closed-closed");
    algConf.addProperty("evaluator.output-interval[@left]", 1.0);
    algConf.addProperty("evaluator.output-interval[@right]", 2.0);
    algConf.addProperty("provider[@type]", "keel.Algorithms.Neural_Networks.NNEP_Common.NeuralNetCreator");
    algConf.addProperty("mutator1[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.mutators.structural.StructuralMutator");
    algConf.addProperty("mutator1.temperature-exponent[@value]", 1.0);
    algConf.addProperty("mutator1.significative-weigth[@value]", 0.0000001);
    algConf.addProperty("mutator1.neuron-ranges.added[@min]", 1);
    algConf.addProperty("mutator1.neuron-ranges.added[@max]", 2);
    algConf.addProperty("mutator1.neuron-ranges.deleted[@min]", 1);
    algConf.addProperty("mutator1.neuron-ranges.deleted[@max]", 2);
    algConf.addProperty("mutator1.links-ranges[@relative]", false);
    algConf.addProperty("mutator1.links-ranges.added[@min]", 1);
    algConf.addProperty("mutator1.links-ranges.added[@max]", 6);
    algConf.addProperty("mutator1.links-ranges.deleted[@min]", 1);
    algConf.addProperty("mutator1.links-ranges.deleted[@max]", 6);
    algConf.addProperty("mutator2[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.mutators.parametric.ParametricSAMutator");
    algConf.addProperty("mutator2.temperature-exponent[@value]", 0.0);
    algConf.addProperty("mutator2.amplitude[@value]", 5.0);
    algConf.addProperty("mutator2.fitness-difference[@value]", 0.0000001);
    algConf.addProperty("mutator2.initial-alpha-values[@input]", 0.5);
    algConf.addProperty("mutator2.initial-alpha-values[@output]", 1.0);
    algConf.addProperty("rand-gen-factory[@type]",
            "keel.Algorithms.Neural_Networks.NNEP_Common.util.random.RanNnepFactory");
    algConf.addProperty("rand-gen-factory[@seed]", Integer.parseInt(props.getProperty("seed")));

    // Neural Net Algorithm
    algorithm = new NeuralNetAlgorithm<NeuralNetIndividual>();
    algorithm.configure(algConf);

    // Read data
    ProblemEvaluator evaluator = (ProblemEvaluator) algorithm.getEvaluator();
    evaluator.readData(schema, new KeelDataSet(trainFile), new KeelDataSet(testFile));
    ((NeuralNetIndividualSpecies) algorithm.getSpecies()).setNOfInputs(evaluator.getTrainData().getNofinputs());
    ((NeuralNetIndividualSpecies) algorithm.getSpecies())
            .setNOfOutputs(evaluator.getTrainData().getNofoutputs());

    // Read output files
    tokenizer = new StringTokenizer(props.getProperty("outputData"));
    String trainResultFile = tokenizer.nextToken();
    trainResultFile = trainResultFile.substring(1, trainResultFile.length() - 1);
    consoleReporter.setTrainResultFile(trainResultFile);
    String testResultFile = tokenizer.nextToken();
    testResultFile = testResultFile.substring(1, testResultFile.length() - 1);
    consoleReporter.setTestResultFile(testResultFile);
    String bestModelResultFile = tokenizer.nextToken();
    bestModelResultFile = bestModelResultFile.substring(1, bestModelResultFile.length() - 1);
    consoleReporter.setBestModelResultFile(bestModelResultFile);
    listeners.add(consoleReporter);
}
From source file:com.liusoft.dlog4j.search.SearchProxy.java
/**
 * @param obj
 * @param field
 * @param value
 * @throws IllegalAccessException
 * @throws InvocationTargetException
 * @throws NoSuchMethodException
 * @throws IntrospectionException
 * @throws InstantiationException
 */
private static void setNestedProperty(Object obj, String field, Object value)
        throws IllegalAccessException, InvocationTargetException, NoSuchMethodException, IntrospectionException,
        InstantiationException {
    StringTokenizer st = new StringTokenizer(field, ".");
    Class nodeClass = obj.getClass();
    StringBuffer tmp_prop = new StringBuffer();
    while (st.hasMoreElements()) {
        String f = st.nextToken();
        if (tmp_prop.length() > 0)
            tmp_prop.append('.');
        tmp_prop.append(f);
        PropertyDescriptor[] props = Introspector.getBeanInfo(nodeClass).getPropertyDescriptors();
        for (int i = 0; i < props.length; i++) {
            if (props[i].getName().equals(f)) {
                if (PropertyUtils.getNestedProperty(obj, tmp_prop.toString()) == null) {
                    nodeClass = props[i].getPropertyType();
                    PropertyUtils.setNestedProperty(obj, f, nodeClass.newInstance());
                }
                continue;
            }
        }
    }
    PropertyUtils.setNestedProperty(obj, field, value);
}
From source file:edu.stanford.muse.index.NEROld.java
public static void readLocationsFreebase() throws IOException {
    InputStream is = new GZIPInputStream(NER.class.getClassLoader().getResourceAsStream("locations.gz"));
    LineNumberReader lnr = new LineNumberReader(new InputStreamReader(is, "UTF-8"));
    while (true) {
        String line = lnr.readLine();
        if (line == null)
            break;
        StringTokenizer st = new StringTokenizer(line, "\t");
        if (st.countTokens() == 3) {
            String locationName = st.nextToken();
            String canonicalName = locationName.toLowerCase();
            String lat = st.nextToken();
            String longi = st.nextToken();
            locations.put(canonicalName, new LocationInfo(locationName, lat, longi));
        }
    }
}