Example usage for java.util Scanner close

List of usage examples for java.util Scanner close

Introduction

On this page you can find example usages of java.util.Scanner.close().

Prototype

public void close() 

Source Link

Document

Closes this scanner.

Usage

From source file:hu.petabyte.redflags.PassEncoder.java

@Test
public void test() {
    // Interactively BCrypt-encode console input: echo an encoded hash for
    // each line typed, stopping at the first empty line.
    BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
    Scanner console = new Scanner(System.in);
    for (String line = console.nextLine(); !line.isEmpty(); line = console.nextLine()) {
        System.out.println("ENC: " + encoder.encode(line));
    }
    console.close();
}

From source file:org.geppetto.core.model.services.OBJModelInterpreterService.java

@Override
public IModel readModel(URL url, List<URL> recordings, String instancePath) throws ModelInterpreterException {
    // Reads the entire OBJ document at the given URL and wraps its raw text
    // in a ModelWrapper under the OBJ format key.
    ModelWrapper wrapper = new ModelWrapper(instancePath);
    // try-with-resources closes the scanner (and the underlying stream) even
    // if reading fails; the original leaked both on exception.
    try (Scanner scanner = new Scanner(url.openStream(), "UTF-8")) {
        // The "\\A" delimiter makes next() return the whole stream in one
        // token; guard hasNext() so an empty stream yields "" instead of
        // an unchecked NoSuchElementException.
        String objContent = scanner.useDelimiter("\\A").hasNext() ? scanner.next() : "";
        wrapper.wrapModel(OBJ, objContent);
    } catch (IOException e) {
        // Preserve the cause so callers can diagnose the I/O failure.
        throw new ModelInterpreterException(e);
    }

    return wrapper;
}

From source file:org.geppetto.core.model.services.ColladaModelInterpreterService.java

@Override
public IModel readModel(URL url, List<URL> recordings, String instancePath) throws ModelInterpreterException {
    // Reads the entire COLLADA document at the given URL and wraps its raw
    // text in a ModelWrapper under the COLLADA format key.
    ModelWrapper collada = new ModelWrapper(instancePath);
    // try-with-resources closes the scanner (and the underlying stream) even
    // if reading fails; the original leaked both on exception.
    try (Scanner scanner = new Scanner(url.openStream(), "UTF-8")) {
        // The "\\A" delimiter makes next() return the whole stream in one
        // token; guard hasNext() so an empty stream yields "" instead of
        // an unchecked NoSuchElementException.
        String colladaContent = scanner.useDelimiter("\\A").hasNext() ? scanner.next() : "";
        collada.wrapModel(COLLADA, colladaContent);
    } catch (IOException e) {
        // Preserve the cause so callers can diagnose the I/O failure.
        throw new ModelInterpreterException(e);
    }

    return collada;
}

From source file:edu.msu.cme.rdp.unifrac.Unifrac.java

/**
 * Parses a whitespace-delimited sample mapping file with lines of the form
 * {@code <sequence-name> <sample-name> [count]} and returns a map from
 * sequence name to its UnifracSample. MCSample instances are shared across
 * sequences belonging to the same sample.
 *
 * @throws IOException if a non-blank line has fewer than two columns
 */
private static Map<String, UnifracSample> readSampleMap(String sampleFile) throws IOException {
    Map<String, UnifracSample> ret = new HashMap<String, UnifracSample>();
    Map<String, MCSample> sampleMap = new HashMap<String, MCSample>();

    int lineno = 1;
    // try-with-resources: the original leaked the scanner when the parse
    // IOException below was thrown.
    try (Scanner s = new Scanner(new File(sampleFile)).useDelimiter("\n")) {
        while (s.hasNext()) {
            String line = s.next().trim();
            if (line.isEmpty()) {
                // NOTE: blank lines intentionally do not advance lineno,
                // matching the original behavior.
                continue;
            }

            String[] tokens = line.split("\\s+");
            if (tokens.length < 2) {
                throw new IOException("Failed to parse sample mapping file (lineno=" + lineno + ")");
            }

            String seqName = tokens[0];
            String sampleName = tokens[1];

            // Optional third column: per-sequence count, defaulting to 1.
            // Guard the length explicitly instead of relying on a swallowed
            // ArrayIndexOutOfBoundsException as the original did.
            int sampleCount = 1;
            if (tokens.length > 2) {
                try {
                    sampleCount = Integer.parseInt(tokens[2]);
                } catch (NumberFormatException ignored) {
                    // Malformed count: keep the default of 1 (best-effort).
                }
            }

            if (!sampleMap.containsKey(sampleName)) {
                sampleMap.put(sampleName, new MCSample(sampleName));
            }

            UnifracSample unifracSample = new UnifracSample();
            unifracSample.sample = sampleMap.get(sampleName);
            unifracSample.count = sampleCount;

            ret.put(seqName, unifracSample);

            lineno++;
        }
    }

    return ret;
}

From source file:carmen.LocationResolver.java

/**
 * Loads a tab-separated file of {@code <name>\t<abbreviation>} pairs,
 * lower-casing every line. Column 0 (trimmed) is added to {@code fullName}
 * when non-null; {@code abbreviations} (when non-null) maps column 0 to
 * column 1, or column 1 to column 0 when {@code secondColumnKey} is true.
 *
 * @throws FileNotFoundException if the file does not exist
 */
protected static void loadNameAndAbbreviation(String filename, HashSet<String> fullName,
        HashMap<String, String> abbreviations, boolean secondColumnKey) throws FileNotFoundException {
    // try-with-resources: the original leaked the scanner whenever a line
    // was missing its second column and the array access threw.
    try (Scanner inputScanner = new Scanner(new FileInputStream(filename), "UTF-8")) {
        while (inputScanner.hasNextLine()) {
            String line = inputScanner.nextLine().toLowerCase();
            String[] columns = line.split("\t");
            columns[0] = columns[0].trim();
            if (fullName != null) {
                fullName.add(columns[0]);
            }
            // Guard columns.length so a line without a tab is skipped for
            // abbreviation purposes instead of throwing
            // ArrayIndexOutOfBoundsException.
            if (abbreviations != null && columns.length > 1) {
                if (secondColumnKey) {
                    abbreviations.put(columns[1], columns[0]);
                } else {
                    abbreviations.put(columns[0], columns[1]);
                }
            }
        }
    }
}

From source file:br.cefetrj.sagitarii.nunki.comm.WebClient.java

private String convertStreamToString(java.io.InputStreamReader is) {
    // Slurp the whole reader into a single string: "\\A" anchors the
    // delimiter at "beginning of input", so next() returns everything.
    java.util.Scanner slurper = new java.util.Scanner(is).useDelimiter("\\A");
    String content = "";
    if (slurper.hasNext()) {
        content = slurper.next();
    }
    slurper.close();
    return content;
}

From source file:ml.shifu.shifu.actor.TrainModelActorTest.java

@Test
public void testActor() throws IOException, InterruptedException {
    File tmpDir = new File("./tmp");
    FileUtils.forceMkdir(tmpDir);

    // Stage 1: normalize the raw data set through the actor system.
    actorSystem = ActorSystem.create("shifuActorSystem");
    ActorRef normalizeRef = actorSystem.actorOf(new Props(new UntypedActorFactory() {
        private static final long serialVersionUID = 6777309320338075269L;

        public UntypedActor create() throws IOException {
            return new NormalizeDataActor(modelConfig, columnConfigList, new AkkaExecStatus(true));
        }
    }), "normalize-calculator");

    List<Scanner> scanners = ShifuFileUtils.getDataScanners(
            "src/test/resources/example/cancer-judgement/DataStore/DataSet1", SourceType.LOCAL);
    normalizeRef.tell(new AkkaActorInputMessage(scanners), normalizeRef);

    while (!normalizeRef.isTerminated()) {
        Thread.sleep(5000);
    }

    // BUGFIX: the scanners used for normalization were never closed in the
    // original test.
    for (Scanner scanner : scanners) {
        scanner.close();
    }

    File outputFile = new File("./tmp/NormalizedData");
    Assert.assertTrue(outputFile.exists());

    // Stage 2: train one model per bag on the normalized data.
    actorSystem = ActorSystem.create("shifuActorSystem");
    File models = new File("./models");
    FileUtils.forceMkdir(models);

    final List<AbstractTrainer> trainers = new ArrayList<AbstractTrainer>();
    for (int i = 0; i < 5; i++) {
        AbstractTrainer trainer;
        if (modelConfig.getAlgorithm().equalsIgnoreCase("NN")) {
            trainer = new NNTrainer(this.modelConfig, i, false);
        } else if (modelConfig.getAlgorithm().equalsIgnoreCase("SVM")) {
            trainer = new SVMTrainer(this.modelConfig, i, false);
        } else if (modelConfig.getAlgorithm().equalsIgnoreCase("LR")) {
            trainer = new LogisticRegressionTrainer(this.modelConfig, i, false);
        } else {
            throw new RuntimeException("unsupport algorithm");
        }
        trainers.add(trainer);
    }

    // Train the models.
    ActorRef modelTrainRef = actorSystem.actorOf(new Props(new UntypedActorFactory() {
        private static final long serialVersionUID = 6777309320338075269L;

        public UntypedActor create() throws IOException {
            return new TrainModelActor(modelConfig, columnConfigList, new AkkaExecStatus(true), trainers);
        }
    }), "trainer");

    scanners = ShifuFileUtils.getDataScanners("./tmp/NormalizedData", SourceType.LOCAL);
    modelTrainRef.tell(new AkkaActorInputMessage(scanners), modelTrainRef);

    while (!modelTrainRef.isTerminated()) {
        Thread.sleep(5000);
    }

    for (Scanner scanner : scanners) {
        scanner.close();
    }

    // BUGFIX: the original asserted "model0.nn" five times (copy-paste);
    // check each trained model file individually.
    for (int i = 0; i < 5; i++) {
        Assert.assertTrue(new File("./models/model" + i + ".nn").exists());
    }

    File modelsTemp = new File("./modelsTmp");

    FileUtils.deleteDirectory(modelsTemp);
    FileUtils.deleteDirectory(models);
    FileUtils.deleteDirectory(tmpDir);
}

From source file:DataSci.main.JsonRequestResponseController.java

/**
 * Reads the JSON schema from the file rrsJson.json into the {@code jsonBody}
 * field, normalizing line endings to {@code \n}.
 *
 * @param filename fully qualified name of the input JSON file
 */
public void readJson(String filename) {
    try {
        File apiFile = new File(filename);
        // StringBuilder avoids the original's O(n^2) string concatenation
        // in the loop. hasNextLine() (not hasNext()) pairs correctly with
        // nextLine(): hasNext() returns false once only whitespace remains,
        // silently dropping trailing blank lines.
        StringBuilder body = new StringBuilder();
        // try-with-resources: the original leaked the scanner if reading
        // threw.
        try (Scanner sc = new Scanner(apiFile)) {
            while (sc.hasNextLine()) {
                body.append(sc.nextLine()).append('\n');
            }
        }
        jsonBody = body.toString();
    } catch (Exception e) {
        // Best-effort contract preserved: log and continue with jsonBody
        // unchanged on failure.
        e.printStackTrace();
    }
}

From source file:com.kscs.server.web.source.JavaSourceCode.java

/**
 * Reads the named file from the hard-coded resource root, returning its
 * content with {@code \n} line endings. Returns whatever was read so far
 * (typically the empty string) if the file is missing or unreadable.
 */
public String readfile(String filename) {
    File file = new File("/home/sinhlk/myspace/tool/src/main/resources/" + filename);
    StringBuilder result = new StringBuilder();
    // try-with-resources: the original leaked the scanner if a read threw
    // after construction.
    try (Scanner scanner = new Scanner(file)) {
        while (scanner.hasNextLine()) {
            result.append(scanner.nextLine()).append('\n');
        }
    } catch (IOException e) {
        // Best-effort contract preserved from the original: report and
        // fall through to return the partial (usually empty) content.
        e.printStackTrace();
    }
    return result.toString();
}

From source file:csns.test.Setup.java

/**
 * Spring's executeSqlScript() splits the script into statements and
 * executes each statement individually. The problem is that the split is
 * based on simple delimiters like semicolon and it does not recognize the
 * syntax of create function/procedure. So in order to run csns-create.sql,
 * we have to read the file into a string and pass the whole thing to the
 * JDBC driver.
 */
@SuppressWarnings("deprecation")
private void executeSqlScript(String path) {
    try {
        StringBuilder sb = new StringBuilder();
        Resource resource = applicationContext.getResource(path);
        // try-with-resources: the original leaked the scanner if the JDBC
        // update (or a read) threw before in.close() was reached.
        try (Scanner in = new Scanner(resource.getFile())) {
            while (in.hasNextLine()) {
                sb.append(in.nextLine());
                sb.append("\n");
            }
        }
        simpleJdbcTemplate.update(sb.toString());
    } catch (Exception e) {
        // Wrap and rethrow with cause preserved so the test setup fails
        // loudly rather than continuing with a half-initialized schema.
        throw new RuntimeException(e);
    }
}