Example usage for java.io BufferedReader BufferedReader

List of usage examples for java.io BufferedReader BufferedReader

Introduction

On this page you can find example usage for the java.io BufferedReader BufferedReader(Reader) constructor.

Prototype

public BufferedReader(Reader in) 

Document

Creates a buffering character-input stream that uses a default-sized input buffer.
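
A minimal sketch of the pattern used throughout the examples below: wrap any Reader in a BufferedReader and read it line by line. The file name example.txt is only an assumption for illustration; try-with-resources closes both the BufferedReader and the underlying FileReader.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class BufferedReaderSketch {
    public static void main(String[] args) throws IOException {
        // Wrap a Reader in a BufferedReader that uses the default buffer size
        try (BufferedReader reader = new BufferedReader(new FileReader("example.txt"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}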

Usage

From source file:httpclient.UploadAction.java

public static void main(String[] args) throws FileNotFoundException {
    File targetFile1 = new File("F:\\2.jpg");
    // File targetFile2 = new File("F:\\1.jpg");
    FileInputStream fis1 = new FileInputStream(targetFile1);
    // FileInputStream fis2 = new FileInputStream(targetFile2);
    // String targetURL =
    // "http://static.fangbiandian.com.cn/round_server/upload/uploadFile.do";
    String targetURL = "http://www.fangbiandian.com.cn/round_server/user/updateUserInfo.do";
    HttpPost filePost = new HttpPost(targetURL);
    try {
        // Create the HTTP client
        HttpClient client = new DefaultHttpClient();
        // FormBodyPart fbp1 = new FormBodyPart("file1", new
        // FileBody(targetFile1));
        // FormBodyPart fbp2 = new FormBodyPart("file2", new
        // FileBody(targetFile2));
        // FormBodyPart fbp3 = new FormBodyPart("file3", new
        // FileBody(targetFile3));
        // List<FormBodyPart> picList = new ArrayList<FormBodyPart>();
        // picList.add(fbp1);
        // picList.add(fbp2);
        // picList.add(fbp3);
        Map<String, Object> paramMap = new HashMap<String, Object>();
        paramMap.put("userId", "65478A5CD8D20C3807EE16CF22AF8A17");
        Map<String, Object> map = new HashMap<String, Object>();
        String jsonStr = JSON.toJSONString(paramMap);
        map.put("cid", 321);
        map.put("request", jsonStr);
        String jsonString = JSON.toJSONString(map);
        MultipartEntity multiEntity = new MultipartEntity();
        Charset charset = Charset.forName("UTF-8");
        multiEntity.addPart("request", new StringBody(jsonString, charset));
        multiEntity.addPart("photo", new InputStreamBody(fis1, "2.jpg"));
        // multiEntity.addPart("licenseUrl", new InputStreamBody(fis2,
        // "1.jpg"));
        filePost.setEntity(multiEntity);
        HttpResponse response = client.execute(filePost);
        int code = response.getStatusLine().getStatusCode();
        System.out.println(code);
        if (HttpStatus.SC_OK == code) {
            System.out.println("Upload succeeded.");
            HttpEntity entity = response.getEntity();
            if (entity != null) {
                InputStream instream = entity.getContent();
                try {
                    // do something useful
                    BufferedReader reader = new BufferedReader(new InputStreamReader(instream));
                    String str = null;
                    while ((str = reader.readLine()) != null) {
                        System.out.println(str);
                    }
                } finally {
                    instream.close();
                }
            }
        } else {
            System.out.println("Upload failed.");
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        filePost.releaseConnection();
    }
}

From source file:RemoveHTMLReader.java

/** The test program: read a text file, strip HTML, print to console */
public static void main(String[] args) {
    try {
        if (args.length != 1)
            throw new IllegalArgumentException("Wrong number of args");
        // Create a stream to read from the file and strip tags from it
        BufferedReader in = new BufferedReader(new RemoveHTMLReader(new FileReader(args[0])));
        // Read line by line, printing lines to the console
        String line;
        while ((line = in.readLine()) != null)
            System.out.println(line);
        in.close(); // Close the stream.
    } catch (Exception e) {
        System.err.println(e);
        System.err.println("Usage: java RemoveHTMLReader$Test" + " <filename>");
    }
}

From source file:edu.jhu.hlt.concrete.gigaword.expt.ConvertGigawordDocuments.java

/**
 * @param args
 */
public static void main(String... args) {
    Thread.setDefaultUncaughtExceptionHandler(new UncaughtExceptionHandler() {

        @Override
        public void uncaughtException(Thread t, Throwable e) {
            logger.error("Thread {} caught unhandled exception.", t.getName());
            logger.error("Unhandled exception.", e);
        }
    });

    if (args.length != 2) {
        logger.info("Usage: {} {} {}", GigawordConcreteConverter.class.getName(), "path/to/expt/file",
                "path/to/out/folder");
        System.exit(1);
    }

    String exptPathStr = args[0];
    String outPathStr = args[1];

    // Verify path points to something.
    Path exptPath = Paths.get(exptPathStr);
    if (!Files.exists(exptPath)) {
        logger.error("File: {} does not exist. Re-run with the correct path to "
                + "the experiment 2 column file. See README.md.", exptPathStr);
        System.exit(1);
    }

    logger.info("Experiment map located at: {}", exptPathStr);

    // Create output dir if not yet created.
    Path outPath = Paths.get(outPathStr);
    if (!Files.exists(outPath)) {
        logger.info("Creating directory: {}", outPath.toString());
        try {
            Files.createDirectories(outPath);
        } catch (IOException e) {
            logger.error("Caught an IOException when creating output dir.", e);
            System.exit(1);
        }
    }

    logger.info("Output directory located at: {}", outPathStr);

    // Read in expt map. See README.md.
    Map<String, Set<String>> exptMap = null;
    try (Reader r = ExperimentUtils.createReader(exptPath); BufferedReader br = new BufferedReader(r)) {
        exptMap = ExperimentUtils.createFilenameToIdMap(br);
    } catch (IOException e) {
        logger.error("Caught an IOException when creating expt map.", e);
        System.exit(1);
    }

    // Start a timer.
    logger.info("Gigaword -> Concrete beginning.");
    StopWatch sw = new StopWatch();
    sw.start();
    // Iterate over expt map.
    exptMap.entrySet()
            // .parallelStream()
            .forEach(p -> {
                final String pathStr = p.getKey();
                final Set<String> ids = p.getValue();
                final Path lp = Paths.get(pathStr);
                logger.info("Converting path: {}", pathStr);

                // Get the file name and immediate folder it is under.
                int nElements = lp.getNameCount();
                Path fileName = lp.getName(nElements - 1);
                Path subFolder = lp.getName(nElements - 2);
                String newFnStr = fileName.toString().split("\\.")[0] + ".tar";

                // Mirror folders in output dir.
                Path localOutFolder = outPath.resolve(subFolder);
                Path localOutPath = localOutFolder.resolve(newFnStr);

                // Create output subfolders.
                if (!Files.exists(localOutFolder) && !Files.isDirectory(localOutFolder)) {
                    logger.info("Creating out file: {}", localOutFolder.toString());
                    try {
                        Files.createDirectories(localOutFolder);
                    } catch (IOException e) {
                        throw new RuntimeException("Caught an IOException when creating output dir.", e);
                    }
                }

                // Iterate over communications.
                Iterator<Communication> citer;
                try (OutputStream os = Files.newOutputStream(localOutPath);
                        BufferedOutputStream bos = new BufferedOutputStream(os);
                        Archiver archiver = new TarArchiver(bos);) {
                    citer = new ConcreteGigawordDocumentFactory().iterator(lp);
                    while (citer.hasNext()) {
                        Communication c = citer.next();
                        String cId = c.getId();

                        // Document ID must be in the set. Remove.
                        boolean wasInSet = ids.remove(cId);
                        if (!wasInSet) {
                            // Some IDs are duplicated in Gigaword.
                            // See ERRATA.
                            logger.debug(
                                    "ID: {} was parsed from path: {}, but was not in the experiment map. Attempting to remove dupe.",
                                    cId, pathStr);

                            // Attempt to create a duplicate id (append .duplicate to the id).
                            // Then, try to remove again.
                            String newId = RepairDuplicateIDs.repairDuplicate(cId);
                            boolean dupeRemoved = ids.remove(newId);
                            // There are no nested duplicates, so this should never fire.
                            if (!dupeRemoved) {
                                logger.info("Failed to remove dupe.");
                                return;
                            } else
                                // Modify the communication ID to the unique version.
                                c.setId(newId);
                        }

                        archiver.addEntry(new ArchivableCommunication(c));
                    }

                    logger.info("Finished path: {}", pathStr);
                } catch (ConcreteException ex) {
                    logger.error("Caught ConcreteException during Concrete mapping.", ex);
                    logger.error("Path: {}", pathStr);
                } catch (IOException e) {
                    logger.error("Error archiving communications.", e);
                    logger.error("Path: {}", localOutPath.toString());
                }
            });

    sw.stop();
    logger.info("Finished.");
    Minutes m = new Duration(sw.getTime()).toStandardMinutes();
    logger.info("Runtime: Approximately {} minutes.", m.getMinutes());
}

From source file:com.janrain.backplane.common.HmacHashUtils.java

public static void main(String[] args) throws IOException {
    String password;
    if (args.length == 1) {
        password = args[0];
    } else {
        System.out.print("Password: ");
        password = new BufferedReader(new InputStreamReader(System.in)).readLine();
    }
    System.out.println("hmac hash: " + hmacHash(password));
}

From source file:edu.mit.fss.examples.ISSFederate.java

/**
 * The main method. This configures the Orekit data path, creates the 
 * ISS federate objects and launches the associated graphical user 
 * interface.
 *
 * @param args the arguments
 * @throws RTIexception the RTI exception
 * @throws URISyntaxException 
 */
public static void main(String[] args) throws RTIexception, URISyntaxException {
    BasicConfigurator.configure();

    boolean headless = false;

    logger.debug("Setting Orekit data path.");
    System.setProperty(DataProvidersManager.OREKIT_DATA_PATH,
            new File(ISSFederate.class.getResource("/orekit-data.zip").toURI()).getAbsolutePath());

    logger.trace("Creating federate instance.");
    final ISSFederate federate = new ISSFederate();

    logger.trace("Setting minimum step duration and time step.");
    long timeStep = 60 * 1000, minimumStepDuration = 100;
    federate.setMinimumStepDuration(minimumStepDuration);
    federate.setTimeStep(timeStep);

    try {
        logger.debug("Loading TLE data from file.");
        BufferedReader br = new BufferedReader(new InputStreamReader(
                federate.getClass().getClassLoader().getResourceAsStream("edu/mit/fss/examples/data.tle")));

        final SpaceSystem satellite;
        final SurfaceSystem station1, station2, station3;

        while (br.ready()) {
            if (br.readLine().matches(".*ISS.*")) {
                logger.debug("Found ISS data.");

                logger.trace("Adding FSS supplier space system.");
                satellite = new SpaceSystem("FSS Supplier", new TLE(br.readLine(), br.readLine()), 5123e3);
                federate.addObject(satellite);

                logger.trace("Adding Keio ground station.");
                station1 = new SurfaceSystem("Keio",
                        new GeodeticPoint(FastMath.toRadians(35.551929), FastMath.toRadians(139.647119), 300),
                        satellite.getState().getDate(), 5123e3, 5);
                federate.addObject(station1);

                logger.trace("Adding SkolTech ground station.");
                station2 = new SurfaceSystem("SkolTech",
                        new GeodeticPoint(FastMath.toRadians(55.698679), FastMath.toRadians(37.571994), 200),
                        satellite.getState().getDate(), 5123e3, 5);
                federate.addObject(station2);

                logger.trace("Adding MIT ground station.");
                station3 = new SurfaceSystem("MIT",
                        new GeodeticPoint(FastMath.toRadians(42.360184), FastMath.toRadians(-71.093742), 100),
                        satellite.getState().getDate(), 5123e3, 5);
                federate.addObject(station3);

                try {
                    logger.trace("Setting initial time.");
                    federate.setInitialTime(
                            satellite.getInitialState().getDate().toDate(TimeScalesFactory.getUTC()).getTime());
                } catch (IllegalArgumentException | OrekitException e) {
                    logger.error(e.getMessage());
                    e.printStackTrace();
                }

                if (!headless) {
                    logger.debug("Launching the graphical user interface.");
                    SwingUtilities.invokeAndWait(new Runnable() {
                        @Override
                        public void run() {
                            MemberFrame frame = new MemberFrame(federate,
                                    new MultiComponentPanel(
                                            Arrays.asList(new SpaceSystemPanel(federate, satellite),
                                                    new SurfaceSystemPanel(federate, station1),
                                                    new SurfaceSystemPanel(federate, station2),
                                                    new SurfaceSystemPanel(federate, station3))));
                            frame.pack();
                            frame.setVisible(true);
                        }
                    });
                }

                break;
            }
        }
        br.close();
    } catch (InvocationTargetException | InterruptedException | OrekitException | IOException e) {
        e.printStackTrace();
        logger.fatal(e);
    }

    logger.trace("Setting federate name, type, and FOM path.");
    federate.getConnection().setFederateName("ISS");
    federate.getConnection().setFederateType("FSS Supplier");
    federate.getConnection().setFederationName("FSS");
    federate.getConnection().setFomPath(
            new File(federate.getClass().getClassLoader().getResource("edu/mit/fss/hla/fss.xml").toURI())
                    .getAbsolutePath());
    federate.getConnection().setOfflineMode(false);
    federate.connect();

    if (headless) {
        federate.setMinimumStepDuration(10);
        federate.initialize();
        federate.run();
    }
}

From source file:DruidThroughput.java

@SuppressWarnings("InfiniteLoopStatement")
public static void main(String[] args) throws Exception {
    final int numQueries = QUERIES.length;
    final Random random = new Random(RANDOM_SEED);
    final AtomicInteger counter = new AtomicInteger(0);
    final AtomicLong totalResponseTime = new AtomicLong(0L);
    final ExecutorService executorService = Executors.newFixedThreadPool(NUM_CLIENTS);

    for (int i = 0; i < NUM_CLIENTS; i++) {
        executorService.submit(new Runnable() {
            @Override
            public void run() {
                try (CloseableHttpClient client = HttpClients.createDefault()) {
                    HttpPost post = new HttpPost("http://localhost:8082/druid/v2/?pretty");
                    post.addHeader("content-type", "application/json");
                    CloseableHttpResponse res;
                    while (true) {
                        try (BufferedReader reader = new BufferedReader(new FileReader(
                                QUERY_FILE_DIR + File.separator + random.nextInt(numQueries) + ".json"))) {
                            int length = reader.read(BUFFER);
                            post.setEntity(new StringEntity(new String(BUFFER, 0, length)));
                        }
                        long start = System.currentTimeMillis();
                        res = client.execute(post);
                        res.close();
                        counter.getAndIncrement();
                        totalResponseTime.getAndAdd(System.currentTimeMillis() - start);
                    }
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    long startTime = System.currentTimeMillis();
    while (true) {
        Thread.sleep(REPORT_INTERVAL_MILLIS);
        double timePassedSeconds = ((double) (System.currentTimeMillis() - startTime)) / MILLIS_PER_SECOND;
        int count = counter.get();
        double avgResponseTime = ((double) totalResponseTime.get()) / count;
        System.out.println("Time Passed: " + timePassedSeconds + "s, Query Executed: " + count + ", QPS: "
                + count / timePassedSeconds + ", Avg Response Time: " + avgResponseTime + "ms");
    }
}

From source file:edu.msu.cme.rdp.graph.sandbox.BloomFilterInterrogator.java

public static void main(String[] args) throws Exception {
    if (args.length != 1) {
        System.err.println("USAGE: BloomFilterStats <bloom_filter>");
        System.exit(1);
    }

    File bloomFile = new File(args[0]);

    ObjectInputStream ois = new ObjectInputStream(
            new BufferedInputStream(new FileInputStream(bloomFile)));
    BloomFilter filter = (BloomFilter) ois.readObject();
    ois.close();

    printStats(filter, System.out);

    BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
    String line;
    CodonWalker walker = null;
    while ((line = reader.readLine()) != null) {
        char[] kmer = line.toCharArray();
        System.out.print(line + "\t");
        try {
            walker = filter.new RightCodonFacade(kmer);
            walker.jumpTo(kmer);
            System.out.print("present");
        } catch (Exception e) {
            System.out.print("not present\t" + e.getMessage());
        }
        System.out.println();
    }
}

From source file:gentracklets.Propagate.java

public static void main(String[] args) throws OrekitException {

    // load the data files
    File data = new File("/home/zittersteijn/Documents/java/libraries/orekit-data.zip");
    DataProvidersManager DM = DataProvidersManager.getInstance();
    ZipJarCrawler crawler = new ZipJarCrawler(data);
    DM.clearProviders();
    DM.addProvider(crawler);

    // Read in TLE elements
    File tleFile = new File("/home/zittersteijn/Documents/TLEs/ASTRA20151207.tle");
    FileReader TLEfr;
    Vector<TLE> tles = new Vector<>();
    tles.setSize(30);

    try {
        // read and save TLEs to a vector
        TLEfr = new FileReader("/home/zittersteijn/Documents/TLEs/ASTRA20151207.tle");
        BufferedReader readTLE = new BufferedReader(TLEfr);

        Scanner s = new Scanner(tleFile);

        String line1, line2;
        TLE2 tle = new TLE2();

        int nrOfObj = 4;
        for (int ii = 1; ii < nrOfObj + 1; ii++) {
            System.out.println(ii);
            line1 = s.nextLine();
            line2 = s.nextLine();
            if (TLE.isFormatOK(line1, line2)) {
                tles.setElementAt(new TLE(line1, line2), ii);
                System.out.println(tles.get(ii).toString());
            } else {
                System.out.println("format problem");
            }

        }
        readTLE.close();

        // define a groundstation
        Frame inertialFrame = FramesFactory.getEME2000();
        TimeScale utc = TimeScalesFactory.getUTC();
        double longitude = FastMath.toRadians(7.465);
        double latitude = FastMath.toRadians(46.87);
        double altitude = 950.;
        GeodeticPoint station = new GeodeticPoint(latitude, longitude, altitude);
        Frame earthFrame = FramesFactory.getITRF(IERSConventions.IERS_2010, true);
        BodyShape earth = new OneAxisEllipsoid(Constants.WGS84_EARTH_EQUATORIAL_RADIUS,
                Constants.WGS84_EARTH_FLATTENING, earthFrame);
        TopocentricFrame staF = new TopocentricFrame(earth, station, "station");

        Vector<Orbit> eles = new Vector<>();
        eles.setSize(tles.size());
        for (int ii = 1; ii < nrOfObj + 1; ii++) {
            double a = FastMath.pow(Constants.WGS84_EARTH_MU / FastMath.pow(tles.get(ii).getMeanMotion(), 2),
                    (1.0 / 3));
            // convert them to orbits
            Orbit kep = new KeplerianOrbit(a, tles.get(ii).getE(), tles.get(ii).getI(),
                    tles.get(ii).getPerigeeArgument(), tles.get(ii).getRaan(), tles.get(ii).getMeanAnomaly(),
                    PositionAngle.MEAN, inertialFrame, tles.get(ii).getDate(), Constants.WGS84_EARTH_MU);

            eles.setElementAt(kep, ii);

            // set up propagators
            KeplerianPropagator kepler = new KeplerianPropagator(eles.get(ii));

            System.out.println("a: " + a);

            // Initial state definition
            double mass = 1000.0;
            SpacecraftState initialState = new SpacecraftState(kep, mass);

            // Adaptive step integrator
            // with a minimum step of 0.001 and a maximum step of 1000
            double minStep = 0.001;
            double maxstep = 1000.0;
            double positionTolerance = 10.0;
            OrbitType propagationType = OrbitType.KEPLERIAN;
            double[][] tolerances = NumericalPropagator.tolerances(positionTolerance, kep, propagationType);
            AdaptiveStepsizeIntegrator integrator = new DormandPrince853Integrator(minStep, maxstep,
                    tolerances[0], tolerances[1]);

            NumericalPropagator propagator = new NumericalPropagator(integrator);
            propagator.setOrbitType(propagationType);

            // set up and add force models
            double AMR = 4.0;
            double crossSection = mass * AMR;
            double Cd = 0.01;
            double Cr = 0.5;
            double Co = 0.8;
            NormalizedSphericalHarmonicsProvider provider = GravityFieldFactory.getNormalizedProvider(4, 4);
            ForceModel holmesFeatherstone = new HolmesFeatherstoneAttractionModel(
                    FramesFactory.getITRF(IERSConventions.IERS_2010, true), provider);
            SphericalSpacecraft ssc = new SphericalSpacecraft(crossSection, Cd, Cr, Co);
            PVCoordinatesProvider sun = CelestialBodyFactory.getSun();
            SolarRadiationPressure srp = new SolarRadiationPressure(sun,
                    Constants.WGS84_EARTH_EQUATORIAL_RADIUS, ssc);

            //                propagator.addForceModel(srp);
            //                propagator.addForceModel(holmesFeatherstone);
            propagator.setInitialState(initialState);

            // propagate the orbits with step size and tracklet length at several epochs (tracklets)
            Vector<AbsoluteDate> startDates = new Vector<>();
            startDates.setSize(1);
            startDates.setElementAt(new AbsoluteDate(2016, 1, 26, 20, 00, 00, utc), 0);

            // set the step size [s] and total length
            double tstep = 100;
            double ld = 3;
            double ls = FastMath.floor(ld * (24 * 60 * 60) / tstep);
            System.out.println(ls);

            SpacecraftState currentStateKep = kepler.propagate(startDates.get(0));
            SpacecraftState currentStatePer = propagator.propagate(startDates.get(0));

            for (int tt = 0; tt < startDates.size(); tt++) {

                // set up output file
                String app = tles.get(ii).getSatelliteNumber() + "_" + startDates.get(tt) + ".txt";

                // with formatted output
                File file1 = new File("/home/zittersteijn/Documents/propagate/keplerian/MEO/" + app);
                File file2 = new File("/home/zittersteijn/Documents/propagate/perturbed/MEO/" + app);
                file1.createNewFile();
                file2.createNewFile();
                Formatter fmt1 = new Formatter(file1);
                Formatter fmt2 = new Formatter(file2);

                for (int kk = 0; kk < (int) ls; kk++) {
                    AbsoluteDate propDate = startDates.get(tt).shiftedBy(tstep * kk);
                    currentStateKep = kepler.propagate(propDate);
                    currentStatePer = propagator.propagate(propDate);

                    System.out.println(currentStateKep.getPVCoordinates().getPosition() + "\t"
                            + currentStateKep.getDate());

                    // convert to RADEC coordinates
                    double[] radecKep = conversions.geo2radec(currentStateKep.getPVCoordinates(), staF,
                            inertialFrame, propDate);
                    double[] radecPer = conversions.geo2radec(currentStatePer.getPVCoordinates(), staF,
                            inertialFrame, propDate);

                    // write the orbit to separate files with the RA, DEC, epoch and fence given
                    AbsoluteDate year = new AbsoluteDate(YEAR, utc);
                    fmt1.format("%.12f %.12f %.12f %d%n", radecKep[0], radecKep[2],
                            (currentStateKep.getDate().durationFrom(year) / (24 * 3600)), (tt + 1));
                    fmt2.format("%.12f %.12f %.12f %d%n", radecPer[0], radecPer[2],
                            (currentStateKep.getDate().durationFrom(year) / (24 * 3600)), (tt + 1));

                }
                fmt1.flush();
                fmt1.close();
                fmt2.flush();
                fmt2.close();

            }
            double[] radecKep = conversions.geo2radec(currentStateKep.getPVCoordinates(), staF, inertialFrame,
                    new AbsoluteDate(startDates.get(0), ls * tstep));
            double[] radecPer = conversions.geo2radec(currentStatePer.getPVCoordinates(), staF, inertialFrame,
                    new AbsoluteDate(startDates.get(0), ls * tstep));
            double sig0 = 1.0 / 3600.0 / 180.0 * FastMath.PI;
            double dRA = radecKep[0] - radecPer[0] / (sig0 * sig0);
            double dDEC = radecKep[2] - radecPer[2] / (sig0 * sig0);

            System.out.println(dRA + "\t" + dDEC);

        }

    } catch (FileNotFoundException ex) {
        Logger.getLogger(GenTracklets.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException iox) {
        Logger.getLogger(GenTracklets.class.getName()).log(Level.SEVERE, null, iox);
    }

}

From source file:DIA_Umpire_Quant.DIA_Umpire_IntLibSearch.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws FileNotFoundException, IOException, Exception {
    System.out.println(
            "=================================================================================================");
    System.out.println("DIA-Umpire targeted re-extraction analysis using internal library (version: "
            + UmpireInfo.GetInstance().Version + ")");
    if (args.length != 1) {
        System.out.println(
                "command format error, the correct format should be : java -jar -Xmx10G DIA_Umpire_IntLibSearch.jar diaumpire_module.params");
        return;
    }
    try {
        ConsoleLogger.SetConsoleLogger(Level.INFO);
        ConsoleLogger.SetFileLogger(Level.DEBUG,
                FilenameUtils.getFullPath(args[0]) + "diaumpire_intlibsearch.log");
    } catch (Exception e) {
    }

    Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version);
    Logger.getRootLogger().info("Parameter file:" + args[0]);

    BufferedReader reader = new BufferedReader(new FileReader(args[0]));
    String line = "";
    String WorkFolder = "";
    int NoCPUs = 2;

    String InternalLibID = "";

    float ProbThreshold = 0.99f;
    float RTWindow_Int = -1f;
    float Freq = 0f;
    int TopNFrag = 6;

    TandemParam tandemPara = new TandemParam(DBSearchParam.SearchInstrumentType.TOF5600);
    HashMap<String, File> AssignFiles = new HashMap<>();

    //<editor-fold defaultstate="collapsed" desc="Reading parameter file">
    while ((line = reader.readLine()) != null) {
        line = line.trim();
        Logger.getRootLogger().info(line);
        if (!"".equals(line) && !line.startsWith("#")) {
            //System.out.println(line);
            if (line.equals("==File list begin")) {
                do {
                    line = reader.readLine();
                    line = line.trim();
                    if (line.equals("==File list end")) {
                        continue;
                    } else if (!"".equals(line)) {
                        File newfile = new File(line);
                        if (newfile.exists()) {
                            AssignFiles.put(newfile.getAbsolutePath(), newfile);
                        } else {
                            Logger.getRootLogger().info("File: " + newfile + " does not exist.");
                        }
                    }
                } while (!line.equals("==File list end"));
            }
            if (line.split("=").length < 2) {
                continue;
            }
            String type = line.split("=")[0].trim();
            String value = line.split("=")[1].trim();
            switch (type) {
            case "Path": {
                WorkFolder = value;
                break;
            }
            case "path": {
                WorkFolder = value;
                break;
            }
            case "Thread": {
                NoCPUs = Integer.parseInt(value);
                break;
            }

            case "InternalLibID": {
                InternalLibID = value;
                break;
            }

            case "RTWindow_Int": {
                RTWindow_Int = Float.parseFloat(value);
                break;
            }

            case "ProbThreshold": {
                ProbThreshold = Float.parseFloat(value);
                break;
            }
            case "TopNFrag": {
                TopNFrag = Integer.parseInt(value);
                break;
            }
            case "Freq": {
                Freq = Float.parseFloat(value);
                break;
            }
            case "Fasta": {
                tandemPara.FastaPath = value;
                break;
            }
            }
        }
    }
    //</editor-fold>

    //Initialize PTM manager using compomics library
    PTMManager.GetInstance();

    //Check if the fasta file can be found
    if (!new File(tandemPara.FastaPath).exists()) {
        Logger.getRootLogger().info("Fasta file :" + tandemPara.FastaPath
                + " cannot be found, the process will be terminated, please check.");
        System.exit(1);
    }

    //Generate DIA file list
    ArrayList<DIAPack> FileList = new ArrayList<>();
    try {
        File folder = new File(WorkFolder);
        if (!folder.exists()) {
            Logger.getRootLogger().info("The path : " + WorkFolder + " cannot be found.");
            System.exit(1);
        }
        for (final File fileEntry : folder.listFiles()) {
            if (fileEntry.isFile()
                    && (fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                            | fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                    && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry);
            }
            if (fileEntry.isDirectory()) {
                for (final File fileEntry2 : fileEntry.listFiles()) {
                    if (fileEntry2.isFile()
                            && (fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml")
                                    | fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzml"))
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml")
                            && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) {
                        AssignFiles.put(fileEntry2.getAbsolutePath(), fileEntry2);
                    }
                }
            }
        }

        Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size());
        for (File fileEntry : AssignFiles.values()) {
            Logger.getRootLogger().info(fileEntry.getAbsolutePath());
        }
        for (File fileEntry : AssignFiles.values()) {
            String mzXMLFile = fileEntry.getAbsolutePath();
            if (mzXMLFile.toLowerCase().endsWith(".mzxml") | mzXMLFile.toLowerCase().endsWith(".mzml")) {
                DIAPack DiaFile = new DIAPack(mzXMLFile, NoCPUs);
                Logger.getRootLogger().info(
                        "=================================================================================================");
                Logger.getRootLogger().info("Processing " + mzXMLFile);
                if (!DiaFile.LoadDIASetting()) {
                    Logger.getRootLogger().info("Loading DIA setting failed, job is incomplete");
                    System.exit(1);
                }
                if (!DiaFile.LoadParams()) {
                    Logger.getRootLogger().info("Loading parameters failed, job is incomplete");
                    System.exit(1);
                }
                Logger.getRootLogger().info("Loading identification results " + mzXMLFile + "....");

                //If the serialization file for ID file existed
                if (DiaFile.ReadSerializedLCMSID()) {
                    DiaFile.IDsummary.ReduceMemoryUsage();
                    DiaFile.IDsummary.FastaPath = tandemPara.FastaPath;
                    FileList.add(DiaFile);
                }
            }
        }

        //<editor-fold defaultstate="collapsed" desc="Targeted re-extraction using internal library">
        Logger.getRootLogger().info(
                "=================================================================================================");
        if (FileList.size() > 1) {
            Logger.getRootLogger().info("Targeted re-extraction using internal library");

            FragmentLibManager libManager = FragmentLibManager.ReadFragmentLibSerialization(WorkFolder,
                    InternalLibID);
            if (libManager == null) {
                Logger.getRootLogger().info("Building internal spectral library");
                libManager = new FragmentLibManager(InternalLibID);
                ArrayList<LCMSID> LCMSIDList = new ArrayList<>();
                for (DIAPack dia : FileList) {
                    LCMSIDList.add(dia.IDsummary);
                }
                libManager.ImportFragLibTopFrag(LCMSIDList, Freq, TopNFrag);
                libManager.WriteFragmentLibSerialization(WorkFolder);
            }
            libManager.ReduceMemoryUsage();

            Logger.getRootLogger()
                    .info("Building retention time prediction model and generate candidate peptide list");
            for (int i = 0; i < FileList.size(); i++) {
                FileList.get(i).IDsummary.ClearMappedPep();
            }
            for (int i = 0; i < FileList.size(); i++) {
                for (int j = i + 1; j < FileList.size(); j++) {
                    RTAlignedPepIonMapping alignment = new RTAlignedPepIonMapping(WorkFolder,
                            FileList.get(i).GetParameter(), FileList.get(i).IDsummary,
                            FileList.get(j).IDsummary);
                    alignment.GenerateModel();
                    alignment.GenerateMappedPepIon();
                }
                FileList.get(i).ExportID();
                FileList.get(i).IDsummary = null;
            }

            Logger.getRootLogger().info("Targeted matching........");
            for (DIAPack diafile : FileList) {
                if (diafile.IDsummary == null) {
                    diafile.ReadSerializedLCMSID();
                }
                if (!diafile.IDsummary.GetMappedPepIonList().isEmpty()) {
                    diafile.UseMappedIon = true;
                    diafile.FilterMappedIonByProb = false;
                    diafile.BuildStructure();
                    diafile.MS1FeatureMap.ReadPeakCluster();
                    diafile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster();
                    diafile.GenerateMassCalibrationRTMap();
                    diafile.TargetedExtractionQuant(false, libManager, ProbThreshold, RTWindow_Int);
                    diafile.MS1FeatureMap.ClearAllPeaks();
                    diafile.IDsummary.ReduceMemoryUsage();
                    diafile.IDsummary.RemoveLowProbMappedIon(ProbThreshold);
                    diafile.ExportID();
                    Logger.getRootLogger().info("Peptide ions: " + diafile.IDsummary.GetPepIonList().size()
                            + " Mapped ions: " + diafile.IDsummary.GetMappedPepIonList().size());
                    diafile.ClearStructure();
                }
                diafile.IDsummary = null;
                System.gc();
            }
            Logger.getRootLogger().info(
                    "=================================================================================================");
        }
        //</editor-fold>

        Logger.getRootLogger().info("Job done");
        Logger.getRootLogger().info(
                "=================================================================================================");

    } catch (Exception e) {
        Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e));
        throw e;
    }
}

From source file:com.yahoo.athenz.example.ntoken.HttpExampleClient.java

public static void main(String[] args) throws MalformedURLException, IOException {

    // parse our command line to retrieve required input

    CommandLine cmd = parseCommandLine(args);

    String domainName = cmd.getOptionValue("domain");
    String serviceName = cmd.getOptionValue("service");
    String privateKeyPath = cmd.getOptionValue("pkey");
    String keyId = cmd.getOptionValue("keyid");
    String url = cmd.getOptionValue("url");

    // we need to generate our principal credentials (ntoken). In
    // addition to the domain and service names, we need
    // the service's private key and the key identifier - the
    // service with the corresponding public key must already be
    // registered in ZMS

    PrivateKey privateKey = Crypto.loadPrivateKey(new File(privateKeyPath));
    ServiceIdentityProvider identityProvider = new SimpleServiceIdentityProvider(domainName, serviceName,
            privateKey, keyId);
    Principal principal = identityProvider.getIdentity(domainName, serviceName);

    URL obj = new URL(url);
    HttpURLConnection con = (HttpURLConnection) obj.openConnection();

    // set our Athenz credentials. The authority in the principal provides
    // the header name that we must use for credentials while the principal
    // itself provides the credentials (ntoken).

    con.setRequestProperty(principal.getAuthority().getHeader(), principal.getCredentials());

    // now process our request

    int responseCode = con.getResponseCode();
    switch (responseCode) {
    case HttpURLConnection.HTTP_FORBIDDEN:
        System.out.println("Request was forbidden - not authorized: " + con.getResponseMessage());
        break;
    case HttpURLConnection.HTTP_OK:
        System.out.println("Successful response: ");
        try (BufferedReader in = new BufferedReader(new InputStreamReader(con.getInputStream()))) {
            String inputLine;
            while ((inputLine = in.readLine()) != null) {
                System.out.println(inputLine);
            }
        }
        break;
    default:
        System.out.println("Request failed - response status code: " + responseCode);
    }
}