Example usage for java.util.Map.get

List of usage examples for java.util.Map.get

Introduction

On this page you can find example usages of java.util.Map.get.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
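
Before the project examples below, here is a minimal standalone sketch of that contract (the MapGetDemo class and its map contents are illustrative only, not taken from any of the projects): get returns the mapped value for a present key and null for an absent one, so callers typically null-check the result or use getOrDefault for an inline fallback.

import java.util.HashMap;
import java.util.Map;

public class MapGetDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new HashMap<>();
        ages.put("alice", 31);

        // Present key: get returns the mapped value.
        Integer alice = ages.get("alice"); // 31

        // Absent key: get returns null instead of throwing.
        Integer bob = ages.get("bob"); // null

        // Typical null check before using the result.
        if (bob == null) {
            bob = 0;
        }

        // Equivalent inline fallback (Java 8+).
        int bobOrDefault = ages.getOrDefault("bob", 0);

        System.out.println(alice + " " + bob + " " + bobOrDefault);
    }
}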

Usage

From source file:com.hesine.manager.generate.Generate.java

public static void main(String[] args) throws Exception {
    File projectPath = new DefaultResourceLoader().getResource("").getFile();
    //        Generate.execute(projectPath);

    String packageName = "com.hesine.manager";
    String moduleName = "";

    // Fetch metadata for all tables whose names start with the "tb_" prefix.
    List<Map<String, String>> list = DBOperator.getTables("tb_");
    for (Map<String, String> a : list) {
        System.out.println(a.toString());
        if (a.get("tableName").equals("tb_userinfo")) {
            List<Map<String, String>> listTC = DBOperator.getTableColumns(a.get("tableName"));
            // Print each column definition returned for this table.
            for (Map<String, String> b : listTC) {
                System.out.println(b.toString());
                for (String key : b.keySet()) {
                    System.out.println(key + " : " + b.get(key));
                }

            }
            Generate.execute(projectPath, packageName, moduleName, a, listTC);
            break;
        }
    }

}

From source file:es.upm.oeg.tools.rdfshapes.utils.CadinalityResultGenerator.java

public static void main(String[] args) throws Exception {

    String endpoint = "http://3cixty.eurecom.fr/sparql";

    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());

    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());

    DecimalFormat df = new DecimalFormat("0.0000");

    //Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");

    int currentExcelRow = 0;
    int classStartRow = 0;

    for (String clazz : classList) {

        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);

        String queryString = bindQueryString(individualCountQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }

        // Zero individuals indicates a mismatch between the class list and the endpoint.
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }

        //            System.out.println("***");
        //            System.out.println("### **" + clazz + "** (" + individualCount + ")");
        //            System.out.println("***");
        //            System.out.println();

        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);

        queryString = bindQueryString(classPropertyQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");

        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {

                DescriptiveStatistics stats = new DescriptiveStatistics();

                String propertyURI = property.asResource().getURI();
                //                    System.out.println("* " + propertyURI);
                //                    System.out.println();

                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;

                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);

                Map<String, String> litMap2 = new HashMap<>();
                Map<String, String> iriMap2 = ImmutableMap.of("class", clazz, "p", propertyURI);

                queryString = bindQueryString(propertyCardinalityQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap2, LITERAL_BINDINGS, litMap2));

                List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint,
                        ImmutableSet.of("card", "count"));

                int sum = 0;
                List<CardinalityCount> cardinalityList = new ArrayList<>();
                if (solnMaps.size() > 0) {

                    for (Map<String, RDFNode> soln : solnMaps) {
                        int count = soln.get("count").asLiteral().getInt();
                        int card = soln.get("card").asLiteral().getInt();

                        for (int i = 0; i < count; i++) {
                            stats.addValue(card);
                        }

                        CardinalityCount cardinalityCount = new CardinalityCount(card, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                        sum += count;
                    }

                    // Check for zero cardinality instances
                    int count = individualCount - sum;
                    if (count > 0) {
                        for (int i = 0; i < count; i++) {
                            stats.addValue(0);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(0, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                    }
                }

                Map<Integer, Double> cardMap = new HashMap<>();
                for (CardinalityCount count : cardinalityList) {
                    cardMap.put(count.getCardinality(), count.getPrecentage());
                }

                XSSFCell instanceCountCell = propertyRow.createCell(2);
                instanceCountCell.setCellValue(individualCount);

                XSSFCell minCell = propertyRow.createCell(3);
                minCell.setCellValue(stats.getMin());

                XSSFCell maxCell = propertyRow.createCell(4);
                maxCell.setCellValue(stats.getMax());

                XSSFCell p1 = propertyRow.createCell(5);
                p1.setCellValue(stats.getPercentile(1));

                XSSFCell p99 = propertyRow.createCell(6);
                p99.setCellValue(stats.getPercentile(99));

                XSSFCell mean = propertyRow.createCell(7);
                mean.setCellValue(df.format(stats.getMean()));

                for (int i = 0; i < 21; i++) {
                    XSSFCell dataCell = propertyRow.createCell(8 + i);
                    Double percentage = cardMap.get(i);
                    if (percentage != null) {
                        dataCell.setCellValue(df.format(percentage));
                    } else {
                        dataCell.setCellValue(0);
                    }
                }

                //                    System.out.println("| Min Card. |Max Card. |");
                //                    System.out.println("|---|---|");
                //                    System.out.println("| ? | ? |");
                //                    System.out.println();

            }
        }

        //System.out.println("class start: " + classStartRow + ", class end: " + (currentExcelRow -1));
        //We have finished writing properties of one class, now it's time to merge the cells
        int classEndRow = currentExcelRow - 1;
        if (classStartRow < classEndRow) {
            sheet.addMergedRegion(new CellRangeAddress(classStartRow, classEndRow, 0, 0));
        }

    }

    String filename = "3cixty.xlsx";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}

From source file:net.wedjaa.elasticparser.tester.ESSearchTester.java

public static void main(String[] args) {

    System.out.println("ES Search Testing started.");

    System.out.println("Starting local node...");

    Settings nodeSettings = ImmutableSettings.settingsBuilder().put("transport.tcp.port", "9600-9700")
            .put("http.port", "9500").put("http.max_content_length", "104857600").build();

    Node node = NodeBuilder.nodeBuilder().settings(nodeSettings).clusterName("elasticparser.unittest").node();

    node.start();

    // Populate our test index
    System.out.println("Preparing Unit Test Index - this may take a while...");
    populateTest();

    try {
        System.out.println("...OK - Executing query!");
        // Try our searches
        ESSearch search = new ESSearch(null, null, ESSearch.ES_MODE_AGGS, "localhost", 9600,
                "elasticparser.unittest");
        search.search(getQuery("test-aggs.json"));
        Map<String, Object> hit = null;
        while ((hit = search.next()) != null) {
            System.out.println("Hit: {");
            for (String key : hit.keySet()) {
                System.out.println("  " + key + ": " + hit.get(key));
            }
            System.out.println("};");
        }
        search.close();
        Map<String, Class<?>> fields = search.getFields(getQuery("test-aggs.json"));
        List<String> sortedKeys = new ArrayList<String>(fields.keySet());
        Collections.sort(sortedKeys);
        Iterator<String> sortedKeyIter = sortedKeys.iterator();
        while (sortedKeyIter.hasNext()) {
            String fieldname = sortedKeyIter.next();
            System.out.println(" --> " + fieldname + "[" + fields.get(fieldname).getCanonicalName() + "]");
        }

    } catch (Exception ex) {
        System.out.println("Exception: " + ex);
    } finally {
        System.out.println("Stopping Test Node");
        node.stop();
    }
}

From source file:com.joliciel.talismane.terminology.TalismaneTermExtractorMain.java

public static void main(String[] args) throws Exception {
    String termFilePath = null;
    String outFilePath = null;
    Command command = Command.extract;
    int depth = -1;
    String databasePropertiesPath = null;
    String projectCode = null;
    String terminologyPropertiesPath = null;

    Map<String, String> argMap = StringUtils.convertArgs(args);

    String logConfigPath = argMap.get("logConfigFile");
    if (logConfigPath != null) {
        argMap.remove("logConfigFile");
        Properties props = new Properties();
        props.load(new FileInputStream(logConfigPath));
        PropertyConfigurator.configure(props);
    }

    Map<String, String> innerArgs = new HashMap<String, String>();
    for (Entry<String, String> argEntry : argMap.entrySet()) {
        String argName = argEntry.getKey();
        String argValue = argEntry.getValue();

        if (argName.equals("command"))
            command = Command.valueOf(argValue);
        else if (argName.equals("termFile"))
            termFilePath = argValue;
        else if (argName.equals("outFile"))
            outFilePath = argValue;
        else if (argName.equals("depth"))
            depth = Integer.parseInt(argValue);
        else if (argName.equals("databaseProperties"))
            databasePropertiesPath = argValue;
        else if (argName.equals("terminologyProperties"))
            terminologyPropertiesPath = argValue;
        else if (argName.equals("projectCode"))
            projectCode = argValue;
        else
            innerArgs.put(argName, argValue);
    }
    if (termFilePath == null && databasePropertiesPath == null)
        throw new TalismaneException("Required argument: termFile or databasePropertiesPath");

    if (termFilePath != null) {
        String currentDirPath = System.getProperty("user.dir");
        File termFileDir = new File(currentDirPath);
        if (termFilePath.lastIndexOf("/") >= 0) {
            String termFileDirPath = termFilePath.substring(0, termFilePath.lastIndexOf("/"));
            termFileDir = new File(termFileDirPath);
            termFileDir.mkdirs();
        }
    }

    long startTime = new Date().getTime();
    try {
        if (command.equals(Command.analyse)) {
            innerArgs.put("command", "analyse");
        } else {
            innerArgs.put("command", "process");
        }

        String sessionId = "";
        TalismaneServiceLocator locator = TalismaneServiceLocator.getInstance(sessionId);
        TalismaneService talismaneService = locator.getTalismaneService();

        TalismaneConfig config = talismaneService.getTalismaneConfig(innerArgs, sessionId);

        TerminologyServiceLocator terminologyServiceLocator = TerminologyServiceLocator.getInstance(locator);
        TerminologyService terminologyService = terminologyServiceLocator.getTerminologyService();
        TerminologyBase terminologyBase = null;

        if (projectCode == null)
            throw new TalismaneException("Required argument: projectCode");

        File file = new File(databasePropertiesPath);
        FileInputStream fis = new FileInputStream(file);
        Properties dataSourceProperties = new Properties();
        dataSourceProperties.load(fis);
        terminologyBase = terminologyService.getPostGresTerminologyBase(projectCode, dataSourceProperties);

        TalismaneSession talismaneSession = talismaneService.getTalismaneSession();

        if (command.equals(Command.analyse) || command.equals(Command.extract)) {
            Locale locale = talismaneSession.getLocale();
            Map<TerminologyProperty, String> terminologyProperties = new HashMap<TerminologyProperty, String>();
            if (terminologyPropertiesPath != null) {
                Map<String, String> terminologyPropertiesStr = StringUtils.getArgMap(terminologyPropertiesPath);
                for (String key : terminologyPropertiesStr.keySet()) {
                    try {
                        TerminologyProperty property = TerminologyProperty.valueOf(key);
                        terminologyProperties.put(property, terminologyPropertiesStr.get(key));
                    } catch (IllegalArgumentException e) {
                        throw new TalismaneException("Unknown terminology property: " + key);
                    }
                }
            } else {
                terminologyProperties = getDefaultTerminologyProperties(locale);
            }
            if (depth <= 0 && !terminologyProperties.containsKey(TerminologyProperty.maxDepth))
                throw new TalismaneException("Required argument: depth");

            InputStream regexInputStream = getInputStreamFromResource(
                    "parser_conll_with_location_input_regex.txt");
            Scanner regexScanner = new Scanner(regexInputStream, "UTF-8");
            String inputRegex = regexScanner.nextLine();
            regexScanner.close();
            config.setInputRegex(inputRegex);

            Charset outputCharset = config.getOutputCharset();

            TermExtractor termExtractor = terminologyService.getTermExtractor(terminologyBase,
                    terminologyProperties);
            if (depth > 0)
                termExtractor.setMaxDepth(depth);
            termExtractor.setOutFilePath(termFilePath);

            if (outFilePath != null) {
                if (outFilePath.lastIndexOf("/") >= 0) {
                    String outFileDirPath = outFilePath.substring(0, outFilePath.lastIndexOf("/"));
                    File outFileDir = new File(outFileDirPath);
                    outFileDir.mkdirs();
                }
                File outFile = new File(outFilePath);
                outFile.delete();
                outFile.createNewFile();

                Writer writer = new BufferedWriter(
                        new OutputStreamWriter(new FileOutputStream(outFilePath), outputCharset));
                TermAnalysisWriter termAnalysisWriter = new TermAnalysisWriter(writer);
                termExtractor.addTermObserver(termAnalysisWriter);
            }

            Talismane talismane = config.getTalismane();
            talismane.setParseConfigurationProcessor(termExtractor);
            talismane.process();
        } else if (command.equals(Command.list)) {

            List<Term> terms = terminologyBase.findTerms(2, null, 0, null, null);
            for (Term term : terms) {
                LOG.debug("Term: " + term.getText());
                LOG.debug("Frequency: " + term.getFrequency());
                LOG.debug("Heads: " + term.getHeads());
                LOG.debug("Expansions: " + term.getExpansions());
                LOG.debug("Contexts: " + term.getContexts());
            }
        }
    } finally {
        long endTime = new Date().getTime();
        long totalTime = endTime - startTime;
        LOG.info("Total time: " + totalTime);
    }
}

From source file:com.gNova.circularFP.Fingerprinter.java

public static void main(String... args) {
    parseCommandLine(args);

    // option to fp.generate for verbose output

    CFP ecfp = new CFP(verbose);
    oemolistream ifs = new oemolistream(Fingerprinter.infile);
    oemolostream ofs = new oemolostream(Fingerprinter.outfile);
    OEGraphMol mol = new OEGraphMol();

    /*
       // read first molecule; Tanimoto of others uses this initial fp
       oechem.OEReadMolecule(ifs, mol);
       ecfp.generate(mol, 3, verbose);
       Fingerprint fp0 = new CFingerprint(ecfp.get(), 512);
       //System.out.print(String.format("%5.3f",fp0.Tanimoto(fp0)));
       System.out.print(fp0.getNBits()+" ");
       oechem.OEWriteMolecule(ofs, mol);
       mol.Clear();
    */

    String tag;
    if (type.equals("functional")) {
        tag = "FFP";
        List<String> smarts = null;

        if (smaFile != null)
            smarts = readSmartsFile();
        ecfp.initializeSmarts(smarts);

    } else {
        tag = "AFP";
    }

    while (oechem.OEReadMolecule(ifs, mol)) {
        Fingerprint fp;

        for (int lvl : levels) {
            ecfp.clear();
            ecfp.generate(mol, lvl, type);
            if (format.equals("hex")) {
                fp = CFingerprint.createCFingerprint(ecfp.getCounts(0), nbits, countType);
                oechem.OESetSDData(mol, tag + lvl, fp.getHexString());

            } else if (format.equals("bitlist")) {
                fp = CFingerprint.createCFingerprint(ecfp.getCounts(0), nbits, countType);
                oechem.OESetSDData(mol, tag + lvl, fp.getBitString());

            } else if (format.equals("counts")) {
                Map<Integer, Integer> counts = ecfp.getCounts(nbits);
                String myTag = tag + lvl + '_';
                for (int i = 0; i < nbits; i++) {
                    Integer count = counts.get(i);
                    if (count != null)
                        oechem.OESetSDData(mol, (myTag + i), count.toString());
                    else
                        oechem.OESetSDData(mol, (myTag + i), "0");
                }
            } else if (format.equals("atomID")) {
                fp = CFingerprint.createCFingerprint(ecfp.getCounts(0), nbits, countType);
                oechem.OESetSDData(mol, tag + lvl, fp.getAtomIDString());

            }
            //tag = "CFPBits" + i;
            //oechem.OESetSDData(mol, tag, Integer.toString(fp.getNBits()));
        }
        oechem.OEWriteMolecule(ofs, mol);
        mol.Clear();
    }
}

From source file:Main.java

public static void main(String[] args) throws Exception {
    Map<String, String> map = new HashMap<String, String>();
    map.put("cluster", "10.200.111.111");
    map.put("cluster1", "10.200.121.111");

    Product xml = new Product();
    List<Top> top1 = new ArrayList<Top>();
    Set<String> keys = map.keySet();
    for (String key : keys) {
        Top top = new Top();
        top.setMode(key);
        top.setAddress(map.get(key));
        top1.add(top);
    }
    xml.setTop(top1);
    File file = new File("C:\\kar\\file.xml");
    JAXBContext jaxbContext = JAXBContext.newInstance(Product.class);
    Marshaller jaxbMarshaller = jaxbContext.createMarshaller();

    // output pretty printed
    jaxbMarshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, true);

    jaxbMarshaller.marshal(xml, file);
    jaxbMarshaller.marshal(xml, System.out);
}

From source file:com.act.analysis.surfactant.AnalysisDriver.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        return;
    }

    Set<String> seenOutputIds = new HashSet<>();

    TSVWriter<String, String> tsvWriter = null;
    if (cl.hasOption(OPTION_OUTPUT_FILE)) {
        File outputFile = new File(cl.getOptionValue(OPTION_OUTPUT_FILE));
        List<Map<String, String>> oldResults = null;
        if (outputFile.exists()) {
            System.err.format(
                    "Output file already exists, reading old results and skipping processed molecules.\n");
            TSVParser outputParser = new TSVParser();
            outputParser.parse(outputFile);
            oldResults = outputParser.getResults();
            for (Map<String, String> row : oldResults) {
                // TODO: verify that the last row was written cleanly/completely.
                seenOutputIds.add(row.get("id"));
            }
        }

        List<String> header = new ArrayList<>();
        header.add("name");
        header.add("id");
        header.add("inchi");
        header.add("label");
        for (SurfactantAnalysis.FEATURES f : SurfactantAnalysis.FEATURES.values()) {
            header.add(f.toString());
        }
        // TODO: make this API more auto-closable friendly.
        tsvWriter = new TSVWriter<>(header);
        tsvWriter.open(outputFile);
        if (oldResults != null) {
            System.out.format("Re-writing %d existing result rows\n", oldResults.size());
            tsvWriter.append(oldResults);
        }
    }

    try {
        Map<SurfactantAnalysis.FEATURES, Double> analysisFeatures;

        LicenseManager.setLicenseFile(cl.getOptionValue(OPTION_LICENSE_FILE));
        if (cl.hasOption(OPTION_INCHI)) {
            analysisFeatures = SurfactantAnalysis.performAnalysis(cl.getOptionValue(OPTION_INCHI),
                    cl.hasOption(OPTION_DISPLAY));
            Map<String, String> tsvFeatures = new HashMap<>();
            // Convert features to strings to avoid some weird formatting issues.  It's ugly, but it works.
            for (Map.Entry<SurfactantAnalysis.FEATURES, Double> entry : analysisFeatures.entrySet()) {
                tsvFeatures.put(entry.getKey().toString(), String.format("%.6f", entry.getValue()));
            }
            tsvFeatures.put("name", "direct-inchi-input");
            if (tsvWriter != null) {
                tsvWriter.append(tsvFeatures);
            }
        } else if (cl.hasOption(OPTION_INPUT_FILE)) {
            TSVParser parser = new TSVParser();
            parser.parse(new File(cl.getOptionValue(OPTION_INPUT_FILE)));
            int i = 0;
            List<Map<String, String>> inputRows = parser.getResults();

            for (Map<String, String> row : inputRows) {
                i++; // Just for warning messages.
                if (!row.containsKey("name") || !row.containsKey("id") || !row.containsKey("inchi")) {
                    System.err.format(
                            "WARNING: TSV rows must contain at least name, id, and inchi, skipping row %d\n",
                            i);
                    continue;
                }
                if (seenOutputIds.contains(row.get("id"))) {
                    System.out.format("Skipping input row with id already in output: %s\n", row.get("id"));
                    continue;
                }

                System.out.format("Analysis for chemical %s\n", row.get("name"));
                try {
                    analysisFeatures = SurfactantAnalysis.performAnalysis(row.get("inchi"), false);
                } catch (Exception e) {
                    // Ignore exceptions for now.  Sometimes the regression analysis or Chemaxon processing chokes unexpectedly.
                    System.err.format("ERROR caught exception while processing '%s':\n", row.get("name"));
                    System.err.format("%s\n", e.getMessage());
                    e.printStackTrace(System.err);
                    System.err.println("Skipping...");
                    continue;
                }
                System.out.format("--- Done analysis for chemical %s\n", row.get("name"));

                // This is a duplicate of the OPTION_INCHI block code, but it's inside of a tight loop, so...
                Map<String, String> tsvFeatures = new HashMap<>();
                for (Map.Entry<SurfactantAnalysis.FEATURES, Double> entry : analysisFeatures.entrySet()) {
                    tsvFeatures.put(entry.getKey().toString(), String.format("%.6f", entry.getValue()));
                }
                tsvFeatures.put("name", row.get("name"));
                tsvFeatures.put("id", row.get("id"));
                tsvFeatures.put("inchi", row.get("inchi"));
                tsvFeatures.put("label", row.containsKey("label") ? row.get("label") : "?");
                if (tsvWriter != null) {
                    tsvWriter.append(tsvFeatures);
                    // Flush every time in case we crash or get interrupted.  The features must flow!
                    tsvWriter.flush();
                }
            }
        } else {
            throw new RuntimeException("Must specify inchi or input file");
        }
    } finally {
        if (tsvWriter != null) {
            tsvWriter.close();
        }
    }
}

From source file:edu.usu.sdl.wso2client.SampleWSRegistryClient.java

public static void main(String[] args) throws Exception {
    Registry registry = initialize();
    try {
        //load component

        List<ComponentAll> components;
        try (InputStream in = new FileInputStream("C:\\temp\\components.json")) {
            components = StringProcessor.defaultObjectMapper().readValue(in,
                    new TypeReference<List<ComponentAll>>() {
                    });
        } catch (IOException ex) {
            throw ex;
        }

        ComponentAll componentAll = components.get(0);

        Resource resource = registry.newResource();
        resource.setContent(componentAll.getComponent().getDescription());

        resource.setDescription("Storefront Component");
        resource.setMediaType("application/openstorefront");
        resource.setUUID(componentAll.getComponent().getComponentId());

        try {
            Map fieldMap = BeanUtils.describe(componentAll.getComponent());
            fieldMap.keySet().stream().forEach((key) -> {
                if ("description".equals(key) == false) {
                    resource.setProperty(Component.class.getSimpleName() + "_" + key, "" + fieldMap.get(key));
                    //System.out.println("key  = " + Component.class.getSimpleName() + "_" + key);
                    //System.out.println("Value  = " + fieldMap.get(key));
                }
            });
        } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException ex) {
            Logger.getLogger(StringProcessor.class.getName()).log(Level.SEVERE, null, ex);
        }

        String resourcePath = "/storefront/components/" + componentAll.getComponent().getComponentId();
        registry.put(resourcePath, resource);

        //
        System.out.println("A resource added to: " + resourcePath);
        //         registry.rateResource(resourcePath, 4);
        //
        //         System.out.println("Resource rated with 4 stars!");
        //         Comment comment = new Comment();
        //         comment.setText("Testing Connection");
        //         registry.addComment(resourcePath, comment);
        //         System.out.println("Comment added to resource");
        //
        //         Resource getResource = registry.get("/abc2");
        //         System.out.println("Resource retrived");
        //         System.out.println("Printing retrieved resource content: "
        //               + new String((byte[]) getResource.getContent()));

        //         Resource resource = registry.newResource();
        //         resource.setContent("Hello Out there!");
        //
        //         String resourcePath = "/abc3";
        //         registry.put(resourcePath, resource);
        //
        //         System.out.println("A resource added to: " + resourcePath);
        //
        //         registry.rateResource(resourcePath, 4);
        //
        //         System.out.println("Resource rated with 4 stars!");
        //         Comment comment = new Comment();
        //         comment.setText("Testing Connection");
        //         registry.addComment(resourcePath, comment);
        //         System.out.println("Comment added to resource");
        //
        //         Resource getResource = registry.get("/abc3");
        //         System.out.println("Resource retrived");
        //         System.out.println("Printing retrieved resource content: "
        //               + new String((byte[]) getResource.getContent()));
    } finally {
        //Close the session
        ((WSRegistryServiceClient) registry).logut();
    }
    System.exit(0);
}

From source file:org.mitre.mpf.wfm.rest_client.UnregisterComponent.java

public static void main(String[] args) {

    String filePath = "/home/mpf/mpf/trunk/java-hello-world/src/main/resources/HelloWorldComponent.json";
    //"/home/mpf/mpf/trunk/extraction/hello/cpp/src/helloComponent.json";
    String url = "http://localhost:8080/workflow-manager/rest/component/unregisterViaFile";
    final String credentials = "Basic bXBmOm1wZjEyMw==";

    Map<String, String> params = new HashMap<String, String>();

    System.out.println("Starting rest-client!");

    //not necessary for localhost
    //System.setProperty("http.proxyHost","gatekeeper.mitre.org");
    //System.setProperty("http.proxyPort","80");

    RequestInterceptor authorize = new RequestInterceptor() {
        @Override
        public void intercept(HttpRequestBase request) {
            request.addHeader("Authorization", credentials);
        }
    };
    RestClient client = RestClient.builder().requestInterceptor(authorize).build();

    if (args.length > 0) {
        filePath = args[0];
    }
    log.info(filePath);
    params.put("filePath", filePath);
    Map<String, String> stringVal = null;
    try {
        stringVal = client.get(url, params, Map.class);
    } catch (RestClientException e) {
        log.error("RestClientException occurred");
        e.printStackTrace();
    } catch (IOException e) {
        log.error("IOException occurred");
        e.printStackTrace();
    }
    if (stringVal != null) {
        System.out.println(stringVal.get("message"));
    }
}

From source file:com.act.analysis.similarity.SimilarityAnalysis.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        return;
    }

    LicenseManager.setLicenseFile(cl.getOptionValue(OPTION_LICENSE_FILE));

    if (cl.hasOption(OPTION_TARGET_INCHI) && cl.hasOption(OPTION_TARGET_FILE)) {
        System.err.format("Specify only one of -%s or -%s\n", OPTION_TARGET_INCHI, OPTION_TARGET_FILE);
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    List<SimilarityOperator> querySimilarityOperators = new ArrayList<>();
    List<String> header = new ArrayList<>();
    header.add("name");
    header.add("id");
    header.add("inchi");

    if (cl.hasOption(OPTION_QUERY_INCHI) && !cl.hasOption(OPTION_QUERY_FILE)) {
        SimilarityOperator so = makeSimilarityOperators("from inchi", cl.getOptionValue(OPTION_QUERY_INCHI));
        so.init();
        querySimilarityOperators.add(so);
        header.addAll(so.getResultFields());
    } else if (cl.hasOption(OPTION_QUERY_FILE) && !cl.hasOption(OPTION_QUERY_INCHI)) {
        TSVParser parser = new TSVParser();
        parser.parse(new File(cl.getOptionValue(OPTION_QUERY_FILE)));
        for (Map<String, String> row : parser.getResults()) {
            System.out.format("Compiling query for %s, %s\n", row.get("name"), row.get("inchi"));
            SimilarityOperator so = makeSimilarityOperators(row.get("name"), row.get("inchi"));
            so.init();
            querySimilarityOperators.add(so);
            header.addAll(so.getResultFields());
        }
    } else {
        System.err.format("Specify exactly one of -%s or -%s\n", OPTION_QUERY_INCHI, OPTION_QUERY_FILE);
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    List<Map<String, String>> targetChemicals = null;
    if (cl.hasOption(OPTION_TARGET_INCHI) && !cl.hasOption(OPTION_TARGET_FILE)) {
        String inchi = cl.getOptionValue(OPTION_TARGET_INCHI);
        targetChemicals = Collections.singletonList(new HashMap<String, String>() {
            {
                put("name", "direct-input");
                put("id", null);
                put("inchi", inchi);
            }
        });
    } else if (cl.hasOption(OPTION_TARGET_FILE) && !cl.hasOption(OPTION_TARGET_INCHI)) {
        TSVParser parser = new TSVParser();
        parser.parse(new File(cl.getOptionValue(OPTION_TARGET_FILE)));
        targetChemicals = parser.getResults();
    } else {
        System.err.format("Specify exactly one of -%s or -%s\n", OPTION_TARGET_INCHI, OPTION_TARGET_FILE);
        HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null,
                true);
        System.exit(1);
    }

    AlignmentMoleculeFactory alignmentMoleculeFactory = new AlignmentMoleculeFactory();

    // TODO: add symmetric computations for target as query and each query as target.
    TSVWriter<String, String> writer = new TSVWriter<>(header);
    writer.open(new File(cl.getOptionValue(OPTION_OUTPUT_FILE)));
    try {
        for (Map<String, String> row : targetChemicals) {
            Molecule targetMol = MolImporter.importMol(row.get("inchi"));
            Cleaner.clean(targetMol, 3); // This will assign 3D atom coordinates to the MolAtoms in targetMol.
            Molecule targetFragment = findLargestFragment(targetMol.convertToFrags());
            AlignmentMolecule am = alignmentMoleculeFactory.create(targetFragment,
                    AlignmentProperties.DegreeOfFreedomType.TRANSLATE_ROTATE_FLEXIBLE);
            Map<String, String> outputRow = new HashMap<>(row);
            System.out.format("Processing target %s\n", row.get("name"));
            for (SimilarityOperator so : querySimilarityOperators) {
                System.out.format("  running query %s\n", so.getName());
                Map<String, Double> results = so.calculateSimilarity(am);
                outputRow.putAll(doubleMapToStringMap(results));
            }
            writer.append(outputRow);
            writer.flush();
        }
    } finally {
        if (writer != null) {
            writer.close();
        }
    }
    System.out.format("Done\n");
}