Example usage for java.util Map get

List of usage examples for java.util Map get

Introduction

On this page you can find example usages of java.util.Map#get.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
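
As a quick, minimal sketch of this contract (not taken from any of the projects below; the class name MapGetDemo and the sample entries are purely illustrative):

import java.util.HashMap;
import java.util.Map;

public class MapGetDemo {
    public static void main(String[] args) {
        Map<String, Integer> ports = new HashMap<>();
        ports.put("http", 80);
        ports.put("https", 443);

        // Key present: get returns the mapped value.
        Integer http = ports.get("http");        // 80

        // Key absent: get returns null instead of throwing.
        Integer ftp = ports.get("ftp");          // null

        // getOrDefault avoids an explicit null check when a fallback is acceptable.
        int ssh = ports.getOrDefault("ssh", 22); // 22

        System.out.println(http + " / " + ftp + " / " + ssh);
    }
}

Note that get also returns null when a key is explicitly mapped to null, so code that needs to distinguish "absent" from "mapped to null" should pair it with containsKey.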

Usage

From source file:hk.idv.kenson.jrconsole.Console.java

/**
 * @param args
 */
public static void main(String[] args) {
    try {
        Map<String, Object> params = Console.parseArgs(args);
        if (params.containsKey("help")) {
            printUsage();
            return;
        }
        if (params.containsKey("version")) {
            System.err.println("Version: " + VERSION);
            return;
        }
        if (params.containsKey("debug")) {
            for (String key : params.keySet())
                log.info("\"" + key + "\" => \"" + params.get(key) + "\"");
            return;
        }

        checkParam(params);
        stepCompile(params);
        JasperReport jasper = stepLoadReport(params);
        JasperPrint print = stepFill(jasper, params);
        InputStream stream = stepExport(print, params);

        File output = new File(params.get("output").toString());
        FileOutputStream fos = new FileOutputStream(output);
        copy(stream, fos);

        fos.close();
        stream.close();
        System.out.println(output.getAbsolutePath()); //Output the report path for pipe
    } catch (IllegalArgumentException ex) {
        printUsage();
        System.err.println("Error: " + ex.getMessage());
        ex.printStackTrace();
    } catch (RuntimeException ex) {
        throw ex;
    } catch (Exception ex) {
        throw new RuntimeException("Unexpected exception", ex);
    }
}

From source file:com.pansky.integration.generate.Generate.java

public static void main(String[] args) throws Exception {

    // ========== Generator configuration ====================

    // Generated files follow this path convention:
    // {packageName}/{moduleName}/{dao,entity,service,web}/{subModuleName}/{className}

    // packageName must match the base-package / packagesToScan settings in applicationContext.xml and srping-mvc.xml (4 places)
    String packageName = "com.pansky.integration.modules";

    String moduleName = "test"; // module name, e.g. sys
    String subModuleName = "test1"; // sub-module name (may be blank)
    String className = "test1"; // class name, e.g. user
    String classAuthor = "renmh"; // author, e.g. ThinkGem
    String functionName = ""; // function (feature) name

    // Enable switch for the generator
    //      Boolean isEnable = false;
    Boolean isEnable = true;

    // ========== Generator configuration ====================

    if (!isEnable) {
        logger.error("????isEnable = true");
        return;
    }

    if (StringUtils.isBlank(moduleName) || StringUtils.isBlank(className)
            || StringUtils.isBlank(functionName)) {
        logger.error("moduleName, className and functionName must not be blank");
        return;
    }

    // File separator
    String separator = File.separator;

    // Project path
    File projectPath = new DefaultResourceLoader().getResource("").getFile();
    while (!new File(projectPath.getPath() + separator + "src" + separator + "main").exists()) {
        projectPath = projectPath.getParentFile();
    }
    logger.info("Project Path: {}", projectPath);

    // Template path
    String tplPath = StringUtils
            .replace(projectPath + "/src/main/java/com/pansky/integration/generate/template", "/", separator);
    logger.info("Template Path: {}", tplPath);

    // Java source paths
    String javaPath = StringUtils.replaceEach(
            projectPath + "/src/main/java/" + StringUtils.lowerCase(packageName), new String[] { "/", "." },
            new String[] { separator, separator });
    String javaTestPath = StringUtils.replaceEach(
            projectPath + "/src/test/java/" + StringUtils.lowerCase(packageName), new String[] { "/", "." },
            new String[] { separator, separator });
    logger.info("Java Path: {}", javaPath);

    // View path
    String viewPath = StringUtils.replace(projectPath + "/src/main/webapp/WEB-INF/views", "/", separator);
    logger.info("View Path: {}", viewPath);

    // FreeMarker configuration
    Configuration cfg = new Configuration();
    cfg.setDefaultEncoding("UTF-8");
    cfg.setDirectoryForTemplateLoading(new File(tplPath));

    // Template model (variables available to the templates)
    Map<String, String> model = Maps.newHashMap();
    model.put("packageName", StringUtils.lowerCase(packageName));
    model.put("moduleName", StringUtils.lowerCase(moduleName));
    model.put("subModuleName",
            StringUtils.isNotBlank(subModuleName) ? "." + StringUtils.lowerCase(subModuleName) : "");
    model.put("className", StringUtils.uncapitalize(className));
    model.put("ClassName", StringUtils.capitalize(className));
    model.put("classAuthor", StringUtils.isNotBlank(classAuthor) ? classAuthor : "Generate Tools");
    model.put("classVersion", DateUtils.getDate());
    model.put("functionName", functionName);
    model.put("tableName",
            model.get("moduleName")
                    + (StringUtils.isNotBlank(subModuleName) ? "_" + StringUtils.lowerCase(subModuleName) : "")
                    + "_" + model.get("className"));
    model.put("urlPrefix",
            model.get("moduleName")
                    + (StringUtils.isNotBlank(subModuleName) ? "/" + StringUtils.lowerCase(subModuleName) : "")
                    + "/" + model.get("className"));
    model.put("viewPrefix", //StringUtils.substringAfterLast(model.get("packageName"),".")+"/"+
            model.get("urlPrefix"));
    model.put("permissionPrefix",
            model.get("moduleName")
                    + (StringUtils.isNotBlank(subModuleName) ? ":" + StringUtils.lowerCase(subModuleName) : "")
                    + ":" + model.get("className"));

    // Generate Entity
    Template template = cfg.getTemplate("entity.ftl");
    String content = FreeMarkers.renderTemplate(template, model);
    String filePath = javaPath + separator + model.get("moduleName") + separator + "entity" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + ".java";
    writeFile(content, filePath);
    logger.info("Entity: {}", filePath);

    // Generate Dao
    template = cfg.getTemplate("dao.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("moduleName") + separator + "dao" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + "Dao.java";
    writeFile(content, filePath);
    logger.info("Dao: {}", filePath);

    // Generate Service
    template = cfg.getTemplate("service.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("moduleName") + separator + "service" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + "Service.java";
    writeFile(content, filePath);
    logger.info("Service: {}", filePath);

    // Generate Controller
    template = cfg.getTemplate("controller.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("moduleName") + separator + "web" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + "Controller.java";
    writeFile(content, filePath);
    logger.info("Controller: {}", filePath);

    // Generate ViewForm
    template = cfg.getTemplate("viewForm.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + StringUtils.substringAfterLast(model.get("packageName"), ".") + separator
            + model.get("moduleName") + separator + StringUtils.lowerCase(subModuleName) + separator
            + model.get("className") + "Form.jsp";
    writeFile(content, filePath);
    logger.info("ViewForm: {}", filePath);

    // Generate ViewList
    template = cfg.getTemplate("viewList.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + StringUtils.substringAfterLast(model.get("packageName"), ".") + separator
            + model.get("moduleName") + separator + StringUtils.lowerCase(subModuleName) + separator
            + model.get("className") + "List.jsp";
    writeFile(content, filePath);
    logger.info("ViewList: {}", filePath);
    // Generate ServiceTest
    template = cfg.getTemplate("serviceTest.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaTestPath + separator + model.get("moduleName") + separator + "service" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + "ServiceTest.java";
    writeFile(content, filePath);
    logger.info("ServiceTest: {}", filePath);
    logger.info("Generate Success.");
}

From source file:MyMap.java

public static void main(String[] argv) {

    // Construct and load the hash. This simulates loading a
    // database or reading from a file, or wherever the data is.

    Map map = new MyMap();

    // The hash maps from company name to address.
    // In real life this might map to an Address object...
    map.put("Adobe", "Mountain View, CA");
    map.put("Learning Tree", "Los Angeles, CA");
    map.put("IBM", "White Plains, NY");
    map.put("Netscape", "Mountain View, CA");
    map.put("Microsoft", "Redmond, WA");
    map.put("Sun", "Mountain View, CA");
    map.put("O'Reilly", "Sebastopol, CA");

    // Two versions of the "retrieval" phase.
    // Version 1: get one pair's value given its key
    // (presumably the key would really come from user input):
    String queryString = "O'Reilly";
    System.out.println("You asked about " + queryString + ".");
    String resultString = (String) map.get(queryString);
    System.out.println("They are located in: " + resultString);
    System.out.println();

    // Version 2: get ALL the keys and pairs 
    // (maybe to print a report, or to save to disk)
    Iterator k = map.keySet().iterator();
    while (k.hasNext()) {
        String key = (String) k.next();
        System.out.println("Key " + key + "; Value " + (String) map.get(key));
    }

    // Step 3 - try out the entrySet() method.
    Set es = map.entrySet();
    System.out.println("entrySet() returns " + es.size() + " Map.Entry's");
}

From source file:net.itransformers.postDiscoverer.core.ReportManager.java

public static void main(String[] args) throws IOException {

    File projectDir = new File(".");
    File scriptPath = new File("postDiscoverer/src/main/resources/postDiscoverer/conf/groovy/");

    ResourceManagerFactory resourceManagerFactory = new XmlResourceManagerFactory(
            "iDiscover/resourceManager/xmlResourceManager/src/main/resources/xmlResourceManager/conf/xml/resource.xml");
    Map<String, String> resourceManagerParams = new HashMap<>();
    resourceManagerParams.put("projectPath", projectDir.getAbsolutePath());
    ResourceManager resourceManager = resourceManagerFactory.createResourceManager("xml",
            resourceManagerParams);

    Map<String, String> params = new HashMap<String, String>();
    params.put("protocol", "telnet");
    params.put("deviceName", "R1");
    params.put("deviceType", "CISCO");
    params.put("address", "10.17.1.5");
    params.put("port", "23");

    ResourceType resource = resourceManager.findFirstResourceBy(params);
    List connectParameters = resource.getConnectionParams();

    for (int i = 0; i < connectParameters.size(); i++) {
        ConnectionParamsType connParamsType = (ConnectionParamsType) connectParameters.get(i);

        String connectionType = connParamsType.getConnectionType();
        if (connectionType.equalsIgnoreCase(params.get("protocol"))) {

            for (ParamType param : connParamsType.getParam()) {
                params.put(param.getName(), param.getValue());
            }

        }
    }

    File postDiscoveryConfig = new File(
            projectDir + "/postDiscoverer/src/main/resources/postDiscoverer/conf/xml/reportGenerator.xml");
    if (!postDiscoveryConfig.exists()) {
        System.out.println("File missing: " + postDiscoveryConfig.getAbsolutePath());
        return;
    }

    ReportGeneratorType reportGenerator = null;

    FileInputStream is = new FileInputStream(postDiscoveryConfig);
    try {
        reportGenerator = JaxbMarshalar.unmarshal(ReportGeneratorType.class, is);
    } catch (JAXBException e) {
        logger.info(e);
    } finally {
        is.close();
    }

    ReportManager reportManager = new ReportManager(reportGenerator, scriptPath.getPath(), projectDir,
            "postDiscoverer/conf/xslt/table_creator.xslt");
    StringBuffer report = null;

    HashMap<String, Object> groovyExecutorParams = new HashMap<String, Object>();

    for (String s : params.keySet()) {
        groovyExecutorParams.put(s, params.get(s));
    }

    try {
        report = reportManager.reportExecutor(
                new File("/Users/niau/Projects/Projects/netTransformer10/version1/post-discovery"),
                groovyExecutorParams);
    } catch (ParserConfigurationException e) {
        e.printStackTrace();
    } catch (SAXException e) {
        e.printStackTrace();
    }
    if (report != null) {
        System.out.println(report.toString());

    } else {
        System.out.println("Report generation failed!");

    }
}

From source file:importer.handler.post.stages.Splitter.java

/** test and commandline utility */
public static void main(String[] args) {
    if (args.length >= 1) {
        try {
            int i = 0;
            int fileIndex = 0;
            // see if the user supplied a conf file
            String textConf = Discriminator.defaultConf;
            while (i < args.length) {
                if (args[i].equals("-c") && i < args.length - 1) {
                    textConf = readConfig(args[i + 1]);
                    i += 2;
                } else {
                    fileIndex = i;
                    i++;
                }
            }
            File f = new File(args[fileIndex]);
            char[] data = new char[(int) f.length()];
            FileReader fr = new FileReader(f);
            fr.read(data);
            JSONObject config = (JSONObject) JSONValue.parse(textConf);
            Splitter split = new Splitter(config);
            Map<String, String> map = split.split(new String(data));
            Set<String> keys = map.keySet();
            String rawFileName = args[fileIndex];
            int pos = rawFileName.lastIndexOf(".");
            if (pos != -1)
                rawFileName = rawFileName.substring(0, pos);
            Iterator<String> iter = keys.iterator();
            while (iter.hasNext()) {
                String key = iter.next();
                String fName = rawFileName + "-" + key + ".xml";
                File g = new File(fName);
                if (g.exists())
                    g.delete();
                FileOutputStream fos = new FileOutputStream(g);
                fos.write(map.get(key).getBytes("UTF-8"));
                fos.close();
            }
        } catch (Exception e) {
            e.printStackTrace(System.out);
        }
    } else
        System.out.println("usage: java -jar split.jar [-c json-config] <tei-xml>\n");
}

From source file:com.sxjun.core.generate.Generate.java

public static void main(String[] args) throws Exception {

    // ========== Generator configuration ====================
    // Generated files follow this path convention:
    // {packageName}/{moduleName}/{dao,entity,service,web}/{subModuleName}/{className}
    // packageName must match the base-package / packagesToScan settings in applicationContext.xml and srping-mvc.xml (4 places)
    String packageName = "com.sxjun";
    String moduleName = "retrieval"; // module name, e.g. sys
    String subModuleName = ""; // sub-module name (may be blank)
    String className = "IndexManager"; // class name, e.g. user
    String classAuthor = "sxjun"; // author, e.g. ThinkGem
    String functionName = "?"; // function (feature) name

    // Enable switch for the generator
    Boolean isEnable = true;
    // ========== Generator configuration ====================
    if (!isEnable) {
        logger.error("Generator is disabled, please set isEnable = true");
        return;
    }

    if (StringUtils.isBlank(moduleName) || StringUtils.isBlank(className)
            || StringUtils.isBlank(functionName)) {
        logger.error("moduleName, className and functionName must not be blank");
        return;
    }

    // File separator
    String separator = File.separator;
    String classPath = new DefaultResourceLoader().getResource("").getFile().getPath();
    String templatePath = classPath.replace(
            separator + "WebRoot" + separator + "WEB-INF" + separator + "classes",
            separator + "src" + separator + "com" + separator + "sxjun" + separator + "retrieval");
    String javaPath = classPath.replace(separator + "WebRoot" + separator + "WEB-INF" + separator + "classes",
            separator + "src" + separator + (StringUtils.lowerCase(packageName)).replace(".", separator));
    String viewPath = classPath.replace(separator + "classes", separator + "retrieval");

    // FreeMarker configuration
    Configuration cfg = new Configuration();
    cfg.setDirectoryForTemplateLoading(new File(templatePath.substring(0, templatePath.lastIndexOf(separator))
            + separator + "generate" + separator + "template"));

    // Template model
    Map<String, String> model = Maps.newHashMap();
    model.put("packageName", StringUtils.lowerCase(packageName));
    model.put("moduleName", StringUtils.lowerCase(moduleName));
    model.put("subModuleName",
            StringUtils.isNotBlank(subModuleName) ? "." + StringUtils.lowerCase(subModuleName) : "");
    model.put("className", StringUtils.uncapitalize(className));
    model.put("ClassName", StringUtils.capitalize(className));
    model.put("classAuthor", StringUtils.isNotBlank(classAuthor) ? classAuthor : "Generate Tools");
    model.put("classVersion", DateUtils.getDate());
    model.put("functionName", functionName);
    model.put("urlPrefix",
            (StringUtils.isNotBlank(subModuleName) ? "/" + StringUtils.lowerCase(subModuleName) + "/" : "")
                    + model.get("className"));
    model.put("viewPrefix",
            StringUtils.substringAfterLast(model.get("packageName"), ".") + "/" + model.get("urlPrefix"));

    // Generate Entity
    Template template = cfg.getTemplate("pojo.ftl");
    String content = FreeMarkers.renderTemplate(template, model);
    String filePath = javaPath + separator + model.get("moduleName") + separator + "pojo" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + ".java";
    writeFile(content, filePath);
    logger.info(filePath);

    // Generate Dao
    /*template = cfg.getTemplate("dao.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath+separator+model.get("moduleName")+separator+"dao"+separator
    +StringUtils.lowerCase(subModuleName)+separator+model.get("ClassName")+"Dao.java";
    writeFile(content, filePath);
    logger.info(filePath);
            
    // Generate Service
    template = cfg.getTemplate("service.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath+separator+model.get("moduleName")+separator+"service"+separator
    +StringUtils.lowerCase(subModuleName)+separator+model.get("ClassName")+"Service.java";
    writeFile(content, filePath);
    logger.info(filePath);*/

    // Generate Controller
    template = cfg.getTemplate("controller.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = javaPath + separator + model.get("moduleName") + separator + "controller" + separator
            + StringUtils.lowerCase(subModuleName) + separator + model.get("ClassName") + "Controller.java";
    writeFile(content, filePath);
    logger.info(filePath);

    // Generate ViewForm
    template = cfg.getTemplate("viewForm.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + model.get("className") + separator + model.get("className") + "Form.jsp";
    writeFile(content, filePath);
    logger.info(filePath);

    // Generate ViewList
    template = cfg.getTemplate("viewList.ftl");
    content = FreeMarkers.renderTemplate(template, model);
    filePath = viewPath + separator + model.get("className") + separator + model.get("className") + "List.jsp";
    writeFile(content, filePath);
    logger.info(filePath);

    logger.info("????");
}

From source file:com.github.horrorho.inflatabledonkey.Main.java

/**
 * @param args the command line arguments
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    try {
        if (!PropertyLoader.instance().test(args)) {
            return;
        }
    } catch (IllegalArgumentException ex) {
        System.out.println("Argument error: " + ex.getMessage());
        System.out.println("Try '" + Property.APP_NAME.value() + " --help' for more information.");
        System.exit(-1);
    }

    // SystemDefault HttpClient.
    // TODO concurrent
    CloseableHttpClient httpClient = HttpClients.custom().setUserAgent("CloudKit/479 (13A404)")
            .useSystemProperties().build();

    // Auth
    // TODO rework when we have UncheckedIOException for Authenticator
    Auth auth = Property.AUTHENTICATION_TOKEN.value().map(Auth::new).orElse(null);

    if (auth == null) {
        auth = Authenticator.authenticate(httpClient, Property.AUTHENTICATION_APPLEID.value().get(),
                Property.AUTHENTICATION_PASSWORD.value().get());
    }
    logger.debug("-- main() - auth: {}", auth);
    logger.info("-- main() - dsPrsID:mmeAuthToken: {}:{}", auth.dsPrsID(), auth.mmeAuthToken());

    if (Property.ARGS_TOKEN.booleanValue().orElse(false)) {
        System.out.println("DsPrsID:mmeAuthToken " + auth.dsPrsID() + ":" + auth.mmeAuthToken());
        return;
    }

    logger.info("-- main() - Apple ID: {}", Property.AUTHENTICATION_APPLEID.value());
    logger.info("-- main() - password: {}", Property.AUTHENTICATION_PASSWORD.value());
    logger.info("-- main() - token: {}", Property.AUTHENTICATION_TOKEN.value());

    // Account
    Account account = Accounts.account(httpClient, auth);

    // Backup
    Backup backup = Backup.create(httpClient, account);

    // BackupAccount
    BackupAccount backupAccount = backup.backupAccount(httpClient);
    logger.debug("-- main() - backup account: {}", backupAccount);

    // Devices
    List<Device> devices = backup.devices(httpClient, backupAccount.devices());
    logger.debug("-- main() - device count: {}", devices.size());

    // Snapshots
    List<SnapshotID> snapshotIDs = devices.stream().map(Device::snapshots).flatMap(Collection::stream)
            .collect(Collectors.toList());
    logger.info("-- main() - total snapshot count: {}", snapshotIDs.size());

    Map<String, Snapshot> snapshots = backup.snapshot(httpClient, snapshotIDs).stream().collect(
            Collectors.toMap(s -> s.record().getRecordIdentifier().getValue().getName(), Function.identity()));

    boolean repeat = false;
    do {

        for (int i = 0; i < devices.size(); i++) {
            Device device = devices.get(i);
            List<SnapshotID> deviceSnapshotIDs = device.snapshots();

            System.out.println(i + " " + device.info());

            for (int j = 0; j < deviceSnapshotIDs.size(); j++) {
                SnapshotID sid = deviceSnapshotIDs.get(j);
                System.out.println("\t" + j + snapshots.get(sid.id()).info() + "   " + sid.timestamp());
            }
        }
        if (Property.PRINT_SNAPSHOTS.booleanValue().orElse(false)) {
            return;
        }
        // Selection
        Scanner input = new Scanner(System.in);

        int deviceIndex;
        int snapshotIndex = Property.SELECT_SNAPSHOT_INDEX.intValue().get();

        if (devices.size() > 1) {
            System.out.printf("Select a device [0 - %d]: ", devices.size() - 1);
            deviceIndex = input.nextInt();
        } else
            deviceIndex = Property.SELECT_DEVICE_INDEX.intValue().get();

        if (deviceIndex >= devices.size() || deviceIndex < 0) {
            System.out.println("No such device: " + deviceIndex);
            System.exit(-1);
        }

        Device device = devices.get(deviceIndex);
        System.out.println("Selected device: " + deviceIndex + ", " + device.info());

        if (device.snapshots().size() > 1) {
            System.out.printf("Select a snapshot [0 - %d]: ", device.snapshots().size() - 1);
            snapshotIndex = input.nextInt();
        } else
            snapshotIndex = Property.SELECT_SNAPSHOT_INDEX.intValue().get();

        if (snapshotIndex >= devices.get(deviceIndex).snapshots().size() || snapshotIndex < 0) {
            System.out.println("No such snapshot for selected device: " + snapshotIndex);
            System.exit(-1);
        }

        logger.info("-- main() - arg device index: {}", deviceIndex);
        logger.info("-- main() - arg snapshot index: {}", snapshotIndex);

        String selected = devices.get(deviceIndex).snapshots().get(snapshotIndex).id();
        Snapshot snapshot = snapshots.get(selected);
        System.out.println("Selected snapshot: " + snapshotIndex + ", " + snapshot.info());

        // Asset list.
        List<Assets> assetsList = backup.assetsList(httpClient, snapshot);
        logger.info("-- main() - assets count: {}", assetsList.size());

        // Domains filter --domain option
        String chosenDomain = Property.FILTER_DOMAIN.value().orElse("").toLowerCase(Locale.US);
        logger.info("-- main() - arg domain substring filter: {}", Property.FILTER_DOMAIN.value());
        // Output domains --domains option
        if (Property.PRINT_DOMAIN_LIST.booleanValue().orElse(false)) {
            System.out.println("Domains / file count:");
            assetsList.stream().filter(a -> a.domain().isPresent())
                    .map(a -> a.domain().get() + " / " + a.files().size()).sorted()
                    .forEach(System.out::println);

            System.out.print("Type a domain ('null' to exit): ");
            chosenDomain = input.next().toLowerCase(Locale.US);
            if (chosenDomain.equals("null"))
                return;
            // TODO check Assets without domain information.
        }

        String domainSubstring = chosenDomain;

        Predicate<Optional<String>> domainFilter = domain -> domain.map(d -> d.toLowerCase(Locale.US))
                .map(d -> d.contains(domainSubstring)).orElse(false);

        List<String> files = Assets.files(assetsList, domainFilter);
        logger.info("-- main() - domain filtered file count: {}", files.size());

        // Output folders.
        Path outputFolder = Paths.get(Property.OUTPUT_FOLDER.value().orElse("output"));
        Path assetOutputFolder = outputFolder.resolve("assets"); // TODO assets value injection
        Path chunkOutputFolder = outputFolder.resolve("chunks"); // TODO chunks value injection
        logger.info("-- main() - output folder chunks: {}", chunkOutputFolder);
        logger.info("-- main() - output folder assets: {}", assetOutputFolder);

        // Download tools.
        AuthorizeAssets authorizeAssets = AuthorizeAssets.backupd();
        DiskChunkStore chunkStore = new DiskChunkStore(chunkOutputFolder);
        StandardChunkEngine chunkEngine = new StandardChunkEngine(chunkStore);
        AssetDownloader assetDownloader = new AssetDownloader(chunkEngine);
        KeyBagManager keyBagManager = backup.newKeyBagManager();

        // Mystery Moo. 
        Moo moo = new Moo(authorizeAssets, assetDownloader, keyBagManager);

        // Filename extension filter.
        String filenameExtension = Property.FILTER_EXTENSION.value().orElse("").toLowerCase(Locale.US);
        logger.info("-- main() - arg filename extension filter: {}", Property.FILTER_EXTENSION.value());

        Predicate<Asset> assetFilter = asset -> asset.relativePath().map(d -> d.toLowerCase(Locale.US))
                .map(d -> d.endsWith(filenameExtension)).orElse(false);

        // Batch process files in groups of 100.
        // TODO group files into batches based on file size.
        List<List<String>> batches = ListUtils.partition(files, 100);

        for (List<String> batch : batches) {
            List<Asset> assets = backup.assets(httpClient, batch).stream().filter(assetFilter::test)
                    .collect(Collectors.toList());
            logger.info("-- main() - filtered asset count: {}", assets.size());
            moo.download(httpClient, assets, assetOutputFolder);
        }
        System.out.print("Download other snapshot (Y/N)? ");
        repeat = input.next().toLowerCase(Locale.US).charAt(0) == 'y';
    } while (repeat);
}

From source file:com.github.benyzhous.springboot.web.core.gateway.sign.backend.Sign.java

/**
 * Demo
 *
 * @param args
 * @throws Exception
 */
public static void main(String[] args) throws Exception {
    String uri = "/list";
    String httpMethod = "GET";
    Map<String, String> headers = new HashMap<String, String>();
    headers.put("x-ca-proxy-signature", "YTaCd7ZCm6wRYY/mc2Dj4lQOviuR4oSJA9YGxjYHYIo=");
    headers.put("x-ca-proxy-signature-headers", "RequestId,X-Ca-Proxy-Signature-Secret-Key");
    headers.put("x-ca-proxy-signature-secret-key", "kaishu2099");
    headers.put("requestid", "1609A1D2-2845-488E-B052-9BA2143D51B5");
    //headers.put("HeaderKey2", "HeaderValue2");

    Map<String, Object> paramsMap = new HashMap<String, Object>();
    paramsMap.put("appid", "wxbb22392b3fefd76f");
    paramsMap.put("AppID", "4206209");
    /**paramsMap.put("FormKey1", "FormValue1");
    paramsMap.put("FormKey2", "FormValue2");*/

    byte[] inputStreamBytes = new byte[] {};

    String gatewaySign = headers.get("x-ca-proxy-signature");
    System.out.println("API??:" + gatewaySign);

    String serviceSign = serviceSign(uri, httpMethod, headers, paramsMap, inputStreamBytes);
    System.out.println("???:" + serviceSign);

    System.out.println("????:" + gatewaySign.equals(serviceSign));
}

From source file:com.termmed.sampling.ConceptsWithMoreThanThreeRoleGroups.java

/**
 * The main method.
 *
 * @param args the arguments
 * @throws Exception the exception
 */
public static void main(String[] args) throws Exception {
    System.out.println("Starting...");
    Map<String, Set<String>> groupsMap = new HashMap<String, Set<String>>();
    File relsFile = new File(
            "/Users/alo/Downloads/SnomedCT_RF2Release_INT_20160131-1/Snapshot/Terminology/sct2_Relationship_Snapshot_INT_20160131.txt");
    BufferedReader br2 = new BufferedReader(new FileReader(relsFile));
    String line2;
    int count2 = 0;
    while ((line2 = br2.readLine()) != null) {
        // process the line.
        count2++;
        if (count2 % 10000 == 0) {
            //System.out.println(count2);
        }
        List<String> columns = Arrays.asList(line2.split("\t", -1));
        if (columns.size() >= 7) { // need at least 7 columns before reading index 6
            if (columns.get(2).equals("1") && !columns.get(6).equals("0")) {
                if (!groupsMap.containsKey(columns.get(4))) {
                    groupsMap.put(columns.get(4), new HashSet<String>());
                }
                groupsMap.get(columns.get(4)).add(columns.get(6));
            }
        }
    }
    System.out.println("Relationship groups loaded");
    Gson gson = new Gson();
    System.out.println("Reading JSON 1");
    File crossoverFile1 = new File("/Users/alo/Downloads/crossover_role_to_group.json");
    String contents = FileUtils.readFileToString(crossoverFile1, "utf-8");
    Type collectionType = new TypeToken<Collection<ControlResultLine>>() {
    }.getType();
    List<ControlResultLine> lineObject = gson.fromJson(contents, collectionType);
    Set<String> crossovers1 = new HashSet<String>();
    for (ControlResultLine loopResult : lineObject) {
        crossovers1.add(loopResult.conceptId);
    }
    System.out.println("Crossovers 1 loaded, " + lineObject.size() + " Objects");

    System.out.println("Reading JSON 2");
    File crossoverFile2 = new File("/Users/alo/Downloads/crossover_group_to_group.json");
    String contents2 = FileUtils.readFileToString(crossoverFile2, "utf-8");
    List<ControlResultLine> lineObject2 = gson.fromJson(contents2, collectionType);
    Set<String> crossovers2 = new HashSet<String>();
    for (ControlResultLine loopResult : lineObject2) {
        crossovers2.add(loopResult.conceptId);
    }
    System.out.println("Crossovers 2 loaded, " + lineObject2.size() + " Objects");

    Set<String> foundConcepts = new HashSet<String>();
    int count3 = 0;
    BufferedWriter writer = new BufferedWriter(
            new FileWriter(new File("ConceptsWithMoreThanThreeRoleGroups.csv")));
    for (String loopConcept : groupsMap.keySet()) {
        if (groupsMap.get(loopConcept).size() > 3) {
            writer.write(loopConcept);
            writer.newLine();
            foundConcepts.add(loopConcept);
            count3++;
        }
    }
    writer.close();
    System.out.println("Found " + foundConcepts.size() + " concepts");

    int countCrossover1 = 0;
    for (String loopConcept : foundConcepts) {
        if (crossovers1.contains(loopConcept)) {
            countCrossover1++;
        }
    }
    System.out.println(countCrossover1 + " are present in crossover_role_to_group");

    int countCrossover2 = 0;
    for (String loopConcept : foundConcepts) {
        if (crossovers2.contains(loopConcept)) {
            countCrossover2++;
        }
    }
    System.out.println(countCrossover2 + " are present in crossover_group_to_group");

    System.out.println("Done");
}

From source file:de.unileipzig.ub.indexer.App.java

public static void main(String[] args) throws IOException {

    // create Options object
    Options options = new Options();

    options.addOption("h", "help", false, "display this help");

    options.addOption("f", "filename", true, "name of the JSON file whose content should be indexed");
    options.addOption("i", "index", true, "the name of the target index");
    options.addOption("d", "doctype", true, "the name of the doctype (title, local, ...)");

    options.addOption("t", "host", true, "elasticsearch hostname (default: 0.0.0.0)");
    options.addOption("p", "port", true, "transport port (that's NOT the http port, default: 9300)");
    options.addOption("c", "cluster", true, "cluster name (default: elasticsearch_mdma)");

    options.addOption("b", "bulksize", true, "number of docs sent in one request (default: 3000)");
    options.addOption("v", "verbose", false, "show processing speed while indexing");
    options.addOption("s", "status", false, "only show status of index for file");

    options.addOption("r", "repair", false, "attempt to repair recoverable inconsistencies on the go");
    options.addOption("e", "debug", false, "set logging level to debug");
    options.addOption("l", "logfile", true, "logfile - in not specified only log to stdout");

    options.addOption("m", "memcached", true, "host and port of memcached (default: localhost:11211)");
    options.addOption("z", "latest-flag-on", true,
            "enable latest flag according to field (within content, e.g. 001)");
    options.addOption("a", "flat", false, "flat-mode: do not check for inconsistencies");

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException ex) {
        logger.error(ex);
        System.exit(1);
    }

    // setup logging
    Properties systemProperties = System.getProperties();
    systemProperties.put("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.Log4JLogger");
    System.setProperties(systemProperties);
    Logger.getLogger("net.spy.memcached").setLevel(Level.ERROR);

    Properties props = new Properties();
    props.load(props.getClass().getResourceAsStream("/log4j.properties"));

    if (cmd.hasOption("debug")) {
        props.setProperty("log4j.logger.de.unileipzig", "DEBUG");
    }

    if (cmd.hasOption("logfile")) {
        props.setProperty("log4j.rootLogger", "INFO, stdout, F");
        props.setProperty("log4j.appender.F", "org.apache.log4j.FileAppender");
        props.setProperty("log4j.appender.F.File", cmd.getOptionValue("logfile"));
        props.setProperty("log4j.appender.F.layout", "org.apache.log4j.PatternLayout");
        props.setProperty("log4j.appender.F.layout.ConversionPattern", "%5p | %d | %F | %L | %m%n");
    }

    PropertyConfigurator.configure(props);

    InetAddress addr = InetAddress.getLocalHost();
    String memcachedHostAndPort = addr.getHostAddress() + ":11211";
    if (cmd.hasOption("m")) {
        memcachedHostAndPort = cmd.getOptionValue("m");
    }

    // setup caching
    try {
        if (memcachedClient == null) {
            memcachedClient = new MemcachedClient(
                    new ConnectionFactoryBuilder().setFailureMode(FailureMode.Cancel).build(),
                    AddrUtil.getAddresses(memcachedHostAndPort));
            try {
                // give client and server a moment (300 ms) to connect
                Thread.sleep(300);
            } catch (InterruptedException ex) {
            }

            Collection availableServers = memcachedClient.getAvailableServers();
            logger.info(availableServers);
            if (availableServers.size() == 0) {
                logger.info("no memcached servers found");
                memcachedClient.shutdown();
                memcachedClient = null;
            } else {
                logger.info(availableServers.size() + " memcached server(s) detected, fine.");
            }
        }
    } catch (IOException ex) {
        logger.warn("couldn't create a connection, bailing out: " + ex.getMessage());
    }

    // process options

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("indexer", options, true);
        quit(0);
    }

    boolean verbose = false;
    if (cmd.hasOption("verbose")) {
        verbose = true;
    }

    // ES options
    String[] hosts = new String[] { "0.0.0.0" };
    int port = 9300;
    String clusterName = "elasticsearch_mdma";
    int bulkSize = 3000;

    if (cmd.hasOption("host")) {
        hosts = cmd.getOptionValues("host");
    }
    if (cmd.hasOption("port")) {
        port = Integer.parseInt(cmd.getOptionValue("port"));
    }
    if (cmd.hasOption("cluster")) {
        clusterName = cmd.getOptionValue("cluster");
    }
    if (cmd.hasOption("bulksize")) {
        bulkSize = Integer.parseInt(cmd.getOptionValue("bulksize"));
        if (bulkSize < 1 || bulkSize > 100000) {
            logger.error("bulksize must be between 1 and 100,000");
            quit(1);
        }
    }

    // ES Client
    final Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", clusterName)
            .build();
    final TransportClient client = new TransportClient(settings);
    for (String host : hosts) {
        client.addTransportAddress(new InetSocketTransportAddress(host, port));
    }

    if (cmd.hasOption("filename") && cmd.hasOption("index") && cmd.hasOption("doctype")) {

        final String filename = cmd.getOptionValue("filename");

        final File _file = new File(filename);
        if (_file.length() == 0) {
            logger.info(_file.getAbsolutePath() + " is empty, skipping");
            quit(0); // file is empty
        }

        // for flat mode: leave a stampfile beside the json to 
        // indicate previous successful processing
        File directory = new File(filename).getParentFile();
        File stampfile = new File(directory, DigestUtils.shaHex(filename) + ".indexed");

        long start = System.currentTimeMillis();
        long lineCount = 0;

        final String indexName = cmd.getOptionValue("index");
        final String docType = cmd.getOptionValue("doctype");
        BulkRequestBuilder bulkRequest = client.prepareBulk();

        try {
            if (cmd.hasOption("flat")) {
                // flat mode
                // .........
                if (stampfile.exists()) {
                    logger.info("SKIPPING, since it seems this file has already " + "been imported (found: "
                            + stampfile.getAbsolutePath() + ")");
                    quit(0);
                }
            } else {

                final String srcSHA1 = extractSrcSHA1(filename);

                logger.debug(filename + " srcsha1: " + srcSHA1);

                long docsInIndex = getIndexedRecordCount(client, indexName, srcSHA1);
                logger.debug(filename + " indexed: " + docsInIndex);

                long docsInFile = getLineCount(filename);
                logger.debug(filename + " lines: " + docsInFile);

                // in non-flat-mode, indexing would take care
                // of inconsistencies
                if (docsInIndex == docsInFile) {
                    logger.info("UP-TO DATE: " + filename + " (" + docsInIndex + ", " + srcSHA1 + ")");
                    client.close();
                    quit(0);
                }

                if (docsInIndex > 0) {
                    logger.warn("INCONSISTENCY DETECTED: " + filename + ": indexed:" + docsInIndex + " lines:"
                            + docsInFile);

                    if (!cmd.hasOption("r")) {
                        logger.warn(
                                "Please re-run indexer with --repair flag or delete residues first with: $ curl -XDELETE "
                                        + hosts[0] + ":9200/" + indexName
                                        + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1
                                        + "\" }}'");
                        client.close();
                        quit(1);
                    } else {
                        logger.info("Attempting to clear residues...");
                        // attempt to repair once
                        DeleteByQueryResponse dbqr = client.prepareDeleteByQuery(indexName)
                                .setQuery(termQuery("meta.srcsha1", srcSHA1)).execute().actionGet();

                        Iterator<IndexDeleteByQueryResponse> it = dbqr.iterator();
                        long deletions = 0;
                        while (it.hasNext()) {
                            IndexDeleteByQueryResponse response = it.next();
                            deletions += 1;
                        }
                        logger.info("Deleted residues of " + filename);
                        logger.info("Refreshing [" + indexName + "]");
                        RefreshResponse refreshResponse = client.admin().indices()
                                .refresh(new RefreshRequest(indexName)).actionGet();

                        long indexedAfterDelete = getIndexedRecordCount(client, indexName, srcSHA1);
                        logger.info(indexedAfterDelete + " docs remained");
                        if (indexedAfterDelete > 0) {
                            logger.warn("Not all residues cleaned. Try to fix this manually: $ curl -XDELETE "
                                    + hosts[0] + ":9200/" + indexName
                                    + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'");
                            quit(1);
                        } else {
                            logger.info("Residues are gone. Now trying to reindex: " + filename);
                        }
                    }
                }
            }

            logger.info("INDEXING-REQUIRED: " + filename);
            if (cmd.hasOption("status")) {
                quit(0);
            }

            HashSet idsInBatch = new HashSet();

            String idField = null;
            if (cmd.hasOption("z")) {
                idField = cmd.getOptionValue("z");
            }

            final FileReader fr = new FileReader(filename);
            final BufferedReader br = new BufferedReader(fr);

            String line;
            // one line is one document
            while ((line = br.readLine()) != null) {

                // "Latest-Flag" machine
                // This gets obsolete with a "flat" index
                if (cmd.hasOption("z")) {
                    // flag that indicates, whether the document
                    // about to be indexed will be the latest
                    boolean willBeLatest = true;

                    // check if there is a previous (lower meta.timestamp) document with 
                    // the same identifier (whatever that may be - queried under "content")
                    final String contentIdentifier = getContentIdentifier(line, idField);
                    idsInBatch.add(contentIdentifier);

                    // assumed in meta.timestamp
                    final Long timestamp = Long.parseLong(getTimestamp(line));

                    logger.debug("Checking whether record is latest (line: " + lineCount + ")");
                    logger.debug(contentIdentifier + ", " + timestamp);

                    // get all docs, which match the contentIdentifier
                    // by filter, which doesn't score
                    final TermFilterBuilder idFilter = new TermFilterBuilder("content." + idField,
                            contentIdentifier);
                    final TermFilterBuilder kindFilter = new TermFilterBuilder("meta.kind", docType);
                    final AndFilterBuilder afb = new AndFilterBuilder();
                    afb.add(idFilter).add(kindFilter);
                    final FilteredQueryBuilder fb = filteredQuery(matchAllQuery(), afb);

                    final SearchResponse searchResponse = client.prepareSearch(indexName)
                            .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(fb).setFrom(0)
                            .setSize(1200) // 3 years and 105 days assuming daily updates at the most
                            .setExplain(false).execute().actionGet();

                    final SearchHits searchHits = searchResponse.getHits();

                    logger.debug("docs with this id in the index: " + searchHits.getTotalHits());

                    for (final SearchHit hit : searchHits.getHits()) {
                        final String docId = hit.id();
                        final Map<String, Object> source = hit.sourceAsMap();
                        final Map meta = (Map) source.get("meta");
                        final Long docTimestamp = Long.parseLong(meta.get("timestamp").toString());
                        // if the indexed doc timestamp is lower than the current one,
                        // remove any latest flag
                        if (timestamp >= docTimestamp) {
                            source.remove("latest");
                            final ObjectMapper mapper = new ObjectMapper();
                            // put the updated doc back
                            // IndexResponse response = 
                            client.prepareIndex(indexName, docType).setCreate(false).setId(docId)
                                    .setSource(mapper.writeValueAsBytes(source))
                                    .execute(new ActionListener<IndexResponse>() {
                                        public void onResponse(IndexResponse rspns) {
                                            logger.debug("Removed latest flag from " + contentIdentifier + ", "
                                                    + docTimestamp + ", " + hit.id() + " since (" + timestamp
                                                    + " > " + docTimestamp + ")");
                                        }

                                        public void onFailure(Throwable thrwbl) {
                                            logger.error("Could not remove flag from " + hit.id() + ", "
                                                    + contentIdentifier);
                                        }
                                    });
                            // .execute()
                            //.actionGet();
                        } else {
                            logger.debug("Doc " + hit.id() + " is newer (" + docTimestamp + ")");
                            willBeLatest = false;
                        }
                    }

                    if (willBeLatest) {
                        line = setLatestFlag(line);
                        logger.info("Setting latest flag on " + contentIdentifier + ", " + timestamp);
                    }

                    // end of latest-flag machine
                    // beware - this will be correct as long as there
                    // are no dups within one bulk!
                }

                bulkRequest.add(client.prepareIndex(indexName, docType).setSource(line));
                lineCount++;
                logger.debug("Added line " + lineCount + " to BULK");
                logger.debug(line);

                if (lineCount % bulkSize == 0) {

                    if (idsInBatch.size() != bulkSize && cmd.hasOption("z")) {
                        logger.error(
                                "This batch has duplications in the ID. That's not bad for the index, just makes the latest flag fuzzy");
                        logger.error(
                                "Bulk size was: " + bulkSize + ", but " + idsInBatch.size() + " IDs (only)");
                    }
                    idsInBatch.clear();

                    logger.debug("Issuing BULK request");

                    final long actionCount = bulkRequest.numberOfActions();
                    final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                    final long tookInMillis = bulkResponse.getTookInMillis();

                    if (bulkResponse.hasFailures()) {
                        logger.fatal("FAILED, bulk not indexed. exiting now.");
                        Iterator<BulkItemResponse> it = bulkResponse.iterator();
                        while (it.hasNext()) {
                            BulkItemResponse bir = it.next();
                            if (bir.isFailed()) {
                                Failure failure = bir.getFailure();
                                logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                        + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                            }
                        }
                        quit(1);
                    } else {
                        if (verbose) {
                            final double elapsed = System.currentTimeMillis() - start;
                            final double speed = (lineCount / elapsed * 1000);
                            logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount
                                    + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                        }
                    }
                    bulkRequest = client.prepareBulk();
                }
            }

            // handle the remaining items
            final long actionCount = bulkRequest.numberOfActions();
            if (actionCount > 0) {
                final BulkResponse bulkResponse = bulkRequest.execute().actionGet();
                final long tookInMillis = bulkResponse.getTookInMillis();

                if (bulkResponse.hasFailures()) {
                    logger.fatal("FAILED, bulk not indexed. exiting now.");
                    Iterator<BulkItemResponse> it = bulkResponse.iterator();
                    while (it.hasNext()) {
                        BulkItemResponse bir = it.next();
                        if (bir.isFailed()) {
                            Failure failure = bir.getFailure();
                            logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage()
                                    + ", type: " + failure.getType() + ", index: " + failure.getIndex());
                        }
                    }
                    quit(1);
                } else {

                    // trigger update now
                    RefreshResponse refreshResponse = client.admin().indices()
                            .refresh(new RefreshRequest(indexName)).actionGet();

                    if (verbose) {
                        final double elapsed = System.currentTimeMillis() - start;
                        final double speed = (lineCount / elapsed * 1000);
                        logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/"
                                + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)");
                    }

                }

            }

            br.close();
            client.close();
            final double elapsed = (System.currentTimeMillis() - start) / 1000.0;
            final double speed = (lineCount / elapsed);
            logger.info("indexing (" + filename + ") " + lineCount + " docs took " + elapsed + "s (speed: "
                    + String.format("%.2f", speed) + "r/s)");
            if (cmd.hasOption("flat")) {
                try {
                    FileUtils.touch(stampfile);
                } catch (IOException ioe) {
                    logger.warn(".indexed files not created. Will reindex everything everytime.");
                }
            }
        } catch (IOException e) {
            client.close();
            logger.error(e);
            quit(1);
        } finally {
            client.close();
        }
    }
    quit(0);
}