Usage examples for java.util.Iterator.hasNext()
boolean hasNext();
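hasNext() returns true if the iteration has more elements, i.e. if a call to next() would return an element rather than throw NoSuchElementException. Before the full examples below, here is a minimal, self-contained sketch of the canonical loop (the class name and list contents are illustrative only):

import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class HasNextDemo {
    public static void main(String[] args) {
        List<String> names = Arrays.asList("alpha", "beta", "gamma"); // arbitrary sample data
        Iterator<String> it = names.iterator();
        // hasNext() only tests whether an element remains; next() returns it and advances the cursor.
        while (it.hasNext()) {
            System.out.println(it.next());
        }
    }
}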
From source file:Batch.java
static public void main(String[] args) {
    Connection conn = null;
    try {
        ArrayList breakable = new ArrayList();
        PreparedStatement stmt;
        Iterator users;
        ResultSet rs;

        Class.forName(args[0]).newInstance();
        conn = DriverManager.getConnection(args[1], args[2], args[3]);
        stmt = conn.prepareStatement("SELECT user_id, password " + "FROM user");
        rs = stmt.executeQuery();
        while (rs.next()) {
            String uid = rs.getString(1);
            String pw = rs.getString(2);
            // Assume PasswordCracker is some class that provides
            // a single static method called crack() that attempts
            // to run password cracking routines on the password
            // if( PasswordCracker.crack(uid, pw) ) {
            //     breakable.add(uid);
            // }
        }
        stmt.close();
        if (breakable.size() < 1) {
            return;
        }
        stmt = conn.prepareStatement("UPDATE user " + "SET bad_password = 'Y' " + "WHERE uid = ?");
        users = breakable.iterator();
        while (users.hasNext()) {
            String uid = (String) users.next();
            stmt.setString(1, uid);
            stmt.addBatch();
        }
        stmt.executeBatch();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        if (conn != null) {
            try {
                conn.close();
            } catch (Exception e) {
            }
        }
    }
}
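The loop above uses a raw Iterator, so each element must be cast back to String. A hedged sketch of the same update-batch loop with a typed collection, so next() needs no cast (the wrapper method, its name and the try-with-resources block are additions for illustration; the SQL and loop body mirror the example):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;

public class BatchWithGenerics {
    // Mirrors the Batch.java update loop above with a typed Iterator<String>.
    static void flagBadPasswords(Connection conn, List<String> breakable) throws SQLException {
        try (PreparedStatement stmt = conn.prepareStatement(
                "UPDATE user SET bad_password = 'Y' WHERE uid = ?")) {
            Iterator<String> users = breakable.iterator();
            while (users.hasNext()) {
                stmt.setString(1, users.next()); // no (String) cast needed
                stmt.addBatch();
            }
            stmt.executeBatch();
        }
    }
}

An enhanced for loop (for (String uid : breakable)) would compile to the same iterator(), hasNext() and next() calls.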
From source file:de.unileipzig.ub.indexer.App.java
public static void main(String[] args) throws IOException { // create Options object Options options = new Options(); options.addOption("h", "help", false, "display this help"); options.addOption("f", "filename", true, "name of the JSON file whose content should be indexed"); options.addOption("i", "index", true, "the name of the target index"); options.addOption("d", "doctype", true, "the name of the doctype (title, local, ...)"); options.addOption("t", "host", true, "elasticsearch hostname (default: 0.0.0.0)"); options.addOption("p", "port", true, "transport port (that's NOT the http port, default: 9300)"); options.addOption("c", "cluster", true, "cluster name (default: elasticsearch_mdma)"); options.addOption("b", "bulksize", true, "number of docs sent in one request (default: 3000)"); options.addOption("v", "verbose", false, "show processing speed while indexing"); options.addOption("s", "status", false, "only show status of index for file"); options.addOption("r", "repair", false, "attempt to repair recoverable inconsistencies on the go"); options.addOption("e", "debug", false, "set logging level to debug"); options.addOption("l", "logfile", true, "logfile - in not specified only log to stdout"); options.addOption("m", "memcached", true, "host and port of memcached (default: localhost:11211)"); options.addOption("z", "latest-flag-on", true, "enable latest flag according to field (within content, e.g. 001)"); options.addOption("a", "flat", false, "flat-mode: do not check for inconsistencies"); CommandLineParser parser = new PosixParser(); CommandLine cmd = null;/* w ww . ja v a 2 s.com*/ try { cmd = parser.parse(options, args); } catch (ParseException ex) { logger.error(ex); System.exit(1); } // setup logging Properties systemProperties = System.getProperties(); systemProperties.put("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.Log4JLogger"); System.setProperties(systemProperties); Logger.getLogger("net.spy.memcached").setLevel(Level.ERROR); Properties props = new Properties(); props.load(props.getClass().getResourceAsStream("/log4j.properties")); if (cmd.hasOption("debug")) { props.setProperty("log4j.logger.de.unileipzig", "DEBUG"); } if (cmd.hasOption("logfile")) { props.setProperty("log4j.rootLogger", "INFO, stdout, F"); props.setProperty("log4j.appender.F", "org.apache.log4j.FileAppender"); props.setProperty("log4j.appender.F.File", cmd.getOptionValue("logfile")); props.setProperty("log4j.appender.F.layout", "org.apache.log4j.PatternLayout"); props.setProperty("log4j.appender.F.layout.ConversionPattern", "%5p | %d | %F | %L | %m%n"); } PropertyConfigurator.configure(props); InetAddress addr = InetAddress.getLocalHost(); String memcachedHostAndPort = addr.getHostAddress() + ":11211"; if (cmd.hasOption("m")) { memcachedHostAndPort = cmd.getOptionValue("m"); } // setup caching try { if (memcachedClient == null) { memcachedClient = new MemcachedClient( new ConnectionFactoryBuilder().setFailureMode(FailureMode.Cancel).build(), AddrUtil.getAddresses("0.0.0.0:11211")); try { // give client and server 500ms Thread.sleep(300); } catch (InterruptedException ex) { } Collection availableServers = memcachedClient.getAvailableServers(); logger.info(availableServers); if (availableServers.size() == 0) { logger.info("no memcached servers found"); memcachedClient.shutdown(); memcachedClient = null; } else { logger.info(availableServers.size() + " memcached server(s) detected, fine."); } } } catch (IOException ex) { logger.warn("couldn't create a connection, bailing out: " + 
ex.getMessage()); } // process options if (cmd.hasOption("h")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("indexer", options, true); quit(0); } boolean verbose = false; if (cmd.hasOption("verbose")) { verbose = true; } // ES options String[] hosts = new String[] { "0.0.0.0" }; int port = 9300; String clusterName = "elasticsearch_mdma"; int bulkSize = 3000; if (cmd.hasOption("host")) { hosts = cmd.getOptionValues("host"); } if (cmd.hasOption("port")) { port = Integer.parseInt(cmd.getOptionValue("port")); } if (cmd.hasOption("cluster")) { clusterName = cmd.getOptionValue("cluster"); } if (cmd.hasOption("bulksize")) { bulkSize = Integer.parseInt(cmd.getOptionValue("bulksize")); if (bulkSize < 1 || bulkSize > 100000) { logger.error("bulksize must be between 1 and 100,000"); quit(1); } } // ES Client final Settings settings = ImmutableSettings.settingsBuilder().put("cluster.name", "elasticsearch_mdma") .build(); final TransportClient client = new TransportClient(settings); for (String host : hosts) { client.addTransportAddress(new InetSocketTransportAddress(host, port)); } if (cmd.hasOption("filename") && cmd.hasOption("index") && cmd.hasOption("doctype")) { final String filename = cmd.getOptionValue("filename"); final File _file = new File(filename); if (_file.length() == 0) { logger.info(_file.getAbsolutePath() + " is empty, skipping"); quit(0); // file is empty } // for flat mode: leave a stampfile beside the json to // indicate previous successful processing File directory = new File(filename).getParentFile(); File stampfile = new File(directory, DigestUtils.shaHex(filename) + ".indexed"); long start = System.currentTimeMillis(); long lineCount = 0; final String indexName = cmd.getOptionValue("index"); final String docType = cmd.getOptionValue("doctype"); BulkRequestBuilder bulkRequest = client.prepareBulk(); try { if (cmd.hasOption("flat")) { // flat mode // ......... 
if (stampfile.exists()) { logger.info("SKIPPING, since it seems this file has already " + "been imported (found: " + stampfile.getAbsolutePath() + ")"); quit(0); } } else { final String srcSHA1 = extractSrcSHA1(filename); logger.debug(filename + " srcsha1: " + srcSHA1); long docsInIndex = getIndexedRecordCount(client, indexName, srcSHA1); logger.debug(filename + " indexed: " + docsInIndex); long docsInFile = getLineCount(filename); logger.debug(filename + " lines: " + docsInFile); // in non-flat-mode, indexing would take care // of inconsistencies if (docsInIndex == docsInFile) { logger.info("UP-TO DATE: " + filename + " (" + docsInIndex + ", " + srcSHA1 + ")"); client.close(); quit(0); } if (docsInIndex > 0) { logger.warn("INCONSISTENCY DETECTED: " + filename + ": indexed:" + docsInIndex + " lines:" + docsInFile); if (!cmd.hasOption("r")) { logger.warn( "Please re-run indexer with --repair flag or delete residues first with: $ curl -XDELETE " + hosts[0] + ":9200/" + indexName + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'"); client.close(); quit(1); } else { logger.info("Attempting to clear residues..."); // attempt to repair once DeleteByQueryResponse dbqr = client.prepareDeleteByQuery(indexName) .setQuery(termQuery("meta.srcsha1", srcSHA1)).execute().actionGet(); Iterator<IndexDeleteByQueryResponse> it = dbqr.iterator(); long deletions = 0; while (it.hasNext()) { IndexDeleteByQueryResponse response = it.next(); deletions += 1; } logger.info("Deleted residues of " + filename); logger.info("Refreshing [" + indexName + "]"); RefreshResponse refreshResponse = client.admin().indices() .refresh(new RefreshRequest(indexName)).actionGet(); long indexedAfterDelete = getIndexedRecordCount(client, indexName, srcSHA1); logger.info(indexedAfterDelete + " docs remained"); if (indexedAfterDelete > 0) { logger.warn("Not all residues cleaned. Try to fix this manually: $ curl -XDELETE " + hosts[0] + ":9200/" + indexName + "/_query -d ' {\"term\" : { \"meta.srcsha1\" : \"" + srcSHA1 + "\" }}'"); quit(1); } else { logger.info("Residues are gone. Now trying to reindex: " + filename); } } } } logger.info("INDEXING-REQUIRED: " + filename); if (cmd.hasOption("status")) { quit(0); } HashSet idsInBatch = new HashSet(); String idField = null; if (cmd.hasOption("z")) { idField = cmd.getOptionValue("z"); } final FileReader fr = new FileReader(filename); final BufferedReader br = new BufferedReader(fr); String line; // one line is one document while ((line = br.readLine()) != null) { // "Latest-Flag" machine // This gets obsolete with a "flat" index if (cmd.hasOption("z")) { // flag that indicates, whether the document // about to be indexed will be the latest boolean willBeLatest = true; // check if there is a previous (lower meta.timestamp) document with // the same identifier (whatever that may be - queried under "content") final String contentIdentifier = getContentIdentifier(line, idField); idsInBatch.add(contentIdentifier); // assumed in meta.timestamp final Long timestamp = Long.parseLong(getTimestamp(line)); logger.debug("Checking whether record is latest (line: " + lineCount + ")"); logger.debug(contentIdentifier + ", " + timestamp); // get all docs, which match the contentIdentifier // by filter, which doesn't score final TermFilterBuilder idFilter = new TermFilterBuilder("content." 
+ idField, contentIdentifier); final TermFilterBuilder kindFilter = new TermFilterBuilder("meta.kind", docType); final AndFilterBuilder afb = new AndFilterBuilder(); afb.add(idFilter).add(kindFilter); final FilteredQueryBuilder fb = filteredQuery(matchAllQuery(), afb); final SearchResponse searchResponse = client.prepareSearch(indexName) .setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(fb).setFrom(0) .setSize(1200) // 3 years and 105 days assuming daily updates at the most .setExplain(false).execute().actionGet(); final SearchHits searchHits = searchResponse.getHits(); logger.debug("docs with this id in the index: " + searchHits.getTotalHits()); for (final SearchHit hit : searchHits.getHits()) { final String docId = hit.id(); final Map<String, Object> source = hit.sourceAsMap(); final Map meta = (Map) source.get("meta"); final Long docTimestamp = Long.parseLong(meta.get("timestamp").toString()); // if the indexed doc timestamp is lower the the current one, // remove any latest flag if (timestamp >= docTimestamp) { source.remove("latest"); final ObjectMapper mapper = new ObjectMapper(); // put the updated doc back // IndexResponse response = client.prepareIndex(indexName, docType).setCreate(false).setId(docId) .setSource(mapper.writeValueAsBytes(source)) .execute(new ActionListener<IndexResponse>() { public void onResponse(IndexResponse rspns) { logger.debug("Removed latest flag from " + contentIdentifier + ", " + docTimestamp + ", " + hit.id() + " since (" + timestamp + " > " + docTimestamp + ")"); } public void onFailure(Throwable thrwbl) { logger.error("Could not remove flag from " + hit.id() + ", " + contentIdentifier); } }); // .execute() //.actionGet(); } else { logger.debug("Doc " + hit.id() + " is newer (" + docTimestamp + ")"); willBeLatest = false; } } if (willBeLatest) { line = setLatestFlag(line); logger.info("Setting latest flag on " + contentIdentifier + ", " + timestamp); } // end of latest-flag machine // beware - this will be correct as long as there // are no dups within one bulk! } bulkRequest.add(client.prepareIndex(indexName, docType).setSource(line)); lineCount++; logger.debug("Added line " + lineCount + " to BULK"); logger.debug(line); if (lineCount % bulkSize == 0) { if (idsInBatch.size() != bulkSize && cmd.hasOption("z")) { logger.error( "This batch has duplications in the ID. That's not bad for the index, just makes the latest flag fuzzy"); logger.error( "Bulk size was: " + bulkSize + ", but " + idsInBatch.size() + " IDs (only)"); } idsInBatch.clear(); logger.debug("Issuing BULK request"); final long actionCount = bulkRequest.numberOfActions(); final BulkResponse bulkResponse = bulkRequest.execute().actionGet(); final long tookInMillis = bulkResponse.getTookInMillis(); if (bulkResponse.hasFailures()) { logger.fatal("FAILED, bulk not indexed. 
exiting now."); Iterator<BulkItemResponse> it = bulkResponse.iterator(); while (it.hasNext()) { BulkItemResponse bir = it.next(); if (bir.isFailed()) { Failure failure = bir.getFailure(); logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage() + ", type: " + failure.getType() + ", index: " + failure.getIndex()); } } quit(1); } else { if (verbose) { final double elapsed = System.currentTimeMillis() - start; final double speed = (lineCount / elapsed * 1000); logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)"); } } bulkRequest = client.prepareBulk(); } } // handle the remaining items final long actionCount = bulkRequest.numberOfActions(); if (actionCount > 0) { final BulkResponse bulkResponse = bulkRequest.execute().actionGet(); final long tookInMillis = bulkResponse.getTookInMillis(); if (bulkResponse.hasFailures()) { logger.fatal("FAILED, bulk not indexed. exiting now."); Iterator<BulkItemResponse> it = bulkResponse.iterator(); while (it.hasNext()) { BulkItemResponse bir = it.next(); if (bir.isFailed()) { Failure failure = bir.getFailure(); logger.fatal("id: " + failure.getId() + ", message: " + failure.getMessage() + ", type: " + failure.getType() + ", index: " + failure.getIndex()); } } quit(1); } else { // trigger update now RefreshResponse refreshResponse = client.admin().indices() .refresh(new RefreshRequest(indexName)).actionGet(); if (verbose) { final double elapsed = System.currentTimeMillis() - start; final double speed = (lineCount / elapsed * 1000); logger.info("OK (" + filename + ") " + lineCount + " docs indexed (" + actionCount + "/" + tookInMillis + "ms" + "/" + String.format("%.2f", speed) + "r/s)"); } } } br.close(); client.close(); final double elapsed = (System.currentTimeMillis() - start) / 1000; final double speed = (lineCount / elapsed); logger.info("indexing (" + filename + ") " + lineCount + " docs took " + elapsed + "s (speed: " + String.format("%.2f", speed) + "r/s)"); if (cmd.hasOption("flat")) { try { FileUtils.touch(stampfile); } catch (IOException ioe) { logger.warn(".indexed files not created. Will reindex everything everytime."); } } } catch (IOException e) { client.close(); logger.error(e); quit(1); } finally { client.close(); } } quit(0); }
From source file:com.netscape.cms.servlet.test.ConfigurationTest.java
public static void main(String args[]) throws Exception { String host = null;//w w w .j a v a 2 s .c o m String port = null; String cstype = null; String token_pwd = null; String db_dir = "./"; String protocol = "https"; String pin = null; String extCertFile = null; int testnum = 1; // parse command line arguments Options options = new Options(); options.addOption("t", true, "Subsystem type"); options.addOption("h", true, "Hostname of the CS subsystem"); options.addOption("p", true, "Port of the CS subsystem"); options.addOption("w", true, "Token password"); options.addOption("d", true, "Directory for tokendb"); options.addOption("s", true, "preop pin"); options.addOption("e", true, "File for externally signed signing cert"); options.addOption("x", true, "Test number"); try { CommandLineParser parser = new PosixParser(); CommandLine cmd = parser.parse(options, args); if (cmd.hasOption("t")) { cstype = cmd.getOptionValue("t"); } else { System.err.println("Error: no subsystem type provided."); usage(options); } if (cmd.hasOption("h")) { host = cmd.getOptionValue("h"); } else { System.err.println("Error: no hostname provided."); usage(options); } if (cmd.hasOption("p")) { port = cmd.getOptionValue("p"); } else { System.err.println("Error: no port provided"); usage(options); } if (cmd.hasOption("w")) { token_pwd = cmd.getOptionValue("w"); } else { System.err.println("Error: no token password provided"); usage(options); } if (cmd.hasOption("d")) { db_dir = cmd.getOptionValue("d"); } if (cmd.hasOption("s")) { pin = cmd.getOptionValue("s"); } if (cmd.hasOption("e")) { extCertFile = cmd.getOptionValue("e"); } if (cmd.hasOption("x")) { testnum = Integer.parseInt(cmd.getOptionValue("x")); } } catch (ParseException e) { System.err.println("Error in parsing command line options: " + e.getMessage()); usage(options); } // Initialize token try { CryptoManager.initialize(db_dir); } catch (AlreadyInitializedException e) { // it is ok if it is already initialized } catch (Exception e) { System.out.println("INITIALIZATION ERROR: " + e.toString()); System.exit(1); } // log into token CryptoManager manager = null; CryptoToken token = null; try { manager = CryptoManager.getInstance(); token = manager.getInternalKeyStorageToken(); Password password = new Password(token_pwd.toCharArray()); try { token.login(password); } catch (Exception e) { System.out.println("login Exception: " + e.toString()); if (!token.isLoggedIn()) { token.initPassword(password, password); } } } catch (Exception e) { System.out.println("Exception in logging into token:" + e.toString()); } SystemConfigClient client = null; try { ClientConfig config = new ClientConfig(); config.setServerURL(protocol + "://" + host + ":" + port); client = new SystemConfigClient(new PKIClient(config, null), cstype); } catch (URISyntaxException e1) { e1.printStackTrace(); System.exit(1); } ConfigurationRequest data = null; switch (testnum) { case 1: data = constructCAData(host, port, pin, db_dir, token_pwd, token); break; case 2: data = constructCloneCAData(host, port, pin, db_dir, token_pwd, token); break; case 3: data = constructKRAData(host, port, pin, db_dir, token_pwd, token); break; case 4: data = constructOCSPData(host, port, pin, db_dir, token_pwd, token); break; case 5: data = constructTKSData(host, port, pin, db_dir, token_pwd, token); break; case 6: data = constructSubCAData(host, port, pin, db_dir, token_pwd, token); break; case 7: data = constructExternalCADataPart1(host, port, pin, db_dir, token_pwd, token); break; case 8: data = 
constructExternalCADataPart2(host, port, pin, db_dir, token_pwd, token, extCertFile); break; default: System.out.println("Invalid test"); System.exit(1); } ConfigurationResponse response = client.configure(data); System.out.println("adminCert: " + response.getAdminCert().getCert()); List<SystemCertData> certs = response.getSystemCerts(); Iterator<SystemCertData> iterator = certs.iterator(); while (iterator.hasNext()) { SystemCertData cdata = iterator.next(); System.out.println("tag: " + cdata.getTag()); System.out.println("cert: " + cdata.getCert()); System.out.println("request: " + cdata.getRequest()); } }
From source file:de.clusteval.serverclient.BackendClient.java
/**
 * @param args
 *            Command line parameters for the backend client (see
 *            {@link #params}).
 * @throws IOException
 */
@SuppressWarnings("unused")
public static void main(String[] args) throws IOException {
    try {
        CommandLine params = parseParams(args, false);

        if (params.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("clustevalClient", clientCLIOptions);
            System.exit(0);
        }

        if (params.hasOption("version")) {
            System.out.println(VERSION);
            System.exit(0);
        }

        initLogging(params);
        Logger log = LoggerFactory.getLogger(BackendClient.class);

        System.out.println("Starting clusteval client");
        System.out.println(VERSION);
        System.out.println("=========================");

        // if command line arguments (except connection parameters) are
        // passed, we do not start a console
        Set<String> paramKeys = new HashSet<String>();
        @SuppressWarnings("unchecked")
        Iterator<Option> it = params.iterator();
        while (it.hasNext()) {
            paramKeys.add(it.next().getOpt());
        }
        paramKeys.remove("ip");
        paramKeys.remove("port");
        paramKeys.remove("clientId");

        if (paramKeys.size() > 0) {
            try {
                new BackendClient(args);
            } catch (Exception e) {
            }
        } else {
            String clientId;
            if (params.hasOption("clientId"))
                clientId = params.getOptionValue("clientId");
            else
                // parse args because of possible connection parameters
                clientId = new BackendClient(args).clientId;

            reader = new ConsoleReader();
            List<Completer> completers = new LinkedList<Completer>();
            completers.add(new BackendClientCompleter(clientId, args));

            String ip = params.hasOption("ip") ? params.getOptionValue("ip") : "localhost";
            String port = params.hasOption("port") ? params.getOptionValue("port") : "1099";

            setDefaultPromptAndCompleter(ip, port, completers);

            String line;
            while ((line = reader.readLine()) != null) {
                if (line.equalsIgnoreCase("quit") || line.equalsIgnoreCase("exit")) {
                    break;
                }
                boolean connectException = false;
                do {
                    try {
                        new BackendClient(
                                ArraysExt.merge(args, ("-clientId " + clientId + " -" + line).split(" ")));
                        connectException = false;
                    } catch (ConnectException e) {
                        e.printStackTrace();
                        connectException = true;
                    } catch (Exception e) {
                        log.warn(e.getMessage());
                    }
                    Thread.sleep(1000);
                } while (connectException);
                // }
            }
        }
    } catch (ParseException e) {
        System.err.println("Parsing failed. Reason: " + e.getMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("clustevalClient",
                "Invoking this client without any parameters will open a shell with tab-completion.",
                clientCLIOptions, "", true);
    } catch (Throwable t) {
        // t.printStackTrace();
    }
}
From source file:com.comcast.cqs.test.stress.CqsStressTester.java
/** * @param args/* w w w .ja v a 2 s . co m*/ */ public static void main(String[] args) { try { CMBControllerServlet.valueAccumulator.initializeAllCounters(); setup(); Util.initLog4j(); CqsStressTester tester = new CqsStressTester(); ScheduledExecutorService scheduledExecutorService = tester .createQueuesAndInitializePublishersAndReceivers(); //will clear queues long totalMessagesReceived = 0; long totalMessagesDeleted = 0; long totalMessagesRevisibled = 0; long totalEmptyResponses = 0; long totalDuplicates = 0; long totalOutOfOrderMessages = 0; int testDurationSeconds = CQSStressTestProperties.getInstance().getTestDurationSeconds(); Thread.sleep(testDurationSeconds * 1000); if (scheduledExecutorService != null) { scheduledExecutorService.shutdown(); } logger.info("===Sender Shutdown Triggered=="); for (String queueUrl : tester.receiverMap.keySet()) { for (Receiver receiver : tester.receiverMap.get(queueUrl)) { receiver.setContinueThread(false); } } for (String queueUrl : tester.receiverMap.keySet()) { Set<String> messageIdMaster = new HashSet<String>(); List<Integer> deleteTimesMaster = new ArrayList<Integer>(); List<Long> flightTimesMaster = new ArrayList<Long>(); List<Long> receiveTimesMaster = new ArrayList<Long>(); for (Receiver receiver : tester.receiverMap.get(queueUrl)) { receiver.join(); logger.warn( "==================================================================================================================="); logger.warn("TheadId=" + receiver.getThreadId() + " receiveMessageCount=" + receiver.getTotalMessagesReceived() + " deletedMessageCount=" + receiver.getTotalMessagesDeleted() + " revisibledMessageCount=" + receiver.getTotalMessagesRevisibled()); logger.warn( "==================================================================================================================="); totalMessagesReceived += receiver.getTotalMessagesReceived(); totalMessagesDeleted += receiver.getTotalMessagesDeleted(); totalMessagesRevisibled += receiver.getTotalMessagesRevisibled(); totalOutOfOrderMessages += receiver.getTotalOutOfOrderMessages(); totalDuplicates += checkAndCombine(messageIdMaster, receiver.messageIds); //deleteTimesMaster.addAll(receiver.deleteLatencyMSList); //flightTimesMaster.addAll(receiver.flightTimeList); totalEmptyResponses += receiver.emptyResponseCount; } logger.warn( "==================================================================================================================="); Iterator<String> iter = sendMessageIdSet.iterator(); while (iter.hasNext()) { logger.error("Missed message:" + iter.next()); } Collections.sort(deleteTimesMaster); Collections.sort(flightTimesMaster); //receiveTimesMaster.addAll(receiveLacencyMSList); Collections.sort(receiveTimesMaster); /*logger.warn("Receive message latencies"); if (receiveTimesMaster.size() > 0) { for (int i=5; i<=100; i+=5) { int percentileIndex = receiveTimesMaster.size()*i/100 - 1; if (percentileIndex < 0) { percentileIndex = 0; } logger.warn("" + i + "th percentile=" + receiveTimesMaster.get(percentileIndex)); } } logger.warn("==================================================================================================================="); logger.warn("Message flight time latencies"); if (flightTimesMaster.size() > 0) { for (int i=5; i<=100; i+=5) { int percentileIndex = flightTimesMaster.size()*i/100 - 1; if (percentileIndex < 0) { percentileIndex = 0; } logger.warn("" + i + "th percentile=" + flightTimesMaster.get(percentileIndex)); } } 
logger.warn("==================================================================================================================="); logger.warn("Delete message latencies"); if (deleteTimesMaster.size() > 0) { for (int i=5; i<=100; i+=5) { int percentileIndex = deleteTimesMaster.size()*i/100 - 1; if (percentileIndex < 0) { percentileIndex = 0; } logger.warn("" + i + "th percentile=" + deleteTimesMaster.get(percentileIndex)); } }*/ } logger.warn( "==================================================================================================================="); logger.warn( "==================================================================================================================="); logger.warn("totalMessagesSent=" + tester.messageCount.get() + " totalMessagesReceived=" + totalMessagesReceived + " totalMessagesDeleted=" + totalMessagesDeleted + " totalMessagesRevisibled=" + totalMessagesRevisibled); logger.warn( "==================================================================================================================="); logger.warn("totalEmptyResponses=" + totalEmptyResponses); logger.warn("totalDuplicates=" + totalDuplicates); logger.warn("totalOutOfOrderMessages=" + totalOutOfOrderMessages); logger.warn("totalMessagesLost=" + sendMessageIdSet.size()); logger.warn( "==================================================================================================================="); logger.warn("totalRunTimeMillis=" + (System.currentTimeMillis() - tester.startTime) + " status=Exit"); logger.warn( "==================================================================================================================="); logger.warn( "==================================================================================================================="); } catch (Exception e) { logger.error("Thread=main status=exception message=setup_failure ", e); } finally { CMBControllerServlet.valueAccumulator.deleteAllCounters(); } }
From source file:edu.oregonstate.eecs.mcplan.abstraction.EvaluateSimilarityFunction.java
/** * @param args/*from w w w . j ava 2 s .com*/ * @throws IOException * @throws FileNotFoundException */ public static void main(final String[] args) throws FileNotFoundException, IOException { final String experiment_file = args[0]; final File root_directory; if (args.length > 1) { root_directory = new File(args[1]); } else { root_directory = new File("."); } final CsvConfigurationParser csv_config = new CsvConfigurationParser(new FileReader(experiment_file)); final String experiment_name = FilenameUtils.getBaseName(experiment_file); final File expr_directory = new File(root_directory, experiment_name); expr_directory.mkdirs(); final Csv.Writer csv = new Csv.Writer( new PrintStream(new FileOutputStream(new File(expr_directory, "results.csv")))); final String[] parameter_headers = new String[] { "kpca.kernel", "kpca.rbf.sigma", "kpca.random_forest.Ntrees", "kpca.random_forest.max_depth", "kpca.Nbases", "multiclass.classifier", "multiclass.random_forest.Ntrees", "multiclass.random_forest.max_depth", "pairwise_classifier.max_branching", "training.label_noise" }; csv.cell("domain").cell("abstraction"); for (final String p : parameter_headers) { csv.cell(p); } csv.cell("Ntrain").cell("Ntest").cell("ami.mean").cell("ami.variance").cell("ami.confidence").newline(); for (int expr = 0; expr < csv_config.size(); ++expr) { try { final KeyValueStore expr_config = csv_config.get(expr); final Configuration config = new Configuration(root_directory.getPath(), expr_directory.getName(), expr_config); System.out.println("[Loading '" + config.training_data_single + "']"); final Instances single = WekaUtil .readLabeledDataset(new File(root_directory, config.training_data_single + ".arff")); final Instances train = new Instances(single, 0); final int[] idx = Fn.range(0, single.size()); int instance_counter = 0; Fn.shuffle(config.rng, idx); final int Ntrain = config.getInt("Ntrain_games"); // TODO: Rename? 
final double label_noise = config.getDouble("training.label_noise"); final int Nlabels = train.classAttribute().numValues(); assert (Nlabels > 0); for (int i = 0; i < Ntrain; ++i) { final Instance inst = single.get(idx[instance_counter++]); if (label_noise > 0 && config.rng.nextDouble() < label_noise) { int noisy_label = 0; do { noisy_label = config.rng.nextInt(Nlabels); } while (noisy_label == (int) inst.classValue()); System.out.println("Noisy label (" + inst.classValue() + " -> " + noisy_label + ")"); inst.setClassValue(noisy_label); } train.add(inst); inst.setDataset(train); } final Fn.Function2<Boolean, Instance, Instance> plausible_p = createPlausiblePredicate(config); final int Ntest = config.Ntest_games; int Ntest_added = 0; final ArrayList<Instances> tests = new ArrayList<Instances>(); while (instance_counter < single.size() && Ntest_added < Ntest) { final Instance inst = single.get(idx[instance_counter++]); boolean found = false; for (final Instances test : tests) { // Note that 'plausible_p' should be transitive if (plausible_p.apply(inst, test.get(0))) { WekaUtil.addInstance(test, inst); if (test.size() == 30) { Ntest_added += test.size(); } else if (test.size() > 30) { Ntest_added += 1; } found = true; break; } } if (!found) { final Instances test = new Instances(single, 0); WekaUtil.addInstance(test, inst); tests.add(test); } } final Iterator<Instances> test_itr = tests.iterator(); while (test_itr.hasNext()) { if (test_itr.next().size() < 30) { test_itr.remove(); } } System.out.println("=== tests.size() = " + tests.size()); System.out.println("=== Ntest_added = " + Ntest_added); System.out.println("[Training]"); final Evaluator evaluator = createEvaluator(config, train); // final Instances transformed_test = evaluator.prepareInstances( test ); System.out.println("[Evaluating]"); final int Nxval = evaluator.isSensitiveToOrdering() ? 10 : 1; final MeanVarianceAccumulator ami = new MeanVarianceAccumulator(); final MeanVarianceAccumulator errors = new MeanVarianceAccumulator(); final MeanVarianceAccumulator relative_error = new MeanVarianceAccumulator(); int c = 0; for (int xval = 0; xval < Nxval; ++xval) { for (final Instances test : tests) { // TODO: Debugging WekaUtil.writeDataset(new File(config.root_directory), "test_" + (c++), test); // transformed_test.randomize( new RandomAdaptor( config.rng ) ); // final ClusterContingencyTable ct = evaluator.evaluate( transformed_test ); test.randomize(new RandomAdaptor(config.rng)); final ClusterContingencyTable ct = evaluator.evaluate(test); System.out.println(ct); int Nerrors = 0; final MeanVarianceAccumulator mv = new MeanVarianceAccumulator(); for (int i = 0; i < ct.R; ++i) { final int max = Fn.max(ct.n[i]); Nerrors += (ct.a[i] - max); mv.add(((double) ct.a[i]) / ct.N * Nerrors / ct.a[i]); } errors.add(Nerrors); relative_error.add(mv.mean()); System.out.println("exemplar: " + test.get(0)); System.out.println("Nerrors = " + Nerrors); final PrintStream ct_out = new PrintStream( new FileOutputStream(new File(expr_directory, "ct_" + expr + "_" + xval + ".csv"))); ct.writeCsv(ct_out); ct_out.close(); final double ct_ami = ct.adjustedMutualInformation_max(); if (Double.isNaN(ct_ami)) { System.out.println("! 
ct_ami = NaN"); } else { ami.add(ct_ami); } System.out.println(); } } System.out.println("errors = " + errors.mean() + " (" + errors.confidence() + ")"); System.out.println( "relative_error = " + relative_error.mean() + " (" + relative_error.confidence() + ")"); System.out.println("AMI_max = " + ami.mean() + " (" + ami.confidence() + ")"); csv.cell(config.domain).cell(config.get("abstraction.discovery")); for (final String p : parameter_headers) { csv.cell(config.get(p)); } csv.cell(Ntrain).cell(Ntest).cell(ami.mean()).cell(ami.variance()).cell(ami.confidence()).newline(); } catch (final Exception ex) { ex.printStackTrace(); } } }
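Besides plain traversal, the example above (the tests filter in EvaluateSimilarityFunction) shows the one safe way to drop elements while a hasNext()/next() loop is running: Iterator.remove(). Removing through the backing collection itself inside the loop would typically fail fast with a ConcurrentModificationException. A minimal, self-contained sketch of that filter pattern, with arbitrary sample data and the same size-30 threshold as the example:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

public class RemoveWhileIterating {
    public static void main(String[] args) {
        List<List<Integer>> tests = new ArrayList<>();
        tests.add(new ArrayList<>(Arrays.asList(1, 2, 3)));     // fewer than 30 elements: dropped
        tests.add(new ArrayList<>(Collections.nCopies(30, 0))); // 30 elements: kept

        Iterator<List<Integer>> it = tests.iterator();
        while (it.hasNext()) {
            if (it.next().size() < 30) {
                it.remove(); // removes the element last returned by next()
            }
        }
        System.out.println("remaining test groups: " + tests.size()); // prints 1
    }
}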
From source file:com.netscape.cms.servlet.test.DRMTest.java
public static void main(String args[]) throws InvalidKeyException, NoSuchAlgorithmException, InvalidKeySpecException, SignatureException, IOException { String host = null;/* w w w . j av a 2 s .c o m*/ String port = null; String token_pwd = null; String db_dir = "./"; String protocol = "http"; String clientCertNickname = "KRA Administrator of Instance pki-kra's SjcRedhat Domain ID"; // parse command line arguments Options options = new Options(); options.addOption("h", true, "Hostname of the DRM"); options.addOption("p", true, "Port of the DRM"); options.addOption("w", true, "Token password"); options.addOption("d", true, "Directory for tokendb"); options.addOption("s", true, "Attempt Optional Secure SSL connection"); options.addOption("c", true, "Optional SSL Client cert Nickname"); try { CommandLineParser parser = new PosixParser(); CommandLine cmd = parser.parse(options, args); if (cmd.hasOption("h")) { host = cmd.getOptionValue("h"); } else { System.err.println("Error: no hostname provided."); usage(options); } if (cmd.hasOption("p")) { port = cmd.getOptionValue("p"); } else { System.err.println("Error: no port provided"); usage(options); } if (cmd.hasOption("w")) { token_pwd = cmd.getOptionValue("w"); } else { System.err.println("Error: no token password provided"); usage(options); } if (cmd.hasOption("d")) { db_dir = cmd.getOptionValue("d"); } if (cmd.hasOption("s")) { if (cmd.getOptionValue("s") != null && cmd.getOptionValue("s").equals("true")) { protocol = "https"; } } if (cmd.hasOption("c")) { String nick = cmd.getOptionValue("c"); if (nick != null && protocol.equals("https")) { clientCertNickname = nick; } } } catch (ParseException e) { System.err.println("Error in parsing command line options: " + e.getMessage()); usage(options); } // used for crypto operations byte iv[] = { 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1, 0x1 }; try { iv = genIV(8); } catch (Exception e) { log("Can't generate initialization vector use default: " + e.toString()); } // used for wrapping to send data to DRM String transportCert = null; // Data to be archived SymmetricKey vek = null; String passphrase = null; // Session keys and passphrases for recovery SymmetricKey sessionKey = null; byte[] wrappedRecoveryKey = null; String recoveryPassphrase = null; byte[] wrappedRecoveryPassphrase = null; // retrieved data (should match archived data) byte[] encryptedData = null; String recoveredKey = null; // various ids used in recovery/archival operations KeyId keyId = null; String clientKeyId = null; RequestId recoveryRequestId = null; // Variables for data structures from calls KeyRequestResponse requestResponse = null; Key keyData = null; KeyInfo keyInfo = null; // Initialize token try { CryptoManager.initialize(db_dir); } catch (AlreadyInitializedException e) { // it is ok if it is already initialized } catch (Exception e) { log("INITIALIZATION ERROR: " + e.toString()); System.exit(1); } // Set base URI and get client KRAClient client; SystemCertClient systemCertClient; KeyClient keyClient; NSSCryptoProvider nssCrypto; try { ClientConfig config = new ClientConfig(); config.setServerURI(protocol + "://" + host + ":" + port + "/kra"); config.setCertNickname(clientCertNickname); config.setCertDatabase(db_dir); config.setCertPassword(token_pwd); nssCrypto = new NSSCryptoProvider(config); client = new KRAClient(new PKIClient(config, nssCrypto)); systemCertClient = (SystemCertClient) client.getClient("systemcert"); keyClient = (KeyClient) client.getClient("key"); } catch (Exception e) { e.printStackTrace(); return; } // 
Test 1: Get transport certificate from DRM transportCert = systemCertClient.getTransportCert().getEncoded(); transportCert = transportCert.substring(CertData.HEADER.length(), transportCert.indexOf(CertData.FOOTER)); keyClient.setTransportCert(transportCert); log("Transport Cert retrieved from DRM: " + transportCert); // Test 2: Get list of completed key archival requests log("\n\nList of completed archival requests"); KeyRequestInfoCollection list = keyClient.listRequests("complete", "securityDataEnrollment"); if (list.getTotal() == 0) { log("No requests found"); } else { Iterator<KeyRequestInfo> iter = list.getEntries().iterator(); while (iter.hasNext()) { KeyRequestInfo info = iter.next(); printRequestInfo(info); } } // Test 3: Get list of key recovery requests log("\n\nList of completed recovery requests"); KeyRequestInfoCollection list2 = keyClient.listRequests("complete", "securityDataRecovery"); if (list2.getTotal() == 0) { log("No requests found"); } else { Iterator<KeyRequestInfo> iter2 = list2.getEntries().iterator(); while (iter2.hasNext()) { KeyRequestInfo info = iter2.next(); printRequestInfo(info); } } // Test 4: Generate and archive a symmetric key log("Archiving symmetric key"); clientKeyId = "UUID: 123-45-6789 VEK " + Calendar.getInstance().getTime().toString(); try { vek = nssCrypto.generateSessionKey(); byte[] encoded = nssCrypto.createPKIArchiveOptions(transportCert, vek, null, KeyRequestResource.DES3_ALGORITHM, 0, iv); KeyRequestResponse info = keyClient.archivePKIOptions(clientKeyId, KeyRequestResource.SYMMETRIC_KEY_TYPE, KeyRequestResource.DES3_ALGORITHM, 0, encoded); log("Archival Results:"); printRequestInfo(info.getRequestInfo()); keyId = info.getKeyId(); } catch (Exception e) { log("Exception in archiving symmetric key:" + e.getMessage()); e.printStackTrace(); } //Test 5: Get keyId for active key with client ID log("Getting key ID for symmetric key"); keyInfo = keyClient.getActiveKeyInfo(clientKeyId); printKeyInfo(keyInfo); KeyId keyId2 = keyInfo.getKeyId(); if (keyId2 == null) { log("No archived key found"); } else { log("Archived Key found: " + keyId); } if (!keyId.equals(keyId2)) { log("Error: key ids from search and archival do not match"); } else { log("Success: keyids from search and archival match."); } // Test 6: Submit a recovery request for the symmetric key using a session key log("Submitting a recovery request for the symmetric key using session key"); try { sessionKey = nssCrypto.generateSessionKey(); wrappedRecoveryKey = CryptoUtil.wrapSymmetricKey(nssCrypto.getManager(), nssCrypto.getToken(), transportCert, sessionKey); keyData = keyClient.retrieveKey(keyId, wrappedRecoveryKey); } catch (Exception e) { log("Exception in recovering symmetric key using session key: " + e.getMessage()); } encryptedData = keyData.getEncryptedData(); try { recoveredKey = Utils.base64encode(nssCrypto.unwrapWithSessionKey(encryptedData, sessionKey, KeyRequestResource.DES3_ALGORITHM, keyData.getNonceData())); } catch (Exception e) { log("Exception in unwrapping key: " + e.toString()); e.printStackTrace(); } if (!recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) { log("Error: recovered and archived keys do not match!"); } else { log("Success: recoverd and archived keys match!"); } // Test 7: Submit a recovery request for the symmetric key using a passphrase log("Submitting a recovery request for the symmetric key using a passphrase"); recoveryPassphrase = "Gimme me keys please"; try { sessionKey = nssCrypto.generateSessionKey(); wrappedRecoveryPassphrase = 
nssCrypto.wrapWithSessionKey(recoveryPassphrase, iv, sessionKey, KeyRequestResource.DES3_ALGORITHM); wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert); keyData = keyClient.retrieveKeyUsingWrappedPassphrase(keyId, wrappedRecoveryKey, wrappedRecoveryPassphrase, iv); } catch (Exception e) { log("Exception in recovering symmetric key using passphrase" + e.toString()); e.printStackTrace(); } encryptedData = keyData.getEncryptedData(); try { recoveredKey = Utils.base64encode(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase)); } catch (Exception e) { log("Error: unable to unwrap key using passphrase"); e.printStackTrace(); } if (recoveredKey == null || !recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) { log("Error: recovered and archived keys do not match!"); } else { log("Success: recovered and archived keys do match!"); } passphrase = "secret12345"; // Test 8: Generate and archive a passphrase clientKeyId = "UUID: 123-45-6789 RKEK " + Calendar.getInstance().getTime().toString(); try { requestResponse = keyClient.archivePassphrase(clientKeyId, passphrase); log("Archival Results:"); printRequestInfo(requestResponse.getRequestInfo()); keyId = requestResponse.getKeyId(); } catch (Exception e) { log("Exception in archiving symmetric key:" + e.toString()); e.printStackTrace(); } //Test 9: Get keyId for active passphrase with client ID log("Getting key ID for passphrase"); keyInfo = keyClient.getActiveKeyInfo(clientKeyId); printKeyInfo(keyInfo); keyId2 = keyInfo.getKeyId(); if (keyId2 == null) { log("No archived key found"); } else { log("Archived Key found: " + keyId); } if (!keyId.equals(keyId2)) { log("Error: key ids from search and archival do not match"); } else { log("Success: key ids from search and archival do match!"); } // Test 10: Submit a recovery request for the passphrase using a session key log("Submitting a recovery request for the passphrase using session key"); sessionKey = null; wrappedRecoveryKey = null; try { keyData = keyClient.retrieveKeyByPassphrase(keyId, recoveryPassphrase); } catch (Exception e) { log("Exception in recovering passphrase using session key: " + e.getMessage()); } encryptedData = keyData.getEncryptedData(); try { recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8"); } catch (Exception e) { log("Exception in unwrapping key: " + e.toString()); e.printStackTrace(); } if (recoveredKey == null || !recoveredKey.equals(passphrase)) { log("Error: recovered and archived passphrases do not match!"); } else { log("Success: recovered and archived passphrases do match!"); } // Test 11: Submit a recovery request for the passphrase using a passphrase try { sessionKey = nssCrypto.generateSessionKey(); wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert); wrappedRecoveryPassphrase = nssCrypto.wrapWithSessionKey(recoveryPassphrase, iv, sessionKey, KeyRequestResource.DES3_ALGORITHM); keyData = keyClient.retrieveKeyUsingWrappedPassphrase(keyId, wrappedRecoveryKey, wrappedRecoveryPassphrase, iv); } catch (Exception e1) { e1.printStackTrace(); System.out.println("Test 17: " + e1.getMessage()); System.exit(-1); } encryptedData = keyData.getEncryptedData(); try { recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8"); } catch (Exception e) { log("Error: cannot unwrap key using passphrase"); e.printStackTrace(); } if (recoveredKey == null || !recoveredKey.equals(passphrase)) { log("Error: 
recovered and archived passphrases do not match!"); } else { log("Success: recovered and archived passphrases do match!"); } // Test 12: Get key log("Getting passphrase: " + keyId); try { keyData = keyClient.retrieveKeyByPassphrase(keyId, recoveryPassphrase); } catch (Exception e1) { e1.printStackTrace(); } encryptedData = keyData.getEncryptedData(); try { recoveredKey = new String(nssCrypto.unwrapWithPassphrase(encryptedData, recoveryPassphrase), "UTF-8"); } catch (Exception e) { log("Error: Can't unwrap recovered key using passphrase"); e.printStackTrace(); } if (recoveredKey == null || !recoveredKey.equals(passphrase)) { log("Error: recovered and archived passphrases do not match!"); } else { log("Success: recovered and archived passphrases do match!"); } // Test 13: Get non-existent request RequestId requestId = new RequestId("0xabcdef"); log("Getting non-existent request: " + requestId.toHexString()); try { keyClient.getRequestInfo(requestId); log("Error: getting non-existent request does not throw an exception"); } catch (RequestNotFoundException e) { log("Success: getting non-existent request throws an exception: " + e.getMessage() + " (" + e.getRequestId().toHexString() + ")"); } // Test 14: Request x509 key recovery // This test requires to retrieve keyId and matching certificate // from installed instances of CA and DRM String keyID = "1"; String b64Certificate = "MIIC+TCCAeGgAwIBAgIBDDANBgkqhkiG9w0BAQsFADBOMSswKQYDVQQKDCJ1c2Vy" + "c3lzLnJlZGhhdC5jb20gU2VjdXJpdHkgRG9tYWluMR8wHQYDVQQDDBZDQSBTaWdu" + "aW5nIENlcnRpZmljYXRlMB4XDTEzMTAyNTE5MzQwM1oXDTE0MDQyMzE5MzQwM1ow" + "EzERMA8GCgmSJomT8ixkAQEMAXgwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB" + "ALhLfGmKvxFsKXPh49q1QsluXU3WlyS1XnpDLgOAhgTNgO4sG6CpPdv6hZYIvQBb" + "ZQ5bhuML+NXK+Q+EIiNk1cUTxgL3a30sPzy6QaFWxwM8i4uXm4nCBYv7T+n4V6/O" + "xHIM2Ch/dviAb3vz+M9trErv9t+d2H8jNXT3sHuDb/kvAgMBAAGjgaAwgZ0wHwYD" + "VR0jBBgwFoAUh1cxWFRY+nMsx4odQQI1GqyFxP8wSwYIKwYBBQUHAQEEPzA9MDsG" + "CCsGAQUFBzABhi9odHRwOi8vZG9ndGFnMjAudXNlcnN5cy5yZWRoYXQuY29tOjgw" + "ODAvY2Evb2NzcDAOBgNVHQ8BAf8EBAMCBSAwHQYDVR0lBBYwFAYIKwYBBQUHAwIG" + "CCsGAQUFBwMEMA0GCSqGSIb3DQEBCwUAA4IBAQCvmbUzQOouE2LgQQcKfmgwwJMJ" + "9tMrPwDUtyFdaIFoPL4uZaujSscaN4IWK2r5vIMJ65jwYCI7sI9En2ZfO28J9dQj" + "lpqu6TaJ+xtaMk7OvXpVB7lJk73HAttMGjETlkoq/6EjxcugmJsDqVD0b2tO7Vd0" + "hroBe2uPDHM2ASewZF415lUcRh0URtmxSazTInbyxpmy1wgSJQ0C6fMCeT+hUFlA" + "0P4k1TIprapGVq7FpKcqlhK2gTBfTSnoO7gmXG/9MxJiYpb/Aph8ptXq6quHz1Mj" + "greWr3xTsy6gF2yphUEkGHh4v22XvK+FLx9Jb6zloMWA2GG9gpUpvMnl1fH4"; log("Requesting X509 key recovery."); recoveryRequestId = keyClient.recoverKey(new KeyId(keyID), null, null, null, b64Certificate) .getRequestInfo().getRequestId(); log("Requesting X509 key recovery request: " + recoveryRequestId); // Test 55: Approve x509 key recovery log("Approving X509 key recovery request: " + recoveryRequestId); keyClient.approveRequest(recoveryRequestId); // Test 16: Recover x509 key log("Recovering X509 key based on request: " + recoveryRequestId); try { // KeyData recoveredX509Key = client.recoverKey(recoveryRequestId, "netscape"); // log("Success: X509Key recovered: "+ recoveredX509Key.getP12Data()); } catch (RequestNotFoundException e) { log("Error: recovering X509Key"); } // Test 1: Get transport certificate from DRM transportCert = systemCertClient.getTransportCert().getEncoded(); transportCert = transportCert.substring(CertData.HEADER.length(), transportCert.indexOf(CertData.FOOTER)); log("Transport Cert retrieved from DRM: " + transportCert); // Test 17: Get list of completed key archival requests log("\n\nList 
of completed archival requests"); list = keyClient.listRequests("complete", IRequest.SYMKEY_GENERATION_REQUEST); if (list.getTotal() == 0) { log("No requests found"); } else { Iterator<KeyRequestInfo> iter = list.getEntries().iterator(); while (iter.hasNext()) { KeyRequestInfo info = iter.next(); printRequestInfo(info); } } // test 18: Generate symmetric key clientKeyId = "Symmetric Key #1234f " + Calendar.getInstance().getTime().toString(); List<String> usages = new ArrayList<String>(); usages.add(SymKeyGenerationRequest.DECRYPT_USAGE); usages.add(SymKeyGenerationRequest.ENCRYPT_USAGE); KeyRequestResponse genKeyResponse = keyClient.generateSymmetricKey(clientKeyId, KeyRequestResource.AES_ALGORITHM, 128, usages, null); printRequestInfo(genKeyResponse.getRequestInfo()); keyId = genKeyResponse.getKeyId(); // test 19: Get keyId for active key with client ID log("Getting key ID for symmetric key"); keyInfo = keyClient.getActiveKeyInfo(clientKeyId); printKeyInfo(keyInfo); keyId2 = keyInfo.getKeyId(); if (keyId2 == null) { log("No archived key found"); } else { log("Archived Key found: " + keyId); } if (!keyId.equals(keyId2)) { log("Error: key ids from search and archival do not match"); } else { log("Success: keyids from search and archival match."); } // Test 20: Submit a recovery request for the symmetric key using a session key log("Submitting a recovery request for the symmetric key using session key"); try { sessionKey = nssCrypto.generateSessionKey(); wrappedRecoveryKey = nssCrypto.wrapSessionKeyWithTransportCert(sessionKey, transportCert); keyData = keyClient.retrieveKey(keyId, wrappedRecoveryKey); } catch (Exception e) { log("Exception in recovering symmetric key using session key: " + e.getMessage()); } encryptedData = keyData.getEncryptedData(); try { recoveredKey = new String(nssCrypto.unwrapWithSessionKey(encryptedData, sessionKey, KeyRequestResource.DES3_ALGORITHM, keyData.getNonceData())); } catch (Exception e) { log("Exception in unwrapping key: " + e.toString()); e.printStackTrace(); } // test 21: Generate symmetric key - invalid algorithm try { genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1235", "AFS", 128, usages, null); } catch (Exception e) { log("Exception: " + e); } // test 22: Generate symmetric key - invalid key size try { genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1236", "AES", 0, usages, null); } catch (Exception e) { log("Exception: " + e); } // test 23: Generate symmetric key - usages not defined try { genKeyResponse = keyClient.generateSymmetricKey("Symmetric Key #1236", "DES", 56, null, null); } catch (Exception e) { log("Exception: " + e); } // Test 24: Generate and archive a symmetric key of type AES log("Archiving symmetric key"); clientKeyId = "UUID: 123-45-6789 VEK " + Calendar.getInstance().getTime().toString(); try { vek = nssCrypto.generateSymmetricKey(KeyRequestResource.AES_ALGORITHM, 128); byte[] encoded = CryptoUtil.createPKIArchiveOptions(nssCrypto.getManager(), nssCrypto.getToken(), transportCert, vek, null, KeyGenAlgorithm.DES3, 0, new IVParameterSpec(iv)); KeyRequestResponse response = keyClient.archivePKIOptions(clientKeyId, KeyRequestResource.SYMMETRIC_KEY_TYPE, KeyRequestResource.AES_ALGORITHM, 128, encoded); log("Archival Results:"); printRequestInfo(response.getRequestInfo()); keyId = response.getKeyId(); } catch (Exception e) { log("Exception in archiving symmetric key:" + e.getMessage()); e.printStackTrace(); } //Test 25: Get keyId for active key with client ID log("Getting key ID for symmetric key"); 
keyInfo = keyClient.getActiveKeyInfo(clientKeyId); printKeyInfo(keyInfo); keyId2 = keyInfo.getKeyId(); if (keyId2 == null) { log("No archived key found"); } else { log("Archived Key found: " + keyId); } if (!keyId.equals(keyId2)) { log("Error: key ids from search and archival do not match"); } else { log("Success: keyids from search and archival match."); } // Test 26: Submit a recovery request for the symmetric key log("Submitting a recovery request for the symmetric key without using session key"); try { keyData = keyClient.retrieveKey(keyId); } catch (Exception e) { log("Exception in recovering symmetric key using session key: " + e.getMessage()); } // Since no session key is provided externally, the retrieveKey method // generates a session key, wraps it with transport cert and completes the request. // The encrypted data is then unwrapped using the temporary session key and set to // the attribute privateData. recoveredKey = Utils.base64encode(keyData.getData()); if (!recoveredKey.equals(Utils.base64encode(vek.getEncoded()))) { log("Error: recovered and archived keys do not match!"); } else { log("Success: recoverd and archived keys match!"); } // Test 27: Get key info log("getting key info for existing key"); printKeyInfo(keyClient.getKeyInfo(keyId)); //Test 28: Modify status log("modify the key status"); keyClient.modifyKeyStatus(keyId, KeyResource.KEY_STATUS_INACTIVE); keyInfo = keyClient.getKeyInfo(keyId); printKeyInfo(keyInfo); //Test 29: Confirm no more active keys with this ID log("look for active keys with this id"); clientKeyId = keyInfo.getClientKeyID(); try { keyInfo = keyClient.getActiveKeyInfo(clientKeyId); printKeyInfo(keyInfo); } catch (ResourceNotFoundException e) { log("Success: ResourceNotFound exception thrown: " + e); } // Test asymmetric key generation. String[] algs = { "RSA", "DSA" }; for (int i = 0; i < algs.length; i++) { // Test 30: Generate Asymmetric keys - RSA key System.out.println("\nTesting asymmetric key generation for algorithm " + algs[i]); clientKeyId = "AsymKey #" + Calendar.getInstance().getTimeInMillis(); usages.clear(); usages.add(AsymKeyGenerationRequest.SIGN); usages.add(AsymKeyGenerationRequest.VERIFY); KeyRequestResponse response = keyClient.generateAsymmetricKey(clientKeyId, algs[i], 1024, usages, null); printRequestInfo(response.getRequestInfo()); System.out.println(); // Test 31: Get information of the newly generated asymmetric keys System.out.println("Fetch information of the newly generated asymmetric keys."); System.out.println(); KeyInfo info = keyClient.getKeyInfo(response.getKeyId()); printKeyInfo(info); System.out.println(); // Test 32: Retrieve private key data System.out.println("Retrieving and verifying the generated private key."); try { keyData = keyClient.retrieveKey(response.getKeyId()); } catch (Exception e) { log("Exception retrieving the private key data."); e.printStackTrace(); } // Test 33: Verify the generated key pair. if (isKeyPairValid(algs[i], keyData.getData(), info.getPublicKey())) { log("The key pair generated using " + algs[i] + " algorithm is valid."); } else { log("The key pair generated using " + algs[i] + " algorithm is invalid."); } System.out.println(); } // Test 34: }
From source file:com.g2inc.scap.library.domain.SCAPContentManager.java
/**
 * A main method only used for testing
 *
 * @param args Command-line arguments
 */
public static void main(String[] args) throws Exception {
    BasicConfigurator.configure();
    LOG.trace("SchemaParser starting, schemaVersion =" + args[0] + ", xsd File=" + args[1]);

    File xsdFile = new File(args[1]);
    if (!xsdFile.exists()) {
        LOG.error("Usage: java SchemaParser <name of xsd file>");
    }

    SCAPDocumentTypeEnum version = SCAPDocumentTypeEnum.valueOf(args[0]);
    SCAPContentManager mgr = SCAPContentManager.getInstance(new File(args[1]));

    // schema-related data should now be populated
    List<String> platformTypes = mgr.getValidPlatforms(version);
    LOG.trace("STARTING PLATFORMS");
    for (int i = 0; i < platformTypes.size(); i++) {
        LOG.trace(platformTypes.get(i));
    }

    String[] platforms = { "windows", "independent" };
    List<NameDoc> list = null;
    Iterator<NameDoc> iter = null;

    LOG.trace("STARTING TESTS");
    list = mgr.getValidTestTypes(version, platforms);
    iter = list.iterator();
    while (iter.hasNext()) {
        NameDoc testName = iter.next();
        LOG.trace("\t" + testName);
    }

    LOG.trace("STARTING OBJECTS");
    list = mgr.getValidObjectTypes(version, platforms);
    iter = list.iterator();
    while (iter.hasNext()) {
        NameDoc objName = iter.next();
        LOG.trace("\t" + objName);
        LOG.trace("STARTING OBJECT ENTITIES FOR " + objName.getName());
        List<OvalEntity> entityList = mgr.getValidObjectEntityTypes(version, "windows", objName.getName());
        if (entityList != null && entityList.size() > 0) {
            for (int i = 0; i < entityList.size(); i++) {
                LOG.trace("\t\t" + entityList.get(i).toString());
            }
        }
    }

    LOG.trace("STARTING STATES");
    list = mgr.getValidStateTypes(version, platforms);
    iter = list.iterator();
    while (iter.hasNext()) {
        NameDoc stateName = iter.next();
        LOG.trace("\t" + stateName);
        LOG.trace("STARTING STATE ENTITIES FOR " + stateName.getName());
        List<OvalEntity> entityList = mgr.getValidStateEntityTypes(version, "windows", stateName.getName());
        if (entityList != null && entityList.size() > 0) {
            for (int i = 0; i < entityList.size(); i++) {
                LOG.trace("\t\t" + entityList.get(i).toString());
            }
        }
    }
}
From source file:com.act.biointerpretation.metadata.ProteinMetadataFactory.java
public static void main(String[] args) throws Exception {
    // TODO: This is referencing a temporary collection. Change it!
    // TODO: FIX THIS BEFORE MERGE!
    NoSQLAPI api = new NoSQLAPI("actv01_vijay_proteins", "actv01_vijay_proteins");
    Iterator<Reaction> iterator = api.readRxnsFromInKnowledgeGraph();

    //Create a single instance of the factory method to use for all json
    ProteinMetadataFactory factory = ProteinMetadataFactory.initiate();

    //Run some tests
    try {
        if (factory.testHandlesubunits() == true) {
            System.out.println("Subunit test OK");
        }
    } catch (Exception err) {
        System.err.println("Failed to test subunits");
    }

    //Create a list to aggregate the results of the database scan
    List<ProteinMetadata> agg = new ArrayList<>();

    //Scan the database and store ProteinMetadata objects
    while (iterator.hasNext()) {
        Reaction rxn = iterator.next();
        Reaction.RxnDataSource source = rxn.getDataSource();
        if (!source.equals(Reaction.RxnDataSource.BRENDA)) {
            continue;
        }
        Set<JSONObject> jsons = rxn.getProteinData();
        for (JSONObject json : jsons) {
            ProteinMetadata meta = factory.create(json);
            agg.add(meta);
        }
    }

    //Write out any messages to file
    StringBuilder sb = new StringBuilder();
    for (String aline : factory.dataList) {
        sb.append(aline).append("\n");
    }
    File outfile = new File("output/ProteinMetadata/Factory_output.txt");
    if (outfile.exists()) {
        outfile.delete();
    }
    FileUtils.writeStringToFile(outfile, sb.toString());

    sb = new StringBuilder();
    for (String key : factory.dataMap.keySet()) {
        int value = factory.dataMap.get(key);
        sb.append(key + "\t" + value + "\n");
    }
    outfile = new File("output/ProteinMetadata/Factory_output_map.txt");
    if (outfile.exists()) {
        outfile.delete();
    }
    FileUtils.writeStringToFile(outfile, sb.toString());

    //Count up the results of modifications to get statistics
    int falsecount = 0;
    int truecount = 0;
    int nullcount = 0;
    for (ProteinMetadata datum : agg) {
        if (datum == null) {
            System.err.println("null datum");
            continue;
        }
        if (datum.modifications == null) {
            nullcount++;
        } else if (datum.modifications == false) {
            falsecount++;
        } else if (datum.modifications == true) {
            truecount++;
        }
    }
    System.out.println("Total # protein metadata: " + agg.size());
    System.out.println();
    System.out.println("modification true count: " + truecount);
    System.out.println("modification false count: " + falsecount);
    System.out.println("modification null count: " + nullcount);
    System.out.println();

    //Get some statistics for cloned
    nullcount = 0;
    int emptycount = 0;
    int colicount = 0;
    int humancount = 0;
    int bothcount = 0;
    for (ProteinMetadata datum : agg) {
        if (datum == null) {
            System.err.println("null datum");
            continue;
        }
        if (datum.cloned == null) {
            nullcount++;
            continue;
        }
        if (datum.cloned.isEmpty()) {
            emptycount++;
            continue;
        }
        Integer human = datum.cloned.get(Host.Hsapiens);
        if (human != null && human > 0) {
            humancount++;
        }
        Integer coli = datum.cloned.get(Host.Ecoli);
        if (coli != null && coli > 0) {
            colicount++;
            if (human != null && human > 0) {
                bothcount++;
            }
        }
    }
    System.out.println("cloned null count: " + nullcount);
    System.out.println("cloned empty count: " + emptycount);
    System.out.println("cloned coli count: " + colicount);
    System.out.println("cloned human count: " + humancount);
    System.out.println("cloned both count: " + bothcount);
    System.out.println();
}
From source file:com.cloud.test.stress.StressTestDirectAttach.java
public static void main(String[] args) {
    String host = "http://localhost";
    String port = "8092";
    String devPort = "8080";
    String apiUrl = "/client/api";
    try {
        // Parameters
        List<String> argsList = Arrays.asList(args);
        Iterator<String> iter = argsList.iterator();
        while (iter.hasNext()) {
            String arg = iter.next();
            // host
            if (arg.equals("-h")) {
                host = "http://" + iter.next();
            }
            if (arg.equals("-p")) {
                port = iter.next();
            }
            if (arg.equals("-dp")) {
                devPort = iter.next();
            }
            if (arg.equals("-t")) {
                numThreads = Integer.parseInt(iter.next());
            }
            if (arg.equals("-s")) {
                sleepTime = Long.parseLong(iter.next());
            }
            if (arg.equals("-a")) {
                accountName = iter.next();
            }
            if (arg.equals("-c")) {
                cleanUp = Boolean.parseBoolean(iter.next());
                if (!cleanUp)
                    sleepTime = 0L; // no need to wait if we don't ever clean up
            }
            if (arg.equals("-r")) {
                repeat = Boolean.parseBoolean(iter.next());
            }
            if (arg.equals("-i")) {
                internet = Boolean.parseBoolean(iter.next());
            }
            if (arg.equals("-w")) {
                wait = Integer.parseInt(iter.next());
            }
            if (arg.equals("-z")) {
                zoneId = iter.next();
            }
            if (arg.equals("-so")) {
                serviceOfferingId = iter.next();
            }
        }

        final String server = host + ":" + port + "/";
        final String developerServer = host + ":" + devPort + apiUrl;
        s_logger.info("Starting test against server: " + server + " with " + numThreads + " thread(s)");
        if (cleanUp)
            s_logger.info("Clean up is enabled, each test will wait " + sleepTime + " ms before cleaning up");

        for (int i = 0; i < numThreads; i++) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    do {
                        String username = null;
                        try {
                            long now = System.currentTimeMillis();
                            Random ran = new Random();
                            username = Math.abs(ran.nextInt()) + "-user";
                            NDC.push(username);
                            s_logger.info("Starting test for the user " + username);
                            int response = executeDeployment(server, developerServer, username);
                            boolean success = false;
                            String reason = null;
                            if (response == 200) {
                                success = true;
                                if (internet) {
                                    s_logger.info("Deploy successful...waiting 5 minutes before SSH tests");
                                    // Wait 5 minutes so the Windows VM can boot up and do a sysprep.
                                    Thread.sleep(300000L);
                                    s_logger.info("Begin Linux SSH test for account " + _account.get());
                                    reason = sshTest(_linuxIP.get(), _linuxPassword.get());
                                    if (reason == null) {
                                        s_logger.info("Linux SSH test successful for account " + _account.get());
                                    }
                                }
                                if (reason == null) {
                                    if (internet) {
                                        s_logger.info("Windows SSH test successful for account " + _account.get());
                                    } else {
                                        s_logger.info("deploy test successful....now cleaning up");
                                        if (cleanUp) {
                                            s_logger.info("Waiting " + sleepTime + " ms before cleaning up vms");
                                            Thread.sleep(sleepTime);
                                        } else {
                                            success = true;
                                        }
                                    }
                                    if (usageIterator >= numThreads) {
                                        int eventsAndBillingResponseCode = executeEventsAndBilling(server, developerServer);
                                        s_logger.info("events and usage records command finished with response code: "
                                                + eventsAndBillingResponseCode);
                                        usageIterator = 1;
                                    } else {
                                        s_logger.info("Skipping events and usage records for this user: usageIterator "
                                                + usageIterator + " and number of Threads " + numThreads);
                                        usageIterator++;
                                    }
                                    if ((users == null) && (accountName == null)) {
                                        s_logger.info("Sending cleanup command");
                                        int cleanupResponseCode = executeCleanup(server, developerServer, username);
                                        s_logger.info("cleanup command finished with response code: " + cleanupResponseCode);
                                        success = (cleanupResponseCode == 200);
                                    } else {
                                        s_logger.info("Sending stop DomR / destroy VM command");
                                        int stopResponseCode = executeStop(server, developerServer, username);
                                        s_logger.info("stop(destroy) command finished with response code: " + stopResponseCode);
                                        success = (stopResponseCode == 200);
                                    }
                                } else {
                                    // Just stop but don't destroy the VMs/Routers
                                    s_logger.info("SSH test failed for account " + _account.get()
                                            + " with reason '" + reason + "', stopping VMs");
                                    int stopResponseCode = executeStop(server, developerServer, username);
                                    s_logger.info("stop command finished with response code: " + stopResponseCode);
                                    success = false; // since the SSH test failed, mark the whole test as a failure
                                }
                            } else {
                                // Just stop but don't destroy the VMs/Routers
                                s_logger.info("Deploy test failed with reason '" + reason + "', stopping VMs");
                                int stopResponseCode = executeStop(server, developerServer, username);
                                s_logger.info("stop command finished with response code: " + stopResponseCode);
                                success = false; // since the deploy test failed, mark the whole test as a failure
                            }

                            if (success) {
                                s_logger.info("***** Completed test for user : " + username + " in "
                                        + ((System.currentTimeMillis() - now) / 1000L) + " seconds");
                            } else {
                                s_logger.info("##### FAILED test for user : " + username + " in "
                                        + ((System.currentTimeMillis() - now) / 1000L) + " seconds with reason : " + reason);
                            }
                            s_logger.info("Sleeping for " + wait + " seconds before starting next iteration");
                            Thread.sleep(wait);
                        } catch (Exception e) {
                            s_logger.warn("Error in thread", e);
                            try {
                                int stopResponseCode = executeStop(server, developerServer, username);
                                s_logger.info("stop response code: " + stopResponseCode);
                            } catch (Exception e1) {
                            }
                        } finally {
                            NDC.clear();
                        }
                    } while (repeat);
                }
            }).start();
        }
    } catch (Exception e) {
        s_logger.error(e);
    }
}