Usage examples for java.util.Map.put
V put(K key, V value);
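put associates value with key, replacing any existing mapping for that key. It returns the value previously associated with the key, or null if there was none (note that a null return can also mean the key was explicitly mapped to null, in maps that permit null values). A minimal sketch of this contract, before the real-world examples below:

    import java.util.HashMap;
    import java.util.Map;

    public class PutContractDemo {
        public static void main(String[] args) {
            Map<String, Integer> ages = new HashMap<>();
            // No prior mapping: put returns null
            System.out.println(ages.put("alice", 30)); // null
            // Existing mapping: put replaces it and returns the old value
            System.out.println(ages.put("alice", 31)); // 30
            System.out.println(ages.get("alice"));     // 31
        }
    }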
From source file:com.pureinfo.srm.reports.table.data.Index3SCIStatistic.java (populates a raw Map with String[] query parameters for a report)
public static void main(String[] args) {
    try {
        Index3SCIStatistic s = new Index3SCIStatistic();
        Map m = new HashMap();
        String[] start = { "2006" };
        m.put("start", start);
        String[] end = { "2007" };
        m.put("end", end);
        String[] audit = { "a" };
        m.put("audit", audit);
        String[] isFirst = { "y" };
        m.put("isFirst", isFirst);
        s.setParameters(m);
        // s.doOutlayQuery();
        Object[][] o = s.buildDatas(false, false);
        for (int i = 0; i < o.length; i++) {
            for (int j = 0; j < o[i].length; j++) {
                System.out.print("\t\t\t" + o[i][j]);
            }
            System.out.println();
        }
    } catch (Exception ex) {
        ex.printStackTrace(System.err);
    }
}
From source file:fr.sewatech.sewatoool.impress.Main.java (collects command-line options into a Map<String, String>)
/**
 * @param args cf. usage.txt
 * @author "Alexis Hassler (alexis.hassler@sewatech.org)"
 */
public static void main(String[] args) {
    // Parse the command-line arguments
    if (args.length == 0) {
        message(MESSAGE_WRONG_ARGS);
        logger.warn("Argument problem: no arguments");
    }
    Map<String, String> arguments = new HashMap<String, String>();
    String argName = null;
    String documentLocation = null;
    for (String arg : args) {
        if ("--".equals(arg.substring(0, 2))) {
            argName = arg.substring(2);
            arguments.put(argName, "");
        } else if (argName != null) {
            arguments.put(argName, arg);
            argName = null;
        } else if (documentLocation == null) {
            documentLocation = arg;
        } else {
            message(MESSAGE_WRONG_ARGS);
            logger.warn("Argument problem: file name given twice");
        }
    }
    if (logger.isDebugEnabled()) {
        logger.debug("Arguments taken into account:");
        for (Entry<String, String> option : arguments.entrySet()) {
            logger.debug("    Argument " + option.getKey() + "=" + option.getValue());
        }
    }
    if (arguments.containsKey("help")) {
        if (logger.isDebugEnabled()) {
            logger.debug("Displaying help");
        }
        doHelp();
    }
    try {
        ImpressService service = new ImpressService();
        ImpressDocument document = service.loadDocument(documentLocation, arguments.containsKey("hidden"));
        doToc(arguments, service, document);
        doPdf(arguments, service, document);
        if (!arguments.containsKey("no-save")) {
            service.save(document);
        }
        if (!arguments.containsKey("no-close")) {
            service.close(document);
        }
    } catch (Throwable e) {
        logger.error("There is a problem...", e);
    } finally {
        System.exit(0);
    }
}
From source file:com.example.geomesa.kafka.KafkaLoadTester.java (overrides a data-store configuration entry in a Map between lookups)
public static void main(String[] args) throws Exception {
    // read command line args for a connection to Kafka
    CommandLineParser parser = new BasicParser();
    Options options = getCommonRequiredOptions();
    CommandLine cmd = parser.parse(options, args);
    String visibility = getVisibility(cmd);
    Integer delay = getDelay(cmd);
    if (visibility == null) {
        System.out.println("visibility: null");
    } else {
        System.out.println("visibility: '" + visibility + "'");
    }

    // create the producer and consumer KafkaDataStore objects
    Map<String, String> dsConf = getKafkaDataStoreConf(cmd);
    System.out.println("KDS config: " + dsConf);
    dsConf.put("kafka.consumer.count", "0");
    DataStore producerDS = DataStoreFinder.getDataStore(dsConf);
    dsConf.put("kafka.consumer.count", "1");
    DataStore consumerDS = DataStoreFinder.getDataStore(dsConf);

    // verify that we got back our KafkaDataStore objects properly
    if (producerDS == null) {
        throw new Exception("Null producer KafkaDataStore");
    }
    if (consumerDS == null) {
        throw new Exception("Null consumer KafkaDataStore");
    }

    try {
        // create the schema which creates a topic in Kafka
        // (only needs to be done once)
        final String sftName = "KafkaStressTest";
        final String sftSchema = "name:String,age:Int,step:Double,lat:Double,dtg:Date,*geom:Point:srid=4326";
        SimpleFeatureType sft = SimpleFeatureTypes.createType(sftName, sftSchema);
        producerDS.createSchema(sft);

        System.out.println("Register KafkaDataStore in GeoServer (Press enter to continue)");
        System.in.read();

        // the live consumer must be created before the producer writes features
        // in order to read streaming data,
        // i.e. the live consumer will only read data written after its instantiation
        SimpleFeatureStore producerFS = (SimpleFeatureStore) producerDS.getFeatureSource(sftName);
        SimpleFeatureSource consumerFS = consumerDS.getFeatureSource(sftName);

        // creates and adds SimpleFeatures to the producer every 1/5th of a second
        System.out.println("Writing features to Kafka... refresh GeoServer layer preview to see changes");
        SimpleFeatureBuilder builder = new SimpleFeatureBuilder(sft);
        Integer numFeats = getLoad(cmd);

        System.out.println("Building a list of " + numFeats + " SimpleFeatures.");
        List<SimpleFeature> features = IntStream.range(1, numFeats)
                .mapToObj(i -> createFeature(builder, i, visibility)).collect(Collectors.toList());

        // set variables to estimate feature production rate
        Long startTime = null;
        Long featuresSinceStartTime = 0L;
        int cycle = 0;
        // collect enough features to get an accurate rate estimate
        int cyclesToSkip = 50000 / numFeats;

        while (true) {
            // write features
            features.forEach(feat -> {
                try {
                    DefaultFeatureCollection featureCollection = new DefaultFeatureCollection();
                    featureCollection.add(feat);
                    producerFS.addFeatures(featureCollection);
                } catch (Exception e) {
                    System.out.println("Caught an exception while writing features.");
                    e.printStackTrace();
                }
                updateFeature(feat);
            });

            // count features written
            Integer consumerSize = consumerFS.getFeatures().size();
            cycle++;
            featuresSinceStartTime += consumerSize;
            System.out.println("At " + new Date() + " wrote " + consumerSize + " features");

            // if we've collected enough features, calculate the rate
            if (cycle >= cyclesToSkip || startTime == null) {
                Long endTime = System.currentTimeMillis();
                if (startTime != null) {
                    Long diffTime = endTime - startTime;
                    Double rate = (featuresSinceStartTime.doubleValue() * 1000.0) / diffTime.doubleValue();
                    System.out.printf("%.1f feats/sec (%d/%d)\n", rate, featuresSinceStartTime, diffTime);
                }
                cycle = 0;
                startTime = endTime;
                featuresSinceStartTime = 0L;
            }

            // sleep before next write
            if (delay != null) {
                System.out.printf("Sleeping for %d ms\n", delay);
                Thread.sleep(delay);
            }
        }
    } finally {
        producerDS.dispose();
        consumerDS.dispose();
    }
}
From source file:com.heliosapm.tsdblite.metric.Trace.java (fills tag maps used to build and serialize metric traces)
@SuppressWarnings("javadoc") public static void main(String[] args) { log("Trace Test"); Map<String, String> tags = new HashMap<String, String>(4); tags.put("host", "localhost"); tags.put("app", "test"); tags.put("cpu", "" + 1); tags.put("type", "combined"); final Trace trace = new Trace("sys.cpu", tags, false, 34, -1, System.currentTimeMillis()); log("toString: " + trace); String json = JSON.serializeToString(trace); log("JSON: " + json); final Trace t = JSON.parseToObject(json, Trace.class); log("fromJson: " + t); log("====================================="); final Trace[] traces = new Trace[Constants.CORES]; final Random r = new Random(System.currentTimeMillis()); for (int i = 0; i < Constants.CORES; i++) { tags = new HashMap<String, String>(4); tags.put("host", "localhost"); tags.put("app", "test"); tags.put("cpu", "" + i); tags.put("type", "combined"); traces[i] = new Trace("sys.cpu", tags, false, Math.abs(r.nextInt(100)), -1, System.currentTimeMillis()); log("toString:" + traces[i]); }/* w ww.jav a2s .co m*/ json = JSON.serializeToString(traces); log("JSON: " + json); Trace[] ts = JSON.parseToObject(json, Trace[].class); for (Trace x : ts) { log("fromJson: " + x); } }
From source file:com.kappaware.logtrawler.Main.java (indexes AgentHandler instances in a Map keyed by agent name)
@SuppressWarnings("static-access") static public void main(String[] argv) throws Throwable { Config config;//from w w w. ja va 2 s .co m Options options = new Options(); options.addOption(OptionBuilder.hasArg().withArgName("configFile").withLongOpt("config-file") .withDescription("JSON configuration file").create("c")); options.addOption(OptionBuilder.hasArg().withArgName("folder").withLongOpt("folder") .withDescription("Folder to monitor").create("f")); options.addOption(OptionBuilder.hasArg().withArgName("exclusion").withLongOpt("exclusion") .withDescription("Exclusion regex").create("x")); options.addOption(OptionBuilder.hasArg().withArgName("adminEndpoint").withLongOpt("admin-endpoint") .withDescription("Endpoint for admin REST").create("e")); options.addOption(OptionBuilder.hasArg().withArgName("outputFlow").withLongOpt("output-flow") .withDescription("Target to post result on").create("o")); options.addOption(OptionBuilder.hasArg().withArgName("hostname").withLongOpt("hostname") .withDescription("This hostname").create("h")); options.addOption(OptionBuilder.withLongOpt("displayDot").withDescription("Display Dot").create("d")); options.addOption(OptionBuilder.hasArg().withArgName("mimeType").withLongOpt("mime-type") .withDescription("Valid MIME type").create("m")); options.addOption(OptionBuilder.hasArg().withArgName("allowedAdmin").withLongOpt("allowedAdmin") .withDescription("Allowed admin network").create("a")); options.addOption(OptionBuilder.hasArg().withArgName("configFile").withLongOpt("gen-config-file") .withDescription("Generate JSON configuration file").create("g")); options.addOption(OptionBuilder.hasArg().withArgName("maxBatchSize").withLongOpt("max-batch-size") .withDescription("Max JSON batch (array) size").create("b")); CommandLineParser clParser = new BasicParser(); CommandLine line; String configFile = null; try { // parse the command line argument line = clParser.parse(options, argv); if (line.hasOption("c")) { configFile = line.getOptionValue("c"); config = Json.fromJson(Config.class, new BufferedReader(new InputStreamReader(new FileInputStream(configFile)))); } else { config = new Config(); } if (line.hasOption("f")) { String[] fs = line.getOptionValues("f"); // Get the first agent (Create it if needed) if (config.getAgents() == null || config.getAgents().size() == 0) { Config.Agent agent = new Config.Agent("default"); config.addAgent(agent); } Config.Agent agent = config.getAgents().iterator().next(); for (String f : fs) { agent.addFolder(new Config.Agent.Folder(f, false)); } } if (line.hasOption("e")) { String e = line.getOptionValue("e"); config.setAdminEndpoint(e); } if (line.hasOption("o")) { String[] es = line.getOptionValues("o"); if (config.getAgents() != null) { for (Agent agent : config.getAgents()) { for (String s : es) { agent.addOuputFlow(s); } } } } if (line.hasOption("h")) { String e = line.getOptionValue("h"); config.setHostname(e); } if (line.hasOption("x")) { if (config.getAgents() != null) { for (Agent agent : config.getAgents()) { if (agent.getFolders() != null) { for (Folder folder : agent.getFolders()) { String[] exs = line.getOptionValues("x"); for (String ex : exs) { folder.addExcludedPath(ex); } } } } } } if (line.hasOption("m")) { if (config.getAgents() != null) { for (Agent agent : config.getAgents()) { String[] exs = line.getOptionValues("m"); for (String ex : exs) { agent.addLogMimeType(ex); } } } } if (line.hasOption("a")) { String[] exs = line.getOptionValues("a"); for (String ex : exs) { config.addAdminAllowedNetwork(ex); } } if 
(line.hasOption("d")) { config.setDisplayDot(true); } if (line.hasOption("b")) { Integer i = getIntegerParameter(line, "b"); if (config.getAgents() != null) { for (Agent agent : config.getAgents()) { agent.setOutputMaxBatchSize(i); } } } config.setDefault(); if (line.hasOption("g")) { String fileName = line.getOptionValue("g"); PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(fileName, false))); out.println(Json.toJson(config, true)); out.flush(); out.close(); System.exit(0); } } catch (ParseException exp) { // oops, something went wrong usage(options, exp.getMessage()); return; } try { // Check config if (config.getAgents() == null || config.getAgents().size() < 1) { throw new ConfigurationException("At least one folder to monitor must be provided!"); } Map<String, AgentHandler> agentHandlerByName = new HashMap<String, AgentHandler>(); for (Config.Agent agent : config.getAgents()) { agentHandlerByName.put(agent.getName(), new AgentHandler(agent)); } if (!Utils.isNullOrEmpty(config.getAdminEndpoint())) { new AdminServer(config, agentHandlerByName); } } catch (ConfigurationException e) { log.error(e.toString()); System.exit(1); } catch (Throwable t) { log.error("Error in main", t); System.exit(2); } }
From source file:com.netflix.aegisthus.tools.SSTableExport.java (registers a column-name type converter under a fixed key)
@SuppressWarnings("rawtypes") public static void main(String[] args) throws IOException { String usage = String.format("Usage: %s <sstable>", SSTableExport.class.getName()); CommandLineParser parser = new PosixParser(); try {/*from w w w . jav a 2s .co m*/ cmd = parser.parse(options, args); } catch (ParseException e1) { System.err.println(e1.getMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } if (cmd.getArgs().length != 1) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } Map<String, AbstractType> convertors = null; if (cmd.hasOption(COLUMN_NAME_TYPE)) { try { convertors = new HashMap<String, AbstractType>(); convertors.put(SSTableScanner.COLUMN_NAME_KEY, TypeParser.parse(cmd.getOptionValue(COLUMN_NAME_TYPE))); } catch (ConfigurationException e) { System.err.println(e.getMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } catch (SyntaxException e) { System.err.println(e.getMessage()); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } } Descriptor.Version version = null; if (cmd.hasOption(OPT_VERSION)) { version = new Descriptor.Version(cmd.getOptionValue(OPT_VERSION)); } if (cmd.hasOption(INDEX_SPLIT)) { String ssTableFileName; DataInput input = null; if ("-".equals(cmd.getArgs()[0])) { ssTableFileName = System.getProperty("aegisthus.file.name"); input = new DataInputStream(new BufferedInputStream(System.in, 65536 * 10)); } else { ssTableFileName = new File(cmd.getArgs()[0]).getAbsolutePath(); input = new DataInputStream( new BufferedInputStream(new FileInputStream(ssTableFileName), 65536 * 10)); } exportIndexSplit(ssTableFileName, input); } else if (cmd.hasOption(INDEX)) { String ssTableFileName = new File(cmd.getArgs()[0]).getAbsolutePath(); exportIndex(ssTableFileName); } else if (cmd.hasOption(ROWSIZE)) { String ssTableFileName = new File(cmd.getArgs()[0]).getAbsolutePath(); exportRowSize(ssTableFileName); } else if ("-".equals(cmd.getArgs()[0])) { if (version == null) { System.err.println("when streaming must supply file version"); HelpFormatter formatter = new HelpFormatter(); formatter.printHelp(usage, options); System.exit(1); } exportStream(version); } else { String ssTableFileName = new File(cmd.getArgs()[0]).getAbsolutePath(); FileInputStream fis = new FileInputStream(ssTableFileName); InputStream inputStream = new DataInputStream(new BufferedInputStream(fis, 65536 * 10)); long end = -1; if (cmd.hasOption(END)) { end = Long.valueOf(cmd.getOptionValue(END)); } if (cmd.hasOption(OPT_COMP)) { CompressionMetadata cm = new CompressionMetadata( new BufferedInputStream(new FileInputStream(cmd.getOptionValue(OPT_COMP)), 65536), fis.getChannel().size()); inputStream = new CompressionInputStream(inputStream, cm); end = cm.getDataLength(); } DataInputStream input = new DataInputStream(inputStream); if (version == null) { version = Descriptor.fromFilename(ssTableFileName).version; } SSTableScanner scanner = new SSTableScanner(input, convertors, end, version); if (cmd.hasOption(OPT_MAX_COLUMN_SIZE)) { scanner.setMaxColSize(Long.parseLong(cmd.getOptionValue(OPT_MAX_COLUMN_SIZE))); } export(scanner); if (cmd.hasOption(OPT_MAX_COLUMN_SIZE)) { if (scanner.getErrorRowCount() > 0) { System.err.println(String.format("%d rows were too large", scanner.getErrorRowCount())); } } } }
From source file:es.upm.oeg.tools.rdfshapes.utils.CadinalityResultGenerator.java (maps each property cardinality to its percentage while exporting statistics to Excel)
public static void main(String[] args) throws Exception {
    String endpoint = "http://3cixty.eurecom.fr/sparql";
    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());
    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());
    DecimalFormat df = new DecimalFormat("0.0000");

    // Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");
    int currentExcelRow = 0;
    int classStartRow = 0;

    for (String clazz : classList) {
        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);
        String queryString = bindQueryString(individualCountQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }

        // Zero individuals means the class list and the endpoint do not match
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }

        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);

        queryString = bindQueryString(classPropertyQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));
        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");

        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {
                DescriptiveStatistics stats = new DescriptiveStatistics();
                String propertyURI = property.asResource().getURI();

                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;

                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);

                Map<String, String> litMap2 = new HashMap<>();
                Map<String, String> iriMap2 = ImmutableMap.of("class", clazz, "p", propertyURI);
                queryString = bindQueryString(propertyCardinalityQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap2, LITERAL_BINDINGS, litMap2));
                List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint,
                        ImmutableSet.of("card", "count"));

                int sum = 0;
                List<CardinalityCount> cardinalityList = new ArrayList<>();
                if (solnMaps.size() > 0) {
                    for (Map<String, RDFNode> soln : solnMaps) {
                        int count = soln.get("count").asLiteral().getInt();
                        int card = soln.get("card").asLiteral().getInt();
                        for (int i = 0; i < count; i++) {
                            stats.addValue(card);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(card, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                        sum += count;
                    }

                    // Check for zero cardinality instances
                    int count = individualCount - sum;
                    if (count > 0) {
                        for (int i = 0; i < count; i++) {
                            stats.addValue(0);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(0, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                    }
                }

                Map<Integer, Double> cardMap = new HashMap<>();
                for (CardinalityCount count : cardinalityList) {
                    cardMap.put(count.getCardinality(), count.getPrecentage());
                }

                XSSFCell instanceCountCell = propertyRow.createCell(2);
                instanceCountCell.setCellValue(individualCount);
                XSSFCell minCell = propertyRow.createCell(3);
                minCell.setCellValue(stats.getMin());
                XSSFCell maxCell = propertyRow.createCell(4);
                maxCell.setCellValue(stats.getMax());
                XSSFCell p1 = propertyRow.createCell(5);
                p1.setCellValue(stats.getPercentile(1));
                XSSFCell p99 = propertyRow.createCell(6);
                p99.setCellValue(stats.getPercentile(99));
                XSSFCell mean = propertyRow.createCell(7);
                mean.setCellValue(df.format(stats.getMean()));

                for (int i = 0; i < 21; i++) {
                    XSSFCell dataCell = propertyRow.createCell(8 + i);
                    Double percentage = cardMap.get(i);
                    if (percentage != null) {
                        dataCell.setCellValue(df.format(percentage));
                    } else {
                        dataCell.setCellValue(0);
                    }
                }
            }
        }

        // We have finished writing the properties of one class; now merge its cells
        int classEndRow = currentExcelRow - 1;
        if (classStartRow < classEndRow) {
            sheet.addMergedRegion(new CellRangeAddress(classStartRow, classEndRow, 0, 0));
        }
    }

    String filename = "3cixty.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}
From source file:org.ala.harvester.PpmlHarvester.java (passes connection parameters to a harvester via a Map)
/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);
    PpmlHarvester h = new PpmlHarvester();
    Repository r = (Repository) context.getBean("repository");
    h.setRepository(r);
    // set the connection params
    Map<String, String> connectParams = new HashMap<String, String>();
    connectParams.put("endpoint", "http://portphillipmarinelife.net.au/SpeciesMap/index");
    h.setConnectionParams(connectParams);
    h.start(PPML_INFOSOURCE_ID);
}
From source file:com.fusesource.customer.wssec.client.Main.java (assembles WSS4J security handler properties in a Map<String, Object>)
public static void main(String args[]) throws Exception {
    try {
        CommandLine cli = new PosixParser().parse(opts, args);
        timestamp = cli.hasOption("timestamp");
        encrypt = cli.hasOption("encrypt");
        sign = cli.hasOption("sign");
        usernameToken = cli.hasOption("username-token");
        passwordDigest = cli.hasOption("password-digest");
        user = cli.getOptionValue("user");
        pw = cli.getOptionValue("pw");
        disableCNCheck = !cli.hasOption("ecnc");
        if (cli.hasOption("help") || !(sign | encrypt | usernameToken | timestamp)) {
            printUsageAndExit();
        }
        if (sign) {
            sigCertAlias = cli.getOptionValue("sa");
            sigCertPw = cli.getOptionValue("spw");
            sigKsLoc = cli.getOptionValue("sk");
            sigKsPw = cli.getOptionValue("skpw");
            if (sigCertAlias == null || sigKsLoc == null || sigKsPw == null || sigCertPw == null) {
                printUsageAndExit(
                        "You must provide keystore, keystore password, cert alias and cert password for signing certificate");
            }
        }
        if (encrypt) {
            encCertAlias = cli.getOptionValue("ea");
            encKsLoc = cli.getOptionValue("ek");
            encKsPw = cli.getOptionValue("ekpw");
            if (encCertAlias == null || encKsLoc == null || encKsPw == null) {
                printUsageAndExit(
                        "You must provide keystore, keystore password, and cert alias for encryption certificate");
            }
        }
    } catch (ParseException ex) {
        printUsageAndExit();
    }

    // Here we set the truststore for the client - by trusting the CA (in the
    // truststore.jks file) we implicitly trust all services presenting certificates
    // signed by this CA.
    System.setProperty("javax.net.ssl.trustStore", "../certs/truststore.jks");
    System.setProperty("javax.net.ssl.trustStorePassword", "truststore");

    URL wsdl = new URL("https://localhost:8443/cxf/Customers?wsdl");

    // The demo certs provided with this example configure the server with a certificate
    // called 'fuse-esb'. As this probably won't match the fully-qualified domain
    // name of the machine you're running on, we need to disable Common Name matching
    // to allow the JVM runtime to happily resolve the WSDL for the server. Note that
    // we also have to do something similar on the CXF proxy itself (see below).
    if (disableCNCheck) {
        HttpsURLConnection.setDefaultHostnameVerifier(new HostnameVerifier() {
            public boolean verify(String string, SSLSession ssls) {
                return true;
            }
        });
    }

    // Initialise the bus
    Bus bus = SpringBusFactory.newInstance().createBus();
    SpringBusFactory.setDefaultBus(bus);

    // Define the properties to configure the WS Security Handler
    Map<String, Object> props = new HashMap<String, Object>();
    props.put(WSHandlerConstants.ACTION, getWSSecActions());

    // Specify the callback handler for passwords.
    PasswordCallback passwords = new PasswordCallback();
    props.put(WSHandlerConstants.PW_CALLBACK_REF, passwords);

    if (usernameToken) {
        passwords.addUser(user, pw);
        props.put(WSHandlerConstants.USER, user);
        props.put(WSHandlerConstants.PASSWORD_TYPE, passwordDigest ? "PasswordDigest" : "PasswordText");
    }
    if (encrypt) {
        props.put(WSHandlerConstants.ENCRYPTION_USER, encCertAlias);
        props.put(WSHandlerConstants.ENC_PROP_REF_ID, "encProps");
        props.put("encProps", merlinCrypto(encKsLoc, encKsPw, encCertAlias));
        props.put(WSHandlerConstants.ENC_KEY_ID, "IssuerSerial");
        props.put(WSHandlerConstants.ENCRYPTION_PARTS, TIMESTAMP_AND_BODY);
    }
    if (sign) {
        props.put(WSHandlerConstants.SIGNATURE_USER, sigCertAlias);
        props.put(WSHandlerConstants.SIG_PROP_REF_ID, "sigProps");
        props.put("sigProps", merlinCrypto(sigKsLoc, sigKsPw, sigCertAlias));
        props.put(WSHandlerConstants.SIG_KEY_ID, "DirectReference");
        props.put(WSHandlerConstants.SIGNATURE_PARTS, TIMESTAMP_AND_BODY);
        passwords.addUser(sigCertAlias, sigCertPw);
    }

    // Here we add the WS Security interceptor to perform security processing
    // on the outgoing SOAP messages. Also, we configure a logging interceptor
    // to log the message payload for inspection.
    bus.getOutInterceptors().add(new WSS4JOutInterceptor(props));
    bus.getOutInterceptors().add(new LoggingOutInterceptor());

    CustomerService svc = new CustomerService_Service(wsdl).getPort(
            new QName("http://demo.fusesource.com/wsdl/CustomerService/", "SOAPOverHTTP"),
            CustomerService.class);

    // As above, the demo server certificate probably won't match this machine's
    // fully-qualified domain name, so disable Common Name matching here too to
    // allow the CXF runtime to happily invoke on the server.
    if (disableCNCheck) {
        HTTPConduit httpConduit = (HTTPConduit) ClientProxy.getClient(svc).getConduit();
        TLSClientParameters tls = new TLSClientParameters();
        tls.setDisableCNCheck(true);
        httpConduit.setTlsClientParameters(tls);
    }

    System.out.println("Looking up the customer...");

    // Here's the part where we invoke on the web service.
    Customer c = svc.lookupCustomer("007");

    System.out.println("Got customer " + c.getFirstName());
}
From source file:de.uni_rostock.goodod.checker.CheckerApp.java (redirects ignored ontology imports via an IRI-to-IRI map)
public static void main(String[] args) throws OWLOntologyCreationException {
    config = Configuration.getConfiguration(args);
    String bioTopVariantA = "biotoplite_group_A_TEST.owl";
    String bioTopVariantB = "biotoplite_group_B_TEST.owl";
    String repoRoot = config.getString("repositoryRoot");
    File commonBioTopF = new File(repoRoot + File.separator + config.getString("bioTopLiteSource"));
    String groupAFile = repoRoot + File.separator + "Results" + File.separator + "GruppeA" + File.separator
            + bioTopVariantA;
    String groupBFile = repoRoot + File.separator + "Results" + File.separator + "GruppeB" + File.separator
            + bioTopVariantB;
    String testFile = config.getString("testDescription");
    IRI bioTopIRI = IRI.create("http://purl.org/biotop/biotoplite.owl");
    SimpleIRIMapper bioTopLiteMapper = new SimpleIRIMapper(bioTopIRI, IRI.create(commonBioTopF));
    SimpleIRIMapper variantMapperA = new SimpleIRIMapper(
            IRI.create("http://purl.org/biotop/biotoplite_group_A_TEST.owl"),
            IRI.create(new File(groupAFile)));
    SimpleIRIMapper variantMapperB = new SimpleIRIMapper(
            IRI.create("http://purl.org/biotop/biotoplite_group_B_TEST.owl"),
            IRI.create(new File(groupBFile)));

    // logger.info("Loading ontology " + testFile + ".");
    OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
    manager.addIRIMapper(variantMapperA);
    manager.addIRIMapper(variantMapperB);
    manager.addIRIMapper(bioTopLiteMapper);
    FileDocumentSource source = new FileDocumentSource(new File(testFile));
    OWLOntology ontology = null;
    try {
        ontology = manager.loadOntologyFromOntologyDocument(source);
    } catch (Throwable e) {
        logger.fatal("Loading failed", e);
        System.exit(1);
    }

    org.semanticweb.HermiT.Configuration reasonerConfig = new org.semanticweb.HermiT.Configuration();
    reasonerConfig.throwInconsistentOntologyException = false;
    // ReasonerProgressMonitor monitor = new ConsoleProgressMonitor();
    reasonerConfig.existentialStrategyType = ExistentialStrategyType.INDIVIDUAL_REUSE;
    // reasonerConfig.reasonerProgressMonitor = monitor;
    reasonerConfig.tableauMonitorType = TableauMonitorType.NONE;
    // reasonerConfig.individualTaskTimeout = 10000;
    Reasoner reasoner = new Reasoner(reasonerConfig, ontology);
    reasoner.classifyClasses();
    Set<OWLClass> before = reasoner.getUnsatisfiableClasses()
            .getEntitiesMinus(manager.getOWLDataFactory().getOWLNothing());
    // logger.info("Found " + before.size() + " inconsistent classes before import change.");
    logger.debug(before);
    reasoner.dispose();
    reasoner = null;
    manager.removeOntology(ontology);
    ontology = null;

    Map<IRI, IRI> importMap = new HashMap<IRI, IRI>();
    OWLOntologyLoaderConfiguration interimConfig = new OWLOntologyLoaderConfiguration();
    for (String str : config.getStringArray("ignoredImports")) {
        IRI ignoredIRI = IRI.create(str);
        importMap.put(ignoredIRI, bioTopIRI);
        interimConfig = interimConfig.addIgnoredImport(ignoredIRI);
    }
    interimConfig = interimConfig.setMissingImportHandlingStrategy(MissingImportHandlingStrategy.SILENT);
    try {
        ontology = manager.loadOntologyFromOntologyDocument(source, interimConfig);
    } catch (Throwable e) {
        logger.fatal("Loading failed", e);
        System.exit(1);
    }
    BasicImportingNormalizerFactory n = new BasicImportingNormalizerFactory(importMap, interimConfig);
    n.normalize(ontology);

    reasoner = new Reasoner(reasonerConfig, ontology);
    reasoner.classifyClasses();
    Set<OWLClass> after = reasoner.getUnsatisfiableClasses()
            .getEntitiesMinus(manager.getOWLDataFactory().getOWLNothing());
    // logger.info("Found " + after.size() + " inconsistent classes after import change.");
    logger.debug(after);

    /*
     * We need some tidying afterwards. The after set can contain
     * inconsistent classes that are inconsistent only because, in the new
     * import, they are subclasses of a class that was already inconsistent before.
     * Hence we remove them from the after set.
     */
    for (OWLClass c : before) {
        Set<OWLClass> subclasses = SubClassCollector.collect(c, manager.getImportsClosure(ontology));
        for (OWLClass subC : subclasses) {
            if (after.contains(subC) && !before.contains(subC)) {
                after.remove(subC);
            }
        }
    }
    int difference = before.size() - after.size();
    if (0 == difference) {
        logger.info(testFile + ": OK");
    } else {
        logger.warn(testFile + ": Import change is not neutral to inconsistencies (" + before.size() + '/'
                + after.size() + ")");
    }
}