List of usage examples for java.io.PrintStream.print
public void print(Object obj)
From source file: org.hyperic.hq.product.util.PluginDumper.java
private void dumpMetrics(String outputDir, String filename, boolean asXML) throws IOException {
  final PrintStream os = openFile(outputDir, filename);
  final String pluginIndent = " ";
  final String metricsIndent = pluginIndent + " ";
  final String metricIndent = metricsIndent + " ";
  final String pluginEnd = pluginIndent + "</plugin>";
  final String metricsEnd = metricsIndent + "</metrics>";
  HashMap typeMap = new HashMap();
  if (asXML) {
    os.println(XML_VERSION);
    os.println("<hq>");
  }
  for (int n = 0; n < this.pPlugins.length; n++) {
    ProductPlugin pp = this.pPlugins[n];
    String productName = pp.getName();
    TypeInfo[] types = pp.getTypes();
    MeasurementPlugin mp;
    if (asXML) {
      os.println(pluginIndent + "<plugin name =\"" + productName + "\">");
    } else {
      os.println("\n" + productName + " plugin:");
    }
    if ((types == null) || (types.length == 0)) {
      if (asXML) {
        os.println(pluginEnd);
      } else {
        os.println(" [No types defined]");
      }
      continue;
    }
    for (int i = 0; i < types.length; i++) {
      TypeInfo type = types[i];
      String name = type.getName();
      ServerTypeInfo server = null;
      String typeName = TYPES[type.getType()];
      String indent = TYPE_INDENT[type.getType()];
      String parent = "";
      String parentType = "";
      String[] platforms = null;
      switch (type.getType()) {
      case TypeInfo.TYPE_SERVER:
        server = (ServerTypeInfo) type;
        platforms = server.getValidPlatformTypes();
        if (platforms != null) {
          parent = ArrayUtil.toString(platforms);
          parentType = "platform";
        }
        break;
      case TypeInfo.TYPE_SERVICE:
        server = ((ServiceTypeInfo) type).getServerTypeInfo();
        if (server != null) {
          if (server.isVirtual()) {
            platforms = server.getValidPlatformTypes();
            if (platforms != null) {
              parent = ArrayUtil.toString(platforms);
              parentType = "platform";
            }
          } else {
            parent = server.getName();
            parentType = "server";
          }
        }
        break;
      }
      MeasurementInfo[] metrics = null;
      mp = this.ppm.getMeasurementPlugin(name);
      if (mp != null) {
        metrics = mp.getMeasurements(types[i]);
      }
      if (asXML) {
        // fold multiple types with the same name
        // XXX could/should include platforms in <metrics>
        if (typeMap.put(name, type) != null) {
          continue;
        }
        os.println("\n" + metricsIndent + "<metrics type=\"" + typeName + "\" " + "name=\"" + name + "\" "
            + "parent=\"" + parent + "\" " + "parentType=\"" + parentType + "\">");
      } else {
        os.print("\n" + indent + "'" + name + "' " + typeName);
        if (server != null) {
          String pTypes = ArrayUtil.toString(server.getValidPlatformTypes());
          os.print(" ");
          os.print(pTypes);
          if (metrics != null) {
            int numOn = getNumDefaultOn(metrics);
            os.print(" [" + metrics.length + " metrics" + ", " + numOn + " default on]");
          }
        }
        os.println("");
      }
      if ((metrics == null) || (metrics.length == 0)) {
        if (asXML) {
          os.println(metricsEnd);
        } else {
          os.println(indent + "[No metrics defined]");
        }
        continue;
      }
      for (int j = 0; j < metrics.length; j++) {
        if (asXML) {
          os.println(metrics[j].toXML(metricIndent));
        } else {
          os.println(indent + metricString(metrics[j]));
        }
      }
      if (asXML) {
        os.println(metricsEnd);
      }
    }
    if (asXML) {
      os.println(pluginEnd);
    }
  }
  if (asXML) {
    os.println("</hq>");
  }
}
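The dumper above builds each report line incrementally: print(...) appends fragments (name, platform list, metric counts) and println(...) terminates the line. A minimal, self-contained sketch of that idiom; the class name, output file, and metric names are illustrative, not part of the Hyperic API:

import java.io.IOException;
import java.io.PrintStream;

public class ReportDemo {
  public static void main(String[] args) throws IOException {
    // print(...) appends without a newline; println(...) ends the line.
    try (PrintStream os = new PrintStream("report.txt", "UTF-8")) {
      String[] metrics = { "Availability", "Load", "Uptime" };
      os.print("demo plugin:");                      // start the line
      os.print(" [" + metrics.length + " metrics]"); // append to it
      os.println();                                  // terminate it
      for (String m : metrics) {
        os.println("  " + m);
      }
    }
  }
}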
From source file: com.opengamma.analytics.financial.model.volatility.local.LocalVolatilityPDEGreekCalculator.java
/**
 * Runs both forward and backwards PDE solvers, and produces delta and gamma (plus the dual versions, i.e. with
 * respect to strike) against strike and spot, for the given expiry and strike, using the provided local volatility
 * (i.e. overriding the one calculated from the fitted implied volatility surface).
 * @param ps Print Stream
 * @param expiry the expiry of the test option
 * @param strike the strike of the test option
 * @param localVol the local volatility
 */
public void deltaAndGamma(final PrintStream ps, final double expiry, final double strike,
    final LocalVolatilitySurfaceStrike localVol) {
  final ForwardCurve forwardCurve = _marketData.getForwardCurve();
  final double forward = forwardCurve.getForward(expiry);
  final double shift = 1e-2;
  final double maxForward = 3.5 * forward;
  final double maxProxyDelta = 1.5;

  final PDEFullResults1D pdeRes = runForwardPDESolver(forwardCurve, localVol, _isCall, _theta, expiry,
      maxProxyDelta, _timeSteps, _spaceSteps, _timeGridBunching, _spaceGridBunching, 1.0);
  final PDEFullResults1D pdeResUp = runForwardPDESolver(forwardCurve.withFractionalShift(shift), localVol,
      _isCall, _theta, expiry, maxProxyDelta, _timeSteps, _spaceSteps, _timeGridBunching, _spaceGridBunching, 1.0);
  final PDEFullResults1D pdeResDown = runForwardPDESolver(forwardCurve.withFractionalShift(-shift), localVol,
      _isCall, _theta, expiry, maxProxyDelta, _timeSteps, _spaceSteps, _timeGridBunching, _spaceGridBunching, 1.0);

  final int n = pdeRes.getNumberSpaceNodes();

  ps.println("Result of running Forward PDE solver - this gives you a grid of prices at expiries and strikes for a spot "
      + "and forward curve. Dual delta and gamma are calculated by finite difference on the PDE grid. Spot delta and "
      + "gamma are calculated by ");
  ps.println("Strike\tVol\tBS Delta\tDelta\tBS Dual Delta\tDual Delta\tBS Gamma\tGamma\tBS Dual Gamma\tDual Gamma");
  // \tsurface delta\tsurface gamma\t surface cross gamma\tmodel dg");

  final double minM = Math.exp(-1.0 * Math.sqrt(expiry));
  final double maxM = 1.0 / minM;
  for (int i = 0; i < n; i++) {
    final double m = pdeRes.getSpaceValue(i);
    if (m > minM && m < maxM) { // only print rows whose moneyness lies inside (minM, maxM)
      final double k = m * forward;
      final double mPrice = pdeRes.getFunctionValue(i);
      double impVol = 0;
      try {
        impVol = BlackFormulaRepository.impliedVolatility(mPrice, 1.0, m, expiry, _isCall);
      } catch (final Exception e) {
        // leave impVol at 0 if the implied-volatility root find fails
      }
      final double bsDelta = BlackFormulaRepository.delta(forward, k, expiry, impVol, _isCall);
      final double bsDualDelta = BlackFormulaRepository.dualDelta(forward, k, expiry, impVol, _isCall);
      final double bsGamma = BlackFormulaRepository.gamma(forward, k, expiry, impVol);
      final double bsDualGamma = BlackFormulaRepository.dualGamma(forward, k, expiry, impVol);
      final double modelDD = pdeRes.getFirstSpatialDerivative(i);
      // i.e. the delta if the moneyness-parameterised local vol surface were invariant to forward
      final double fixedSurfaceDelta = mPrice - m * modelDD;
      final double surfaceDelta = (pdeResUp.getFunctionValue(i) - pdeResDown.getFunctionValue(i)) / 2 / forward / shift;
      final double modelDelta = fixedSurfaceDelta + forward * surfaceDelta;
      final double modelDG = pdeRes.getSecondSpatialDerivative(i) / forward;
      final double crossGamma = (pdeResUp.getFirstSpatialDerivative(i) - pdeResDown.getFirstSpatialDerivative(i)) / 2 / forward / shift;
      final double surfaceGamma = (pdeResUp.getFunctionValue(i) + pdeResDown.getFunctionValue(i)
          - 2 * pdeRes.getFunctionValue(i)) / forward / shift / shift;
      final double modelGamma = 2 * surfaceDelta + surfaceGamma - 2 * m * crossGamma + m * m * modelDG;

      ps.println(k + "\t" + impVol + "\t" + bsDelta + "\t" + modelDelta + "\t" + bsDualDelta + "\t" + modelDD
          + "\t" + bsGamma + "\t" + modelGamma + "\t" + bsDualGamma + "\t" + modelDG);
      // + "\t" + 2 * surfaceDelta + "\t" + surfaceGamma + "\t" + -2 * m * crossGamma + "\t" + m * m * modelDG);
    }
  }
  ps.print("\n");

  // Now run the backwards solver and get delta and gamma off the grid
  ps.println("Result of running backwards PDE solver - this gives you a set of prices at different spot levels for a"
      + " single expiry and strike. Delta and gamma are calculated by finite difference on the grid");
  ps.println("Spot\tVol\tBS Delta\tDelta\tBS Gamma\tGamma");

  PDEResults1D res = runBackwardsPDESolver(strike, localVol, _isCall, _theta, expiry, maxForward, _timeSteps,
      _spaceSteps, _timeGridBunching, _spaceGridBunching, forward);
  for (int i = 0; i < n; i++) {
    final double price = res.getFunctionValue(i);
    final double fwd = res.getGrid().getSpaceNode(i);
    double impVol = 0;
    try {
      impVol = BlackFormulaRepository.impliedVolatility(price, fwd, strike, expiry, _isCall);
    } catch (final Exception e) {
      // leave impVol at 0 if the implied-volatility root find fails
    }
    final double bsDelta = BlackFormulaRepository.delta(fwd, strike, expiry, impVol, _isCall);
    final double bsGamma = BlackFormulaRepository.gamma(fwd, strike, expiry, impVol);
    final double modelDelta = res.getFirstSpatialDerivative(i);
    final double modelGamma = res.getSecondSpatialDerivative(i);
    ps.println(fwd + "\t" + impVol + "\t" + bsDelta + "\t" + modelDelta + "\t" + bsGamma + "\t" + modelGamma);
  }
  ps.print("\n");

  // finally run the backwards PDE solver 100 times with different strikes, interpolating to get vol, delta and
  // gamma at the forward
  final int xIndex = res.getGrid().getLowerBoundIndexForSpace(forward);
  final double actForward = res.getSpaceValue(xIndex);
  final double f1 = res.getSpaceValue(xIndex);
  final double f2 = res.getSpaceValue(xIndex + 1);
  final double w = (f2 - forward) / (f2 - f1);
  ps.println("True forward: " + forward + ", grid forward: " + actForward);
  ps.println("Result of running 100 backwards PDE solvers all with different strikes. Delta and gamma for each strike"
      + " is calculated from finite difference on the grid");
  ps.println("Strike\tVol\tDelta\tGamma");
  for (int i = 0; i < 100; i++) {
    final double k = forward * (0.3 + 2.7 * i / 99.0);
    res = runBackwardsPDESolver(k, localVol, _isCall, _theta, expiry, maxForward, _timeSteps, _spaceSteps,
        _timeGridBunching, _spaceGridBunching, forward);
    double vol = 0;
    try {
      final double vol1 = BlackFormulaRepository.impliedVolatility(res.getFunctionValue(xIndex), f1, k, expiry, _isCall);
      final double vol2 = BlackFormulaRepository.impliedVolatility(res.getFunctionValue(xIndex + 1), f2, k, expiry, _isCall);
      vol = w * vol1 + (1 - w) * vol2;
    } catch (final Exception e) {
      // leave vol at 0 if either implied-volatility root find fails
    }
    final double modelDelta = w * res.getFirstSpatialDerivative(xIndex) + (1 - w) * res.getFirstSpatialDerivative(xIndex + 1);
    final double modelGamma = w * res.getSecondSpatialDerivative(xIndex) + (1 - w) * res.getSecondSpatialDerivative(xIndex + 1);
    ps.println(k + "\t" + vol + "\t" + modelDelta + "\t" + modelGamma);
  }
}
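The PrintStream usage here is a tab-separated table: one println for the header row, one per data row, and a bare print("\n") as a separator between tables, so the output pastes straight into a spreadsheet. A minimal sketch of that layout with placeholder numbers (the strikes and greeks below are made up, not produced by the calculator):

import java.io.PrintStream;

public class TsvDemo {
  public static void main(String[] args) {
    PrintStream ps = System.out;
    ps.println("Strike\tVol\tDelta\tGamma"); // header row
    double forward = 100.0;
    for (int i = 0; i < 5; i++) {
      double k = forward * (0.8 + 0.1 * i);        // hypothetical strikes
      double vol = 0.2, delta = 0.5, gamma = 0.01; // placeholder values
      ps.println(k + "\t" + vol + "\t" + delta + "\t" + gamma);
    }
    ps.print("\n"); // blank separator line between tables
  }
}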
From source file: eu.udig.omsbox.core.OmsScriptExecutor.java
/**
 * Execute an OMS script.
 *
 * @param script the script file or the script string.
 * @param internalStream
 * @param errorStream
 * @param loggerLevelGui the log level as presented in the GUI, can be OFF|ON. This is not the OMS logger level,
 *          which instead has to be picked from the {@link OmsBoxConstants#LOGLEVELS_MAP}.
 * @param ramLevel the heap size to use in megabytes.
 * @return the process.
 * @throws Exception
 */
public Process exec(String script, final PrintStream internalStream, final PrintStream errorStream,
    String loggerLevelGui, String ramLevel) throws Exception {
  if (loggerLevelGui == null)
    loggerLevelGui = OmsBoxConstants.LOGLEVEL_GUI_OFF;

  File scriptFile = new File(script);
  if (!scriptFile.exists()) {
    // if the file doesn't exist, it is a script, let's put it into a file
    scriptFile = File.createTempFile("omsbox_script_", ".oms");
    BufferedWriter bw = null;
    try {
      bw = new BufferedWriter(new FileWriter(scriptFile));
      bw.write(script);
    } finally {
      if (bw != null) // guard against the writer never having been opened
        bw.close();
    }
  } else {
    // it is a script in a file, read it to log it
    BufferedReader br = null;
    StringBuilder sb = new StringBuilder();
    try {
      br = new BufferedReader(new FileReader(scriptFile));
      String line = null;
      while ((line = br.readLine()) != null) {
        sb.append(line).append("\n");
      }
    } finally {
      if (br != null)
        br.close();
    }
    script = sb.toString();
  }

  // tmp folder
  String tempdir = System.getProperty("java.io.tmpdir");
  File omsTmp = new File(tempdir + File.separator + "oms");
  if (!omsTmp.exists())
    omsTmp.mkdirs();

  List<String> arguments = new ArrayList<String>();
  arguments.add(javaFile);

  // ram usage
  String ramExpr = "-Xmx" + ramLevel + "m";
  arguments.add(ramExpr);

  // modules jars
  List<String> modulesJars = OmsModulesManager.getInstance().getModulesJars();
  StringBuilder sb = new StringBuilder();
  for (String moduleJar : modulesJars) {
    sb.append(File.pathSeparator).append(moduleJar);
  }
  String modulesJarsString = sb.toString().replaceFirst(File.pathSeparator, "");
  String resourcesFlag = "-Doms.sim.resources=\"" + modulesJarsString + "\"";
  arguments.add(resourcesFlag);

  // grass gisbase
  String grassGisbase = OmsBoxPlugin.getDefault().getGisbasePreference();
  if (grassGisbase != null && grassGisbase.length() > 0) {
    arguments.add("-D" + OmsBoxConstants.GRASS_ENVIRONMENT_GISBASE_KEY + "=" + grassGisbase);
  }
  String grassShell = OmsBoxPlugin.getDefault().getShellPreference();
  if (grassShell != null && grassShell.length() > 0) {
    arguments.add("-D" + OmsBoxConstants.GRASS_ENVIRONMENT_SHELL_KEY + "=" + grassShell);
  }

  // all the arguments
  arguments.add("-cp");
  arguments.add(classPath);
  arguments.add(CLI.class.getCanonicalName());
  arguments.add("-r ");
  arguments.add("\"" + scriptFile.getAbsolutePath() + "\"");

  String homeDir = System.getProperty("java.io.tmpdir");
  File homeFile = new File(homeDir);
  StringBuilder runSb = new StringBuilder();
  for (String arg : arguments) {
    runSb.append(arg).append(" ");
  }

  String[] args;
  if (Platform.getOS().equals(Platform.OS_WIN32)) {
    File tmpRunFile = new File(homeFile, "udig_spatialtoolbox.bat");
    FileUtils.writeStringToFile(tmpRunFile, "@echo off\n" + runSb.toString());
    args = new String[] { "cmd", "/c", tmpRunFile.getAbsolutePath() };
  } else {
    File tmpRunFile = new File(homeFile, "udig_spatialtoolbox.sh");
    FileUtils.writeStringToFile(tmpRunFile, runSb.toString());
    args = new String[] { "sh", tmpRunFile.getAbsolutePath() };
  }

  // {javaFile, ramExpr, resourcesFlag, "-cp", classPath,
  // CLI.class.getCanonicalName(), "-r",
  // scriptFile.getAbsolutePath()};
  ProcessBuilder processBuilder = new ProcessBuilder(args);
  // work in home
  // processBuilder.directory(homeFile);

  // environment
  Map<String, String> environment = processBuilder.environment();
  // environment.put("CLASSPATH", classPath);

  final Process process = processBuilder.start();
  internalStream.println(
      "Process started: " + new DateTime().toString(OmsBoxConstants.dateTimeFormatterYYYYMMDDHHMMSS));
  internalStream.println("");

  // command launched
  if (loggerLevelGui.equals(OmsBoxConstants.LOGLEVEL_GUI_ON)) {
    internalStream.println("------------------------------>8----------------------------");
    internalStream.println("Launching command: ");
    internalStream.println("------------------");
    List<String> command = processBuilder.command();
    for (String arg : command) {
      internalStream.print(arg);
      internalStream.print(" ");
    }
    internalStream.println("\n");
    internalStream.println("(you can run the above from command line, customizing the content)");
    internalStream.println("----------------------------------->8---------------------------------");
    internalStream.println("");

    // script run
    internalStream.println("Script run: ");
    internalStream.println("-----------");
    internalStream.println(script);
    internalStream.println("");
    internalStream.println("------------------------------>8----------------------------");
    internalStream.println("");

    // environment used
    internalStream.println("Environment used: ");
    internalStream.println("-----------------");
    Set<Entry<String, String>> entrySet = environment.entrySet();
    for (Entry<String, String> entry : entrySet) {
      internalStream.print(entry.getKey());
      internalStream.print(" =\t");
      internalStream.println(entry.getValue());
    }
    internalStream.println("------------------------------>8----------------------------");
    internalStream.println("");
  }
  internalStream.println("");
  isRunning = true;

  new Thread() {
    public void run() {
      BufferedReader br = null;
      try {
        InputStream is = process.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        br = new BufferedReader(isr);
        String line;
        while ((line = br.readLine()) != null) {
          internalStream.println(line);
        }
      } catch (Exception e) {
        e.printStackTrace();
        errorStream.println(e.getLocalizedMessage());
      } finally {
        if (br != null)
          try {
            br.close();
          } catch (IOException e) {
            e.printStackTrace();
          }
        isRunning = false;
        updateListeners();
      }
      internalStream.println("");
      internalStream.println("");
      internalStream.println(
          "Process finished: " + new DateTime().toString(OmsBoxConstants.dateTimeFormatterYYYYMMDDHHMMSS));
    }
  }.start();

  new Thread() {
    public void run() {
      BufferedReader br = null;
      try {
        InputStream is = process.getErrorStream();
        InputStreamReader isr = new InputStreamReader(is);
        br = new BufferedReader(isr);
        String line;
        while ((line = br.readLine()) != null) {
          /*
           * remove the ugly recurring geotools warnings. Not nice, but
           * at least users do not get confused.
           */
          if (ConsoleMessageFilter.doRemove(line)) {
            continue;
          }
          errorStream.println(line);
        }
      } catch (Exception e) {
        e.printStackTrace();
        errorStream.println(e.getLocalizedMessage());
      } finally {
        if (br != null)
          try {
            br.close();
          } catch (IOException e) {
            e.printStackTrace();
          }
      }
    }
  }.start();

  return process;
}
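The core PrintStream pattern in this executor is pumping a child process's stdout to a stream from a background thread, line by line. A stripped-down sketch under the assumption of a Unix-like system ("echo" is only an illustrative command; substitute your own):

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.PrintStream;

public class ProcessPumpDemo {
  public static void main(String[] args) throws Exception {
    final Process process = new ProcessBuilder("echo", "hello").start();
    final PrintStream internalStream = System.out;
    new Thread(() -> {
      try (BufferedReader br = new BufferedReader(
          new InputStreamReader(process.getInputStream()))) {
        String line;
        while ((line = br.readLine()) != null) {
          internalStream.println(line); // forward each output line as it arrives
        }
      } catch (Exception e) {
        e.printStackTrace();
      }
    }).start();
    process.waitFor();
  }
}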
From source file: org.apache.accumulo.core.file.rfile.bcfile.TFileDumper.java
/**
 * Dump information about TFile.
 *
 * @param file
 *          Path string of the TFile
 * @param out
 *          PrintStream to output the information.
 * @param conf
 *          The configuration object.
 * @throws IOException
 */
static public void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException {
  final int maxKeySampleLen = 16;
  Path path = new Path(file);
  FileSystem fs = path.getFileSystem(conf);
  long length = fs.getFileStatus(path).getLen();
  FSDataInputStream fsdis = fs.open(path);
  TFile.Reader reader = new TFile.Reader(fsdis, length, conf);
  try {
    LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
    int blockCnt = reader.readerBCF.getBlockCount();
    int metaBlkCnt = reader.readerBCF.metaIndex.index.size();
    properties.put("BCFile Version", reader.readerBCF.version.toString());
    properties.put("TFile Version", reader.tfileMeta.version.toString());
    properties.put("File Length", Long.toString(length));
    properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName());
    properties.put("Record Count", Long.toString(reader.getEntryCount()));
    properties.put("Sorted", Boolean.toString(reader.isSorted()));
    if (reader.isSorted()) {
      properties.put("Comparator", reader.getComparatorName());
    }
    properties.put("Data Block Count", Integer.toString(blockCnt));
    long dataSize = 0, dataSizeUncompressed = 0;
    if (blockCnt > 0) {
      for (int i = 0; i < blockCnt; ++i) {
        BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
        dataSize += region.getCompressedSize();
        dataSizeUncompressed += region.getRawSize();
      }
      properties.put("Data Block Bytes", Long.toString(dataSize));
      // compare compression names by value, not by reference
      if (!reader.readerBCF.getDefaultCompressionName().equals("none")) {
        properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed));
        properties.put("Data Block Compression Ratio",
            String.format("1:%.1f", (double) dataSizeUncompressed / dataSize));
      }
    }

    properties.put("Meta Block Count", Integer.toString(metaBlkCnt));
    long metaSize = 0, metaSizeUncompressed = 0;
    if (metaBlkCnt > 0) {
      Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values();
      boolean calculateCompression = false;
      for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) {
        MetaIndexEntry e = it.next();
        metaSize += e.getRegion().getCompressedSize();
        metaSizeUncompressed += e.getRegion().getRawSize();
        if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) {
          calculateCompression = true;
        }
      }
      properties.put("Meta Block Bytes", Long.toString(metaSize));
      if (calculateCompression) {
        properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed));
        properties.put("Meta Block Compression Ratio",
            String.format("1:%.1f", (double) metaSizeUncompressed / metaSize));
      }
    }
    properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize));
    long leftOverBytes = length - dataSize - metaSize;
    long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size();
    long metaIndexSize = leftOverBytes - miscSize;
    properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize));
    properties.put("Headers Etc Bytes", Long.toString(miscSize));

    // Now output the properties table.
    int maxKeyLength = 0;
    Set<Map.Entry<String, String>> entrySet = properties.entrySet();
    for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
      Map.Entry<String, String> e = it.next();
      if (e.getKey().length() > maxKeyLength) {
        maxKeyLength = e.getKey().length();
      }
    }
    for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
      Map.Entry<String, String> e = it.next();
      out.printf("%s : %s%n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue());
    }
    out.println();
    reader.checkTFileDataIndex();
    if (blockCnt > 0) {
      String blkID = "Data-Block";
      int blkIDWidth = Align.calculateWidth(blkID, blockCnt);
      int blkIDWidth2 = Align.calculateWidth("", blockCnt);
      String offset = "Offset";
      int offsetWidth = Align.calculateWidth(offset, length);
      String blkLen = "Length";
      int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10);
      String rawSize = "Raw-Size";
      int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10);
      String records = "Records";
      int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10);
      String endKey = "End-Key";
      int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5);

      out.printf("%s %s %s %s %s %s%n", Align.format(blkID, blkIDWidth, Align.CENTER),
          Align.format(offset, offsetWidth, Align.CENTER), Align.format(blkLen, blkLenWidth, Align.CENTER),
          Align.format(rawSize, rawSizeWidth, Align.CENTER), Align.format(records, recordsWidth, Align.CENTER),
          Align.format(endKey, endKeyWidth, Align.LEFT));

      for (int i = 0; i < blockCnt; ++i) {
        BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
        TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i);
        out.printf("%s %s %s %s %s ",
            Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT),
            Align.format(region.getOffset(), offsetWidth, Align.LEFT),
            Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
            Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
            Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT));
        byte[] key = indexEntry.key;
        boolean asAscii = true;
        int sampleLen = Math.min(maxKeySampleLen, key.length);
        for (int j = 0; j < sampleLen; ++j) {
          byte b = key[j];
          if ((b < 32 && b != 9) || (b == 127)) {
            asAscii = false;
          }
        }
        if (!asAscii) {
          out.print("0X");
          for (int j = 0; j < sampleLen; ++j) {
            byte b = key[j]; // index by j, the position within the key sample
            out.printf("%X", b);
          }
        } else {
          out.print(new String(key, 0, sampleLen));
        }
        if (sampleLen < key.length) {
          out.print("...");
        }
        out.println();
      }
    }
    out.println();
    if (metaBlkCnt > 0) {
      String name = "Meta-Block";
      int maxNameLen = 0;
      Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index.entrySet();
      for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
        Map.Entry<String, MetaIndexEntry> e = it.next();
        if (e.getKey().length() > maxNameLen) {
          maxNameLen = e.getKey().length();
        }
      }
      int nameWidth = Math.max(name.length(), maxNameLen);
      String offset = "Offset";
      int offsetWidth = Align.calculateWidth(offset, length);
      String blkLen = "Length";
      int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10);
      String rawSize = "Raw-Size";
      int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10);
      String compression = "Compression";
      int compressionWidth = compression.length();
      out.printf("%s %s %s %s %s%n", Align.format(name, nameWidth, Align.CENTER),
          Align.format(offset, offsetWidth, Align.CENTER), Align.format(blkLen, blkLenWidth, Align.CENTER),
          Align.format(rawSize, rawSizeWidth, Align.CENTER),
          Align.format(compression, compressionWidth, Align.LEFT));

      for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
        Map.Entry<String, MetaIndexEntry> e = it.next();
        String blkName = e.getValue().getMetaName();
        BlockRegion region = e.getValue().getRegion();
        String blkCompression = e.getValue().getCompressionAlgorithm().getName();
        out.printf("%s %s %s %s %s%n", Align.format(blkName, nameWidth, Align.LEFT),
            Align.format(region.getOffset(), offsetWidth, Align.LEFT),
            Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
            Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
            Align.format(blkCompression, compressionWidth, Align.LEFT));
      }
    }
  } finally {
    IOUtils.cleanup(LOG, reader, fsdis);
  }
}
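Two print idioms from this dumper are worth isolating: a two-pass aligned key/value table (first pass finds the widest key, second pass prints padded rows) and hex output of raw bytes via print plus printf. The Align helper is internal to the dumper, so this standalone sketch substitutes java.util.Formatter width specifiers as a rough equivalent:

import java.io.PrintStream;
import java.util.LinkedHashMap;
import java.util.Map;

public class AlignDemo {
  public static void main(String[] args) {
    PrintStream out = System.out;
    Map<String, String> properties = new LinkedHashMap<>();
    properties.put("File Length", "1048576");
    properties.put("Sorted", "true");
    int maxKeyLength = 0;
    for (String k : properties.keySet()) {
      maxKeyLength = Math.max(maxKeyLength, k.length());
    }
    for (Map.Entry<String, String> e : properties.entrySet()) {
      // "%-Ns" left-aligns the key in a fixed-width column
      out.printf("%-" + maxKeyLength + "s : %s%n", e.getKey(), e.getValue());
    }
    byte[] key = { 0x0A, (byte) 0xFF, 0x41 };
    out.print("0X"); // print(...) keeps the hex run on one line
    for (byte b : key) {
      out.printf("%02X", b);
    }
    out.println();
  }
}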
From source file: org.openmrs.module.sync.api.db.hibernate.HibernateSyncDAO.java
public void exportChildDB(String uuidForChild, OutputStream os) throws DAOException {
  PrintStream out = new PrintStream(os);
  Set<String> tablesToSkip = new HashSet<String>();
  {
    tablesToSkip.add("hl7_in_archive");
    tablesToSkip.add("hl7_in_queue");
    tablesToSkip.add("hl7_in_error");
    tablesToSkip.add("formentry_archive");
    tablesToSkip.add("formentry_queue");
    tablesToSkip.add("formentry_error");
    tablesToSkip.add("sync_class");
    tablesToSkip.add("sync_import");
    tablesToSkip.add("sync_record");
    tablesToSkip.add("sync_server");
    tablesToSkip.add("sync_server_class");
    tablesToSkip.add("sync_server_record");
    // TODO: figure out which other tables to skip
    // tablesToSkip.add("obs");
    // tablesToSkip.add("concept");
    // tablesToSkip.add("patient");
  }
  List<String> tablesToDump = new ArrayList<String>();
  Session session = sessionFactory.getCurrentSession();
  String schema = (String) session.createSQLQuery("SELECT schema()").uniqueResult();
  log.warn("schema: " + schema);
  // Get all tables that we'll need to dump
  {
    Query query = session.createSQLQuery(
        "SELECT tabs.table_name FROM INFORMATION_SCHEMA.TABLES tabs WHERE tabs.table_schema = '" + schema + "'");
    for (Object tn : query.list()) {
      String tableName = (String) tn;
      if (!tablesToSkip.contains(tableName.toLowerCase()))
        tablesToDump.add(tableName);
    }
  }
  log.warn("tables to dump: " + tablesToDump);

  String thisServerGuid = getGlobalProperty(SyncConstants.PROPERTY_SERVER_UUID);

  // Write the DDL header as mysqldump does
  {
    out.println("-- ------------------------------------------------------");
    out.println("-- Database dump to create an openmrs child server");
    out.println("-- Schema: " + schema);
    out.println("-- Parent GUID: " + thisServerGuid);
    out.println("-- Parent version: " + OpenmrsConstants.OPENMRS_VERSION);
    out.println("-- ------------------------------------------------------");
    out.println("");
    out.println("/*!40101 SET CHARACTER_SET_CLIENT=utf8 */;");
    out.println("/*!40101 SET NAMES utf8 */;");
    out.println("/*!40103 SET TIME_ZONE='+00:00' */;");
    out.println("/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;");
    out.println("/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;");
    out.println("/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;");
    out.println("/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;");
    out.println("/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;");
    out.println("/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;");
    out.println("/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;");
    out.println("/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;");
    out.println("");
  }
  try {
    // JDBC way of doing this
    // Connection conn = DriverManager.getConnection("jdbc:mysql://localhost/" + schema, "test", "test");
    Connection conn = sessionFactory.getCurrentSession().connection();
    try {
      Statement st = conn.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY);

      // Get the create database statement
      ResultSet rs = st.executeQuery("SHOW CREATE DATABASE " + schema);

      for (String tableName : tablesToDump) {
        out.println();
        out.println("--");
        out.println("-- Table structure for table `" + tableName + "`");
        out.println("--");
        out.println("DROP TABLE IF EXISTS `" + tableName + "`;");
        out.println("SET @saved_cs_client = @@character_set_client;");
        out.println("SET character_set_client = utf8;");
        rs = st.executeQuery("SHOW CREATE TABLE " + tableName);
        while (rs.next()) {
          out.println(rs.getString("Create Table") + ";");
        }
        out.println("SET character_set_client = @saved_cs_client;");
        out.println();
        {
          out.println("-- Dumping data for table `" + tableName + "`");
          out.println("LOCK TABLES `" + tableName + "` WRITE;");
          out.println("/*!40000 ALTER TABLE `" + tableName + "` DISABLE KEYS */;");
          boolean first = true;
          rs = st.executeQuery("select * from " + tableName);
          ResultSetMetaData md = rs.getMetaData();
          int numColumns = md.getColumnCount();
          int rowNum = 0;
          boolean insert = false;
          while (rs.next()) {
            if (rowNum == 0) {
              insert = true;
              out.print("INSERT INTO `" + tableName + "` VALUES ");
            }
            ++rowNum;
            if (first) {
              first = false;
            } else {
              out.print(", ");
            }
            if (rowNum % 20 == 0) {
              out.println();
            }
            out.print("(");
            for (int i = 1; i <= numColumns; ++i) {
              if (i != 1) {
                out.print(",");
              }
              if (rs.getObject(i) == null) {
                out.print("NULL");
              } else {
                switch (md.getColumnType(i)) {
                  case Types.VARCHAR:
                  case Types.CHAR:
                  case Types.LONGVARCHAR:
                    out.print("'");
                    out.print(rs.getString(i).replaceAll("\n", "\\\\n").replaceAll("'", "\\\\'"));
                    out.print("'");
                    break;
                  case Types.BIGINT:
                  case Types.DECIMAL:
                  case Types.NUMERIC:
                    out.print(rs.getBigDecimal(i));
                    break;
                  case Types.BIT:
                    out.print(rs.getBoolean(i));
                    break;
                  case Types.INTEGER:
                  case Types.SMALLINT:
                  case Types.TINYINT:
                    out.print(rs.getInt(i));
                    break;
                  case Types.REAL:
                  case Types.FLOAT:
                  case Types.DOUBLE:
                    out.print(rs.getDouble(i));
                    break;
                  case Types.BLOB:
                  case Types.VARBINARY:
                  case Types.LONGVARBINARY:
                    Blob blob = rs.getBlob(i);
                    out.print("'");
                    InputStream in = blob.getBinaryStream();
                    while (true) {
                      int b = in.read();
                      if (b < 0) {
                        break;
                      }
                      char c = (char) b;
                      if (c == '\'') {
                        out.print("\\'"); // backslash-escape embedded quotes so the INSERT stays parseable
                      } else {
                        out.print(c);
                      }
                    }
                    out.print("'");
                    break;
                  case Types.CLOB:
                    out.print("'");
                    out.print(rs.getString(i).replaceAll("\n", "\\\\n").replaceAll("'", "\\\\'"));
                    out.print("'");
                    break;
                  case Types.DATE:
                    out.print("'" + rs.getDate(i) + "'");
                    break;
                  case Types.TIMESTAMP:
                    out.print("'" + rs.getTimestamp(i) + "'");
                    break;
                  default:
                    throw new RuntimeException("TODO: handle type code " + md.getColumnType(i) + " (name "
                        + md.getColumnTypeName(i) + ")");
                }
              }
            }
            out.print(")");
          }
          if (insert) {
            out.println(";");
            insert = false;
          }
          out.println("/*!40000 ALTER TABLE `" + tableName + "` ENABLE KEYS */;");
          out.println("UNLOCK TABLES;");
          out.println();
        }
      }
    } finally {
      conn.close();
    }

    // Now we mark this as a child
    out.println("-- Now mark this as a child database");
    if (uuidForChild == null)
      uuidForChild = SyncUtil.generateUuid();
    out.println("update global_property set property_value = '" + uuidForChild + "' where property = '"
        + SyncConstants.PROPERTY_SERVER_UUID + "';");

    // Write the footer of the DDL script
    {
      out.println("/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;");
      out.println("/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;");
      out.println("/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;");
      out.println("/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;");
      out.println("/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;");
      out.println("/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;");
      out.println("/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;");
      out.println("/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;");
    }
    out.flush();
    out.close();
  } catch (IOException ex) {
    log.error("IOException", ex);
  } catch (SQLException ex) {
    log.error("SQLException", ex);
  }
}
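The row-dumping loop above relies on print(...) to assemble one multi-row INSERT statement from fragments, with quote escaping for string values. A minimal sketch of that fragment-building pattern without the JDBC plumbing; the table name and rows are hypothetical:

import java.io.PrintStream;

public class InsertRowDemo {
  public static void main(String[] args) {
    PrintStream out = System.out;
    String tableName = "person"; // hypothetical table
    Object[][] rows = { { 1, "O'Brien" }, { 2, "Smith" } };
    out.print("INSERT INTO `" + tableName + "` VALUES ");
    boolean first = true;
    for (Object[] row : rows) {
      if (first) { first = false; } else { out.print(", "); }
      out.print("(");
      out.print(row[0]);
      out.print(",'");
      // escape embedded quotes so the emitted SQL stays parseable
      out.print(row[1].toString().replace("'", "\\'"));
      out.print("')");
    }
    out.println(";");
  }
}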
From source file: gov.nasa.ensemble.dictionary.nddl.ParseInterpreter.java
public void writeActiveTimelineCompats(OutputStream oStrm) {
  PrintStream out = new PrintStream(oStrm);
  String actName;
  @SuppressWarnings("unused")
  String objrefName;
  String claimName;
  String mxVar;
  String[] mutexActs;

  // *** hack for test ***
  // exclusiveActsStatesMap2.get("MASTCAM_MOSAIC_GENERIC__mx__Arm_Unstow").add("Arm_Stationary");

  // debug printout of mutex mapping
  // PHM 12/07/2011 Will be empty for resource solving
  System.out.print("\n* MUTEX Mapping: *\n");
  for (String mutex : exclusiveActsStatesMap.keySet()) {
    mutexActs = mutex.split("__mx__");
    System.out.printf("(%s, %s)\t", mutexActs[0], mutexActs[1]);
    // out.print(mutex + ":\t");
    for (String state : exclusiveActsStatesMap.get(mutex)) {
      System.out.printf("%s ", state);
    }
    System.out.println();
  }
  // System.out.print("\n* MUTEX Mapping: Condition #2 *\n");
  // for (String mutex : exclusiveActsStatesMap2.keySet()) {
  //   mutexActs = mutex.split("__mx__");
  //   System.out.printf("(%s, %s)\t", mutexActs[0], mutexActs[1]);
  //   // out.print(mutex + ":\t");
  //   for (String state : exclusiveActsStatesMap2.get(mutex)) {
  //     System.out.printf("%s ", state);
  //   }
  //   System.out.println();
  // }
  System.out.print("\n* Mapping of states to AtEnd Effects *\n");
  for (String state : stateEndEffectsMap.keySet()) {
    System.out.printf("%s:\t", state);
    for (String val : stateEndEffectsMap.get(state)) {
      System.out.printf("%s ", val);
    }
    System.out.println();
  }
  System.out.println();
  // end of debug printout

  // handle active compats on dynamic object claims
  for (String objdef : objectDefNames) {
    mxVar = "x" + varN++;
    out.printf("\n%s::Assign_%s {\n" + " if (isSingleton(object)) {\n" + " if (scheduled == true) {\n"
        + " if (Enable_Active_Enforcement == true) {\n" + " if (subSolved == true) {\n"
        + " if (enforced == true) {\n" + " if (Enforce_%s_claim == true) {\n"
        + " if (myEnforce.Enforce_%s_claim == true) {\n\n" + " equals(object.active.%s_claim_MX %s);\n"
        + " neq(%s.state, MERGED);\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}\n\n",
        objdef, objdef, objdef, objdef, objdef, mxVar, mxVar);
  }

  for (EActivityDef activityDef : activityDefs) {
    actName = NDDLUtil.escape(activityDef.getName());
    // if ((activityDef.getExpansion() == null || activityDef.getExpansion().getSubActivities() == null)
    //     && (activityDef.getClaims() != null || exclusiveActsStatesMap.keySet() != null ||
    //         // exclusiveActsStatesMap2.keySet() != null ||
    //         actThresholdRequireMap.get(actName) != null || actThresholdEffectMap.get(actName) != null)) {
    if (!activityDef.getClaimableEffects().isEmpty() || actThresholdRequireMap.get(actName) != null
        || actThresholdEffectMap.get(actName) != null) {
      out.printf("%s::%s {\n" + " if (scheduled == true) {\n" + " if (Enable_Active_Enforcement == true) {\n"
          + " if (subSolved == true) {\n" + " if (enforced == true) {\n\n",
          activitySubsystemMap.get(actName), actName);
      // handle claims
      for (EClaimableEffect claim : activityDef.getClaimableEffects()) {
        claimName = NDDLUtil.escape(claim.getName());
        if (claimNames.contains(claimName)) {
          mxVar = "x" + varN++;
          out.printf("\t\t if (Enforce_%s == true) {\n" + "\t\t if (myEnforce.Enforce_%s == true) {\n"
              + "\t\t\t equals(Active_%s.%s_MX %s);\n" + "\t\t\t neq(%s.state, MERGED);\n\t\t }\n\t\t }\n\n",
              claimName, claimName, claimName, claimName, mxVar, mxVar);
        } else {
          System.err.print("\n* Undefined claim " + claimName + " in activity " + actName + " *\n\n");
        }
      }
      out.print(" }\n }\n }\n }\n}\n\n");
    }
  }
}
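Here PrintStream.printf drives code generation: a single format string holds the NDDL template and positional %s arguments fill in the object and guard-variable names. A compact, runnable sketch of that technique with a trimmed template; writeCompat, the "Arm" object, and the "x1" guard variable are hypothetical stand-ins, not ParseInterpreter API:

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.io.PrintStream;

public class CodegenDemo {
  static void writeCompat(OutputStream oStrm, String objdef, String mxVar) {
    PrintStream out = new PrintStream(oStrm);
    // one printf with positional arguments keeps the template readable
    out.printf("%s::Assign_%s {%n" + "  equals(object.active.%s_claim_MX %s);%n"
        + "  neq(%s.state, MERGED);%n" + "}%n", objdef, objdef, objdef, mxVar, mxVar);
    out.flush();
  }

  public static void main(String[] args) {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    writeCompat(buf, "Arm", "x1");
    System.out.print(buf.toString());
  }
}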
From source file: gov.nasa.ensemble.dictionary.nddl.ParseInterpreter.java
public void writeActiveCompats(OutputStream oStrm) {
  PrintStream out = new PrintStream(oStrm);
  String actName;
  @SuppressWarnings("unused")
  String objrefName;
  String claimName;
  String mxVar;
  String[] mutexActs;
  Integer dgN;
  String dgName;
  dgN = 1;

  // *** hack for test ***
  // exclusiveActsStatesMap2.get("MASTCAM_MOSAIC_GENERIC__mx__Arm_Unstow").add("Arm_Stationary");

  // debug printout of mutex mapping
  System.out.print("\n* MUTEX Mapping: *\n");
  for (String mutex : exclusiveActsStatesMap.keySet()) {
    mutexActs = mutex.split("__mx__");
    System.out.printf("(%s, %s)\t", mutexActs[0], mutexActs[1]);
    // out.print(mutex + ":\t");
    for (String state : exclusiveActsStatesMap.get(mutex)) {
      System.out.printf("%s ", state);
    }
    System.out.println();
  }
  // System.out.print("\n* MUTEX Mapping: Condition #2 *\n");
  // for (String mutex : exclusiveActsStatesMap2.keySet()) {
  //   mutexActs = mutex.split("__mx__");
  //   System.out.printf("(%s, %s)\t", mutexActs[0], mutexActs[1]);
  //   // out.print(mutex + ":\t");
  //   for (String state : exclusiveActsStatesMap2.get(mutex)) {
  //     System.out.printf("%s ", state);
  //   }
  //   System.out.println();
  // }
  System.out.print("\n* Mapping of states to AtEnd Effects *\n");
  for (String state : stateEndEffectsMap.keySet()) {
    System.out.printf("%s:\t", state);
    for (String val : stateEndEffectsMap.get(state)) {
      System.out.printf("%s ", val);
    }
    System.out.println();
  }
  System.out.println();
  // end of debug printout

  // handle active compats on dynamic object claims
  for (String objdef : objectDefNames) {
    mxVar = "x" + varN++;
    out.printf("\n%s::Assign_%s {\n" + " if (isSingleton(object)) {\n" + " if (scheduled == true) {\n"
        + " if (Enable_Active_Enforcement == true) {\n" + " if (subSolved == true) {\n"
        + " if (enforced == true) {\n" + " if (Enforce_%s_claim == true) {\n"
        + " if (myEnforce.Enforce_%s_claim == true) {\n\n" + " equals(object.active.%s_claim_MX %s);\n"
        + " neq(%s.state, MERGED);\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}\n\n",
        objdef, objdef, objdef, objdef, objdef, mxVar, mxVar);
  }

  for (EActivityDef activityDef : activityDefs) {
    actName = NDDLUtil.escape(activityDef.getName());
    // if ((activityDef.getExpansion() == null || activityDef.getExpansion().getSubActivities() == null)
    //     && (activityDef.getClaims() != null || exclusiveActsStatesMap.keySet() != null ||
    //         // exclusiveActsStatesMap2.keySet() != null ||
    //         actThresholdRequireMap.get(actName) != null || actThresholdEffectMap.get(actName) != null)) {
    if (!activityDef.getClaimableEffects().isEmpty() || exclusiveActsStatesMap.keySet() != null
        || actThresholdRequireMap.get(actName) != null || actThresholdEffectMap.get(actName) != null) {
      out.printf("%s::%s {\n" + " if (scheduled == true) {\n" + " if (Enable_Active_Enforcement == true) {\n"
          + " if (subSolved == true) {\n" + " if (enforced == true) {\n\n",
          activitySubsystemMap.get(actName), actName);
      // handle claims
      for (EClaimableEffect claim : activityDef.getClaimableEffects()) {
        claimName = NDDLUtil.escape(claim.getName());
        if (claimNames.contains(claimName)) {
          mxVar = "x" + varN++;
          out.printf("\t\t if (Enforce_%s == true) {\n" + "\t\t if (myEnforce.Enforce_%s == true) {\n"
              + "\t\t\t equals(Active_%s.%s_MX %s);\n" + "\t\t\t neq(%s.state, MERGED);\n\t\t }\n\t\t }\n\n",
              claimName, claimName, claimName, claimName, mxVar, mxVar);
        } else {
          System.err.print("\n* Undefined claim " + claimName + " in activity " + actName + " *\n\n");
        }
      }
      // handle enum state mutexes
      for (String mutex : exclusiveActsStatesMap.keySet()) {
        mutexActs = mutex.split("__mx__");
        if (actName.equals(mutexActs[0]) || actName.equals(mutexActs[1])) {
          mxVar = "x" + varN++;
          if (exclusiveActsStatesMap.get(mutex).size() > 1) {
            dgName = "disjunctGuard" + dgN;
            dgN++;
            out.printf("\t\t bool %s;\n" + "\t\t EqualMaximum(%s", dgName, dgName);
            for (String state : exclusiveActsStatesMap.get(mutex)) {
              out.printf(", Enforce_%s__%s", mutex, state);
            }
            out.printf(");\n\t\t if (%s == true) {\n", dgName);
            dgName = "disjunctGuard" + dgN;
            dgN++;
            out.printf("\t\t bool %s;\n" + "\t\t EqualMaximum(%s", dgName, dgName);
            for (String state : exclusiveActsStatesMap.get(mutex)) {
              out.printf(", myEnforce.Enforce_%s__%s", mutex, state);
            }
            out.printf(");\n\t\t if (%s == true) {\n", dgName);
          } else {
            String state0 = exclusiveActsStatesMap.get(mutex).get(0);
            out.printf("\t\t if (Enforce_%s__%s == true) {\n" + "\t\t if (myEnforce.Enforce_%s__%s == true) {\n",
                mutex, state0, mutex, state0);
          }
          out.printf("\t\t\t equals(Active_%s.%s_MX %s);\n"
              + "\t\t\t neq(%s.state, MERGED);\n\t\t }\n\t\t }\n\n", mutex, mutex, mxVar, mxVar);
        }
      }
      // for (String mutex : exclusiveActsStatesMap2.keySet()) {
      //   mutexActs = mutex.split("__mx__");
      //   if (actName.equals(mutexActs[0]) || actName.equals(mutexActs[1])) {
      //     mxVar = "x" + varN++;
      //     if (exclusiveActsStatesMap2.get(mutex).size() > 1) {
      //       out.print("\t\t or(disjunctGuard");
      //       for (String state : exclusiveActsStatesMap2.get(mutex)) {
      //         out.printf(", Enforce_%s__%s", mutex, state);
      //       }
      //       out.print(");\n\t\t if (disjunctGuard == true) {\n");
      //     } else {
      //       out.printf("\t\t if (Enforce_%s__%s == true) {\n", mutex, exclusiveActsStatesMap2.get(mutex).get(0));
      //     }
      //     out.printf("\t\t equals(Active_%s.%s_MX %s);\n" + "\t\t neq(%s.state, MERGED);\n\t\t }\n\n",
      //         mutex, mutex, mxVar, mxVar);
      //   }
      // }
      // handle threshold state requirements
      if (actThresholdRequireMap.get(actName) != null) {
        for (String[] stateVal : actThresholdRequireMap.get(actName)) {
          String stateValName = stateVal[0] + "_" + stateVal[1];
          out.printf("\t\t if (Enforce_%s == true) {\n" + "\t\t if (myEnforce.Enforce_%s == true) {\n"
              + "\t\t\t contained_by(Active_%s.LE_%s);\n\t\t }\n\t\t }\n\n", stateVal[0], stateVal[0],
              stateValName, stateValName);
        }
      }
      // handle threshold state effects
      if (actThresholdEffectMap.get(actName) != null) {
        for (String[] stateVal : actThresholdEffectMap.get(actName)) {
          out.printf("\t\t if (Enforce_%s == true) {\n" + "\t\t if (myEnforce.Enforce_%s == true) {\n",
              stateVal[0], stateVal[0]);
          for (String val : stateValuesMap.get(stateVal[0])) {
            if (val.equals(stateVal[1])) {
              break;
            }
            String stateValName = stateVal[0] + "_" + val;
            out.printf("\t\t\t contained_by(Active_%s.GT_%s);\n", stateValName, stateValName);
          }
          out.print("\t\t }\n\t\t }\n\n");
        }
      }
      out.print(" }\n }\n }\n }\n}\n\n");
    }
  }
}
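One pattern specific to this method is building a variadic generated-code call incrementally: printf opens the call with its first argument, a loop appends one ", arg" fragment per state, and a final println closes it. A tiny standalone sketch of that open/append/close idiom; the state names and the "Enforce_Arm__" prefix are hypothetical:

import java.io.PrintStream;
import java.util.Arrays;
import java.util.List;

public class VariadicCallDemo {
  public static void main(String[] args) {
    PrintStream out = System.out;
    List<String> states = Arrays.asList("Stowed", "Moving", "Stationary"); // hypothetical states
    String dgName = "disjunctGuard1";
    out.printf("bool %s;%n", dgName);
    out.printf("EqualMaximum(%s", dgName);       // open the generated call
    for (String state : states) {
      out.printf(", Enforce_Arm__%s", state);    // append one argument per state
    }
    out.println(");");                           // close it
  }
}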
From source file: com.github.lindenb.jvarkit.tools.vcfcmp.VcfCompareCallers.java
@Override
public Collection<Throwable> call() throws Exception {
  htsjdk.samtools.util.IntervalTreeMap<Boolean> capture = null;
  PrintWriter exampleWriter = null;
  XMLStreamWriter exampleOut = null;
  PrintStream pw = null;
  VcfIterator[] vcfInputs = new VcfIterator[] { null, null };
  VCFHeader[] headers = new VCFHeader[] { null, null };
  final List<String> args = getInputFiles();
  try {
    if (args.size() == 1) {
      LOG.info("Reading from stdin and " + args.get(0));
      vcfInputs[0] = VCFUtils.createVcfIteratorStdin();
      vcfInputs[1] = VCFUtils.createVcfIterator(args.get(0));
    } else if (args.size() == 2) {
      LOG.info("Reading from " + args.get(0) + " and " + args.get(1));
      vcfInputs[0] = VCFUtils.createVcfIterator(args.get(0));
      vcfInputs[1] = VCFUtils.createVcfIterator(args.get(1));
    } else {
      return wrapException(getMessageBundle("illegal.number.of.arguments"));
    }

    if (super.captureFile != null) {
      LOG.info("Reading " + super.captureFile);
      capture = super.readBedFileAsBooleanIntervalTreeMap(super.captureFile);
    }

    for (int i = 0; i < vcfInputs.length; ++i) {
      headers[i] = vcfInputs[i].getHeader();
    }

    /* dicts */
    final SAMSequenceDictionary dict0 = headers[0].getSequenceDictionary();
    final SAMSequenceDictionary dict1 = headers[1].getSequenceDictionary();
    final Comparator<VariantContext> ctxComparator;
    if (dict0 == null && dict1 == null) {
      ctxComparator = VCFUtils.createChromPosRefComparator();
    } else if (dict0 != null && dict1 != null) {
      if (!SequenceUtil.areSequenceDictionariesEqual(dict0, dict1)) {
        return wrapException(getMessageBundle("not.the.same.sequence.dictionaries"));
      }
      ctxComparator = VCFUtils.createTidPosRefComparator(dict0);
    } else {
      return wrapException(getMessageBundle("not.the.same.sequence.dictionaries"));
    }

    /* samples */
    Set<String> samples0 = new HashSet<>(headers[0].getSampleNamesInOrder());
    Set<String> samples1 = new HashSet<>(headers[1].getSampleNamesInOrder());
    Set<String> samples = new TreeSet<>(samples0);
    samples.retainAll(samples1);
    if (samples.size() != samples0.size() || samples.size() != samples1.size()) {
      LOG.warn("Warning: Not the same samples set. Using intersection of both lists.");
    }
    if (samples.isEmpty()) {
      return wrapException("No common samples");
    }

    Map<String, Counter<Category>> sample2info = new HashMap<String, Counter<Category>>(samples.size());
    for (String sampleName : samples) {
      sample2info.put(sampleName, new Counter<Category>());
    }

    if (super.exampleFile != null) {
      exampleWriter = new PrintWriter(exampleFile, "UTF-8");
      XMLOutputFactory xof = XMLOutputFactory.newFactory();
      exampleOut = xof.createXMLStreamWriter(exampleWriter);
      exampleOut.writeStartDocument("UTF-8", "1.0");
      exampleOut.writeStartElement("compare-callers");
    }

    SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(dict0);
    VariantContext[] buffer = new VariantContext[vcfInputs.length];
    VariantContext[] prev = new VariantContext[vcfInputs.length];
    for (;;) {
      VariantContext smallest = null;
      // refill buffer
      for (int i = 0; i < vcfInputs.length; ++i) {
        if (buffer[i] == null && vcfInputs[i] != null) {
          if (vcfInputs[i].hasNext()) {
            buffer[i] = vcfInputs[i].peek();
            /* check data are sorted */
            if (prev[i] != null && ctxComparator.compare(prev[i], buffer[i]) > 0) {
              return wrapException("Input " + (i + 1) + "/2 is not sorted "
                  + (((i == 0 && dict0 == null) || (i == 1 && dict1 == null)) ? "on chrom/pos/ref"
                      : "on sequence dictionary")
                  + ". got\n" + buffer[i] + "\nafter\n" + prev[i]);
            }
          } else {
            vcfInputs[i].close();
            vcfInputs[i] = null;
          }
        }
        if (buffer[i] != null) {
          if (smallest == null || ctxComparator.compare(buffer[i], smallest) < 0) {
            smallest = buffer[i];
          }
        }
      }
      if (smallest == null)
        break;

      VariantContext ctx0 = null;
      VariantContext ctx1 = null;
      Interval interval = null;
      if (buffer[0] != null && ctxComparator.compare(buffer[0], smallest) == 0) {
        prev[0] = progress.watch(vcfInputs[0].next());
        ctx0 = prev[0];
        buffer[0] = null;
        interval = new Interval(ctx0.getContig(), ctx0.getStart(), ctx0.getEnd());
      }
      if (buffer[1] != null && ctxComparator.compare(buffer[1], smallest) == 0) {
        prev[1] = progress.watch(vcfInputs[1].next());
        ctx1 = prev[1];
        buffer[1] = null;
        interval = new Interval(ctx1.getContig(), ctx1.getStart(), ctx1.getEnd());
      }
      boolean in_capture = true;
      if (capture != null && interval != null) {
        in_capture = capture.containsOverlapping(interval);
      }

      for (final String sampleName : sample2info.keySet()) {
        final Counter<Category> sampleInfo = sample2info.get(sampleName);
        Genotype g0 = (ctx0 == null ? null : ctx0.getGenotype(sampleName));
        Genotype g1 = (ctx1 == null ? null : ctx1.getGenotype(sampleName));
        if (g0 != null && (g0.isNoCall() || !g0.isAvailable()))
          g0 = null;
        if (g1 != null && (g1.isNoCall() || !g1.isAvailable()))
          g1 = null;

        if (g0 == null && g1 == null) {
          watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.both_missing);
          continue;
        } else if (g0 != null && g1 == null) {
          if (!in_capture) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.off_target_only_1);
            continue;
          }
          watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.unique_to_file_1);
          if (ctx0.isIndel()) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.unique_to_file_1_indel);
          } else if (ctx0.isSNP()) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.unique_to_file_1_snp);
          }
          continue;
        } else if (g0 == null && g1 != null) {
          if (!in_capture) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.off_target_only_2);
            continue;
          }
          watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.unique_to_file_2);
          if (ctx1.isIndel()) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.unique_to_file_2_indel);
          } else if (ctx1.isSNP()) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.unique_to_file_2_snp);
          }
          continue;
        } else {
          if (!in_capture) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.off_target_both);
            continue;
          }
          watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.common_context);
          if (ctx0.isIndel() && ctx1.isIndel()) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.common_context_indel);
          } else if (ctx0.isSNP() && ctx1.isSNP()) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.common_context_snp);
          }
          if ((ctx0.hasID() && !ctx1.hasID()) || (!ctx0.hasID() && ctx1.hasID())
              || (ctx0.hasID() && ctx1.hasID() && !ctx0.getID().equals(ctx1.getID()))) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.common_context_discordant_id);
          }
          if (g0.sameGenotype(g1)) {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_and_same);
            if (g0.isHomRef()) {
              watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_and_same_hom_ref);
            }
            if (g0.isHomVar()) {
              watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_and_same_hom_var);
            } else if (g0.isHet()) {
              watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_and_same_het);
            }
          } else {
            watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_but_discordant);
            if (g0.isHom() && g1.isHet()) {
              watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_but_discordant_hom1_het2);
            } else if (g0.isHet() && g1.isHom()) {
              watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_but_discordant_het1_hom2);
            } else if (g0.isHom() && g1.isHom()) {
              watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_but_discordant_hom1_hom2);
            } else if (g0.isHet() && g1.isHet()) {
              watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_but_discordant_het1_het2);
            } else {
              watch(exampleOut, ctx0, ctx1, g0, g1, sampleName, sampleInfo, Category.called_but_discordant_others);
            }
          }
        }
      }
    }
    progress.finish();

    pw = openFileOrStdoutAsPrintStream();
    pw.print("#Sample");
    for (Category c : Category.values()) {
      pw.print('\t');
      pw.print(c.name());
    }
    pw.println();
    for (String sample : sample2info.keySet()) {
      Counter<Category> count = sample2info.get(sample);
      pw.print(sample);
      for (Category c : Category.values()) {
        pw.print('\t');
        pw.print(count.count(c));
      }
      pw.println();
      if (pw.checkError())
        break;
    }
    pw.flush();

    if (exampleOut != null) {
      exampleOut.writeEndElement();
      exampleOut.writeEndDocument();
      exampleOut.flush();
      exampleOut.close();
    }
    return RETURN_OK;
  } catch (Exception err) {
    return wrapException(err);
  } finally {
    if (getOutputFile() != null)
      CloserUtil.close(pw);
    CloserUtil.close(exampleWriter);
  }
}
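The PrintStream work at the end of call() is a per-sample count table: print('\t') separates columns, print(c.name()) and print(count) emit cells, and checkError() is polled because PrintStream swallows IOExceptions rather than throwing them. A standalone sketch of that reporting tail; the Category subset and the int[] counts stand in for jvarkit's Counter and are invented for illustration:

import java.io.PrintStream;
import java.util.LinkedHashMap;
import java.util.Map;

public class CountTableDemo {
  enum Category { both_missing, unique_to_file_1, unique_to_file_2, common_context }

  public static void main(String[] args) {
    Map<String, int[]> sample2info = new LinkedHashMap<>();
    sample2info.put("S1", new int[] { 0, 3, 1, 42 }); // hypothetical counts
    sample2info.put("S2", new int[] { 2, 0, 5, 40 });
    PrintStream pw = System.out;
    pw.print("#Sample");
    for (Category c : Category.values()) {
      pw.print('\t');          // print(char) appends a single tab
      pw.print(c.name());
    }
    pw.println();
    for (Map.Entry<String, int[]> e : sample2info.entrySet()) {
      pw.print(e.getKey());
      for (Category c : Category.values()) {
        pw.print('\t');
        pw.print(e.getValue()[c.ordinal()]);
      }
      pw.println();
      if (pw.checkError()) {   // PrintStream never throws; poll its error flag instead
        break;
      }
    }
    pw.flush();
  }
}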