List of usage examples for java.io PrintStream printf
public PrintStream printf(Locale l, String format, Object... args)
From source file: org.apache.hadoop.io.file.tfile.TFileDumper.java
/** * Dump information about TFile./*ww w. j a v a 2s . c o m*/ * * @param file * Path string of the TFile * @param out * PrintStream to output the information. * @param conf * The configuration object. * @throws IOException */ static public void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException { final int maxKeySampleLen = 16; Path path = new Path(file); FileSystem fs = path.getFileSystem(conf); long length = fs.getFileStatus(path).getLen(); FSDataInputStream fsdis = fs.open(path); TFile.Reader reader = new TFile.Reader(fsdis, length, conf); try { LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>(); int blockCnt = reader.readerBCF.getBlockCount(); int metaBlkCnt = reader.readerBCF.metaIndex.index.size(); properties.put("BCFile Version", reader.readerBCF.version.toString()); properties.put("TFile Version", reader.tfileMeta.version.toString()); properties.put("File Length", Long.toString(length)); properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName()); properties.put("Record Count", Long.toString(reader.getEntryCount())); properties.put("Sorted", Boolean.toString(reader.isSorted())); if (reader.isSorted()) { properties.put("Comparator", reader.getComparatorName()); } properties.put("Data Block Count", Integer.toString(blockCnt)); long dataSize = 0, dataSizeUncompressed = 0; if (blockCnt > 0) { for (int i = 0; i < blockCnt; ++i) { BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i); dataSize += region.getCompressedSize(); dataSizeUncompressed += region.getRawSize(); } properties.put("Data Block Bytes", Long.toString(dataSize)); if (reader.readerBCF.getDefaultCompressionName() != "none") { properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed)); properties.put("Data Block Compression Ratio", String.format("1:%.1f", (double) dataSizeUncompressed / dataSize)); } } properties.put("Meta Block Count", Integer.toString(metaBlkCnt)); long 
metaSize = 0, metaSizeUncompressed = 0; if (metaBlkCnt > 0) { Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values(); boolean calculateCompression = false; for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) { MetaIndexEntry e = it.next(); metaSize += e.getRegion().getCompressedSize(); metaSizeUncompressed += e.getRegion().getRawSize(); if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) { calculateCompression = true; } } properties.put("Meta Block Bytes", Long.toString(metaSize)); if (calculateCompression) { properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed)); properties.put("Meta Block Compression Ratio", String.format("1:%.1f", (double) metaSizeUncompressed / metaSize)); } } properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize)); long leftOverBytes = length - dataSize - metaSize; long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size(); long metaIndexSize = leftOverBytes - miscSize; properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize)); properties.put("Headers Etc Bytes", Long.toString(miscSize)); // Now output the properties table. 
int maxKeyLength = 0; Set<Map.Entry<String, String>> entrySet = properties.entrySet(); for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) { Map.Entry<String, String> e = it.next(); if (e.getKey().length() > maxKeyLength) { maxKeyLength = e.getKey().length(); } } for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) { Map.Entry<String, String> e = it.next(); out.printf("%s : %s\n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue()); } out.println(); reader.checkTFileDataIndex(); if (blockCnt > 0) { String blkID = "Data-Block"; int blkIDWidth = Align.calculateWidth(blkID, blockCnt); int blkIDWidth2 = Align.calculateWidth("", blockCnt); String offset = "Offset"; int offsetWidth = Align.calculateWidth(offset, length); String blkLen = "Length"; int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10); String rawSize = "Raw-Size"; int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10); String records = "Records"; int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10); String endKey = "End-Key"; int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5); out.printf("%s %s %s %s %s %s\n", Align.format(blkID, blkIDWidth, Align.CENTER), Align.format(offset, offsetWidth, Align.CENTER), Align.format(blkLen, blkLenWidth, Align.CENTER), Align.format(rawSize, rawSizeWidth, Align.CENTER), Align.format(records, recordsWidth, Align.CENTER), Align.format(endKey, endKeyWidth, Align.LEFT)); for (int i = 0; i < blockCnt; ++i) { BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i); TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i); out.printf("%s %s %s %s %s ", Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT), Align.format(region.getOffset(), offsetWidth, Align.LEFT), Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT), 
Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT), Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT)); byte[] key = indexEntry.key; boolean asAscii = true; int sampleLen = Math.min(maxKeySampleLen, key.length); for (int j = 0; j < sampleLen; ++j) { byte b = key[j]; if ((b < 32 && b != 9) || (b == 127)) { asAscii = false; } } if (!asAscii) { out.print("0X"); for (int j = 0; j < sampleLen; ++j) { byte b = key[i]; out.printf("%X", b); } } else { out.print(new String(key, 0, sampleLen)); } if (sampleLen < key.length) { out.print("..."); } out.println(); } } out.println(); if (metaBlkCnt > 0) { String name = "Meta-Block"; int maxNameLen = 0; Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index .entrySet(); for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) { Map.Entry<String, MetaIndexEntry> e = it.next(); if (e.getKey().length() > maxNameLen) { maxNameLen = e.getKey().length(); } } int nameWidth = Math.max(name.length(), maxNameLen); String offset = "Offset"; int offsetWidth = Align.calculateWidth(offset, length); String blkLen = "Length"; int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10); String rawSize = "Raw-Size"; int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10); String compression = "Compression"; int compressionWidth = compression.length(); out.printf("%s %s %s %s %s\n", Align.format(name, nameWidth, Align.CENTER), Align.format(offset, offsetWidth, Align.CENTER), Align.format(blkLen, blkLenWidth, Align.CENTER), Align.format(rawSize, rawSizeWidth, Align.CENTER), Align.format(compression, compressionWidth, Align.LEFT)); for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) { Map.Entry<String, MetaIndexEntry> e = it.next(); String blkName = e.getValue().getMetaName(); BlockRegion region = e.getValue().getRegion(); String blkCompression = 
e.getValue().getCompressionAlgorithm().getName(); out.printf("%s %s %s %s %s\n", Align.format(blkName, nameWidth, Align.LEFT), Align.format(region.getOffset(), offsetWidth, Align.LEFT), Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT), Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT), Align.format(blkCompression, compressionWidth, Align.LEFT)); } } } finally { IOUtils.cleanup(LOG, reader, fsdis); } }
From source file: org.apache.slider.client.SliderClient.java
/** * list exports available for an instance * * @param registryArgs registry Arguments * @throws YarnException YARN problems//w w w .j av a2 s.c o m * @throws IOException Network or other problems */ public void actionRegistryListExports(ActionRegistryArgs registryArgs) throws YarnException, IOException { ServiceRecord instance = lookupServiceRecord(registryArgs); RegistryRetriever retriever = new RegistryRetriever(instance); PublishedExportsSet exports = retriever.getExports(!registryArgs.internal); PrintStream out = null; boolean streaming = false; try { if (registryArgs.out != null) { out = new PrintStream(new FileOutputStream(registryArgs.out)); streaming = true; log.debug("Saving output to {}", registryArgs.out); } else { out = System.out; } log.debug("Number of exports: {}", exports.keys().size()); for (String exportName : exports.keys()) { if (streaming) { log.debug(exportName); } if (!registryArgs.verbose) { out.println(exportName); } else { PublishedExports published = exports.get(exportName); out.printf("%s: %s\n", exportName, published.description); } } } finally { if (streaming) { out.flush(); out.close(); } } }
From source file: gov.nasa.ensemble.dictionary.nddl.ParseInterpreter.java
/**
 * Function that writes out the active compatibilities into the model file
 * specified by the given output stream and based on a given Activity
 * Dictionary that has already been parsed.
 *
 * <p>Emits NDDL text: first an {@code InitialConds::incon} block seeding the
 * initial resource/state values, then one enforcement block per activity
 * definition that has shared effects, state requirements, or state effects.
 * NOTE(review): the emission order and the shared {@code varN} counter are
 * load-bearing — do not reorder these print statements.
 *
 * @param oStrm the stream the NDDL model text is written to
 */
public void writeActiveResourceCompats(OutputStream oStrm) {
    PrintStream out = new PrintStream(oStrm);
    String actName;
    String startVar;
    String endVar;
    String qVar;
    String stateName;
    String state;
    String atStartValue;
    String atEndValue;
    @SuppressWarnings("unused")
    String objrefName;
    String shareName;
    List<String> allowedValues;
    // first handle the incon activity, then all others
    out.print("InitialConds::incon {\n" + " if (scheduled == true) {\n"
            + " if (Enable_Active_Enforcement == true) {\n" + " if (subSolved == true) {\n"
            + " if (enforced == true) {\n");
    // PHM 04/17/2013 Prevent fixViolations from moving activity to before the INCON
    out.print(" \neq(inconStart, reftime);\n");
    // PHM 05/10/2011 Declare the negated states as locals if they exist
    for (String stat : stateNames) {
        Set<String> stateNotValues = stateNotValuesMap.get(stat);
        if (stateNotValues != null && stateNotValues.size() > 0) {
            for (String val : stateNotValues) {
                out.printf(" float \t _not_%s_%s;\n", NDDLUtil.escape(stat), NDDLUtil.escape(val));
            }
        }
    }
    // PHM 05/10/2011 Set the negated state values
    for (String stat : stateNames) {
        Set<String> stateNotValues = stateNotValuesMap.get(stat);
        if (stateNotValues != null && stateNotValues.size() > 0) {
            for (String val : stateNotValues) {
                out.printf(" sum(_not_%s_%s, _%s_%s, STATE_COND_TRUE);\n", NDDLUtil.escape(stat),
                        NDDLUtil.escape(val), NDDLUtil.escape(stat), NDDLUtil.escape(val));
            }
        }
    }
    // Seed each sharable resource with its initial quantity.
    for (String share : shareNames) {
        startVar = NDDLUtil.escape("i" + varN++);
        shareName = NDDLUtil.escape(share);
        out.printf("\n" + "\t starts(%s.produce %s);\n" + "\t eq(%s.quantity, _%s);\n", shareName,
                startVar, startVar, shareName);
    }
    // note that we use state-value pairs
    // for each state, one and only one value should be TRUE
    for (String resource : stateNames) {
        List<String> stateValues = stateValuesMap.get(resource);
        if (stateValues != null && stateValues.size() > 0) {
            for (String val : stateValues) {
                String resourceName = NDDLUtil.escape(resource + "_" + val);
                startVar = NDDLUtil.escape("i" + varN++);
                out.printf("\n" + "\t starts(%s.produce %s);\n" + "\t eq(%s.quantity, _%s);\n",
                        resourceName, startVar, startVar, resourceName);
            }
        }
        // add in the negated values if they exist
        Set<String> stateNotValues = stateNotValuesMap.get(resource);
        if (stateNotValues != null && stateNotValues.size() > 0) {
            for (String val : stateNotValues) {
                String resourceName = NDDLUtil.escape("not_" + resource + "_" + val);
                startVar = NDDLUtil.escape("i" + varN++);
                out.printf("\n" + "\t starts(%s.produce %s);\n" + "\t eq(%s.quantity, _%s);\n",
                        resourceName, startVar, startVar, resourceName);
            }
        }
    }
    out.print(" }\n }\n}\n}\n}\n\n");
    // Due to the afterIncon check, have to handle the start transitions and end
    // transitions separately
    for (EActivityDef activityDef : activityDefs) {
        actName = NDDLUtil.escape(activityDef.getName());
        // Only emit an enforcement block for activities that actually touch resources/states.
        if (!activityDef.getSharedEffects().isEmpty() || !activityDef.getStateRequirements().isEmpty()
                || !activityDef.getStateEffects().isEmpty()) {
            out.printf(
                    "%s::%s {\n" + " if (scheduled == true) {\n"
                            + " if (Enable_Active_Enforcement == true) {\n"
                            + " if (subSolved == true) {\n" + " if (enforced == true) {\n\n",
                    activitySubsystemMap.get(NDDLUtil.escape(activityDef.getName())),
                    NDDLUtil.escape(activityDef.getName()));
            // handle shared reservations
            for (ESharableResourceEffect share : activityDef.getSharedEffects()) {
                shareName = NDDLUtil.escape(share.getName());
                if (shareNames.contains(shareName)) {
                    startVar = "r" + varN++;
                    qVar = "q" + varN++;
                    endVar = "r" + varN++;
                    int reservations = share.getReservations();
                    if (reservations > 0) {
                        out.printf(" if (Enforce_sx_%s == true) {\n"
                                + " if (myEnforce.Enforce_sx_%s == true) {\n", shareName, shareName);
                        // Consume at start (scaled by afterIncon), produce back at end.
                        out.printf("\n" + " condleq(afterIncon, inconStart, start);\n"
                                + " float %s;\n" + " product(%s, %d, afterIncon);\n"
                                + " starts(%s.consume %s);\n" + " eq(%s.quantity, %s);\n", qVar, qVar,
                                reservations, shareName, startVar, startVar, qVar);
                        out.printf("\n\t\tends(%s.produce %s);\n" + "\t\teq(%s.quantity, %d);\n",
                                NDDLUtil.escape(shareName), NDDLUtil.escape(endVar),
                                NDDLUtil.escape(endVar), reservations);
                        out.printf("\n }\n }\n\n");
                    }
                } else {
                    System.err.print("\n* Undefined share " + shareName + " in activity "
                            + NDDLUtil.escape(activityDef.getName()) + " *\n\n");
                }
            }
            // handle state requirements
            for (EStateRequirement stateReq : activityDef.getStateRequirements()) {
                // period = 0 means RequiresThroughout; period = 1 means
                // RequiresBeforeStart
                // we only handle RequiresThroughout
                state = NDDLUtil.escape(stateReq.getName());
                if (stateNames.contains(state)) {
                    if (stateReq.getPeriod() == Period.REQUIRES_THROUGHOUT) {
                        // For requirements, Enum and Threshold states are no longer
                        // handled identically due to negation and disjunction
                        if (stateTypesMap.get(state).equals("Enum")) {
                            // PHM 12/02/2011 Replace mutex guards per John Bresina suggestion
                            // Keep _mx_ substring used to identify state flight rules.
                            out.printf(" if (Enforce_mx_%s__%s == true) {\n"
                                    + " if (myEnforce.Enforce_mx_%s__%s == true) {\n", actName, state,
                                    actName, state);
                            if (stateReq.getRequiredState() != null) {
                                // Single required value: consume/produce that state-value pair.
                                stateName = NDDLUtil.escape(state + "_" + stateReq.getRequiredState());
                                startVar = "r" + varN++;
                                qVar = "q" + varN++;
                                out.printf("\n" + " condleq(afterIncon, inconStart, start);\n"
                                        + " float %s;\n" + " eq(%s, afterIncon);\n"
                                        + " starts(%s.consume %s);\n" + " eq(%s.quantity, %s);\n",
                                        qVar, qVar, stateName, startVar, startVar, qVar);
                                endVar = NDDLUtil.escape("r" + varN++);
                                out.printf("\n\t\tends(%s.produce %s);\n"
                                        + "\t\teq(%s.quantity, 1.0);\n", NDDLUtil.escape(stateName),
                                        endVar, endVar);
                            } else if (stateReq.getDisallowedState() != null) {
                                // Negated requirement: use the not_<state>_<value> resource.
                                stateName = NDDLUtil
                                        .escape("not_" + state + "_" + stateReq.getDisallowedState());
                                startVar = "r" + varN++;
                                qVar = "q" + varN++;
                                out.printf("\n" + " condleq(afterIncon, inconStart, start);\n"
                                        + " float %s;\n" + " eq(%s, afterIncon);\n"
                                        + " starts(%s.consume %s);\n" + " eq(%s.quantity, %s);\n",
                                        qVar, qVar, stateName, startVar, startVar, qVar);
                                endVar = NDDLUtil.escape("r" + varN++);
                                out.printf("\n\t\tends(%s.produce %s);\n"
                                        + "\t\teq(%s.quantity, 1.0);\n", NDDLUtil.escape(stateName),
                                        endVar, endVar);
                            } else if (stateReq.getAllowedStates() != null
                                    && stateReq.getAllowedStates().size() > 0) {
                                // Disjunction: forbid every value NOT in the allowed list.
                                allowedValues = stateReq.getAllowedStates();
                                for (String val : stateValuesMap.get(state)) {
                                    if (!allowedValues.contains(val)) {
                                        stateName = NDDLUtil.escape("not_" + state + "_" + val);
                                        startVar = "r" + varN++;
                                        qVar = "q" + varN++;
                                        out.printf("\n" + " condleq(afterIncon, inconStart, start);\n"
                                                + " float %s;\n" + " eq(%s, afterIncon);\n"
                                                + " starts(%s.consume %s);\n"
                                                + " eq(%s.quantity, %s);\n", qVar, qVar, stateName,
                                                startVar, startVar, qVar);
                                        endVar = NDDLUtil.escape("r" + varN++);
                                        out.printf("\n\t\tends(%s.produce %s);\n"
                                                + "\t\teq(%s.quantity, 1.0);\n",
                                                NDDLUtil.escape(stateName), endVar, endVar);
                                    }
                                }
                            } else {
                                System.err.print("*Required resource " + state
                                        + " did not have a value specified*\n\n");
                            }
                            out.printf("\n }\n }\n\n");
                        } else if (stateTypesMap.get(state).equals("Threshold")) {
                            // PHM 12/02/2011 Keep existing threshold guards
                            out.printf(" if (Enforce_%s == true) {\n"
                                    + " if (myEnforce.Enforce_%s == true) {\n", state, state);
                            stateName = NDDLUtil.escape(state + "_" + stateReq.getRequiredState());
                            startVar = "r" + varN++;
                            qVar = "q" + varN++;
                            out.printf("\n" + " condleq(afterIncon, inconStart, start);\n"
                                    + " float %s;\n" + " eq(%s, afterIncon);\n"
                                    + " starts(%s.consume %s);\n" + " eq(%s.quantity, %s);\n", qVar,
                                    qVar, stateName, startVar, startVar, qVar);
                            endVar = NDDLUtil.escape("r" + varN++);
                            out.printf("\n\t\tends(%s.produce %s);\n" + "\t\teq(%s.quantity, 1.0);\n",
                                    NDDLUtil.escape(stateName), endVar, endVar);
                            out.printf("\n }\n }\n\n");
                        } else {
                            System.err.print("*Required resource " + state
                                    + " is not of type Enum nor ThresholdEnum*\n\n");
                        }
                    }
                } else {
                    System.err.print("\n* Undefined state " + state + " in activity "
                            + NDDLUtil.escape(activityDef.getName()) + " *\n\n");
                }
            }
            // handle start state effects
            for (EStateResourceEffect<?> effect : activityDef.getStateEffects()) {
                state = NDDLUtil.escape(effect.getName());
                atStartValue = effect.getStartEffect();
                // for effects, Enum and Threshold states are handled
                // differently
                String stateTypeName = stateTypesMap.get(state);
                if (stateTypeName != null && stateTypeName.equals("Enum")) {
                    if (atStartValue != null) {
                        // PHM 12/02/2011 Replace mutex guards per John Bresina suggestion
                        // Keep _mx_ substring used to identify state flight rules.
                        out.printf(" if (Enforce_mx_%s__%s == true) {\n"
                                + " if (myEnforce.Enforce_mx_%s__%s == true) {\n", actName, state,
                                actName, state);
                        writeEnumStateEffectSection(out, state, atStartValue, "starts");
                        out.printf("\n }\n }\n\n");
                    }
                } else if (stateTypeName != null && stateTypeName.equals("Threshold")) {
                    // PHM 12/02/2011 Keep existing threshold guards
                    out.printf(" if (Enforce_%s == true) {\n"
                            + " if (myEnforce.Enforce_%s == true) {\n", state, state);
                    // we assume that there is an atStart value
                    // and that atEnd we retract the effect
                    // make all LOWER values False at start and True at
                    // end
                    for (String val : stateValuesMap.get(state)) {
                        if (val.equals(atStartValue)) {
                            break;
                        }
                        // NOTE(review): unlike the other branches, this stateName is not passed
                        // through NDDLUtil.escape — preserved as-is; confirm whether intentional.
                        stateName = state + "_" + val;
                        startVar = "s" + varN++;
                        qVar = "q" + varN++;
                        out.printf("\n" + " condleq(afterIncon, inconStart, start);\n"
                                + " float %s;\n" + " product(%s, STATE_COND_FALSE, afterIncon);\n"
                                + " starts(%s.consume %s);\n" + " eq(%s.quantity, %s);\n", qVar, qVar,
                                stateName, startVar, startVar, qVar);
                        out.println();
                    }
                    out.printf("\n }\n }\n\n");
                }
            }
            // handle end state effects
            for (EStateResourceEffect<?> effect : activityDef.getStateEffects()) {
                state = NDDLUtil.escape(effect.getName());
                atEndValue = effect.getEndEffect();
                // for effects, Enum and Threshold states are handled
                // differently
                String stateTypeName = stateTypesMap.get(state);
                if (stateTypeName != null && stateTypeName.equals("Enum")) {
                    if (atEndValue != null) {
                        // PHM 12/02/2011 Replace mutex guards per John Bresina suggestion
                        // Keep _mx_ substring used to identify state flight rules.
                        out.printf(" if (Enforce_mx_%s__%s == true) {\n"
                                + " if (myEnforce.Enforce_mx_%s__%s == true) {\n", actName, state,
                                actName, state);
                        writeEnumStateEffectSection(out, state, atEndValue, "ends");
                        out.printf("\n }\n }\n\n");
                    }
                } else if (stateTypeName != null && stateTypeName.equals("Threshold")) {
                    // PHM 12/02/2011 Keep existing threshold guards
                    out.printf(" if (Enforce_%s == true) {\n"
                            + " if (myEnforce.Enforce_%s == true) {\n", state, state);
                    // we assume that there is an atStart value
                    // and that atEnd we retract the effect
                    // make all LOWER values False at start and True at
                    // end
                    for (String val : stateValuesMap.get(state)) {
                        if (val.equals(atEndValue)) {
                            break;
                        }
                        stateName = NDDLUtil.escape(state + "_" + val);
                        endVar = NDDLUtil.escape("e" + varN++);
                        out.printf("\t\tends(%s.produce %s);\n\t\teq(%s.quantity, STATE_COND_TRUE);\n",
                                stateName, endVar, endVar);
                        out.println();
                    }
                    out.printf("\n }\n }\n\n");
                }
            }
            out.print("\n }\n }\n}\n}\n}\n\n");
        }
    }
}