List of usage examples for java.util.LinkedHashMap.entrySet()
public Set<Map.Entry<K, V>> entrySet()
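Before the project examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) showing the two properties the examples rely on: entrySet() on a LinkedHashMap iterates in insertion order, and the returned view is backed by the map, so Entry.setValue writes through.

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetDemo {
    public static void main(String[] args) {
        Map<String, Integer> scores = new LinkedHashMap<>();
        scores.put("alpha", 3);
        scores.put("beta", 1);
        scores.put("gamma", 2);

        // Iteration follows insertion order: alpha, beta, gamma.
        for (Map.Entry<String, Integer> entry : scores.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }

        // The entry set is a live view; setValue updates the map itself.
        for (Map.Entry<String, Integer> entry : scores.entrySet()) {
            entry.setValue(entry.getValue() * 10);
        }
        System.out.println(scores); // {alpha=30, beta=10, gamma=20}
    }
}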
From source file:org.apache.openmeetings.cli.OmHelpFormatter.java
private LinkedHashMap<String, List<OmOption>> getOptions(Options opts, int leftPad) {
    final String longOptSeparator = " ";
    final String lpad = createPadding(leftPad);
    final String lpadParam = createPadding(leftPad + 2);
    List<OmOption> reqOptions = getReqOptions(opts);
    LinkedHashMap<String, List<OmOption>> map = new LinkedHashMap<String, List<OmOption>>(reqOptions.size());
    map.put(GENERAL_OPTION_GROUP, new ArrayList<OmOption>());
    for (OmOption o : reqOptions) {
        map.put(o.getOpt(), new ArrayList<OmOption>());
    }
    for (Option _o : opts.getOptions()) {
        OmOption o = (OmOption) _o;
        //TODO need better check (required option should go first and should not be duplicated
        boolean skipOption = map.containsKey(o.getOpt());
        boolean mainOption = skipOption || o.getGroup() == null;

        // first create list containing only <lpad>-a,--aaa where
        // -a is opt and --aaa is long opt; in parallel look for
        // the longest opt string this list will be then used to
        // sort options ascending
        StringBuilder optBuf = new StringBuilder();
        if (o.getOpt() == null) {
            optBuf.append(mainOption ? lpad : lpadParam).append(" ").append(getLongOptPrefix())
                    .append(o.getLongOpt());
        } else {
            optBuf.append(mainOption ? lpad : lpadParam).append(getOptPrefix()).append(o.getOpt());
            if (o.hasLongOpt()) {
                optBuf.append(',').append(getLongOptPrefix()).append(o.getLongOpt());
            }
        }
        if (o.hasArg()) {
            String argName = o.getArgName();
            if (argName != null && argName.length() == 0) {
                // if the option has a blank argname
                optBuf.append(' ');
            } else {
                optBuf.append(o.hasLongOpt() ? longOptSeparator : " ");
                optBuf.append("<").append(argName != null ? o.getArgName() : getArgName()).append(">");
            }
        }
        o.setHelpPrefix(optBuf);
        maxPrefixLength = Math.max(optBuf.length(), maxPrefixLength);
        if (skipOption) {
            //TODO need better check (required option should go first and should not be duplicated
            continue;
        }
        String grp = o.getGroup();
        grp = grp == null ? GENERAL_OPTION_GROUP : grp;
        String[] grps = grp.split(",");
        for (String g : grps) {
            map.get(g).add(o);
        }
    }
    for (Map.Entry<String, List<OmOption>> me : map.entrySet()) {
        final String key = me.getKey();
        List<OmOption> options = me.getValue();
        Collections.sort(options, new Comparator<OmOption>() {
            @Override
            public int compare(OmOption o1, OmOption o2) {
                boolean o1opt = !o1.isOptional(key);
                boolean o2opt = !o2.isOptional(key);
                return (o1opt && o2opt || !o1opt && !o2opt) ? (o1.getOpt() == null ? 1 : -1) : (o1opt ? -1 : 1);
            }
        });
        if (opts.hasOption(key)) {
            options.add(0, (OmOption) opts.getOption(key));
        }
    }
    return map;
}
From source file:io.personium.core.bar.BarFileReadRunner.java
private boolean execBulkRequest(String cellId, LinkedHashMap<String, BulkRequest> bulkRequests,
        Map<String, String> fileNameMap, PersoniumODataProducer producer) {
    // Bulk-create the entities.
    producer.bulkCreateEntity(producer.getMetadata(), bulkRequests, cellId);
    // Check the result of each request, in registration order.
    for (Entry<String, BulkRequest> request : bulkRequests.entrySet()) {
        // If the request failed, report the error and abort.
        if (request.getValue().getError() != null) {
            if (request.getValue().getError() instanceof PersoniumCoreException) {
                PersoniumCoreException e = ((PersoniumCoreException) request.getValue().getError());
                writeOutputStream(true, "PL-BI-1004", fileNameMap.get(request.getKey()), e.getMessage());
                log.info("PersoniumCoreException: " + e.getMessage());
            } else {
                Exception e = request.getValue().getError();
                String message = PersoniumCoreMessageUtils.getMessage("PL-BI-2003");
                writeOutputStream(true, "PL-BI-1004", fileNameMap.get(request.getKey()), message);
                log.info("Regist Entity Error: " + e.toString());
                log.info("Regist Entity Error: " + e.getClass().getName());
                log.info("Regist Entity Error: " + e);
            }
            return false;
        }
        writeOutputStream(false, "PL-BI-1003", fileNameMap.get(request.getKey()));
    }
    bulkRequests.clear();
    fileNameMap.clear();
    return true;
}
From source file:com.redhat.rcm.version.Cli.java
private void testConfigAndPrintDiags() {
    VersionManagerSession session = null;
    final List<VManException> errors = new ArrayList<VManException>();
    try {
        session = initSession();
    } catch (final VManException e) {
        errors.add(e);
    }
    if (session != null) {
        try {
            vman.configureSession(boms, toolchain, session);
        } catch (final VManException e) {
            errors.add(e);
        }
    }
    final FullProjectKey toolchainKey = session == null ? null : session.getToolchainKey();
    final List<FullProjectKey> bomCoords = session == null ? null : session.getBomCoords();

    final LinkedHashMap<String, Object> map = new LinkedHashMap<String, Object>();
    map.put("Bootstrap location:", bootstrapLocation);
    map.put("Bootstrap read?", bootstrapRead);
    map.put("Config location:", configLocation);
    map.put("", "");
    map.put(" ", "");
    map.put("Toolchain location:", toolchain);
    map.put("Toolchain:", toolchainKey);
    map.put(" ", "");
    map.put("BOM locations:", boms);
    map.put("BOMs:", bomCoords);
    map.put(" ", "");
    map.put(" ", "");
    map.put("Settings.xml:", settings);
    map.put("Remote repo:", remoteRepositories);

    System.out.println("Version information:\n-------------------------------------------------\n\n");
    printVersionInfo();
    System.out.printf("Diagnostics:\n-------------------------------------------------\n\n");

    int max = 0;
    for (final String key : map.keySet()) {
        max = Math.max(max, key.length());
    }
    final StringBuilder indent = new StringBuilder();
    for (int i = 0; i < max + 4; i++) {
        indent.append(' ');
    }
    final int descMax = 75 - max;
    final String fmt = "%-" + max + "s %-" + descMax + "s\n";
    for (final Map.Entry<String, Object> entry : map.entrySet()) {
        final Object value = entry.getValue();
        String val = value == null ? "-NONE-" : String.valueOf(value);
        if (value instanceof Collection<?>) {
            final Collection<?> coll = ((Collection<?>) value);
            if (coll.isEmpty()) {
                val = "-NONE-";
            } else {
                val = join(coll, "\n" + indent) + "\n";
            }
        }
        System.out.printf(fmt, entry.getKey(), val);
    }
    System.out.println();
    System.out.printf("Errors:\n-------------------------------------------------\n%s\n\n",
            errors.isEmpty() ? "-NONE" : join(errors, "\n\n"));
    System.out.println();
}
From source file:org.bimserver.charting.Containers.TreeNode.java
/**
 * @param tree
 */
public void parseIntoPrefuseTree(prefuse.data.Tree tree) {
    TreeNode thisNode;
    Iterator<TreeNode> nodes = iterateFromLeafNodesToRoot();
    // Track IDs.
    LinkedHashMap<TreeNode, prefuse.data.Node> translationTable = new LinkedHashMap<>();
    // Make root first node. Prefuse throws exceptions otherwise.
    prefuse.data.Node prefuseRootNode = tree.addRoot();
    translationTable.put(this, prefuseRootNode);
    // Walk up from the leaves.
    while (nodes.hasNext()) {
        // Get node.
        thisNode = nodes.next();
        // Handle node.
        if (!thisNode.isRoot()) {
            prefuse.data.Node prefuseNode = tree.addNode();
            prefuseNode.set("class", thisNode.Class);
            prefuseNode.set("name", thisNode.Name);
            prefuseNode.set("size", thisNode.Size);
            prefuseNode.set("tooltip", thisNode.getTooltip());
            if (thisNode.Data != null) {
                // Do color. NOTE: Color is a value relative to other data based on magnitude.
                ArrayList<Object> colorValues = thisNode.Data.getValueListByDimensionId("color");
                if (colorValues != null && colorValues.size() > 0) {
                    Object colorValue = colorValues.get(0);
                    if (colorValue instanceof String)
                        prefuseNode.set("color", ((String) colorValue).hashCode());
                    else
                        prefuseNode.set("color", ((Number) colorValue).doubleValue());
                }
                // Do labels.
                ArrayList<Object> labelValues = thisNode.Data.getValueListByDimensionId("label");
                if (labelValues != null && labelValues.size() > 0)
                    prefuseNode.set("label", StringUtils.join(labelValues, ", "));
            }
            translationTable.put(thisNode, prefuseNode);
        }
    }
    // Walk translation table, adding relationships.
    for (Entry<TreeNode, Node> entry : translationTable.entrySet()) {
        TreeNode sourceNode = entry.getKey();
        prefuse.data.Node destinationNode = entry.getValue();
        if (sourceNode.CollapsesInto != null) {
            Node prefuseNode = translationTable.get(sourceNode.CollapsesInto);
            destinationNode.set("collapsesInto", prefuseNode.getRow());
        }
        if (!sourceNode.isRoot()) {
            TreeNode parentSourceNode = sourceNode.getParent();
            prefuse.data.Node parentDestinationNode = translationTable.get(parentSourceNode);
            // Add relationship.
            tree.addChildEdge(parentDestinationNode, destinationNode);
        }
    }
}
From source file:com.fujitsu.dc.core.bar.BarFileReadRunner.java
private boolean execBulkRequest(String cellId, LinkedHashMap<String, BulkRequest> bulkRequests,
        Map<String, String> fileNameMap, DcODataProducer producer) {
    // Bulk-create the entities.
    producer.bulkCreateEntity(producer.getMetadata(), bulkRequests, cellId);
    // Check the result of each request, in registration order.
    for (Entry<String, BulkRequest> request : bulkRequests.entrySet()) {
        // If the request failed, report the error and abort.
        if (request.getValue().getError() != null) {
            if (request.getValue().getError() instanceof DcCoreException) {
                DcCoreException e = ((DcCoreException) request.getValue().getError());
                writeOutputStream(true, "PL-BI-1004", fileNameMap.get(request.getKey()), e.getMessage());
                log.info("DcCoreException: " + e.getMessage());
            } else {
                Exception e = request.getValue().getError();
                String message = DcCoreMessageUtils.getMessage("PL-BI-2003");
                writeOutputStream(true, "PL-BI-1004", fileNameMap.get(request.getKey()), message);
                log.info("Regist Entity Error: " + e.toString());
                log.info("Regist Entity Error: " + e.getClass().getName());
                log.info("Regist Entity Error: " + e);
            }
            return false;
        }
        writeOutputStream(false, "PL-BI-1003", fileNameMap.get(request.getKey()));
    }
    bulkRequests.clear();
    fileNameMap.clear();
    return true;
}
From source file:org.dcm4che.tool.dcmqrscp.DcmQRSCP.java
public Attributes calculateStorageCommitmentResult(String calledAET, Attributes actionInfo)
        throws DicomServiceException {
    Sequence requestSeq = actionInfo.getSequence(Tag.ReferencedSOPSequence);
    int size = requestSeq.size();
    String[] sopIUIDs = new String[size];
    Attributes eventInfo = new Attributes(6);
    eventInfo.setString(Tag.RetrieveAETitle, VR.AE, calledAET);
    eventInfo.setString(Tag.StorageMediaFileSetID, VR.SH, ddReader.getFileSetID());
    eventInfo.setString(Tag.StorageMediaFileSetUID, VR.SH, ddReader.getFileSetUID());
    eventInfo.setString(Tag.TransactionUID, VR.UI, actionInfo.getString(Tag.TransactionUID));
    Sequence successSeq = eventInfo.newSequence(Tag.ReferencedSOPSequence, size);
    Sequence failedSeq = eventInfo.newSequence(Tag.FailedSOPSequence, size);
    LinkedHashMap<String, String> map = new LinkedHashMap<String, String>(size * 4 / 3);
    for (int i = 0; i < sopIUIDs.length; i++) {
        Attributes item = requestSeq.get(i);
        map.put(sopIUIDs[i] = item.getString(Tag.ReferencedSOPInstanceUID),
                item.getString(Tag.ReferencedSOPClassUID));
    }
    DicomDirReader ddr = ddReader;
    try {
        Attributes patRec = ddr.findPatientRecord();
        while (patRec != null) {
            Attributes studyRec = ddr.findStudyRecord(patRec);
            while (studyRec != null) {
                Attributes seriesRec = ddr.findSeriesRecord(studyRec);
                while (seriesRec != null) {
                    Attributes instRec = ddr.findLowerInstanceRecord(seriesRec, true, sopIUIDs);
                    while (instRec != null) {
                        String iuid = instRec.getString(Tag.ReferencedSOPInstanceUIDInFile);
                        String cuid = map.remove(iuid);
                        if (cuid.equals(instRec.getString(Tag.ReferencedSOPClassUIDInFile)))
                            successSeq.add(refSOP(iuid, cuid, Status.Success));
                        else
                            failedSeq.add(refSOP(iuid, cuid, Status.ClassInstanceConflict));
                        instRec = ddr.findNextInstanceRecord(instRec, true, sopIUIDs);
                    }
                    seriesRec = ddr.findNextSeriesRecord(seriesRec);
                }
                studyRec = ddr.findNextStudyRecord(studyRec);
            }
            patRec = ddr.findNextPatientRecord(patRec);
        }
    } catch (IOException e) {
        LOG.info("Failed to M-READ " + dicomDir, e);
        throw new DicomServiceException(Status.ProcessingFailure, e);
    }
    for (Map.Entry<String, String> entry : map.entrySet()) {
        failedSeq.add(refSOP(entry.getKey(), entry.getValue(), Status.NoSuchObjectInstance));
    }
    if (failedSeq.isEmpty())
        eventInfo.remove(Tag.FailedSOPSequence);
    return eventInfo;
}
From source file:net.sf.jabref.sql.importer.DBImporter.java
private void importGroupsTree(MetaData metaData, Map<String, BibEntry> entries, Connection conn,
        final String database_id) throws SQLException {
    Map<String, GroupTreeNode> groups = new HashMap<>();
    LinkedHashMap<GroupTreeNode, String> parentIds = new LinkedHashMap<>();
    GroupTreeNode rootNode = new GroupTreeNode(new AllEntriesGroup());

    try (Statement statement = SQLUtil.queryAllFromTable(conn,
            "groups WHERE database_id='" + database_id + "' ORDER BY groups_id");
            ResultSet rsGroups = statement.getResultSet()) {
        while (rsGroups.next()) {
            AbstractGroup group = null;
            String typeId = findGroupTypeName(rsGroups.getString("group_types_id"), conn);
            if (typeId.equals(AllEntriesGroup.ID)) {
                // register the id of the root node:
                groups.put(rsGroups.getString("groups_id"), rootNode);
            } else if (typeId.equals(ExplicitGroup.ID)) {
                group = new ExplicitGroup(rsGroups.getString("label"),
                        GroupHierarchyType.getByNumber(rsGroups.getInt("hierarchical_context")));
            } else if (typeId.equals(KeywordGroup.ID)) {
                LOGGER.debug("Keyw: " + rsGroups.getBoolean("case_sensitive"));
                group = new KeywordGroup(rsGroups.getString("label"),
                        StringUtil.unquote(rsGroups.getString("search_field"), '\\'),
                        StringUtil.unquote(rsGroups.getString("search_expression"), '\\'),
                        rsGroups.getBoolean("case_sensitive"), rsGroups.getBoolean("reg_exp"),
                        GroupHierarchyType.getByNumber(rsGroups.getInt("hierarchical_context")));
            } else if (typeId.equals(SearchGroup.ID)) {
                LOGGER.debug("Search: " + rsGroups.getBoolean("case_sensitive"));
                group = new SearchGroup(rsGroups.getString("label"),
                        StringUtil.unquote(rsGroups.getString("search_expression"), '\\'),
                        rsGroups.getBoolean("case_sensitive"), rsGroups.getBoolean("reg_exp"),
                        GroupHierarchyType.getByNumber(rsGroups.getInt("hierarchical_context")));
            }
            if (group != null) {
                GroupTreeNode node = new GroupTreeNode(group);
                parentIds.put(node, rsGroups.getString("parent_id"));
                groups.put(rsGroups.getString("groups_id"), node);
            }

            // Ok, we have collected a map of all groups and their parent IDs,
            // and another map of all group IDs and their group nodes.
            // Now we need to build the groups tree:
            for (Map.Entry<GroupTreeNode, String> groupTreeNodeStringEntry : parentIds.entrySet()) {
                String parentId = groupTreeNodeStringEntry.getValue();
                GroupTreeNode parent = groups.get(parentId);
                if (parent == null) {
                    // TODO: missing parent
                } else {
                    parent.add(groupTreeNodeStringEntry.getKey());
                }
            }

            try (Statement entryGroup = SQLUtil.queryAllFromTable(conn, "entry_group");
                    ResultSet rsEntryGroup = entryGroup.getResultSet()) {
                while (rsEntryGroup.next()) {
                    String entryId = rsEntryGroup.getString("entries_id");
                    String groupId = rsEntryGroup.getString("groups_id");
                    GroupTreeNode node = groups.get(groupId);
                    if ((node != null) && (node.getGroup() instanceof ExplicitGroup)) {
                        ExplicitGroup expGroup = (ExplicitGroup) node.getGroup();
                        expGroup.addEntry(entries.get(entryId));
                    }
                }
                rsEntryGroup.getStatement().close();
            }
            metaData.setGroups(rootNode);
        }
        rsGroups.getStatement().close();
    }
}
From source file:org.apache.accumulo.core.file.rfile.bcfile.TFileDumper.java
/**
 * Dump information about TFile.
 *
 * @param file
 *          Path string of the TFile
 * @param out
 *          PrintStream to output the information.
 * @param conf
 *          The configuration object.
 * @throws IOException
 */
static public void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException {
    final int maxKeySampleLen = 16;
    Path path = new Path(file);
    FileSystem fs = path.getFileSystem(conf);
    long length = fs.getFileStatus(path).getLen();
    FSDataInputStream fsdis = fs.open(path);
    TFile.Reader reader = new TFile.Reader(fsdis, length, conf);
    try {
        LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
        int blockCnt = reader.readerBCF.getBlockCount();
        int metaBlkCnt = reader.readerBCF.metaIndex.index.size();
        properties.put("BCFile Version", reader.readerBCF.version.toString());
        properties.put("TFile Version", reader.tfileMeta.version.toString());
        properties.put("File Length", Long.toString(length));
        properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName());
        properties.put("Record Count", Long.toString(reader.getEntryCount()));
        properties.put("Sorted", Boolean.toString(reader.isSorted()));
        if (reader.isSorted()) {
            properties.put("Comparator", reader.getComparatorName());
        }
        properties.put("Data Block Count", Integer.toString(blockCnt));
        long dataSize = 0, dataSizeUncompressed = 0;
        if (blockCnt > 0) {
            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                dataSize += region.getCompressedSize();
                dataSizeUncompressed += region.getRawSize();
            }
            properties.put("Data Block Bytes", Long.toString(dataSize));
            if (reader.readerBCF.getDefaultCompressionName() != "none") {
                properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed));
                properties.put("Data Block Compression Ratio",
                        String.format("1:%.1f", (double) dataSizeUncompressed / dataSize));
            }
        }
        properties.put("Meta Block Count", Integer.toString(metaBlkCnt));
        long metaSize = 0, metaSizeUncompressed = 0;
        if (metaBlkCnt > 0) {
            Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values();
            boolean calculateCompression = false;
            for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) {
                MetaIndexEntry e = it.next();
                metaSize += e.getRegion().getCompressedSize();
                metaSizeUncompressed += e.getRegion().getRawSize();
                if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) {
                    calculateCompression = true;
                }
            }
            properties.put("Meta Block Bytes", Long.toString(metaSize));
            if (calculateCompression) {
                properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed));
                properties.put("Meta Block Compression Ratio",
                        String.format("1:%.1f", (double) metaSizeUncompressed / metaSize));
            }
        }
        properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize));
        long leftOverBytes = length - dataSize - metaSize;
        long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size();
        long metaIndexSize = leftOverBytes - miscSize;
        properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize));
        properties.put("Headers Etc Bytes", Long.toString(miscSize));
        // Now output the properties table.
        int maxKeyLength = 0;
        Set<Map.Entry<String, String>> entrySet = properties.entrySet();
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            if (e.getKey().length() > maxKeyLength) {
                maxKeyLength = e.getKey().length();
            }
        }
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            out.printf("%s : %s%n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue());
        }
        out.println();
        reader.checkTFileDataIndex();
        if (blockCnt > 0) {
            String blkID = "Data-Block";
            int blkIDWidth = Align.calculateWidth(blkID, blockCnt);
            int blkIDWidth2 = Align.calculateWidth("", blockCnt);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10);
            String records = "Records";
            int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10);
            String endKey = "End-Key";
            int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5);
            out.printf("%s %s %s %s %s %s%n", Align.format(blkID, blkIDWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(records, recordsWidth, Align.CENTER),
                    Align.format(endKey, endKeyWidth, Align.LEFT));
            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i);
                out.printf("%s %s %s %s %s ",
                        Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT));
                byte[] key = indexEntry.key;
                boolean asAscii = true;
                int sampleLen = Math.min(maxKeySampleLen, key.length);
                for (int j = 0; j < sampleLen; ++j) {
                    byte b = key[j];
                    if ((b < 32 && b != 9) || (b == 127)) {
                        asAscii = false;
                    }
                }
                if (!asAscii) {
                    out.print("0X");
                    for (int j = 0; j < sampleLen; ++j) {
                        byte b = key[j];
                        out.printf("%X", b);
                    }
                } else {
                    out.print(new String(key, 0, sampleLen));
                }
                if (sampleLen < key.length) {
                    out.print("...");
                }
                out.println();
            }
        }
        out.println();
        if (metaBlkCnt > 0) {
            String name = "Meta-Block";
            int maxNameLen = 0;
            Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index.entrySet();
            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                if (e.getKey().length() > maxNameLen) {
                    maxNameLen = e.getKey().length();
                }
            }
            int nameWidth = Math.max(name.length(), maxNameLen);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10);
            String compression = "Compression";
            int compressionWidth = compression.length();
            out.printf("%s %s %s %s %s%n", Align.format(name, nameWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(compression, compressionWidth, Align.LEFT));
            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                String blkName = e.getValue().getMetaName();
                BlockRegion region = e.getValue().getRegion();
                String blkCompression = e.getValue().getCompressionAlgorithm().getName();
                out.printf("%s %s %s %s %s%n", Align.format(blkName, nameWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(blkCompression, compressionWidth, Align.LEFT));
            }
        }
    } finally {
        IOUtils.cleanup(LOG, reader, fsdis);
    }
}
From source file:org.apache.hadoop.io.file.tfile.TFileDumper.java
/**
 * Dump information about TFile.
 *
 * @param file
 *          Path string of the TFile
 * @param out
 *          PrintStream to output the information.
 * @param conf
 *          The configuration object.
 * @throws IOException
 */
static public void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException {
    final int maxKeySampleLen = 16;
    Path path = new Path(file);
    FileSystem fs = path.getFileSystem(conf);
    long length = fs.getFileStatus(path).getLen();
    FSDataInputStream fsdis = fs.open(path);
    TFile.Reader reader = new TFile.Reader(fsdis, length, conf);
    try {
        LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>();
        int blockCnt = reader.readerBCF.getBlockCount();
        int metaBlkCnt = reader.readerBCF.metaIndex.index.size();
        properties.put("BCFile Version", reader.readerBCF.version.toString());
        properties.put("TFile Version", reader.tfileMeta.version.toString());
        properties.put("File Length", Long.toString(length));
        properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName());
        properties.put("Record Count", Long.toString(reader.getEntryCount()));
        properties.put("Sorted", Boolean.toString(reader.isSorted()));
        if (reader.isSorted()) {
            properties.put("Comparator", reader.getComparatorName());
        }
        properties.put("Data Block Count", Integer.toString(blockCnt));
        long dataSize = 0, dataSizeUncompressed = 0;
        if (blockCnt > 0) {
            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                dataSize += region.getCompressedSize();
                dataSizeUncompressed += region.getRawSize();
            }
            properties.put("Data Block Bytes", Long.toString(dataSize));
            if (reader.readerBCF.getDefaultCompressionName() != "none") {
                properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed));
                properties.put("Data Block Compression Ratio",
                        String.format("1:%.1f", (double) dataSizeUncompressed / dataSize));
            }
        }
        properties.put("Meta Block Count", Integer.toString(metaBlkCnt));
        long metaSize = 0, metaSizeUncompressed = 0;
        if (metaBlkCnt > 0) {
            Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values();
            boolean calculateCompression = false;
            for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) {
                MetaIndexEntry e = it.next();
                metaSize += e.getRegion().getCompressedSize();
                metaSizeUncompressed += e.getRegion().getRawSize();
                if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) {
                    calculateCompression = true;
                }
            }
            properties.put("Meta Block Bytes", Long.toString(metaSize));
            if (calculateCompression) {
                properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed));
                properties.put("Meta Block Compression Ratio",
                        String.format("1:%.1f", (double) metaSizeUncompressed / metaSize));
            }
        }
        properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize));
        long leftOverBytes = length - dataSize - metaSize;
        long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size();
        long metaIndexSize = leftOverBytes - miscSize;
        properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize));
        properties.put("Headers Etc Bytes", Long.toString(miscSize));
        // Now output the properties table.
        int maxKeyLength = 0;
        Set<Map.Entry<String, String>> entrySet = properties.entrySet();
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            if (e.getKey().length() > maxKeyLength) {
                maxKeyLength = e.getKey().length();
            }
        }
        for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) {
            Map.Entry<String, String> e = it.next();
            out.printf("%s : %s\n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue());
        }
        out.println();
        reader.checkTFileDataIndex();
        if (blockCnt > 0) {
            String blkID = "Data-Block";
            int blkIDWidth = Align.calculateWidth(blkID, blockCnt);
            int blkIDWidth2 = Align.calculateWidth("", blockCnt);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10);
            String records = "Records";
            int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10);
            String endKey = "End-Key";
            int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5);
            out.printf("%s %s %s %s %s %s\n", Align.format(blkID, blkIDWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(records, recordsWidth, Align.CENTER),
                    Align.format(endKey, endKeyWidth, Align.LEFT));
            for (int i = 0; i < blockCnt; ++i) {
                BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i);
                TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i);
                out.printf("%s %s %s %s %s ",
                        Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT));
                byte[] key = indexEntry.key;
                boolean asAscii = true;
                int sampleLen = Math.min(maxKeySampleLen, key.length);
                for (int j = 0; j < sampleLen; ++j) {
                    byte b = key[j];
                    if ((b < 32 && b != 9) || (b == 127)) {
                        asAscii = false;
                    }
                }
                if (!asAscii) {
                    out.print("0X");
                    for (int j = 0; j < sampleLen; ++j) {
                        byte b = key[j];
                        out.printf("%X", b);
                    }
                } else {
                    out.print(new String(key, 0, sampleLen));
                }
                if (sampleLen < key.length) {
                    out.print("...");
                }
                out.println();
            }
        }
        out.println();
        if (metaBlkCnt > 0) {
            String name = "Meta-Block";
            int maxNameLen = 0;
            Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index.entrySet();
            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                if (e.getKey().length() > maxNameLen) {
                    maxNameLen = e.getKey().length();
                }
            }
            int nameWidth = Math.max(name.length(), maxNameLen);
            String offset = "Offset";
            int offsetWidth = Align.calculateWidth(offset, length);
            String blkLen = "Length";
            int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10);
            String rawSize = "Raw-Size";
            int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10);
            String compression = "Compression";
            int compressionWidth = compression.length();
            out.printf("%s %s %s %s %s\n", Align.format(name, nameWidth, Align.CENTER),
                    Align.format(offset, offsetWidth, Align.CENTER),
                    Align.format(blkLen, blkLenWidth, Align.CENTER),
                    Align.format(rawSize, rawSizeWidth, Align.CENTER),
                    Align.format(compression, compressionWidth, Align.LEFT));
            for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) {
                Map.Entry<String, MetaIndexEntry> e = it.next();
                String blkName = e.getValue().getMetaName();
                BlockRegion region = e.getValue().getRegion();
                String blkCompression = e.getValue().getCompressionAlgorithm().getName();
                out.printf("%s %s %s %s %s\n", Align.format(blkName, nameWidth, Align.LEFT),
                        Align.format(region.getOffset(), offsetWidth, Align.LEFT),
                        Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT),
                        Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT),
                        Align.format(blkCompression, compressionWidth, Align.LEFT));
            }
        }
    } finally {
        IOUtils.cleanup(LOG, reader, fsdis);
    }
}
From source file:dev.memento.MainActivity.java
@Override
protected Dialog onCreateDialog(int id) {
    Dialog dialog = null;
    AlertDialog.Builder builder = new AlertDialog.Builder(this);

    switch (id) {
    case DIALOG_ERROR:
        builder.setMessage("error message").setCancelable(false).setPositiveButton("OK", null);
        dialog = builder.create();
        break;

    case DIALOG_MEMENTO_YEARS:
        builder.setTitle(R.string.select_year);
        final TreeMap<Integer, Integer> yearCount = mMementos.getAllYears();
        if (Log.LOG)
            Log.d(LOG_TAG, "Dialog: num of years = " + yearCount.size());

        // This shouldn't happen, but just in case
        if (yearCount.size() == 0) {
            showToast("There are no years to choose from... something is wrong.");
            if (Log.LOG)
                Log.d(LOG_TAG, "Num of mementos: " + mMementos.size());
            return null;
        }

        // Build a list that shows how many dates are available for each year
        final CharSequence[] yearText = new CharSequence[yearCount.size()];

        // Parallel arrays used to determine which entry was selected.
        // Could also have used a regular expression.
        final int years[] = new int[yearCount.size()];
        final int count[] = new int[yearCount.size()];

        int selectedYear = -1;
        int displayYear = mDateDisplayed.getYear();
        int i = 0;
        for (Map.Entry<Integer, Integer> entry : yearCount.entrySet()) {
            Integer year = entry.getKey();

            // Select the year of the Memento currently displayed
            if (displayYear == year)
                selectedYear = i;

            years[i] = year;
            count[i] = entry.getValue();
            yearText[i] = Integer.toString(year) + " (" + entry.getValue() + ")";
            i++;
        }

        builder.setSingleChoiceItems(yearText, selectedYear, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                dialog.dismiss();

                mSelectedYear = years[item];
                int numItems = count[item];
                if (numItems > MAX_NUM_MEMENTOS_PER_MONTH)
                    showDialog(DIALOG_MEMENTO_MONTHS);
                else
                    showDialog(DIALOG_MEMENTO_DATES);
            }
        });
        dialog = builder.create();

        // Cause the dialog to be freed whenever it is dismissed.
        // This is necessary because the items are dynamic.
        dialog.setOnDismissListener(new OnDismissListener() {
            @Override
            public void onDismiss(DialogInterface arg0) {
                removeDialog(DIALOG_MEMENTO_YEARS);
            }
        });
        break;

    case DIALOG_MEMENTO_MONTHS:
        builder.setTitle(R.string.select_month);
        final LinkedHashMap<CharSequence, Integer> monthCount = mMementos.getMonthsForYear(mSelectedYear);

        // This shouldn't happen, but just in case
        if (monthCount.size() == 0) {
            showToast("There are no months to choose from... something is wrong.");
            if (Log.LOG)
                Log.d(LOG_TAG, "Num of mementos: " + mMementos.size());
            return null;
        }

        // Build a list that shows how many dates are available for each month
        final CharSequence[] monthText = new CharSequence[monthCount.size()];

        int selectedMonth = mDateDisplayed.getMonth() - 1;
        i = 0;
        for (Map.Entry<CharSequence, Integer> entry : monthCount.entrySet()) {
            CharSequence month = entry.getKey();
            monthText[i] = month + " (" + entry.getValue() + ")";
            i++;
        }

        builder.setSingleChoiceItems(monthText, selectedMonth, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                dialog.dismiss();

                // Pull out month name so we can map it back to a number.
                // This is ugly, but it's necessary because the LinkedHashMap doesn't
                // give back the order of its keys.
                Pattern r = Pattern.compile("^(.+) ");
                Matcher m = r.matcher(monthText[item]);
                if (m.find()) {
                    String month = m.group(1);
                    mSelectedMonth = Utilities.monthStringToInt(month);
                    showDialog(DIALOG_MEMENTO_DATES);
                } else {
                    if (Log.LOG)
                        Log.e(LOG_TAG, "Could not find month in [" + monthText[item] + "]");
                }
            }
        });
        dialog = builder.create();

        // Cause the dialog to be freed whenever it is dismissed.
        // This is necessary because the items are dynamic.
        dialog.setOnDismissListener(new OnDismissListener() {
            @Override
            public void onDismiss(DialogInterface arg0) {
                removeDialog(DIALOG_MEMENTO_MONTHS);
            }
        });
        break;

    case DIALOG_MEMENTO_DATES:
        builder.setTitle(R.string.select_day);

        // Which radio button is selected?
        int selected = -1;

        final CharSequence[] dates;

        if (Log.LOG)
            Log.d(LOG_TAG, "mSelectedMonth = " + mSelectedMonth);
        if (Log.LOG)
            Log.d(LOG_TAG, "mSelectedYear = " + mSelectedYear);

        final Memento[] mementoList;

        // See if there is a month/year filter
        if (mSelectedMonth != -1 || mSelectedYear != -1) {
            if (mSelectedMonth != -1)
                mementoList = mMementos.getByMonthAndYear(mSelectedMonth, mSelectedYear);
            else
                mementoList = mMementos.getByYear(mSelectedYear);

            if (Log.LOG)
                Log.d(LOG_TAG, "Number of dates = " + mementoList.length);

            // Get dates for selected mementos
            dates = new CharSequence[mementoList.length];
            i = 0;
            for (Memento m : mementoList) {
                dates[i] = m.getDateAndTimeFormatted();
                i++;
            }

            // See if any of these items match. This could take a little while if
            // there are a large number of items unfortunately.
            Memento m = mMementos.getCurrent();
            if (m != null) {
                CharSequence searchDate = m.getDateAndTimeFormatted();
                for (i = 0; i < dates.length; i++) {
                    if (searchDate.equals(dates[i])) {
                        selected = i;
                        break;
                    }
                }
            }
        } else {
            // No filter, so get all available mementos
            dates = mMementos.getAllDates();
            if (Log.LOG)
                Log.d(LOG_TAG, "Number of dates = " + dates.length);
            selected = mMementos.getCurrentIndex();
            mementoList = mMementos.toArray(new Memento[0]);
        }

        if (Log.LOG)
            Log.d(LOG_TAG, "Selected index = " + selected);

        // Reset for future selections
        mSelectedYear = -1;
        mSelectedMonth = -1;

        builder.setSingleChoiceItems(dates, selected, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                dialog.dismiss();

                // Display this Memento
                Memento m = mementoList[item];
                mCurrentMemento = m;
                final SimpleDateTime dateSelected = m.getDateTime();
                mDateDisplayed = dateSelected;
                setChosenDate(mDateDisplayed);
                if (Log.LOG)
                    Log.d(LOG_TAG, "User selected Memento with date " + dateSelected.dateFormatted());
                showToast("Time traveling to " + mDateDisplayed.dateFormatted());
                refreshDisplayedDate();

                // Load memento into the browser
                String redirectUrl = m.getUrl();
                surfToUrl(redirectUrl);

                setEnableForNextPrevButtons();
                mNowButton.setEnabled(true);

                // Potentially lengthly operation
                new Thread() {
                    public void run() {
                        int index = mMementos.getIndex(dateSelected);
                        if (index == -1) {
                            // This should never happen
                            if (Log.LOG)
                                Log.e(LOG_TAG, "!! Couldn't find " + dateSelected + " in the memento list!");
                        } else
                            mMementos.setCurrentIndex(index);
                    }
                }.start();
            }
        });
        dialog = builder.create();

        // Cause the dialog to be freed whenever it is dismissed.
        // This is necessary because the items are dynamic. I couldn't find
        // a better way to solve this problem.
        dialog.setOnDismissListener(new OnDismissListener() {
            @Override
            public void onDismiss(DialogInterface arg0) {
                removeDialog(DIALOG_MEMENTO_DATES);
            }
        });
        break;
    }

    return dialog;
}