List of usage examples for java.io PrintStream printf
public PrintStream printf(Locale l, String format, Object... args)
From source file:org.ncic.bioinfo.sparkseq.algorithms.utils.reports.GATKReportTable.java
/**
 * Write the table to the PrintStream, formatted nicely to be human-readable, AWK-able, and R-friendly.
 *
 * @param out the PrintStream to which the table should be written
 */
void write(final PrintStream out) {
    /*
     * Table header:
     * #:GATKTable:nColumns:nRows:(DataType for each column):;
     * #:GATKTable:TableName:Description :;
     * key colA colB
     * row1 xxxx xxxxx
     */
    // write the table definition
    out.printf(GATKTABLE_HEADER_PREFIX + ":%d:%d", getNumColumns(), getNumRows());
    // write the formats for all the columns
    for (final GATKReportColumn column : columnInfo)
        out.print(SEPARATOR + column.getFormat());
    out.println(ENDLINE);
    // write the table name & description
    out.printf(GATKTABLE_HEADER_PREFIX + ":%s:%s\n", tableName, tableDescription);
    // write the column names; padding separates adjacent column headers
    boolean needsPadding = false;
    for (final GATKReportColumn column : columnInfo) {
        if (needsPadding)
            out.printf("  ");
        needsPadding = true;
        out.printf(column.getColumnFormat().getNameFormat(), column.getColumnName());
    }
    out.println();
    // write the table body, ordered according to the table's sorting mode
    switch (sortingWay) {
    case SORT_BY_COLUMN:
        // Sort rows lexicographically column-by-column, comparing each cell by its
        // runtime type (Integer, Double, else String form).
        Collections.sort(underlyingData, new Comparator<Object[]>() {
            //INVARIANT the two arrays are of the same length and corresponding elements are of the same type
            @Override
            public int compare(Object[] objectArr1, Object[] objectArr2) {
                final int EQUAL = 0;
                int result = EQUAL;
                int l = objectArr1.length;
                for (int x = 0; x < l; x++) {
                    if (objectArr1[x] instanceof Integer) {
                        result = ((Integer) objectArr1[x]).compareTo((Integer) objectArr2[x]);
                    } else if (objectArr1[x] instanceof Double) {
                        result = ((Double) objectArr1[x]).compareTo((Double) objectArr2[x]);
                    } else {
                        // default uses String comparison
                        result = objectArr1[x].toString().compareTo(objectArr2[x].toString());
                    }
                    if (result != EQUAL) {
                        return result;
                    }
                }
                // all cells equal: rows compare equal
                return result;
            }
        });
        for (final Object[] row : underlyingData)
            writeRow(out, row);
        break;
    case SORT_BY_ROW:
        // make sure that there are exactly the correct number of ID mappings
        if (rowIdToIndex.size() != underlyingData.size())
            throw new ReviewedGATKException(
                    "There isn't a 1-to-1 mapping from row ID to index; this can happen when rows are not created consistently");
        // TreeMap sorts by row ID; a ClassCastException means IDs of mixed types
        final TreeMap<Object, Integer> sortedMap;
        try {
            sortedMap = new TreeMap<Object, Integer>(rowIdToIndex);
        } catch (ClassCastException e) {
            throw new ReviewedGATKException(
                    "Unable to sort the rows based on the row IDs because the ID Objects are of different types");
        }
        for (final Map.Entry<Object, Integer> rowKey : sortedMap.entrySet())
            writeRow(out, underlyingData.get(rowKey.getValue()));
        break;
    case DO_NOT_SORT:
        // emit rows in insertion order
        for (final Object[] row : underlyingData)
            writeRow(out, row);
    }
    out.println();
}
From source file:name.livitski.databag.cli.Launcher.java
protected void listFilters(ReplicaInfo replica) throws DBException { PrintStream out = getOutputStream(); FilterFactory factory = getFilterFactory(); Number rid = null == replica ? null : replica.getId(); String defaultName = factory.defaultFilter(rid).getName(); for (String name : factory.listFilterNames()) out.printf("%1$c %2$-77s%n", defaultName.equals(name) ? '*' : ' ', name); }
From source file:com.genentech.struchk.oeStruchk.OEStruchk.java
private void printRules(PrintStream out) { out.println("<html><head><base href='http://research/'/><body>"); out.println("<h1>Genentech Structure Normalization Rules</h1>"); out.println("To regenerate this documentation run: 'ant javaDoc'."); out.println("<table border='1'><tr><th>Name</th><th>Description</th></tr>\n"); // print rules that are not implemented as checker because they work on the // molfile string if (checkForAtomLabelDesc != null) out.printf("<tr><td>%s</td><td>%s</td></tr>\n", "atomLabelCheck", checkForAtomLabelDesc); if (checkForThickBondDesc != null) out.printf("<tr><td>%s</td><td>%s</td></tr>\n", "thickBondCheck", checkForThickBondDesc); // if( checkForWigglyBondDesc != null ) // out.printf("<tr><td>%s</td><td>%s</td></tr>\n", // "wigglyBondCheck", checkForWigglyBondDesc ); ////from ww w . ja v a 2 s . c o m // print all other rules for (StructureCheckInterface r : rules) { out.printf("<tr><td>%s</td><td>%s</td></tr>\n", r.getCheckName(), r.getDescriptionHTML()); } out.println("</table></body></html>"); }
From source file:edu.umn.cs.spatialHadoop.indexing.RTree.java
public void toWKT(PrintStream out) throws IOException { out.println("NodeID\tBoundaries"); for (int nodeID = 0; nodeID < this.nodeCount; nodeID++) { out.printf("%d\t%s\n", nodeID, nodes[nodeID].toWKT()); }// w ww.ja v a 2s . c o m }
From source file:org.csanchez.jenkins.plugins.kubernetes.KubernetesLauncher.java
/**
 * Launches a Jenkins agent in Kubernetes: creates the pod from the slave's
 * template, polls until the pod is scheduled and all containers are ready,
 * then polls until the agent computer reports online. On any failure the
 * slave node is terminated and the original exception is rethrown.
 *
 * NOTE(review): LOGGER appears to be a java.util.logging Logger
 * (Level.FINE/INFO usage); its log(Level, msg, params) substitutes
 * MessageFormat-style {0} placeholders, so the "{}" in the
 * "already been launched" message below will likely be printed verbatim
 * rather than substituted — confirm and fix upstream.
 */
@Override
public void launch(SlaveComputer computer, TaskListener listener) {
    PrintStream logger = listener.getLogger();
    if (!(computer instanceof KubernetesComputer)) {
        throw new IllegalArgumentException("This Launcher can be used only with KubernetesComputer");
    }
    KubernetesComputer kubernetesComputer = (KubernetesComputer) computer;
    // stop accepting tasks until the agent is actually connected
    computer.setAcceptingTasks(false);
    KubernetesSlave slave = kubernetesComputer.getNode();
    if (slave == null) {
        throw new IllegalStateException("Node has been removed, cannot launch " + computer.getName());
    }
    if (launched) {
        // already provisioned earlier (e.g. after a restart): just re-enable tasks
        LOGGER.log(INFO, "Agent has already been launched, activating: {}", slave.getNodeName());
        computer.setAcceptingTasks(true);
        return;
    }
    KubernetesCloud cloud = slave.getKubernetesCloud();
    final PodTemplate unwrappedTemplate = slave.getTemplate();
    try {
        KubernetesClient client = cloud.connect();
        Pod pod = getPodTemplate(client, slave, unwrappedTemplate);
        String podId = pod.getMetadata().getName();
        // fall back to the client's namespace when the slave has none configured
        String namespace = StringUtils.defaultIfBlank(slave.getNamespace(), client.getNamespace());
        LOGGER.log(Level.FINE, "Creating Pod: {0} in namespace {1}", new Object[] { podId, namespace });
        pod = client.pods().inNamespace(namespace).create(pod);
        LOGGER.log(INFO, "Created Pod: {0} in namespace {1}", new Object[] { podId, namespace });
        logger.printf("Created Pod: %s in namespace %s%n", podId, namespace);
        // We need the pod to be running and connected before returning
        // otherwise this method keeps being called multiple times
        List<String> validStates = ImmutableList.of("Running");
        int i = 0;
        int j = 100; // wait 600 seconds (100 polls x 6s sleep below)
        List<ContainerStatus> containerStatuses = null;
        // wait for Pod to be running
        for (; i < j; i++) {
            LOGGER.log(INFO, "Waiting for Pod to be scheduled ({1}/{2}): {0}", new Object[] { podId, i, j });
            logger.printf("Waiting for Pod to be scheduled (%2$s/%3$s): %1$s%n", podId, i, j);
            Thread.sleep(6000);
            pod = client.pods().inNamespace(namespace).withName(podId).get();
            if (pod == null) {
                throw new IllegalStateException("Pod no longer exists: " + podId);
            }
            containerStatuses = pod.getStatus().getContainerStatuses();
            List<ContainerStatus> terminatedContainers = new ArrayList<>();
            Boolean allContainersAreReady = true;
            for (ContainerStatus info : containerStatuses) {
                if (info != null) {
                    if (info.getState().getWaiting() != null) {
                        // Pod is waiting for some reason
                        LOGGER.log(INFO, "Container is waiting {0} [{2}]: {1}",
                                new Object[] { podId, info.getState().getWaiting(), info.getName() });
                        logger.printf("Container is waiting %1$s [%3$s]: %2$s%n", podId,
                                info.getState().getWaiting(), info.getName());
                        // break;
                    }
                    if (info.getState().getTerminated() != null) {
                        terminatedContainers.add(info);
                    } else if (!info.getReady()) {
                        allContainersAreReady = false;
                    }
                }
            }
            if (!terminatedContainers.isEmpty()) {
                // collect container name -> exit code for the error message
                Map<String, Integer> errors = terminatedContainers.stream().collect(Collectors.toMap(
                        ContainerStatus::getName, (info) -> info.getState().getTerminated().getExitCode()));
                // Print the last lines of failed containers
                logLastLines(terminatedContainers, podId, namespace, slave, errors, client);
                throw new IllegalStateException("Containers are terminated with exit codes: " + errors);
            }
            if (!allContainersAreReady) {
                continue;
            }
            if (validStates.contains(pod.getStatus().getPhase())) {
                break;
            }
        }
        String status = pod.getStatus().getPhase();
        if (!validStates.contains(status)) {
            throw new IllegalStateException(
                    "Container is not running after " + j + " attempts, status: " + status);
        }
        // reuse loop counter i: remaining budget is the template's connect timeout
        j = unwrappedTemplate.getSlaveConnectTimeout();
        // now wait for agent to be online
        for (; i < j; i++) {
            if (slave.getComputer() == null) {
                throw new IllegalStateException("Node was deleted, computer is null");
            }
            if (slave.getComputer().isOnline()) {
                break;
            }
            LOGGER.log(INFO, "Waiting for agent to connect ({1}/{2}): {0}", new Object[] { podId, i, j });
            logger.printf("Waiting for agent to connect (%2$s/%3$s): %1$s%n", podId, i, j);
            Thread.sleep(1000);
        }
        if (!slave.getComputer().isOnline()) {
            if (containerStatuses != null) {
                logLastLines(containerStatuses, podId, namespace, slave, null, client);
            }
            throw new IllegalStateException(
                    "Agent is not connected after " + j + " attempts, status: " + status);
        }
        computer.setAcceptingTasks(true);
    } catch (Throwable ex) {
        LOGGER.log(Level.WARNING,
                String.format("Error in provisioning; agent=%s, template=%s", slave, unwrappedTemplate), ex);
        LOGGER.log(Level.FINER, "Removing Jenkins node: {0}", slave.getNodeName());
        try {
            slave.terminate();
        } catch (IOException | InterruptedException e) {
            LOGGER.log(Level.WARNING, "Unable to remove Jenkins node", e);
        }
        throw Throwables.propagate(ex);
    }
    launched = true;
    try {
        // We need to persist the "launched" setting...
        slave.save();
    } catch (IOException e) {
        LOGGER.log(Level.WARNING, "Could not save() agent: " + e.getMessage(), e);
    }
}
From source file:org.apache.hadoop.zebra.io.ColumnGroup.java
static public void dumpInfo(Path path, PrintStream out, Configuration conf, int indent) throws IOException, Exception { // final int maxKeySampleLen = 16; IOutils.indent(out, indent);/*from w w w . j av a 2s .co m*/ out.println(); IOutils.indent(out, indent); out.println("Column Group : " + path); ColumnGroup.Reader reader = new ColumnGroup.Reader(path, false, conf); try { LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>(); IOutils.indent(out, indent); out.println("Name: " + reader.getName()); IOutils.indent(out, indent); out.println("Serializer: " + reader.getSerializer()); IOutils.indent(out, indent); out.println("Compressor: " + reader.getCompressor()); IOutils.indent(out, indent); out.println("Group: " + reader.getGroup()); IOutils.indent(out, indent); out.println("Perm: " + reader.getPerm()); properties.put("Schema", reader.getSchema().toString()); // Now output the properties table. int maxKeyLength = 0; Set<Map.Entry<String, String>> entrySet = properties.entrySet(); for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) { Map.Entry<String, String> e = it.next(); if (e.getKey().length() > maxKeyLength) { maxKeyLength = e.getKey().length(); } } for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) { Map.Entry<String, String> e = it.next(); IOutils.indent(out, indent); out.printf("%s : %s\n", e.getKey(), e.getValue()); } out.println("TFiles within the Column Group :"); if (reader.cgindex == null) reader.cgindex = buildIndex(reader.fs, reader.path, reader.dirty, conf); for (CGIndexEntry entry : reader.cgindex.index) { IOutils.indent(out, indent); out.printf(" *Name : %s\n", entry.name); IOutils.indent(out, indent); out.printf(" Rows : %d\n", entry.rows); if (entry.firstKey != null) { IOutils.indent(out, indent); out.printf(" First Key : %s\n", headToString(entry.firstKey)); } if (entry.lastKey != null) { IOutils.indent(out, indent); out.printf(" Larst Key : %s\n", 
headToString(entry.lastKey)); } // dump TFile info // Path pathTFile = new Path(path, entry.name); // TFile.dumpInfo(pathTFile.toString(), out, conf); } } finally { try { reader.close(); } catch (Exception e) { // no-op } } }
From source file:com.lmco.ddf.commands.catalog.RemoveAllCommand.java
/**
 * Removes all metacards from the catalog in batches of {@code batchSize},
 * printing progress to stdout. Always returns null (shell-command
 * convention: output goes to the console, not the return value).
 *
 * Flow: validate batch size, confirm with the user, issue a first query
 * (falling back to an "alternate" query form if the catalog rejects the
 * intended one), then loop: collect IDs from the page, delete them,
 * re-query, until a short page signals the end.
 */
@Override
protected Object doExecute() throws Exception {
    PrintStream console = System.out;
    // refuse batch sizes below the supported page-size floor
    if (batchSize < PAGE_SIZE_LOWER_LIMIT) {
        printColor(console, Ansi.Color.RED, String.format(BATCH_SIZE_ERROR_MESSAGE_FORMAT, batchSize));
        return null;
    }
    CatalogFacade catalog = this.getCatalog();
    // interactive guard against accidental mass deletion
    if (isAccidentalRemoval(console)) {
        return null;
    }
    FilterBuilder filterBuilder = getFilterBuilder();
    QueryRequest firstQuery = getIntendedQuery(filterBuilder, batchSize, expired, true);
    QueryRequest subsequentQuery = getIntendedQuery(filterBuilder, batchSize, expired, false);
    long totalAmountDeleted = 0;
    long start = System.currentTimeMillis();
    SourceResponse response = null;
    try {
        response = catalog.query(firstQuery);
    } catch (UnsupportedQueryException e) {
        // catalog does not support the intended query: switch both queries to the alternate form
        firstQuery = getAlternateQuery(filterBuilder, batchSize, expired, true);
        subsequentQuery = getAlternateQuery(filterBuilder, batchSize, expired, false);
        response = catalog.query(firstQuery);
    }
    if (response == null) {
        printColor(console, Ansi.Color.RED, "No response from Catalog.");
        return null;
    }
    // some catalogs answer but with a response that still requires the alternate form
    if (needsAlternateQueryAndResponse(response)) {
        firstQuery = getAlternateQuery(filterBuilder, batchSize, expired, true);
        subsequentQuery = getAlternateQuery(filterBuilder, batchSize, expired, false);
        response = catalog.query(firstQuery);
    }
    String totalAmount = getTotalAmount(response.getHits());
    while (response.getResults().size() > 0) {
        List<String> ids = new ArrayList<String>();
        // Add metacard ids to string array
        for (Result result : response.getResults()) {
            if (result != null && result.getMetacard() != null) {
                Metacard metacard = result.getMetacard();
                ids.add(metacard.getId());
            }
        }
        // Delete the records
        DeleteRequestImpl request = new DeleteRequestImpl(ids.toArray(new String[ids.size()]));
        DeleteResponse deleteResponse = catalog.delete(request);
        int amountDeleted = deleteResponse.getDeletedMetacards().size();
        totalAmountDeleted += amountDeleted;
        console.print(String.format(PROGRESS_FORMAT, totalAmountDeleted, totalAmount));
        console.flush();
        // Break out if there are no more records to delete
        if (amountDeleted < batchSize || batchSize < 1) {
            break;
        }
        // Re-query when necessary
        response = catalog.query(subsequentQuery);
    }
    long end = System.currentTimeMillis();
    console.println();
    console.printf(" %d file(s) removed in %3.3f seconds%n", totalAmountDeleted,
            (end - start) / MILLISECONDS_PER_SECOND);
    return null;
}
From source file:org.apache.slider.client.SliderClient.java
/** * list configs available for an instance */*from w ww. j a v a 2 s . c o m*/ * @param registryArgs registry Arguments * @throws YarnException YARN problems * @throws IOException Network or other problems */ public void actionRegistryListConfigsYarn(ActionRegistryArgs registryArgs) throws YarnException, IOException { ServiceRecord instance = lookupServiceRecord(registryArgs); RegistryRetriever retriever = new RegistryRetriever(instance); PublishedConfigSet configurations = retriever.getConfigurations(!registryArgs.internal); PrintStream out = null; try { if (registryArgs.out != null) { out = new PrintStream(new FileOutputStream(registryArgs.out)); } else { out = System.out; } for (String configName : configurations.keys()) { if (!registryArgs.verbose) { out.println(configName); } else { PublishedConfiguration published = configurations.get(configName); out.printf("%s: %s\n", configName, published.description); } } } finally { if (registryArgs.out != null && out != null) { out.flush(); out.close(); } } }
From source file:gov.nasa.ensemble.dictionary.nddl.ParseInterpreter.java
/**
 * Function that writes out the passive compatibilities into the model file
 * specified by the given output stream and based on a given Activity
 * Dictionary that has already been parsed.
 *
 * Emits NDDL text in three parts: (1) the InitialConds::incon compat that
 * initializes every claim/share/state resource (including negated state
 * values), (2) a passive compat per dynamic object definition, and (3) one
 * compat per activity definition covering its start transitions followed by
 * its end transitions.
 *
 * NOTE(review): the exact whitespace inside the emitted string literals is
 * part of the generated model's formatting; do not reflow those literals.
 *
 * @param oStrm stream receiving the generated NDDL text
 */
public void writePassiveCompats(OutputStream oStrm) {
    PrintStream out = new PrintStream(oStrm);
    String startVar;
    String endVar;
    String qVar;
    String stateName;
    String state;
    String atStartValue;
    String atEndValue;
    String claimName;
    @SuppressWarnings("unused")
    String objrefName;
    String shareName;
    List<String> allowedValues;
    // first handle the incon activity, then all others
    out.print("InitialConds::incon {\n" + " if (scheduled == true) {\n"
            + " if (Enable_Passive_Checking == true) {\n" + " eq(inconStart, start);\n\n");
    // PHM 05/10/2011 Declare the negated states as locals if they exist
    for (String stat : stateNames) {
        Set<String> stateNotValues = stateNotValuesMap.get(stat);
        if (stateNotValues != null && stateNotValues.size() > 0) {
            for (String val : stateNotValues) {
                out.printf(" float \t _not_%s_%s;\n", NDDLUtil.escape(stat), NDDLUtil.escape(val));
            }
        }
    }
    // PHM 05/10/2011 Set the negated state values
    for (String stat : stateNames) {
        Set<String> stateNotValues = stateNotValuesMap.get(stat);
        if (stateNotValues != null && stateNotValues.size() > 0) {
            for (String val : stateNotValues) {
                out.printf(" sum(_not_%s_%s, _%s_%s, STATE_COND_TRUE);\n", NDDLUtil.escape(stat),
                        NDDLUtil.escape(val), NDDLUtil.escape(stat), NDDLUtil.escape(val));
            }
        }
    }
    // initialize each claim resource to its model-level quantity
    for (String claim : claimNames) {
        claimName = NDDLUtil.escape(claim);
        startVar = NDDLUtil.escape("i" + varN++);
        out.printf("\n" + "\t starts(%s.produce %s);\n" + "\t eq(%s.quantity, _%s);\n", claimName, startVar,
                startVar, claimName);
    }
    // initialize each shared resource likewise
    for (String share : shareNames) {
        startVar = NDDLUtil.escape("i" + varN++);
        shareName = NDDLUtil.escape(share);
        out.printf("\n" + "\t starts(%s.produce %s);\n" + "\t eq(%s.quantity, _%s);\n", shareName, startVar,
                startVar, shareName);
    }
    // note that we use state-value pairs
    // for each state, one and only one value should be TRUE
    for (String resource : stateNames) {
        List<String> stateValues = stateValuesMap.get(resource);
        if (stateValues != null && stateValues.size() > 0) {
            for (String val : stateValues) {
                String resourceName = NDDLUtil.escape(resource + "_" + val);
                startVar = NDDLUtil.escape("i" + varN++);
                out.printf("\n" + "\t starts(%s.produce %s);\n" + "\t eq(%s.quantity, _%s);\n", resourceName,
                        startVar, startVar, resourceName);
            }
        }
        // add in the negated values if they exist
        Set<String> stateNotValues = stateNotValuesMap.get(resource);
        if (stateNotValues != null && stateNotValues.size() > 0) {
            for (String val : stateNotValues) {
                String resourceName = NDDLUtil.escape("not_" + resource + "_" + val);
                startVar = NDDLUtil.escape("i" + varN++);
                out.printf("\n" + "\t starts(%s.produce %s);\n" + "\t eq(%s.quantity, _%s);\n", resourceName,
                        startVar, startVar, resourceName);
            }
        }
    }
    out.print(" }\n }\n}\n\n");
    // handle passive compat for each dynamic object claim
    for (String objdef : objectDefNames) {
        startVar = "o" + varN++;
        endVar = "o" + varN++;
        qVar = "q" + varN++;
        out.printf(
                "\n%s::Assign_%s {\n" + " if (isSingleton(object)) {\n" + " if (scheduled == true) {\n"
                        + " if (Enable_Passive_Checking == true) {\n"
                        + " condleq(afterIncon, inconStart, start);\n" + " float %s;\n"
                        + " eq(%s, afterIncon);\n" + " starts(object.passive.consume %s);\n"
                        + " eq(%s.quantity, %s);\n\n" + " ends(object.passive.produce %s);\n"
                        + " eq(%s.quantity, 1.0);\n\n }\n }\n }\n}\n\n",
                objdef, objdef, qVar, qVar, startVar, startVar, qVar, endVar, endVar);
    }
    // Due to the incon guard, have to handle the start transitions and end
    // transitions separately
    // ** Since we've eliminated the incon guard, this is no longer necessary,
    // but code
    for (EActivityDef activityDef : activityDefs) {
        // only emit a compat for activities with at least one passive effect/requirement
        if (!activityDef.getClaimableEffects().isEmpty() || !activityDef.getSharedEffects().isEmpty()
                || !activityDef.getStateRequirements().isEmpty() || !activityDef.getStateEffects().isEmpty()) {
            out.printf(
                    "%s::%s {\n" + " if (scheduled == true) {\n" + " if (Enable_Passive_Checking == true) {\n\n",
                    activitySubsystemMap.get(NDDLUtil.escape(activityDef.getName())),
                    NDDLUtil.escape(activityDef.getName()));
            // first process only the start transitions for the activity
            // handle claims
            for (EClaimableEffect claim : activityDef.getClaimableEffects()) {
                claimName = NDDLUtil.escape(claim.getName());
                if (claimNames.contains(claimName)) {
                    startVar = "c" + varN++;
                    qVar = "q" + varN++;
                    out.printf(
                            "\n" + " condleq(afterIncon, inconStart, start);\n" + " float %s;\n"
                                    + " eq(%s, afterIncon);\n" + " starts(%s.consume %s);\n"
                                    + " eq(%s.quantity, %s);\n",
                            qVar, qVar, claimName, startVar, startVar, qVar);
                } else {
                    System.err.print("\n* Undefined claim " + claimName + " in activity "
                            + NDDLUtil.escape(activityDef.getName()) + " *\n\n");
                }
            }
            // handle shared reservations
            for (ESharableResourceEffect share : activityDef.getSharedEffects()) {
                shareName = NDDLUtil.escape(share.getName());
                if (shareNames.contains(shareName)) {
                    startVar = "r" + varN++;
                    qVar = "q" + varN++;
                    int reservations = share.getReservations();
                    if (reservations > 0) {
                        out.printf("\n" + " condleq(afterIncon, inconStart, start);\n" + " float %s;\n"
                                + " product(%s, %d, afterIncon);\n" + " starts(%s.consume %s);\n"
                                + " eq(%s.quantity, %s);\n", qVar, qVar, reservations, shareName, startVar,
                                startVar, qVar);
                    }
                } else {
                    System.err.print("\n* Undefined share " + shareName + " in activity "
                            + NDDLUtil.escape(activityDef.getName()) + " *\n\n");
                }
            }
            // handle state requirements
            for (EStateRequirement stateReq : activityDef.getStateRequirements()) {
                // period = 0 means RequiresThroughout; period = 1 means
                // RequiresBeforeStart
                // we only handle RequiresThroughout
                state = NDDLUtil.escape(stateReq.getName());
                if (stateNames.contains(state)) {
                    if (stateReq.getPeriod() == Period.REQUIRES_THROUGHOUT) {
                        // For requirements, Enum and Threshold states are no longer
                        // handled identically due to negation and disjunction
                        if (stateTypesMap.get(state).equals("Enum")) {
                            if (stateReq.getRequiredState() != null) {
                                stateName = NDDLUtil.escape(state + "_" + stateReq.getRequiredState());
                                startVar = "r" + varN++;
                                qVar = "q" + varN++;
                                out.printf(
                                        "\n" + " condleq(afterIncon, inconStart, start);\n" + " float %s;\n"
                                                + " eq(%s, afterIncon);\n" + " starts(%s.consume %s);\n"
                                                + " eq(%s.quantity, %s);\n",
                                        qVar, qVar, stateName, startVar, startVar, qVar);
                            } else if (stateReq.getDisallowedState() != null) {
                                // disallowed value: consume the negated state resource
                                stateName = NDDLUtil.escape("not_" + state + "_" + stateReq.getDisallowedState());
                                startVar = "r" + varN++;
                                qVar = "q" + varN++;
                                out.printf(
                                        "\n" + " condleq(afterIncon, inconStart, start);\n" + " float %s;\n"
                                                + " eq(%s, afterIncon);\n" + " starts(%s.consume %s);\n"
                                                + " eq(%s.quantity, %s);\n",
                                        qVar, qVar, stateName, startVar, startVar, qVar);
                            } else if (stateReq.getAllowedStates() != null
                                    && stateReq.getAllowedStates().size() > 0) {
                                // allowed-set: require NOT of every value outside the set
                                allowedValues = stateReq.getAllowedStates();
                                for (String val : stateValuesMap.get(state)) {
                                    if (!allowedValues.contains(val)) {
                                        stateName = NDDLUtil.escape("not_" + state + "_" + val);
                                        startVar = "r" + varN++;
                                        qVar = "q" + varN++;
                                        out.printf("\n" + " condleq(afterIncon, inconStart, start);\n"
                                                + " float %s;\n" + " eq(%s, afterIncon);\n"
                                                + " starts(%s.consume %s);\n" + " eq(%s.quantity, %s);\n", qVar,
                                                qVar, stateName, startVar, startVar, qVar);
                                    }
                                }
                            } else {
                                System.err.print(
                                        "*Required resource " + state + " did not have a value specified*\n\n");
                            }
                        } else if (stateTypesMap.get(state).equals("Threshold")) {
                            stateName = NDDLUtil.escape(state + "_" + stateReq.getRequiredState());
                            startVar = "r" + varN++;
                            qVar = "q" + varN++;
                            out.printf(
                                    "\n" + " condleq(afterIncon, inconStart, start);\n" + " float %s;\n"
                                            + " eq(%s, afterIncon);\n" + " starts(%s.consume %s);\n"
                                            + " eq(%s.quantity, %s);\n",
                                    qVar, qVar, stateName, startVar, startVar, qVar);
                        } else {
                            System.err.print("*Required resource " + state
                                    + " is not of type Enum nor ThresholdEnum*\n\n");
                        }
                    }
                } else {
                    System.err.print("\n* Undefined state " + state + " in activity "
                            + NDDLUtil.escape(activityDef.getName()) + " *\n\n");
                }
            }
            // handle state effects
            for (EStateResourceEffect<?> effect : activityDef.getStateEffects()) {
                state = NDDLUtil.escape(effect.getName());
                atStartValue = effect.getStartEffect();
                // for effects, Enum and Threshold states are handled
                // differently
                String stateTypeName = stateTypesMap.get(state);
                if (stateTypeName != null && stateTypeName.equals("Enum")) {
                    if (atStartValue != null) {
                        writeEnumStateEffectSection(out, state, atStartValue, "starts");
                    }
                } else if (stateTypeName != null && stateTypeName.equals("Threshold")) {
                    // we assume that there is an atStart value
                    // and that atEnd we retract the effect
                    // make all LOWER values False at start and True at
                    // end
                    for (String val : stateValuesMap.get(state)) {
                        if (val.equals(atStartValue)) {
                            break;
                        }
                        stateName = state + "_" + val;
                        startVar = "s" + varN++;
                        qVar = "q" + varN++;
                        out.printf("\n" + " condleq(afterIncon, inconStart, start);\n" + " float %s;\n"
                                + " product(%s, STATE_COND_FALSE, afterIncon);\n" + " starts(%s.consume %s);\n"
                                + " eq(%s.quantity, %s);\n", qVar, qVar, stateName, startVar, startVar, qVar);
                        out.println();
                    }
                }
            }
            // now process the end transitions for the activity
            // handle claims
            for (EClaimableEffect claim : activityDef.getClaimableEffects()) {
                claimName = NDDLUtil.escape(claim.getName());
                if (claimNames.contains(claimName)) {
                    endVar = "c" + varN++;
                    out.printf("\n\t\tends(%s.produce %s);\n" + "\t\teq(%s.quantity, 1.0);\n",
                            NDDLUtil.escape(claimName), NDDLUtil.escape(endVar), NDDLUtil.escape(endVar));
                } else {
                    System.err.print("\n* Undefined claim " + claimName + " in activity "
                            + NDDLUtil.escape(activityDef.getName()) + " *\n\n");
                }
            }
            // handle shared reservations
            for (ESharableResourceEffect share : activityDef.getSharedEffects()) {
                shareName = NDDLUtil.escape(share.getName());
                if (shareNames.contains(shareName)) {
                    endVar = "r" + varN++;
                    int reservations = share.getReservations();
                    if (reservations > 0) {
                        out.printf("\n\t\tends(%s.produce %s);\n" + "\t\teq(%s.quantity, %d);\n",
                                NDDLUtil.escape(shareName), NDDLUtil.escape(endVar), NDDLUtil.escape(endVar),
                                reservations);
                    }
                } else {
                    System.err.print("\n* Undefined share " + shareName + " in activity "
                            + NDDLUtil.escape(activityDef.getName()) + " *\n\n");
                }
            }
            // handle state requirements
            for (EStateRequirement stateReq : activityDef.getStateRequirements()) {
                // period = 0 means RequiresThroughout; period = 1 means
                // RequiresBeforeStart
                // we only handle RequiresThroughout
                state = NDDLUtil.escape(stateReq.getName());
                if (stateNames.contains(state)) {
                    if (stateReq.getPeriod() == Period.REQUIRES_THROUGHOUT) {
                        // For requirements, Enum and Threshold states
                        // are no longer handled identically due to negation and disjunction
                        if (stateTypesMap.get(state).equals("Enum")) {
                            if (stateReq.getRequiredState() != null) {
                                stateName = state + "_" + stateReq.getRequiredState();
                                endVar = NDDLUtil.escape("r" + varN++);
                                out.printf("\n\t\tends(%s.produce %s);\n" + "\t\teq(%s.quantity, 1.0);\n",
                                        NDDLUtil.escape(stateName), endVar, endVar);
                                // if stateName is the CPUwindow predicate, then
                                // add the Window capability
                                if (stateName.equals(CPUwindow)) {
                                    startVar = NDDLUtil.escape("w" + varN++);
                                    out.printf(
                                            "\n\t\tany(CPU_Windows.produce %s);\n" + "\t\teq(%s.quantity, 1.0);\n"
                                                    + "\t\ttemporalDistance(%s.time, CPU_BOOT_DUR, start);\n",
                                            startVar, startVar, startVar);
                                    endVar = NDDLUtil.escape("w" + varN++);
                                    out.printf(
                                            "\t\tany(CPU_Windows.consume %s);\n" + "\t\teq(%s.quantity, 1.0);\n"
                                                    + "\t\ttemporalDistance(end, POST_CPU_WINDOW, %s.time);\n",
                                            endVar, endVar, endVar);
                                }
                            } else if (stateReq.getDisallowedState() != null) {
                                stateName = "not_" + state + "_" + stateReq.getDisallowedState();
                                endVar = NDDLUtil.escape("r" + varN++);
                                out.printf("\n\t\tends(%s.produce %s);\n" + "\t\teq(%s.quantity, 1.0);\n",
                                        NDDLUtil.escape(stateName), endVar, endVar);
                            } else if (stateReq.getAllowedStates() != null
                                    && stateReq.getAllowedStates().size() > 0) {
                                allowedValues = stateReq.getAllowedStates();
                                for (String val : stateValuesMap.get(state)) {
                                    if (!allowedValues.contains(val)) {
                                        stateName = "not_" + state + "_" + val;
                                        endVar = NDDLUtil.escape("r" + varN++);
                                        out.printf(
                                                "\n\t\tends(%s.produce %s);\n" + "\t\teq(%s.quantity, 1.0);\n",
                                                NDDLUtil.escape(stateName), endVar, endVar);
                                    }
                                }
                            } else {
                                System.err.print(
                                        "*Required resource " + state + " did not have a value specified*\n\n");
                            }
                        } else if (stateTypesMap.get(state).equals("Threshold")) {
                            stateName = state + "_" + stateReq.getRequiredState();
                            endVar = NDDLUtil.escape("r" + varN++);
                            out.printf("\n\t\tends(%s.produce %s);\n" + "\t\teq(%s.quantity, 1.0);\n",
                                    NDDLUtil.escape(stateName), endVar, endVar);
                        }
                    }
                } else {
                    System.err.print("\n* Undefined state " + state + " in activity "
                            + NDDLUtil.escape(activityDef.getName()) + " *\n\n");
                }
            }
            // handle state effects
            for (EStateResourceEffect<?> effect : activityDef.getStateEffects()) {
                state = NDDLUtil.escape(effect.getName());
                atEndValue = effect.getEndEffect();
                // for effects, Enum and Threshold states are handled
                // differently
                String stateTypeName = stateTypesMap.get(state);
                if (stateTypeName != null && stateTypeName.equals("Enum")) {
                    if (atEndValue != null) {
                        writeEnumStateEffectSection(out, state, atEndValue, "ends");
                    }
                } else if (stateTypeName != null && stateTypeName.equals("Threshold")) {
                    // we assume that there is an atStart value
                    // and that atEnd we retract the effect
                    // make all LOWER values False at start and True at
                    // end
                    for (String val : stateValuesMap.get(state)) {
                        if (val.equals(atEndValue)) {
                            break;
                        }
                        stateName = NDDLUtil.escape(state + "_" + val);
                        endVar = NDDLUtil.escape("e" + varN++);
                        out.printf("\t\tends(%s.produce %s);\n\t\teq(%s.quantity, STATE_COND_TRUE);\n",
                                stateName, endVar, endVar);
                        out.println();
                    }
                }
            }
            out.print("\n }\n }\n}\n\n");
        }
    }
}
From source file:org.apache.accumulo.core.file.rfile.bcfile.TFileDumper.java
/** * Dump information about TFile./*www . ja va 2 s .c o m*/ * * @param file * Path string of the TFile * @param out * PrintStream to output the information. * @param conf * The configuration object. * @throws IOException */ static public void dumpInfo(String file, PrintStream out, Configuration conf) throws IOException { final int maxKeySampleLen = 16; Path path = new Path(file); FileSystem fs = path.getFileSystem(conf); long length = fs.getFileStatus(path).getLen(); FSDataInputStream fsdis = fs.open(path); TFile.Reader reader = new TFile.Reader(fsdis, length, conf); try { LinkedHashMap<String, String> properties = new LinkedHashMap<String, String>(); int blockCnt = reader.readerBCF.getBlockCount(); int metaBlkCnt = reader.readerBCF.metaIndex.index.size(); properties.put("BCFile Version", reader.readerBCF.version.toString()); properties.put("TFile Version", reader.tfileMeta.version.toString()); properties.put("File Length", Long.toString(length)); properties.put("Data Compression", reader.readerBCF.getDefaultCompressionName()); properties.put("Record Count", Long.toString(reader.getEntryCount())); properties.put("Sorted", Boolean.toString(reader.isSorted())); if (reader.isSorted()) { properties.put("Comparator", reader.getComparatorName()); } properties.put("Data Block Count", Integer.toString(blockCnt)); long dataSize = 0, dataSizeUncompressed = 0; if (blockCnt > 0) { for (int i = 0; i < blockCnt; ++i) { BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i); dataSize += region.getCompressedSize(); dataSizeUncompressed += region.getRawSize(); } properties.put("Data Block Bytes", Long.toString(dataSize)); if (reader.readerBCF.getDefaultCompressionName() != "none") { properties.put("Data Block Uncompressed Bytes", Long.toString(dataSizeUncompressed)); properties.put("Data Block Compression Ratio", String.format("1:%.1f", (double) dataSizeUncompressed / dataSize)); } } properties.put("Meta Block Count", Integer.toString(metaBlkCnt)); long 
metaSize = 0, metaSizeUncompressed = 0; if (metaBlkCnt > 0) { Collection<MetaIndexEntry> metaBlks = reader.readerBCF.metaIndex.index.values(); boolean calculateCompression = false; for (Iterator<MetaIndexEntry> it = metaBlks.iterator(); it.hasNext();) { MetaIndexEntry e = it.next(); metaSize += e.getRegion().getCompressedSize(); metaSizeUncompressed += e.getRegion().getRawSize(); if (e.getCompressionAlgorithm() != Compression.Algorithm.NONE) { calculateCompression = true; } } properties.put("Meta Block Bytes", Long.toString(metaSize)); if (calculateCompression) { properties.put("Meta Block Uncompressed Bytes", Long.toString(metaSizeUncompressed)); properties.put("Meta Block Compression Ratio", String.format("1:%.1f", (double) metaSizeUncompressed / metaSize)); } } properties.put("Meta-Data Size Ratio", String.format("1:%.1f", (double) dataSize / metaSize)); long leftOverBytes = length - dataSize - metaSize; long miscSize = BCFile.Magic.size() * 2 + Long.SIZE / Byte.SIZE + Version.size(); long metaIndexSize = leftOverBytes - miscSize; properties.put("Meta Block Index Bytes", Long.toString(metaIndexSize)); properties.put("Headers Etc Bytes", Long.toString(miscSize)); // Now output the properties table. 
int maxKeyLength = 0; Set<Map.Entry<String, String>> entrySet = properties.entrySet(); for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) { Map.Entry<String, String> e = it.next(); if (e.getKey().length() > maxKeyLength) { maxKeyLength = e.getKey().length(); } } for (Iterator<Map.Entry<String, String>> it = entrySet.iterator(); it.hasNext();) { Map.Entry<String, String> e = it.next(); out.printf("%s : %s%n", Align.format(e.getKey(), maxKeyLength, Align.LEFT), e.getValue()); } out.println(); reader.checkTFileDataIndex(); if (blockCnt > 0) { String blkID = "Data-Block"; int blkIDWidth = Align.calculateWidth(blkID, blockCnt); int blkIDWidth2 = Align.calculateWidth("", blockCnt); String offset = "Offset"; int offsetWidth = Align.calculateWidth(offset, length); String blkLen = "Length"; int blkLenWidth = Align.calculateWidth(blkLen, dataSize / blockCnt * 10); String rawSize = "Raw-Size"; int rawSizeWidth = Align.calculateWidth(rawSize, dataSizeUncompressed / blockCnt * 10); String records = "Records"; int recordsWidth = Align.calculateWidth(records, reader.getEntryCount() / blockCnt * 10); String endKey = "End-Key"; int endKeyWidth = Math.max(endKey.length(), maxKeySampleLen * 2 + 5); out.printf("%s %s %s %s %s %s%n", Align.format(blkID, blkIDWidth, Align.CENTER), Align.format(offset, offsetWidth, Align.CENTER), Align.format(blkLen, blkLenWidth, Align.CENTER), Align.format(rawSize, rawSizeWidth, Align.CENTER), Align.format(records, recordsWidth, Align.CENTER), Align.format(endKey, endKeyWidth, Align.LEFT)); for (int i = 0; i < blockCnt; ++i) { BlockRegion region = reader.readerBCF.dataIndex.getBlockRegionList().get(i); TFileIndexEntry indexEntry = reader.tfileIndex.getEntry(i); out.printf("%s %s %s %s %s ", Align.format(Align.format(i, blkIDWidth2, Align.ZERO_PADDED), blkIDWidth, Align.LEFT), Align.format(region.getOffset(), offsetWidth, Align.LEFT), Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT), 
Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT), Align.format(indexEntry.kvEntries, recordsWidth, Align.LEFT)); byte[] key = indexEntry.key; boolean asAscii = true; int sampleLen = Math.min(maxKeySampleLen, key.length); for (int j = 0; j < sampleLen; ++j) { byte b = key[j]; if ((b < 32 && b != 9) || (b == 127)) { asAscii = false; } } if (!asAscii) { out.print("0X"); for (int j = 0; j < sampleLen; ++j) { byte b = key[i]; out.printf("%X", b); } } else { out.print(new String(key, 0, sampleLen)); } if (sampleLen < key.length) { out.print("..."); } out.println(); } } out.println(); if (metaBlkCnt > 0) { String name = "Meta-Block"; int maxNameLen = 0; Set<Map.Entry<String, MetaIndexEntry>> metaBlkEntrySet = reader.readerBCF.metaIndex.index .entrySet(); for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) { Map.Entry<String, MetaIndexEntry> e = it.next(); if (e.getKey().length() > maxNameLen) { maxNameLen = e.getKey().length(); } } int nameWidth = Math.max(name.length(), maxNameLen); String offset = "Offset"; int offsetWidth = Align.calculateWidth(offset, length); String blkLen = "Length"; int blkLenWidth = Align.calculateWidth(blkLen, metaSize / metaBlkCnt * 10); String rawSize = "Raw-Size"; int rawSizeWidth = Align.calculateWidth(rawSize, metaSizeUncompressed / metaBlkCnt * 10); String compression = "Compression"; int compressionWidth = compression.length(); out.printf("%s %s %s %s %s%n", Align.format(name, nameWidth, Align.CENTER), Align.format(offset, offsetWidth, Align.CENTER), Align.format(blkLen, blkLenWidth, Align.CENTER), Align.format(rawSize, rawSizeWidth, Align.CENTER), Align.format(compression, compressionWidth, Align.LEFT)); for (Iterator<Map.Entry<String, MetaIndexEntry>> it = metaBlkEntrySet.iterator(); it.hasNext();) { Map.Entry<String, MetaIndexEntry> e = it.next(); String blkName = e.getValue().getMetaName(); BlockRegion region = e.getValue().getRegion(); String blkCompression = 
e.getValue().getCompressionAlgorithm().getName(); out.printf("%s %s %s %s %s%n", Align.format(blkName, nameWidth, Align.LEFT), Align.format(region.getOffset(), offsetWidth, Align.LEFT), Align.format(region.getCompressedSize(), blkLenWidth, Align.LEFT), Align.format(region.getRawSize(), rawSizeWidth, Align.LEFT), Align.format(blkCompression, compressionWidth, Align.LEFT)); } } } finally { IOUtils.cleanup(LOG, reader, fsdis); } }