List of usage examples for java.util.Set.isEmpty()
boolean isEmpty();
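Returns true if this set contains no elements. Before the real-world examples below, here is a minimal, self-contained sketch of the basic call; the class and variable names are invented for illustration only:

import java.util.HashSet;
import java.util.Set;

public class IsEmptyDemo {
    public static void main(String[] args) {
        Set<String> tags = new HashSet<>();
        System.out.println(tags.isEmpty());   // true: nothing has been added yet
        tags.add("alpha");
        System.out.println(tags.isEmpty());   // false: the set now holds one element
        // isEmpty() is equivalent to size() == 0, but reads more clearly as a guard.
    }
}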
From source file:jp.terasoluna.fw.batch.util.BatchUtil.java
/**
 * Commits the transactions held in statMap, using the matching PlatformTransactionManager
 * from tranMap, in reverse (LIFO) order of registration.
 * @param tranMap map of PlatformTransactionManager instances
 * @param statMap map of TransactionStatus instances
 * @param log Log
 */
public static void commitTransactions(Map<?, ?> tranMap, Map<String, TransactionStatus> statMap, Log log) {
    Set<Entry<String, TransactionStatus>> statSet = statMap.entrySet();
    if (statSet.isEmpty()) {
        return;
    }
    Stack<Entry<String, TransactionStatus>> stack = new Stack<Entry<String, TransactionStatus>>();
    for (Entry<String, TransactionStatus> stat : statSet) {
        stack.push(stat);
    }
    while (!stack.isEmpty()) {
        // Pop the next transaction status entry.
        Entry<String, TransactionStatus> statEntry = stack.pop();
        String key = statEntry.getKey();
        TransactionStatus trnStat = statEntry.getValue();
        if (trnStat == null) {
            continue;
        }
        // Look up the matching transaction manager.
        Object ptmObj = tranMap.get(key);
        if (ptmObj == null || !(ptmObj instanceof PlatformTransactionManager)) {
            continue;
        }
        PlatformTransactionManager ptm = (PlatformTransactionManager) ptmObj;
        if (log != null && log.isDebugEnabled()) {
            logDebug(log, LogId.DAL025038, trnStat);
        }
        // Commit the transaction.
        ptm.commit(trnStat);
    }
}
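The example above short-circuits with isEmpty() before building a LIFO stack of the map's entries, so nothing is allocated when there is no work to do. Below is a simplified, self-contained sketch of the same guard-then-reverse-iterate pattern without the Spring transaction types; the Runnable actions and all names are made up for this sketch and stand in for the PlatformTransactionManager commits:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.Map;

public class ReverseCommitSketch {
    public static void main(String[] args) {
        Map<String, Runnable> actions = new LinkedHashMap<>();
        actions.put("first", () -> System.out.println("commit first"));
        actions.put("second", () -> System.out.println("commit second"));

        // Guard: nothing to do if no actions were registered.
        if (actions.entrySet().isEmpty()) {
            return;
        }
        // Push entries onto a stack so they run in reverse registration order.
        Deque<Map.Entry<String, Runnable>> stack = new ArrayDeque<>();
        for (Map.Entry<String, Runnable> entry : actions.entrySet()) {
            stack.push(entry);
        }
        while (!stack.isEmpty()) {
            stack.pop().getValue().run();   // prints "commit second", then "commit first"
        }
    }
}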
From source file:de.knowwe.visualization.util.Utils.java
public static String prepareLabel(String string) {
    // if (true) return string;
    String lb = LINE_BREAK;
    int length = string.length();
    if (length < 13) return clean(string, lb);

    // find possible line break positions
    Set<Integer> possibleLBs = new TreeSet<>();

    // possible line breaks are before the following chars:
    // _ >= <= = . ( [ and white spaces
    Matcher m = Pattern.compile("_|>=|<=|=|\\.|\\([^\\)]{1}|\\[[^\\]]{1}").matcher(string);
    while (m.find()) {
        possibleLBs.add(m.start(0));
    }
    // line breaks at whitespace only if they are not in range of = or > or <
    m = Pattern.compile("(?<=[^=<>]){3}( )(?=[^=<>]{3})").matcher(string);
    while (m.find()) {
        possibleLBs.add(m.start(1));
    }

    if (possibleLBs.isEmpty()) return clean(string, lb);

    // add the line breaks where it makes sense
    List<Integer> desiredLBs = new LinkedList<>();
    Set<Integer> addedLBs = new TreeSet<>();

    // optimal length is determined by the length of the given String
    double optimalLength = (double) length / Math.sqrt(length / 5);

    for (int i = 1; i < string.length() / optimalLength; i++) {
        // having the line breaks on these positions would be optimal
        desiredLBs.add((int) Math.round(i * optimalLength));
    }

    //todo: remove creation of trailing linebreaks

    // try to find those possible line breaks that are closest to the optimal line breaks
    int d = 0;
    for (Integer desLB : desiredLBs) {
        int bestCandiadate = 0;
        // to avoid breaks for only a few chars at the end, we make extra efforts for the last line break
        // we get the line break that produces the smallest variance
        // we should actually calculate the best break via variance for all line breaks, but that seems
        // rather complex and not yet justified right now, since the current simple algorithm already
        // produces nice results
        if (d == desiredLBs.size() - 1) {
            double bestVar = Double.MAX_VALUE;
            for (Integer posLB : possibleLBs) {
                Set<Integer> temp = new TreeSet<>(addedLBs);
                temp.add(posLB);
                TreeSet<Integer> varianceCheck = new TreeSet<>(temp);
                varianceCheck.add(length);
                double variance = getVariance(varianceCheck);
                if (variance <= bestVar) {
                    bestVar = variance;
                    bestCandiadate = posLB;
                }
            }
        }
        // for all other breakpoints, just get the one closest to the desired position
        else {
            for (Integer posLB : possibleLBs) {
                if (Math.abs(desLB - posLB) <= Math.abs(desLB - bestCandiadate)) {
                    bestCandiadate = posLB;
                }
            }
        }
        if (bestCandiadate != 0 && bestCandiadate != length) {
            addedLBs.add(bestCandiadate);
        }
        d++;
    }

    // put in the line breaks
    StringBuilder labelBuilder = new StringBuilder();
    List<String> split = new ArrayList<>(addedLBs.size() + 1);
    int last = 0;
    for (Integer addedLB : addedLBs) {
        split.add(string.substring(last, addedLB));
        last = addedLB;
    }
    split.add(string.substring(last, string.length()));
    for (String s : split) {
        // clean the substrings
        labelBuilder.append(clean(s.trim(), lb)).append(lb);
    }
    String label = labelBuilder.toString();
    return label;
}
From source file:com.cloudera.whirr.cm.CmServerClusterInstance.java
@SuppressWarnings("unchecked")
public static SortedSet<String> getMounts(ClusterSpec specification, Set<Instance> instances)
        throws IOException {
    Configuration configuration = getConfiguration(specification);
    SortedSet<String> mounts = new TreeSet<String>();
    Set<String> deviceMappings = CmServerClusterInstance.getDeviceMappings(specification, instances).keySet();
    if (!configuration.getList(CONFIG_WHIRR_DATA_DIRS_ROOT).isEmpty()) {
        mounts.addAll(configuration.getList(CONFIG_WHIRR_DATA_DIRS_ROOT));
    } else if (!deviceMappings.isEmpty()) {
        mounts.addAll(deviceMappings);
    } else {
        mounts.add(configuration.getString(CONFIG_WHIRR_INTERNAL_DATA_DIRS_DEFAULT));
    }
    return mounts;
}
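The fallback chain above prefers an explicit data-directory configuration, then the detected device mappings, then a single default. Here is a self-contained sketch of that precedence using only java.util collections; the names resolveMounts, explicitDirs, detectedMounts and DEFAULT_DIR are invented for this sketch and do not come from the Whirr code:

import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

public class MountFallbackSketch {
    private static final String DEFAULT_DIR = "/data";   // illustrative default only

    static SortedSet<String> resolveMounts(List<String> explicitDirs, Set<String> detectedMounts) {
        SortedSet<String> mounts = new TreeSet<>();
        if (!explicitDirs.isEmpty()) {
            mounts.addAll(explicitDirs);          // 1. explicit configuration wins
        } else if (!detectedMounts.isEmpty()) {
            mounts.addAll(detectedMounts);        // 2. otherwise fall back to detected devices
        } else {
            mounts.add(DEFAULT_DIR);              // 3. otherwise use the single default
        }
        return mounts;
    }

    public static void main(String[] args) {
        System.out.println(resolveMounts(List.of("/custom"), Set.of("/mnt/sdb")));      // [/custom]
        System.out.println(resolveMounts(List.of(), Set.of("/mnt/sdb", "/mnt/sdc")));   // [/mnt/sdb, /mnt/sdc]
        System.out.println(resolveMounts(List.of(), Set.of()));                         // [/data]
    }
}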
From source file:com.aurel.track.util.GeneralUtils.java
/**
 * Prepares a list of chunks to avoid the IN statement
 * limitations in some databases (for ex. Oracle)
 * @param objectIDList the set of object IDs to split into chunks
 * @return the list of chunks
 */
public static List<int[]> getListOfChunks(Set<Integer> objectIDList) {
    if (objectIDList == null || objectIDList.isEmpty()) {
        return new LinkedList<int[]>();
    }
    int[] arrInts = new int[objectIDList.size()];
    int i = 0;
    for (Integer intValue : objectIDList) {
        if (intValue != null) {
            arrInts[i++] = intValue.intValue();
        }
    }
    return getListOfChunks(arrInts);
}
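The int[]-based overload of getListOfChunks is not shown above, so the sketch below re-creates the chunking idea end to end, keeping the same null-or-isEmpty guard. The chunk size of 2 in the demo and all names are chosen for illustration only (a real IN-list limit would be closer to 1000):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Objects;
import java.util.Set;

public class ChunkSketch {
    static List<int[]> getListOfChunks(Set<Integer> ids, int chunkSize) {
        if (ids == null || ids.isEmpty()) {
            return new LinkedList<>();                       // same guard as the original
        }
        // Copy the non-null values into a plain array first, as the original does.
        int[] all = ids.stream().filter(Objects::nonNull).mapToInt(Integer::intValue).toArray();
        List<int[]> chunks = new ArrayList<>();
        for (int from = 0; from < all.length; from += chunkSize) {
            chunks.add(Arrays.copyOfRange(all, from, Math.min(from + chunkSize, all.length)));
        }
        return chunks;
    }

    public static void main(String[] args) {
        Set<Integer> ids = new LinkedHashSet<>(List.of(1, 2, 3, 4, 5));
        getListOfChunks(ids, 2).forEach(chunk -> System.out.println(Arrays.toString(chunk)));
        // [1, 2]
        // [3, 4]
        // [5]
    }
}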
From source file:jp.terasoluna.fw.batch.util.BatchUtil.java
/**
 * Ends (rolls back) any transactions in statMap that have not yet completed, using the
 * matching PlatformTransactionManager from tranMap, in reverse (LIFO) order of registration.
 * @param tranMap map of PlatformTransactionManager instances
 * @param statMap map of TransactionStatus instances
 * @param log Log
 * @return true if every rollback succeeded (or there was nothing to roll back); false if any
 *         PlatformTransactionManager failed during rollback
 */
public static boolean endTransactions(Map<?, ?> tranMap, Map<String, TransactionStatus> statMap, Log log) {
    boolean isNormal = true;
    Set<Entry<String, TransactionStatus>> statSet = statMap.entrySet();
    if (statSet == null || statSet.isEmpty()) {
        return isNormal;
    }
    Stack<Entry<String, TransactionStatus>> stack = new Stack<Entry<String, TransactionStatus>>();
    for (Entry<String, TransactionStatus> stat : statSet) {
        stack.push(stat);
    }
    while (!stack.isEmpty()) {
        // Pop the next transaction status entry.
        Entry<String, TransactionStatus> statEntry = stack.pop();
        String key = statEntry.getKey();
        TransactionStatus trnStat = statEntry.getValue();
        if (trnStat == null) {
            continue;
        }
        // Look up the matching transaction manager.
        Object ptmObj = tranMap.get(key);
        if (ptmObj == null || !(ptmObj instanceof PlatformTransactionManager)) {
            continue;
        }
        PlatformTransactionManager ptm = (PlatformTransactionManager) ptmObj;
        // Skip transactions that have already completed.
        if (trnStat.isCompleted()) {
            continue;
        }
        if (log != null && log.isDebugEnabled()) {
            logDebug(log, LogId.DAL025041, trnStat);
        }
        // Roll back the transaction.
        try {
            ptm.rollback(trnStat);
        } catch (TransactionException e) {
            if (log != null && log.isErrorEnabled()) {
                logError(log, LogId.EAL025045, e, key);
            }
            isNormal = false; // Record the failure and continue with the remaining transactions.
        }
        if (log != null && log.isDebugEnabled()) {
            logDebug(log, LogId.DAL025041, trnStat);
        }
    }
    return isNormal;
}
From source file:com.yahoo.ycsb.db.couchbase2.Couchbase2Client.java
/**
 * Helper method to join the set of fields into a String suitable for N1QL.
 *
 * @param fields the fields to join.
 * @return the joined fields as a String.
 */
private static String joinFields(final Set<String> fields) {
    if (fields == null || fields.isEmpty()) {
        return "*";
    }
    StringBuilder builder = new StringBuilder();
    for (String f : fields) {
        builder.append("`").append(f).append("`").append(",");
    }
    String toReturn = builder.toString();
    return toReturn.substring(0, toReturn.length() - 1);
}
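A quick demonstration of the two branches: a null or empty field set selects everything ("*"), while a non-empty set becomes a comma-separated list of back-tick-quoted identifiers. The method body is repeated verbatim inside an invented demo class so the snippet runs on its own:

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class JoinFieldsDemo {
    private static String joinFields(final Set<String> fields) {
        if (fields == null || fields.isEmpty()) {
            return "*";                                   // no projection requested: select everything
        }
        StringBuilder builder = new StringBuilder();
        for (String f : fields) {
            builder.append("`").append(f).append("`").append(",");
        }
        String joined = builder.toString();
        return joined.substring(0, joined.length() - 1);  // drop the trailing comma
    }

    public static void main(String[] args) {
        System.out.println(joinFields(null));                                      // *
        System.out.println(joinFields(new LinkedHashSet<>()));                     // *
        System.out.println(joinFields(new LinkedHashSet<>(List.of("name", "age")))); // `name`,`age`
    }
}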
From source file:com.amazonaws.services.kinesis.clientlibrary.lib.worker.ShardSyncer.java
/**
 * Private helper method.
 * Clean up leases for shards that meet the following criteria:
 * a/ the shard has been fully processed (checkpoint is set to SHARD_END)
 * b/ we've begun processing all the child shards: we have leases for all child shards and their checkpoint is not
 *    TRIM_HORIZON.
 *
 * @param currentLeases List of leases we evaluate for clean up
 * @param shardIdToShardMap Map of shardId->Shard (assumed to include all Kinesis shards)
 * @param shardIdToChildShardIdsMap Map of shardId->childShardIds (assumed to include all Kinesis shards)
 * @param trackedLeases List of all leases we are tracking.
 * @param leaseManager Lease manager (will be used to delete leases)
 * @throws DependencyException
 * @throws InvalidStateException
 * @throws ProvisionedThroughputException
 * @throws KinesisClientLibIOException
 */
private static synchronized void cleanupLeasesOfFinishedShards(Collection<KinesisClientLease> currentLeases,
        Map<String, Shard> shardIdToShardMap, Map<String, Set<String>> shardIdToChildShardIdsMap,
        List<KinesisClientLease> trackedLeases, ILeaseManager<KinesisClientLease> leaseManager)
        throws DependencyException, InvalidStateException, ProvisionedThroughputException,
        KinesisClientLibIOException {
    Set<String> shardIdsOfClosedShards = new HashSet<>();
    List<KinesisClientLease> leasesOfClosedShards = new ArrayList<>();
    for (KinesisClientLease lease : currentLeases) {
        if (lease.getCheckpoint().equals(ExtendedSequenceNumber.SHARD_END)) {
            shardIdsOfClosedShards.add(lease.getLeaseKey());
            leasesOfClosedShards.add(lease);
        }
    }

    if (!leasesOfClosedShards.isEmpty()) {
        assertClosedShardsAreCoveredOrAbsent(shardIdToShardMap, shardIdToChildShardIdsMap,
                shardIdsOfClosedShards);
        Comparator<? super KinesisClientLease> startingSequenceNumberComparator =
                new StartingSequenceNumberAndShardIdBasedComparator(shardIdToShardMap);
        Collections.sort(leasesOfClosedShards, startingSequenceNumberComparator);
        Map<String, KinesisClientLease> trackedLeaseMap = constructShardIdToKCLLeaseMap(trackedLeases);

        for (KinesisClientLease leaseOfClosedShard : leasesOfClosedShards) {
            String closedShardId = leaseOfClosedShard.getLeaseKey();
            Set<String> childShardIds = shardIdToChildShardIdsMap.get(closedShardId);
            if ((closedShardId != null) && (childShardIds != null) && (!childShardIds.isEmpty())) {
                cleanupLeaseForClosedShard(closedShardId, childShardIds, trackedLeaseMap, leaseManager);
            }
        }
    }
}
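The inner loop above only cleans up a closed shard when its child-shard set exists and is non-empty. The following self-contained sketch isolates just that null-and-isEmpty check; the shard IDs and the map are invented for illustration and have nothing to do with the real KCL types:

import java.util.Map;
import java.util.Set;

public class ChildShardCheckSketch {
    public static void main(String[] args) {
        // Hypothetical shardId -> childShardIds map; shard-B has no recorded children.
        Map<String, Set<String>> childIdsByParent = Map.of(
                "shard-A", Set.of("shard-A-1", "shard-A-2"),
                "shard-B", Set.of());

        for (Map.Entry<String, Set<String>> entry : childIdsByParent.entrySet()) {
            Set<String> children = entry.getValue();
            // Only parents with a known, non-empty child set are eligible for cleanup.
            if (children != null && !children.isEmpty()) {
                System.out.println("would clean up lease for " + entry.getKey());
            }
        }
        // Prints only: would clean up lease for shard-A
    }
}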
From source file:com.netflix.genie.core.jpa.specifications.JpaClusterSpecs.java
/**
 * Generate a specification given the parameters.
 *
 * @param name          The name of the cluster to find
 * @param statuses      The statuses of the clusters to find
 * @param tags          The tags of the clusters to find
 * @param minUpdateTime The minimum updated time of the clusters to find
 * @param maxUpdateTime The maximum updated time of the clusters to find
 * @return The specification
 */
public static Specification<ClusterEntity> find(final String name, final Set<ClusterStatus> statuses,
        final Set<String> tags, final Date minUpdateTime, final Date maxUpdateTime) {
    return (final Root<ClusterEntity> root, final CriteriaQuery<?> cq, final CriteriaBuilder cb) -> {
        final List<Predicate> predicates = new ArrayList<>();
        if (StringUtils.isNotBlank(name)) {
            predicates.add(JpaSpecificationUtils.getStringLikeOrEqualPredicate(cb,
                    root.get(ClusterEntity_.name), name));
        }
        if (minUpdateTime != null) {
            predicates.add(cb.greaterThanOrEqualTo(root.get(ClusterEntity_.updated), minUpdateTime));
        }
        if (maxUpdateTime != null) {
            predicates.add(cb.lessThan(root.get(ClusterEntity_.updated), maxUpdateTime));
        }
        if (tags != null && !tags.isEmpty()) {
            predicates.add(cb.like(root.get(ClusterEntity_.tags), JpaSpecificationUtils.getTagLikeString(tags)));
        }
        if (statuses != null && !statuses.isEmpty()) {
            // Could optimize this as we know size could use native array
            final List<Predicate> orPredicates = statuses.stream()
                    .map(status -> cb.equal(root.get(ClusterEntity_.status), status))
                    .collect(Collectors.toList());
            predicates.add(cb.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
        }
        return cb.and(predicates.toArray(new Predicate[predicates.size()]));
    };
}
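Each optional filter above is guarded with a null-and-isEmpty check so that absent criteria contribute no predicate at all. The JPA-free sketch below shows the same "only add a filter when the set is non-empty" pattern with java.util.function.Predicate; the Cluster record, the status strings and all names are stand-ins invented for this sketch, and it assumes a recent JDK (records, String.isBlank):

import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.function.Predicate;

public class OptionalFilterSketch {
    // Minimal stand-in for the entity being filtered.
    record Cluster(String name, String status, Set<String> tags) {}

    static Predicate<Cluster> find(String name, Set<String> statuses, Set<String> tags) {
        List<Predicate<Cluster>> predicates = new ArrayList<>();
        if (name != null && !name.isBlank()) {
            predicates.add(c -> c.name().equals(name));
        }
        if (statuses != null && !statuses.isEmpty()) {       // same guard as the JPA specification
            predicates.add(c -> statuses.contains(c.status()));
        }
        if (tags != null && !tags.isEmpty()) {
            predicates.add(c -> c.tags().containsAll(tags));
        }
        // AND all supplied criteria together; with no criteria, everything matches.
        return predicates.stream().reduce(c -> true, Predicate::and);
    }

    public static void main(String[] args) {
        Cluster prod = new Cluster("hadoop-prod", "UP", Set.of("prod", "hadoop"));
        System.out.println(find(null, Set.of("UP"), Set.of("prod")).test(prod));   // true
        System.out.println(find(null, Set.of("DOWN"), null).test(prod));           // false
    }
}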
From source file:org.wrml.runtime.format.application.vnd.wrml.design.schema.SchemaDesignFormatter.java
public static ObjectNode createSchemaDesignObjectNode(final ObjectMapper objectMapper, final Schema schema) {
    final Context context = schema.getContext();
    final SyntaxLoader syntaxLoader = context.getSyntaxLoader();
    final SchemaLoader schemaLoader = context.getSchemaLoader();
    final ObjectNode rootNode = objectMapper.createObjectNode();
    final URI schemaUri = schema.getUri();
    final Prototype prototype = schemaLoader.getPrototype(schemaUri);

    rootNode.put(PropertyName.uri.name(), syntaxLoader.formatSyntaxValue(schemaUri));
    rootNode.put(PropertyName.title.name(), schema.getTitle());
    rootNode.put(PropertyName.description.name(), schema.getDescription());
    rootNode.put(PropertyName.version.name(), schema.getVersion());

    final String titleSlotName = getTitleSlotName(schemaUri, schemaLoader);
    if (titleSlotName != null) {
        rootNode.put(PropertyName.titleSlotName.name(), titleSlotName);
    }

    final UniqueName uniqueName = schema.getUniqueName();
    final ObjectNode uniqueNameNode = objectMapper.createObjectNode();
    uniqueNameNode.put(PropertyName.fullName.name(), uniqueName.getFullName());
    uniqueNameNode.put(PropertyName.namespace.name(), uniqueName.getNamespace());
    uniqueNameNode.put(PropertyName.localName.name(), uniqueName.getLocalName());
    rootNode.put(PropertyName.uniqueName.name(), uniqueNameNode);

    final Set<URI> declaredBaseSchemaUris = prototype.getDeclaredBaseSchemaUris();
    if (declaredBaseSchemaUris != null && !declaredBaseSchemaUris.isEmpty()) {
        final Set<URI> addedBaseSchemaUris = new LinkedHashSet<>();
        final ArrayNode baseSchemasNode = objectMapper.createArrayNode();
        rootNode.put(PropertyName.baseSchemas.name(), baseSchemasNode);
        for (final URI baseSchemaUri : declaredBaseSchemaUris) {
            if (!addedBaseSchemaUris.contains(baseSchemaUri)) {
                final ObjectNode baseSchemaNode = buildSchemaNode(objectMapper, baseSchemaUri, schemaLoader,
                        addedBaseSchemaUris);
                baseSchemasNode.add(baseSchemaNode);
                addedBaseSchemaUris.add(baseSchemaUri);
            }
        }
    }

    final Set<String> keySlotNames = prototype.getDeclaredKeySlotNames();
    if (keySlotNames != null && !keySlotNames.isEmpty()) {
        final ArrayNode keyPropertyNamesNode = objectMapper.createArrayNode();
        for (final String keySlotName : keySlotNames) {
            keyPropertyNamesNode.add(keySlotName);
        }
        if (keyPropertyNamesNode.size() > 0) {
            rootNode.put(PropertyName.keyPropertyNames.name(), keyPropertyNamesNode);
        }
    }

    final Set<String> allKeySlotNames = prototype.getAllKeySlotNames();
    final ArrayNode allKeySlotNamesNode = objectMapper.createArrayNode();
    rootNode.put(PropertyName.allKeySlotNames.name(), allKeySlotNamesNode);

    final ObjectNode keySlotMap = objectMapper.createObjectNode();
    rootNode.put(PropertyName.keys.name(), keySlotMap);

    final String uriSlotName = PropertyName.uri.name();
    if (allKeySlotNames.contains(uriSlotName)) {
        allKeySlotNamesNode.add(uriSlotName);
        final ObjectNode slot = createSlot(objectMapper, prototype, uriSlotName);
        keySlotMap.put(uriSlotName, slot);
    }

    for (final String keySlotName : allKeySlotNames) {
        if (!Document.SLOT_NAME_URI.equals(keySlotName)) {
            allKeySlotNamesNode.add(keySlotName);
            final ObjectNode slot = createSlot(objectMapper, prototype, keySlotName);
            keySlotMap.put(keySlotName, slot);
        }
    }

    rootNode.put(PropertyName.keyCount.name(), keySlotMap.size());

    final SortedSet<String> allSlotNames = prototype.getAllSlotNames();
    if (allSlotNames != null && !allSlotNames.isEmpty()) {
        final ObjectNode slotMapNode = objectMapper.createObjectNode();
        rootNode.put(PropertyName.slots.name(), slotMapNode);
        final ArrayNode propertyNamesNode = objectMapper.createArrayNode();
        for (final String slotName : allSlotNames) {
            final ProtoSlot protoSlot = prototype.getProtoSlot(slotName);
            if (protoSlot instanceof LinkProtoSlot) {
                continue;
            }
            if (allKeySlotNames.contains(slotName)) {
                // Skip key slots (handled separately)
                continue;
            }
            if (protoSlot.getDeclaringSchemaUri().equals(schemaUri)) {
                propertyNamesNode.add(slotName);
            }
            final ObjectNode slotNode = createSlot(objectMapper, prototype, slotName);
            if (slotNode != null) {
                slotMapNode.put(slotName, slotNode);
            }
        }
        if (propertyNamesNode.size() > 0) {
            rootNode.put(PropertyName.propertyNames.name(), propertyNamesNode);
        }
        rootNode.put(PropertyName.slotCount.name(), slotMapNode.size());
    }

    final Set<String> comparablePropertyNames = prototype.getComparableSlotNames();
    if (comparablePropertyNames != null && !comparablePropertyNames.isEmpty()) {
        final ArrayNode comparablePropertyNamesNode = objectMapper.createArrayNode();
        for (final String comparablePropertyName : comparablePropertyNames) {
            comparablePropertyNamesNode.add(comparablePropertyName);
        }
        if (comparablePropertyNamesNode.size() > 0) {
            rootNode.put(PropertyName.comparablePropertyNames.name(), comparablePropertyNamesNode);
        }
    }

    final Collection<LinkProtoSlot> linkProtoSlots = prototype.getLinkProtoSlots().values();
    if (linkProtoSlots != null && !linkProtoSlots.isEmpty()) {
        final ArrayNode linkNamesNode = objectMapper.createArrayNode();
        final ObjectNode linksMapNode = objectMapper.createObjectNode();
        rootNode.put(PropertyName.links.name(), linksMapNode);
        for (final LinkProtoSlot linkProtoSlot : linkProtoSlots) {
            if (linkProtoSlot.getDeclaringSchemaUri().equals(schemaUri)) {
                linkNamesNode.add(linkProtoSlot.getName());
            }
            final ObjectNode linkNode = objectMapper.createObjectNode();
            String linkTitle = linkProtoSlot.getTitle();
            if (linkTitle == null) {
                linkTitle = linkProtoSlot.getName();
            }
            linkNode.put(PropertyName.name.name(), linkProtoSlot.getName());
            linkNode.put(PropertyName.title.name(), linkTitle);
            final Method method = linkProtoSlot.getMethod();
            final URI linkRelationUri = linkProtoSlot.getLinkRelationUri();
            final URI declaringSchemaUri = linkProtoSlot.getDeclaringSchemaUri();
            linkNode.put(PropertyName.rel.name(), syntaxLoader.formatSyntaxValue(linkRelationUri));
            final Keys linkRelationKeys = context.getApiLoader().buildDocumentKeys(linkRelationUri,
                    schemaLoader.getLinkRelationSchemaUri());
            final LinkRelation linkRelation = context.getModel(linkRelationKeys,
                    schemaLoader.getLinkRelationDimensions());
            linkNode.put(PropertyName.relationTitle.name(), linkRelation.getTitle());
            linkNode.put(PropertyName.description.name(), linkProtoSlot.getDescription());
            linkNode.put(PropertyName.method.name(), method.getProtocolGivenName());
            linkNode.put(PropertyName.declaringSchemaUri.name(), syntaxLoader.formatSyntaxValue(declaringSchemaUri));

            URI requestSchemaUri = linkProtoSlot.getRequestSchemaUri();
            if (schemaLoader.getDocumentSchemaUri().equals(requestSchemaUri)) {
                if (SystemLinkRelation.self.getUri().equals(linkRelationUri)
                        || SystemLinkRelation.save.getUri().equals(linkRelationUri)) {
                    requestSchemaUri = schemaUri;
                }
            }
            if (requestSchemaUri == null && method == Method.Save) {
                requestSchemaUri = schemaUri;
            }
            if (requestSchemaUri != null) {
                linkNode.put(PropertyName.requestSchemaUri.name(), syntaxLoader.formatSyntaxValue(requestSchemaUri));
                final Schema requestSchema = schemaLoader.load(requestSchemaUri);
                if (requestSchema != null) {
                    linkNode.put(PropertyName.requestSchemaTitle.name(), requestSchema.getTitle());
                }
            }

            URI responseSchemaUri = linkProtoSlot.getResponseSchemaUri();
            if (schemaLoader.getDocumentSchemaUri().equals(responseSchemaUri)) {
                if (SystemLinkRelation.self.getUri().equals(linkRelationUri)
                        || SystemLinkRelation.save.getUri().equals(linkRelationUri)) {
                    responseSchemaUri = schemaUri;
                }
            }
            if (responseSchemaUri != null) {
                linkNode.put(PropertyName.responseSchemaUri.name(), syntaxLoader.formatSyntaxValue(responseSchemaUri));
                final Schema responseSchema = schemaLoader.load(responseSchemaUri);
                if (responseSchema != null) {
                    linkNode.put(PropertyName.responseSchemaTitle.name(), responseSchema.getTitle());
                }
            }

            linksMapNode.put(linkTitle, linkNode);
        }
        if (linkNamesNode.size() > 0) {
            rootNode.put(PropertyName.linkNames.name(), linkNamesNode);
        }
        rootNode.put(PropertyName.linkCount.name(), linksMapNode.size());
    }

    return rootNode;
}
From source file:com.espertech.esper.epl.core.ResultSetProcessorFactory.java
private static void validateHaving(Set<Pair<Integer, String>> propertiesGroupedBy, ExprNode havingNode)
        throws ExprValidationException {
    List<ExprAggregateNode> aggregateNodesHaving = new LinkedList<ExprAggregateNode>();
    if (aggregateNodesHaving != null) {
        ExprAggregateNodeUtil.getAggregatesBottomUp(havingNode, aggregateNodesHaving);
    }

    // Any non-aggregated properties must occur in the group-by clause (if there is one)
    if (!propertiesGroupedBy.isEmpty()) {
        ExprNodeIdentifierVisitor visitor = new ExprNodeIdentifierVisitor(true);
        havingNode.accept(visitor);
        List<Pair<Integer, String>> allPropertiesHaving = visitor.getExprProperties();
        Set<Pair<Integer, String>> aggPropertiesHaving = ExprNodeUtility.getAggregatedProperties(aggregateNodesHaving);
        allPropertiesHaving.removeAll(aggPropertiesHaving);
        allPropertiesHaving.removeAll(propertiesGroupedBy);

        if (!allPropertiesHaving.isEmpty()) {
            String name = allPropertiesHaving.iterator().next().getSecond();
            throw new ExprValidationException("Non-aggregated property '" + name
                    + "' in the HAVING clause must occur in the group-by clause");
        }
    }
}
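The core of this check is plain set arithmetic: remove the aggregated properties and the grouped properties from everything referenced in the HAVING clause, and whatever remains is an error. A stripped-down, self-contained sketch of that logic using strings in place of Esper's Pair<Integer, String> property descriptors (all names and the IllegalArgumentException are invented for the sketch):

import java.util.HashSet;
import java.util.Set;

public class HavingValidationSketch {
    static void validateHaving(Set<String> groupedBy, Set<String> referencedInHaving, Set<String> aggregated) {
        if (groupedBy.isEmpty()) {
            return;                              // no group-by clause: nothing to validate here
        }
        Set<String> remaining = new HashSet<>(referencedInHaving);
        remaining.removeAll(aggregated);         // aggregated properties are always allowed
        remaining.removeAll(groupedBy);          // grouped properties are allowed too
        if (!remaining.isEmpty()) {
            throw new IllegalArgumentException("Non-aggregated property '" + remaining.iterator().next()
                    + "' in the HAVING clause must occur in the group-by clause");
        }
    }

    public static void main(String[] args) {
        // OK: price is grouped, volume is aggregated.
        validateHaving(Set.of("price"), Set.of("price", "volume"), Set.of("volume"));
        // Fails: symbol is neither grouped nor aggregated.
        try {
            validateHaving(Set.of("price"), Set.of("price", "symbol"), Set.of());
        } catch (IllegalArgumentException expected) {
            System.out.println(expected.getMessage());
        }
    }
}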