Example usage for java.util Set isEmpty

List of usage examples for java.util Set isEmpty

Introduction

On this page you can find example usage for java.util.Set.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this set contains no elements.
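
The prototype above is easy to try in isolation. A minimal, self-contained sketch (the class name and sample values are illustrative only, not taken from any of the source files below):

import java.util.HashSet;
import java.util.Set;

public class SetIsEmptyDemo {
    public static void main(String[] args) {
        Set<String> tags = new HashSet<>();

        // A freshly created set contains no elements, so isEmpty() returns true.
        System.out.println(tags.isEmpty()); // true

        tags.add("alpha");

        // Guard before iterating, the common pattern in the usage examples below.
        if (!tags.isEmpty()) {
            for (String tag : tags) {
                System.out.println("tag: " + tag);
            }
        }
    }
}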

Usage

From source file:apim.restful.importexport.utils.APIImportUtil.java

/**
 * This method imports an API.
 *
 * @param pathToArchive            location of the extracted folder of the API
 * @param currentUser              the current logged in user
 * @param isDefaultProviderAllowed decision to keep or replace the provider
 * @throws APIImportException     if there is an error in importing an API
 */
public static void importAPI(String pathToArchive, String currentUser, boolean isDefaultProviderAllowed)
        throws APIImportException {

    API importedApi;

    // If the original provider is preserved,
    if (isDefaultProviderAllowed) {

        FileInputStream inputStream = null;
        BufferedReader bufferedReader = null;

        try {
            inputStream = new FileInputStream(pathToArchive + APIImportExportConstants.JSON_FILE_LOCATION);
            bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
            importedApi = new Gson().fromJson(bufferedReader, API.class);
        } catch (FileNotFoundException e) {
            log.error("Error in locating api.json file. ", e);
            throw new APIImportException("Error in locating api.json file. " + e.getMessage());
        } finally {
            IOUtils.closeQuietly(inputStream);
            IOUtils.closeQuietly(bufferedReader);
        }
    } else {

        String pathToJSONFile = pathToArchive + APIImportExportConstants.JSON_FILE_LOCATION;

        try {
            String jsonContent = FileUtils.readFileToString(new File(pathToJSONFile));
            JsonElement configElement = new JsonParser().parse(jsonContent);
            JsonObject configObject = configElement.getAsJsonObject();

            //locate the "providerName" within the "id" and set it as the current user
            JsonObject apiId = configObject.getAsJsonObject(APIImportExportConstants.ID_ELEMENT);
            apiId.addProperty(APIImportExportConstants.PROVIDER_ELEMENT,
                    APIUtil.replaceEmailDomain(currentUser));
            importedApi = new Gson().fromJson(configElement, API.class);

        } catch (IOException e) {
            log.error("Error in setting API provider to logged in user. ", e);
            throw new APIImportException("Error in setting API provider to logged in user. " + e.getMessage());
        }
    }

    Set<Tier> allowedTiers;
    Set<Tier> unsupportedTiersList;

    try {
        allowedTiers = provider.getTiers();
    } catch (APIManagementException e) {
        log.error("Error in retrieving tiers of the provider. ", e);
        throw new APIImportException("Error in retrieving tiers of the provider. " + e.getMessage());
    }

    if (!(allowedTiers.isEmpty())) {
        unsupportedTiersList = Sets.difference(importedApi.getAvailableTiers(), allowedTiers);

        //If at least one unsupported tier is found, it should be removed before adding API
        if (!(unsupportedTiersList.isEmpty())) {
            for (Tier unsupportedTier : unsupportedTiersList) {

                //The process continues with a warning and only supported tiers are added to the imported API
                log.warn("Tier name : " + unsupportedTier.getName() + " is not supported.");
            }

            //Remove the unsupported tiers before adding the API
            importedApi.removeAvailableTiers(unsupportedTiersList);
        }
    }

    try {
        int tenantId = APIUtil.getTenantId(currentUser);
        provider.addAPI(importedApi);
        addSwaggerDefinition(importedApi, pathToArchive, tenantId);
    } catch (APIManagementException e) {
        //Error is logged and APIImportException is thrown because adding API and swagger are mandatory steps
        log.error("Error in adding API to the provider. ", e);
        throw new APIImportException("Error in adding API to the provider. " + e.getMessage());
    }
    //Since image, documents, sequences and WSDL are optional, exceptions are logged and ignored in the implementation
    addAPIImage(pathToArchive, importedApi);
    addAPIDocuments(pathToArchive, importedApi);
    addAPISequences(pathToArchive, importedApi, currentUser);
    addAPIWsdl(pathToArchive, importedApi, currentUser);

}
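
For reference, the Sets.difference / isEmpty guard used in importAPI above can be reproduced standalone. A minimal sketch, assuming Guava is on the classpath and using plain String names in place of Tier objects:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import com.google.common.collect.Sets;

public class UnsupportedTierSketch {
    public static void main(String[] args) {
        Set<String> requestedTiers = new HashSet<>(Arrays.asList("Gold", "Silver", "Experimental"));
        Set<String> allowedTiers = new HashSet<>(Arrays.asList("Gold", "Silver", "Bronze"));

        // Sets.difference returns a view of the elements in requestedTiers that are not in allowedTiers.
        Set<String> unsupportedTiers = Sets.difference(requestedTiers, allowedTiers);

        // Same guard as the import code: only warn when something is actually unsupported.
        if (!unsupportedTiers.isEmpty()) {
            for (String tier : unsupportedTiers) {
                System.out.println("Tier name : " + tier + " is not supported.");
            }
        }
    }
}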

From source file:com.netflix.genie.core.jpa.specifications.JpaClusterSpecs.java

/**
 * Get all the clusters given the specified parameters.
 *
 * @param clusterCriteria The cluster criteria
 * @param commandCriteria The command Criteria
 * @return The specification
 */
public static Specification<ClusterEntity> findByClusterAndCommandCriteria(
        final ClusterCriteria clusterCriteria, final Set<String> commandCriteria) {
    return (final Root<ClusterEntity> root, final CriteriaQuery<?> cq, final CriteriaBuilder cb) -> {
        final List<Predicate> predicates = new ArrayList<>();
        final Join<ClusterEntity, CommandEntity> commands = root.join(ClusterEntity_.commands);

        cq.distinct(true);

        predicates.add(cb.equal(root.get(ClusterEntity_.status), ClusterStatus.UP));

        if (clusterCriteria != null && clusterCriteria.getTags() != null
                && !clusterCriteria.getTags().isEmpty()) {
            predicates.add(cb.like(root.get(ClusterEntity_.tags),
                    JpaSpecificationUtils.getTagLikeString(clusterCriteria.getTags())));
        }

        predicates.add(cb.equal(commands.get(CommandEntity_.status), CommandStatus.ACTIVE));

        if (commandCriteria != null && !commandCriteria.isEmpty()) {
            predicates.add(cb.like(commands.get(CommandEntity_.tags),
                    JpaSpecificationUtils.getTagLikeString(commandCriteria)));
        }

        return cb.and(predicates.toArray(new Predicate[predicates.size()]));
    };
}

From source file:net.antidot.semantic.rdf.rdb2rdf.r2rml.core.R2RMLMappingFactory.java

/**
 * Extract logicalTable contents.
 * 
 * @param r2rmlMappingGraph
 * @param triplesMapSubject
 * @return
 * @throws InvalidR2RMLStructureException
 * @throws InvalidR2RMLSyntaxException
 * @throws R2RMLDataError
 */
private static LogicalTable extractLogicalTable(SesameDataSet r2rmlMappingGraph, Resource triplesMapSubject)
        throws InvalidR2RMLStructureException, InvalidR2RMLSyntaxException, R2RMLDataError {

    // Extract logical table blank node
    URI p = r2rmlMappingGraph.URIref(R2RMLVocabulary.R2RML_NAMESPACE + R2RMLTerm.LOGICAL_TABLE);
    List<Statement> statements = r2rmlMappingGraph.tuplePattern(triplesMapSubject, p, null);
    if (statements.isEmpty())
        throw new InvalidR2RMLStructureException("[R2RMLMappingFactory:extractLogicalTable] "
                + triplesMapSubject + " has no logical table defined.");
    if (statements.size() > 1)
        throw new InvalidR2RMLStructureException("[R2RMLMappingFactory:extractLogicalTable] "
                + triplesMapSubject + " has too many logical table defined.");
    Resource blankLogicalTable = (Resource) statements.get(0).getObject();

    // Check SQL base table or view
    URI pName = r2rmlMappingGraph.URIref(R2RMLVocabulary.R2RML_NAMESPACE + R2RMLTerm.TABLE_NAME);
    List<Statement> statementsName = r2rmlMappingGraph.tuplePattern(blankLogicalTable, pName, null);
    URI pView = r2rmlMappingGraph.URIref(R2RMLVocabulary.R2RML_NAMESPACE + R2RMLTerm.SQL_QUERY);
    List<Statement> statementsView = r2rmlMappingGraph.tuplePattern(blankLogicalTable, pView, null);
    LogicalTable logicalTable = null;
    if (!statementsName.isEmpty()) {
        if (statementsName.size() > 1)
            throw new InvalidR2RMLStructureException("[R2RMLMappingFactory:extractLogicalTable] "
                    + triplesMapSubject + " has too many logical table name defined.");
        if (!statementsView.isEmpty())
            throw new InvalidR2RMLStructureException(
                    "[R2RMLMappingFactory:extractLogicalTable] " + triplesMapSubject
                            + " can't have a logical table and sql query defined" + " at the ame time.");
        // Table name defined
        logicalTable = new StdSQLBaseTableOrView(statementsName.get(0).getObject().stringValue());
    } else {
        // Logical table defined by R2RML View
        if (statementsView.size() > 1)
            throw new InvalidR2RMLStructureException("[R2RMLMappingFactory:extractLogicalTable] "
                    + triplesMapSubject + " has too many logical table defined.");
        if (statementsView.isEmpty())
            throw new InvalidR2RMLStructureException("[R2RMLMappingFactory:extractLogicalTable] "
                    + triplesMapSubject + " has no logical table defined.");
        // Check SQL versions
        URI pVersion = r2rmlMappingGraph.URIref(R2RMLVocabulary.R2RML_NAMESPACE + R2RMLTerm.SQL_VERSION);

        List<Statement> statementsVersion = r2rmlMappingGraph.tuplePattern(statementsView.get(0).getSubject(),
                pVersion, null);
        String sqlQuery = statementsView.get(0).getObject().stringValue();
        if (statementsVersion.isEmpty())
            logicalTable = new StdR2RMLView(sqlQuery);
        Set<SQLVersion> versions = new HashSet<SQLVersion>();
        for (Statement statementVersion : statementsVersion) {

            SQLVersion sqlVersion = SQLVersion.getSQLVersion(statementVersion.getObject().stringValue());
            versions.add(sqlVersion);
        }
        if (versions.isEmpty()) {
            // SQL 2008 by default
            if (log.isDebugEnabled())
                log.debug("[R2RMLMappingFactory:extractLogicalTable] " + triplesMapSubject
                        + " has no SQL version defined : SQL 2008 by default");
        }
        logicalTable = new StdR2RMLView(sqlQuery, versions);
    }
    log.debug("[R2RMLMappingFactory:extractLogicalTable] Logical table extracted : " + logicalTable);
    return logicalTable;
}

From source file:blusunrize.immersiveengineering.api.energy.wires.ImmersiveNetHandler.java

private static void handleMapForDamage(Set<Triple<Connection, Vec3d, Vec3d>> in, EntityLivingBase e,
        BlockPos here) {
    final double KNOCKBACK_PER_DAMAGE = 10;
    if (!in.isEmpty()) {
        AxisAlignedBB eAabb = e.getEntityBoundingBox();
        for (Triple<Connection, Vec3d, Vec3d> conn : in)
            if (conn.getLeft().cableType.canCauseDamage()) {
                double extra = conn.getLeft().cableType.getDamageRadius();
                AxisAlignedBB includingExtra = eAabb.grow(extra).offset(-here.getX(), -here.getY(),
                        -here.getZ());
                boolean endpointsInEntity = includingExtra.contains(conn.getMiddle())
                        || includingExtra.contains(conn.getRight());
                RayTraceResult rayRes = endpointsInEntity ? null
                        : includingExtra.calculateIntercept(conn.getMiddle(), conn.getRight());
                if (endpointsInEntity || (rayRes != null && rayRes.typeOfHit == RayTraceResult.Type.BLOCK)) {
                    IImmersiveConnectable iic = toIIC(conn.getLeft().start, e.world);
                    float damage = 0;
                    if (iic != null)
                        damage = iic.getDamageAmount(e, conn.getLeft());
                    if (damage == 0) {
                        iic = toIIC(conn.getLeft().end, e.world);
                        if (iic != null)
                            damage = iic.getDamageAmount(e, conn.getLeft());
                    }
                    if (damage != 0) {
                        IEDamageSources.ElectricDamageSource dmg = IEDamageSources.causeWireDamage(damage,
                                conn.getLeft().cableType.getElectricSource());
                        if (dmg.apply(e)) {
                            damage = dmg.dmg;
                            Vec3d v = e.getLookVec();
                            knockbackNoSource(e, damage / KNOCKBACK_PER_DAMAGE, v.x, v.z);
                            iic.processDamage(e, damage, conn.getLeft());
                        }
                    }
                }
            }
    }
}

From source file:ezbake.security.permissions.PermissionUtils.java

/**
 * Get permissions for user (based on their authorizations) against the data's visibility.
 *
 * @param auths Authorizations of the user
 * @param visibility Visibility of the data
 * @param validateFormalAuths true to validate formal authorizations, false to skip
 * @param subsetToCheck The subset of permissions to check (to avoid more bitvector operations than needed)
 * @return The set of permissions the user has for the data (a subset of the subsetToCheck)
 */
public static Set<Permission> getPermissions(Authorizations auths, Visibility visibility,
        boolean validateFormalAuths, Set<Permission> subsetToCheck) {
    if (visibility == null) {
        return ALL_PERMS; // No visibility to check
    }

    if (auths == null) {
        return NO_PERMS; // Has visibility but no auths
    }

    if (validateFormalAuths && !validateVisibilityExpression(auths.getFormalAuthorizations(),
            visibility.getFormalVisibility())) {
        return NO_PERMS; // Formals auths check failed
    }

    final AdvancedMarkings markings = visibility.getAdvancedMarkings();
    if (markings == null) {
        return ALL_PERMS; // No further visibility to check
    }

    if (!validateVisibilityExpression(auths.getExternalCommunityAuthorizations(),
            markings.getExternalCommunityVisibility())) {
        return NO_PERMS; // External community auths check failed
    }

    final PlatformObjectVisibilities pov = markings.getPlatformObjectVisibility();
    if (pov == null) {
        return ALL_PERMS; // No further visibility to check
    }

    final Set<Long> authCheck = auths.getPlatformObjectAuthorizations();
    final Set<Permission> perms = EnumSet.noneOf(Permission.class);
    for (final Permission permToCheck : subsetToCheck) {
        Set<Long> permVisibility = null;
        switch (permToCheck) {
        case READ:
            permVisibility = pov.getPlatformObjectReadVisibility();
            break;
        case WRITE:
            permVisibility = pov.getPlatformObjectWriteVisibility();
            break;
        case MANAGE_VISIBILITY:
            permVisibility = pov.getPlatformObjectManageVisibility();
            break;
        case DISCOVER:
            permVisibility = pov.getPlatformObjectDiscoverVisibility();
            break;
        default:
            throw new IllegalArgumentException("Unknown Permission enum value" + permToCheck);
        }

        // Null/Empty visibility means world-accessible, else check intersection
        if (permVisibility == null || permVisibility.isEmpty()
                || authCheck != null && !Sets.intersection(authCheck, permVisibility).isEmpty()) {
            perms.add(permToCheck);
        }
    }

    return perms;
}

From source file:com.netflix.genie.server.repository.jpa.JobSpecs.java

/**
 * Find jobs based on the parameters.
 *
 * @param id          The job id
 * @param jobName     The job name
 * @param userName    The user who created the job
 * @param statuses    The job statuses
 * @param tags        The tags for the jobs to find
 * @param clusterName The cluster name
 * @param clusterId   The cluster id
 * @param commandName The command name
 * @param commandId   The command id
 * @return The specification
 */
public static Specification<Job> find(final String id, final String jobName, final String userName,
        final Set<JobStatus> statuses, final Set<String> tags, final String clusterName, final String clusterId,
        final String commandName, final String commandId) {
    return new Specification<Job>() {
        @Override
        public Predicate toPredicate(final Root<Job> root, final CriteriaQuery<?> cq,
                final CriteriaBuilder cb) {
            final List<Predicate> predicates = new ArrayList<>();
            if (StringUtils.isNotBlank(id)) {
                predicates.add(cb.like(root.get(Job_.id), id));
            }
            if (StringUtils.isNotBlank(jobName)) {
                predicates.add(cb.like(root.get(Job_.name), jobName));
            }
            if (StringUtils.isNotBlank(userName)) {
                predicates.add(cb.equal(root.get(Job_.user), userName));
            }
            if (statuses != null && !statuses.isEmpty()) {
                //Could optimize this as we know size could use native array
                final List<Predicate> orPredicates = new ArrayList<>();
                for (final JobStatus status : statuses) {
                    orPredicates.add(cb.equal(root.get(Job_.status), status));
                }
                predicates.add(cb.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
            }
            if (tags != null) {
                for (final String tag : tags) {
                    if (StringUtils.isNotBlank(tag)) {
                        predicates.add(cb.isMember(tag, root.get(Job_.tags)));
                    }
                }
            }
            if (StringUtils.isNotBlank(clusterName)) {
                predicates.add(cb.equal(root.get(Job_.executionClusterName), clusterName));
            }
            if (StringUtils.isNotBlank(clusterId)) {
                predicates.add(cb.equal(root.get(Job_.executionClusterId), clusterId));
            }
            if (StringUtils.isNotBlank(commandName)) {
                predicates.add(cb.equal(root.get(Job_.commandName), commandName));
            }
            if (StringUtils.isNotBlank(commandId)) {
                predicates.add(cb.equal(root.get(Job_.commandId), commandId));
            }
            return cb.and(predicates.toArray(new Predicate[predicates.size()]));
        }
    };
}

From source file:persistence.BranchDao.java

public List<Branch> getList(Set<Long> ids) {
    if (!ids.isEmpty()) {
        Criteria crit = currentSession().createCriteria(Branch.class);
        crit.setResultTransformer(Criteria.DISTINCT_ROOT_ENTITY);
        if (!ids.isEmpty()) {
            crit.add(Restrictions.in("branchId", ids));
        }
        return crit.list();
    } else {
        return new ArrayList<Branch>();
    }
}

From source file:com.amalto.core.storage.hibernate.EntityFinder.java

/**
 * Starting from <code>wrapper</code>, goes up the containment tree using references introspection in metadata.
 * @param wrapper A {@link Wrapper} instance (so an object managed by {@link HibernateStorage}).
 * @param storage A {@link HibernateStorage} instance. It will be used to compute references from the internal
 *                data model.
 * @param session A Hibernate {@link Session}.
 * @return The top level (aka the Wrapper instance that represents an MDM entity).
 */
public static Wrapper findEntity(Wrapper wrapper, HibernateStorage storage, Session session) {
    ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
    if (!(contextClassLoader instanceof StorageClassLoader)) {
        throw new IllegalStateException("Expects method to be called in the context of a storage operation.");
    }
    StorageClassLoader classLoader = (StorageClassLoader) contextClassLoader;
    ComplexTypeMetadata wrapperType = classLoader.getTypeFromClass(wrapper.getClass());
    if (wrapperType == null) {
        throw new IllegalArgumentException(
                "Wrapper '" + wrapper.getClass().getName() + "' isn't known in current storage.");
    }
    if (wrapperType.isInstantiable()) {
        return wrapper;
    }
    InboundReferences incomingReferences = new InboundReferences(wrapperType);
    InternalRepository internalRepository = storage.getTypeEnhancer();
    Set<ReferenceFieldMetadata> references = internalRepository.getInternalRepository()
            .accept(incomingReferences);
    if (references.isEmpty()) {
        throw new IllegalStateException("Cannot find container type for '" + wrapperType.getName() + "'.");
    }
    String keyFieldName = wrapperType.getKeyFields().iterator().next().getName();
    Object id = wrapper.get(keyFieldName);
    for (ReferenceFieldMetadata reference : references) {
        ComplexTypeMetadata containingType = reference.getContainingType();
        Class<? extends Wrapper> clazz = classLoader.getClassFromType(containingType);
        Criteria criteria = session.createCriteria(clazz, "a0"); //$NON-NLS-1$
        criteria.createAlias("a0." + reference.getName(), "a1", CriteriaSpecification.INNER_JOIN); //$NON-NLS-1$
        criteria.add(Restrictions.eq("a1." + keyFieldName, id)); //$NON-NLS-1$
        List list = criteria.list();
        if (!list.isEmpty()) {
            Wrapper container = (Wrapper) list.get(0);
            if (list.size() > 1) {
                Object previousItem = list.get(0);
                for (int i = 1; i < list.size(); i++) {
                    Object currentItem = list.get(i);
                    if (!previousItem.equals(currentItem)) {
                        throw new IllegalStateException("Expected contained instance to have only one owner.");
                    }
                    previousItem = currentItem;
                }
            }
            return findEntity(container, storage, session);
        }
    }
    return null;
}

From source file:com.ikanow.aleph2.analytics.storm.utils.StormControllerUtil.java

/**
 * Starts up a storm job.
 * 
 * 1. gets the storm instance from the yarn config
 * 2. Makes a mega jar consisting of:
 *    A. Underlying artefacts (system libs)
 *    B. User supplied libraries
 * 3. Submit megajar to storm with jobname of the bucket id
 * 
 * @param bucket
 * @param underlying_artefacts
 * @param yarn_config_dir
 * @param user_lib_paths
 * @param topology
 * @return
 */
public static CompletableFuture<BasicMessageBean> startJob(final IStormController storm_controller,
        final DataBucketBean bucket, final Optional<String> sub_job,
        final Collection<Object> underlying_artefacts, final Collection<String> user_lib_paths,
        final StormTopology topology, final Map<String, String> config, final String cached_jar_dir) {
    if (null == topology) {
        return CompletableFuture.completedFuture(ErrorUtils.buildErrorMessage(StormControllerUtil.class,
                "startJob", ErrorUtils.TOPOLOGY_NULL_ERROR, bucket.full_name()));
    }

    _logger.info("Retrieved user Storm config topology: spouts=" + topology.get_spouts_size() + " bolts="
            + topology.get_bolts_size() + " configs=" + config.toString());

    final Set<String> jars_to_merge = new TreeSet<String>();

    final CompletableFuture<String> jar_future = Lambdas.get(() -> {
        if (RemoteStormController.class.isAssignableFrom(storm_controller.getClass())) {
            // (This is only necessary in the remote case)

            jars_to_merge.addAll(underlying_artefacts.stream()
                    .map(artefact -> LiveInjector.findPathJar(artefact.getClass(), ""))
                    .filter(f -> !f.equals("")).collect(Collectors.toSet()));

            if (jars_to_merge.isEmpty()) { // special case: no aleph2 libs found, this is almost certainly because this is being run from eclipse...
                final GlobalPropertiesBean globals = ModuleUtils.getGlobalProperties();
                _logger.warn(
                        "WARNING: no library files found, probably because this is running from an IDE - instead taking all JARs from: "
                                + (globals.local_root_dir() + "/lib/"));
                try {
                    //... and LiveInjector doesn't work on classes ... as a backup just copy everything from "<LOCAL_ALEPH2_HOME>/lib" into there
                    jars_to_merge
                            .addAll(FileUtils
                                    .listFiles(new File(globals.local_root_dir() + "/lib/"),
                                            new String[] { "jar" }, false)
                                    .stream().map(File::toString).collect(Collectors.toList()));
                } catch (Exception e) {
                    throw new RuntimeException("In eclipse/IDE mode, directory not found: "
                            + (globals.local_root_dir() + "/lib/"));
                }
            }
            //add in the user libs
            jars_to_merge.addAll(user_lib_paths);

            //create jar
            return buildOrReturnCachedStormTopologyJar(jars_to_merge, cached_jar_dir);
        } else {
            return CompletableFuture.completedFuture("/unused/dummy.jar");
        }
    });

    //submit to storm
    @SuppressWarnings("unchecked")
    final CompletableFuture<BasicMessageBean> submit_future = Lambdas.get(() -> {
        long retries = 0;
        while (retries < MAX_RETRIES) {
            try {
                _logger.debug("Trying to submit job, try: " + retries + " of " + MAX_RETRIES);
                final String jar_file_location = jar_future.get();
                return storm_controller.submitJob(bucketPathToTopologyName(bucket, sub_job), jar_file_location,
                        topology, (Map<String, Object>) (Map<String, ?>) config);
            } catch (Exception ex) {
                if (ex instanceof AlreadyAliveException) {
                    retries++;
                    //sleep 1s, was seeing about 2s of sleep required before job successfully submitted on restart
                    try {
                        Thread.sleep(1000);
                    } catch (Exception e) {
                        final CompletableFuture<BasicMessageBean> error_future = new CompletableFuture<BasicMessageBean>();
                        error_future.completeExceptionally(e);
                        return error_future;
                    }
                } else {
                    retries = MAX_RETRIES; //we threw some other exception, bail out
                    final CompletableFuture<BasicMessageBean> error_future = new CompletableFuture<BasicMessageBean>();
                    error_future.completeExceptionally(ex);
                    return error_future;
                }
            }
        }
        //we maxed out our retries, throw failure
        final CompletableFuture<BasicMessageBean> error_future = new CompletableFuture<BasicMessageBean>();
        error_future.completeExceptionally(new Exception(
                "Error submitting job, ran out of retries (previous (same name) job is probably still alive)"));
        return error_future;
    });
    return submit_future;
}

From source file:com.netflix.genie.core.jpa.specifications.JpaJobSpecs.java

/**
 * Generate a criteria query predicate for a where clause based on the given parameters.
 *
 * @param root        The root to use
 * @param cb          The criteria builder to use
 * @param id          The job id
 * @param name        The job name
 * @param user        The user who created the job
 * @param statuses    The job statuses
 * @param tags        The tags for the jobs to find
 * @param clusterName The cluster name
 * @param cluster     The cluster the job should have been run on
 * @param commandName The command name
 * @param command     The command the job should have been run with
 * @param minStarted  The time which the job had to start after in order to be returned (inclusive)
 * @param maxStarted  The time which the job had to start before in order to be returned (exclusive)
 * @param minFinished The time which the job had to finish after in order to be returned (inclusive)
 * @param maxFinished The time which the job had to finish before in order to be returned (exclusive)
 * @return The specification
 */
public static Predicate getFindPredicate(final Root<JobEntity> root, final CriteriaBuilder cb, final String id,
        final String name, final String user, final Set<JobStatus> statuses, final Set<String> tags,
        final String clusterName, final ClusterEntity cluster, final String commandName,
        final CommandEntity command, final Date minStarted, final Date maxStarted, final Date minFinished,
        final Date maxFinished) {
    final List<Predicate> predicates = new ArrayList<>();
    if (StringUtils.isNotBlank(id)) {
        predicates.add(JpaSpecificationUtils.getStringLikeOrEqualPredicate(cb, root.get(JobEntity_.id), id));
    }
    if (StringUtils.isNotBlank(name)) {
        predicates
                .add(JpaSpecificationUtils.getStringLikeOrEqualPredicate(cb, root.get(JobEntity_.name), name));
    }
    if (StringUtils.isNotBlank(user)) {
        predicates
                .add(JpaSpecificationUtils.getStringLikeOrEqualPredicate(cb, root.get(JobEntity_.user), user));
    }
    if (statuses != null && !statuses.isEmpty()) {
        final List<Predicate> orPredicates = statuses.stream()
                .map(status -> cb.equal(root.get(JobEntity_.status), status)).collect(Collectors.toList());
        predicates.add(cb.or(orPredicates.toArray(new Predicate[orPredicates.size()])));
    }
    if (tags != null && !tags.isEmpty()) {
        predicates.add(cb.like(root.get(JobEntity_.tags), JpaSpecificationUtils.getTagLikeString(tags)));
    }
    if (cluster != null) {
        predicates.add(cb.equal(root.get(JobEntity_.cluster), cluster));
    }
    if (StringUtils.isNotBlank(clusterName)) {
        predicates.add(JpaSpecificationUtils.getStringLikeOrEqualPredicate(cb, root.get(JobEntity_.clusterName),
                clusterName));
    }
    if (command != null) {
        predicates.add(cb.equal(root.get(JobEntity_.command), command));
    }
    if (StringUtils.isNotBlank(commandName)) {
        predicates.add(JpaSpecificationUtils.getStringLikeOrEqualPredicate(cb, root.get(JobEntity_.commandName),
                commandName));
    }
    if (minStarted != null) {
        predicates.add(cb.greaterThanOrEqualTo(root.get(JobEntity_.started), minStarted));
    }
    if (maxStarted != null) {
        predicates.add(cb.lessThan(root.get(JobEntity_.started), maxStarted));
    }
    if (minFinished != null) {
        predicates.add(cb.greaterThanOrEqualTo(root.get(JobEntity_.finished), minFinished));
    }
    if (maxFinished != null) {
        predicates.add(cb.lessThan(root.get(JobEntity_.finished), maxFinished));
    }
    return cb.and(predicates.toArray(new Predicate[predicates.size()]));
}