Example usage for java.util TreeSet contains

List of usage examples for java.util TreeSet contains

Introduction

On this page you can find example usage for java.util.TreeSet.contains.

Prototype

public boolean contains(Object o) 

Document

Returns true if this set contains the specified element.
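
For quick reference, the minimal sketch below shows the call in isolation; the class name and values are invented for illustration. Membership is decided by the set's ordering (the natural ordering here, or the Comparator the set was constructed with), and the lookup takes O(log n) time.

import java.util.TreeSet;

public class TreeSetContainsExample {
    public static void main(String[] args) {
        TreeSet<String> names = new TreeSet<String>();
        names.add("alice");
        names.add("bob");

        // contains() returns true only if an element comparing equal is present
        System.out.println(names.contains("alice")); // true
        System.out.println(names.contains("carol")); // false
    }
}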

Usage

From source file:org.codehaus.enunciate.modules.jboss.JBossDeploymentModule.java

@Override
protected void doBuild() throws EnunciateException, IOException {
    super.doBuild();

    BaseWebAppFragment webappFragment = new BaseWebAppFragment(getName());
    HashMap<String, String> jbossContextParameters = new HashMap<String, String>();
    webappFragment.setContextParameters(jbossContextParameters);

    ArrayList<WebAppComponent> servlets = new ArrayList<WebAppComponent>();
    if (this.enableJaxws) {
        for (WsdlInfo wsdlInfo : getModelInternal().getNamespacesToWSDLs().values()) {
            for (EndpointInterface ei : wsdlInfo.getEndpointInterfaces()) {
                String path = (String) ei.getMetaData().get("soapPath");
                WebAppComponent wsComponent = new WebAppComponent();
                wsComponent.setName(ei.getServiceName());
                wsComponent.setClassname(ei.getEndpointImplementations().iterator().next().getQualifiedName());
                wsComponent.setUrlMappings(new TreeSet<String>(Arrays.asList(path)));
                servlets.add(wsComponent);
            }
        }
    }

    if (this.enableJaxrs) {
        WebAppComponent jaxrsServletComponent = new WebAppComponent();
        jaxrsServletComponent.setName("resteasy-jaxrs");
        jaxrsServletComponent.setClassname(EnunciateJBossHttpServletDispatcher.class.getName());
        TreeSet<String> jaxrsUrlMappings = new TreeSet<String>();
        StringBuilder resources = new StringBuilder();
        for (RootResource rootResource : getModel().getRootResources()) {
            if (resources.length() > 0) {
                resources.append(',');
            }
            resources.append(rootResource.getQualifiedName());

            for (ResourceMethod resourceMethod : rootResource.getResourceMethods(true)) {
                String resourceMethodPattern = resourceMethod.getServletPattern();
                for (Set<String> subcontextList : ((Map<String, Set<String>>) resourceMethod.getMetaData()
                        .get("subcontexts")).values()) {
                    for (String subcontext : subcontextList) {
                        String servletPattern;
                        if ("".equals(subcontext)) {
                            servletPattern = resourceMethodPattern;
                        } else {
                            servletPattern = subcontext + resourceMethodPattern;
                        }

                        if (jaxrsUrlMappings.add(servletPattern)) {
                            debug("Resource method %s of resource %s to be made accessible by servlet pattern %s.",
                                    resourceMethod.getSimpleName(),
                                    resourceMethod.getParent().getQualifiedName(), servletPattern);
                        }
                    }
                }
            }
        }

        if (jaxrsUrlMappings.contains("/*")) {
            jaxrsUrlMappings.clear();
            jaxrsUrlMappings.add("/*");
        } else {
            Iterator<String> iterator = jaxrsUrlMappings.iterator();
            while (iterator.hasNext()) {
                String mapping = iterator.next();
                if (!mapping.endsWith("/*") && jaxrsUrlMappings.contains(mapping + "/*")) {
                    iterator.remove();
                }
            }
        }

        StringBuilder providers = new StringBuilder();
        for (TypeDeclaration provider : getModel().getJAXRSProviders()) {
            if (providers.length() > 0) {
                providers.append(',');
            }

            providers.append(provider.getQualifiedName());
        }

        if (jacksonAvailable) {
            if (providers.length() > 0) {
                providers.append(',');
            }

            providers.append("org.codehaus.enunciate.jboss.ResteasyJacksonJaxbProvider");
        }

        if (getEnunciate().isModuleEnabled("amf")) {
            if (providers.length() > 0) {
                providers.append(',');
            }

            providers.append("org.codehaus.enunciate.modules.amf.JAXRSProvider");
        }

        jaxrsServletComponent.setUrlMappings(jaxrsUrlMappings);
        jbossContextParameters.put(ResteasyContextParameters.RESTEASY_RESOURCES, resources.toString());
        jbossContextParameters.put(ResteasyContextParameters.RESTEASY_PROVIDERS, providers.toString());
        String mappingPrefix = this.useSubcontext ? getRestSubcontext() : "";
        if (!"".equals(mappingPrefix)) {
            jbossContextParameters.put("resteasy.servlet.mapping.prefix", mappingPrefix);
            jaxrsServletComponent.addInitParam("resteasy.servlet.mapping.prefix", mappingPrefix);
        }
        if (isUsePathBasedConneg()) {
            Map<String, String> contentTypesToIds = getModelInternal().getContentTypesToIds();
            if (contentTypesToIds != null && contentTypesToIds.size() > 0) {
                StringBuilder builder = new StringBuilder();
                Iterator<Map.Entry<String, String>> contentTypeIt = contentTypesToIds.entrySet().iterator();
                while (contentTypeIt.hasNext()) {
                    Map.Entry<String, String> contentTypeEntry = contentTypeIt.next();
                    builder.append(contentTypeEntry.getValue()).append(" : ").append(contentTypeEntry.getKey());
                    if (contentTypeIt.hasNext()) {
                        builder.append(", ");
                    }
                }
                jbossContextParameters.put(ResteasyContextParameters.RESTEASY_MEDIA_TYPE_MAPPINGS,
                        builder.toString());
            }
        }
        jbossContextParameters.put(ResteasyContextParameters.RESTEASY_SCAN_RESOURCES, Boolean.FALSE.toString());
        servlets.add(jaxrsServletComponent);
    }

    webappFragment.setServlets(servlets);
    if (!this.options.isEmpty()) {
        webappFragment.setContextParameters(this.options);
    }
    getEnunciate().addWebAppFragment(webappFragment);
}

From source file:net.spfbl.data.Block.java

public static TreeSet<String> getAllTokens(String value) {
    TreeSet<String> blockSet = new TreeSet<String>();
    if (Subnet.isValidIP(value)) {
        String ip = Subnet.normalizeIP(value);
        if (SET.contains(ip)) {
            blockSet.add(ip);
        }
    } else if (Subnet.isValidCIDR(value)) {
        String cidr = Subnet.normalizeCIDR(value);
        if (CIDR.contains((String) null, cidr)) {
            blockSet.add(cidr);
        }
        TreeSet<String> set = SET.getAll();
        for (String ip : set) {
            if (Subnet.containsIP(cidr, ip)) {
                blockSet.add(ip);
            }
        }
        for (String ip : set) {
            if (SubnetIPv6.containsIP(cidr, ip)) {
                blockSet.add(ip);
            }
        }
    } else if (Domain.isHostname(value)) {
        LinkedList<String> regexList = new LinkedList<String>();
        String host = Domain.normalizeHostname(value, true);
        do {
            int index = host.indexOf('.') + 1;
            host = host.substring(index);
            if (Block.dropExact('.' + host)) {
                blockSet.add('.' + host);
                regexList.addFirst('.' + host);
            }
        } while (host.contains("."));
    } else if (SET.contains(value)) {
        blockSet.add(value);
    }
    return blockSet;
}

From source file:com.fortmoon.utils.CSVDBLoader.java

private void getColumnClass(int columnNum) throws IOException {
    log.trace("called");
    // Let's try and find uniques without running out of memory. Dump the list as soon as we find a dup.

    TreeSet<String> uniques = new TreeSet<String>();
    numLines = 1;
    String line = lr.readLine();
    ColumnBean column = this.columnModel.get(columnNum);
    while (line != null) {
        String[] result = line.split("\t");
        String val = result[columnNum];
        //         String colName = this.columnNames.get(columnNum);
        if (val.isEmpty()) {
            if (log.isDebugEnabled())
                log.debug("********NULL Value for column: " + column + "*********");
            column.setNullable(true);
            column.setUnique(false);
        } else {
            if (log.isDebugEnabled())
                log.debug("Value for column: " + column.getName() + " = " + val + " line = " + line);
            SQLTYPE colType = SQLUtil.getType(val);
            if (skipBlobs && (colType == SQLTYPE.BLOB || colType == SQLTYPE.LONGBLOB))
                colType = SQLTYPE.VARCHAR;

            log.debug("SQL Type: " + colType);
            if (column.getType().getValue() > colType.getValue()) {
                column.setType(colType);
            }
            int valSize = val.length();
            if (skipBlobs && valSize > 255)
                valSize = 255;
            if (column.getColumnSize() < valSize) {
                column.setColumnSize(valSize);
                log.debug("Size for column: " + column.getName() + " = " + column.getColumnSize());
            }
            digest.update(val.getBytes());
            String hash = new String(digest.digest());
            if (uniques != null && !uniques.contains(hash))
                uniques.add(hash);
            else {
                uniques = null;
                column.setUnique(false);
            }

        }
        line = lr.readLine();
        numLines++;
        if (numLines % 100000 == 0)
            log.debug("Finished processing number of lines in first pass scan: " + numLines);
    }
}
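
A side note on the uniqueness tracking above: the contains check before add is not strictly required, because TreeSet.add itself returns false when an element comparing equal is already present, so the two tree lookups can be collapsed into one. A minimal sketch of that pattern, with invented class name and values:

import java.util.TreeSet;

public class DuplicateCheckSketch {
    public static void main(String[] args) {
        TreeSet<String> uniques = new TreeSet<String>();
        String[] hashes = { "a1", "b2", "a1" };
        for (String hash : hashes) {
            // add() returns false when the element is already in the set,
            // replacing a separate contains() check
            if (!uniques.add(hash)) {
                System.out.println("duplicate: " + hash);
            }
        }
    }
}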

From source file:ru.codeinside.adm.AdminServiceImpl.java

public void setOrganizationInGroup(Group group, TreeSet<String> twinValue) {
    List<Long> orgIds = findAllOrganizationIds();
    for (Long orgId : orgIds) {
        Set<String> groups = getOrgGroupNames(orgId);
        Boolean change;
        if (twinValue.contains(orgId.toString())) {
            change = groups.add(group.getName());
        } else {
            change = groups.remove(group.getName());
        }
        if (change) {
            setOrgGroupNames(orgId, groups);
        }
    }
}

From source file:de.julielab.jcore.ae.jnet.uima.ConsistencyPreservation.java

/**
 * Consistency preservation based on (exact) string matching: if a string was
 * annotated once as an entity, all other occurrences of this string get the
 * same label. Used for mode: _string_.
 * TODO: a more intelligent (voting) mechanism is needed to avoid false positives.
 * TODO: needs to be checked for performance.
 *
 * @param aJCas
 * @param entityMentionClassnames
 * @param confidenceThresholdForConsistencyPreservation
 * @throws AnalysisEngineProcessException
 */
public void stringMatch(final JCas aJCas, final TreeSet<String> entityMentionClassnames,
        double confidenceThresholdForConsistencyPreservation) throws AnalysisEngineProcessException {

    // check whether this mode is enabled
    if ((activeModes == null) || (activeModes.size() == 0)
            || !activeModes.contains(ConsistencyPreservation.MODE_STRING))
        return;

    final String text = aJCas.getDocumentText();

    final TypeSystem ts = aJCas.getTypeSystem();
    // This map stores the EntityMentions that share the same specificType.
    // We want to use the TreeSet to check - for a given specificType - if
    // there is already an annotation overlapping a specific text offset.
    // See the comparator below.
    final Map<String, TreeSet<EntityMention>> overlapIndex = new HashMap<>();
    // This Comparator checks whether two Entities overlap in any way. If
    // so, they are deemed "equal". The idea is to use this Comparator with
    // a TreeSet in which we store all existing entities. Then, we can
    // efficiently check for a specific span if there already exists any
    // overlapping entity.
    Comparator<EntityMention> overlapComparator = new Comparator<EntityMention>() {

        @Override
        public int compare(EntityMention o1, EntityMention o2) {
            int b1 = o1.getBegin();
            int e1 = o1.getEnd();
            int b2 = o2.getBegin();
            int e2 = o2.getEnd();

            if ((b1 <= b2) && (e1 >= e2)) {
                return 0;
            } else if ((b1 >= b2) && (e1 <= e2)) {
                return 0;
            }
            //
            else if ((b1 < e2) && (e1 > e2)) {
                return 0;
            } else if ((b1 < b2) && (e1 > b2)) {
                return 0;
            }
            return b1 - b2;
        }
    };

    for (final String entityMentionClassname : entityMentionClassnames) {
        // we use the index per entity class; we don't want one class to
        // interfere with another
        overlapIndex.clear();
        try {
            // loop over all entity types to be considered
            EntityMention mentionForOffsetComparison = (EntityMention) JCoReAnnotationTools
                    .getAnnotationByClassName(aJCas, entityMentionClassname);

            LOGGER.debug("doStringBased() - checking consistency for type: " + entityMentionClassname);
            final Multimap<String, EntityMention> entityMap = HashMultimap.create();

            // final EntityMention myEntity = (EntityMention)
            // JCoReAnnotationTools
            // .getAnnotationByClassName(aJCas, entityMentionClassname);
            final Type entityType = ts.getType(entityMentionClassname);
            if (null == entityType)
                throw new IllegalArgumentException(
                        "Entity type \"" + entityMentionClassname + "\" was not found in the type system.");

            // loop over all entity annotations in document and put them in
            // hashmap
            LOGGER.debug("doStringBased() - building entity map");
            final Iterator<Annotation> entityIter = aJCas.getAnnotationIndex(entityType).iterator();
            while (entityIter.hasNext()) {
                final EntityMention entity = (EntityMention) entityIter.next();
                entityMap.put(entity.getCoveredText(), entity);
                // additionally, add the entities into the overlap index so
                // we can later quickly lookup whether there is already an
                // entity with the same specific type at a certain location
                String specificType = "<null>";
                if (!StringUtils.isBlank(entity.getSpecificType()))
                    specificType = entity.getSpecificType();
                TreeSet<EntityMention> set = overlapIndex.get(specificType);
                if (null == set) {
                    set = new TreeSet<>(overlapComparator);
                    overlapIndex.put(specificType, set);
                }
                set.add(entity);

            }

            // now search for strings not detected as this kind of entity
            LOGGER.debug("doStringBased() - searching for missed entities...");
            for (final String entityString : entityMap.keySet()) {
                final EntityMention entity = entityMap.get(entityString).iterator().next();
                String specificType = "<null>";
                if (!StringUtils.isBlank(entity.getSpecificType()))
                    specificType = entity.getSpecificType();
                TreeSet<EntityMention> overlapSet = overlapIndex.get(specificType);

                LOGGER.debug("doStringBased() - checking entity string: " + entityString);

                int pos = 0;
                int length = 0;
                List<EntityMention> stringMatchedEntities = new ArrayList<>();
                while ((pos = text.indexOf(entityString, (pos + length))) > -1) {
                    // for each position where we have found this entity
                    // string
                    LOGGER.debug("doStringBased() - found string at pos: " + pos);

                    // check whether there is already an annotation of this
                    // type
                    // this older approach had the issue that only one
                    // overlapping annotation of entityMentionClassname was
                    // returned; but that annotation could be the wrong one in
                    // that it had a different specific type while
                    // another existed with the same specificType as the
                    // sought entity
                    // EntityMention refEntity = (EntityMention)
                    // JCoReAnnotationTools
                    // .getOverlappingAnnotation(aJCas,
                    // entityMentionClassname, pos, pos
                    // + entityString.length());

                    mentionForOffsetComparison.setBegin(pos);
                    mentionForOffsetComparison.setEnd(pos + length);
                    boolean overlappingExists = overlapSet.contains(mentionForOffsetComparison);

                    // if (refEntity == null
                    // || (refEntity.getSpecificType() == null ^
                    // entity.getSpecificType() == null)
                    // || (refEntity.getSpecificType() != null
                    // && entity.getSpecificType() != null && !refEntity
                    // .getSpecificType().equals(entity.getSpecificType())))
                    // {
                    if (!overlappingExists) {
                        // if there is no annotation of same type on this
                        // text span yet...
                        LOGGER.debug("doStringBased() - adding annotation to unlabeled entity mention");
                        EntityMention refEntity = (EntityMention) JCoReAnnotationTools
                                .getAnnotationByClassName(aJCas, entityMentionClassname);
                        // We will not directly just annotate the found
                        // string but extend it to offsets of
                        // overlapped tokens.
                        List<Token> overlappingTokens = JCoReAnnotationTools.getNearestOverlappingAnnotations(
                                aJCas,
                                new Annotation(entity.getCAS().getJCas(), pos, pos + entityString.length()),
                                Token.class);
                        int begin = overlappingTokens.size() > 0 ? overlappingTokens.get(0).getBegin() : pos;
                        int end = overlappingTokens.size() > 0
                                ? overlappingTokens.get(overlappingTokens.size() - 1).getEnd()
                                : pos + entityString.length();
                        // If we would have to adjust the offsets too much,
                        // we have most likely just hit some
                        // substring of a larger token by coincidence.
                        refEntity.setBegin(begin);
                        refEntity.setEnd(end);
                        refEntity.setSpecificType(entity.getSpecificType());
                        refEntity.setResourceEntryList(entity.getResourceEntryList());
                        refEntity.setConfidence(entity.getConfidence());
                        refEntity.setTextualRepresentation(entity.getTextualRepresentation());
                        refEntity.setComponentId(COMPONENT_ID + " String (" + entity.getCoveredText() + ", "
                                + begin + "-" + end + ")");
                        stringMatchedEntities.add(refEntity);

                    } else
                        LOGGER.debug("doStringBased() - there is already an entity!");

                    length = entityString.length();
                }

                // A.R. 30.06.15: this option can now be turned on, just by
                // setting the config parameter
                // confidenceThresholdForConsistencyPreservation to a value
                // greater than 0
                // earlier it was switched by commenting or
                // un-commenting the following code

                // If confidenceThresholdForConsistencyPreservation is given
                // (value != -1)
                // only add the new entities if there is enough evidence by
                // originally found entities with the same string that
                // this is indeed an entity we would like to find.
                if (confidenceThresholdForConsistencyPreservation > 0) {
                    if (!stringMatchedEntities.isEmpty()) {

                        double meanConfidence = 0;
                        for (EntityMention recognizedEntity : entityMap.get(entityString)) {
                            if (null != entity.getConfidence()) {
                                meanConfidence += Double.parseDouble(recognizedEntity.getConfidence());
                            }
                        }
                        meanConfidence /= entityMap.get(entityString).size();

                        int allMatches = stringMatchedEntities.size() + entityMap.get(entityString).size();
                        if (entityMap.get(entityString).size() >= allMatches / 3d) {
                            if (meanConfidence > confidenceThresholdForConsistencyPreservation) {
                                for (EntityMention refEntity : stringMatchedEntities) {
                                    // we have to add the new entities to
                                    // the overlap-index to avoid duplicates
                                    // by other entities that are a
                                    // substring of the current entity
                                    overlapSet.add(refEntity);
                                    refEntity.addToIndexes();
                                }
                            }
                        }
                    }
                }
                // if confidence score doesn't need to be checked, just add
                // all occurrences
                else {
                    for (EntityMention refEntity : stringMatchedEntities) {
                        // we have to add the new entities to the
                        // overlap-index to avoid duplicates by other
                        // entities that are a substring of the current
                        // entity
                        overlapSet.add(refEntity);
                        refEntity.addToIndexes();
                    }
                }
            }

        } catch (final Exception e) {
            LOGGER.error("doStringBased() - exception occured: " + e.getMessage());
            throw new AnalysisEngineProcessException();
        }

    }
}
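
Worth noting for the example above: a TreeSet built with a Comparator uses that comparator, not equals, to decide membership, so the contains call on the overlap index effectively asks "is there already an entity overlapping this span". The minimal sketch below illustrates the same idea with a simplified integer-range type; the Range class and all values are invented for illustration, and, like the comparator above, the ordering is not a strict total order and is only suitable for this kind of overlap probing.

import java.util.Comparator;
import java.util.TreeSet;

public class OverlapContainsSketch {
    // simple [begin, end) span used only for this sketch
    static class Range {
        final int begin;
        final int end;
        Range(int begin, int end) {
            this.begin = begin;
            this.end = end;
        }
    }

    public static void main(String[] args) {
        // spans that overlap compare as 0, i.e. "equal" for the TreeSet
        Comparator<Range> overlap = new Comparator<Range>() {
            @Override
            public int compare(Range a, Range b) {
                if (a.end > b.begin && b.end > a.begin) {
                    return 0;
                }
                return a.begin - b.begin;
            }
        };

        TreeSet<Range> index = new TreeSet<Range>(overlap);
        index.add(new Range(10, 20));

        // contains() consults the comparator, so an overlapping probe
        // is reported as already present
        System.out.println(index.contains(new Range(15, 25))); // true
        System.out.println(index.contains(new Range(30, 40))); // false
    }
}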

From source file:ru.codeinside.adm.AdminServiceImpl.java

public void setEmloyeeInGroup(Group group, TreeSet<String> twinValue) {
    List<String> empLogins = findAllEmployeeLogins();
    for (String empLogin : empLogins) {
        final UserItem userItem = AdminServiceProvider.get().getUserItem(empLogin);
        Set<String> groups = userItem.getGroups();
        Boolean change;
        if (twinValue.contains(empLogin)) {
            change = groups.add(group.getName());
        } else {
            change = groups.remove(group.getName());
        }
        if (change) {
            userItem.setGroups(groups);
            AdminServiceProvider.get().setUserItem(empLogin, userItem);
        }

    }
}

From source file:org.apache.hadoop.chukwa.inputtools.mdl.TorqueInfoProcessor.java

private void getHodJobInfo() throws IOException {
    StringBuffer sb = new StringBuffer();
    sb.append(torqueBinDir).append("/qstat -a");

    String[] getQueueInfoCommand = new String[3];
    getQueueInfoCommand[0] = "ssh";
    getQueueInfoCommand[1] = torqueServer;
    getQueueInfoCommand[2] = sb.toString();

    String command = getQueueInfoCommand[0] + " " + getQueueInfoCommand[1] + " " + getQueueInfoCommand[2];
    ProcessBuilder pb = new ProcessBuilder(getQueueInfoCommand);

    Process p = pb.start();

    Timer timeout = new Timer();
    TorqueTimerTask torqueTimerTask = new TorqueTimerTask(p, command);
    timeout.schedule(torqueTimerTask, TorqueTimerTask.timeoutInterval * 1000);

    BufferedReader result = new BufferedReader(new InputStreamReader(p.getInputStream()));
    ErStreamHandler errorHandler = new ErStreamHandler(p.getErrorStream(), command, true);
    errorHandler.start();

    String line = null;
    boolean start = false;
    TreeSet<String> jobsInTorque = new TreeSet<String>();
    while ((line = result.readLine()) != null) {
        if (line.startsWith("---")) {
            start = true;
            continue;
        }

        if (start) {
            String[] items = line.split("\\s+");
            if (items.length >= 10) {
                String hodIdLong = items[0];
                String hodId = hodIdLong.split("[.]")[0];
                String userId = items[1];
                String numOfMachine = items[5];
                String status = items[9];
                jobsInTorque.add(hodId);
                if (!currentHodJobs.containsKey(hodId)) {
                    TreeMap<String, String> aJobData = new TreeMap<String, String>();

                    aJobData.put("userId", userId);
                    aJobData.put("numOfMachine", numOfMachine);
                    aJobData.put("traceCheckCount", "0");
                    aJobData.put("process", "0");
                    aJobData.put("status", status);
                    currentHodJobs.put(hodId, aJobData);
                } else {
                    TreeMap<String, String> aJobData = currentHodJobs.get(hodId);
                    aJobData.put("status", status);
                    currentHodJobs.put(hodId, aJobData);
                } // if..else
            }
        }
    } // while

    try {
        errorHandler.join();
    } catch (InterruptedException ie) {
        log.error(ie.getMessage());
    }
    timeout.cancel();

    Set<String> currentHodJobIds = currentHodJobs.keySet();
    Iterator<String> currentHodJobIdsIt = currentHodJobIds.iterator();
    TreeSet<String> finishedHodIds = new TreeSet<String>();
    while (currentHodJobIdsIt.hasNext()) {
        String hodId = currentHodJobIdsIt.next();
        if (!jobsInTorque.contains(hodId)) {
            TreeMap<String, String> aJobData = currentHodJobs.get(hodId);
            String process = aJobData.get("process");
            if (process.equals("0") || process.equals("1")) {
                aJobData.put("status", "C");
            } else {
                finishedHodIds.add(hodId);
            }
        }
    } // while

    Iterator<String> finishedHodIdsIt = finishedHodIds.iterator();
    while (finishedHodIdsIt.hasNext()) {
        String hodId = finishedHodIdsIt.next();
        currentHodJobs.remove(hodId);
    }

}
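
The cleanup at the end of this example uses jobsInTorque.contains(hodId) to find tracked jobs that qstat no longer reports. Setting aside the extra status bookkeeping, the membership part of that logic can also be expressed as a set difference; a minimal sketch with invented job ids:

import java.util.TreeMap;
import java.util.TreeSet;

public class FinishedJobsSketch {
    public static void main(String[] args) {
        TreeMap<String, String> currentJobs = new TreeMap<String, String>();
        currentJobs.put("1001", "running");
        currentJobs.put("1002", "running");

        TreeSet<String> jobsInTorque = new TreeSet<String>();
        jobsInTorque.add("1001");

        // copy the tracked ids and subtract the ids still reported by qstat
        TreeSet<String> finished = new TreeSet<String>(currentJobs.keySet());
        finished.removeAll(jobsInTorque);

        System.out.println(finished); // [1002]
    }
}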

From source file:ru.codeinside.adm.AdminServiceImpl.java

Employee createUser(String login, String password, String fio, Set<Role> roles, String creator,
        final Organization org, TreeSet<String> groupExecutor, TreeSet<String> groupSupervisorEmp,
        TreeSet<String> groupSupervisorOrg) {
    Employee employee = new Employee();
    employee.setLogin(login);
    employee.setPasswordHash(DigestUtils.sha256Hex(password));
    employee.setFio(fio);
    employee.getRoles().addAll(roles);
    employee.setCreator(creator);
    employee.setOrganization(org);
    employee.setLocked(false);
    Set<Group> organizationGroups = new HashSet<Group>();
    for (Group g : selectGroupsBySocial(false)) {
        if (groupSupervisorOrg.contains(g.getName())) {
            organizationGroups.add(g);
        }
    }
    employee.setOrganizationGroups(organizationGroups);

    Set<Group> employeeGroups = new HashSet<Group>();
    for (Group g : selectGroupsBySocial(true)) {
        if (groupSupervisorEmp.contains(g.getName())) {
            employeeGroups.add(g);
        }
    }
    employee.setEmployeeGroups(employeeGroups);
    org.getEmployees().add(employee);
    em.persist(employee);
    setUserGroups(employee, groupExecutor);
    final Set<String> groups = new HashSet<String>();
    for (Group group : org.getGroups()) {
        groups.add(group.getName());
    }
    logger.log(Level.FINE, "GROUPS: " + groups);
    syncUser(employee, Collections.<Group>emptySet(), groups, processEngine.get().getIdentityService());
    return employee;
}

From source file:com.cloud.network.router.VpcNetworkHelperImpl.java

@Override
public void reallocateRouterNetworks(final RouterDeploymentDefinition vpcRouterDeploymentDefinition,
        final VirtualRouter router, final VMTemplateVO template, final HypervisorType hType)
        throws ConcurrentOperationException, InsufficientCapacityException {

    final TreeSet<String> publicVlans = new TreeSet<String>();
    publicVlans.add(vpcRouterDeploymentDefinition.getSourceNatIP().getVlanTag());

    //1) allocate nic for control and source nat public ip
    final LinkedHashMap<Network, List<? extends NicProfile>> networks = configureDefaultNics(
            vpcRouterDeploymentDefinition);

    final Long vpcId = vpcRouterDeploymentDefinition.getVpc().getId();
    //2) allocate nic for private gateways if needed
    final List<PrivateGateway> privateGateways = vpcMgr.getVpcPrivateGateways(vpcId);
    if (privateGateways != null && !privateGateways.isEmpty()) {
        for (final PrivateGateway privateGateway : privateGateways) {
            final NicProfile privateNic = nicProfileHelper.createPrivateNicProfileForGateway(privateGateway,
                    router);
            final Network privateNetwork = _networkModel.getNetwork(privateGateway.getNetworkId());
            networks.put(privateNetwork, new ArrayList<NicProfile>(Arrays.asList(privateNic)));
        }
    }

    //3) allocate nic for guest gateway if needed
    final List<? extends Network> guestNetworks = vpcMgr.getVpcNetworks(vpcId);
    for (final Network guestNetwork : guestNetworks) {
        if (_networkModel.isPrivateGateway(guestNetwork.getId())) {
            continue;
        }
        if (guestNetwork.getState() == Network.State.Implemented
                || guestNetwork.getState() == Network.State.Setup) {
            final NicProfile guestNic = nicProfileHelper
                    .createGuestNicProfileForVpcRouter(vpcRouterDeploymentDefinition, guestNetwork);
            networks.put(guestNetwork, new ArrayList<NicProfile>(Arrays.asList(guestNic)));
        }
    }

    //4) allocate nic for additional public network(s)
    final List<IPAddressVO> ips = _ipAddressDao.listByAssociatedVpc(vpcId, false);
    final List<NicProfile> publicNics = new ArrayList<NicProfile>();
    Network publicNetwork = null;
    for (final IPAddressVO ip : ips) {
        final PublicIp publicIp = PublicIp.createFromAddrAndVlan(ip, _vlanDao.findById(ip.getVlanId()));
        if ((ip.getState() == IpAddress.State.Allocated || ip.getState() == IpAddress.State.Allocating)
                && vpcMgr.isIpAllocatedToVpc(ip) && !publicVlans.contains(publicIp.getVlanTag())) {
            s_logger.debug("Allocating nic for router in vlan " + publicIp.getVlanTag());
            final NicProfile publicNic = new NicProfile();
            publicNic.setDefaultNic(false);
            publicNic.setIPv4Address(publicIp.getAddress().addr());
            publicNic.setIPv4Gateway(publicIp.getGateway());
            publicNic.setIPv4Netmask(publicIp.getNetmask());
            publicNic.setMacAddress(publicIp.getMacAddress());
            publicNic.setBroadcastType(BroadcastDomainType.Vlan);
            publicNic.setBroadcastUri(BroadcastDomainType.Vlan.toUri(publicIp.getVlanTag()));
            publicNic.setIsolationUri(IsolationType.Vlan.toUri(publicIp.getVlanTag()));
            final NetworkOffering publicOffering = _networkModel
                    .getSystemAccountNetworkOfferings(NetworkOffering.SystemPublicNetwork).get(0);
            if (publicNetwork == null) {
                final List<? extends Network> publicNetworks = _networkMgr.setupNetwork(s_systemAccount,
                        publicOffering, vpcRouterDeploymentDefinition.getPlan(), null, null, false);
                publicNetwork = publicNetworks.get(0);
            }
            publicNics.add(publicNic);
            publicVlans.add(publicIp.getVlanTag());
        }
    }
    if (publicNetwork != null) {
        if (networks.get(publicNetwork) != null) {
            @SuppressWarnings("unchecked")
            final List<NicProfile> publicNicProfiles = (List<NicProfile>) networks.get(publicNetwork);
            publicNicProfiles.addAll(publicNics);
            networks.put(publicNetwork, publicNicProfiles);
        } else {
            networks.put(publicNetwork, publicNics);
        }
    }

    final ServiceOfferingVO routerOffering = _serviceOfferingDao
            .findById(vpcRouterDeploymentDefinition.getServiceOfferingId());

    _itMgr.allocate(router.getInstanceName(), template, routerOffering, networks,
            vpcRouterDeploymentDefinition.getPlan(), hType);
}

From source file:org.dasein.cloud.openstack.nova.os.ext.rackspace.lb.RackspaceLoadBalancers.java

@Override
public @Nonnull String createLoadBalancer(@Nonnull LoadBalancerCreateOptions options)
        throws CloudException, InternalException {
    APITrace.begin(provider, "LB.create");
    try {
        LbListener[] listeners = options.getListeners();

        if (listeners == null || listeners.length < 1) {
            logger.error("create(): Call failed to specify any listeners");
            throw new CloudException("Rackspace requires exactly one listener");
        }
        HashMap<String, Object> lb = new HashMap<String, Object>();

        lb.put("name", options.getName());
        lb.put("port", listeners[0].getPublicPort());
        if (listeners[0].getNetworkProtocol().equals(LbProtocol.HTTP)) {
            lb.put("protocol", "HTTP");
        } else if (listeners[0].getNetworkProtocol().equals(LbProtocol.HTTPS)) {
            lb.put("protocol", "HTTPS");
        } else if (listeners[0].getNetworkProtocol().equals(LbProtocol.RAW_TCP)) {
            lb.put("protocol", matchProtocol(listeners[0].getPublicPort()));
        } else {
            logger.error("Invalid protocol: " + listeners[0].getNetworkProtocol());
            throw new CloudException("Unsupported protocol: " + listeners[0].getNetworkProtocol());
        }
        if (listeners[0].getAlgorithm().equals(LbAlgorithm.LEAST_CONN)) {
            lb.put("algorithm", "LEAST_CONNECTIONS");
        } else if (listeners[0].getAlgorithm().equals(LbAlgorithm.ROUND_ROBIN)) {
            lb.put("algorithm", "ROUND_ROBIN");
        } else {
            logger.error("create(): Invalid algorithm: " + listeners[0].getAlgorithm());
            throw new CloudException("Unsupported algorithm: " + listeners[0].getAlgorithm());
        }
        ArrayList<Map<String, Object>> ips = new ArrayList<Map<String, Object>>();
        HashMap<String, Object> ip = new HashMap<String, Object>();

        ip.put("type", "PUBLIC");
        ips.add(ip);
        lb.put("virtualIps", ips);

        ArrayList<Map<String, Object>> nodes = new ArrayList<Map<String, Object>>();
        LoadBalancerEndpoint[] endpoints = options.getEndpoints();

        if (endpoints != null) {
            TreeSet<String> addresses = new TreeSet<String>();

            for (LoadBalancerEndpoint endpoint : endpoints) {
                String address = null;

                if (endpoint.getEndpointType().equals(LbEndpointType.IP)) {
                    address = endpoint.getEndpointValue();
                } else {
                    VirtualMachine vm = provider.getComputeServices().getVirtualMachineSupport()
                            .getVirtualMachine(endpoint.getEndpointValue());

                    if (vm != null) {
                        if (vm.getProviderRegionId().equals(provider.getContext().getRegionId())) {
                            RawAddress[] tmp = vm.getPrivateAddresses();

                            if (tmp != null && tmp.length > 0) {
                                address = tmp[0].getIpAddress();
                            }
                        }
                        if (address == null) {
                            RawAddress[] tmp = vm.getPublicAddresses();

                            if (tmp != null && tmp.length > 0) {
                                address = tmp[0].getIpAddress();
                            }
                        }
                    }
                }
                if (address != null && !addresses.contains(address)) {
                    HashMap<String, Object> node = new HashMap<String, Object>();

                    node.put("address", address);
                    node.put("condition", "ENABLED");
                    node.put("port", listeners[0].getPrivatePort());
                    nodes.add(node);
                    addresses.add(address);
                }
            }
        }
        if (nodes.isEmpty()) {
            logger.error("create(): Rackspace requires at least one node assignment");
            throw new CloudException("Rackspace requires at least one node assignment");
        }
        lb.put("nodes", nodes);

        HashMap<String, Object> json = new HashMap<String, Object>();

        json.put("loadBalancer", lb);
        NovaMethod method = new NovaMethod(provider);

        if (logger.isTraceEnabled()) {
            logger.trace("create(): Posting new load balancer data...");
        }
        JSONObject result = method.postString(SERVICE, RESOURCE, null, new JSONObject(json), false);

        if (result == null) {
            logger.error("create(): Method executed successfully, but no load balancer was created");
            throw new CloudException("Method executed successfully, but no load balancer was created");
        }
        try {
            if (result.has("loadBalancer")) {
                JSONObject ob = result.getJSONObject("loadBalancer");

                if (ob != null) {
                    return ob.getString("id");
                }
            }
            logger.error("create(): Method executed successfully, but no load balancer was found in JSON");
            throw new CloudException("Method executed successfully, but no load balancer was found in JSON");
        } catch (JSONException e) {
            logger.error(
                    "create(): Failed to identify a load balancer ID in the cloud response: " + e.getMessage());
            throw new CloudException(
                    "Failed to identify a load balancer ID in the cloud response: " + e.getMessage());
        }
    } finally {
        APITrace.end();
    }
}