Example usage for java.util Collection clear

List of usage examples for java.util Collection clear

Introduction

On this page you can find example usage for java.util.Collection.clear().

Prototype

void clear();

Document

Removes all of the elements from this collection (optional operation).
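
A minimal, self-contained sketch of the documented behavior (hypothetical names, not taken from the examples below):

import java.util.ArrayList;
import java.util.Collection;

public class ClearDemo {
    public static void main(String[] args) {
        Collection<String> names = new ArrayList<>();
        names.add("alpha");
        names.add("beta");

        // Removes every element; the collection instance itself stays usable.
        names.clear();

        System.out.println(names.isEmpty()); // true
        System.out.println(names.size());    // 0
    }
}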

Usage

From source file:com.zenesis.qx.remote.RequestHandler.java

/**
 * Handles dynamic changes to a qa.data.Array instance without having a complete replacement; expects a 
 * serverId, propertyName, type (one of "add", "remove", "order"), start, end, and optional array of items 
 * @param jp
 * @throws ServletException
 * @throws IOException
 */
protected void cmdEditArray(JsonParser jp) throws ServletException, IOException {
    // Get the basics
    int serverId = getFieldValue(jp, "serverId", Integer.class);
    String propertyName = getFieldValue(jp, "propertyName", String.class);
    String action = getFieldValue(jp, "type", String.class);
    Integer start = null;
    Integer end = null;

    if (!action.equals("replaceAll")) {
        start = getFieldValue(jp, "start", Integer.class);
        end = getFieldValue(jp, "end", Integer.class);
    }

    // Get our info
    Proxied serverObject = getProxied(serverId);
    ProxyType type = ProxyTypeManager.INSTANCE.getProxyType(serverObject.getClass());
    ProxyProperty prop = getProperty(type, propertyName);

    if (prop.getPropertyClass().isMap()) {
        Map items = null;

        // Get the optional array of items
        if (jp.nextToken() == JsonToken.FIELD_NAME && jp.getCurrentName().equals("items")
                && jp.nextToken() == JsonToken.START_OBJECT) {

            items = readMap(jp, prop.getPropertyClass().getKeyClass(), prop.getPropertyClass().getJavaType());
        }

        // Quick logging
        if (log.isInfoEnabled()) {
            String str = "";
            if (items != null)
                for (Object key : items.keySet()) {
                    if (str.length() > 0)
                        str += ", ";
                    str += String.valueOf(key) + "=" + String.valueOf(items.get(key));
                }
            log.info("edit-array: property=" + prop + ", type=" + action + ", start=" + start + ", end=" + end
                    + str);
        }

        if (action.equals("replaceAll")) {
            Map map = (Map) prop.getValue(serverObject);
            if (map == null) {
                try {
                    map = (Map) prop.getPropertyClass().getCollectionClass().newInstance();
                } catch (Exception e) {
                    throw new IllegalArgumentException(e.getMessage(), e);
                }
                prop.setValue(serverObject, map);
            }
            map.clear();
            map.putAll(items);
        } else
            throw new IllegalArgumentException("Unsupported action in cmdEditArray: " + action);

        // Because collection properties are objects and we change them without the serverObject's
        //   knowledge, we have to make sure we notify other trackers ourselves
        ProxyManager.propertyChanged(serverObject, propertyName, items, null);

        jp.nextToken();
    } else {
        // NOTE: items is an Array!!  But because it may be an array of primitive types, we have
        //   to use java.lang.reflect.Array to access members because we cannot cast arrays of
        //   primitives to Object[]
        Object items = null;

        // Get the optional array of items
        if (jp.nextToken() == JsonToken.FIELD_NAME && jp.getCurrentName().equals("items")
                && jp.nextToken() == JsonToken.START_ARRAY) {

            items = readArray(jp, prop.getPropertyClass().getJavaType());
        }
        // items may be null when no "items" field was sent, so guard before reflecting on it
        int itemsLength = items == null ? 0 : Array.getLength(items);

        // Quick logging
        if (log.isInfoEnabled()) {
            String str = "";
            if (items != null)
                for (int i = 0; i < itemsLength; i++) {
                    if (str.length() != 0)
                        str += ", ";
                    str += Array.get(items, i);
                }
            log.info("edit-array: property=" + prop + ", type=" + action + ", start=" + start + ", end=" + end
                    + str);
        }

        if (action.equals("replaceAll")) {
            if (prop.getPropertyClass().isCollection()) {
                Collection list = (Collection) prop.getValue(serverObject);
                if (list == null) {
                    try {
                        list = (Collection) prop.getPropertyClass().getCollectionClass().newInstance();
                    } catch (Exception e) {
                        throw new IllegalArgumentException(e.getMessage(), e);
                    }
                    prop.setValue(serverObject, list);
                }
                list.clear();
                if (items != null)
                    for (int i = 0; i < itemsLength; i++)
                        list.add(Array.get(items, i));

                // Because collection properties are objects and we change them without the serverObject's
                //   knowledge, we have to make sure we notify other trackers ourselves
                ProxyManager.propertyChanged(serverObject, propertyName, list, null);
            } else {
                prop.setValue(serverObject, items);
            }
        } else
            throw new IllegalArgumentException("Unsupported action in cmdEditArray: " + action);

        jp.nextToken();
    }
}
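
The "replaceAll" branches above keep the existing Map or Collection instance and swap its contents with clear() followed by putAll()/add(), rather than assigning a fresh object, so any other holder of the same reference sees the new contents. A minimal sketch of that pattern with hypothetical names (not the RequestHandler API):

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

public class ReplaceInPlace {

    // Replace the contents of an existing collection in place, so that code holding
    // a reference to the same instance (e.g. a bound property) observes the change.
    static <T> void replaceAll(Collection<T> target, Collection<? extends T> newItems) {
        target.clear();
        if (newItems != null) {
            target.addAll(newItems);
        }
    }

    public static void main(String[] args) {
        List<String> bound = new ArrayList<>(List.of("old1", "old2"));
        replaceAll(bound, List.of("new1", "new2", "new3"));
        System.out.println(bound); // [new1, new2, new3]
    }
}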

From source file:org.jactr.core.module.declarative.four.learning.ChunkListener.java

/**
 * Handles the updating of associative links, reference times, and context/needed
 * tallies. Should also handle similarities, but that is not implemented yet and will
 * be handled by the six version of this.<br>
 * The updating is done here since the listener is removed after encoding, so
 * the master chunk will not have this listener attached
 *
 * @param event
 * @see org.jactr.core.chunk.event.ChunkListenerAdaptor#mergingInto(org.jactr.core.chunk.event.ChunkEvent)
 */
@Override
public void mergingInto(ChunkEvent event) {
    IChunk self = event.getSource();
    IChunk master = event.getChunk();

    ISubsymbolicChunk4 selfSSC = (ISubsymbolicChunk4) self.getSubsymbolicChunk();
    ISubsymbolicChunk4 masterSSC = (ISubsymbolicChunk4) master.getSubsymbolicChunk();

    //update the references for master
    masterSSC.accessed(event.getSimulationTime());

    masterSSC.setTimesInContext(masterSSC.getTimesInContext() + selfSSC.getTimesInContext());
    masterSSC.setTimesNeeded(masterSSC.getTimesNeeded() + selfSSC.getTimesNeeded());

    /*
     * and refs
     */
    IReferences refs = masterSSC.getReferences();
    for (double refTime : selfSSC.getReferences().getTimes())
        refs.addReferenceTime(refTime);

    // will allocate
    Collection<Link> links = selfSSC.getIAssociations(null);

    for (Link iLink : links) {
        IChunk iChunk = iLink.getIChunk();
        Link masterLink = masterSSC.getIAssociation(iChunk);
        if (masterLink != null) {
            /**
             * need to merge the links
             */
            if (LOGGER.isDebugEnabled())
                LOGGER.debug(master + " already linked to " + iChunk + ", merging");
            masterLink.setCount(Math.max(masterLink.getCount(), iLink.getCount()));
            masterLink.setStrength(Math.max(masterLink.getStrength(), iLink.getStrength()));
        } else {
            /**
             * add the link to master
             */
            if (LOGGER.isDebugEnabled())
                LOGGER.debug(master + " not already linked to " + iChunk + ", linking.");
            Link newLink = new Link(master, iChunk, iLink.getCount(), iLink.getStrength());
            masterSSC.addLink(newLink);
            ((ISubsymbolicChunk4) iChunk.getSubsymbolicChunk()).addLink(newLink);
        }

        /*
         * reduce the count so that we are sure we're removing the link
         */
        iLink.setCount(1);
        selfSSC.removeLink(iLink);
        ((ISubsymbolicChunk4) iChunk.getSubsymbolicChunk()).removeLink(iLink);
    }

    links.clear();
    links = selfSSC.getJAssociations(links);

    for (Link jLink : links) {
        IChunk jChunk = jLink.getJChunk();
        Link masterLink = masterSSC.getIAssociation(jChunk);
        if (masterLink != null) {
            /**
             * need to merge the links
             */
            if (LOGGER.isDebugEnabled())
                LOGGER.debug(jChunk + " already linked to " + master + ", merging");
            masterLink.setCount(Math.max(masterLink.getCount(), jLink.getCount()));
            masterLink.setStrength(Math.max(masterLink.getStrength(), jLink.getStrength()));

        } else {
            /**
             * add the link to master
             */
            if (LOGGER.isDebugEnabled())
                LOGGER.debug(jChunk + " not already linked to " + master + ", linking.");
            Link newLink = new Link(jChunk, master, jLink.getCount(), jLink.getStrength());
            masterSSC.addLink(newLink);
            ((ISubsymbolicChunk4) jChunk.getSubsymbolicChunk()).addLink(newLink);
        }

        /*
         * reduce the count so that we are sure we're removing the link
         */
        jLink.setCount(1);
        selfSSC.removeLink(jLink);
        ((ISubsymbolicChunk4) jChunk.getSubsymbolicChunk()).removeLink(jLink);
    }
}
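
The listener above empties the I-association collection with clear() and then passes it back to getJAssociations(links) as a reusable buffer, avoiding a second allocation. A minimal sketch of that buffer-reuse idiom, assuming a hypothetical fillInto producer (not the jACT-R API):

import java.util.ArrayList;
import java.util.Collection;

public class BufferReuse {

    // Hypothetical producer that fills a caller-supplied buffer and only allocates
    // a new collection when the caller passes null.
    static Collection<String> fillInto(Collection<String> buffer, String... values) {
        if (buffer == null) {
            buffer = new ArrayList<>();
        }
        for (String value : values) {
            buffer.add(value);
        }
        return buffer;
    }

    public static void main(String[] args) {
        Collection<String> links = fillInto(null, "a", "b"); // first call allocates
        // ... process the first batch ...

        links.clear();                     // empty the buffer
        links = fillInto(links, "c", "d"); // second call reuses the same instance
        System.out.println(links);         // [c, d]
    }
}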

From source file:ubic.gemma.loader.expression.arrayDesign.ArrayDesignSequenceProcessingServiceImpl.java

private void flushBuffer(Collection<BioSequence> bioSequences, Collection<BioSequence> sequenceBuffer,
        Map<String, CompositeSequence> csBuffer) {
    Collection<BioSequence> newOnes = bioSequenceService.findOrCreate(sequenceBuffer);
    bioSequences.addAll(newOnes);
    for (BioSequence sequence : newOnes) {
        CompositeSequence cs = csBuffer.get(sequence.getName());
        assert cs != null;
        cs.setBiologicalCharacteristic(sequence);
    }
    csBuffer.clear();
    sequenceBuffer.clear();
}

From source file:ubic.gemma.core.loader.expression.arrayDesign.ArrayDesignSequenceProcessingServiceImpl.java

/**
 * for affymetrix processing
 * 
 * @param bioSequences bio sequences
 * @param sequenceBuffer sequence buffer
 * @param csBuffer cs buffer
 */
private void flushBuffer(Collection<BioSequence> bioSequences, Collection<BioSequence> sequenceBuffer,
        Map<String, CompositeSequence> csBuffer) {
    Collection<BioSequence> newOnes = bioSequenceService.findOrCreate(sequenceBuffer);
    bioSequences.addAll(newOnes);
    for (BioSequence sequence : newOnes) {
        CompositeSequence cs = csBuffer.get(sequence.getName());
        assert cs != null;
        if (log.isDebugEnabled()) {
            log.debug("Updating " + cs + " to sequence " + sequence + ": " + sequence.getSequence());
        }
        cs.setBiologicalCharacteristic(sequence);
    }
    csBuffer.clear();
    sequenceBuffer.clear();
}
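
Both flushBuffer variants persist the buffered sequences and then empty the buffers with clear() so the caller can keep accumulating into the same collections. A minimal sketch of that batch-and-flush pattern, with a plain list standing in for the real persistence call:

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

public class BatchFlush {

    static final int BATCH_SIZE = 100;

    public static void main(String[] args) {
        Collection<String> buffer = new ArrayList<>();
        List<String> persisted = new ArrayList<>();

        for (int i = 0; i < 250; i++) {
            buffer.add("record-" + i);
            if (buffer.size() >= BATCH_SIZE) {
                persisted.addAll(buffer); // stand-in for a real persistence call
                buffer.clear();           // reset the buffer for the next batch
            }
        }
        if (!buffer.isEmpty()) {          // flush whatever is left over
            persisted.addAll(buffer);
            buffer.clear();
        }
        System.out.println(persisted.size()); // 250
    }
}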

From source file:edu.ksu.cis.indus.staticanalyses.concurrency.escape.EscapeAndReadWriteCLI.java

/**
 * This contains the driver logic.
 * 
 * @param <T> dummy type parameter.
 */
private <T extends ITokens<T, Value>> void execute() {
    setInfoLogger(LOGGER);

    final String _tagName = "SideEffect:FA";
    final IValueAnalyzer<Value> _aa = OFAnalyzer.getFSOSAnalyzer(_tagName,
            TokenUtil.<T, Value, Type>getTokenManager(new SootValueTypeManager()), getStmtGraphFactory());
    final ValueAnalyzerBasedProcessingController _pc = new ValueAnalyzerBasedProcessingController();
    final Collection<IProcessor> _processors = new ArrayList<IProcessor>();
    final PairManager _pairManager = new PairManager(false, true);
    final CallGraphInfo _cgi = new CallGraphInfo(_pairManager);
    final OFABasedCallInfoCollector _callGraphInfoCollector = new OFABasedCallInfoCollector();
    final IThreadGraphInfo _tgi = new ThreadGraph(_cgi, new CFGAnalysis(_cgi, getBbm()), _pairManager);
    final ValueAnalyzerBasedProcessingController _cgipc = new ValueAnalyzerBasedProcessingController();
    final OneAllStmtSequenceRetriever _ssr = new OneAllStmtSequenceRetriever();

    _ssr.setStmtGraphFactory(getStmtGraphFactory());

    _pc.setStmtSequencesRetriever(_ssr);
    _pc.setAnalyzer(_aa);
    _pc.setProcessingFilter(new TagBasedProcessingFilter(_tagName));

    _cgipc.setAnalyzer(_aa);
    _cgipc.setProcessingFilter(new CGBasedProcessingFilter(_cgi));
    _cgipc.setStmtSequencesRetriever(_ssr);

    final Map _info = new HashMap();
    _info.put(ICallGraphInfo.ID, _cgi);
    _info.put(IThreadGraphInfo.ID, _tgi);
    _info.put(PairManager.ID, _pairManager);
    _info.put(IEnvironment.ID, _aa.getEnvironment());
    _info.put(IValueAnalyzer.ID, _aa);

    initialize();
    _aa.analyze(getEnvironment(), getRootMethods());

    _processors.clear();
    _processors.add(_callGraphInfoCollector);
    _pc.reset();
    _pc.driveProcessors(_processors);
    _cgi.createCallGraphInfo(_callGraphInfoCollector.getCallInfo());
    writeInfo("CALL GRAPH:\n" + _cgi.toString());

    _processors.clear();
    ((ThreadGraph) _tgi).reset();
    _processors.add((IProcessor) _tgi);
    _cgipc.reset();
    _cgipc.driveProcessors(_processors);
    writeInfo("THREAD GRAPH:\n" + ((ThreadGraph) _tgi).toString());
    final EquivalenceClassBasedEscapeAnalysis _ecba = new EquivalenceClassBasedEscapeAnalysis(_cgi, _tgi,
            getBbm());
    final IReadWriteInfo _rwInfo = _ecba.getReadWriteInfo();
    final IEscapeInfo _escapeInfo = _ecba.getEscapeInfo();
    final AnalysesController _ac = new AnalysesController(_info, _cgipc, getBbm());
    _ac.addAnalyses(EquivalenceClassBasedEscapeAnalysis.ID, Collections.singleton(_ecba));
    _ac.initialize();
    _ac.execute();
    writeInfo("END: Escape analysis");

    System.out.println("ReadWrite-Effect and Escape Information:");
    final String[] _emptyStringArray = new String[0];

    for (final Iterator<SootMethod> _i = _cgi.getReachableMethods().iterator(); _i.hasNext();) {
        final SootMethod _sm = _i.next();
        System.out.println("Method: " + _sm.getSignature());
        System.out.println("\tsealed: " + _ecba.isMethodSealed(_sm) + ", atomic: " + _ecba.isMethodAtomic(_sm));
        if (!_sm.isStatic()) {
            System.out.println("\tthis:");
            System.out
                    .println("\t\tread =  " + _rwInfo.isThisBasedAccessPathRead(_sm, _emptyStringArray, true));
            System.out.println(
                    "\t\twritten =  " + _rwInfo.isThisBasedAccessPathWritten(_sm, _emptyStringArray, true));
            System.out.println("\t\tescapes = " + _escapeInfo.thisEscapes(_sm));
            System.out.println("\t\tmulti-thread RW = "
                    + _escapeInfo.thisFieldAccessShared(_sm, IEscapeInfo.READ_WRITE_SHARED_ACCESS));
            System.out.println("\t\tmulti-thread LU = " + _escapeInfo.thisLockUnlockShared(_sm));
            System.out.println("\t\tmulti-thread WN = " + _escapeInfo.thisWaitNotifyShared(_sm));
            System.out.println("\t\tfield reading threads = " + _escapeInfo.getReadingThreadsOfThis(_sm));
            System.out.println("\t\tfield writing threads = " + _escapeInfo.getWritingThreadsOfThis(_sm));

            for (final Iterator<SootField> _j = _sm.getDeclaringClass().getFields().iterator(); _j.hasNext();) {
                SootField _field = _j.next();
                if (!_field.isStatic()) {
                    final String[] _accessPath = { _field.getSignature() };
                    System.out.println("\t\t\t" + _accessPath[0] + ": [read: "
                            + _rwInfo.isThisBasedAccessPathRead(_sm, _accessPath, true) + ", written:"
                            + _rwInfo.isThisBasedAccessPathWritten(_sm, _accessPath, true) + "]");
                }
            }
            System.out.print("\t\tcoupled with parameters at position: ");
            for (int _k = 0; _k < _sm.getParameterCount(); _k++) {
                if (EquivalenceClassBasedEscapeAnalysis.canHaveAliasSet(_sm.getParameterType(_k))
                        && _rwInfo.canMethodInduceSharingBetweenParamAndThis(_sm, _k)) {
                    System.out.print(_k + ", ");
                }
            }
            System.out.println("");
        }

        for (int _j = 0; _j < _sm.getParameterCount(); _j++) {
            System.out.println("\tParam" + (_j + 1) + "[" + _sm.getParameterType(_j) + "]:");
            System.out.println(
                    "\t\tread = " + _rwInfo.isParameterBasedAccessPathRead(_sm, _j, _emptyStringArray, true));
            System.out.println("\t\twritten = "
                    + _rwInfo.isParameterBasedAccessPathWritten(_sm, _j, _emptyStringArray, true));
            System.out.println("\t\tfield reading threads: " + _escapeInfo.getReadingThreadsOf(_j, _sm));
            System.out.println("\t\tfield writing threads: " + _escapeInfo.getWritingThreadsOf(_j, _sm));
            System.out.print("\t\tcoupled with parameters at position: ");
            for (int _k = 0; _k < _sm.getParameterCount(); _k++) {
                if (_k != _j && EquivalenceClassBasedEscapeAnalysis.canHaveAliasSet(_sm.getParameterType(_k))
                        && EquivalenceClassBasedEscapeAnalysis.canHaveAliasSet(_sm.getParameterType(_j))
                        && _rwInfo.canMethodInduceSharingBetweenParams(_sm, _j, _k)) {
                    System.out.print(_k + ", ");
                }
            }
            System.out.println("");

        }

        if (_sm.hasActiveBody()) {
            final Body _body = _sm.getActiveBody();

            for (final Iterator<Local> _j = _body.getLocals().iterator(); _j.hasNext();) {
                final Local _local = _j.next();
                System.out.println("\tLocal " + _local.getName() + "[" + _local.getType() + "] : ");
                System.out.println("\t\tescapes = " + _escapeInfo.escapes(_local, _sm));
                System.out.println("\t\tshared RW = "
                        + _escapeInfo.fieldAccessShared(_local, _sm, IEscapeInfo.READ_WRITE_SHARED_ACCESS));
                System.out.println("\t\tshared WW = "
                        + _escapeInfo.fieldAccessShared(_local, _sm, IEscapeInfo.WRITE_WRITE_SHARED_ACCESS));
                System.out.println("\t\tlocking = " + _escapeInfo.lockUnlockShared(_local, _sm));
                System.out.println("\t\twaitNotify  = " + _escapeInfo.waitNotifyShared(_local, _sm));
                System.out
                        .println("\t\tfield reading threads: " + _escapeInfo.getReadingThreadsOf(_local, _sm));
                System.out
                        .println("\t\tfield writing threads: " + _escapeInfo.getWritingThreadsOf(_local, _sm));
            }

            for (final Iterator<Stmt> _j = _body.getUnits().iterator(); _j.hasNext();) {
                final Stmt _stmt = _j.next();
                if (_stmt.containsFieldRef()) {
                    final FieldRef _fr = _stmt.getFieldRef();
                    SootField _field = _fr.getField();
                    if (_field.isStatic()) {
                        System.out.println("\tStatic " + _field.getSignature() + " : ");
                        System.out.println("\t\tshared RW = "
                                + _escapeInfo.staticfieldAccessShared(_field.getDeclaringClass(), _sm,
                                        _field.getSignature(), IEscapeInfo.READ_WRITE_SHARED_ACCESS));
                        System.out.println("\t\tshared WW = "
                                + _escapeInfo.staticfieldAccessShared(_field.getDeclaringClass(), _sm,
                                        _field.getSignature(), IEscapeInfo.WRITE_WRITE_SHARED_ACCESS));
                    }
                }
            }
        }
    }
}

From source file:uk.ac.susx.tag.method51.core.coding.Coder.java

private void clear(Collection<?> collection) {
    if (collection != null)
        collection.clear();
}
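
The helper above makes clear() null-safe: clearing is a no-op when the reference is null. A minimal usage sketch with a hypothetical caller (not the Method51 code):

import java.util.ArrayList;
import java.util.Collection;

public class NullSafeClear {

    // Null-safe clear: does nothing when the collection reference is null.
    static void clear(Collection<?> collection) {
        if (collection != null) {
            collection.clear();
        }
    }

    public static void main(String[] args) {
        Collection<Integer> codes = new ArrayList<>();
        codes.add(1);

        clear(codes); // empties the list
        clear(null);  // safe no-op instead of a NullPointerException

        System.out.println(codes.isEmpty()); // true
    }
}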

From source file:eu.artist.migration.mdt.umlprofilediscovery.codemodel2umlprofile.files.CodeModel2UMLProfilePostProcessor.java

/**
 *
 * @param codeModel
 * @param profileModel
 * @param traceModel
 */
public static void runPostProcessing(IModel codeModel, IModel profileModel, IModel traceModel) {
    // init the trace map
    initProfileModel2CodeModelTraceMap(traceModel);
    initCodeModel2ProfileModelTraceMap(traceModel);

    // set default values
    setDefaultValues();

    // helper collection to find property duplicates
    Collection<String> properties = new ArrayList<String>();

    // helper collection that stores the elements to destroy
    Collection<Element> umlRemovals = new ArrayList<Element>();
    Set<TraceLink> traceRemovals = new HashSet<TraceLink>();

    // check the stereotypes for duplicates -> Property, Association,
    // Operation
    Set<? extends Object> stereotypes = profileModel.getElementsByType(UMLPackage.eINSTANCE.getStereotype());

    for (Object obj : stereotypes) {
        Stereotype stereotype = (Stereotype) obj;

        // TODO: maybe work with validators here !!
        for (Property property : stereotype.getAllAttributes()) {
            if (properties.contains(property.getName())) {

                // the extension member ends of the Extension
                for (Property extensionEnd : property.getAssociation().getMemberEnds()) {
                    umlRemovals.add(extensionEnd);

                    // if we remove an Operation extension, we should remove
                    // the constructor
                    // constraint as well
                    if (property.getType().getName().equalsIgnoreCase("Operation")) {
                        umlRemovals.add(stereotype.getOwnedRule("constructorConstraint"));
                    }

                    // if we remove a Property extension, we should remove
                    // the method
                    // constraint as well
                    if (property.getType().getName().equalsIgnoreCase("Property")) {
                        umlRemovals.add(stereotype.getOwnedRule("methodConstraint"));
                    }

                    traceRemovals.add(profileModel2CodeModeltraceMap.get(extensionEnd));
                }

                // the Extension
                umlRemovals.add(property.getAssociation());
                traceRemovals.add(profileModel2CodeModeltraceMap.get(property.getAssociation()));

                // TODO: we need to remove constraints regarding operations
                // as well!
            } else {
                properties.add(property.getName());
            }
        }

        // clear the collection of properties for the next stereotype
        properties.clear();
    }

    // remove the trace links
    for (TraceLink traceLink : traceRemovals) {
        Trace trace = (Trace) traceLink.eContainer();
        trace.getTraceLinks().remove(traceLink);
    }

    // remove the uml elements
    for (Element element : umlRemovals) {
        element.destroy();
    }
}

From source file:ubic.gemma.core.loader.expression.arrayDesign.ArrayDesignMergeServiceTest.java

@Test
public void testMerge() {
    ArrayDesign ad1 = super.getTestPersistentArrayDesign(10, true);
    ArrayDesign ad2 = super.getTestPersistentArrayDesign(10, true);
    ArrayDesign ad3 = super.getTestPersistentArrayDesign(10, true);

    Collection<ArrayDesign> others = new HashSet<>();
    others.add(ad2);
    others.add(ad3);

    ArrayDesign ad1ad2ad3 = arrayDesignMergeService.merge(ad1, others,
            "ad1ad2ad3_" + RandomStringUtils.randomAlphabetic(4),
            "ad1ad2ad3_" + RandomStringUtils.randomAlphabetic(4), false);

    ad1 = arrayDesignService.load(ad1.getId());
    ad1 = arrayDesignService.thawLite(ad1);
    ad2 = arrayDesignService.load(ad2.getId());
    ad2 = arrayDesignService.thawLite(ad2);
    ad3 = arrayDesignService.load(ad3.getId());
    ad3 = arrayDesignService.thawLite(ad3);

    /*
     * merged contains all three.
     */
    assertTrue(ad1ad2ad3.getMergees().contains(ad1));
    assertTrue(ad1ad2ad3.getMergees().contains(ad2));
    assertTrue(ad1ad2ad3.getMergees().contains(ad3));
    assertNull(ad1ad2ad3.getMergedInto());
    assertEquals(30, ad1ad2ad3.getCompositeSequences().size());
    assertEquals(ad1ad2ad3, ad1.getMergedInto());
    assertEquals(ad1ad2ad3, ad2.getMergedInto());
    assertEquals(ad1ad2ad3, ad3.getMergedInto());
    assertEquals(ArrayDesignMergeEvent.class, ad1.getAuditTrail().getLast().getEventType().getClass());
    assertEquals(ArrayDesignMergeEvent.class, ad2.getAuditTrail().getLast().getEventType().getClass());
    assertEquals(ArrayDesignMergeEvent.class, ad3.getAuditTrail().getLast().getEventType().getClass());

    /*
     * Making a new one out of a merged design and an unmerged
     */
    ArrayDesign ad4 = super.getTestPersistentArrayDesign(10, true);
    others.clear();
    others.add(ad4);
    ArrayDesign ad1ad2ad3ad4 = arrayDesignMergeService.merge(ad1ad2ad3, others,
            "foo2" + RandomStringUtils.randomAlphabetic(4), "bar2" + RandomStringUtils.randomAlphabetic(4),
            false);

    assertTrue(ad1ad2ad3ad4.getMergees().contains(ad1ad2ad3));
    assertTrue(ad1ad2ad3ad4.getMergees().contains(ad4));
    assertEquals(40, ad1ad2ad3ad4.getCompositeSequences().size());
    assertNull(ad1ad2ad3ad4.getMergedInto());

    /*
     * Add an array to an already merged design.
     */
    ArrayDesign ad5 = super.getTestPersistentArrayDesign(10, true);
    others.clear();
    others.add(ad5);
    ArrayDesign merged3 = arrayDesignMergeService.merge(ad1ad2ad3ad4, others, null, null, true);

    assertEquals(ad1ad2ad3ad4, merged3);
    assertTrue(merged3.getMergees().contains(ad1ad2ad3)); // from before.
    assertTrue(merged3.getMergees().contains(ad4)); // from before
    assertTrue(merged3.getMergees().contains(ad5)); // the extra one.
    assertTrue(merged3.getMergees().contains(ad1ad2ad3));
    assertEquals(50, merged3.getCompositeSequences().size());

}

From source file:storm.mesos.schedulers.DefaultScheduler.java

private List<MesosWorkerSlot> getMesosWorkerSlots(Map<String, AggregatedOffers> aggregatedOffersPerNode,
        Collection<String> nodesWithExistingSupervisors, TopologyDetails topologyDetails) {

    double requestedWorkerCpu = MesosCommon.topologyWorkerCpu(mesosStormConf, topologyDetails);
    double requestedWorkerMem = MesosCommon.topologyWorkerMem(mesosStormConf, topologyDetails);
    int requestedWorkerMemInt = (int) requestedWorkerMem;

    List<MesosWorkerSlot> mesosWorkerSlots = new ArrayList<>();
    boolean slotFound = false;
    int slotsNeeded = topologyDetails.getNumWorkers();

    /* XXX(erikdw): For now we clear out our knowledge of pre-existing supervisors while searching for slots
     * for this topology, to make the behavior of allSlotsAvailableForScheduling() mimic that of assignSlots().
     *
     * See this issue: https://github.com/mesos/storm/issues/160
     *
     * Until that issue is fixed, we must not discount the resources used by pre-existing supervisors.
     * Otherwise we will under-represent the resources needed as compared to what the more ignorant
     * assignSlots() will believe is needed, and thus may prevent MesosWorkerSlots from actually being
     * used.  i.e., assignSlots() doesn't know if supervisors already exist, since it doesn't receive the
     * existingSupervisors input parameter that allSlotsAvailableForScheduling() does.
     */
    nodesWithExistingSupervisors.clear();

    do {
        slotFound = false;
        for (String currentNode : aggregatedOffersPerNode.keySet()) {
            AggregatedOffers aggregatedOffers = aggregatedOffersPerNode.get(currentNode);

            boolean supervisorExists = nodesWithExistingSupervisors.contains(currentNode);

            if (!aggregatedOffers.isFit(mesosStormConf, topologyDetails, supervisorExists)) {
                log.info(
                        "{} with requestedWorkerCpu {} and requestedWorkerMem {} does not fit onto {} with resources {}",
                        topologyDetails.getId(), requestedWorkerCpu, requestedWorkerMemInt,
                        aggregatedOffers.getHostname(), aggregatedOffers.toString());
                continue;
            }

            log.info(
                    "{} with requestedWorkerCpu {} and requestedWorkerMem {} does fit onto {} with resources {}",
                    topologyDetails.getId(), requestedWorkerCpu, requestedWorkerMemInt,
                    aggregatedOffers.getHostname(), aggregatedOffers.toString());
            MesosWorkerSlot mesosWorkerSlot;
            try {
                mesosWorkerSlot = SchedulerUtils.createMesosWorkerSlot(mesosStormConf, aggregatedOffers,
                        topologyDetails, supervisorExists);
            } catch (ResourceNotAvailableException rexp) {
                log.warn(rexp.getMessage());
                continue;
            }

            nodesWithExistingSupervisors.add(currentNode);
            mesosWorkerSlots.add(mesosWorkerSlot);
            slotFound = true;
            if (--slotsNeeded == 0) {
                break;
            }
        }
    } while (slotFound && slotsNeeded > 0);

    return mesosWorkerSlots;
}

From source file:edu.ksu.cis.indus.staticanalyses.flow.instances.ofa.OFAXMLizerCLI.java

/**
 * Xmlize the given system.
 * 
 * @param dumpJimple <code>true</code> indicates that the jimple should be xmlized as well; <code>false</code>,
 *            otherwise.
 * @param <T> dummy type parameter.
 */
private <T extends ITokens<T, Value>> void execute(final boolean dumpJimple) {
    setInfoLogger(LOGGER);

    final String _tagName = "OFAXMLizer:FA";
    final IValueAnalyzer<Value> _aa;
    if (type.equals("fioi")) {
        _aa = OFAnalyzer.getFIOIAnalyzer(_tagName,
                TokenUtil.<T, Value, Type>getTokenManager(new SootValueTypeManager()), getStmtGraphFactory());
    } else if (type.equals("fios")) {
        _aa = OFAnalyzer.getFIOSAnalyzer(_tagName,
                TokenUtil.<T, Value, Type>getTokenManager(new SootValueTypeManager()), getStmtGraphFactory());
    } else if (type.equals("fsoi")) {
        _aa = OFAnalyzer.getFSOIAnalyzer(_tagName,
                TokenUtil.<T, Value, Type>getTokenManager(new SootValueTypeManager()), getStmtGraphFactory());
    } else if (type.equals("fsos")) {
        _aa = OFAnalyzer.getFSOSAnalyzer(_tagName,
                TokenUtil.<T, Value, Type>getTokenManager(new SootValueTypeManager()), getStmtGraphFactory());
    } else if (type.equals("fioirt")) {
        _aa = OFAnalyzer.getFSOIRTAnalyzer(_tagName,
                TokenUtil.<T, Value, Type>getTokenManager(new SootValueTypeManager()), getStmtGraphFactory());
    } else if (type.equals("fsoirt")) {
        _aa = OFAnalyzer.getFSOIRTAnalyzer(_tagName,
                TokenUtil.<T, Value, Type>getTokenManager(new SootValueTypeManager()), getStmtGraphFactory());
    } else {
        throw new IllegalArgumentException(
                "ofa-type has to be one of the following: fioi, fsoi, fios, fsos, fioirt," + " fsoirt.");
    }

    final ValueAnalyzerBasedProcessingController _pc = new ValueAnalyzerBasedProcessingController();
    final Collection<IProcessor> _processors = new ArrayList<IProcessor>();
    final CallGraphInfo _cgi = new CallGraphInfo(new PairManager(false, true));
    final OFABasedCallInfoCollector _callGraphInfoCollector = new OFABasedCallInfoCollector();
    final Collection<SootMethod> _rm = new ArrayList<SootMethod>();
    final ProcessingController _xmlcgipc = new ProcessingController();
    final MetricsProcessor _countingProcessor = new MetricsProcessor();
    final OneAllStmtSequenceRetriever _ssr = new OneAllStmtSequenceRetriever();
    _ssr.setStmtGraphFactory(getStmtGraphFactory());
    _pc.setStmtSequencesRetriever(_ssr);
    _pc.setAnalyzer(_aa);

    final IProcessingFilter _tagFilter = new TagBasedProcessingFilter(_tagName);
    _pc.setProcessingFilter(_tagFilter);
    _xmlcgipc.setEnvironment(_aa.getEnvironment());

    final IProcessingFilter _xmlFilter = new CGBasedXMLizingProcessingFilter(_cgi);
    _xmlFilter.chain(_tagFilter);
    _xmlcgipc.setProcessingFilter(_xmlFilter);
    _xmlcgipc.setStmtSequencesRetriever(_ssr);

    final Map _info = new HashMap();
    _info.put(IValueAnalyzer.ID, _aa);
    _info.put(IValueAnalyzer.TAG_ID, _tagName);

    final List<Object> _roots = new ArrayList<Object>();

    if (cumulative) {
        _roots.add(getRootMethods());
    } else {
        _roots.addAll(getRootMethods());
    }
    Collections.sort(_roots, ToStringBasedComparator.getComparator());
    writeInfo("Root methods are: " + _roots.size() + "\n" + _roots);

    for (final Iterator<Object> _k = _roots.iterator(); _k.hasNext();) {
        _rm.clear();

        final Object _root = _k.next();
        final String _fileBaseName = getBaseNameOfFileAndRootMethods(_root, _rm);

        writeInfo("RootMethod: " + _root);
        writeInfo("BEGIN: FA");

        final long _start = System.currentTimeMillis();
        _aa.reset();
        getBbm().reset();

        _aa.analyze(getEnvironment(), _rm);

        final long _stop = System.currentTimeMillis();
        addTimeLog("FA", _stop - _start);
        writeInfo("END: FA");

        _callGraphInfoCollector.reset();
        _processors.clear();
        _processors.add(_callGraphInfoCollector);
        _processors.add(_countingProcessor);
        _pc.reset();
        _pc.driveProcessors(_processors);
        _processors.clear();
        _cgi.reset();
        _cgi.createCallGraphInfo(_callGraphInfoCollector.getCallInfo());

        final ByteArrayOutputStream _stream = new ByteArrayOutputStream();
        new PrintWriter(_stream).write("STATISTICS: " + MapUtils.verbosePrint(
                new TreeMap<MetricKeys, Map<Object, Integer>>(_countingProcessor.getStatistics())));
        writeInfo(_stream.toString());

        _info.put(AbstractXMLizer.FILE_NAME_ID, _fileBaseName);
        _info.put(IStmtGraphFactory.ID, getStmtGraphFactory());
        xmlizer.writeXML(_info);

        if (dumpJimple) {
            ((AbstractXMLizer) xmlizer).dumpJimple(_fileBaseName, xmlizer.getXmlOutputDir(), _xmlcgipc);
        }

        final OFAStringizer _str = new OFAStringizer();
        _str.getOFAInfoAsString(_info, new PrintWriter(System.out));
    }
}