Example usage for java.util Collection toArray

List of usage examples for java.util Collection toArray

Introduction

This page lists example usages of java.util.Collection.toArray, drawn from open-source projects.

Prototype

<T> T[] toArray(T[] a) 

Source Link

Document

Returns an array containing all of the elements in this collection; the runtime type of the returned array is that of the specified array.

Usage

From source file:com.wingnest.play2.origami.plugin.OrigamiPlugin.java

/**
 * Registers all {@link GraphVertexModel} and {@link GraphEdgeModel} subtypes found in the
 * configured model packages as OrientDB vertex/edge schema classes, mirrors their Java
 * inheritance in the schema, then registers database listeners and record hooks.
 * Any failure is wrapped in an {@code OrigamiUnexpectedException}; the database handle
 * is always closed.
 */
private void registerGraphClasses() {
    final OGraphDatabase db = GraphDB.open();
    try {
        debug("Registering Graph Classes");

        final Set<Class<GraphVertexModel>> vertexClasses = new HashSet<Class<GraphVertexModel>>();
        final Set<Class<GraphEdgeModel>> edgeClasses = new HashSet<Class<GraphEdgeModel>>();

        // Collect every vertex and edge model class from the configured packages.
        for (String pkg : models) {
            vertexClasses.addAll(getSubTypesOf(pkg, GraphVertexModel.class));
            edgeClasses.addAll(getSubTypesOf(pkg, GraphEdgeModel.class));
        }

        @SuppressWarnings("unchecked")
        final Collection<Class<?>> javaClasses = CollectionUtils.union(vertexClasses, edgeClasses);

        // Sort so that superclasses come before their subclasses: a parent's schema
        // class must already exist when a child registers it as its super class below.
        final Class<?>[] javaClassArray = javaClasses.toArray(new Class<?>[0]);
        Arrays.sort(javaClassArray, new Comparator<Class<?>>() {
            @Override
            public int compare(Class<?> o1, Class<?> o2) {
                if (o1.equals(o2))
                    return 0;
                if (o1.isAssignableFrom(o2))
                    return -1; // o1 is an ancestor of o2, so it sorts first
                if (o2.isAssignableFrom(o1))
                    return 1;
                // Unrelated classes: order by hierarchy depth (shallower first).
                int o1cnt = calSuperclassCount(o1);
                int o2cnt = calSuperclassCount(o2);
                return (o1cnt - o2cnt);
            }
        });

        // Re-populate the collection in sorted order; the registration loop below
        // relies on this ordering.
        javaClasses.clear();
        javaClasses.addAll(Arrays.asList(javaClassArray));

        final OSchema schema = db.getMetadata().getSchema();
        for (final Class<?> javaClass : javaClasses) {
            final String entityName = javaClass.getSimpleName();
            final OClass oClass;
            if (GraphVertexModel.class.isAssignableFrom(javaClass)) {
                final String className = javaClass.getSimpleName();
                debug("Entity: %s", className);
                // Reuse the schema class when it already exists, otherwise create it.
                if (schema.existsClass(className)) {
                    oClass = schema.getClass(className);
                } else {
                    oClass = db.createVertexType(className);
                }
                graphEntityMap.put(className, javaClass);
                // Mirror Java inheritance in the schema when the superclass is also registered.
                final Class<?> sclass = javaClass.getSuperclass();
                if (javaClasses.contains(sclass)) {
                    final OClass sClass = db.getMetadata().getSchema().getClass(sclass.getSimpleName());
                    db.getMetadata().getSchema().getClass(entityName).setSuperClass(sClass);
                }
            } else if (GraphEdgeModel.class.isAssignableFrom(javaClass)) {
                final String className = javaClass.getSimpleName();
                debug("Entity: %s", className);
                if (schema.existsClass(className)) {
                    oClass = schema.getClass(className);
                } else {
                    oClass = db.createEdgeType(className);
                }
                graphEntityMap.put(className, javaClass);
                final Class<?> sclass = javaClass.getSuperclass();
                if (javaClasses.contains(sclass)) {
                    final OClass sClass = db.getMetadata().getSchema().getClass(sclass.getSimpleName());
                    db.getMetadata().getSchema().getClass(entityName).setSuperClass(sClass);
                }
            } else {
                // Unreachable: javaClasses is the union of vertex and edge classes only.
                throw new IllegalStateException("bug!?");
            }
            maintainProperties(oClass, javaClass);
        }
        debug("Registering Database Listeners");
        for (final Class<? extends ODatabaseListener> listener : getSubTypesOf("listeners",
                ODatabaseListener.class)) {
            debug("Listener: %s", listener.getName());
            GraphDB.getListeners().add(listener);
        }
        debug("Registering Record Hooks");
        for (final Class<? extends ORecordHook> hook : getSubTypesOf("hooks", ORecordHook.class)) {
            debug("Hook: %s", hook.getName());
            GraphDB.getGraphHooks().add(hook);
        }
    } catch (Exception e) {
        throw new OrigamiUnexpectedException(e);
    } finally {
        db.close();
    }
}

From source file:de.hpi.isg.mdms.benchmark.ConstraintInsertPerfomanceBenchmark.java

/**
 * Benchmarks insertion of randomly generated inclusion dependencies (INDs) into a
 * Java-serialized default metadata store, logging gross/net throughput and the
 * resulting file size.
 *
 * @throws Exception if the metadata store cannot be created, written, or flushed
 */
@Test
public void testInsertInclusionDependenciesIntoDefaultMetadataStore() throws Exception {

    LOGGER.info("Creating Java-serialized metadata store...");
    File metadataStoreFile = createTempFile("ser");
    MetadataStore metadataStore = MetadataStoreFactory.createAndSaveDefaultMetadataStore(metadataStoreFile);

    LOGGER.info("Creating schema...");
    int numTables = 1000;
    int numColumnsPerTable = 100;
    Schema schema = createSchema(metadataStore, numTables, numColumnsPerTable);
    metadataStore.flush();

    LOGGER.info("Generating INDs...");
    int numDesiredInds = 100000;
    // Pick probability so that roughly numDesiredInds pairs are expected over all
    // column-pair combinations.
    double indProbablity = numDesiredInds / Math.pow(numTables * numColumnsPerTable, 2);
    // Boost probablity to speed up generation.
    indProbablity = Math.sqrt(indProbablity);

    Collection<Column[]> inclusionDependencies = new LinkedList<Column[]>();
    Random random = new Random();
    OuterLoop: for (final Table table1 : schema.getTables()) {
        for (final Table table2 : schema.getTables()) {
            for (final Column column1 : table1.getColumns()) {
                for (final Column column2 : table2.getColumns()) {
                    if (column1 != column2 && random.nextDouble() <= indProbablity) {
                        inclusionDependencies.add(new Column[] { column1, column2 });
                        if (inclusionDependencies.size() >= numDesiredInds) {
                            break OuterLoop;
                        }
                    }
                }
            }
        }
    }

    LOGGER.info("Inserting the {} generated INDs...", inclusionDependencies.size());
    long startTimeGross = System.currentTimeMillis();
    ConstraintCollection<InclusionDependency> constraintCollection = metadataStore
            .createConstraintCollection(null, InclusionDependency.class);
    long startTimeNet = System.currentTimeMillis();
    for (Column[] columnPair : inclusionDependencies) {
        Collection<Column> dependentColumns = Collections.singletonList(columnPair[0]);
        Collection<Column> referencedColumns = Collections.singletonList(columnPair[1]);
        final InclusionDependency.Reference reference = new InclusionDependency.Reference(
                dependentColumns.toArray(new Column[dependentColumns.size()]),
                referencedColumns.toArray(new Column[referencedColumns.size()]));
        InclusionDependency.buildAndAddToCollection(reference, constraintCollection);
    }
    long endTimeNet = System.currentTimeMillis();
    metadataStore.flush();
    long endTimeGross = System.currentTimeMillis();
    // Throughput must be based on the number of INDs actually inserted (the original
    // used the column count); also guard against a zero elapsed time on fast machines.
    int numInds = inclusionDependencies.size();
    double numInsertsPerSecGross = 1000d * numInds / Math.max(1, endTimeGross - startTimeGross);
    double numInsertsPerSecNet = 1000d * numInds / Math.max(1, endTimeNet - startTimeNet);
    LOGGER.info("[gross] Inserted in {} ms ({} inserts/s)", endTimeGross - startTimeGross,
            numInsertsPerSecGross);
    LOGGER.info("[net]   Inserted in {} ms ({} inserts/s)", endTimeNet - startTimeNet, numInsertsPerSecNet);
    LOGGER.info("File size: {} MB", metadataStoreFile.length() / (1024 * 1024));
}

From source file:de.hpi.isg.mdms.benchmark.ConstraintInsertPerfomanceBenchmark.java

/**
 * Benchmarks insertion of randomly generated inclusion dependencies (INDs) into an
 * SQLite-backed RDBMS metadata store, logging gross/net throughput and the resulting
 * file size.
 *
 * @throws Exception if the metadata store cannot be created, written, or flushed
 */
@Test
public void testInsertInclusionDependenciesIntoRDBMSMetadataStore() throws Exception {

    LOGGER.info("Creating RDBMS metadata store...");
    File metadataStoreFile = createTempFile("sqlite");
    MetadataStore metadataStore = RDBMSMetadataStore
            .createNewInstance(SQLiteInterface.createForFile(metadataStoreFile));

    LOGGER.info("Creating schema...");
    int numTables = 1000;
    int numColumnsPerTable = 100;
    Schema schema = createSchema(metadataStore, numTables, numColumnsPerTable);
    metadataStore.flush();

    LOGGER.info("Generating INDs...");
    int numDesiredInds = 100000;
    // Pick probability so that roughly numDesiredInds pairs are expected over all
    // column-pair combinations.
    double indProbablity = numDesiredInds / Math.pow(numTables * numColumnsPerTable, 2);
    // Boost probablity to speed up generation.
    indProbablity = Math.sqrt(indProbablity);

    Collection<Column[]> inclusionDependencies = new LinkedList<Column[]>();
    Random random = new Random();
    OuterLoop: for (final Table table1 : schema.getTables()) {
        for (final Table table2 : schema.getTables()) {
            for (final Column column1 : table1.getColumns()) {
                for (final Column column2 : table2.getColumns()) {
                    if (column1 != column2 && random.nextDouble() <= indProbablity) {
                        inclusionDependencies.add(new Column[] { column1, column2 });
                        if (inclusionDependencies.size() >= numDesiredInds) {
                            break OuterLoop;
                        }
                    }
                }
            }
        }
    }

    LOGGER.info("Inserting the {} generated INDs...", inclusionDependencies.size());
    long startTimeGross = System.currentTimeMillis();
    ConstraintCollection<InclusionDependency> constraintCollection = metadataStore
            .createConstraintCollection(null, InclusionDependency.class);
    long startTimeNet = System.currentTimeMillis();
    for (Column[] columnPair : inclusionDependencies) {
        Collection<Column> dependentColumns = Collections.singletonList(columnPair[0]);
        Collection<Column> referencedColumns = Collections.singletonList(columnPair[1]);
        final InclusionDependency.Reference reference = new InclusionDependency.Reference(
                dependentColumns.toArray(new Column[dependentColumns.size()]),
                referencedColumns.toArray(new Column[referencedColumns.size()]));
        InclusionDependency.buildAndAddToCollection(reference, constraintCollection);
    }
    long endTimeNet = System.currentTimeMillis();
    metadataStore.flush();
    long endTimeGross = System.currentTimeMillis();
    // Throughput must be based on the number of INDs actually inserted (the original
    // used the column count); also guard against a zero elapsed time on fast machines.
    int numInds = inclusionDependencies.size();
    double numInsertsPerSecGross = 1000d * numInds / Math.max(1, endTimeGross - startTimeGross);
    double numInsertsPerSecNet = 1000d * numInds / Math.max(1, endTimeNet - startTimeNet);
    LOGGER.info("[gross] Inserted in {} ms ({} inserts/s)", endTimeGross - startTimeGross,
            numInsertsPerSecGross);
    LOGGER.info("[net]   Inserted in {} ms ({} inserts/s)", endTimeNet - startTimeNet, numInsertsPerSecNet);
    LOGGER.info("File size: {} MB", metadataStoreFile.length() / (1024 * 1024));
}

From source file:com.idega.company.business.impl.CompanyServiceImpl.java

/**
 * Resolves the users for the given user IDs.
 *
 * @param ids the user IDs to look up; may be null or empty
 * @return the matching users, or {@code null} when {@code ids} is empty or the
 *         lookup fails (failures are logged at WARNING level)
 */
protected Collection<User> getUsers(Collection<String> ids) {
    if (ListUtil.isEmpty(ids)) {
        return null;
    }

    try {
        return getUserBusiness().getUsers(ids.toArray(new String[ids.size()]));
    } catch (EJBException | RemoteException e) {
        // Both failure modes are handled identically: log and fall through to null.
        getLogger().log(Level.WARNING, "Unable to get " + User.class, e);
    }

    return null;
}

From source file:net.yacy.grid.io.index.YaCyQuery.java

/**
 * Parses a search query string into an Elasticsearch {@link QueryBuilder}.
 * Supports a twitter-like syntax (quoted phrases, '-' negation, name:value
 * modifiers, /constraints) plus date modifiers (since:, until:, daterange:).
 *
 * NOTE(review): dq and sq are fields declared elsewhere in this class —
 * presumably the double-quote and single-quote characters; confirm there.
 *
 * @param q the raw query string (the preparse step has already split OR groups)
 * @param timezoneOffset offset applied when parsing since/until/daterange dates
 * @return the composed query (a single sub-query, or a bool query combining all)
 */
private QueryBuilder parse(String q, int timezoneOffset) {
    // Detect usage of the OR connective. Because of the preparse step we will have only OR or only AND here.
    q = q.replaceAll(" AND ", " "); // AND is default
    boolean ORconnective = q.indexOf(" OR ") >= 0;
    q = q.replaceAll(" OR ", " "); // if we know that all terms are OR, we remove that and apply it later. Because we splitted into OR groups it is right to use OR here only

    // tokenize the query
    Set<String> qe = new LinkedHashSet<String>();
    Matcher m = tokenizerPattern.matcher(q);
    while (m.find())
        qe.add(m.group(1));

    // twitter search syntax:
    //   term1 term2 term3 - all three terms shall appear
    //   "term1 term2 term3" - exact match of all terms
    //   term1 OR term2 OR term3 - any of the three terms shall appear
    //   from:user - tweets posted from that user
    //   to:user - tweets posted to that user
    //   @user - tweets which mention that user
    //   near:"location" within:xmi - tweets that are near that location
    //   #hashtag - tweets containing the given hashtag
    //   since:2015-04-01 until:2015-04-03 - tweets within given time range
    // additional constraints:
    //   /image /audio /video /place - restrict to tweets which have attached images, audio, video or place
    ArrayList<String> text_positive_match = new ArrayList<>();
    ArrayList<String> text_negative_match = new ArrayList<>();
    ArrayList<String> text_positive_filter = new ArrayList<>();
    ArrayList<String> text_negative_filter = new ArrayList<>();
    Multimap<String, String> modifier = HashMultimap.create();
    Set<String> constraints_positive = new HashSet<>();
    Set<String> constraints_negative = new HashSet<>();
    // Classify each token: /constraint, -/constraint, name:value modifier, or plain text.
    for (String t : qe) {
        if (t.length() == 0)
            continue;
        if (t.startsWith("/")) {
            constraints_positive.add(t.substring(1));
            continue;
        } else if (t.startsWith("-/")) {
            constraints_negative.add(t.substring(2));
            continue;
        } else if (t.indexOf(':') > 0) {
            int p = t.indexOf(':');
            String name = t.substring(0, p).toLowerCase();
            String value = t.substring(p + 1);
            // A '|'-separated value list expands to multiple entries for the same modifier.
            if (value.indexOf('|') > 0) {
                String[] values = value.split("\\|");
                for (String v : values) {
                    modifier.put(name, v);
                }
            } else {
                modifier.put(name, value);
            }
            continue;
        } else {
            // patch characters that will confuse elasticsearch or have a different meaning
            boolean negative = t.startsWith("-");
            if (negative)
                t = t.substring(1);
            if (t.length() == 0)
                continue;
            // Quoted terms become exact-match filters.
            if ((t.charAt(0) == dq && t.charAt(t.length() - 1) == dq)
                    || (t.charAt(0) == sq && t.charAt(t.length() - 1) == sq)) {
                t = t.substring(1, t.length() - 1);
                if (negative) {
                    text_negative_filter.add(t);
                    this.negativeBag.add(t);
                } else {
                    text_positive_filter.add(t);
                    this.positiveBag.add(t);
                }
            } else if (t.indexOf('-') > 0) {
                // this must be handled like a quoted string without the minus
                t = t.replace('-', space);
                if (negative) {
                    text_negative_filter.add(t);
                    this.negativeBag.add(t);
                } else {
                    text_positive_filter.add(t);
                    this.positiveBag.add(t);
                }
            } else {
                // Plain term: fuzzy match.
                if (negative) {
                    text_negative_match.add(t);
                    this.negativeBag.add(t);
                } else {
                    text_positive_match.add(t);
                    this.positiveBag.add(t);
                }
            }
            continue;
        }
    }

    // construct a ranking
    if (modifier.containsKey("boost")) {
        this.boosts.patchWithModifier(modifier.get("boost").iterator().next());
    }

    // compose query for text
    List<QueryBuilder> queries = new ArrayList<>();
    // fuzzy matching
    if (!text_positive_match.isEmpty())
        queries.add(simpleQueryBuilder(String.join(" ", text_positive_match), ORconnective, boosts));
    if (!text_negative_match.isEmpty())
        queries.add(QueryBuilders.boolQuery()
                .mustNot(simpleQueryBuilder(String.join(" ", text_negative_match), ORconnective, boosts)));
    // exact matching
    for (String text : text_positive_filter) {
        queries.add(exactMatchQueryBuilder(text, this.boosts));
    }
    for (String text : text_negative_filter) {
        queries.add(QueryBuilders.boolQuery().mustNot(exactMatchQueryBuilder(text, this.boosts)));
    }

    // apply modifiers
    Collection<String> values;
    modifier_handling: for (String[] modifierType : modifierTypes) {
        String modifier_name = modifierType[0];
        String index_name = modifierType[1];

        // Positive modifier: require the indexed field to match any of the values.
        if ((values = modifier.get(modifier_name)).size() > 0) {
            if (modifier_name.equals("yacy")) {
                values.forEach(y -> this.yacyModifiers.add(y));
                continue modifier_handling;
            }
            // For site: also accept the host with/without a leading "www." prefix.
            if (modifier_name.equals("site") && values.size() == 1) {
                String host = values.iterator().next();
                if (host.startsWith("www."))
                    values.add(host.substring(4));
                else
                    values.add("www." + host);
            }
            queries.add(QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(index_name, values)));
            continue modifier_handling;
        }

        // Negative modifier ("-name:"): exclude documents matching any of the values.
        if ((values = modifier.get("-" + modifier_name)).size() > 0) {
            if (modifier_name.equals("site") && values.size() == 1) {
                String host = values.iterator().next();
                if (host.startsWith("www."))
                    values.add(host.substring(4));
                else
                    values.add("www." + host);
            }
            queries.add(QueryBuilders.boolQuery()
                    .mustNot(QueryBuilders.constantScoreQuery(QueryBuilders.termsQuery(index_name, values))));
            continue modifier_handling;
        }
    }
    // collection: only applies when no collections were set externally.
    if (modifier.containsKey("collection") && (this.collections == null || this.collections.length == 0)) {
        Collection<String> c = modifier.get("collection");
        this.collections = c.toArray(new String[c.size()]);
    }
    // daterange:from..to is rewritten into since:/until: modifiers.
    if (modifier.containsKey("daterange")) {
        String dr = modifier.get("daterange").iterator().next();
        if (dr.length() > 0) {
            String from_to[] = dr.endsWith("..") ? new String[] { dr.substring(0, dr.length() - 2), "" }
                    : dr.startsWith("..") ? new String[] { "", dr.substring(2) } : dr.split("\\.\\.");
            if (from_to.length == 2) {
                if (from_to[0] != null && from_to[0].length() > 0)
                    try {
                        modifier.put("since", DateParser.dayDateFormat
                                .format(DateParser.parse(from_to[0], timezoneOffset).getTime()));
                    } catch (ParseException e) {
                        // unparsable date: best-effort, silently skip this bound
                    }
                if (from_to[1] != null && from_to[1].length() > 0)
                    try {
                        modifier.put("until", DateParser.dayDateFormat
                                .format(DateParser.parse(from_to[1], timezoneOffset).getTime()));
                    } catch (ParseException e) {
                        // unparsable date: best-effort, silently skip this bound
                    }
            }
        }
    }
    if (modifier.containsKey("since"))
        try {
            Calendar since = DateParser.parse(modifier.get("since").iterator().next(), timezoneOffset);
            this.since = since.getTime();
            RangeQueryBuilder rangeQuery = QueryBuilders
                    .rangeQuery(WebMapping.last_modified.getMapping().name())
                    .from(DateParser.formatGSAFS(this.since));
            if (modifier.containsKey("until")) {
                Calendar until = DateParser.parse(modifier.get("until").iterator().next(), timezoneOffset);
                if (until.get(Calendar.HOUR) == 0 && until.get(Calendar.MINUTE) == 0) {
                    // until must be the day which is included in results.
                    // To get the result within the same day, we must add one day.
                    until.add(Calendar.DATE, 1);
                }
                this.until = until.getTime();
                rangeQuery.to(DateParser.formatGSAFS(this.until));
            } else {
                this.until = new Date(Long.MAX_VALUE);
            }
            queries.add(rangeQuery);
        } catch (ParseException e) {
            // unparsable date: best-effort, skip the range query entirely
        }
    else if (modifier.containsKey("until"))
        try {
            Calendar until = DateParser.parse(modifier.get("until").iterator().next(), timezoneOffset);
            if (until.get(Calendar.HOUR) == 0 && until.get(Calendar.MINUTE) == 0) {
                // until must be the day which is included in results.
                // To get the result within the same day, we must add one day.
                until.add(Calendar.DATE, 1);
            }
            this.until = until.getTime();
            RangeQueryBuilder rangeQuery = QueryBuilders
                    .rangeQuery(WebMapping.last_modified.getMapping().name())
                    .to(DateParser.formatGSAFS(this.until));
            queries.add(rangeQuery);
        } catch (ParseException e) {
            // unparsable date: best-effort, skip the range query entirely
        }

    // now combine queries with OR or AND operator

    // simple case where we have one query only
    if (queries.size() == 1) {
        return queries.iterator().next();
    }

    BoolQueryBuilder b = QueryBuilders.boolQuery();
    for (QueryBuilder filter : queries) {
        if (ORconnective)
            b.should(filter);
        else
            b.must(filter);
    }
    if (ORconnective)
        b.minimumShouldMatch(1);

    return b;
}

From source file:net.kaczmarzyk.spring.data.jpa.web.SimpleSpecificationResolver.java

/**
 * Builds a {@code Specification} instance from the web request's parameter values
 * as described by the given {@code Spec} definition.
 *
 * Argument values are gathered from {@code def.params()} when present, otherwise
 * from the parameter named by {@code def.path()}. When no values are found at all,
 * {@code null} is returned. The concrete specification class is instantiated
 * reflectively via its {@code (String, String[])} constructor, or the
 * {@code (String, String[], String[])} constructor when {@code def.config()} is
 * non-empty. Any reflective or lookup failure is rethrown as
 * {@link IllegalStateException}.
 */
@SuppressWarnings("unchecked")
Specification<Object> buildSpecification(NativeWebRequest req, Spec def) {
    try {
        Collection<String> collected = new ArrayList<String>();
        String[] webParams = def.params();
        if (webParams.length == 0) {
            // No explicit parameter names: fall back to the spec's path.
            addParametersValuesToArgs(req.getParameterValues(def.path()), collected);
        } else {
            for (String webParam : webParams) {
                addParametersValuesToArgs(req.getParameterValues(webParam), collected);
            }
        }

        if (collected.isEmpty()) {
            return null;
        }

        String[] httpParamValues = collected.toArray(new String[collected.size()]);
        if (def.config().length == 0) {
            return def.spec().getConstructor(String.class, String[].class)
                    .newInstance(def.path(), httpParamValues);
        }
        return def.spec().getConstructor(String.class, String[].class, String[].class)
                .newInstance(def.path(), httpParamValues, def.config());
    } catch (Exception e) {
        throw new IllegalStateException(e);
    }
}

From source file:com.jaspersoft.jasperserver.jaxrs.job.JobsJaxrsService.java

/**
 * Converts a client-side {@link ReportJob} into its server-side form by turning the
 * job source's raw parameter collections into typed values via the input controls
 * logic, preserving the job's output time zone across that conversion.
 *
 * @param job the job to convert; its source parameters are replaced in place
 * @return the same job instance with typed source parameters
 * @throws IllegalParameterValueException if a parameter value has the wrong type
 * @throws ResourceNotFoundException if the report unit URI cannot be resolved
 */
protected ReportJob toServer(ReportJob job) throws IllegalParameterValueException, ResourceNotFoundException {
    if (job.getSource() != null) {
        if (job.getSource().getParameters() == null) {
            job.getSource().setParameters(new HashMap<String, Object>());
        }
        final Map<String, Object> parameters = job.getSource().getParameters();
        // safe output time zone before input controls logic run
        final String outputTimeZone = job.getOutputTimeZone();
        try {
            // Parameters come as Collection<String> but we need String[]. Convert them.
            // Iterate over entrySet to avoid a second map lookup per key.
            final Map<String, String[]> adoptedParameters = new HashMap<String, String[]>();
            for (Map.Entry<String, Object> parameter : parameters.entrySet()) {
                if (parameter.getValue() instanceof Collection) {
                    // ClassCastException is properly processed below. If happens, then input format is incorrect
                    @SuppressWarnings("unchecked")
                    final Collection<String> collection = (Collection) parameter.getValue();
                    adoptedParameters.put(parameter.getKey(),
                            collection.toArray(new String[collection.size()]));
                }
            }
            final Map<String, Object> typedParameters = inputControlsLogicService
                    .getTypedParameters(job.getSource().getReportUnitURI(), adoptedParameters);
            if (outputTimeZone != null) {
                // restore output time zone
                typedParameters.put(JRParameter.REPORT_TIME_ZONE, TimeZone.getTimeZone(outputTimeZone));
            }
            job.getSource().setParameters(typedParameters);
        } catch (ClassCastException e) {
            log.error(e);
            throw new IllegalParameterValueException("job.source.parameters", "Map with content of wrong type");
        } catch (InputControlsValidationException e) {
            throw new JSValidationException(e.getErrors());
        } catch (CascadeResourceNotFoundException e) {
            throw new ResourceNotFoundException("URI:" + e.getResourceUri() + " Type:" + e.getResourceType());
        }
    }
    return job;
}

From source file:ca.nines.ise.cmd.Command.java

/**
 * Get a list of file paths from the command line arguments.
 * <p>
 * The first argument (the command name) is skipped; remaining arguments are treated
 * as file paths. When no paths are given, all ".txt" files below the "input"
 * directory are collected instead.
 *
 * @param cmd the parsed command line
 * @return the files to process, sorted
 */
public File[] getFilePaths(CommandLine cmd) {
    Collection<File> fileList = new ArrayList<>();

    List<?> argList = cmd.getArgList();
    // Guard: on an empty argument list, subList(1, 0) would throw an
    // IndexOutOfBoundsException before the "input"-directory fallback could run.
    if (!argList.isEmpty()) {
        argList = argList.subList(1, argList.size());
    }

    if (argList.isEmpty()) {
        File dir = new File("input");
        SuffixFileFilter sfx = new SuffixFileFilter(".txt");
        fileList = FileUtils.listFiles(dir, sfx, TrueFileFilter.INSTANCE);
    } else {
        String[] args = argList.toArray(new String[argList.size()]);
        for (String name : args) {
            fileList.add(new File(name));
        }
    }

    File[] files = fileList.toArray(new File[fileList.size()]);
    Arrays.sort(files);
    return files;
}

From source file:com.nextep.designer.vcs.services.impl.WorkspaceService.java

/**
 * Moves the given versionables into the target container, unlocking them first and
 * persisting each moved element.
 *
 * @param versionToMove the elements to move
 * @param targetContainer destination container; when {@code null}, the current
 *            workspace is used
 * @param monitor progress monitor, advanced once per moved element
 */
@Override
public void move(Collection<IVersionable<?>> versionToMove, IVersionContainer targetContainer,
        IProgressMonitor monitor) {
    // Apply the null-default BEFORE dereferencing the container: the original code
    // called targetContainer.getName() in beginTask() first, which threw a
    // NullPointerException on a null argument and made this check unreachable.
    if (targetContainer == null) {
        targetContainer = getCurrentWorkspace();
    }
    monitor.beginTask(MessageFormat.format(VCSMessages.getString("movingVersionableCmdGlobal"), //$NON-NLS-1$
            targetContainer.getName()), versionToMove.size());
    Collection<IVersionable<?>> toUnlock = new ArrayList<IVersionable<?>>(versionToMove);
    // If target container is versionable, we need to unlock as well
    if (targetContainer instanceof IVersionable<?>) {
        targetContainer = versioningService.ensureModifiable(targetContainer);
    }
    // Requesting unlock if needed (cancellation would raise here)
    versioningService.unlock(true, toUnlock.toArray(new IVersionable<?>[toUnlock.size()]));
    // Everything is ok, moving
    for (IVersionable<?> v : versionToMove) {
        monitor.subTask(MessageFormat.format(VCSMessages.getString("movingVersionableCmd"), v.getName(), v //$NON-NLS-1$
                .getContainer().getName(), targetContainer.getName()));
        v.getContainer().removeVersionable(v);
        targetContainer.addVersionable(v, new ImportPolicyAddOnly());
        CorePlugin.getPersistenceAccessor().save(v);
        monitor.worked(1);
    }
    monitor.done();
}

From source file:com.espertech.esper.core.start.EPPreparedExecuteMethod.java

/**
 * Executes the prepared query.
 *
 * @param contextPartitionSelectors one selector per named window in the from-clause,
 *            or {@code null} to select all context partitions
 * @return query results
 * @throws IllegalArgumentException if the selector count does not match the number
 *             of named windows in the from-clause
 */
public EPPreparedQueryResult execute(ContextPartitionSelector[] contextPartitionSelectors) {
    int numStreams = processors.length;

    if (contextPartitionSelectors != null && contextPartitionSelectors.length != numStreams) {
        throw new IllegalArgumentException(
                "Number of context partition selectors does not match the number of named windows in the from-clause");
    }

    // handle non-context case
    if (statementSpec.getOptionalContextName() == null) {

        // Take one filtered snapshot per stream and process them together.
        Collection<EventBean>[] snapshots = new Collection[numStreams];
        for (int i = 0; i < numStreams; i++) {

            ContextPartitionSelector selector = contextPartitionSelectors == null ? null
                    : contextPartitionSelectors[i];
            snapshots[i] = getStreamFilterSnapshot(i, selector);
        }

        resultSetProcessor.clear();
        return process(snapshots);
    }

    List<ContextPartitionResult> contextPartitionResults = new ArrayList<ContextPartitionResult>();

    // context partition runtime query
    // Resolve the agent instance ids addressed by the first selector (or all of them).
    Collection<Integer> agentInstanceIds;
    if (contextPartitionSelectors == null
            || contextPartitionSelectors[0] instanceof ContextPartitionSelectorAll) {
        agentInstanceIds = processors[0].getProcessorInstancesAll();
    } else {
        ContextManager contextManager = services.getContextManagementService()
                .getContextManager(statementSpec.getOptionalContextName());
        agentInstanceIds = contextManager.getAgentInstanceIds(contextPartitionSelectors[0]);
    }

    // collect events and agent instances
    for (int agentInstanceId : agentInstanceIds) {
        NamedWindowProcessorInstance processorInstance = processors[0].getProcessorInstance(agentInstanceId);
        if (processorInstance != null) {
            Collection<EventBean> coll = processorInstance.getTailViewInstance().snapshot(filters[0],
                    statementSpec.getAnnotations());
            contextPartitionResults.add(new ContextPartitionResult(coll,
                    processorInstance.getTailViewInstance().getAgentInstanceContext()));
        }
    }

    // process context partitions
    ArrayDeque<EventBean[]> events = new ArrayDeque<EventBean[]>();
    for (ContextPartitionResult contextPartitionResult : contextPartitionResults) {
        Collection<EventBean> snapshot = contextPartitionResult.getEvents();
        // Apply the where-clause filter (if any) before producing rows.
        if (statementSpec.getFilterRootNode() != null) {
            snapshot = getFiltered(snapshot, Collections.singletonList(statementSpec.getFilterRootNode()));
        }
        EventBean[] rows = snapshot.toArray(new EventBean[snapshot.size()]);
        resultSetProcessor.setAgentInstanceContext(contextPartitionResult.getContext());
        UniformPair<EventBean[]> results = resultSetProcessor.processViewResult(rows, null, true);
        if (results != null && results.getFirst() != null && results.getFirst().length > 0) {
            events.add(results.getFirst());
        }
    }
    return new EPPreparedQueryResult(resultSetProcessor.getResultEventType(), EventBeanUtility.flatten(events));
}