Example usage for java.util LinkedHashSet toArray

List of usage examples for java.util LinkedHashSet toArray

Introduction

In this page you can find the example usage for java.util LinkedHashSet toArray.

Prototype

<T> T[] toArray(T[] a);

Source Link

Document

Returns an array containing all of the elements in this set; the runtime type of the returned array is that of the specified array.

Usage

From source file:org.nuxeo.ecm.webengine.loader.store.ResourceStoreClassLoader.java

/**
 * Builds a class loader that resolves resources from the given ordered set of
 * stores, delegating to {@code pParent} first.
 *
 * @param pParent parent class loader to delegate to
 * @param cp ordered set of resource stores forming the class path
 */
protected ResourceStoreClassLoader(final ClassLoader pParent, LinkedHashSet<ResourceStore> cp) {
    super(pParent);
    this.cp = cp;
    // Materialize the store set as an array only when there is something to search.
    if (cp.isEmpty()) {
        return;
    }
    stores = cp.toArray(new ResourceStore[cp.size()]);
}

From source file:org.pdfsam.console.business.pdf.handlers.SplitCmdExecutor.java

/**
 * Executes the split of a pdf document when split type is S_BLEVEL: one split
 * point is created just before every page targeted by a bookmark at depth
 * {@code bLevel}, optionally filtered by the command's bookmark regexp.
 *
 * @param inputCommand parsed command carrying the input file, the bookmarks
 *        level and an optional bookmark-matching regexp
 * @throws Exception if the level is not positive, exceeds the outline depth,
 *         or yields no destination pages
 */
private void executeBookmarksSplit(SplitParsedCommand inputCommand) throws Exception {
    pdfReader = PdfUtility.readerFor(inputCommand.getInputFile());
    int bLevel = inputCommand.getBookmarksLevel().intValue();
    // page number -> bookmark title; handed to executeSplit at the end
    Hashtable bookmarksTable = new Hashtable();
    if (bLevel > 0) {
        pdfReader.removeUnusedObjects();
        pdfReader.consolidateNamedDestinations();
        List bookmarks = SimpleBookmark.getBookmark(pdfReader);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        // Export the outline to XML so it can be queried with XPath below.
        SimpleBookmark.exportToXML(bookmarks, out, "UTF-8", false);
        ByteArrayInputStream input = new ByteArrayInputStream(out.toByteArray());
        int maxDepth = PdfUtility.getMaxBookmarksDepth(input);
        input.reset(); // rewind: the depth scan consumed the stream
        if (bLevel <= maxDepth) {
            SAXReader reader = new SAXReader();
            org.dom4j.Document document = reader.read(input);
            // head node: the outline root title becomes the title for page 1
            String headBookmarkXQuery = "/Bookmark/Title[@Action=\"GoTo\"]";
            Node headNode = document.selectSingleNode(headBookmarkXQuery);
            if (headNode != null && headNode.getText() != null && headNode.getText().trim().length() > 0) {
                bookmarksTable.put(new Integer(1), headNode.getText().trim());
            }
            // bLevel nodes: build an XPath with one /Title step per requested level
            StringBuffer buffer = new StringBuffer("/Bookmark");
            for (int i = 0; i < bLevel; i++) {
                buffer.append("/Title[@Action=\"GoTo\"]");
            }
            String xQuery = buffer.toString();
            List nodes = document.selectNodes(xQuery);
            input.close();
            input = null;
            if (nodes != null && nodes.size() > 0) {
                // ordered, duplicate-free set of split page numbers
                LinkedHashSet pageSet = new LinkedHashSet(nodes.size());
                for (Iterator nodeIter = nodes.iterator(); nodeIter.hasNext();) {
                    Node currentNode = (Node) nodeIter.next();
                    Node pageAttribute = currentNode.selectSingleNode("@Page");
                    if (pageAttribute != null && pageAttribute.getText().length() > 0) {
                        // @Page is of the form "<number> <rest>"; take the first token
                        String attribute = pageAttribute.getText();
                        int blankIndex = attribute.indexOf(' ');
                        if (blankIndex > 0) {
                            Integer currentNumber = new Integer(attribute.substring(0, blankIndex));
                            String bookmarkText = currentNode.getText().trim();
                            // fix #2789963: skip bookmarks pointing at page 0
                            if (currentNumber.intValue() > 0) {
                                // bookmarks regexp matching if any
                                if (StringUtils.isBlank(inputCommand.getBookmarkRegexp())
                                        || bookmarkText.matches(inputCommand.getBookmarkRegexp())) {
                                    // to split just before the given page
                                    if ((currentNumber.intValue()) > 1) {
                                        pageSet.add(new Integer(currentNumber.intValue() - 1));
                                    }
                                    if (StringUtils.isNotBlank(bookmarkText)) {
                                        bookmarksTable.put(currentNumber, bookmarkText.trim());
                                    }
                                }
                            }
                        }
                    }
                }
                if (pageSet.size() > 0) {
                    if (StringUtils.isBlank(inputCommand.getBookmarkRegexp())) {
                        LOG.debug("Found " + pageSet.size() + " destination pages at level " + bLevel);
                    } else {
                        LOG.debug("Found " + pageSet.size() + " destination pages at level " + bLevel
                                + " matching '" + inputCommand.getBookmarkRegexp() + "'");
                    }
                    inputCommand.setSplitPageNumbers((Integer[]) pageSet.toArray(new Integer[pageSet.size()]));
                } else {
                    throw new SplitException(SplitException.ERR_BLEVEL_NO_DEST, new String[] { "" + bLevel });
                }
            } else {
                throw new SplitException(SplitException.ERR_BLEVEL, new String[] { "" + bLevel });
            }
        } else {
            // requested level is deeper than the outline actually goes
            input.close();
            pdfReader.close();
            throw new SplitException(SplitException.ERR_BLEVEL_OUTOFBOUNDS,
                    new String[] { "" + bLevel, "" + maxDepth });

        }
    } else {
        pdfReader.close();
        throw new SplitException(SplitException.ERR_NOT_VALID_BLEVEL, new String[] { "" + bLevel });
    }
    pdfReader.close();
    executeSplit(inputCommand, bookmarksTable);
}

From source file:org.pentaho.di.ui.spoon.Spoon.java

/**
 * Opens (or imports) a file, either from the local file system via a file
 * dialog or from the repository via a selection dialog, depending on the
 * active perspective and the repository connection state.
 *
 * @param importfile true to import the file rather than open it in place
 */
public void openFile(boolean importfile) {
    SpoonPerspective activePerspective = SpoonPerspectiveManager.getInstance().getActivePerspective();

    // In case the perspective wants to handle open/save itself, let it...
    if (!importfile) {
        if (activePerspective instanceof SpoonPerspectiveOpenSaveInterface) {
            ((SpoonPerspectiveOpenSaveInterface) activePerspective).open();
            return;
        }
    }

    String activePerspectiveId = activePerspective.getId();
    boolean etlPerspective = activePerspectiveId.equals(MainSpoonPerspective.ID);

    if (rep == null || importfile || !etlPerspective) { // Load from XML

        FileDialog dialog = new FileDialog(shell, SWT.OPEN);

        // Collect filter extensions and display names from every registered file
        // listener; slot 0 of each filter array is the catch-all "all types" entry.
        LinkedHashSet<String> extensions = new LinkedHashSet<String>();
        LinkedHashSet<String> extensionNames = new LinkedHashSet<String>();
        StringBuilder allExtensions = new StringBuilder();
        for (FileListener l : fileListeners) {
            for (String ext : l.getSupportedExtensions()) {
                extensions.add("*." + ext);
                allExtensions.append("*.").append(ext).append(";");
            }
            Collections.addAll(extensionNames, l.getFileTypeDisplayNames(Locale.getDefault()));
        }
        extensions.add("*");
        extensionNames.add(BaseMessages.getString(PKG, "Spoon.Dialog.OpenFile.AllFiles"));

        String[] exts = new String[extensions.size() + 1];
        exts[0] = allExtensions.toString();
        System.arraycopy(extensions.toArray(new String[extensions.size()]), 0, exts, 1, extensions.size());

        String[] extNames = new String[extensionNames.size() + 1];
        extNames[0] = BaseMessages.getString(PKG, "Spoon.Dialog.OpenFile.AllTypes");
        System.arraycopy(extensionNames.toArray(new String[extensionNames.size()]), 0, extNames, 1,
                extensionNames.size());

        dialog.setFilterExtensions(exts);

        setFilterPath(dialog);
        String filename = dialog.open();
        if (filename != null) {

            if (importfile) {
                if (activePerspective instanceof SpoonPerspectiveOpenSaveInterface) {
                    ((SpoonPerspectiveOpenSaveInterface) activePerspective).importFile(filename);
                    return;
                }
            }

            lastDirOpened = dialog.getFilterPath();
            openFile(filename, importfile);
        }
    } else {
        // Load from the repository: let the user pick a transformation or job.
        SelectObjectDialog sod = new SelectObjectDialog(shell, rep);
        if (sod.open() != null) {
            RepositoryObjectType type = sod.getObjectType();
            String name = sod.getObjectName();
            RepositoryDirectoryInterface repDir = sod.getDirectory();

            // Load a transformation
            if (RepositoryObjectType.TRANSFORMATION.equals(type)) {
                // Loads the last version.
                TransLoadProgressDialog tlpd = new TransLoadProgressDialog(shell, rep, name, repDir, null);
                TransMeta transMeta = tlpd.open();
                // BUG FIX: tlpd.open() may return null (load cancelled/failed); the
                // original dereferenced transMeta before the null check below.
                if (transMeta != null) {
                    sharedObjectsFileMap.put(transMeta.getSharedObjects().getFilename(),
                            transMeta.getSharedObjects());
                    setTransMetaVariables(transMeta);
                    if (log.isDetailed()) {
                        log.logDetailed(BaseMessages.getString(PKG, "Spoon.Log.LoadToTransformation", name,
                                repDir.getName()));
                    }
                    props.addLastFile(LastUsedFile.FILE_TYPE_TRANSFORMATION, name, repDir.getPath(), true,
                            rep.getName());
                    addMenuLast();
                    transMeta.clearChanged();
                    // transMeta.setFilename(name); // Don't do it, it's a bad idea!
                    addTransGraph(transMeta);
                }
                refreshGraph();
                refreshTree();
            } else if (RepositoryObjectType.JOB.equals(type)) {
                // Load a job; loads the last version.
                JobLoadProgressDialog jlpd = new JobLoadProgressDialog(shell, rep, name, repDir, null);
                JobMeta jobMeta = jlpd.open();
                // BUG FIX: same null-before-dereference issue as the transformation branch.
                if (jobMeta != null) {
                    sharedObjectsFileMap.put(jobMeta.getSharedObjects().getFilename(),
                            jobMeta.getSharedObjects());
                    setJobMetaVariables(jobMeta);
                    props.addLastFile(LastUsedFile.FILE_TYPE_JOB, name, repDir.getPath(), true, rep.getName());
                    saveSettings();
                    addMenuLast();
                    addJobGraph(jobMeta);
                }
                refreshGraph();
                refreshTree();
            }
        }
    }
}

From source file:org.pentaho.di.ui.trans.steps.annotation.OptionsResolver.java

/**
 * Returns the names of all fields arriving at the given step, excluding the
 * field the model annotation itself is bound to. Returns an empty array if the
 * previous step's fields cannot be resolved (the failure is logged).
 *
 * @param transMeta transformation metadata to query
 * @param stepName step whose incoming fields are listed
 * @param modelAnnotation annotation whose own field is excluded
 * @return ordered, duplicate-free array of candidate field names
 */
public String[] resolveOrdinalFieldOptions(final TransMeta transMeta, final String stepName,
        ModelAnnotation modelAnnotation) {
    final LinkedHashSet<String> candidates = new LinkedHashSet<String>();
    try {
        final RowMetaInterface incoming = transMeta.getPrevStepFields(stepName);
        for (final ValueMetaInterface meta : incoming.getValueMetaList()) {
            final String fieldName = meta.getName();
            // Skip the field the annotation is already attached to.
            if (!StringUtils.equals(modelAnnotation.getAnnotation().getField(), fieldName)) {
                candidates.add(fieldName);
            }
        }
    } catch (Exception e) {
        logger.warning(e.getMessage());
    }
    return candidates.toArray(new String[candidates.size()]);
}

From source file:org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.AbstractScriptableDataFactory.java

/**
 * Computes the set of data-row fields referenced by the given query: the
 * fields the scripting layer reports plus the fields the resolved query
 * itself references. Returns null if the fields cannot be determined.
 *
 * @param query the (possibly scripted) query name
 * @param parameter the current data row
 * @return ordered, duplicate-free field names, or null when unknown
 */
public final String[] getReferencedFields(final String query, final DataRow parameter) {
    try {
        final String[] scriptFields = scriptingSupport.computeAdditionalQueryFields(query, parameter);
        if (scriptFields == null) {
            return null;
        }

        final String resolvedQuery = scriptingSupport.computeQuery(query, parameter);
        if (resolvedQuery == null) {
            throw new ReportDataFactoryException("Query '" + query + "' is not recognized."); //$NON-NLS-1$ //$NON-NLS-2$
        }

        final String[] queryFields = getReferencedFieldsInternal(resolvedQuery, parameter);
        if (queryFields == null) {
            return null;
        }

        // Merge both sources, preserving first-seen order and dropping duplicates.
        final LinkedHashSet<String> merged = new LinkedHashSet<String>(Arrays.asList(queryFields));
        merged.addAll(Arrays.asList(scriptFields));
        return merged.toArray(new String[merged.size()]);
    } catch (final ReportDataFactoryException rx) {
        logger.debug("Failed to compute referenced fields", rx); // NON-NLS
        return null;
    }
}

From source file:org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.sql.SimpleSQLReportDataFactory.java

/**
 * Determines the fields referenced by the given query by letting the
 * parametrization provider rewrite it, then adds the configured user/password
 * fields and the query-limit marker. A connection opened here (when none
 * existed before) is closed again in the finally block.
 *
 * @param query the query to inspect
 * @param parameters the current data row
 * @return ordered, duplicate-free field names
 * @throws ReportDataFactoryException if the query cannot be prepared
 */
public String[] getReferencedFields(final String query, final DataRow parameters)
        throws ReportDataFactoryException {

    // Remember whether we are about to open the connection ourselves.
    final boolean isNewConnection = connection == null;
    try {
        final ParametrizationProviderFactory providerFactory = createParametrizationProviderFactory();
        final Connection activeConnection = getConnection(parameters);
        final ParametrizationProvider provider = providerFactory.create(activeConnection);
        final String resolvedQuery = computedQuery(query, parameters);
        provider.rewriteQueryForParametrization(activeConnection, resolvedQuery, parameters);

        final LinkedHashSet<String> referenced = new LinkedHashSet<String>(
                Arrays.asList(provider.getPreparedParameterNames()));
        if (userField != null) {
            referenced.add(userField);
        }
        if (passwordField != null) {
            referenced.add(passwordField);
        }
        referenced.add(DataFactory.QUERY_LIMIT);
        return referenced.toArray(new String[referenced.size()]);
    } catch (ReportDataFactoryException e) {
        logger.warn("Unable to perform cache preparation", e);
        throw e;
    } catch (SQLException e) {
        logger.warn("Unable to perform cache preparation", e);
        throw new ReportDataFactoryException("Unable to perform cache preparation", e);
    } finally {
        // Only tear down what this call set up.
        if (isNewConnection) {
            close();
        }
    }
}

From source file:org.pentaho.reporting.engine.classic.extensions.datasources.mondrian.AbstractMDXDataFactory.java

/**
 * Determines the fields referenced by the given MDX query: parameters the
 * compiler collects, parameters the parsed Mondrian query declares, plus the
 * configured JDBC-user and role fields and the query-limit marker.
 * Lazily opens the Mondrian connection when none exists; a connection opened
 * here is closed again in the finally block.
 *
 * @param queryName the query to inspect
 * @param parameters the current data row
 * @return ordered, duplicate-free field names
 * @throws ReportDataFactoryException if the connection cannot be created or
 *         the query cannot be parsed
 */
public String[] getReferencedFields(final String queryName, final DataRow parameters)
        throws ReportDataFactoryException {
    // Remember whether we are about to open the connection ourselves.
    final boolean isNewConnection = connection == null;
    try {
        if (connection == null) {
            connection = mondrianConnectionProvider.createConnection(computeProperties(parameters),
                    dataSourceProvider.getDataSource());
        }
    } catch (SQLException e) {
        logger.error(e);
        throw new ReportDataFactoryException("Failed to create DataSource (SQL Exception - error code: "
                + e.getErrorCode() + "):" + e.toString(), e);
    } catch (MondrianException e) {
        logger.error(e);
        throw new ReportDataFactoryException("Failed to create DataSource (Mondrian Exception):" + e.toString(),
                e);
    }

    try {
        // A provider may legitimately hand back null; treat that as "closed".
        if (connection == null) {
            throw new ReportDataFactoryException("Factory is closed.");
        }
        final LinkedHashSet<String> parameter = new LinkedHashSet<String>();

        // Translate the report query into MDX, collecting referenced parameters.
        final MDXCompiler compiler = new MDXCompiler(parameters, getLocale());
        final String computedQuery = computedQuery(queryName, parameters);
        final String mdxQuery = compiler.translateAndLookup(computedQuery, parameters);
        parameter.addAll(compiler.getCollectedParameter());
        // Alternatively, JNDI is possible. Maybe even more ..
        final Query query = connection.parseQuery(mdxQuery);
        final Parameter[] queryParameters = query.getParameters();
        for (int i = 0; i < queryParameters.length; i++) {
            final Parameter queryParameter = queryParameters[i];
            parameter.add(queryParameter.getName());
        }
        if (jdbcUserField != null) {
            parameter.add(jdbcUserField);
        }
        if (roleField != null) {
            parameter.add(roleField);
        }
        parameter.add(DataFactory.QUERY_LIMIT);
        return parameter.toArray(new String[parameter.size()]);
    } catch (MondrianException e) {
        throw new ReportDataFactoryException("Failed to create datasource:" + e.getLocalizedMessage(), e);
    } finally {
        // Only tear down what this call set up.
        if (isNewConnection) {
            close();
        }
    }
}

From source file:org.pentaho.reporting.engine.classic.extensions.datasources.olap4j.AbstractMDXDataFactory.java

/**
 * Determines the fields referenced by the given MDX query: parameters the
 * compiler collects, parameters the prepared OLAP statement declares, plus
 * the configured role/JDBC-user/JDBC-password fields and the query-limit
 * marker. Lazily opens the olap4j connection when none exists; a connection
 * opened here is closed again in the finally block.
 *
 * @param queryName the query to inspect
 * @param parameter the current data row
 * @return ordered, duplicate-free field names
 * @throws ReportDataFactoryException if the connection or statement fails
 */
public String[] getReferencedFields(final String queryName, final DataRow parameter)
        throws ReportDataFactoryException {
    // Remember whether we are about to open the connection ourselves.
    final boolean isNewConnection = connection == null;
    try {
        if (connection == null) {
            connection = connectionProvider.createConnection(computeJdbcUser(parameter),
                    computeJdbcPassword(parameter));
            connection.setLocale(getLocale());

            final String role = computeRole(parameter);
            if (role != null) {
                connection.setRoleName(role);
            }
        }

        final MDXCompiler compiler = new MDXCompiler(parameter, getLocale());
        final String value = computedQuery(queryName, parameter);
        final String translatedQuery = compiler.translateAndLookup(value, parameter);
        final LinkedHashSet<String> params = new LinkedHashSet<String>();
        params.addAll(compiler.getParameter());
        if (getRoleField() != null) {
            params.add(getRoleField());
        }
        if (getJdbcPasswordField() != null) {
            params.add(getJdbcPasswordField());
        }
        if (getJdbcUserField() != null) {
            params.add(getJdbcUserField());
        }

        // BUG FIX: the statement was never closed, leaking a server-side
        // resource on every call. Close it in all cases.
        final PreparedOlapStatement statement = connection.prepareOlapStatement(translatedQuery);
        try {
            final OlapParameterMetaData data = statement.getParameterMetaData();
            final int count = data.getParameterCount();
            for (int i = 0; i < count; i++) {
                final String parameterName = data.getParameterName(i + 1);
                params.add(parameterName);
            }
        } finally {
            statement.close();
        }
        params.add(DataFactory.QUERY_LIMIT);
        return params.toArray(new String[params.size()]);
    } catch (final Throwable e) {
        throw new ReportDataFactoryException("Failed to obtain a connection", e);
    } finally {
        // Only tear down what this call set up.
        if (isNewConnection) {
            close();
        }
    }
}

From source file:org.pentaho.reporting.engine.classic.extensions.datasources.pmd.SimplePmdDataFactory.java

/**
 * Determines the fields referenced by the given PMD query: the configured
 * user/password fields, the query's declared parameters, and the standard
 * query-limit and query-timeout markers. Returns null when the query cannot
 * be resolved.
 *
 * @param query the query to inspect
 * @param parameter the current data row
 * @return ordered, duplicate-free field names, or null when unresolvable
 * @throws ReportDataFactoryException if the query cannot be parsed
 */
public String[] getReferencedFields(final String query, final DataRow parameter)
        throws ReportDataFactoryException {
    final String queryRaw = computedQuery(query, parameter);
    // BUG FIX: the original tested 'query' here, so a null *computed* query
    // slipped through to parseQuery. Test the value we actually use.
    if (queryRaw == null) {
        return null;
    }

    final Query queryObject = parseQuery(queryRaw);
    final List<Parameter> queryParamValues = queryObject.getParameters();
    final LinkedHashSet<String> retval = new LinkedHashSet<String>();
    if (userField != null) {
        retval.add(userField);
    }
    if (passwordField != null) {
        retval.add(passwordField);
    }
    if (queryParamValues != null) {
        for (final Parameter p : queryParamValues) {
            retval.add(p.getName());
        }
    }
    retval.add(DataFactory.QUERY_LIMIT);
    retval.add(DataFactory.QUERY_TIMEOUT);
    return retval.toArray(new String[retval.size()]);
}

From source file:org.pentaho.reporting.ui.datasources.pmd.PmdDataSourceEditor.java

/**
 * Lists the available scripting languages: a leading null placeholder
 * ("no language") followed by every engine factory the platform exposes,
 * deduplicated while preserving discovery order.
 *
 * @return array of engine factories, first element always null
 */
private ScriptEngineFactory[] getScriptEngineLanguages() {
    final LinkedHashSet<ScriptEngineFactory> languages = new LinkedHashSet<ScriptEngineFactory>();
    languages.add(null);
    languages.addAll(new ScriptEngineManager().getEngineFactories());
    return languages.toArray(new ScriptEngineFactory[languages.size()]);
}