Example usage for java.util TreeSet toArray

Introduction

This page collects example usages of java.util.TreeSet.toArray, drawn from open source projects.

Prototype

<T> T[] toArray(T[] a);

Document

Returns an array containing all of the elements in this set; the runtime type of the returned array is that of the specified array.
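
The behavior is easy to see in isolation. Below is a minimal, self-contained sketch of the call; the class and variable names are illustrative, not taken from the examples that follow:

import java.util.TreeSet;

public class TreeSetToArrayDemo {
    public static void main(String[] args) {
        TreeSet<String> names = new TreeSet<String>();
        names.add("charlie");
        names.add("alice");
        names.add("bob");

        // A TreeSet iterates in sorted order, so the array is sorted too.
        // The runtime type of the result is String[] because a String[] was passed in.
        String[] array = names.toArray(new String[0]);
        for (String name : array) {
            System.out.println(name); // alice, bob, charlie
        }
    }
}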

Usage

From source file:com.atolcd.pentaho.di.ui.trans.steps.gisgeoprocessing.GisGeoprocessingDialog.java

private String[] getFieldsFromType(String type) {

    String[] fieldNamesFromType = null;

    try {

        // Retrieve the columns from the previous step
        RowMetaInterface r = transMeta.getPrevStepFields(stepname);
        if (r != null) {

            // Keep only the columns whose type matches
            TreeSet<String> fieldsTree = new TreeSet<String>();
            String[] fieldNames = r.getFieldNames();
            String[] fieldNamesAndTypes = r.getFieldNamesAndTypes(0);

            for (int i = 0; i < fieldNames.length; i++) {
                if (fieldNamesAndTypes[i].toLowerCase().contains(type.toLowerCase())) {
                    if (fieldNames[i] != null && !fieldNames[i].isEmpty()) {
                        fieldsTree.add(fieldNames[i]);
                    }
                }
            }

            fieldNamesFromType = fieldsTree.toArray(new String[] {});

        }

    } catch (KettleException ke) {
        new ErrorDialog(shell,
                BaseMessages.getString(PKG, "ChangeFileEncodingDialog.FailedToGetFields.DialogTitle"),
                BaseMessages.getString(PKG, "ChangeFileEncodingDialog.FailedToGetFields.DialogMessage"), ke);
    }

    return fieldNamesFromType;

}

From source file:org.auscope.gridtools.RegistryQueryClient.java

/**
 * Retrieves all codes (software packages) at a particular site.
 *
 * @param site The name of the site
 * @return An array of codes available at the site
 */
public String[] getAllCodesAtSite(String site) {
    String[] siteCodesAvail = new String[0];
    // XPath query to get codes (SoftwarePackages) available at a given site
    String xpathQuery = "//*[local-name()='Site']/child::node()" + "[contains(name(),'Name')][text()='" + site
            + "']" + "/ancestor::node()[local-name()='Site']"
            + "/descendant::node()[local-name()='SoftwarePackage']";

    // Parse the document
    NodeList siteSWPackageNodeList = turboMDSquery(xpathQuery);

    if (siteSWPackageNodeList != null) {
        TreeSet<String> myTreeSet = new TreeSet<String>();

        // Iterate through the document to get SoftwarePackage's name.
        for (int i = 0; i < siteSWPackageNodeList.getLength(); i++) {
            // Get SoftwarePackage name.
            Element siteEl = (Element) siteSWPackageNodeList.item(i);
            myTreeSet.add(getTextValue(siteEl, "Name"));
        }

        siteCodesAvail = myTreeSet.toArray(new String[myTreeSet.size()]);
    }

    return siteCodesAvail;
}
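
The examples so far show the two common sizing idioms: getFieldsFromType passes an empty array (new String[] {}), while getAllCodesAtSite presizes the argument with the set's size. Both are correct: when the argument array is large enough, toArray fills it and returns it; otherwise it allocates a new array of the same runtime type. A minimal sketch of the difference (class name illustrative):

import java.util.TreeSet;

public class ToArraySizingDemo {
    public static void main(String[] args) {
        TreeSet<String> set = new TreeSet<String>();
        set.add("b");
        set.add("a");

        // Empty array: toArray allocates a correctly sized String[] itself.
        String[] allocated = set.toArray(new String[0]);

        // Presized array: toArray fills the argument in place and returns it.
        String[] presized = set.toArray(new String[set.size()]);

        System.out.println(allocated.length + " " + presized.length); // prints: 2 2
    }
}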

From source file:org.dasein.cloud.cloudstack.network.LoadBalancers.java

private void toRule(@Nullable Node node, @Nonnull Map<String, LoadBalancer> current)
        throws InternalException, CloudException {
    NodeList attributes = node.getChildNodes();
    int publicPort = -1, privatePort = -1;
    LbAlgorithm algorithm = null;
    String publicIp = null;
    String vlanId = null;
    String ruleId = null;
    String lbName = null;
    String lbDesc = ""; // can't be null

    for (int i = 0; i < attributes.getLength(); i++) {
        Node n = attributes.item(i);
        String name = n.getNodeName().toLowerCase();
        String value;

        if (n.getChildNodes().getLength() > 0) {
            value = n.getFirstChild().getNodeValue();
        } else {
            value = null;
        }
        if (name.equals("publicip")) {
            publicIp = value;
        } else if (name.equals("networkid")) {
            vlanId = value;
        } else if (name.equals("id")) {
            ruleId = value;
        } else if (name.equals("publicport") && value != null) {
            publicPort = Integer.parseInt(value);
        } else if (name.equals("privateport") && value != null) {
            privatePort = Integer.parseInt(value);
        } else if (name.equals("algorithm")) {
            if (value == null || value.equals("roundrobin")) {
                algorithm = LbAlgorithm.ROUND_ROBIN;
            } else if (value.equals("leastconn")) {
                algorithm = LbAlgorithm.LEAST_CONN;
            } else if (value.equals("")) {
                algorithm = LbAlgorithm.SOURCE;
            } else {
                algorithm = LbAlgorithm.ROUND_ROBIN;
            }
        } else if (name.equals("name")) {
            lbName = value;
        } else if (name.equals("description")) {
            lbDesc = value;
        }
    }
    LbListener listener = LbListener.getInstance(algorithm, LbPersistence.NONE, LbProtocol.RAW_TCP, publicPort,
            privatePort);
    Collection<String> serverIds = getServersAt(ruleId);

    if (current.containsKey(publicIp)) {
        LoadBalancer lb = current.get(publicIp);

        @SuppressWarnings("deprecation")
        String[] currentIds = lb.getProviderServerIds();
        LbListener[] listeners = lb.getListeners();

        // Merge this rule's public port into the load balancer's existing port list.
        Set<Integer> ports = new TreeSet<Integer>();

        for (int port : lb.getPublicPorts()) {
            ports.add(port);
        }
        ports.add(publicPort);

        int[] portList = new int[ports.size()];
        int i = 0;

        for (Integer p : ports) {
            portList[i++] = p;
        }
        //noinspection deprecation
        lb.setPublicPorts(portList);

        boolean there = false;

        for (LbListener l : listeners) {
            if (l.getAlgorithm().equals(listener.getAlgorithm())) {
                if (l.getNetworkProtocol().equals(listener.getNetworkProtocol())) {
                    if (l.getPublicPort() == listener.getPublicPort()) {
                        if (l.getPrivatePort() == listener.getPrivatePort()) {
                            there = true;
                            break;
                        }
                    }
                }
            }
        }
        if (!there) {
            lb.withListeners(listener);
        }
        // Merge this rule's server IDs into the load balancer's existing server ID list.
        TreeSet<String> newIds = new TreeSet<String>();

        Collections.addAll(newIds, currentIds);
        newIds.addAll(serverIds);
        //noinspection deprecation
        lb.setProviderServerIds(newIds.toArray(new String[newIds.size()]));
        //noinspection deprecation
        lb.setName(lbName);
        //noinspection deprecation
        lb.setDescription(lbDesc);
    } else {
        Collection<DataCenter> dcs = getProvider().getDataCenterServices()
                .listDataCenters(getProvider().getContext().getRegionId());
        String[] ids = new String[dcs.size()];
        int i = 0;

        for (DataCenter dc : dcs) {
            ids[i++] = dc.getProviderDataCenterId();
        }

        LoadBalancer lb = LoadBalancer.getInstance(getContext().getAccountNumber(), getContext().getRegionId(),
                publicIp, LoadBalancerState.ACTIVE, lbName, lbDesc, LoadBalancerAddressType.IP, publicIp,
                publicPort).withListeners(listener).operatingIn(ids);
        lb.forVlan(vlanId);
        //noinspection deprecation
        lb.setProviderServerIds(serverIds.toArray(new String[serverIds.size()]));
        current.put(publicIp, lb);
    }
}

From source file:com.atolcd.pentaho.di.ui.trans.steps.giscoordinatetransformation.GisCoordinateTransformationDialog.java

private void loadFields() {

    if (!gotPreviousFields) {

        try {

            String geometryField = wGeometryField.getText();
            wGeometryField.removeAll();

            // Retrieve the columns from the previous step
            // and populate the combo boxes
            RowMetaInterface r = transMeta.getPrevStepFields(stepname);
            if (r != null) {

                // Keep only the geometry-typed columns
                TreeSet<String> textFieldsTree = new TreeSet<String>();
                String[] fieldNames = r.getFieldNames();
                String[] fieldNamesAndTypes = r.getFieldNamesAndTypes(0);

                for (int i = 0; i < fieldNames.length; i++) {
                    if (fieldNamesAndTypes[i].toLowerCase().contains("geometry")) {
                        if (fieldNames[i] != null && !fieldNames[i].isEmpty()) {
                            textFieldsTree.add(fieldNames[i]);
                        }
                    }
                }

                String[] textFields = textFieldsTree.toArray(new String[] {});

                wGeometryField.setItems(textFields);

            }

            if (geometryField != null) {
                wGeometryField.setText(geometryField);
            }

        } catch (KettleException ke) {
            new ErrorDialog(shell,
                    BaseMessages.getString(PKG, "ChangeFileEncodingDialog.FailedToGetFields.DialogTitle"), //$NON-NLS-1$
                    BaseMessages.getString(PKG, "ChangeFileEncodingDialog.FailedToGetFields.DialogMessage"), //$NON-NLS-1$
                    ke);
        }

        gotPreviousFields = true;
    }

}

From source file:org.commonjava.maven.galley.filearc.internal.ZipListing.java

@Override
public ListingResult call() {
    final File src = getArchiveFile(resource.getLocationUri());
    if (!src.canRead() || src.isDirectory()) {
        return null;
    }

    final boolean isJar = isJar(resource.getLocationUri());

    final TreeSet<String> filenames = new TreeSet<String>();

    ZipFile zf = null;
    try {
        if (isJar) {
            zf = new JarFile(src);
        } else {
            zf = new ZipFile(src);
        }

        final String path = resource.getPath();
        final int pathLen = path.length();
        for (final ZipEntry entry : Collections.list(zf.entries())) {
            String name = entry.getName();
            if (name.startsWith(path)) {
                name = name.substring(pathLen);

                if (name.startsWith("/") && name.length() > 1) {
                    name = name.substring(1);

                    if (name.indexOf("/") < 0) {
                        filenames.add(name);
                    }
                }
            }
        }

    } catch (final IOException e) {
        error = new TransferException("Failed to get listing for: %s to: %s. Reason: %s", e, resource,
                e.getMessage());
    } finally {
        if (zf != null) {
            try {
                zf.close();
            } catch (final IOException e) {
                // ignore failures while closing the archive
            }
        }
    }

    if (!filenames.isEmpty()) {
        OutputStream stream = null;
        try {
            stream = target.openOutputStream(TransferOperation.DOWNLOAD);
            stream.write(join(filenames, "\n").getBytes("UTF-8"));

            return new ListingResult(resource, filenames.toArray(new String[filenames.size()]));
        } catch (final IOException e) {
            error = new TransferException("Failed to write listing to: %s. Reason: %s", e, target,
                    e.getMessage());
        } finally {
            closeQuietly(stream);
        }
    }

    return null;
}

From source file:org.jumpmind.symmetric.util.SnapshotUtil.java

public static File createSnapshot(ISymmetricEngine engine) {

    String dirName = engine.getEngineName().replaceAll(" ", "-") + "-"
            + new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());

    IParameterService parameterService = engine.getParameterService();
    File tmpDir = new File(parameterService.getTempDirectory(), dirName);
    tmpDir.mkdirs();

    File logDir = null;

    String parameterizedLogDir = parameterService.getString("server.log.dir");
    if (isNotBlank(parameterizedLogDir)) {
        logDir = new File(parameterizedLogDir);
    }

    if (logDir != null && logDir.exists()) {
        log.info("Using server.log.dir setting as the location of the log files");
    } else {
        logDir = new File("logs");

        if (!logDir.exists()) {
            File file = findSymmetricLogFile();
            if (file != null) {
                logDir = file.getParentFile();
            }
        }

        if (!logDir.exists()) {
            logDir = new File("../logs");
        }

        if (!logDir.exists()) {
            logDir = new File("target");
        }

        if (logDir.exists()) {
            File[] files = logDir.listFiles();
            if (files != null) {
                for (File file : files) {
                    if (file.getName().toLowerCase().endsWith(".log")) {
                        try {
                            FileUtils.copyFileToDirectory(file, tmpDir);
                        } catch (IOException e) {
                            log.warn("Failed to copy " + file.getName() + " to the snapshot directory", e);
                        }
                    }
                }
            }
        }

    }

    ITriggerRouterService triggerRouterService = engine.getTriggerRouterService();
    List<TriggerHistory> triggerHistories = triggerRouterService.getActiveTriggerHistories();
    TreeSet<Table> tables = new TreeSet<Table>();
    for (TriggerHistory triggerHistory : triggerHistories) {
        Table table = engine.getDatabasePlatform().getTableFromCache(triggerHistory.getSourceCatalogName(),
                triggerHistory.getSourceSchemaName(), triggerHistory.getSourceTableName(), false);
        if (table != null && !table.getName().toUpperCase()
                .startsWith(engine.getSymmetricDialect().getTablePrefix().toUpperCase())) {
            tables.add(table);
        }
    }

    List<Trigger> triggers = triggerRouterService.getTriggers(true);
    for (Trigger trigger : triggers) {
        Table table = engine.getDatabasePlatform().getTableFromCache(trigger.getSourceCatalogName(),
                trigger.getSourceSchemaName(), trigger.getSourceTableName(), false);
        if (table != null) {
            tables.add(table);
        }
    }

    FileWriter fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "config-export.csv"));
        engine.getDataExtractorService().extractConfigurationStandalone(engine.getNodeService().findIdentity(),
                fwriter, TableConstants.SYM_NODE, TableConstants.SYM_NODE_SECURITY,
                TableConstants.SYM_NODE_IDENTITY, TableConstants.SYM_NODE_HOST,
                TableConstants.SYM_NODE_CHANNEL_CTL, TableConstants.SYM_CONSOLE_USER);
    } catch (IOException e) {
        log.warn("Failed to export symmetric configuration", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }

    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "table-definitions.xml"));
        DbExport export = new DbExport(engine.getDatabasePlatform());
        export.setFormat(Format.XML);
        export.setNoData(true);
        export.exportTables(fos, tables.toArray(new Table[tables.size()]));
    } catch (IOException e) {
        log.warn("Failed to export table definitions", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    String tablePrefix = engine.getTablePrefix();

    DbExport export = new DbExport(engine.getDatabasePlatform());
    export.setFormat(Format.CSV);
    export.setNoCreateInfo(true);

    extract(export, new File(tmpDir, "identity.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_IDENTITY));

    extract(export, new File(tmpDir, "node.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE));

    extract(export, new File(tmpDir, "nodesecurity.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_SECURITY));

    extract(export, new File(tmpDir, "nodehost.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_HOST));

    extract(export, new File(tmpDir, "triggerhist.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_TRIGGER_HIST));

    extract(export, new File(tmpDir, "lock.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_LOCK));

    extract(export, new File(tmpDir, "nodecommunication.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_NODE_COMMUNICATION));

    extract(export, 5000, new File(tmpDir, "outgoingbatch.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_OUTGOING_BATCH));

    extract(export, 5000, new File(tmpDir, "incomingbatch.csv"),
            TableConstants.getTableName(tablePrefix, TableConstants.SYM_INCOMING_BATCH));

    final int THREAD_INDENT_SPACE = 50;
    fwriter = null;
    try {
        fwriter = new FileWriter(new File(tmpDir, "threads.txt"));
        ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
        long[] threadIds = threadBean.getAllThreadIds();
        for (long l : threadIds) {
            ThreadInfo info = threadBean.getThreadInfo(l, 100);
            if (info != null) {
                String threadName = info.getThreadName();
                fwriter.append(StringUtils.rightPad(threadName, THREAD_INDENT_SPACE));
                StackTraceElement[] trace = info.getStackTrace();
                boolean first = true;
                for (StackTraceElement stackTraceElement : trace) {
                    if (!first) {
                        fwriter.append(StringUtils.rightPad("", THREAD_INDENT_SPACE));
                    } else {
                        first = false;
                    }
                    fwriter.append(stackTraceElement.getClassName());
                    fwriter.append(".");
                    fwriter.append(stackTraceElement.getMethodName());
                    fwriter.append("()");
                    int lineNumber = stackTraceElement.getLineNumber();
                    if (lineNumber > 0) {
                        fwriter.append(": ");
                        fwriter.append(Integer.toString(lineNumber));
                    }
                    fwriter.append("\n");
                }
                fwriter.append("\n");
            }
        }
    } catch (IOException e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fwriter);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters.properties"));
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        SortedProperties parameters = new SortedProperties();
        parameters.putAll(effectiveParameters);
        parameters.remove("db.password");
        parameters.store(fos, "parameters.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameter information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "parameters-changed.properties"));
        Properties defaultParameters = new Properties();
        InputStream in = SnapshotUtil.class.getResourceAsStream("/symmetric-default.properties");
        defaultParameters.load(in);
        IOUtils.closeQuietly(in);
        in = SnapshotUtil.class.getResourceAsStream("/symmetric-console-default.properties");
        if (in != null) {
            defaultParameters.load(in);
            IOUtils.closeQuietly(in);
        }
        Properties effectiveParameters = engine.getParameterService().getAllParameters();
        Properties changedParameters = new SortedProperties();
        Map<String, ParameterMetaData> parameters = ParameterConstants.getParameterMetaData();
        for (String key : parameters.keySet()) {
            String defaultValue = defaultParameters.getProperty(key);
            String currentValue = effectiveParameters.getProperty(key);
            if (defaultValue == null && currentValue != null
                    || (defaultValue != null && !defaultValue.equals(currentValue))) {
                changedParameters.put(key, currentValue == null ? "" : currentValue);
            }
        }
        changedParameters.remove("db.password");
        changedParameters.store(fos, "parameters-changed.properties");
    } catch (IOException e) {
        log.warn("Failed to export parameters-changed information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    writeRuntimeStats(engine, tmpDir);
    writeJobsStats(engine, tmpDir);

    if ("true".equals(System.getProperty(SystemConstants.SYSPROP_STANDALONE_WEB))) {
        writeDirectoryListing(engine, tmpDir);
    }

    fos = null;
    try {
        fos = new FileOutputStream(new File(tmpDir, "system.properties"));
        SortedProperties props = new SortedProperties();
        props.putAll(System.getProperties());
        props.store(fos, "system.properties");
    } catch (IOException e) {
        log.warn("Failed to export thread information", e);
    } finally {
        IOUtils.closeQuietly(fos);
    }

    try {
        File jarFile = new File(getSnapshotDirectory(engine), tmpDir.getName() + ".zip");
        JarBuilder builder = new JarBuilder(tmpDir, jarFile, new File[] { tmpDir }, Version.version());
        builder.build();
        FileUtils.deleteDirectory(tmpDir);
        return jarFile;
    } catch (IOException e) {
        throw new IoException("Failed to package snapshot files into archive", e);
    }
}

From source file:org.apache.hadoop.hbase.client.HBaseFsck.java

/**
 * Returns descriptors for tables whose metadata has not been modified within
 * the last {@code timelag} milliseconds. A table is a candidate to be returned
 * only if none of its REGIONINFO_QUALIFIER, SERVER_QUALIFIER,
 * STARTCODE_QUALIFIER, SPLITA_QUALIFIER, or SPLITB_QUALIFIER entries have
 * changed within that window.
 * @param numSkipped incremented once for each table skipped because it is still in flux
 * @return tables that have not been modified recently
 */
HTableDescriptor[] getTables(AtomicInteger numSkipped) {
    TreeSet<HTableDescriptor> uniqueTables = new TreeSet<HTableDescriptor>();
    long now = System.currentTimeMillis();

    for (HbckInfo hbi : regionInfo.values()) {
        MetaEntry info = hbi.metaEntry;

        // if the start key is zero, then we have found the first region of a table.
        // pick only those tables that were not modified in the last few milliseconds.
        if (info != null && info.getStartKey().length == 0) {
            if (info.modTime + timelag < now) {
                uniqueTables.add(info.getTableDesc());
            } else {
                numSkipped.incrementAndGet(); // one more in-flux table
            }
        }
    }
    return uniqueTables.toArray(new HTableDescriptor[uniqueTables.size()]);
}

From source file:org.opencastproject.capture.impl.SchedulerImpl.java

/**
 * Sets this machine's schedule based on the iCal data passed in as a parameter. Note that this call wipes all
 * currently scheduled captures and then schedules based on the new data. Also note that any files which are in the
 * way when this call tries to save the iCal attachments are overwritten without prompting.
 *
 * @param sched
 *          The scheduler to schedule the new events on
 * @param newCal
 *          The new {@code Calendar} data
 */
private synchronized void setCaptureSchedule(Scheduler sched, Calendar newCal) {
    log.debug("setCaptureSchedule(sched, newCal)");

    try {
        Map<Long, String> scheduledEventStarts = new Hashtable<Long, String>();
        Map<String, Date> scheduledEventEnds = new Hashtable<String, Date>();
        // Sort the events into chronological starting order
        TreeSet<VEvent> list = new TreeSet<VEvent>(new VEventStartTimeComparator());
        list.addAll(newCal.getComponents(Component.VEVENT));
        VEvent[] startAry = list.toArray(new VEvent[list.size()]);

        for (int i = 0; i < startAry.length; i++) {
            Event event = new Event(startAry[i], captureAgent, this);
            if (!event.isValidEvent()) {
                continue;
            }

            boolean skipOnError = Boolean
                    .valueOf(configService.getItem(CaptureParameters.CAPTURE_SCHEDULE_DROP_EVENT_IF_CONFLICT));
            int bufferMinutes = 1;
            if (configService.getItem(CaptureParameters.CAPTURE_SCHEDULE_INTEREVENT_BUFFERTIME) != null) {
                try {
                    bufferMinutes = Integer.valueOf(
                            configService.getItem(CaptureParameters.CAPTURE_SCHEDULE_INTEREVENT_BUFFERTIME));
                } catch (NumberFormatException e) {
                    log.info("Unable to parse value for {}, defaulting to 1 minute",
                            CaptureParameters.CAPTURE_SCHEDULE_INTEREVENT_BUFFERTIME);
                }
            }
            long bufferTime = bufferMinutes * CaptureParameters.MINUTES * CaptureParameters.MILLISECONDS;

            // If there could be an event scheduled before this one
            if (i > 0 && startAry[i - 1] != null && scheduledEventEnds.size() > 0) {
                int j = i - 1;
                String otherUID = null;
                // Search through the list of captures which could possibly have been scheduled
                // checking to see which one is closest to us
                while (j > 0) {
                    String testUID = startAry[j].getUid().getValue();
                    if (scheduledEventEnds.containsKey(testUID)) {
                        otherUID = testUID;
                        break;
                    }
                    j--;
                }
                // If we found something
                if (otherUID != null) {
                    Date lastEndDate = scheduledEventEnds.get(otherUID);
                    if (event.getStart().before(lastEndDate)) {
                        if (skipOnError) {
                            log.warn("Start time for event {} is before end time of event {}!  Skipping...",
                                    event.getUID(), otherUID);
                            continue;
                        } else {
                            log.warn(
                                    "Start time for event {} is before end time of event {}!  Shortening to fit...",
                                    event.getUID(), otherUID);
                            event.setStart(new Date(lastEndDate.getTime() + bufferTime));
                        }
                    } else if (ONE_MINUTE_DURATION.compareTo(new Dur(lastEndDate, event.getStart())) >= 0) {
                        if (skipOnError) {
                            log.warn("Start time for event {} is within one minute of event {}!  Skipping...",
                                    event.getUID(), otherUID);
                            continue;
                        } else {
                            log.warn(
                                    "Start time for event {} is within one minute of event {}!  Shortening to fit...",
                                    event.getUID(), otherUID);
                            event.setStart(new Date(lastEndDate.getTime() + bufferTime));
                        }
                    }
                }
            }

            if (!event.isValidEvent()) {
                continue;
            }

            // Get the cron expression and make sure it doesn't conflict with any existing captures
            // Note that this means the order in which the scheduled events appear in the source iCal makes a functional
            // difference!
            String conflict = scheduledEventStarts.get(event.getStart().getTime());
            if (conflict != null) {
                // This case should have disappeared with MH-1253, but I'm leaving it here anyway just in case
                log.warn("Unable to schedule event {} because its starting time coinsides with event {}!",
                        event.getUID(), conflict);
                continue;
            }

            PropertyList attachments = event.getProperties(Property.ATTACH);
            scheduleEvent(sched, event, attachments);
            scheduledEventStarts.put(event.getStart().getTime(), event.getUID());
            scheduledEventEnds.put(event.getUID(), event.getEnd());
        }
    } catch (NullPointerException e) {
        log.error("Invalid calendar data, one of the start or end times is incorrect: {}.", e);
    } catch (ParseException e) {
        log.error("Parsing error: {}.", e);
    } catch (org.opencastproject.util.ConfigurationException e) {
        log.error("Configuration exception: {}.", e);
    } catch (MediaPackageException e) {
        log.error("MediaPackageException exception: {}.", e);
    } catch (MalformedURLException e) {
        log.error("MalformedURLException: {}.", e);
    }
}
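
Here the TreeSet is built with an explicit Comparator (VEventStartTimeComparator), so toArray yields the events sorted by start time rather than by natural ordering. A minimal sketch of the same pattern, with an illustrative comparator and class name; note that equal-length strings would be treated as duplicates by this comparator:

import java.util.Comparator;
import java.util.TreeSet;

public class ComparatorOrderDemo {
    public static void main(String[] args) {
        // The comparator defines both the sort order and what counts as a duplicate.
        TreeSet<String> byLength = new TreeSet<String>(Comparator.comparingInt(String::length));
        byLength.add("pear");
        byLength.add("fig");
        byLength.add("banana");

        // toArray captures the comparator-defined order in a plain array.
        String[] snapshot = byLength.toArray(new String[0]);
        System.out.println(String.join(", ", snapshot)); // fig, pear, banana
    }
}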

From source file:org.apache.fop.complexscripts.fonts.GlyphTable.java

/**
 * Assembles an ordered array of lookup table use specifications for the specified
 * features and candidate lookups. The order of the array follows the order of the
 * applicable lookup list.
 * @param features array of feature identifiers to apply
 * @param lookups a mapping from lookup specifications to lists of lookup tables from which to select lookup tables according to the specified features
 * @return ordered array of assembled lookup table use specifications
 */
public UseSpec[] assembleLookups(String[] features, Map/*<LookupSpec,List<LookupTable>>*/ lookups) {
    TreeSet/*<UseSpec>*/ uss = new TreeSet/*<UseSpec>*/();
    for (int i = 0, n = features.length; i < n; i++) {
        String feature = features[i];
        for (Iterator it = lookups.entrySet().iterator(); it.hasNext();) {
            Map.Entry/*<LookupSpec,List<LookupTable>>*/ e = (Map.Entry/*<LookupSpec,List<LookupTable>>*/) it
                    .next();
            LookupSpec ls = (LookupSpec) e.getKey();
            if (ls.getFeature().equals(feature)) {
                List/*<LookupTable>*/ ltl = (List/*<LookupTable>*/) e.getValue();
                if (ltl != null) {
                    for (Iterator ltit = ltl.iterator(); ltit.hasNext();) {
                        LookupTable lt = (LookupTable) ltit.next();
                        uss.add(new UseSpec(lt, feature));
                    }
                }
            }
        }
    }
    return (UseSpec[]) uss.toArray(new UseSpec[uss.size()]);
}

From source file:org.apache.hive.beeline.BeeLine.java

public String[] getMetadataMethodNames() {
    try {
        TreeSet<String> mnames = new TreeSet<String>();
        Method[] m = DatabaseMetaData.class.getDeclaredMethods();
        for (int i = 0; m != null && i < m.length; i++) {
            mnames.add(m[i].getName());
        }
        return mnames.toArray(new String[0]);
    } catch (Throwable t) {
        return new String[0];
    }
}