Example usage for java.util Dictionary get

List of usage examples for java.util Dictionary get

Introduction

On this page you can find example usages of java.util.Dictionary.get().

Prototype

public abstract V get(Object key);

Document

Returns the value to which the key is mapped in this dictionary, or null if the key is not mapped to any value in this dictionary.

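A minimal, self-contained sketch of the call (assuming only plain JDK classes; java.util.Hashtable is used as the concrete Dictionary implementation, and the class and key names are chosen purely for illustration):

import java.util.Dictionary;
import java.util.Hashtable;

public class DictionaryGetExample {
    public static void main(String[] args) {
        // Hashtable extends Dictionary, so it can be used through the Dictionary API
        Dictionary<String, String> dict = new Hashtable<String, String>();
        dict.put("service.pid", "org.example.MyService");

        // get() returns the mapped value, or null when the key has no mapping
        String pid = dict.get("service.pid");     // "org.example.MyService"
        String missing = dict.get("no.such.key"); // null

        System.out.println(pid);
        System.out.println(missing);
    }
}
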
Usage

From source file:org.apache.felix.webconsole.internal.compendium.ConfigManager.java

private void configForm(JSONWriter jsonWriter, String inputPid, Configuration config, String pidFilter,
        String locale) throws JSONException {

    jsonWriter.key(ConfigManager.SERVICE_PID);
    jsonWriter.value(inputPid);

    if (pidFilter != null) {
        jsonWriter.key(PID_FILTER);
        jsonWriter.value(pidFilter);
    }

    Dictionary propsDictionary = null;
    ObjectClassDefinition classDef;
    if (config != null) {
        propsDictionary = config.getProperties();
        classDef = this.getObjectClassDefinition(config, locale);
    } else {
        classDef = this.getObjectClassDefinition(inputPid, locale);
    }

    propsDictionary = this.mergeWithMetaType(propsDictionary, classDef, jsonWriter);

    if (propsDictionary != null) {

        jsonWriter.key("title");
        jsonWriter.value(inputPid);
        jsonWriter.key("description");
        jsonWriter.value(
                "Please enter configuration properties for this configuration in the field below. This configuration has no associated description");

        jsonWriter.key("propertylist");
        jsonWriter.value("properties");

        jsonWriter.key("properties");
        jsonWriter.object();
        for (Enumeration propEnum = propsDictionary.keys(); propEnum.hasMoreElements();) {
            Object nextElement = propEnum.nextElement();

            // ignore well known special properties
            if (!nextElement.equals(Constants.SERVICE_PID) && !nextElement.equals(Constants.SERVICE_DESCRIPTION)
                    && !nextElement.equals(Constants.SERVICE_ID)
                    && !nextElement.equals(Constants.SERVICE_RANKING)
                    && !nextElement.equals(ConfigurationAdmin.SERVICE_FACTORYPID)
                    && !nextElement.equals(Constants.SERVICE_VENDOR)
                    && !nextElement.equals(ConfigurationAdmin.SERVICE_BUNDLELOCATION)) {
                jsonWriter.key(String.valueOf(nextElement));
                jsonWriter.value(propsDictionary.get(nextElement));
            }
        }
        jsonWriter.endObject();

    }

    if (config != null) {
        this.addConfigurationInfo(config, jsonWriter, locale);
    }
}

From source file:org.opencastproject.capture.impl.SchedulerImpl.java

/**
 * Updates the scheduler with new configuration data. {@inheritDoc}
 * @see org.osgi.service.cm.ManagedService#updated(Dictionary)
 */
//@Override
@SuppressWarnings("unchecked")
private void updated(Dictionary properties) throws ConfigurationException {
    log.debug("Scheduler updated.");

    if (properties == null) {
        log.debug("Null properties in updated!");
        throw new ConfigurationException("Null properties in updated!", "null");
    } else if (properties.size() == 0) {
        log.debug("0 size properties in updated!");
        throw new ConfigurationException(
                "Properties object empty in updated, this should be a scheduler configuration!", "empty");
    }

    // Clone the properties. Note that we can't use serialization to do this because the Dictionary above is actually a
    // org.apache.felix.cm.impl.CaseInsensitiveDictionary
    schedProps = new Properties();
    Enumeration<String> keys = properties.keys();
    while (keys.hasMoreElements()) {
        String key = keys.nextElement();
        schedProps.put(key, properties.get(key));
    }

    try {
        localCalendarCacheURL = new File(configService.getItem(CaptureParameters.CAPTURE_SCHEDULE_CACHE_URL))
                .toURI().toURL();
    } catch (NullPointerException e) {
        log.warn("Invalid location specified for {} unable to cache scheduling data.",
                CaptureParameters.CAPTURE_SCHEDULE_CACHE_URL);
    } catch (MalformedURLException e) {
        log.warn("Invalid location specified for {} unable to cache scheduling data.",
                CaptureParameters.CAPTURE_SCHEDULE_CACHE_URL);
    }
    updateCalendar();
}

From source file:org.energy_home.jemma.ah.internal.hac.lib.HacService.java

/**
 * Create an appliance given its type (i.e. factory type) and properties.
 * The props dictionary may be filled with the properties that have to be
 *            assigned to the new appliance.
 * 
 * @param factoryPid
 *            The type of the new appliance
 * @param props
 *            The properties of the new appliance
 * @return the pid of the newly created appliance.
 */

protected String createApplianceByFactory(String factoryPid, Dictionary props) throws HacException {
    synchronized (lockHacService) {
        ApplianceFactory applianceFactoryService = this.getApplianceFactory(factoryPid);
        if (applianceFactoryService == null)
            throw new HacException("unable to find an appliance factory for type '" + factoryPid + "'");

        // check if the property dictionary contains the ah.app.name
        // property
        String friendlyName = applianceFactoryService.getDescriptor().getFriendlyName();

        String pid;

        if (friendlyName != null) {
            pid = generateUniquePid(friendlyName);
        } else {
            pid = this.generatePid();
        }

        if ((props.get(IAppliance.APPLIANCE_NAME_PROPERTY) == null) && (friendlyName != null)) {
            String name = createUniqueName(friendlyName);
            props.put(IAppliance.APPLIANCE_NAME_PROPERTY, name);
        }

        createConfiguration(factoryPid, pid, props);
        return pid;
    }
}
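
Here get() serves as a presence check before a default is supplied. A minimal sketch of that default-if-absent idiom with Dictionary (the key and default value are hypothetical):

import java.util.Dictionary;
import java.util.Hashtable;

public class DictionaryDefaultExample {
    // Puts a default value only when the key is not yet mapped
    static void putIfAbsent(Dictionary<String, String> props, String key, String defaultValue) {
        if (props.get(key) == null) {
            props.put(key, defaultValue);
        }
    }

    public static void main(String[] args) {
        Dictionary<String, String> props = new Hashtable<String, String>();
        putIfAbsent(props, "ah.app.name", "My Appliance"); // added: key was absent
        putIfAbsent(props, "ah.app.name", "Other Name");   // ignored: key already mapped
        System.out.println(props.get("ah.app.name"));      // prints "My Appliance"
    }
}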

From source file:org.opencastproject.serviceregistry.impl.ServiceRegistryJpaImpl.java

/**
 * {@inheritDoc}
 * 
 * @see org.osgi.service.cm.ManagedService#updated(java.util.Dictionary)
 */
@Override
@SuppressWarnings("rawtypes")
public void updated(Dictionary properties) throws ConfigurationException {
    String maxAttempts = StringUtils.trimToNull((String) properties.get(MAX_ATTEMPTS_CONFIG_KEY));
    if (maxAttempts != null) {
        try {
            maxAttemptsBeforeErrorState = Integer.parseInt(maxAttempts);
            logger.info("Set max attempts before error state to {}", maxAttempts);
        } catch (NumberFormatException e) {
            logger.warn("Can not set max attempts before error state to {}. {} must be an integer", maxAttempts,
                    MAX_ATTEMPTS_CONFIG_KEY);
        }
    }

    long dispatchInterval = DEFAULT_DISPATCH_INTERVAL;
    String dispatchIntervalString = StringUtils.trimToNull((String) properties.get(OPT_DISPATCHINTERVAL));
    if (StringUtils.isNotBlank(dispatchIntervalString)) {
        try {
            dispatchInterval = Long.parseLong(dispatchIntervalString);
        } catch (Exception e) {
            logger.warn("Dispatch interval '{}' is malformed, setting to {}", dispatchIntervalString,
                    MIN_DISPATCH_INTERVAL);
            dispatchInterval = MIN_DISPATCH_INTERVAL;
        }
        if (dispatchInterval == 0) {
            logger.info("Dispatching disabled");
        } else if (dispatchInterval < MIN_DISPATCH_INTERVAL) {
            logger.warn("Dispatch interval {} ms too low, adjusting to {}", dispatchInterval,
                    MIN_DISPATCH_INTERVAL);
            dispatchInterval = MIN_DISPATCH_INTERVAL;
        } else {
            logger.info("Dispatch interval set to {} ms", dispatchInterval);
        }
    }

    long heartbeatInterval = DEFAULT_HEART_BEAT;
    String heartbeatIntervalString = StringUtils.trimToNull((String) properties.get(OPT_HEARTBEATINTERVAL));
    if (StringUtils.isNotBlank(heartbeatIntervalString)) {
        try {
            heartbeatInterval = Long.parseLong(heartbeatIntervalString);
        } catch (Exception e) {
            logger.warn("Heartbeat interval '{}' is malformed, setting to {}", heartbeatIntervalString,
                    DEFAULT_HEART_BEAT);
            heartbeatInterval = DEFAULT_HEART_BEAT;
        }
        if (heartbeatInterval == 0) {
            logger.info("Heartbeat disabled");
        } else if (heartbeatInterval < 0) {
            logger.warn("Heartbeat interval {} minutes too low, adjusting to {}", heartbeatInterval,
                    DEFAULT_HEART_BEAT);
            heartbeatInterval = DEFAULT_HEART_BEAT;
        } else {
            logger.info("Dispatch interval set to {} minutes", heartbeatInterval);
        }
    }

    // Stop the current scheduled executors so we can configure new ones
    if (scheduledExecutor != null) {
        scheduledExecutor.shutdown();
        scheduledExecutor = Executors.newScheduledThreadPool(2);
    }

    // Schedule the service heartbeat if the interval is > 0
    if (heartbeatInterval > 0) {
        logger.debug("Starting service heartbeat at a custom interval of {}s", heartbeatInterval);
        scheduledExecutor.scheduleWithFixedDelay(new JobProducerHearbeat(), heartbeatInterval,
                heartbeatInterval, TimeUnit.SECONDS);
    }

    // Schedule the job dispatching.
    if (dispatchInterval > 0) {
        logger.debug("Starting job dispatching at a custom interval of {}s", DEFAULT_DISPATCH_INTERVAL / 1000);
        scheduledExecutor.scheduleWithFixedDelay(new JobDispatcher(), dispatchInterval, dispatchInterval,
                TimeUnit.MILLISECONDS);
    }
}
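
This managed service reads string values with get(), trims them, and parses them with a fallback on bad input. The same read-then-parse pattern, reduced to plain JDK types (the key names and defaults are hypothetical):

import java.util.Dictionary;
import java.util.Hashtable;

public class DictionaryParseExample {
    // Reads a numeric configuration value via get(), falling back to a default
    // when the key is absent or the value is not a valid number
    static long getLong(Dictionary<String, ?> properties, String key, long defaultValue) {
        Object raw = properties.get(key);
        if (raw == null) {
            return defaultValue;
        }
        try {
            return Long.parseLong(raw.toString().trim());
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }

    public static void main(String[] args) {
        Dictionary<String, String> properties = new Hashtable<String, String>();
        properties.put("dispatch.interval", "5000");
        System.out.println(getLong(properties, "dispatch.interval", 1000L)); // 5000
        System.out.println(getLong(properties, "heartbeat.interval", 60L));  // 60 (key absent)
    }
}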

From source file:org.energy_home.jemma.ah.internal.hac.lib.HacService.java

public void createAppliance(String appliancePid, Dictionary props) throws HacException {
    if (appliancePid.equals(IAppliancesProxy.PROXY_APPLIANCE_PID))
        throw new IllegalArgumentException("Appliances proxy appliance cannot be created!");
    synchronized (lockHacService) {
        IManagedAppliance managedAppliance = (IManagedAppliance) pid2appliance.get(appliancePid);
        if (managedAppliance != null)
            throw new HacException("appliance " + appliancePid + " already exists");

        Configuration c;
        try {
            c = this.getApplianceCAConfiguration(appliancePid);
            if (c != null) {
                throw new HacException("appliance " + appliancePid + " already exists");
            }

            String factoryPid = (String) props.get(IAppliance.APPLIANCE_TYPE_PROPERTY);
            if (factoryPid != null) {
                c = this.configAdmin.createFactoryConfiguration(factoryPid, null);
                props.put("appliance.pid", appliancePid);
                LOG.debug("created factory configuration for appliance.pid " + appliancePid);
            } else {
                c = this.configAdmin.getConfiguration(appliancePid);
                props.put("appliance.pid", appliancePid);
                LOG.debug("created factory configuration for appliance.pid " + appliancePid);
            }

            c.update(props);
        } catch (Exception e) {
            LOG.debug(e.getMessage());
            throw new HacException(e.getMessage());
        }
    }
}

From source file:org.energy_home.jemma.ah.internal.greenathome.GreenathomeAppliance.java

public void updateAppliance(Dictionary props) throws ApplianceException {

    log.debug("updateAppliance");
    String appliancePid = (String) props.get("appliance.pid");
    if (appliancePid == null)
        throw new ApplianceException("appliance.pid is null");
    synchronized (lockGatH) {
        if (hacService != null) {
            try {
                // !!! Energy@home webui compatibility
                String[] ids = getDeviceIds(appliancePid);
                appliancePid = ids[0];
                Integer endPointId = new Integer(ids[1]);
                IAppliance appliance = greenathomeEndPoint.getPeerAppliance(appliancePid);
                props.put(IAppliance.APPLIANCE_TYPE_PROPERTY, appliance.getDescriptor().getType());
                if (appliance.getEndPointIds().length > 2) {
                    ApplianceConfiguration applianceConfig = new ApplianceConfiguration(
                            appliance.getEndPointIds(),
                            convertToMap(this.hacService.getManagedConfiguration(appliancePid)));
                    applianceConfig.updateName(endPointId,
                            (String) props.get(IAppliance.APPLIANCE_NAME_PROPERTY));
                    applianceConfig.updateCategoryPid(endPointId,
                            (String) props.get(IAppliance.APPLIANCE_CATEGORY_PID_PROPERTY));
                    applianceConfig.updateLocationPid(endPointId,
                            (String) props.get(IAppliance.APPLIANCE_LOCATION_PID_PROPERTY));
                    applianceConfig.updateIconName(endPointId,
                            (String) props.get(IAppliance.APPLIANCE_ICON_PROPERTY));
                    this.hacService.updateAppliance(appliancePid,
                            new Hashtable(applianceConfig.getConfigurationMap()));
                } else {
                    this.hacService.updateAppliance(appliancePid, props);
                }
            } catch (HacException e) {
                throw new ApplianceException(e.getMessage());
            }
        } else
            throw new IllegalStateException("hap service not bound");

    }
}

From source file:org.energy_home.jemma.ah.internal.greenathome.GreenathomeAppliance.java

public void installAppliance(Dictionary props) throws ApplianceException {

    // TODO: it is probably necessary to split installAppliance into a separate
    // step that sets the appliance properties (with access to the clusters)

    synchronized (lockGatH) {
        String appliancePid = (String) props.get(IAppliance.APPLIANCE_PID);
        if (appliancePid == null)
            throw new ApplianceException("appliancePid not set");

        try {
            // !!! Energy@home webui compatibility
            Dictionary c = this.hacService.getManagedConfiguration(appliancePid);
            props.put(IAppliance.APPLIANCE_TYPE_PROPERTY, c.get(IAppliance.APPLIANCE_TYPE_PROPERTY));
            this.hacService.installAppliance(appliancePid, props);
        } catch (HacException e) {
            log.error(e);
            throw new ApplianceException(e.getMessage());
        }
    }
}

From source file:org.energy_home.jemma.ah.internal.greenathome.GreenathomeAppliance.java

public ArrayList getInquiredDevices() {
    synchronized (lockGatH) {
        String[] appliancePids = null;
        appliancePids = this.hacService.getInquiredAppliances();

        ArrayList inquredDevices = new ArrayList();

        if (fakeMode) {
            inquredDevices.add(this.getFakeAppliance());
        } else {
            for (int i = 0; i < appliancePids.length; i++) {
                try {
                    Dictionary c = this.hacService.getManagedConfiguration(appliancePids[i]);
                    // copy the configuration, re-encoding those pieces of
                    // information that can cause marshalling problems in JSON RPC.
                    Hashtable config = new Hashtable();
                    Enumeration keys = c.keys();
                    while (keys.hasMoreElements()) {
                        Object key = keys.nextElement();
                        Object value = c.get(key);
                        if (key.equals(IAppliance.APPLIANCE_TYPE_PROPERTY)) {
                            // !!! Energy@home webui compatibility
                            String[] epsTypes = (String[]) c.get(IAppliance.APPLIANCE_EPS_TYPES_PROPERTY);
                            value = encodeGenericApplianceType((String) value, epsTypes[1]);
                            config.put(IAppliance.APPLIANCE_TYPE_PROPERTY, value);
                        } else {
                            config.put(key, value);
                        }
                    }

                    inquredDevices.add(config);
                } catch (Exception e) {
                    log.fatal("Unable to get Inquired Appliance " + appliancePids[i], e);
                }
            }
        }
        return inquredDevices;
    }
}

From source file:org.openanzo.datasource.nodecentric.internal.NodeCentricDatasource.java

/**
 * Create a new NodeCentricDatasource.
 * 
 * @param bundleContext
 * @param configProperties
 * @param cacheProvider
 * @param aclEventListeners
 * @param eventAdmin
 * @throws AnzoException
 */
public NodeCentricDatasource(BundleContext bundleContext,
        Dictionary<? extends Object, ? extends Object> configProperties, ICacheProvider cacheProvider,
        Set<IAuthorizationEventListener> aclEventListeners, EventAdmin eventAdmin) throws AnzoException {
    super(configProperties);
    this.configProperties = configProperties;
    //Runtime runtime = Runtime.getRuntime();
    //int nrOfProcessors = runtime.availableProcessors();
    //this.MAX_QUERY_CONNECTIONS = Math.max(2, nrOfProcessors - 1);
    // this.MAX_WRITE_CONNECTIONS = Math.max(2, nrOfProcessors - 1);
    if (DatasourceDictionary.getMaxWriteConnections(configProperties) != null) {
        this.MAX_WRITE_CONNECTIONS = DatasourceDictionary.getMaxWriteConnections(configProperties);
    }
    if (DatasourceDictionary.getMaxQueryConnections(configProperties) != null) {
        this.MAX_QUERY_CONNECTIONS = DatasourceDictionary.getMaxQueryConnections(configProperties);
    }
    this.aclEventListeners = aclEventListeners;
    this.cacheProvider = cacheProvider;
    this.eventAdmin = eventAdmin;
    this.graphSets = cacheProvider.openCache("GraphSetCache", 20, false);
    this.graphSets.registerListener(new ICacheListener<GraphSet, GraphSet>() {
        public void elementRemoved(GraphSet key, GraphSet value) {
            graphSetLock.lock();
            try {
                purgedSets.add(key.getSetId());
                graphSetReady.signal();
            } finally {
                graphSetLock.unlock();
            }
        }
    });

    instanceId = (String) configProperties.get(org.osgi.framework.Constants.SERVICE_PID);
    configuration = CoreDBConfiguration.createConfiguration(configProperties);
    statementProvider = new PreparedStatementProvider();
    URL psURL = bundleContext.getBundle().getResource(configuration.getSqlFilename());
    if (psURL != null) {
        try {
            InputStream stream = psURL.openStream();
            statementProvider.loadSQLFile(stream);
        } catch (IOException ioe) {
            throw new AnzoException(ExceptionConstants.RDB.FAILED_INITIALIZING_POOL, ioe);
        }
    }
    nodeLayout = new CompositeNodeLayout(statementProvider, configuration.getSupportsSequences(), null,
            configuration.getContainerName(), configuration.getMaxLongObjectLength(),
            configuration.getOptimizationString(), true, configuration.getSupportsIdentity(),
            configuration.getSessionPrefix(),
            this.cacheProvider.<Long, URI>openCache(instanceId + "UriValue", 20000, true),
            this.cacheProvider.<URI, Long>openCache(instanceId + "UriIdValue", 20000, true),
            this.cacheProvider.<Long, BlankNode>openCache(instanceId + "BlankValue", 20000, true),
            this.cacheProvider.<BlankNode, Long>openCache(instanceId + "BlankIdValue", 20000, true),
            this.cacheProvider.<Long, PlainLiteral>openCache(instanceId + "PlainLiteralValue", 20000, true),
            this.cacheProvider.<PlainLiteral, Long>openCache(instanceId + "PlainLiteralIdValue", 20000, true),
            this.cacheProvider.<Long, TypedLiteral>openCache(instanceId + "TypedLiteralValue", 20000, true),
            this.cacheProvider.<TypedLiteral, Long>openCache(instanceId + "TypedLiteralIdValue", 20000, true),
            this.cacheProvider.<Long, String>openCache(instanceId + "LanguageValue", 20000, true),
            this.cacheProvider.<String, Long>openCache(instanceId + "LanguageIdValue", 20000, true),
            this.cacheProvider.<Long, String>openCache(instanceId + "DatatypeValue", 20000, true),
            this.cacheProvider.<String, Long>openCache(instanceId + "DatatypeIdValue", 20000, true));

    this.isPrimary = DatasourceDictionary.getIsPrimary(configProperties);

    resetService = new NodeCentricResetService(resetEnabled, this);
    updateService = new NodeCentricUpdateService(this);
    modelService = new NodeCentricModelService(this, (enableCaching) ? this.cacheProvider : null);
    indexService = new NodeCentricIndexService(this);
    indexHandler = new NodeCentricIndexUpdateHandler(this);
    queryService = new NodeCentricQueryService(this, (enableCaching) ? this.cacheProvider : null);
    replicationService = new NodeCentricReplicationService(this, (enableCaching) ? this.cacheProvider : null);
    authorizationService = new CachedAuthorizationService(new NodeCentricAuthorizationService(this),
            (enableCaching) ? this.cacheProvider : null);

    queryPool = initializeConnectionFactory(false, MAX_QUERY_CONNECTIONS);
    writePool = initializeConnectionFactory(true, MAX_WRITE_CONNECTIONS);

    updateService.addDatasourceUpdateResultListener(indexHandler);
    if (replicationService.getCacheUpdateListener() != null)
        updateService.addDatasourceUpdateResultListener(replicationService.getCacheUpdateListener());
    if (queryService.getCacheUpdateListener() != null)
        updateService.addDatasourceUpdateResultListener(queryService.getCacheUpdateListener());
    if (modelService.getCacheUpdateListener() != null)
        updateService.addDatasourceUpdateResultListener(modelService.getCacheUpdateListener());
    resetService.start();
    updateService.start();
    queryService.start();
    replicationService.start();
    authorizationService.start();
    indexHandler.start();
    indexService.start();
    modelService.start();
    try {
        Connection connection = (Connection) writePool.borrowObject();
        writePool.returnObject(connection);
    } catch (AnzoException ae) {
        throw ae;
    } catch (Exception e) {
        throw new AnzoException(ExceptionConstants.RDB.FAILED_INITIALIZING_POOL, e);
    }
    updateListenerTracker = new OsgiServiceTracker<IUpdateResultListener>(
            new IServiceTrackerListener<IUpdateResultListener>() {

                public void unregisterService(IUpdateResultListener service) {
                    updateService.removeGlobalUpdateResultListener(service);
                }

                public void registerService(IUpdateResultListener service) {
                    updateService.addGlobalUpdateResultListener(service);
                }

                public Class<IUpdateResultListener> getComponentType() {
                    return IUpdateResultListener.class;
                }

            }, bundleContext);
    updateListenerTracker.open();
    setupCapabilities();
    // state = ServiceLifecycleState.STARTED;
    setupStats(bundleContext);
    indexHandler.rebuildIndex(indexHandler.rebuildIndex);
    purgeSetThread = new Thread("PurgeQueryGraphSets") {
        @Override
        public void run() {
            while (!interrupted()) {
                try {
                    graphSetLock.lockInterruptibly();
                    try {
                        if (purgedSets.size() > 0) {
                            try {
                                Connection connection = getWriteConnection();
                                begin(connection, true, true);
                                GlitterRdbWrapper.BatchPurgeQueryDataset ps = new GlitterRdbWrapper.BatchPurgeQueryDataset(
                                        connection, statementProvider);
                                try {
                                    for (Long id : purgedSets) {
                                        ps.addEntry(id);
                                    }
                                    purgedSets.clear();
                                    ps.executeStatement();
                                    commit(connection, true, true);
                                    ps.close();
                                } finally {
                                    returnWriteConnection(connection);
                                }
                            } catch (AnzoException ae) {
                                log.error(LogUtils.RDB_MARKER, "Error purging graph datasets", ae);
                            }
                        }
                        graphSetReady.await();
                    } finally {
                        graphSetLock.unlock();
                    }
                } catch (InterruptedException ie) {
                    return;
                }
            }
        }
    };
    purgeSetThread.setDaemon(true);
    purgeSetThread.start();
}