Example usage for java.util Properties entrySet

Introduction

On this page you can find usage examples for java.util.Properties#entrySet(), collected from open-source projects.

Prototype

@Override
public Set<Map.Entry<Object, Object>> entrySet()

Usage
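
Before the project examples below, here is a minimal, self-contained sketch of the iteration pattern they all share (the property names are made up for illustration):

import java.util.Map;
import java.util.Properties;

public class EntrySetDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("host", "localhost");
        props.setProperty("port", "8080");

        // Properties extends Hashtable<Object, Object>, so each entry is a
        // Map.Entry<Object, Object>; values stored via setProperty are Strings.
        for (Map.Entry<Object, Object> entry : props.entrySet()) {
            String key = (String) entry.getKey();
            String value = (String) entry.getValue();
            System.out.println(key + "=" + value);
        }
    }
}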

From source file:com.redhat.red.offliner.alist.PomArtifactListReader.java

public PomArtifactListReader(final File settingsXml, final String typeMappingFile,
        final CredentialsProvider creds) {
    this.settingsXml = settingsXml;
    this.creds = creds;

    Properties props = new Properties();
    if (StringUtils.isEmpty(typeMappingFile)) {
        try (InputStream mappingStream = getClass().getClassLoader()
                .getResourceAsStream(DEFAULT_TYPE_MAPPING_RES)) {
            props.load(mappingStream);
        } catch (IOException ex) {
            throw new IllegalStateException("Failed to load Maven type mapping from default properties", ex);
        }
    } else {
        try (InputStream mappingStream = new FileInputStream(typeMappingFile)) {
            props.load(mappingStream);
        } catch (IOException ex) {
            throw new IllegalStateException(
                    "Failed to load Maven type mapping provided properties file " + typeMappingFile, ex);
        }
    }
    this.typeMapping = new HashMap<>(props.size());

    Pattern p = Pattern.compile("([^:]+)(?::(.+))?");
    for (Map.Entry<Object, Object> entry : props.entrySet()) {
        String type = (String) entry.getKey();
        String value = (String) entry.getValue();
        Matcher m = p.matcher(value);
        if (!m.matches()) {
            throw new IllegalArgumentException(
                    "The type mapping string \"" + value + "\" has a wrong format.");
        }
        String extension = m.group(1);
        // group(2) is null when the optional ":<classifier>" suffix is absent;
        // note that groupCount() would always return 2 here regardless of the match
        if (m.group(2) != null) {
            String classifier = m.group(2);
            this.typeMapping.put(type, new TypeMapping(extension, classifier));
        } else {
            this.typeMapping.put(type, new TypeMapping(extension));
        }
    }
    }
}

From source file:com.wolvereness.bluebutton.Version.java

/**
 * @param properties The properties to load from
 * @throws UnsupportedEncodingException If UTF8 isn't supported
 */
public Version(final Properties properties) throws UnsupportedEncodingException {
    try {
        this.branch = getProperty(properties, "git.branch");
        this.buildTime = getProperty(properties, "git.build.time");
        this.buildUserEmail = getProperty(properties, "git.build.user.email");
        this.buildUserName = getProperty(properties, "git.build.user.name");
        this.commitId = getProperty(properties, "git.commit.id");
        this.commitMessageFull = getProperty(properties, "git.commit.message.full");
        this.commitMessageShort = getProperty(properties, "git.commit.message.short");
        this.commitTime = getProperty(properties, "git.commit.time");
        this.commitUserEmail = getProperty(properties, "git.commit.user.email");
        this.commitUserName = getProperty(properties, "git.commit.user.name");
        this.describe = getProperty(properties, "git.commit.id.describe");
    } catch (final Throwable t) {
        final ContextedRuntimeException ex = new ContextedRuntimeException("Failed to read properties", t);
        for (final Entry<Object, Object> entry : properties.entrySet()) {
            ex.addContextValue(entry.getKey() == null ? null : entry.getKey().toString(), entry.getValue());
        }
        throw ex;
    }
}
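
A note on the example above: on failure it attaches every property to the ContextedRuntimeException (from Apache Commons Lang 3) before rethrowing, so the exception carries the full set of key/value pairs that was being read. This is a handy diagnostic pattern when a Properties file may be incomplete or malformed.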

From source file:com.jdom.word.playdough.model.gamepack.GamePackFileGenerator.java

public List<Properties> generateGamePacks(int numberOfPlayableWordsInEachPack) {
    List<Properties> finishedPacks = new ArrayList<Properties>();

    Properties allProperties = generateProperties();

    List<String> allPlayableWords = PropertiesUtil.getPropertyAsList(allProperties,
            GamePack.PLAYABLE_WORDS_KEY);

    Iterator<String> iter = allPlayableWords.iterator();
    Properties currentPack = new Properties();
    List<String> currentPackPlayableWords = new ArrayList<String>();
    for (int i = 0; iter.hasNext(); i++) {
        String playableWord = iter.next();
        System.out.println("Finding a pack for word #" + i + " [" + playableWord + "]");
        String playableWordPrefix = playableWord + ".";

        if (i % numberOfPlayableWordsInEachPack == 0 && i > 0) {
            currentPack.setProperty(GamePack.PLAYABLE_WORDS_KEY,
                    StringUtils.join(currentPackPlayableWords, PropertiesUtil.SEPARATOR));
            finishedPacks.add(currentPack);
            currentPackPlayableWords.clear();
            currentPack = new Properties();
        }

        currentPackPlayableWords.add(playableWord);
        Iterator<Entry<Object, Object>> propertiesIter = allProperties.entrySet().iterator();

        while (propertiesIter.hasNext()) {
            Entry<Object, Object> entry = propertiesIter.next();
            String key = (String) entry.getKey();
            if (key.startsWith(playableWordPrefix)) {
                currentPack.setProperty(key, (String) entry.getValue());
                propertiesIter.remove();
            }
        }
    }

    if (!currentPack.isEmpty()) {
        currentPack.setProperty(GamePack.PLAYABLE_WORDS_KEY,
                StringUtils.join(currentPackPlayableWords, PropertiesUtil.SEPARATOR));
        finishedPacks.add(currentPack);
    }

    return finishedPacks;
}
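
A note on the example above: the set returned by entrySet() is a live view backed by the Properties table, so propertiesIter.remove() deletes the entry from allProperties itself. Each word's keys are therefore consumed by exactly one pack, and every later pass over the iterator scans fewer entries.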

From source file:edu.kit.dama.dataworkflow.AbstractExecutionEnvironmentHandler.java

/**
 * Schedule the ingest of the output data of the provided task. First, a
 * new digital object is created and added to the investigation linked to
 * the provided task. The object label is statically assigned following the
 * pattern
 * <i>"DataWorkflowProcessingResult of Task #TASK_ID"</i>, where TASK_ID is
 * the numeric key of the task.
 *
 * In the next step, an ingest is scheduled. As soon as the ingest is
 * prepared, all files located in the output directory of pTask will be
 * linked into the staging location. Because the repository and the
 * execution environment can access the same location, this can be done
 * immediately. As soon as all file links are created, the pre-ingest is
 * marked as finished and the final ingest step into the repository storage
 * can take place asynchronously. The status of the ingest is then checked
 * within {@link #performIngest(edu.kit.dama.dataworkflow.DataWorkflowTask)
 * } by calling {@link #isIngestFinished(edu.kit.dama.dataworkflow.DataWorkflowTask)
 * }.
 *
 * @param pTask The task whose output should be ingested.
 *
 * @throws IngestException If the ingest scheduling fails for some reason,
 * e.g. if the metadata/ingest creation fails.
 */
private void scheduleIngest(DataWorkflowTask pTask) throws IngestException {
    IAuthorizationContext ctx = DataWorkflowHelper.getTaskContext(pTask);
    File stagingPath;
    try {
        stagingPath = DataWorkflowHelper.getStagingBasePath(pTask);
    } catch (IOException ex) {
        throw new IngestException("Failed to schedule ingest.", ex);
    }
    File outputDir = DataWorkflowHelper.getTaskOutputDirectory(stagingPath);
    LOGGER.debug("Performing ingest of output directory {}", outputDir);
    setTaskStatus(pTask, TASK_STATUS.INGEST);

    IMetaDataManager mdm = MetaDataManagement.getMetaDataManagement().getMetaDataManager();
    mdm.setAuthorizationContext(ctx);
    Investigation inv = null;
    try {
        inv = mdm.find(Investigation.class, pTask.getInvestigationId());
    } catch (UnauthorizedAccessAttemptException ex) {
        LOGGER.error("Failed to access investigation with id " + pTask.getInvestigationId(), ex);
    }

    if (inv == null) {
        throw new IngestException("Failed to obtain investigation. Ingest not possible.");
    }
    LOGGER.debug("Creating result data object for task {}", pTask.getId());
    DigitalObject resultObject = DigitalObject.factoryNewDigitalObject();
    resultObject.setLabel("DataWorkflowProcessingResult of Task #" + pTask.getId());
    resultObject.setInvestigation(inv);
    resultObject.setUploadDate(new Date());
    resultObject.setUploader(DataWorkflowHelper.getContact(pTask));
    try {
        LOGGER.debug("Storing digital object with id {} for result data of task {}",
                resultObject.getDigitalObjectId(), pTask.getId());
        resultObject = mdm.save(resultObject);
        LOGGER.debug("Adding object with base id {} to investigation with id {}", resultObject.getBaseId(),
                inv.getInvestigationId());
        inv.addDataSet(resultObject);
        mdm.update(inv);
        LOGGER.debug("Base metadata for task with id {} successfully stored.", pTask.getId());
    } catch (UnauthorizedAccessAttemptException | EntityNotFoundException ex) {
        throw new IngestException("Failed to create base metadata structure for task " + pTask.getJobId(), ex);
    }

    LOGGER.debug("Storing digital object transition information");
    try {
        DataWorkflowTransition t = new DataWorkflowTransition(pTask);
        Properties inputObjects = pTask.getObjectViewMapAsObject();
        Set<Entry<Object, Object>> entries = inputObjects.entrySet();
        for (Entry<Object, Object> entry : entries) {
            try {
                LOGGER.debug("Getting input object with object id {}", (String) entry.getKey());
                DigitalObject input = mdm
                        .findSingleResult("SELECT o FROM DigitalObject o WHERE o.digitalObjectIdentifier='"
                                + (String) entry.getKey() + "'", DigitalObject.class);
                if (input == null) {
                    throw new IngestException(
                            "Failed to obtain digital object for id " + (String) entry.getKey()
                                    + ". Result is null. Unable to build object transition.");
                }
                LOGGER.debug("Adding input object to transition.");
                t.addInputMapping(input, (String) entry.getValue());
            } catch (UnauthorizedAccessAttemptException ex) {
                throw new IngestException("Failed to obtain digital object for id " + (String) entry.getKey()
                        + ". Unable to build up object transition.", ex);
            }
        }
        //add single output object
        t.addOutputObject(resultObject);
        LOGGER.debug("Storing transition for task {}", pTask.getId());
        mdm.save(t);
        LOGGER.debug("DataWorkflow task transition for task {} successfully stored.", pTask.getId());
    } catch (IOException ex) {
        throw new IngestException(
                "Failed to get object-view map to build up object transitions for task " + pTask.getId(), ex);
    } catch (UnauthorizedAccessAttemptException ex) {
        throw new IngestException("Failed to store object transitions for task " + pTask.getId(), ex);
    }

    LOGGER.debug("Collecting TransferClientProperties");
    TransferClientProperties props = new TransferClientProperties();
    props.setSendMailNotification(true);
    props.setReceiverMail(DataWorkflowHelper.getContact(pTask).getEmail());
    props.setStagingAccessPointId(pTask.getExecutionEnvironment().getStagingAccessPointId());
    IngestInformation ingest;
    try {
        LOGGER.debug("Scheduling new ingest for object with id {}", resultObject.getDigitalObjectId());
        ingest = IngestInformationServiceLocal.getSingleton().prepareIngest(resultObject.getDigitalObjectId(),
                props, ctx);
        LOGGER.debug("Scheduled ingest with id {}", ingest.getId());
    } catch (TransferPreparationException ex) {
        throw new IngestException("Failed to prepare ingest for task results.", ex);
    }

    //Wait for the ingest to be prepared. Basically, this should be the case immediately.
    int sleepCnt = 12; // 12 x 5 seconds = one minute, matching the error message below
    while (!INGEST_STATUS.PRE_INGEST_SCHEDULED.equals(ingest.getStatusEnum()) && sleepCnt > 0) {
        try {
            Thread.sleep(5000);
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        //reload ingest information
        ingest = IngestInformationServiceLocal.getSingleton().getIngestInformationById(ingest.getId(), ctx);
        sleepCnt--;
    }

    //Check again for status...if it is not PRE_INGEST_SCHEDULED the preparation has failed.
    if (!INGEST_STATUS.PRE_INGEST_SCHEDULED.equals(ingest.getStatusEnum())) {
        //ingest preparation not successful...abort.
        throw new IngestException(
                "Preparation of result ingest did not succeed within one minute. Current status is "
                        + ingest.getStatusEnum() + ". Ingest will be aborted.");
    }

    AbstractStagingAccessPoint ap = StagingConfigurationManager.getSingleton()
            .getAccessPointById(pTask.getExecutionEnvironment().getStagingAccessPointId());
    File dataFolder = ap.getLocalPathForUrl(ingest.getDataFolderUrl(), ctx);

    for (File outputFile : outputDir.listFiles()) {
        try {
            SystemUtils.createSymbolicLink(outputFile, new File(dataFolder, outputFile.getName()));
        } catch (IOException ex) {
            throw new IngestException("Linking output data from " + outputDir + " to ingest data folder "
                    + dataFolder + " failed. Ingest will be aborted.", ex);
        }
    }
    LOGGER.debug(
            "Output data successfully linked to ingest data directory. Setting status of ingest with id {} to {}",
            ingest.getId(), INGEST_STATUS.PRE_INGEST_FINISHED);
    if (IngestInformationServiceLocal.getSingleton().updateStatus(ingest.getId(),
            INGEST_STATUS.PRE_INGEST_FINISHED.getId(), null, ctx) == 1) {
        LOGGER.debug("Ingest status successfully updated. Pre-Ingest finished.");
        Properties ingestTransferMap = new Properties();
        ingestTransferMap.put(ingest.getDigitalObjectId(), Long.toString(ingest.getId()));
        try {
            LOGGER.debug("Updating object-transfer map in task with id {}", pTask.getId());
            pTask.setObjectTransferMapAsObject(ingestTransferMap);
            LOGGER.debug("Persisting updated task.");
            DataWorkflowPersistenceImpl.getSingleton(ctx).updateTask(pTask);
            LOGGER.debug("Object-transfer map in task successfully updated.");
        } catch (UnauthorizedAccessAttemptException | IOException ex) {
            throw new IngestException(
                    "Failed to update object-transfer map in task with id " + pTask.getId(), ex);
        }
    } else {
        throw new IngestException("Failed to update ingest status. No rows where modified.");
    }
}

From source file:ch.cyberduck.core.preferences.Preferences.java

protected void setDefaults(final Properties properties) {
    for (Map.Entry<Object, Object> property : properties.entrySet()) {
        defaults.put(property.getKey().toString(), property.getValue().toString());
    }
}
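
Unlike most examples on this page, this one converts keys and values with toString() rather than a (String) cast. The distinction matters when a Properties instance was populated through the inherited Hashtable.put, which accepts arbitrary objects. A minimal sketch of the failure mode the cast would hit (the "timeout" key is made up for illustration):

Properties props = new Properties();
props.put("timeout", 30); // compiles: put() is inherited from Hashtable and takes Objects
for (Map.Entry<Object, Object> e : props.entrySet()) {
    String v = (String) e.getValue(); // throws ClassCastException: Integer is not a String
    System.out.println(v);
}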

From source file:org.apache.jxtadoop.conf.Configuration.java

public void write(DataOutput out) throws IOException {
    Properties props = getProps();
    WritableUtils.writeVInt(out, props.size());
    for (Map.Entry<Object, Object> item : props.entrySet()) {
        org.apache.jxtadoop.io.Text.writeString(out, (String) item.getKey());
        org.apache.jxtadoop.io.Text.writeString(out, (String) item.getValue());
    }
}
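
The method above serializes the configuration as a varint entry count followed by alternating key and value strings. For completeness, a hedged sketch of what the matching read side could look like, assuming the Hadoop-style WritableUtils.readVInt, Text.readString, and Configuration.set APIs that this class mirrors (the read method itself is not shown in this source):

public void readFields(DataInput in) throws IOException {
    int size = WritableUtils.readVInt(in);
    for (int i = 0; i < size; i++) {
        String key = org.apache.jxtadoop.io.Text.readString(in);
        String value = org.apache.jxtadoop.io.Text.readString(in);
        set(key, value); // assumed setter, as in Hadoop's Configuration
    }
}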

From source file:org.apache.archiva.metadata.repository.file.FileMetadataRepository.java

@Override
public Collection<ArtifactMetadata> getArtifacts(String repoId, String namespace, String projectId,
        String projectVersion) throws MetadataResolutionException {
    try {
        Map<String, ArtifactMetadata> artifacts = new HashMap<>();

        File directory = new File(getDirectory(repoId), namespace + "/" + projectId + "/" + projectVersion);

        Properties properties = readOrCreateProperties(directory, PROJECT_VERSION_METADATA_KEY);

        for (Map.Entry<Object, Object> entry : properties.entrySet()) {
            String name = (String) entry.getKey();
            StringTokenizer tok = new StringTokenizer(name, ":");
            if (tok.hasMoreTokens() && "artifact".equals(tok.nextToken())) {
                String field = tok.nextToken();
                String id = tok.nextToken();

                ArtifactMetadata artifact = artifacts.get(id);
                if (artifact == null) {
                    artifact = new ArtifactMetadata();
                    artifact.setRepositoryId(repoId);
                    artifact.setNamespace(namespace);
                    artifact.setProject(projectId);
                    artifact.setProjectVersion(projectVersion);
                    artifact.setVersion(projectVersion);
                    artifact.setId(id);
                    artifacts.put(id, artifact);
                }

                String value = (String) entry.getValue();
                if ("updated".equals(field)) {
                    artifact.setFileLastModified(Long.parseLong(value));
                } else if ("size".equals(field)) {
                    artifact.setSize(Long.valueOf(value));
                } else if ("whenGathered".equals(field)) {
                    artifact.setWhenGathered(new Date(Long.parseLong(value)));
                } else if ("version".equals(field)) {
                    artifact.setVersion(value);
                } else if ("md5".equals(field)) {
                    artifact.setMd5(value);
                } else if ("sha1".equals(field)) {
                    artifact.setSha1(value);
                } else if ("facetIds".equals(field)) {
                    if (value.length() > 0) {
                        String propertyPrefix = "artifact:facet:" + id + ":";
                        for (String facetId : value.split(",")) {
                            MetadataFacetFactory factory = metadataFacetFactories.get(facetId);
                            if (factory == null) {
                                log.error("Attempted to load unknown artifact metadata facet: " + facetId);
                            } else {
                                MetadataFacet facet = factory.createMetadataFacet();
                                String prefix = propertyPrefix + facet.getFacetId();
                                Map<String, String> map = new HashMap<>();
                                for (Object key : new ArrayList(properties.keySet())) {
                                    String property = (String) key;
                                    if (property.startsWith(prefix)) {
                                        map.put(property.substring(prefix.length() + 1),
                                                properties.getProperty(property));
                                    }
                                }
                                facet.fromProperties(map);
                                artifact.addFacet(facet);
                            }
                        }
                    }

                    updateArtifactFacets(artifact, properties);
                }
            }
        }
        return artifacts.values();
    } catch (IOException e) {
        throw new MetadataResolutionException(e.getMessage(), e);
    }
}
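
For reference, both this example and the next decode Archiva's flat property-key layout: per-artifact fields are stored as artifact:<field>:<id> and facet data under artifact:facet:<id>:<facetId>:<key>, which is why the code tokenizes each key on ":" before dispatching on the field name.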

From source file:org.apache.archiva.metadata.repository.file.FileMetadataRepository.java

@Override
public Collection<String> getArtifactVersions(String repoId, String namespace, String projectId,
        String projectVersion) throws MetadataResolutionException {
    try {
        File directory = new File(getDirectory(repoId), namespace + "/" + projectId + "/" + projectVersion);

        Properties properties = readOrCreateProperties(directory, PROJECT_VERSION_METADATA_KEY);

        Set<String> versions = new HashSet<String>();
        for (Map.Entry<Object, Object> entry : properties.entrySet()) {
            String name = (String) entry.getKey();
            if (name.startsWith("artifact:version:")) {
                versions.add((String) entry.getValue());
            }
        }
        return versions;
    } catch (IOException e) {
        throw new MetadataResolutionException(e.getMessage(), e);
    }
}

From source file:lucee.runtime.engine.CFMLEngineImpl.java

private CFMLEngineImpl(CFMLEngineFactory factory, BundleCollection bc) {
    this.factory = factory;
    this.bundleCollection = bc;

    // happens when Lucee is loaded directly
    if (bundleCollection == null) {
        try {
            Properties prop = InfoImpl.getDefaultProperties(null);

            // read the config from default.properties
            Map<String, Object> config = new HashMap<String, Object>();
            Iterator<Entry<Object, Object>> it = prop.entrySet().iterator();
            Entry<Object, Object> e;
            String k;
            while (it.hasNext()) {
                e = it.next();
                k = (String) e.getKey();
                if (!k.startsWith("org.") && !k.startsWith("felix."))
                    continue;
                config.put(k, CFMLEngineFactorySupport.removeQuotes((String) e.getValue(), true));
            }

            /* TODO no idea what is going on, but this is necessary atm
            config.put(Constants.FRAMEWORK_SYSTEMPACKAGES,
                    "org.w3c.dom,org.w3c.dom.bootstrap,org.w3c.dom.events,org.w3c.dom.ls,org.xml.sax,"
                            + "org.xml.sax.ext,org.xml.sax.helpers,javax.crypto,javax.crypto.spec");

            config.put(Constants.FRAMEWORK_BOOTDELEGATION,
                    "coldfusion,...,org.apache,...,lucee,lucee.commons,...,lucee.transformer.library.tag");
            (the original value lists several hundred coldfusion.*, org.*, org.apache.* and
            lucee.* package names; it is elided here for readability)
            */
            config.put(Constants.FRAMEWORK_BOOTDELEGATION, "lucee.*");

            Felix felix = factory.getFelix(factory.getResourceRoot(), config);

            bundleCollection = new BundleCollection(felix, felix, null);
            //bundleContext=bundleCollection.getBundleContext();
        } catch (Throwable t) {
            throw new RuntimeException(t);
        }
    }

    this.info = new InfoImpl(bundleCollection == null ? null : bundleCollection.core);
    Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); // MUST better location for this

    CFMLEngineFactory.registerInstance((this));// patch, not really good but it works
    ConfigServerImpl cs = getConfigServerImpl();

    // start the controller
    SystemOut.printDate(SystemUtil.getPrintWriter(SystemUtil.OUT), "Start CFML Controller");
    Controler controler = new Controler(cs, initContextes, 5 * 1000, controlerState);
    controler.setDaemon(true);
    controler.setPriority(Thread.MIN_PRIORITY);
    controler.start();

    // install extension defined
    String extensionIds = System.getProperty("lucee-extensions");
    if (!StringUtil.isEmpty(extensionIds, true)) {
        Log log = cs.getLog("deploy", true);
        String[] ids = lucee.runtime.type.util.ListUtil.listToStringArray(extensionIds, ';');
        String id;
        for (int i = 0; i < ids.length; i++) {
            id = ids[i].trim();
            if (StringUtil.isEmpty(id, true))
                continue;
            DeployHandler.deployExtension(cs, id, log);
        }
    }

    //print.e(System.getProperties());

    touchMonitor(cs);
    this.uptime = System.currentTimeMillis();
    //this.config=config; 
}

From source file:com.aliyun.odps.conf.Configuration.java

/**
 * Writes this configuration to the given {@link DataOutput}.
 */
@Override
public void write(DataOutput out) throws IOException {
    Properties props = getProps();
    WritableUtils.writeVInt(out, props.size());
    for (Map.Entry<Object, Object> item : props.entrySet()) {
        com.aliyun.odps.io.Text.writeString(out, (String) item.getKey());
        com.aliyun.odps.io.Text.writeString(out, (String) item.getValue());
    }
}