Example usage for java.util Properties size

List of usage examples for java.util Properties size

Introduction

On this page you can find example usages of java.util.Properties.size().

Prototype

@Override
    public int size() 

Source Link

Usage

From source file:org.olat.core.util.i18n.I18nManager.java

/**
 * Save the given value for the given i18nItem
 * /*from  w w  w  . j  ava 2s . c  o  m*/
 * @param i18nItem
 * @param value
 */
public void saveOrUpdateI18nItem(I18nItem i18nItem, String value) {
    Properties properties = getPropertiesWithoutResolvingRecursively(i18nItem.getLocale(),
            i18nItem.getBundleName());
    // Add logging block to find bogus save issues
    if (isLogDebugEnabled()) {
        String itemIdent = i18nItem.getLocale() + ":"
                + buildI18nItemIdentifyer(i18nItem.getBundleName(), i18nItem.getKey());
        if (properties.containsKey(i18nItem.getKey())) {
            if (StringHelper.containsNonWhitespace(value)) {
                logDebug("Updating i18n item::" + itemIdent + " with new value::" + value, null);
            } else {
                logDebug("Deleting i18n item::" + itemIdent + " because new value is emty", null);
            }
        } else {
            if (StringHelper.containsNonWhitespace(value)) {
                logDebug("Creating i18n item::" + itemIdent + " with new value::" + value, null);
            }
        }
    }
    //
    if (StringHelper.containsNonWhitespace(value)) {
        properties.setProperty(i18nItem.getKey(), value);
    } else if (properties.containsKey(i18nItem.getKey())) {
        properties.remove(i18nItem.getKey());
    }
    if (properties.size() == 0) {
        // delete empty files
        deleteProperties(i18nItem.getLocale(), i18nItem.getBundleName());
    } else {
        // update
        saveOrUpdateProperties(properties, i18nItem.getLocale(), i18nItem.getBundleName());
    }
    // remove all properties files from cache that contain references to
    // this i18n item, rebuild them lazy on next demand.
    if (cachingEnabled) {
        String identifyer = buildI18nItemIdentifyer(i18nItem.getBundleName(), i18nItem.getKey());
        Deque<String> referencingBundles = referencingBundlesIndex.get(identifyer);
        if (referencingBundles != null) {
            // remove from index
            referencingBundlesIndex.remove(identifyer);
            // remove from bundles cache
            for (String bundleName : referencingBundles) {
                cachedBundles.remove(bundleName);
            }
        }
    }

}

From source file:com.liferay.portal.struts.MultiMessageResources.java

/**
 * Loads message resources for the given resource name and locale key into the
 * shared {@code messages} map.
 * <p>
 * Names containing {@code cms_language} are resolved through the
 * {@link LanguageAPI}; everything else is read as a properties-style file from
 * {@code /WEB-INF/}, with backslash escapes (including {@code \\uXXXX}) decoded
 * manually.
 *
 * @param name resource name (file under /WEB-INF/ or a cms_language marker)
 * @param localeKey locale identifier such as "en" or "en_US"
 */
private void _loadProps(String name, String localeKey) {

    if (name.contains("cms_language")) {
        LanguageAPI langAPI = APILocator.getLanguageAPI();
        // Hoisted: the original split localeKey up to three times.
        String[] localeParts = localeKey.split("_");
        List<LanguageKey> keys;
        if (localeParts.length > 1) {
            keys = langAPI.getLanguageKeys(localeParts[0], localeParts[1]);
        } else {
            keys = langAPI.getLanguageKeys(localeParts[0]);
        }

        if (keys.isEmpty()) {
            return;
        }

        synchronized (messages) {
            for (LanguageKey langkey : keys) {
                messages.put(messageKey(localeKey, langkey.getKey()), langkey.getValue());
            }
        }

    } else {
        Properties props = new Properties();

        try {
            URL url = _servletContext.getResource("/WEB-INF/" + name);

            if (url != null) {
                // try-with-resources guarantees the streams are closed even when
                // reading fails (the original leaked them on exception).
                // NOTE(review): the reader uses the platform default charset, as
                // the original did — confirm whether these files are UTF-8.
                try (InputStream is = url.openStream();
                        BufferedReader buffy = new BufferedReader(new InputStreamReader(is))) {
                    String line;
                    while ((line = buffy.readLine()) != null) {
                        // Skip blank lines, comments, and lines without '='.
                        if (UtilMethods.isSet(line) && line.indexOf("=") > -1 && !line.startsWith("#")) {
                            String[] arr = line.split("=", 2);
                            if (arr.length > 1) {
                                String key = arr[0].trim();
                                String val = arr[1].trim();
                                if (val.indexOf("\\u") > -1) {
                                    val = _unescape(val);
                                }
                                props.put(key, val);
                            }
                        }
                    }
                }
            }
        } catch (Exception e) {
            Logger.error(this, e.getMessage(), e);
        }

        if (props.isEmpty()) {
            return;
        }

        synchronized (messages) {
            for (String key : props.stringPropertyNames()) {
                messages.put(messageKey(localeKey, key), props.getProperty(key));
            }
        }
    }
}

/**
 * Decodes backslash escape sequences ({@code \f \n \r \t} and
 * {@code \\uXXXX}) in a properties value, mirroring the subset of
 * java.util.Properties escape handling the original inline loop implemented.
 *
 * @param val raw value possibly containing escape sequences
 * @return the value with escapes decoded
 */
private static String _unescape(String val) {
    StringBuilder buffer = new StringBuilder(val.length());
    boolean precedingBackslash = false;
    for (int i = 0; i < val.length(); i++) {
        char c = val.charAt(i);
        if (precedingBackslash) {
            switch (c) {
            case 'f':
                c = '\f';
                break;
            case 'n':
                c = '\n';
                break;
            case 'r':
                c = '\r';
                break;
            case 't':
                c = '\t';
                break;
            case 'u':
                // \ uXXXX: consume the four hex digits that follow.
                String hex = val.substring(i + 1, i + 5);
                c = (char) Integer.parseInt(hex, 16);
                i += 4;
            }
            precedingBackslash = false;
        } else {
            precedingBackslash = (c == '\\');
        }
        // The backslash itself is dropped; only the decoded char is kept.
        if (!precedingBackslash) {
            buffer.append(c);
        }
    }
    return buffer.toString();
}

From source file:de.tudarmstadt.ukp.dkpro.tc.weka.report.WekaBatchTrainTestReport.java

/**
 * Aggregates results of all Weka test-task subcontexts into a performance
 * overview table (CSV and, when column count permits, Excel), and — when run
 * inside a cross-validation experiment — an aggregated confusion matrix and
 * outcome-id report.
 *
 * @throws Exception if storage access or report writing fails
 */
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();
    Map<List<String>, Double> confMatrixMap = new HashMap<List<String>, Double>();

    Properties outcomeIdProps = new Properties();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        if (subcontext.getType().startsWith(WekaTestTask.class.getName())) {
            try {
                outcomeIdProps.putAll(store.retrieveBinary(subcontext.getId(),
                        WekaOutcomeIDReport.ID_OUTCOME_KEY, new PropertiesAdapter()).getMap());
            } catch (Exception ignored) {
                // silently ignore if this file was not generated
            }

            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();
            Map<String, String> resultMap = store
                    .retrieveBinary(subcontext.getId(), WekaTestTask.RESULTS_FILENAME, new PropertiesAdapter())
                    .getMap();

            File confMatrix = store.getStorageFolder(subcontext.getId(), CONFUSIONMATRIX_KEY);

            if (confMatrix.isFile()) {
                confMatrixMap = ReportUtils.updateAggregateMatrix(confMatrixMap, confMatrix);
            } else {
                // getStorageFolder may have created an empty folder; clean it up.
                confMatrix.delete();
            }

            String key = getKey(discriminatorsMap);

            // NOTE(review): this list is registered but never filled here —
            // presumably populated elsewhere; verify before removing.
            List<Double> results = key2resultValues.get(key);
            if (results == null) {
                results = new ArrayList<Double>();
            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            for (Map.Entry<String, String> disc : discriminatorsMap.entrySet()) {
                if (!ReportUtils.containsExcludePattern(disc.getKey(), discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc.getKey(), disc.getValue());
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());
    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // this report is reused in CV, and we only want to aggregate confusion matrices from folds
    // in CV, and an aggregated OutcomeIdReport
    if (getContext().getId().startsWith(ExperimentCrossValidation.class.getSimpleName())) {
        // no confusion matrix for regression
        if (!confMatrixMap.isEmpty()) {
            FlexTable<String> confMatrix = ReportUtils.createOverallConfusionMatrix(confMatrixMap);
            getContext().storeBinary(CONFUSIONMATRIX_KEY, confMatrix.getCsvWriter());
        }
        if (!outcomeIdProps.isEmpty()) {
            getContext().storeBinary(WekaOutcomeIDReport.ID_OUTCOME_KEY, new PropertiesAdapter(outcomeIdProps));
        }
    }

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}

From source file:de.tudarmstadt.ukp.dkpro.tc.ml.report.BatchTrainTestReport.java

/**
 * Aggregates results of all test-task subcontexts into a performance overview
 * table (CSV and, when column count permits, Excel), and — when run inside a
 * cross-validation experiment — an aggregated confusion matrix and outcome-id
 * report.
 *
 * @throws Exception if storage access or report writing fails
 */
@Override
public void execute() throws Exception {
    StorageService store = getContext().getStorageService();

    FlexTable<String> table = FlexTable.forClass(String.class);

    Map<String, List<Double>> key2resultValues = new HashMap<String, List<Double>>();
    Map<List<String>, Double> confMatrixMap = new HashMap<List<String>, Double>();

    Properties outcomeIdProps = new Properties();

    for (TaskContextMetadata subcontext : getSubtasks()) {
        // FIXME this is a bad hack
        if (subcontext.getType().contains("TestTask")) {
            try {
                outcomeIdProps.putAll(store
                        .retrieveBinary(subcontext.getId(), Constants.ID_OUTCOME_KEY, new PropertiesAdapter())
                        .getMap());
            } catch (Exception ignored) {
                // silently ignore if this file was not generated
            }

            Map<String, String> discriminatorsMap = store
                    .retrieveBinary(subcontext.getId(), Task.DISCRIMINATORS_KEY, new PropertiesAdapter())
                    .getMap();
            Map<String, String> resultMap = store
                    .retrieveBinary(subcontext.getId(), Constants.RESULTS_FILENAME, new PropertiesAdapter())
                    .getMap();

            File confMatrix = store.getStorageFolder(subcontext.getId(), CONFUSIONMATRIX_KEY);

            if (confMatrix.isFile()) {
                confMatrixMap = ReportUtils.updateAggregateMatrix(confMatrixMap, confMatrix);
            } else {
                // getStorageFolder may have created an empty folder; clean it up.
                confMatrix.delete();
            }

            String key = getKey(discriminatorsMap);

            // NOTE(review): this list is registered but never filled here —
            // presumably populated elsewhere; verify before removing.
            List<Double> results = key2resultValues.get(key);
            if (results == null) {
                results = new ArrayList<Double>();
            }
            key2resultValues.put(key, results);

            Map<String, String> values = new HashMap<String, String>();
            Map<String, String> cleanedDiscriminatorsMap = new HashMap<String, String>();

            for (Map.Entry<String, String> disc : discriminatorsMap.entrySet()) {
                if (!ReportUtils.containsExcludePattern(disc.getKey(), discriminatorsToExclude)) {
                    cleanedDiscriminatorsMap.put(disc.getKey(), disc.getValue());
                }
            }
            values.putAll(cleanedDiscriminatorsMap);
            values.putAll(resultMap);

            table.addRow(subcontext.getLabel(), values);
        }
    }

    getContext().getLoggingService().message(getContextLabel(), ReportUtils.getPerformanceOverview(table));
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + "_compact" + SUFFIX_CSV, table.getCsvWriter());
    table.setCompact(false);
    // Excel cannot cope with more than 255 columns
    if (table.getColumnIds().length <= 255) {
        getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_EXCEL, table.getExcelWriter());
    }
    getContext().storeBinary(EVAL_FILE_NAME + SUFFIX_CSV, table.getCsvWriter());

    // this report is reused in CV, and we only want to aggregate confusion matrices from folds
    // in CV, and an aggregated OutcomeIdReport
    if (getContext().getId().startsWith(ExperimentCrossValidation.class.getSimpleName())) {
        // no confusion matrix for regression
        if (!confMatrixMap.isEmpty()) {
            FlexTable<String> confMatrix = ReportUtils.createOverallConfusionMatrix(confMatrixMap);
            getContext().storeBinary(CONFUSIONMATRIX_KEY, confMatrix.getCsvWriter());
        }
        if (!outcomeIdProps.isEmpty()) {
            getContext().storeBinary(Constants.ID_OUTCOME_KEY, new PropertiesAdapter(outcomeIdProps));
        }
    }

    // output the location of the batch evaluation folder
    // otherwise it might be hard for novice users to locate this
    File dummyFolder = store.getStorageFolder(getContext().getId(), "dummy");
    // TODO can we also do this without creating and deleting the dummy folder?
    getContext().getLoggingService().message(getContextLabel(),
            "Storing detailed results in:\n" + dummyFolder.getParent() + "\n");
    dummyFolder.delete();
}

From source file:org.webdavaccess.servlet.WebdavServlet.java

/**
 * Propfind helper method. Writes one WebDAV {@code <response>} element for the
 * resource at {@code path} into the generated XML, according to the Propfind
 * type requested.
 *
 * @param req
 *            The servlet request
 * @param generatedXML
 *            XML response to the Propfind request
 * @param path
 *            Path of the current resource
 * @param type
 *            Propfind type (FIND_ALL_PROP, FIND_PROPERTY_NAMES or
 *            FIND_BY_PROPERTY)
 * @param propertiesVector
 *            If the propfind type is find properties by name, then this
 *            Vector contains those properties
 * @throws WebdavException if the underlying store cannot be read
 */
private void parseProperties(HttpServletRequest req, XMLWriter generatedXML, String path, int type,
        Vector propertiesVector) throws WebdavException {

    String creationdate = getISOCreationDate(fStore.getCreationDate(path).getTime());
    boolean isFolder = fStore.isFolder(path);
    SimpleDateFormat formatter = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z", Locale.US);
    formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
    String lastModified = formatter.format(fStore.getLastModified(path));
    String resourceLength = String.valueOf(fStore.getResourceLength(path));

    generatedXML.writeElement(null, "response", XMLWriter.OPENING);
    String status = "HTTP/1.1 " + WebdavStatus.SC_OK + " " + WebdavStatus.getStatusText(WebdavStatus.SC_OK);

    // Generating href element
    generatedXML.writeElement(null, "href", XMLWriter.OPENING);

    String href = req.getContextPath();
    if ((href.endsWith("/")) && (path.startsWith("/")))
        href += path.substring(1);
    else
        href += path;
    // Folder hrefs must end with a slash.
    if ((isFolder) && (!href.endsWith("/")))
        href += "/";

    generatedXML.writeText(rewriteUrl(href));

    generatedXML.writeElement(null, "href", XMLWriter.CLOSING);

    // Display name is the last path segment.
    String resourceName = path;
    int lastSlash = path.lastIndexOf('/');
    if (lastSlash != -1)
        resourceName = resourceName.substring(lastSlash + 1);

    Properties customProperties;
    Enumeration en;
    switch (type) {

    case FIND_ALL_PROP:

        generatedXML.writeElement(null, "propstat", XMLWriter.OPENING);
        generatedXML.writeElement(null, "prop", XMLWriter.OPENING);

        generatedXML.writeProperty(null, "creationdate", creationdate);
        generatedXML.writeElement(null, "displayname", XMLWriter.OPENING);
        generatedXML.writeData(resourceName);
        generatedXML.writeElement(null, "displayname", XMLWriter.CLOSING);
        if (!isFolder) {
            generatedXML.writeProperty(null, "getlastmodified", lastModified);
            generatedXML.writeProperty(null, "getcontentlength", resourceLength);
            String contentType = getServletContext().getMimeType(path);
            if (contentType != null) {
                generatedXML.writeProperty(null, "getcontenttype", contentType);
            }
            generatedXML.writeProperty(null, "getetag", getETag(path, resourceLength, lastModified));
            generatedXML.writeElement(null, "resourcetype", XMLWriter.NO_CONTENT);
        } else {
            generatedXML.writeElement(null, "resourcetype", XMLWriter.OPENING);
            generatedXML.writeElement(null, "collection", XMLWriter.NO_CONTENT);
            generatedXML.writeElement(null, "resourcetype", XMLWriter.CLOSING);
        }

        // Get custom properties
        customProperties = fStore.getCustomProperties(path);
        if (customProperties != null && customProperties.size() > 0) {
            en = customProperties.keys();
            while (en.hasMoreElements()) {
                String key = (String) en.nextElement();
                generatedXML.writeProperty(null, key, customProperties.getProperty(key));
            }
        }

        generatedXML.writeProperty(null, "source", "");
        generatedXML.writeElement(null, "prop", XMLWriter.CLOSING);
        generatedXML.writeElement(null, "status", XMLWriter.OPENING);
        generatedXML.writeText(status);
        generatedXML.writeElement(null, "status", XMLWriter.CLOSING);
        generatedXML.writeElement(null, "propstat", XMLWriter.CLOSING);

        break;

    case FIND_PROPERTY_NAMES:

        generatedXML.writeElement(null, "propstat", XMLWriter.OPENING);
        generatedXML.writeElement(null, "prop", XMLWriter.OPENING);

        generatedXML.writeElement(null, "creationdate", XMLWriter.NO_CONTENT);
        generatedXML.writeElement(null, "displayname", XMLWriter.NO_CONTENT);
        if (!isFolder) {
            generatedXML.writeElement(null, "getcontentlanguage", XMLWriter.NO_CONTENT);
            generatedXML.writeElement(null, "getcontentlength", XMLWriter.NO_CONTENT);
            generatedXML.writeElement(null, "getcontenttype", XMLWriter.NO_CONTENT);
            generatedXML.writeElement(null, "getetag", XMLWriter.NO_CONTENT);
            generatedXML.writeElement(null, "getlastmodified", XMLWriter.NO_CONTENT);
        }
        generatedXML.writeElement(null, "resourcetype", XMLWriter.NO_CONTENT);
        generatedXML.writeElement(null, "source", XMLWriter.NO_CONTENT);
        generatedXML.writeElement(null, "lockdiscovery", XMLWriter.NO_CONTENT);

        // Get custom properties (the original fetched them twice here).
        customProperties = fStore.getCustomProperties(path);
        if (customProperties != null && customProperties.size() > 0) {
            en = customProperties.keys();
            while (en.hasMoreElements()) {
                String key = (String) en.nextElement();
                generatedXML.writeElement(null, key, XMLWriter.NO_CONTENT);
            }
        }

        generatedXML.writeElement(null, "prop", XMLWriter.CLOSING);
        generatedXML.writeElement(null, "status", XMLWriter.OPENING);
        generatedXML.writeText(status);
        generatedXML.writeElement(null, "status", XMLWriter.CLOSING);
        generatedXML.writeElement(null, "propstat", XMLWriter.CLOSING);

        break;

    case FIND_BY_PROPERTY:

        Vector propertiesNotFound = new Vector();

        // Parse the list of properties

        generatedXML.writeElement(null, "propstat", XMLWriter.OPENING);
        generatedXML.writeElement(null, "prop", XMLWriter.OPENING);

        Enumeration properties = propertiesVector.elements();

        customProperties = fStore.getCustomProperties(path);

        while (properties.hasMoreElements()) {

            String property = (String) properties.nextElement();

            if (property.equals("creationdate")) {
                generatedXML.writeProperty(null, "creationdate", creationdate);
            } else if (property.equals("displayname")) {
                generatedXML.writeElement(null, "displayname", XMLWriter.OPENING);
                generatedXML.writeData(resourceName);
                generatedXML.writeElement(null, "displayname", XMLWriter.CLOSING);
            } else if (property.equals("getcontentlanguage")) {
                if (isFolder) {
                    propertiesNotFound.addElement(property);
                } else {
                    generatedXML.writeElement(null, "getcontentlanguage", XMLWriter.NO_CONTENT);
                }
            } else if (property.equals("getcontentlength")) {
                if (isFolder) {
                    propertiesNotFound.addElement(property);
                } else {
                    generatedXML.writeProperty(null, "getcontentlength", resourceLength);
                }
            } else if (property.equals("getcontenttype")) {
                if (isFolder) {
                    propertiesNotFound.addElement(property);
                } else {
                    generatedXML.writeProperty(null, "getcontenttype", getServletContext().getMimeType(path));
                }
            } else if (property.equals("getetag")) {
                if (isFolder) {
                    propertiesNotFound.addElement(property);
                } else {
                    generatedXML.writeProperty(null, "getetag", getETag(path, resourceLength, lastModified));
                }
            } else if (property.equals("getlastmodified")) {
                if (isFolder) {
                    propertiesNotFound.addElement(property);
                } else {
                    generatedXML.writeProperty(null, "getlastmodified", lastModified);
                }
            } else if (property.equals("resourcetype")) {
                if (isFolder) {
                    generatedXML.writeElement(null, "resourcetype", XMLWriter.OPENING);
                    generatedXML.writeElement(null, "collection", XMLWriter.NO_CONTENT);
                    generatedXML.writeElement(null, "resourcetype", XMLWriter.CLOSING);
                } else {
                    generatedXML.writeElement(null, "resourcetype", XMLWriter.NO_CONTENT);
                }
            } else if (property.equals("source")) {
                generatedXML.writeProperty(null, "source", "");
            } else if (customProperties != null && customProperties.containsKey(property)) {
                generatedXML.writeProperty(null, property, customProperties.getProperty(property));
            } else {
                propertiesNotFound.addElement(property);
            }

        }

        generatedXML.writeElement(null, "prop", XMLWriter.CLOSING);
        generatedXML.writeElement(null, "status", XMLWriter.OPENING);
        generatedXML.writeText(status);
        generatedXML.writeElement(null, "status", XMLWriter.CLOSING);
        generatedXML.writeElement(null, "propstat", XMLWriter.CLOSING);

        Enumeration propertiesNotFoundList = propertiesNotFound.elements();

        // Requested-but-missing properties get a second propstat with 404.
        if (propertiesNotFoundList.hasMoreElements()) {

            status = "HTTP/1.1 " + WebdavStatus.SC_NOT_FOUND + " "
                    + WebdavStatus.getStatusText(WebdavStatus.SC_NOT_FOUND);

            generatedXML.writeElement(null, "propstat", XMLWriter.OPENING);
            generatedXML.writeElement(null, "prop", XMLWriter.OPENING);

            while (propertiesNotFoundList.hasMoreElements()) {
                generatedXML.writeElement(null, (String) propertiesNotFoundList.nextElement(),
                        XMLWriter.NO_CONTENT);
            }

            generatedXML.writeElement(null, "prop", XMLWriter.CLOSING);
            generatedXML.writeElement(null, "status", XMLWriter.OPENING);
            generatedXML.writeText(status);
            generatedXML.writeElement(null, "status", XMLWriter.CLOSING);
            generatedXML.writeElement(null, "propstat", XMLWriter.CLOSING);

        }

        break;

    }

    generatedXML.writeElement(null, "response", XMLWriter.CLOSING);

}

From source file:org.fireflow.client.impl.WorkflowStatementLocalImpl.java

/**
 * Sets a process variable in the given scope, creating it if it does not yet
 * exist.
 * <p>
 * When the process definition declares a data type for the variable, the
 * value is validated against it: Java-namespace types must match the declared
 * class (and must not be DOM documents), while XML types must be a
 * {@code org.w3c.dom.Document} or {@code org.dom4j.Document}.
 *
 * @param scope the scope (process instance/element) that owns the variable
 * @param name the variable name
 * @param value the new payload; may be null
 * @param headers optional headers merged into the variable's header map
 * @throws InvalidOperationException if the model cannot be resolved or the
 *             value does not match the declared data type
 */
public void setVariableValue(Scope scope, String name, Object value, Properties headers)
        throws InvalidOperationException {
    RuntimeContext ctx = this.session.getRuntimeContext();

    // Look up the variable's declared property (name and data type) in the
    // process definition.
    ProcessUtil processUtil = ctx.getEngineModule(ProcessUtil.class, scope.getProcessType());

    Property property = null;
    try {
        Object wfDefElm = this.getWorkflowDefinitionElement(scope);
        property = processUtil.getProperty(wfDefElm, name);
    } catch (InvalidModelException e) {
        throw new InvalidOperationException(e);
    }

    // Validate the value against the declared data type, if any.
    if (property != null && property.getDataType() != null && value != null) {
        QName qName = property.getDataType();
        // java
        if (qName.getNamespaceURI().endsWith(NameSpaces.JAVA.getUri())) {
            String className = qName.getLocalPart();

            if (value instanceof org.w3c.dom.Document || value instanceof org.dom4j.Document) {
                throw new ClassCastException("Can NOT cast from DOM to " + className);
            }

            try {
                if (!JavaDataTypeConvertor.isTypeValueMatch(className, value)) {
                    throw new InvalidOperationException("Value type does not match declared data type: "
                            + className + "," + value.getClass().getName());
                }
            } catch (ClassNotFoundException e) {
                throw new InvalidOperationException(e);
            }

        }
        // xml
        else {
            if (!(value instanceof org.w3c.dom.Document) && !(value instanceof org.dom4j.Document)) {
                throw new ClassCastException(
                        "Can NOT cast from " + value.getClass().getName() + " to " + qName);
            }

        }
    }
    PersistenceService persistenceService = ctx.getEngineModule(PersistenceService.class, this.processType);
    VariablePersister variablePersister = persistenceService.getVariablePersister();
    Variable v = variablePersister.findVariable(scope.getScopeId(), name);
    if (v != null) {
        // Existing variable: update its payload and merge any headers.
        ((AbsVariable) v).setPayload(value);
        if (headers != null && !headers.isEmpty()) {
            v.getHeaders().putAll(headers);
        }
        variablePersister.saveOrUpdate(v);
    } else {
        // New variable: populate scope metadata and infer the data type from
        // the declared property or, failing that, from the runtime class.
        AbsVariable newVar = new VariableImpl();
        newVar.setScopeId(scope.getScopeId());
        newVar.setName(name);
        newVar.setProcessElementId(scope.getProcessElementId());
        newVar.setPayload(value);
        if (value != null) {
            if (value instanceof org.w3c.dom.Document) {
                if (property != null && property.getDataType() != null) {
                    newVar.setDataType(property.getDataType());
                }
                newVar.getHeaders().put(Variable.HEADER_KEY_CLASS_NAME, "org.w3c.dom.Document");
            } else if (value instanceof org.dom4j.Document) {
                if (property != null && property.getDataType() != null) {
                    newVar.setDataType(property.getDataType());
                }
                newVar.getHeaders().put(Variable.HEADER_KEY_CLASS_NAME, "org.dom4j.Document");
            } else {
                newVar.setDataType(new QName(NameSpaces.JAVA.getUri(), value.getClass().getName()));
            }

        }
        newVar.setProcessId(scope.getProcessId());
        newVar.setVersion(scope.getVersion());
        newVar.setProcessType(scope.getProcessType());

        if (headers != null && !headers.isEmpty()) {
            newVar.getHeaders().putAll(headers);
        }
        variablePersister.saveOrUpdate(newVar);
    }
}

From source file:org.sakaiproject.message.impl.BaseMessageService.java

/**
 * try to add synoptic options for this tool to the archive, if they exist
 * @param siteId/*from w  ww  . j  a v  a  2s  .  com*/
 * @param doc
 * @param element
 */
public void archiveSynopticOptions(String siteId, Document doc, Element element) {
    try {
        // archive the synoptic tool options
        Site site = m_siteService.getSite(siteId);
        ToolConfiguration synTool = site.getToolForCommonId("sakai.synoptic." + getLabel());
        Properties synProp = synTool.getPlacementConfig();
        if (synProp != null && synProp.size() > 0) {
            Element synElement = doc.createElement(SYNOPTIC_TOOL);
            Element synProps = doc.createElement(PROPERTIES);

            Set synPropSet = synProp.keySet();
            Iterator propIter = synPropSet.iterator();
            while (propIter.hasNext()) {
                String propName = (String) propIter.next();
                Element synPropEl = doc.createElement(PROPERTY);
                synPropEl.setAttribute(NAME, propName);
                synPropEl.setAttribute(VALUE, synProp.getProperty(propName));
                synProps.appendChild(synPropEl);
            }

            synElement.appendChild(synProps);
            element.appendChild(synElement);
        }
    } catch (Exception e) {
        M_log.warn("archive: exception archiving synoptic options for service: " + serviceName());
    }
}

From source file:org.alfresco.reporting.processor.PersonProcessor.java

/**
 * Exports all cm:person nodes from the repository into the given reporting table.
 * Repeatedly queries Lucene ordered by sys:node-dbid, paging past the highest
 * dbid seen so far, until no more results are returned. Each cycle first builds
 * the table definition from the person properties, then writes one row per node.
 *
 * @param tableName name of the reporting table to (re)create and fill;
 *                  sanitized via {@code dbhb.fixTableColumnName}
 * @throws Exception if preparing or updating the reporting database fails
 */
public void processPersons(String tableName) throws Exception {
    logger.debug("Enter processPerson");

    // make sure we have a connection
    dbhb.openReportingConnection();

    try {
        tableName = dbhb.fixTableColumnName(tableName);

        dbhb.createEmptyTables(tableName);
        ReportLine rl = new ReportLine(tableName, getSimpleDateFormat(), reportingHelper);
        Properties definition = new Properties(); // set of propname-proptype

        long highestDbId = 0;
        boolean continueSearchCycle = true;
        String query = "+TYPE:\"cm:person\"";
        ResultSet rs = null;

        while (continueSearchCycle) {
            try { // make sure to have a finally to close the result set
                if (logger.isDebugEnabled())
                    logger.debug("processPerson: classToColumnType=" + getClassToColumnType());
                SearchParameters sp = new SearchParameters();
                // page through the repository: only fetch nodes beyond the last dbid processed
                String fullQuery = query + " +@sys\\:node-dbid:[" + highestDbId + " TO MAX]";
                if (logger.isDebugEnabled())
                    logger.debug("processPerson: query=" + fullQuery);
                sp.setLanguage(SearchService.LANGUAGE_LUCENE);
                sp.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
                sp.addSort("@{http://www.alfresco.org/model/system/1.0}node-dbid", true);
                sp.setQuery(fullQuery);
                if (logger.isDebugEnabled())
                    logger.debug("processPerson: Before searchService");
                rs = getSearchService().query(sp);
                if (logger.isDebugEnabled())
                    logger.debug("processPerson: Found results=" + rs.length());
                if (rs.length() == 0) {
                    // no more search results: export is complete
                    continueSearchCycle = false;
                    if (logger.isDebugEnabled())
                        logger.debug("processPerson: Break fired!");
                    break;
                }

                // First pass: accumulate the table definition (column name -> column type)
                Iterator<ResultSetRow> rsi = rs.iterator();
                while (rsi.hasNext()) {
                    ResultSetRow rsr = rsi.next();
                    definition = processPropertyDefinitions(definition, rsr.getNodeRef(),
                            ",cm_homeFolder,cm_homeFolderProvider" + getBlacklist());
                    definition.setProperty("noderef", getClassToColumnType().getProperty("noderef", "-"));
                    definition.setProperty("account_enabled",
                            getClassToColumnType().getProperty("boolean", "-"));
                    definition.setProperty("account_expires",
                            getClassToColumnType().getProperty("boolean", "-"));
                    definition.setProperty("account_expirydate",
                            getClassToColumnType().getProperty("datetime", "-"));
                    definition.setProperty("account_locked",
                            getClassToColumnType().getProperty("boolean", "-"));
                    definition.setProperty(Constants.COLUMN_ZONES,
                            getClassToColumnType().getProperty("zones", "-"));

                    if (logger.isDebugEnabled())
                        logger.debug("Processing person with dbid=" + getNodeService()
                                .getProperty(rsr.getNodeRef(), ReportingModel.PROP_SYSTEM_NODE_DBID));

                    // remember the highest dbid so the next search cycle starts after it
                    highestDbId = (Long) getNodeService().getProperty(rsr.getNodeRef(),
                            ReportingModel.PROP_SYSTEM_NODE_DBID) + 1;

                    if (logger.isDebugEnabled())
                        logger.debug("## Table def = " + definition);
                }

                if (logger.isDebugEnabled())
                    logger.debug("processPerson: Before setTableDefinition size=" + definition.size());
                setTableDefinition(tableName, definition);

                // Second pass: write one reporting row per person node
                rsi = rs.iterator();
                while (rsi.hasNext()) {
                    ResultSetRow rsr = rsi.next();
                    rl.reset();

                    rl = processNodeToMap(rsr.getNodeRef().toString(), tableName, rl);

                    if (dbhb.rowExists(rl)) {
                        dbhb.updateIntoTable(rl);
                    } else {
                        dbhb.insertIntoTable(rl);
                    } // end if/else
                } // end while
            } catch (Exception e) {
                // log and continue with the next search cycle rather than aborting the export
                logger.error("processPerson: exception during search cycle", e);
            } finally {
                if (rs != null) {
                    rs.close();
                }
            }

        } // end while continueSearchCycle

    } catch (Exception e) {
        logger.fatal("Exception processPersson: " + e.getMessage(), e);
        throw new Exception(e);
    } finally {
        // make sure we gently close the connection
        dbhb.closeReportingConnection();
    }

    if (logger.isDebugEnabled())
        logger.debug("Exit processPerson");
}

From source file:org.alfresco.reporting.script.AlfrescoReporting.java

/**
 * Exports all cm:person nodes into the given reporting table: (re)creates the
 * table, derives its column definition from the person properties, then writes
 * one row per person including account state and authority zones. Search
 * results are paged by sys:node-dbid until no more results are found.
 *
 * @param tableName name of the reporting table; spaces and dashes are
 *                  replaced by underscores before use
 */
public void processPerson(String tableName) {
    // sanitize the table name for use as a SQL identifier
    tableName = tableName.replaceAll(" ", "_").trim();
    tableName = tableName.replaceAll("-", "_").trim();
    logger.debug("Enter processPerson");
    dbhb.createEmptyTables(tableName);
    ReportLine rl = new ReportLine(tableName);
    Statement stmt = null;
    Properties definition = new Properties(); // set of propname-proptype
    Properties replacementTypes = getReplacementDataType();

    try {
        long highestDbId = 0;
        Connection conn = dbhb.getConnection();
        conn.setAutoCommit(true);
        stmt = conn.createStatement();
        boolean continueSearchCycle = true;
        String query = "+TYPE:\"cm:person\"";
        //setTable(tableName);
        ResultSet rs = null;

        // page through the repository ordered by node-dbid; each cycle only
        // fetches nodes beyond the highest dbid processed so far
        while (continueSearchCycle) {
            //continueSearchCycle=false;
            try { // make sure to have a finally to close the result set)
                SearchParameters sp = new SearchParameters();
                String fullQuery = query + " +@sys\\:node-dbid:[" + highestDbId + " TO MAX]";
                logger.debug("processPerson: query=" + fullQuery);
                sp.setLanguage(SearchService.LANGUAGE_LUCENE);
                sp.addStore(StoreRef.STORE_REF_WORKSPACE_SPACESSTORE);
                //sp.addSort("@" + ReportingModel.PROP_SYSTEM_NODE_DBID.toString(), true);
                sp.addSort("@{http://www.alfresco.org/model/system/1.0}node-dbid", true);
                sp.setQuery(fullQuery);
                logger.debug("processPerson: Before searchService");
                rs = searchService.query(sp);
                logger.debug("processPerson: Found results=" + rs.length());
                if (rs.length() == 0) {
                    continueSearchCycle = false;
                    logger.debug("processPerson: Break fired!");
                    break; // we're done, no more search results
                }
                if (continueSearchCycle) {
                    // First pass: accumulate the table definition
                    // (column name -> column type) across all results
                    Iterator<ResultSetRow> rsi = rs.iterator();
                    while (rsi.hasNext()) {
                        ResultSetRow rsr = rsi.next();
                        definition = processPropertyDefinitions(definition, rsr.getNodeRef(),
                                ",cm_homeFolder,cm_homeFolderProvider" + getBlacklist());
                        // fixed extra columns not derived from node properties
                        definition.setProperty("noderef", getClassToColumnType().getProperty("noderef", "-"));
                        definition.setProperty("account_enabled",
                                getClassToColumnType().getProperty("boolean", "-"));
                        definition.setProperty("account_expires",
                                getClassToColumnType().getProperty("boolean", "-"));
                        definition.setProperty("account_expirydate",
                                getClassToColumnType().getProperty("datetime", "-"));
                        definition.setProperty("account_locked",
                                getClassToColumnType().getProperty("boolean", "-"));
                        definition.setProperty("zone", getClassToColumnType().getProperty("zone", "-"));

                        logger.debug("Procesing person with dbid=" + nodeService.getProperty(rsr.getNodeRef(),
                                ReportingModel.PROP_SYSTEM_NODE_DBID));
                        // remember the highest dbid so the next cycle starts past it
                        highestDbId = (Long) nodeService.getProperty(rsr.getNodeRef(),
                                ReportingModel.PROP_SYSTEM_NODE_DBID) + 1;
                    }

                    logger.debug("processPerson: Before setTableDefinition size=" + definition.size());
                    setTableDefinition(definition, tableName);

                    // Second pass: write one row per person node
                    rsi = rs.iterator();
                    while (rsi.hasNext()) {
                        ResultSetRow rsr = rsi.next();
                        rl.reset();

                        //logger.debug("processPerson: Before processProperties");
                        // best-effort: a single bad node should not abort the export
                        try {
                            rl = processPropertyValues(rl, rsr.getNodeRef(),
                                    ",cm_homeFolder,cm_homeFolderProvider" + getBlacklist());
                        } catch (Exception e) {
                            //logger.error("processUpdate: That is weird, rl.setLine(noderef) crashed! " + rsr.getNodeRef());
                            e.printStackTrace();
                        }

                        //logger.debug("processPerson: Before noderef" );
                        try {
                            rl.setLine("noderef", getClassToColumnType().getProperty("noderef"),
                                    rsr.getNodeRef().toString(), replacementTypes);
                        } catch (Exception e) {
                            logger.error("processPerson: That is weird, rl.setLine(noderef) crashed! "
                                    + rsr.getNodeRef());
                            e.printStackTrace();
                        }

                        //logger.debug("processPerson: Before enabled" );
                        // collect account state and authority zones for this user
                        try {
                            String username = (String) nodeService.getProperty(rsr.getNodeRef(),
                                    ContentModel.PROP_USERNAME);
                            String account_expires = null;
                            String account_expirydate = null;
                            String account_locked = null;
                            String enabled = null;

                            username = (String) nodeService.getProperty(rsr.getNodeRef(),
                                    ContentModel.PROP_USERNAME);
                            account_expires = (String) nodeService.getProperty(rsr.getNodeRef(),
                                    ContentModel.PROP_ACCOUNT_EXPIRES);
                            account_expirydate = (String) nodeService.getProperty(rsr.getNodeRef(),
                                    ContentModel.PROP_ACCOUNT_EXPIRY_DATE);
                            account_locked = (String) nodeService.getProperty(rsr.getNodeRef(),
                                    ContentModel.PROP_ACCOUNT_LOCKED);
                            Set<String> zones = authorityService.getAuthorityZones(username);
                            if (serviceRegistry.getAuthenticationService().getAuthenticationEnabled(username)) {
                                enabled = "true";
                            } else {
                                enabled = "false";
                            }

                            //logger.debug("processPerson: Setting user " + username + " is enabled="+ enabled);
                            rl.setLine("account_enabled", getClassToColumnType().getProperty("boolean"),
                                    enabled.toString(), replacementTypes);
                            rl.setLine("account_expires", getClassToColumnType().getProperty("boolean"),
                                    account_expires, replacementTypes);
                            rl.setLine("account_expirydate", getClassToColumnType().getProperty("datetime"),
                                    account_expirydate, replacementTypes);
                            rl.setLine("account_locked", getClassToColumnType().getProperty("boolean"),
                                    account_locked, replacementTypes);
                            rl.setLine("zone", getClassToColumnType().getProperty("zone"),
                                    Utils.setToString(zones), replacementTypes);
                        } catch (Exception e) {
                            logger.error("processPerson: That is weird, rl.setLine(noderef) crashed! "
                                    + rsr.getNodeRef());
                            e.printStackTrace();
                        }

                        // insert or update depending on whether the row already exists
                        int numberOfRows = 0;
                        if (dbhb.rowExists(stmt, rl)) {
                            numberOfRows = dbhb.updateIntoTable(stmt, rl);
                            //logger.debug(numberOfRows+ " rows updated");
                        } else {
                            numberOfRows = dbhb.insertIntoTable(stmt, rl);
                            //logger.debug(numberOfRows+ " rows inserted");

                        } // end if/else
                    } // end while
                } // end if !continueSearchCycle
            } catch (Exception e) {
                e.printStackTrace();
            } finally {
                if (rs != null) {
                    rs.close();
                }
            }

        } // end while continueSearchCycle

    } catch (Exception e) {
        logger.fatal("1#############################################");
        e.printStackTrace();
    } finally {
        rl.reset();
        try {
            if (stmt != null)
                stmt.close();
        } catch (SQLException se2) {
            logger.fatal("2#############################################");
        } // nothing we can do
    }
    logger.debug("Exit processPerson");
}

From source file:gobblin.data.management.conversion.hive.query.HiveAvroORCQueryGenerator.java

/***
 * Generate DDL query to create a different format (default: ORC) Hive table for a given Avro Schema
 * @param schema Avro schema to use to generate the DDL for new Hive table
 * @param tblName New Hive table name
 * @param tblLocation New hive table location
 * @param optionalDbName Optional DB name, if not specified it defaults to 'default'
 * @param optionalPartitionDDLInfo Optional partition info in form of map of partition key, partition type pair
 *                                 If not specified, the table is assumed to be un-partitioned ie of type snapshot
 * @param optionalClusterInfo Optional cluster info
 * @param optionalSortOrderInfo Optional sort order
 * @param optionalNumOfBuckets Optional number of buckets
 * @param optionalRowFormatSerde Optional row format serde, default is ORC
 * @param optionalInputFormat Optional input format serde, default is ORC
 * @param optionalOutputFormat Optional output format serde, default is ORC
 * @param tableProperties Optional table properties
 * @param isEvolutionEnabled If schema evolution is turned on
 * @param destinationTableMeta Optional destination table metadata
 * @param hiveColumns map populated with the column name / type pairs used in the DDL
 * @return Generated DDL query to create new Hive table
 */
public static String generateCreateTableDDL(Schema schema, String tblName, String tblLocation,
        Optional<String> optionalDbName, Optional<Map<String, String>> optionalPartitionDDLInfo,
        Optional<List<String>> optionalClusterInfo,
        Optional<Map<String, COLUMN_SORT_ORDER>> optionalSortOrderInfo, Optional<Integer> optionalNumOfBuckets,
        Optional<String> optionalRowFormatSerde, Optional<String> optionalInputFormat,
        Optional<String> optionalOutputFormat, Properties tableProperties, boolean isEvolutionEnabled,
        Optional<Table> destinationTableMeta, Map<String, String> hiveColumns) {

    Preconditions.checkNotNull(schema);
    Preconditions.checkArgument(StringUtils.isNotBlank(tblName));
    Preconditions.checkArgument(StringUtils.isNotBlank(tblLocation));

    String dbName = optionalDbName.isPresent() ? optionalDbName.get() : DEFAULT_DB_NAME;
    String rowFormatSerde = optionalRowFormatSerde.isPresent() ? optionalRowFormatSerde.get()
            : DEFAULT_ROW_FORMAT_SERDE;
    String inputFormat = optionalInputFormat.isPresent() ? optionalInputFormat.get() : DEFAULT_ORC_INPUT_FORMAT;
    String outputFormat = optionalOutputFormat.isPresent() ? optionalOutputFormat.get()
            : DEFAULT_ORC_OUTPUT_FORMAT;
    tableProperties = getTableProperties(tableProperties);

    // Start building Hive DDL
    // Refer to Hive DDL manual for explanation of clauses:
    // https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL#LanguageManualDDL-Create/Drop/TruncateTable
    StringBuilder ddl = new StringBuilder();

    // Create statement
    ddl.append(String.format("CREATE EXTERNAL TABLE IF NOT EXISTS `%s`.`%s` ", dbName, tblName));
    // .. open bracket for CREATE
    ddl.append("( \n");

    // 1. If evolution is enabled, and destination table does not exists
    //    .. use columns from new schema
    //    (evolution does not matter if its new destination table)
    // 2. If evolution is enabled, and destination table does exists
    //    .. use columns from new schema
    //    (alter table will be used before moving data from staging to final table)
    // 3. If evolution is disabled, and destination table does not exists
    //    .. use columns from new schema
    //    (evolution does not matter if its new destination table)
    // 4. If evolution is disabled, and destination table does exists
    //    .. use columns from destination schema
    if (isEvolutionEnabled || !destinationTableMeta.isPresent()) {
        log.info("Generating DDL using source schema");
        ddl.append(generateAvroToHiveColumnMapping(schema, Optional.of(hiveColumns), true));
    } else {
        log.info("Generating DDL using destination schema");
        ddl.append(
                generateDestinationToHiveColumnMapping(Optional.of(hiveColumns), destinationTableMeta.get()));
    }

    // .. close bracket for CREATE
    ddl.append(") \n");

    appendPartitionClause(ddl, optionalPartitionDDLInfo);
    appendClusteringClause(ddl, optionalClusterInfo, optionalSortOrderInfo, optionalNumOfBuckets, hiveColumns);

    // Field Terminal
    ddl.append("ROW FORMAT SERDE \n");
    ddl.append(String.format("  '%s' %n", rowFormatSerde));

    // Stored as ORC
    ddl.append("STORED AS INPUTFORMAT \n");
    ddl.append(String.format("  '%s' %n", inputFormat));
    ddl.append("OUTPUTFORMAT \n");
    ddl.append(String.format("  '%s' %n", outputFormat));

    // Location
    ddl.append("LOCATION \n");
    ddl.append(String.format("  '%s' %n", tblLocation));

    appendTablePropertiesClause(ddl, tableProperties);

    return ddl.toString();
}

/**
 * Appends the PARTITIONED BY clause when partition info is present and non-empty.
 */
private static void appendPartitionClause(StringBuilder ddl,
        Optional<Map<String, String>> optionalPartitionDDLInfo) {
    if (!optionalPartitionDDLInfo.isPresent() || optionalPartitionDDLInfo.get().size() == 0) {
        return;
    }
    ddl.append("PARTITIONED BY ( ");
    boolean isFirst = true;
    Map<String, String> partitionInfoMap = optionalPartitionDDLInfo.get();
    for (Map.Entry<String, String> partitionInfo : partitionInfoMap.entrySet()) {
        if (isFirst) {
            isFirst = false;
        } else {
            ddl.append(", ");
        }
        ddl.append(String.format("`%s` %s", partitionInfo.getKey(), partitionInfo.getValue()));
    }
    ddl.append(" ) \n");
}

/**
 * Appends the CLUSTERED BY (and optional SORTED BY) clause. Validates that
 * bucketing is fully specified and that every referenced column exists in the
 * generated schema.
 *
 * @throws IllegalArgumentException if CLUSTERED BY lacks NUM_BUCKETS, if SORTED BY
 *         is requested without CLUSTERED BY, or if a referenced column is unknown
 */
private static void appendClusteringClause(StringBuilder ddl, Optional<List<String>> optionalClusterInfo,
        Optional<Map<String, COLUMN_SORT_ORDER>> optionalSortOrderInfo, Optional<Integer> optionalNumOfBuckets,
        Map<String, String> hiveColumns) {
    if (!optionalClusterInfo.isPresent()) {
        // SORTED BY is only valid in combination with CLUSTERED BY
        if (optionalSortOrderInfo.isPresent()) {
            throw new IllegalArgumentException("SORTED BY requested, but no CLUSTERED BY specified");
        }
        return;
    }
    if (!optionalNumOfBuckets.isPresent()) {
        throw new IllegalArgumentException(("CLUSTERED BY requested, but no NUM_BUCKETS specified"));
    }
    ddl.append("CLUSTERED BY ( ");
    boolean isFirst = true;
    for (String clusterByCol : optionalClusterInfo.get()) {
        if (!hiveColumns.containsKey(clusterByCol)) {
            throw new IllegalArgumentException(String.format(
                    "Requested CLUSTERED BY column: %s " + "is not present in schema", clusterByCol));
        }
        if (isFirst) {
            isFirst = false;
        } else {
            ddl.append(", ");
        }
        ddl.append(String.format("`%s`", clusterByCol));
    }
    ddl.append(" ) ");

    if (optionalSortOrderInfo.isPresent() && optionalSortOrderInfo.get().size() > 0) {
        Map<String, COLUMN_SORT_ORDER> sortOrderInfoMap = optionalSortOrderInfo.get();
        ddl.append("SORTED BY ( ");
        isFirst = true;
        for (Map.Entry<String, COLUMN_SORT_ORDER> sortOrderInfo : sortOrderInfoMap.entrySet()) {
            if (!hiveColumns.containsKey(sortOrderInfo.getKey())) {
                throw new IllegalArgumentException(
                        String.format("Requested SORTED BY column: %s " + "is not present in schema",
                                sortOrderInfo.getKey()));
            }
            if (isFirst) {
                isFirst = false;
            } else {
                ddl.append(", ");
            }
            ddl.append(String.format("`%s` %s", sortOrderInfo.getKey(), sortOrderInfo.getValue()));
        }
        ddl.append(" ) ");
    }
    ddl.append(String.format(" INTO %s BUCKETS %n", optionalNumOfBuckets.get()));
}

/**
 * Appends the TBLPROPERTIES clause when table properties are present and non-empty.
 */
private static void appendTablePropertiesClause(StringBuilder ddl, Properties tableProperties) {
    if (null == tableProperties || tableProperties.size() == 0) {
        return;
    }
    ddl.append("TBLPROPERTIES ( \n");
    boolean isFirst = true;
    for (String property : tableProperties.stringPropertyNames()) {
        if (isFirst) {
            isFirst = false;
        } else {
            ddl.append(", \n");
        }
        ddl.append(String.format("  '%s'='%s'", property, tableProperties.getProperty(property)));
    }
    ddl.append(") \n");
}