Example usage for java.util Hashtable get

List of usage examples for java.util Hashtable get

Introduction

On this page you can find example usages of java.util.Hashtable.get.

Prototype

@SuppressWarnings("unchecked")
public synchronized V get(Object key) 

Source Link

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.

Usage

From source file:edu.isi.misd.tagfiler.download.FileDownloadImplementation.java

/**
 * Performs the dataset download.
 *
 * @param destDir
 *            destination directory for the download
 * @param target
 *            resume or download all
 * @return always true; the transfer itself is delegated to {@code client.download}
 */
@SuppressWarnings("unchecked")
public boolean downloadFiles(String destDir, String target) {
    if (destDir == null || destDir.length() == 0 || target == null)
        throw new IllegalArgumentException(destDir + ", " + target);
    this.target = target;
    try {
        client.setBaseURL(DatasetUtils.getBaseDownloadUrl(dataset, tagFilerServerURL));
    } catch (UnsupportedEncodingException e1) {
        // NOTE(review): failure is only reported and the method continues with the previous
        // base URL — confirm this best-effort behaviour is intended.
        e1.printStackTrace();
    }

    // the copy is done for performance reasons in the inner loop
    ArrayList<String> tempFiles = new ArrayList<String>(fileNames);

    List<FileWrapper> filesList = new ArrayList<FileWrapper>();
    if (target.equals(RESUME_TARGET)) {
        // resume download: consult the checkpoint file written by a previous run
        String filename = destDir + File.separator + TagFilerProperties.getProperty("tagfiler.checkpoint.file");
        File file = new File(filename);
        if (file.exists() && file.isFile() && file.canRead()) {
            // Read the checkpoint status. try-with-resources closes both streams even when
            // readObject() throws; the original code leaked them on that path.
            try (FileInputStream fis = new FileInputStream(filename);
                    ObjectInputStream in = new ObjectInputStream(fis)) {
                Hashtable<String, Long> checkPoint = (Hashtable<String, Long>) in.readObject();
                HashMap<String, String> checksum = (HashMap<String, String>) in.readObject();
                System.out.println("Check Points Read: " + checkPoint + "\n" + checksum);
                Set<String> keys = checkPoint.keySet();
                for (String key : keys) {
                    if (tempFiles.contains(key)) {
                        tempFiles.remove(key);
                        // a file is complete when every byte was transferred...
                        boolean complete = (long) bytesMap.get(key) == (long) checkPoint.get(key);
                        if (complete && enableChecksum) {
                            // ...and, when checksumming is enabled, the recorded digest matches
                            complete = checksum.get(key) != null && checksumMap.get(key) != null
                                    && checksum.get(key).equals(checksumMap.get(key));
                        }
                        if (complete) {
                            // file already downloaded: drop it from all tracking maps
                            bytesMap.remove(key);
                            versionMap.remove(key);
                            checksumMap.remove(key);
                        } else {
                            // file partially downloaded: resume from the checkpointed offset
                            filesList.add(new FileWrapper(key, checkPoint.get(key), versionMap.get(key),
                                    bytesMap.get(key)));
                        }
                    }
                }
            } catch (FileNotFoundException e) {
                // checkpoint vanished between the canRead() check and the open; resume as full download
                e.printStackTrace();
            } catch (IOException e) {
                e.printStackTrace();
            } catch (ClassNotFoundException e) {
                e.printStackTrace();
            }
        }
    }

    // add the rest of the files without check points (downloaded from offset 0)
    for (String filename : tempFiles) {
        filesList.add(new FileWrapper(filename, 0, versionMap.get(filename), bytesMap.get(filename)));
    }

    System.out.println("" + filesList.size() + " file(s) will be downloaded");

    // get the total size of the files to be downloaded and checksum
    long totalSize = 0;
    long tb = 0;
    for (FileWrapper fileWrapper : filesList) {
        tb += fileWrapper.getFileLength() - fileWrapper.getOffset();
        totalSize += fileWrapper.getFileLength() - fileWrapper.getOffset();
        if (enableChecksum) {
            // the progress bar also accounts for the checksum pass over the whole file
            totalSize += fileWrapper.getFileLength();
        }
    }
    fileDownloadListener.notifyLogMessage(
            tb + " total bytes will be transferred\n" + totalSize + " total bytes in the progress bar");
    fileDownloadListener.notifyStart(dataset, totalSize);
    if (!((AbstractTagFilerApplet) applet).allowChunksTransfering()) {
        ClientUtils.disableExpirationWarning(applet);
    }
    start = System.currentTimeMillis();
    cancel = false;
    client.download(filesList, destDir, checksumMap, bytesMap, versionMap);

    return true;
}

From source file:edu.eurac.commul.pepperModules.mmax2.Salt2MMAX2Mapper.java

/**
 * Converts an SDocument into a Salt enhanced Mmax2 document.
 * @param corpus The Salt enhanced Mmax2 corpus into which the converted Salt enhanced Mmax2 document should go
 * @param sDocument the SDocument to convert
 * @param factory the SaltExtendedDocumentFactory to use for creating documents
 * @param schemeFactory the SaltExtendedDocumentFactory to use for creating Schemes
 * @throws MMAX2ExporterException
 * @throws MMAX2WrapperException
 */
@Override
public DOCUMENT_STATUS mapSDocument() {
    // this function goes through all pieces of data in a SDocument and launch accordingly the specialized functions below      
    String documentName = getDocument().getName();
    // reset the per-document lookup tables before mapping a new document
    this.spanStextualRelationCorrespondance = new Hashtable<STextualRelation, Integer>();
    this.spanStextualDSCorrespondance = new Hashtable<STextualDS, ArrayList<String>>();
    this.registeredSNodesMarkables = new Hashtable<SNode, SaltExtendedMarkableFactory.SaltExtendedMarkable>();
    this.registeredSRelationsMarkables = new Hashtable<SRelation, SaltExtendedMarkableFactory.SaltExtendedMarkable>();
    this.registeredSLayerMarkables = new HashMap<SLayer, SaltExtendedMarkableFactory.SaltExtendedMarkable>();

    this.sContainerMarkables = new Hashtable<Object, Hashtable<Scheme, SaltExtendedMarkableContainer>>();

    this.document = factory.newDocument(documentName);

    // it deals with STextualDs
    List<STextualDS> sTextualDSList = new ArrayList<STextualDS>(
            getDocument().getDocumentGraph().getTextualDSs());
    List<STextualRelation> sTextualRelationList = new ArrayList<STextualRelation>(
            getDocument().getDocumentGraph().getTextualRelations());
    // compteurId numbers the MMax2 base data units ("word_<n>") across all STextualDS
    int compteurId = 0;
    {
        // group the STextualRelations by the STextualDS they target
        Hashtable<STextualDS, ArrayList<STextualRelation>> correspondanceDsTextualRelations = new Hashtable<STextualDS, ArrayList<STextualRelation>>();
        for (STextualRelation sTextualRelation : sTextualRelationList) {
            ArrayList<STextualRelation> listRelation = correspondanceDsTextualRelations
                    .get(sTextualRelation.getTarget());
            if (listRelation == null) {
                listRelation = new ArrayList<STextualRelation>();
                correspondanceDsTextualRelations.put(sTextualRelation.getTarget(), listRelation);
            }
            listRelation.add(sTextualRelation);
        }

        for (STextualDS sTextualDS : sTextualDSList) {
            String sText = sTextualDS.getText();

            ArrayList<STextualRelation> listRelation = correspondanceDsTextualRelations.get(sTextualDS);

            // coveredCarachter[i] holds the STextualRelation spanning character i, or null;
            // overlapping relations are rejected because MMax2 base data units may not overlap
            STextualRelation[] coveredCarachter = new STextualRelation[sText.length()];
            if (listRelation != null) {
                for (STextualRelation sTextualRelation : listRelation) {
                    int start = sTextualRelation.getStart();
                    int end = sTextualRelation.getEnd();
                    for (int i = start; i < end; i++) {
                        if (coveredCarachter[i] != null)
                            throw new PepperModuleException(
                                    "Unexportable Salt Document => Two STextualRelation span a same caracter/token at position '"
                                            + i + "':\n" + sTextualRelation + "\n" + coveredCarachter[i]
                                            + "\nAs Stokens and STextualRelations are, when available, mapped to MMax2 Base Data Units, they are not allowed to overlap.");
                        coveredCarachter[i] = sTextualRelation;
                    }
                }
            }

            // emit one base data unit per covered span (skipping to its end) and one per
            // uncovered single character; remember the unit ids belonging to this STextualDS
            ArrayList<String> spansTextualDS = new ArrayList<String>();
            for (int i = 0; i < coveredCarachter.length; i++) {
                compteurId++;
                if (coveredCarachter[i] != null) {
                    String text = sText.substring(coveredCarachter[i].getStart(), coveredCarachter[i].getEnd());
                    document.addBaseDataUnit(document.newBaseDataUnit("word_" + compteurId, text));
                    this.spanStextualRelationCorrespondance.put(coveredCarachter[i], compteurId);
                    // jump to the last character of the span; the loop increment moves past it
                    i = coveredCarachter[i].getEnd() - 1;
                } else {
                    document.addBaseDataUnit(
                            document.newBaseDataUnit("word_" + compteurId, sText.substring(i, i + 1)));
                }
                spansTextualDS.add("word_" + compteurId);
            }
            this.spanStextualDSCorrespondance.put(sTextualDS, spansTextualDS);
        }
    }

    // The order of exporting the things can impact on the way an Mmax2 => Mmax2 conversion can look on a diff
    try {
        mapSDocument(compteurId);

        for (SLayer sLayer : new ArrayList<SLayer>(getDocument().getDocumentGraph().getLayers())) {
            mapSLayer(sLayer, compteurId);
        }

        // create the markables for every node and relation first, collecting them so their
        // annotations can be mapped afterwards in a stable order
        ArrayList<SNode> allSnodes = new ArrayList<SNode>();
        ArrayList<SRelation> allSrelations = new ArrayList<SRelation>();

        for (STextualDS sTextualDs : sTextualDSList) {
            getSNodeMarkable(sTextualDs);
            allSnodes.add(sTextualDs);
        }

        for (STextualRelation sTextualRelation : sTextualRelationList) {
            getSRelationMarkable(sTextualRelation);
            allSrelations.add(sTextualRelation);
        }

        for (SToken sToken : getDocument().getDocumentGraph().getTokens()) {
            getSNodeMarkable(sToken);
            allSnodes.add(sToken);
        }

        for (SSpanningRelation sSpanningRelation : getDocument().getDocumentGraph().getSpanningRelations()) {
            getSRelationMarkable(sSpanningRelation);
            allSrelations.add(sSpanningRelation);
        }

        for (SSpan sSpan : getDocument().getDocumentGraph().getSpans()) {
            getSNodeMarkable(sSpan);
            allSnodes.add(sSpan);
        }

        for (SDominanceRelation sDominanceRelation : getDocument().getDocumentGraph().getDominanceRelations()) {
            getSRelationMarkable(sDominanceRelation);
            allSrelations.add(sDominanceRelation);
        }

        for (SStructure sStruct : getDocument().getDocumentGraph().getStructures()) {
            getSNodeMarkable(sStruct);
            allSnodes.add(sStruct);
        }

        for (SPointingRelation sPointer : getDocument().getDocumentGraph().getPointingRelations()) {
            getSRelationMarkable(sPointer);
            allSrelations.add(sPointer);
        }

        // Records if the snode belongs to a given set of Slayers
        for (SNode sNode : allSnodes) {
            SaltExtendedMarkable markable = getSNodeMarkable(sNode);
            Set<SLayer> sLayers = sNode.getLayers();

            mapSMetaAnnotations(markable.getSName(), markable.getId(), sNode, markable.getId(),
                    markable.getSpan(), markable.getFactory().getScheme().getName(), sLayers);
            mapSAnnotations(markable.getSName(), markable.getId(), sNode, markable.getId(), markable.getSpan(),
                    markable.getFactory().getScheme().getName(), sLayers);

            if (sLayers.size() != 0)
                mapSLayersToMarkable(markable, markable.getFactory().getScheme().getName(), sLayers);
        }

        // Records if the srelation has a certain set of STypes and if it  belongs to a given set of Slayers
        for (SRelation sRelation : allSrelations) {
            SaltExtendedMarkable markable = getSRelationMarkable(sRelation);
            Set<SLayer> sLayers = sRelation.getLayers();
            mapSMetaAnnotations(markable.getSName(), markable.getId(), sRelation, markable.getId(),
                    markable.getSpan(), markable.getFactory().getScheme().getName(), sLayers);
            mapSAnnotations(markable.getSName(), markable.getId(), sRelation, markable.getId(),
                    markable.getSpan(), markable.getFactory().getScheme().getName(), sLayers);

            if (sLayers.size() != 0) {
                mapSLayersToMarkable(markable, markable.getFactory().getScheme().getName(), sLayers);
            }
            mapSTypesToMarkable(markable, markable.getFactory().getScheme().getName(), sRelation.getType());
        }
    } catch (MMAX2WrapperException e) {
        throw new PepperModuleException(this, "", e);
    }
    this.corpus.addDocument(document);

    try {
        SaltExtendedFileGenerator.outputDocument(corpus, document);
    } catch (Exception e) {
        throw new PepperModuleException(this, "", e);
    }

    return (DOCUMENT_STATUS.COMPLETED);
}

From source file:com.medigy.persist.model.data.EntitySeedDataPopulator.java

/**
 * Seeds the entity cache tables: for every mapped {@code Entity} class that declares an
 * inner enum (assumed to be its cache of seed values), creates one entity instance per
 * enum constant, copies each enum read-method value through the entity's matching write
 * method, and saves the instance. Per-entity reflection/persistence errors are logged
 * and processing continues with the next class.
 *
 * @throws HibernateException only from failures outside the per-enum try block
 */
public void populateEntityCacheData() throws HibernateException {

    Iterator itr = null;
    if (!useEjb)
        itr = configuration.getClassMappings();
    else
        itr = ejb3Configuration.getClassMappings();

    while (itr.hasNext()) {
        Class entityClass = ((PersistentClass) itr.next()).getMappedClass();
        log.warn(entityClass.getName());
        if (!Entity.class.isAssignableFrom(entityClass))
            continue;

        Class[] innerClasses = entityClass.getDeclaredClasses();
        for (Class innerClass : innerClasses) {
            // TODO: assume that this is the inner CACHE class !???!!! maybe make Cache extend an interface to indicate this??
            if (innerClass.isEnum() && !entityClass.equals(Party.class)) {
                try {
                    final BeanInfo beanInfo = Introspector.getBeanInfo(entityClass);
                    final PropertyDescriptor[] descriptors = beanInfo.getPropertyDescriptors();
                    // Index write methods by the name of their paired read method. The typed
                    // map removes the unchecked (Method) cast the raw Hashtable forced below,
                    // and the getReadMethod() guard avoids an NPE on write-only properties.
                    final Hashtable<String, Method> pdsByName = new Hashtable<String, Method>();
                    for (final PropertyDescriptor descriptor : descriptors) {
                        if (descriptor.getWriteMethod() != null && descriptor.getReadMethod() != null)
                            pdsByName.put(descriptor.getReadMethod().getName(), descriptor.getWriteMethod());
                    }

                    Object[] enumObjects = innerClass.getEnumConstants();
                    // now match the enum methods with the enclosing class' methods
                    for (Object enumObj : enumObjects) {
                        Object entityObj = entityClass.newInstance();
                        final Method[] enumMethods = enumObj.getClass().getMethods();
                        for (Method enumMethod : enumMethods) {
                            final Method writeMethod = pdsByName.get(enumMethod.getName());
                            if (writeMethod != null) {
                                // copy the enum constant's value into the fresh entity
                                writeMethod.invoke(entityObj, enumMethod.invoke(enumObj));
                            }
                        }
                        HibernateUtil.getSession().save(entityObj);
                    }
                } catch (IntrospectionException e) {
                    log.error(e);
                } catch (IllegalAccessException e) {
                    log.error(e);
                } catch (InstantiationException e) {
                    log.error(e);
                } catch (InvocationTargetException e) {
                    log.error(e);
                } catch (HibernateException e) {
                    log.error(e);
                }
            }
        }
    }
}

From source file:com.eleybourn.bookcatalogue.utils.Utils.java

/**
 * Given a collection of views, a starting id, and an INextView strategy, follow the
 * chain of "next" ids recursively and return the first id whose view is VISIBLE.
 *
 * @param vh      collection of all views, keyed by id
 * @param nextId  id of the 'next' view to examine
 * @param getter  interface to look up the following 'next' id for a given view
 *
 * @return        id of the first visible 'next' view, or View.NO_ID when the chain
 *                leads to an id not present in the collection
 */
private static int getNextView(Hashtable<Integer, View> vh, int nextId, INextView getter) {
    final View candidate = vh.get(nextId);
    if (candidate == null) {
        // chain left the known set of views; give up
        return View.NO_ID;
    }
    return (candidate.getVisibility() == View.VISIBLE)
            ? nextId
            : getNextView(vh, getter.getNext(candidate), getter);
}

From source file:net.sf.joost.plugins.traxfilter.THResolver.java

/**
 * Find in the given list of parameters filter's own one and set their state
 *
 * @param params/*w  ww. ja v a  2  s .co  m*/
 */
protected void setFilterAttributes(Hashtable params) {
    if (DEBUG)
        log.debug("setFilterAttributes()");

    // loop over all coming parameters
    Enumeration e = params.keys();
    while (e.hasMoreElements()) {
        String key = (String) e.nextElement();

        // is this a parameter from filter's namespace?
        if (key.startsWith(tmp_FILTER_ATTR_NS)) {

            // it is, extract the name of the attribute and set its value
            String name = key.substring(tmp_FILTER_ATTR_NS.length()).toLowerCase();
            Attribute a = (Attribute) (attrs.get(name));
            if (a == null)
                throw new IllegalArgumentException("setFilterAttributes() : " + name + " not supported");

            a.setValue(String.valueOf(params.get(key)));
            if (DEBUG)
                log.debug("setFilterAttributes(): set attribute " + name + "=" + params.get(key));
        }
    }
}

From source file:gov.nih.nci.cabig.caaers.service.synchronizer.TreatmentAssignmentSynchronizer.java

/**
 * Synchronizes the treatment assignments of the database study with those from the XML
 * study: matches by CTEp DB id first, then by code (TAC); updates matched records,
 * adds unmatched XML records, and soft-deletes DB records absent from the XML.
 *
 * @param dbStudy  the persisted study to update
 * @param xmlStudy the incoming study parsed from XML
 * @param outcome  import outcome collector (not written to here)
 */
public void migrate(Study dbStudy, Study xmlStudy, DomainObjectImportOutcome<Study> outcome) {
    //create an Index of existing ones (available in DB)
    Hashtable<String, TreatmentAssignment> dbTacIndexMap = new Hashtable<String, TreatmentAssignment>();
    Hashtable<String, TreatmentAssignment> dbCtepIndexMap = new Hashtable<String, TreatmentAssignment>();
    for (TreatmentAssignment ta : dbStudy.getActiveTreatmentAssignments()) {
        String ctepDbId = StringUtils.upperCase(ta.getCtepDbIdentifier());
        String tac = StringUtils.upperCase(ta.getCode());
        // Hashtable throws NullPointerException on null keys, so only index
        // non-empty codes/ids (the original put(tac, ta) crashed on a null code)
        if (StringUtils.isNotEmpty(tac))
            dbTacIndexMap.put(tac, ta);
        if (StringUtils.isNotEmpty(ctepDbId))
            dbCtepIndexMap.put(ctepDbId, ta);
    }

    //Identify New TreatmentAssignments and also update existing ones.
    for (TreatmentAssignment xmlTreatmentAssignment : xmlStudy.getTreatmentAssignments()) {

        // //CAAERS-7367 - /REFACTORED - always prefer the tac that is available.
        String ctepDbId = StringUtils.upperCase(xmlTreatmentAssignment.getCtepDbIdentifier());
        String tac = StringUtils.upperCase(xmlTreatmentAssignment.getCode());
        if (StringUtils.isEmpty(tac) && StringUtils.isEmpty(ctepDbId))
            continue; //no I cannot process this record
        TreatmentAssignment ta = null;

        //try to identify the TA by ctep-id
        if (StringUtils.isNotEmpty(ctepDbId)) {
            ta = dbCtepIndexMap.get(ctepDbId);
        }
        //TA not found : try to find by tac (guard: Hashtable.get(null) throws NPE)
        if (ta == null && StringUtils.isNotEmpty(tac))
            ta = dbTacIndexMap.get(tac);

        //still tac null -- create a new one.
        if (ta == null) {
            ta = xmlTreatmentAssignment;
            dbStudy.addTreatmentAssignment(xmlTreatmentAssignment);
            continue;
        }

        //it is an existing TA, so lets sync up the attributes
        ta.setCtepDbIdentifier(xmlTreatmentAssignment.getCtepDbIdentifier());
        ta.setCode(xmlTreatmentAssignment.getCode());
        ta.setDescription(xmlTreatmentAssignment.getDescription());
        ta.setComments(xmlTreatmentAssignment.getComments());
        ta.setDoseLevelOrder(xmlTreatmentAssignment.getDoseLevelOrder());

        //marking the TA as processed by removing it from index
        //(guard: Hashtable.remove(null) throws NPE when the XML record had no code)
        if (StringUtils.isNotEmpty(tac))
            dbTacIndexMap.remove(tac);

    }

    //soft delete - all the TAs that were not present in XML Study
    AbstractMutableRetireableDomainObject.retire(dbTacIndexMap.values());

}

From source file:edu.uga.cs.fluxbuster.features.FeatureCalculator.java

/**
 * Calculates the cluster novelty feature for each cluster generated
 * on a specific run date.
 *
 * <p>Novelty for a cluster is the average, over its second-level domain names that
 * also appeared on previous dates, of (new IPs resolved) / (max age in days of the
 * domain's prior appearances within the window).
 *
 * @param log_date the run date
 * @param window the number of days previous to use in feature calculation
 * @return a table of values where the keys are cluster ids and the values 
 *       are the feature values
 * @throws SQLException if there is an error calculating the feature values
 */
public Map<Integer, Double> calculateNoveltyFeature(Date log_date, int window) throws SQLException {
    HashMap<Integer, Double> retval = new HashMap<Integer, Double>();
    ArrayList<Date> prevDates = getPrevDates(log_date, window);

    if (prevDates.size() > 0) {
        // Build query 1: per (domain, cluster), the count of newly-resolved IPs on log_date
        StringBuffer querybuf = new StringBuffer();
        Formatter formatter = new Formatter(querybuf);
        String curdatestr = df.format(log_date);
        formatter.format(properties.getProperty(NOVELTY_QUERY1_1KEY), curdatestr, curdatestr, curdatestr,
                curdatestr);
        for (Date prevDate : prevDates) {
            formatter.format(" " + properties.getProperty(NOVELTY_QUERY1_2KEY) + " ", df.format(prevDate));
        }
        formatter.format(properties.getProperty(NOVELTY_QUERY1_3KEY), curdatestr, curdatestr);

        ResultSet rs2 = null;
        // cluster id -> (second-level domain -> count of new IPs)
        Hashtable<Integer, Hashtable<String, Long>> new_resolved_ips = new Hashtable<Integer, Hashtable<String, Long>>();
        try {
            rs2 = dbi.executeQueryWithResult(querybuf.toString());
            while (rs2.next()) {
                int cluster_id = rs2.getInt(2);
                if (!new_resolved_ips.containsKey(cluster_id)) {
                    new_resolved_ips.put(cluster_id, new Hashtable<String, Long>());
                }
                String secondLevelDomainName = rs2.getString(1);
                long newips = rs2.getLong(3);
                Hashtable<String, Long> clustertable = new_resolved_ips.get(cluster_id);
                clustertable.put(secondLevelDomainName, newips);
            }
        } catch (Exception e) {
            // query failure is logged; the feature is then computed from whatever was read
            if (log.isErrorEnabled()) {
                log.error(e);
            }
        } finally {
            if (rs2 != null && !rs2.isClosed()) {
                rs2.close();
            }
            formatter.close();
        }

        // Query 2 (once per previous date): for each domain seen on that date, record
        // how many days before log_date it appeared
        Hashtable<String, List<Integer>> numDays = new Hashtable<String, List<Integer>>();
        for (Date prevDate : prevDates) {
            String prevDateStr = df.format(prevDate);
            querybuf = new StringBuffer();
            formatter = new Formatter(querybuf);
            formatter.format(properties.getProperty(NOVELTY_QUERY2KEY), curdatestr, prevDateStr, curdatestr,
                    prevDateStr);
            ResultSet rs3 = null;
            try {
                rs3 = dbi.executeQueryWithResult(querybuf.toString());
                while (rs3.next()) {
                    String sldn = rs3.getString(1);
                    if (!numDays.containsKey(sldn)) {
                        numDays.put(sldn, new ArrayList<Integer>());
                    }
                    Date pd = rs3.getDate(2);
                    DateTime start = new DateTime(pd.getTime());
                    DateTime end = new DateTime(log_date.getTime());
                    Days d = Days.daysBetween(start, end);
                    int diffDays = d.getDays();
                    numDays.get(sldn).add(diffDays);
                }
            } catch (Exception e) {
                if (log.isErrorEnabled()) {
                    log.error(e);
                }
            } finally {
                if (rs3 != null && !rs3.isClosed()) {
                    rs3.close();
                }
                formatter.close();
            }
        }

        // Per-cluster list of per-domain novelty ratios: newIPs / max prior age in days.
        // Domains never seen on a previous date contribute nothing.
        Hashtable<Integer, List<Float>> clusterValues = new Hashtable<Integer, List<Float>>();
        for (int clusterID : new_resolved_ips.keySet()) {
            clusterValues.put(clusterID, new ArrayList<Float>());

            Hashtable<String, Long> sldnValues = new_resolved_ips.get(clusterID);
            for (String sldn : sldnValues.keySet()) {
                if (numDays.keySet().contains(sldn)) {
                    long newIPCount = sldnValues.get(sldn);
                    float f = ((float) newIPCount) / Collections.max(numDays.get(sldn));
                    clusterValues.get(clusterID).add(f);

                }
            }
        }

        // Average the ratios per cluster; a cluster with no qualifying domains scores 0.
        for (int clusterID : clusterValues.keySet()) {
            if (clusterValues.get(clusterID) == null) { //I dont think it is possible for this to ever be true
                retval.put(clusterID, null);
            } else {
                double sum = 0;
                for (double d : clusterValues.get(clusterID)) {
                    sum += d;
                }
                double val = 0;
                if (clusterValues.get(clusterID).size() > 0) {
                    val = sum / clusterValues.get(clusterID).size();
                }
                retval.put(clusterID, val);
            }
        }
    }
    return retval;
}

From source file:edu.ku.brc.specify.tools.schemalocale.SchemaLocalizerDlg.java

/**
 * Reports whether the given locale is referenced anywhere — either in the
 * database (checked first, since extra locales are not loaded automatically)
 * or in the in-memory schema containers.
 *
 * @param locale the locale to look for
 * @return true when the locale is in use
 */
@Override
public boolean isLocaleInUse(final Locale locale) {
    // Database check comes first because extra locales are not loaded automatically.
    if (isLocaleInUseInDB(schemaType, locale)) {
        return true;
    }

    // Otherwise collect every locale mentioned by the in-memory containers and their items.
    final Hashtable<String, Boolean> seenLocales = new Hashtable<String, Boolean>();
    for (SpLocaleContainer container : tables) {
        SchemaLocalizerXMLHelper.checkForLocales(container, seenLocales);
        for (LocalizableItemIFace item : container.getContainerItems()) {
            SchemaLocalizerXMLHelper.checkForLocales(item, seenLocales);
        }
    }
    // Hashtable never holds null values, so containsKey is equivalent to get(...) != null.
    return seenLocales.containsKey(SchemaLocalizerXMLHelper.makeLocaleKey(locale));
}

From source file:hr.restart.util.chart.ChartXY.java

/**
 * /*from ww w. ja  v a 2  s . com*/
 * @return String[], with all captions from the DataSet
 */
private String[] makeCaptions() {

    Hashtable hcols = new Hashtable();
    Column[] ccols = getDataSet().getColumns();
    for (int i = 0; i < ccols.length; i++) {
        //System.out.println("checking "+ccols[i].getColumnName());
        if (ccols[i].getDataType() == Variant.BIGDECIMAL) {
            hcols.put(ccols[i].getColumnName(), ccols[i].getCaption());
        }
    }

    colNamesY = (String[]) hcols.keySet().toArray(new String[0]);
    String[] colCaptionsY = new String[hcols.size()];
    for (int i = 0; i < colNamesY.length; i++) {
        colCaptionsY[i] = hcols.get(colNamesY[i]).toString();
    }

    return colCaptionsY;
}

From source file:info.magnolia.cms.module.ModuleUtil.java

/**
 * Register a servlet in the web.xml including init parameters. The code checks if the servlet already exists.
 *
 * @param name        servlet-name to register
 * @param className   fully qualified servlet class
 * @param urlPatterns url patterns to map to the servlet
 * @param comment     comment inserted above the new servlet element
 * @param initParams  optional init parameters (name -> value), may be null or empty
 * @return true when web.xml was modified
 * @throws JDOMException on XML parse/XPath errors
 * @throws IOException on read/write errors
 */
public static boolean registerServlet(String name, String className, String[] urlPatterns, String comment,
        Hashtable initParams) throws JDOMException, IOException {

    boolean changed = false;

    // get the web.xml
    File source = new File(Path.getAppRootDir() + "/WEB-INF/web.xml");
    if (!source.exists()) {
        throw new FileNotFoundException("Failed to locate web.xml " //$NON-NLS-1$
                + source.getAbsolutePath());
    }
    SAXBuilder builder = new SAXBuilder();
    Document doc = builder.build(source);

    // check if there already registered
    XPath xpath = XPath.newInstance("/webxml:web-app/webxml:servlet[webxml:servlet-name='" + name + "']");
    // must add the namespace and use it: there is no default namespace elsewise
    xpath.addNamespace("webxml", doc.getRootElement().getNamespace().getURI());
    Element node = (Element) xpath.selectSingleNode(doc);

    if (node == null) {
        log.info("register servlet " + name);

        // make a nice comment
        doc.getRootElement().addContent(new Comment(comment));

        // the same name space must be used
        Namespace ns = doc.getRootElement().getNamespace();

        node = new Element("servlet", ns);
        node.addContent(new Element("servlet-name", ns).addContent(name));
        node.addContent(new Element("servlet-class", ns).addContent(className));

        if (initParams != null && !(initParams.isEmpty())) {
            Enumeration params = initParams.keys();
            while (params.hasMoreElements()) {
                String paramName = params.nextElement().toString();
                String paramValue = (String) initParams.get(paramName);
                Element initParam = new Element("init-param", ns);
                initParam.addContent(new Element("param-name", ns).addContent(paramName));
                initParam.addContent(new Element("param-value", ns).addContent(paramValue));
                node.addContent(initParam);
            }
        }

        doc.getRootElement().addContent(node);
        changed = true;
    } else {
        log.info("servlet {} already registered", name);
    }
    // non-short-circuit | is deliberate: every mapping must be processed even once changed
    for (int i = 0; i < urlPatterns.length; i++) {
        String urlPattern = urlPatterns[i];
        changed = changed | registerServletMapping(doc, name, urlPattern, comment);
    }

    if (changed) {
        XMLOutputter outputter = new XMLOutputter(Format.getPrettyFormat());
        // try-with-resources flushes and closes the writer; the original leaked the
        // FileWriter, risking an unflushed/locked web.xml
        try (FileWriter writer = new FileWriter(source)) {
            outputter.output(doc, writer);
        }
    }
    return changed;
}