Example usage for java.util HashMap remove

List of usage examples for java.util HashMap remove

Introduction

On this page you can find example usages of java.util HashMap remove.

Prototype

public V remove(Object key) 

Document

Removes the mapping for the specified key from this map if present.
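
As a minimal, self-contained illustration of the prototype above (the map contents are made up for the demo): remove returns the value previously associated with the key, or null when the key had no mapping.

import java.util.HashMap;

public class RemoveDemo {
    public static void main(String[] args) {
        HashMap<String, Integer> counts = new HashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);

        Integer removed = counts.remove("a"); // returns the old value: 1
        Integer missing = counts.remove("z"); // returns null: no mapping for "z"

        System.out.println(removed + " " + missing + " " + counts); // prints: 1 null {b=2}
    }
}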

Usage

From source file:org.sakaiproject.tool.assessment.ui.bean.author.SectionBean.java

/**List of available question pools for random draw. 
 * returns a list of pools that have not been used by other random drawn parts 
 * @return ArrayList of QuestionPoolFacade objects
 */
public ArrayList getPoolsAvailable() {
    ArrayList resultPoolList = new ArrayList();

    AssessmentBean assessmentBean = (AssessmentBean) ContextUtil.lookupBean("assessmentBean");

    QuestionPoolService delegate = new QuestionPoolService();

    String agentId = AgentFacade.getAgentString();

    ArrayList allpoollist = delegate.getBasicInfoOfAllPools(agentId);

    HashMap allPoolsMap = new HashMap();
    for (int i = 0; i < allpoollist.size(); i++) {
        QuestionPoolFacade apool = (QuestionPoolFacade) allpoollist.get(i);
        allPoolsMap.put(apool.getQuestionPoolId().toString(), apool);
    }

    AssessmentService assessdelegate = null;
    AuthorBean author = (AuthorBean) ContextUtil.lookupBean("author");
    boolean isEditPendingAssessmentFlow = author.getIsEditPendingAssessmentFlow();

    if (isEditPendingAssessmentFlow) {
        assessdelegate = new AssessmentService();
    } else {
        assessdelegate = new PublishedAssessmentService();
    }

    List sectionList = assessmentBean.getSectionList();
    for (int i = 0; i < sectionList.size(); i++) {
        SelectItem s = (SelectItem) sectionList.get(i);

        // need to remove the pools already used by random draw parts

        SectionDataIfc section = assessdelegate.getSection(s.getValue().toString());
        if ((section != null) && (section.getSectionMetaDataByLabel(SectionDataIfc.AUTHOR_TYPE) != null)
                && (section.getSectionMetaDataByLabel(SectionDataIfc.AUTHOR_TYPE)
                        .equals(SectionDataIfc.RANDOM_DRAW_FROM_QUESTIONPOOL.toString()))) {
            String poolid = section.getSectionMetaDataByLabel(SectionDataIfc.POOLID_FOR_RANDOM_DRAW);
            if (allPoolsMap.containsKey(poolid)) {
                allPoolsMap.remove(poolid);
            }
        }
    }

    // SAM-2463: Fetch the count of questions for each pool in one query instead of hundreds
    HashMap<Long, Integer> poolQuestionCounts = delegate.getCountItemsForUser(agentId);

    Iterator pooliter = allPoolsMap.keySet().iterator();
    while (pooliter.hasNext()) {
        QuestionPoolFacade pool = (QuestionPoolFacade) allPoolsMap.get(pooliter.next());
        Long poolId = pool.getQuestionPoolId();
        int items = poolQuestionCounts.containsKey(poolId) ? poolQuestionCounts.get(poolId) : 0;
        if (items > 0) {
            resultPoolList.add(new SelectItem((poolId.toString()),
                    getPoolTitleValueForRandomDrawDropDown(pool, items, allpoollist, delegate)));
        }
    }
    // add the pool currently used in this part, so it stays available when modifying the part
    if (!("".equals(this.getSelectedPool())) && (this.getSelectedPool() != null)) {

        //now we need to get the poolid and displayName

        QuestionPoolFacade currPool = delegate.getPool(new Long(this.getSelectedPool()),
                AgentFacade.getAgentString());
        // now add the currently used pool to the list, so it's available in the pulldown
        if (currPool != null) {
            // only if the pool still exists; it may have been deleted
            int currItems = delegate.getCountItems(currPool.getQuestionPoolId());
            if (currItems > 0) {
                resultPoolList.add(new SelectItem((currPool.getQuestionPoolId().toString()),
                        getPoolTitleValueForRandomDrawDropDown(currPool, currItems, allpoollist, delegate)));
            }
        } else {
            // the pool has been deleted; nothing to add
        }
    }

    Collections.sort(resultPoolList, new ItemComparator());
    return resultPoolList;
}
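
The pattern above is worth distilling: index every pool by id, then remove the ids already consumed by other random-draw parts, so the survivors are exactly the available pools. Two side notes: the containsKey check before remove is redundant, since remove is simply a no-op on an absent key, and iterating allPoolsMap.values() would avoid the per-key get lookups in the final loop. A minimal sketch of the core idea, with hypothetical names standing in for the Sakai types:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

class AvailablePools {
    // Index every pool title by id, then drop the ids already used elsewhere.
    // remove() is a no-op on an absent key, so no containsKey guard is needed.
    static Map<String, String> available(Map<String, String> allPoolsById, List<String> usedIds) {
        Map<String, String> byId = new HashMap<>(allPoolsById);
        for (String usedId : usedIds) {
            byId.remove(usedId);
        }
        return byId;
    }
}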

From source file:org.apache.sqoop.mapreduce.CombineFileInputFormat.java

/**
 * Return all the splits in the specified set of paths
 */
private void getMoreSplits(JobContext job, Path[] paths, long maxSize, long minSizeNode, long minSizeRack,
        List<InputSplit> splits) throws IOException {
    Configuration conf = job.getConfiguration();

    // all blocks for all the files in input set
    OneFileInfo[] files;

    // mapping from a rack name to the list of blocks it has
    HashMap<String, List<OneBlockInfo>> rackToBlocks = new HashMap<String, List<OneBlockInfo>>();

    // mapping from a block to the nodes on which it has replicas
    HashMap<OneBlockInfo, String[]> blockToNodes = new HashMap<OneBlockInfo, String[]>();

    // mapping from a node to the list of blocks that it contains
    HashMap<String, List<OneBlockInfo>> nodeToBlocks = new HashMap<String, List<OneBlockInfo>>();

    files = new OneFileInfo[paths.length];
    if (paths.length == 0) {
        return;
    }

    // populate all the blocks for all files
    long totLength = 0;
    for (int i = 0; i < paths.length; i++) {
        files[i] = new OneFileInfo(paths[i], conf, isSplitable(job, paths[i]), rackToBlocks, blockToNodes,
                nodeToBlocks, rackToNodes, maxSize);
        totLength += files[i].getLength();
    }

    ArrayList<OneBlockInfo> validBlocks = new ArrayList<OneBlockInfo>();
    Set<String> nodes = new HashSet<String>();
    long curSplitSize = 0;

    // process all nodes and create splits that are local
    // to a node.
    for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = nodeToBlocks.entrySet().iterator(); iter
            .hasNext();) {

        Map.Entry<String, List<OneBlockInfo>> one = iter.next();
        nodes.add(one.getKey());
        List<OneBlockInfo> blocksInNode = one.getValue();

        // for each block, copy it into validBlocks. Delete it from
        // blockToNodes so that the same block does not appear in
        // two different splits.
        for (OneBlockInfo oneblock : blocksInNode) {
            if (blockToNodes.containsKey(oneblock)) {
                validBlocks.add(oneblock);
                blockToNodes.remove(oneblock);
                curSplitSize += oneblock.length;

                // if the accumulated split size exceeds the maximum, then
                // create this split.
                if (maxSize != 0 && curSplitSize >= maxSize) {
                    // create an input split and add it to the splits array
                    addCreatedSplit(splits, nodes, validBlocks);
                    curSplitSize = 0;
                    validBlocks.clear();
                }
            }
        }
        // if there were any blocks left over and their combined size is
        // larger than minSizeNode, then combine them into one split.
        // Otherwise add them back to the unprocessed pool. It is likely
        // that they will be combined with other blocks from the
        // same rack later on.
        if (minSizeNode != 0 && curSplitSize >= minSizeNode) {
            // create an input split and add it to the splits array
            addCreatedSplit(splits, nodes, validBlocks);
        } else {
            for (OneBlockInfo oneblock : validBlocks) {
                blockToNodes.put(oneblock, oneblock.hosts);
            }
        }
        validBlocks.clear();
        nodes.clear();
        curSplitSize = 0;
    }

    // if blocks in a rack are below the specified minimum size, then keep them
    // in 'overflow'. After the processing of all racks is complete, these
    // overflow blocks will be combined into splits.
    ArrayList<OneBlockInfo> overflowBlocks = new ArrayList<OneBlockInfo>();
    Set<String> racks = new HashSet<String>();

    // Process all racks over and over again until there is no more work to do.
    while (blockToNodes.size() > 0) {

        // Create one split for this rack before moving over to the next rack.
        // Come back to this rack after creating a single split for each of the
        // remaining racks.
        // Process one rack location at a time: combine all possible blocks that
        // reside on this rack into one split (constrained by minimum and maximum
        // split size).

        // iterate over all racks
        for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = rackToBlocks.entrySet().iterator(); iter
                .hasNext();) {

            Map.Entry<String, List<OneBlockInfo>> one = iter.next();
            racks.add(one.getKey());
            List<OneBlockInfo> blocks = one.getValue();

            // for each block, copy it into validBlocks. Delete it from
            // blockToNodes so that the same block does not appear in
            // two different splits.
            boolean createdSplit = false;
            for (OneBlockInfo oneblock : blocks) {
                if (blockToNodes.containsKey(oneblock)) {
                    validBlocks.add(oneblock);
                    blockToNodes.remove(oneblock);
                    curSplitSize += oneblock.length;

                    // if the accumulated split size exceeds the maximum, then
                    // create this split.
                    if (maxSize != 0 && curSplitSize >= maxSize) {
                        // create an input split and add it to the splits array
                        addCreatedSplit(splits, getHosts(racks), validBlocks);
                        createdSplit = true;
                        break;
                    }
                }
            }

            // if we created a split, then just go to the next rack
            if (createdSplit) {
                curSplitSize = 0;
                validBlocks.clear();
                racks.clear();
                continue;
            }

            if (!validBlocks.isEmpty()) {
                if (minSizeRack != 0 && curSplitSize >= minSizeRack) {
                    // if there is a minimum size specified, then create a single split
                    // otherwise, store these blocks into overflow data structure
                    addCreatedSplit(splits, getHosts(racks), validBlocks);
                } else {
                    // There were a few blocks in this rack that
                    // remained to be processed. Keep them in 'overflow' block list.
                    // These will be combined later.
                    overflowBlocks.addAll(validBlocks);
                }
            }
            curSplitSize = 0;
            validBlocks.clear();
            racks.clear();
        }
    }

    assert blockToNodes.isEmpty();
    assert curSplitSize == 0;
    assert validBlocks.isEmpty();
    assert racks.isEmpty();

    // Process all overflow blocks
    for (OneBlockInfo oneblock : overflowBlocks) {
        validBlocks.add(oneblock);
        curSplitSize += oneblock.length;

        // This might cause an existing rack location to be re-added,
        // but it should be ok.
        for (int i = 0; i < oneblock.racks.length; i++) {
            racks.add(oneblock.racks[i]);
        }

        // if the accumulated split size exceeds the maximum, then
        // create this split.
        if (maxSize != 0 && curSplitSize >= maxSize) {
            // create an input split and add it to the splits array
            addCreatedSplit(splits, getHosts(racks), validBlocks);
            curSplitSize = 0;
            validBlocks.clear();
            racks.clear();
        }
    }

    // Process any remaining blocks.
    if (!validBlocks.isEmpty()) {
        addCreatedSplit(splits, getHosts(racks), validBlocks);
    }
}
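
Here blockToNodes serves as the pool of unassigned work: each block is removed from it the moment a split claims it, which is what guarantees that no block appears in two splits, and the outer while loop terminates exactly when the map is empty. A reduced sketch of that consume-by-remove loop, with hypothetical names and a plain size map standing in for the block structures:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;

class ConsumeByRemove {
    // Drain a map of pending item sizes into groups of at least maxSize bytes;
    // removing each claimed item ensures it can never be assigned twice, and
    // the loop ends exactly when the map is empty.
    static List<List<String>> group(HashMap<String, Long> pending, long maxSize) {
        List<List<String>> groups = new ArrayList<>();
        List<String> current = new ArrayList<>();
        long size = 0;
        while (!pending.isEmpty()) {
            String key = pending.keySet().iterator().next();
            size += pending.remove(key); // claim the item: remove returns its size
            current.add(key);
            if (size >= maxSize) {
                groups.add(current);
                current = new ArrayList<>();
                size = 0;
            }
        }
        if (!current.isEmpty()) {
            groups.add(current); // leftovers form the final group
        }
        return groups;
    }
}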

From source file:com.photon.phresco.framework.impl.ProjectManagerImpl.java

@Override
public Boolean deleteDashboardConfig(DashboardInfo dashboardInfo) throws PhrescoException {
    Gson gson = new Gson();
    Dashboards dashboards;
    String json;
    HashMap<String, Dashboard> dashboardMap;
    try {
        String rootModulePath = Utility.getProjectHome() + dashboardInfo.getAppdirname();
        File dashboardInfoFile = new File(
                getProjectPhresoFolder(rootModulePath).concat(FORWARD_SLASH).concat(DASHBOARD_INFO_FILE));
        if (dashboardInfoFile.exists()) {
            json = FileUtils.readFileToString(dashboardInfoFile);
            dashboards = gson.fromJson(json, Dashboards.class);
            dashboardMap = dashboards.getDashboards();
            if (dashboardMap.containsKey(dashboardInfo.getDashboardid())) {
                dashboardMap.remove(dashboardInfo.getDashboardid());
                dashboards.setDashboards(dashboardMap);
                json = gson.toJson(dashboards, Dashboards.class);
                FileUtils.writeStringToFile(dashboardInfoFile, json);
                return true;
            }
            return false;
        }
    } catch (IOException e) {
        throw new PhrescoException(e);
    }
    return false;
}
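
One detail worth noting: the containsKey check followed by remove performs two hash lookups. Because remove returns the previous value, or null when there was no mapping, the pair can usually be collapsed into a single call, as in this small sketch (the two forms differ only if the map can store null values):

import java.util.HashMap;

class RemoveIfPresent {
    // Equivalent to containsKey + remove, but with a single lookup:
    // remove returns null when the key had no mapping.
    static boolean removeIfPresent(HashMap<String, Object> map, String key) {
        return map.remove(key) != null;
    }
}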

From source file:org.apache.hadoop.mapreduce.approx.lib.input.SampleTextInputFormat.java

/**
 * Return all the splits in the specified set of paths
 */
private void getMoreSplits(JobContext job, Path[] paths, long maxSize, long minSizeNode, long minSizeRack,
        List<InputSplit> splits) throws IOException {
    Configuration conf = job.getConfiguration();

    // all blocks for all the files in input set
    OneFileInfo[] files;

    // mapping from a rack name to the list of blocks it has
    HashMap<String, List<OneBlockInfo>> rackToBlocks = new HashMap<String, List<OneBlockInfo>>();

    // mapping from a block to the nodes on which it has replicas
    HashMap<OneBlockInfo, String[]> blockToNodes = new HashMap<OneBlockInfo, String[]>();

    // mapping from a node to the list of blocks that it contains
    HashMap<String, List<OneBlockInfo>> nodeToBlocks = new HashMap<String, List<OneBlockInfo>>();

    files = new OneFileInfo[paths.length];
    if (paths.length == 0) {
        return;
    }

    // populate all the blocks for all files
    //***************************************sampling info*************************************
    //long totLength = 0;
    for (int i = 0; i < paths.length; i++) {
        files[i] = new OneFileInfo(paths[i], conf, isSplitable(job, paths[i]), rackToBlocks, blockToNodes,
                nodeToBlocks, rackToNodes, maxSize);
        //totLength += files[i].getLength();
    }

    ArrayList<OneBlockInfo> validBlocks = new ArrayList<OneBlockInfo>();
    Set<String> nodes = new HashSet<String>();
    long curSplitSize = 0;

    // process all nodes and create splits that are local
    // to a node. 
    for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = nodeToBlocks.entrySet().iterator(); iter
            .hasNext();) {

        Map.Entry<String, List<OneBlockInfo>> one = iter.next();
        nodes.add(one.getKey());
        List<OneBlockInfo> blocksInNode = one.getValue();

        // for each block, copy it into validBlocks. Delete it from 
        // blockToNodes so that the same block does not appear in 
        // two different splits.
        for (OneBlockInfo oneblock : blocksInNode) {
            if (blockToNodes.containsKey(oneblock)) {
                validBlocks.add(oneblock);
                blockToNodes.remove(oneblock);
                //*******************************************segments compose splits****************
                curSplitSize += oneblock.length;
                if (blockunit) {
                    addCreatedSplit1(splits, validBlocks);
                    curSplitSize = 0;
                    validBlocks.clear();
                    continue;
                }
                // if the accumulated split size exceeds the maximum, then 
                // create this split.
                if (maxSize != 0 && curSplitSize >= maxSize) {
                    // create an input split and add it to the splits array
                    addCreatedSplit(splits, nodes, validBlocks);
                    curSplitSize = 0;
                    validBlocks.clear();
                }
            }
        }
        // if there were any blocks left over and their combined size is
        // larger than minSizeNode, then combine them into one split.
        // Otherwise add them back to the unprocessed pool. It is likely 
        // that they will be combined with other blocks from the 
        // same rack later on.
        if (minSizeNode != 0 && curSplitSize >= minSizeNode) {
            // create an input split and add it to the splits array
            addCreatedSplit(splits, nodes, validBlocks);
        } else {
            for (OneBlockInfo oneblock : validBlocks) {
                blockToNodes.put(oneblock, oneblock.hosts);
            }
        }
        validBlocks.clear();
        nodes.clear();
        curSplitSize = 0;
    }

    // if blocks in a rack are below the specified minimum size, then keep them
    // in 'overflow'. After the processing of all racks is complete, these 
    // overflow blocks will be combined into splits.
    ArrayList<OneBlockInfo> overflowBlocks = new ArrayList<OneBlockInfo>();
    Set<String> racks = new HashSet<String>();

    // Process all racks over and over again until there is no more work to do.
    while (blockToNodes.size() > 0) {

        // Create one split for this rack before moving over to the next rack. 
        // Come back to this rack after creating a single split for each of the 
        // remaining racks.
        // Process one rack location at a time: combine all possible blocks that
        // reside on this rack into one split (constrained by minimum and maximum
        // split size).

        // iterate over all racks 
        for (Iterator<Map.Entry<String, List<OneBlockInfo>>> iter = rackToBlocks.entrySet().iterator(); iter
                .hasNext();) {

            Map.Entry<String, List<OneBlockInfo>> one = iter.next();
            racks.add(one.getKey());
            List<OneBlockInfo> blocks = one.getValue();

            // for each block, copy it into validBlocks. Delete it from 
            // blockToNodes so that the same block does not appear in 
            // two different splits.
            boolean createdSplit = false;
            for (OneBlockInfo oneblock : blocks) {
                if (blockToNodes.containsKey(oneblock)) {
                    validBlocks.add(oneblock);
                    blockToNodes.remove(oneblock);
                    curSplitSize += oneblock.length;

                    // if the accumulated split size exceeds the maximum, then 
                    // create this split.
                    if (maxSize != 0 && curSplitSize >= maxSize) {
                        // create an input split and add it to the splits array
                        addCreatedSplit(splits, getHosts(racks), validBlocks);
                        createdSplit = true;
                        break;
                    }
                }
            }

            // if we created a split, then just go to the next rack
            if (createdSplit) {
                curSplitSize = 0;
                validBlocks.clear();
                racks.clear();
                continue;
            }

            if (!validBlocks.isEmpty()) {
                if (minSizeRack != 0 && curSplitSize >= minSizeRack) {
                    // if there is a minimum size specified, then create a single split
                    // otherwise, store these blocks into overflow data structure
                    addCreatedSplit(splits, getHosts(racks), validBlocks);
                } else {
                    // There were a few blocks in this rack that 
                    // remained to be processed. Keep them in 'overflow' block list. 
                    // These will be combined later.
                    overflowBlocks.addAll(validBlocks);
                }
            }
            curSplitSize = 0;
            validBlocks.clear();
            racks.clear();
        }
    }

    assert blockToNodes.isEmpty();
    assert curSplitSize == 0;
    assert validBlocks.isEmpty();
    assert racks.isEmpty();

    // Process all overflow blocks
    for (OneBlockInfo oneblock : overflowBlocks) {
        validBlocks.add(oneblock);
        curSplitSize += oneblock.length;

        // This might cause an existing rack location to be re-added,
        // but it should be ok.
        for (int i = 0; i < oneblock.racks.length; i++) {
            racks.add(oneblock.racks[i]);
        }

        // if the accumulated split size exceeds the maximum, then 
        // create this split.
        if (maxSize != 0 && curSplitSize >= maxSize) {
            // create an input split and add it to the splits array
            addCreatedSplit(splits, getHosts(racks), validBlocks);
            curSplitSize = 0;
            validBlocks.clear();
            racks.clear();
        }
    }

    // Process any remaining blocks.
    if (!validBlocks.isEmpty()) {
        addCreatedSplit(splits, getHosts(racks), validBlocks);
    }
}
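
This variant differs from the CombineFileInputFormat version above mainly in the blockunit branch, which emits one split per block (via addCreatedSplit1) for sampling purposes; the consume-by-remove discipline on blockToNodes is identical, so the sketch after the previous example applies here unchanged.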

From source file:org.ncic.bioinfo.sparkseq.algorithms.utils.GATKVariantContextUtils.java

public static Map<VariantContext.Type, List<VariantContext>> separateVariantContextsByType(
        final Collection<VariantContext> VCs) {
    if (VCs == null) {
        throw new IllegalArgumentException("VCs cannot be null.");
    }

    final HashMap<VariantContext.Type, List<VariantContext>> mappedVCs = new HashMap<>();
    for (final VariantContext vc : VCs) {
        VariantContext.Type vcType = vc.getType();

        // look at previous variant contexts of different type. If:
        // a) otherVC has alleles which are subset of vc, remove otherVC from its list and add otherVC to vc's list
        // b) vc has alleles which are subset of otherVC. Then, add vc to otherVC's type list (rather, do nothing since vc will be added automatically to its list)
        // c) neither: do nothing, just add vc to its own list
        boolean addtoOwnList = true;
        for (final VariantContext.Type type : VariantContext.Type.values()) {
            if (type.equals(vcType))
                continue;

            if (!mappedVCs.containsKey(type))
                continue;

            List<VariantContext> vcList = mappedVCs.get(type);
            for (int k = 0; k < vcList.size(); k++) {
                VariantContext otherVC = vcList.get(k);
                if (allelesAreSubset(otherVC, vc)) {
                    // otherVC has a type different than vc and its alleles are a subset of vc: remove otherVC from its list and add it to vc's type list
                    vcList.remove(k);
                    // avoid having empty lists
                    if (vcList.size() == 0)
                        mappedVCs.remove(type);
                    if (!mappedVCs.containsKey(vcType))
                        mappedVCs.put(vcType, new ArrayList<VariantContext>());
                    mappedVCs.get(vcType).add(otherVC);
                    break;
                } else if (allelesAreSubset(vc, otherVC)) {
                    // vc has a type different than otherVC and its alleles are a subset of VC: add vc to otherVC's type list and don't add to its own
                    mappedVCs.get(type).add(vc);
                    addtoOwnList = false;
                    break;
                }
            }
        }
        if (addtoOwnList) {
            if (!mappedVCs.containsKey(vcType))
                mappedVCs.put(vcType, new ArrayList<VariantContext>());
            mappedVCs.get(vcType).add(vc);
        }
    }

    return mappedVCs;
}
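
The mappedVCs.remove(type) call here maintains an invariant: a type key exists in the map only while its list is non-empty. A compact sketch of that "drop the key when its bucket empties" move, with generic placeholder types:

import java.util.HashMap;
import java.util.List;

class BucketMaps {
    // Remove one element from a bucketed multimap, dropping the key
    // entirely once its bucket is empty so no empty lists linger.
    static <K, V> void removeFromBucket(HashMap<K, List<V>> buckets, K key, V value) {
        List<V> bucket = buckets.get(key);
        if (bucket == null) {
            return; // no bucket for this key
        }
        bucket.remove(value);
        if (bucket.isEmpty()) {
            buckets.remove(key); // keep the invariant: keys map to non-empty lists
        }
    }
}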

From source file:org.hyperic.hq.appdef.server.session.ServiceManagerImpl.java

public void updateServiceTypes(String plugin, ServiceTypeInfo[] infos) throws VetoException, NotFoundException {
    final boolean debug = log.isDebugEnabled();
    StopWatch watch = new StopWatch();
    AuthzSubject overlord = authzSubjectManager.getOverlordPojo();

    // First, put all of the infos into a HashMap
    HashMap<String, ServiceTypeInfo> infoMap = new HashMap<String, ServiceTypeInfo>();
    List<String> names = new ArrayList<String>();
    for (int i = 0; i < infos.length; i++) {
        infoMap.put(infos[i].getName(), infos[i]);
        names.add(infos[i].getName());
    }

    List<ServerType> types = serverTypeDAO.findByName(names);
    HashMap<String, ServerType> serverTypes = new HashMap<String, ServerType>(types.size());
    for (ServerType type : types) {
        serverTypes.put(type.getName(), type);
    }

    try {
        Collection<ServiceType> curServices = serviceTypeDAO.findByPlugin(plugin);

        for (ServiceType serviceType : curServices) {

            if (log.isDebugEnabled()) {
                log.debug("Begin updating ServiceTypeLocal: " + serviceType.getName());
            }

            ServiceTypeInfo sinfo = infoMap.remove(serviceType.getName());

            // See if this exists
            if (sinfo == null) {
                deleteServiceType(serviceType, overlord, resourceGroupManager, resourceManager);
            } else {
                // Just update it
                // XXX TODO MOVE THIS INTO THE ENTITY
                if (!sinfo.getName().equals(serviceType.getName()))
                    serviceType.setName(sinfo.getName());

                if (!sinfo.getDescription().equals(serviceType.getDescription()))
                    serviceType.setDescription(sinfo.getDescription());

                if (sinfo.getInternal() != serviceType.isIsInternal())
                    serviceType.setIsInternal(sinfo.getInternal());

                // Could be null if servertype was deleted/updated by plugin
                ServerType svrtype = serviceType.getServerType();

                // Check server type
                if (svrtype == null || !sinfo.getServerName().equals(svrtype.getName())) {
                    // Lookup the server type
                    if (null == (svrtype = serverTypes.get(sinfo.getServerName()))) {
                        svrtype = serverTypeDAO.findByName(sinfo.getServerName());
                        if (svrtype == null) {
                            throw new NotFoundException("Unable to find server " + sinfo.getServerName()
                                    + " on which service '" + serviceType.getName() + "' relies");
                        }
                        serverTypes.put(svrtype.getName(), svrtype);
                    }
                    serviceType.setServerType(svrtype);
                }
            }
        }

        // Now create the left-overs
        final ResourceType resType = resourceManager
                .findResourceTypeByName(AuthzConstants.servicePrototypeTypeName);
        final Resource rootResource = resourceManager.findRootResource();
        final Set<String> creates = new HashSet<String>();
        for (final ServiceTypeInfo sinfo : infoMap.values()) {
            ServerType servType;
            if (null == (servType = serverTypes.get(sinfo.getServerName()))) {
                servType = serverTypeDAO.findByName(sinfo.getServerName());
                serverTypes.put(servType.getName(), servType);
            }
            if (creates.contains(sinfo.getName())) {
                continue;
            }
            creates.add(sinfo.getName());
            if (debug)
                watch.markTimeBegin("create");
            createServiceType(sinfo, plugin, servType, rootResource, resType);
            if (debug)
                watch.markTimeEnd("create");
        }
    } finally {
        if (debug)
            log.debug(watch);
    }
}
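
Note how infoMap.remove(serviceType.getName()) doubles as a lookup: it returns the matching info (or null) while shrinking the map, so whatever survives the loop is exactly the set of service types left to create. A distilled sketch of that reconcile-by-remove pattern, with hypothetical string records in place of the Hyperic types:

import java.util.HashMap;
import java.util.Map;

class Reconcile {
    // Compare incoming records against existing ones by "taking" each match
    // out of the map with remove(); entries left over are the creations.
    // Note: this consumes the incoming map, as the loop above consumes infoMap.
    static void reconcile(Iterable<String> existingNames, HashMap<String, String> incoming) {
        for (String name : existingNames) {
            String info = incoming.remove(name); // lookup and consume in one call
            if (info == null) {
                System.out.println("delete " + name); // existing, not incoming
            } else {
                System.out.println("update " + name); // present in both
            }
        }
        for (Map.Entry<String, String> leftOver : incoming.entrySet()) {
            System.out.println("create " + leftOver.getKey()); // incoming only
        }
    }
}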

From source file:org.apache.openejb.config.DeploymentLoader.java

protected AppModule createAppModule(final File jarFile, final String jarPath) throws OpenEJBException {
    File appDir = unpack(jarFile);
    try {
        appDir = appDir.getCanonicalFile();
    } catch (final IOException e) {
        throw new OpenEJBException("Invalid application directory " + appDir.getAbsolutePath());
    }

    final URL appUrl = getFileUrl(appDir);

    final String appId = appDir.getAbsolutePath();
    final ClassLoader tmpClassLoader = ClassLoaderUtil.createTempClassLoader(appId, new URL[] { appUrl },
            getOpenEJBClassLoader());

    final ResourceFinder finder = new ResourceFinder("", tmpClassLoader, appUrl);
    final Map<String, URL> appDescriptors = getDescriptors(finder);

    try {

        //
        // Find all the modules using either the application xml or by searching for all .jar, .war and .rar files.
        //

        final Map<String, URL> ejbModules = new LinkedHashMap<String, URL>();
        final Map<String, URL> clientModules = new LinkedHashMap<String, URL>();
        final Map<String, URL> resouceModules = new LinkedHashMap<String, URL>();
        final Map<String, URL> webModules = new LinkedHashMap<String, URL>();
        final Map<String, String> webContextRoots = new LinkedHashMap<String, String>();

        final URL applicationXmlUrl = appDescriptors.get("application.xml");
        final List<URL> extraLibs = new ArrayList<URL>();

        final Application application;
        if (applicationXmlUrl != null) {

            application = unmarshal(applicationXmlUrl);
            for (final Module module : application.getModule()) {
                try {
                    if (module.getEjb() != null) {
                        final URL url = finder.find(module.getEjb().trim());
                        ejbModules.put(module.getEjb(), url);
                    } else if (module.getJava() != null) {
                        final URL url = finder.find(module.getJava().trim());
                        clientModules.put(module.getJava(), url);
                        extraLibs.add(url);
                    } else if (module.getConnector() != null) {
                        final URL url = finder.find(module.getConnector().trim());
                        resouceModules.put(module.getConnector(), url);
                    } else if (module.getWeb() != null) {
                        final URL url = finder.find(module.getWeb().getWebUri().trim());
                        webModules.put(module.getWeb().getWebUri(), url);
                        webContextRoots.put(module.getWeb().getWebUri(), module.getWeb().getContextRoot());
                    }
                } catch (final IOException e) {
                    throw new OpenEJBException("Invalid path to module " + e.getMessage(), e);
                }
            }
        } else {
            application = new Application();
            final HashMap<String, URL> files = new HashMap<String, URL>();
            scanDir(appDir, files, "", false);
            files.remove("META-INF/MANIFEST.MF");

            // todo we should also filter URLs here using DeploymentsResolver.loadFromClasspath

            createApplicationFromFiles(appId, tmpClassLoader, ejbModules, clientModules, resouceModules,
                    webModules, files);
        }

        final ClassLoaderConfigurer configurer = QuickJarsTxtParser
                .parse(new File(appDir, "META-INF/" + QuickJarsTxtParser.FILE_NAME));
        final Collection<URL> jarsXmlLib = new ArrayList<>();
        if (configurer != null) {
            for (final URL url : configurer.additionalURLs()) {
                try {
                    detectAndAddModuleToApplication(appId, tmpClassLoader, ejbModules, clientModules,
                            resouceModules, webModules,
                            new ImmutablePair<>(URLs.toFile(url).getAbsolutePath(), url));
                } catch (final Exception e) {
                    jarsXmlLib.add(url);
                }
            }
        }

        //
        // Create a class loader for the application
        //

        // lib/*
        if (application.getLibraryDirectory() == null) {
            application.setLibraryDirectory("lib/");
        } else {
            final String dir = application.getLibraryDirectory();
            if (!dir.endsWith("/")) {
                application.setLibraryDirectory(dir + "/");
            }
        }

        try {
            final Map<String, URL> libs = finder.getResourcesMap(application.getLibraryDirectory());
            extraLibs.addAll(libs.values());
        } catch (final IOException e) {
            logger.warning(
                    "Cannot load libs from '" + application.getLibraryDirectory() + "' : " + e.getMessage(), e);
        }

        // APP-INF/lib/*
        try {
            final Map<String, URL> libs = finder.getResourcesMap("APP-INF/lib/");
            extraLibs.addAll(libs.values());
        } catch (final IOException e) {
            logger.warning("Cannot load libs from 'APP-INF/lib/' : " + e.getMessage(), e);
        }

        // META-INF/lib/*
        try {
            final Map<String, URL> libs = finder.getResourcesMap("META-INF/lib/");
            extraLibs.addAll(libs.values());
        } catch (final IOException e) {
            logger.warning("Cannot load libs from 'META-INF/lib/' : " + e.getMessage(), e);
        }

        // All jars nested in the Resource Adapter
        final HashMap<String, URL> rarLibs = new HashMap<String, URL>();
        for (final Map.Entry<String, URL> entry : resouceModules.entrySet()) {
            try {
                // unpack the resource adapter archive
                File rarFile = URLs.toFile(entry.getValue());
                rarFile = unpack(rarFile);
                entry.setValue(rarFile.toURI().toURL());

                scanDir(appDir, rarLibs, "");
            } catch (final MalformedURLException e) {
                throw new OpenEJBException("Malformed URL to app. " + e.getMessage(), e);
            }
        }
        for (final Iterator<Map.Entry<String, URL>> iterator = rarLibs.entrySet().iterator(); iterator
                .hasNext();) {
            // remove all non-jars from the rarLibs
            final Map.Entry<String, URL> fileEntry = iterator.next();
            if (!fileEntry.getKey().endsWith(".jar")) {
                iterator.remove();
            }
        }

        final List<URL> classPath = new ArrayList<>();
        classPath.addAll(ejbModules.values());
        classPath.addAll(clientModules.values());
        classPath.addAll(rarLibs.values());
        classPath.addAll(extraLibs);
        classPath.addAll(jarsXmlLib);
        final URL[] urls = classPath.toArray(new URL[classPath.size()]);

        SystemInstance.get().fireEvent(new BeforeDeploymentEvent(urls));

        final ClassLoader appClassLoader = ClassLoaderUtil.createTempClassLoader(appId, urls,
                getOpenEJBClassLoader());

        //
        // Create the AppModule and all nested module objects
        //

        final AppModule appModule = new AppModule(appClassLoader, appId, application, false);
        appModule.getAdditionalLibraries().addAll(extraLibs);
        appModule.getAltDDs().putAll(appDescriptors);
        appModule.getWatchedResources().add(appId);
        if (applicationXmlUrl != null) {
            appModule.getWatchedResources().add(URLs.toFilePath(applicationXmlUrl));
        }

        // EJB modules
        for (final String moduleName : ejbModules.keySet()) {
            try {
                URL ejbUrl = ejbModules.get(moduleName);
                // we should try to use a reference to the temp classloader
                if (ClassLoaderUtil.isUrlCached(appModule.getJarLocation(), ejbUrl)) {
                    try {
                        ejbUrl = ClassLoaderUtil.getUrlCachedName(appModule.getJarLocation(), ejbUrl).toURI()
                                .toURL();

                    } catch (final MalformedURLException ignore) {
                        // no-op
                    }
                }
                final File ejbFile = URLs.toFile(ejbUrl);
                final String absolutePath = ejbFile.getAbsolutePath();

                final EjbModule ejbModule = createEjbModule(ejbUrl, absolutePath, appClassLoader);
                appModule.getEjbModules().add(ejbModule);
            } catch (final OpenEJBException e) {
                logger.error("Unable to load EJBs from EAR: " + appId + ", module: " + moduleName
                        + ". Exception: " + e.getMessage(), e);
            }
        }

        // Application Client Modules
        for (final String moduleName : clientModules.keySet()) {
            try {
                URL clientUrl = clientModules.get(moduleName);
                // we should try to use a reference to the temp classloader
                if (ClassLoaderUtil.isUrlCached(appModule.getJarLocation(), clientUrl)) {
                    try {
                        clientUrl = ClassLoaderUtil.getUrlCachedName(appModule.getJarLocation(), clientUrl)
                                .toURI().toURL();

                    } catch (final MalformedURLException ignore) {
                        // no-op
                    }
                }
                final File clientFile = URLs.toFile(clientUrl);
                final String absolutePath = clientFile.getAbsolutePath();

                final ClientModule clientModule = createClientModule(clientUrl, absolutePath, appClassLoader,
                        null);

                appModule.getClientModules().add(clientModule);
            } catch (final Exception e) {
                logger.error("Unable to load App Client from EAR: " + appId + ", module: " + moduleName
                        + ". Exception: " + e.getMessage(), e);
            }
        }

        // Resource modules
        for (final String moduleName : resouceModules.keySet()) {
            try {
                URL rarUrl = resouceModules.get(moduleName);
                // we should try to use a reference to the temp classloader
                if (ClassLoaderUtil.isUrlCached(appModule.getJarLocation(), rarUrl)) {
                    try {
                        rarUrl = ClassLoaderUtil.getUrlCachedName(appModule.getJarLocation(), rarUrl).toURI()
                                .toURL();

                    } catch (final MalformedURLException ignore) {
                        // no-op
                    }
                }
                final ConnectorModule connectorModule = createConnectorModule(appId, URLs.toFilePath(rarUrl),
                        appClassLoader, moduleName);

                appModule.getConnectorModules().add(connectorModule);
            } catch (final OpenEJBException e) {
                logger.error("Unable to load RAR: " + appId + ", module: " + moduleName + ". Exception: "
                        + e.getMessage(), e);
            }
        }

        // Web modules
        for (final String moduleName : webModules.keySet()) {
            try {
                final URL warUrl = webModules.get(moduleName);
                addWebModule(appModule, warUrl, appClassLoader, webContextRoots.get(moduleName), null);
            } catch (final OpenEJBException e) {
                logger.error("Unable to load WAR: " + appId + ", module: " + moduleName + ". Exception: "
                        + e.getMessage(), e);
            }
        }

        addBeansXmls(appModule);

        // Persistence Units
        final Properties p = new Properties();
        p.put(appModule.getModuleId(), appModule.getJarLocation());
        final FileUtils base = new FileUtils(appModule.getModuleId(), appModule.getModuleId(), p);
        final List<URL> filteredUrls = new ArrayList<>();
        DeploymentsResolver.loadFromClasspath(base, filteredUrls, appModule.getClassLoader());
        addPersistenceUnits(appModule, filteredUrls.toArray(new URL[filteredUrls.size()]));

        final Object pXmls = appModule.getAltDDs().get("persistence.xml");

        for (final WebModule webModule : appModule.getWebModules()) {
            final List<URL> foundRootUrls = new ArrayList<>();
            final List<URL> scannableUrls = webModule.getScannableUrls();
            for (final URL url : scannableUrls) {
                if (!addPersistenceUnits(appModule, url).isEmpty()) {
                    foundRootUrls.add(url);
                }
            }

            if (pXmls != null && Collection.class.isInstance(pXmls)) {
                final File webapp = webModule.getFile();
                if (webapp == null) {
                    continue;
                }
                final String webappAbsolutePath = webapp.getAbsolutePath();

                final Collection<URL> list = Collection.class.cast(pXmls);
                for (final URL url : list) {
                    try {
                        final File file = URLs.toFile(url);
                        if (file.getAbsolutePath().startsWith(webappAbsolutePath)) {
                            foundRootUrls.add(url);
                        }
                    } catch (final IllegalArgumentException iae) {
                        // no-op
                    }
                }
            }

            webModule.getAltDDs().put(EAR_WEBAPP_PERSISTENCE_XML_JARS, foundRootUrls);
        }

        for (final DeploymentModule module : appModule.getDeploymentModule()) {
            module.setStandaloneModule(false);
        }

        return appModule;

    } catch (final OpenEJBException e) {
        logger.error("Unable to load EAR: " + jarPath, e);
        throw e;
    }
}
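
In the fallback branch above (no application.xml), remove acts purely as an exclusion filter: the manifest is scanned along with everything else and then dropped before the file map is handed to createApplicationFromFiles. The same move in isolation, with a hypothetical scan result:

import java.util.HashMap;
import java.util.Map;

class ExcludeBeforeProcessing {
    static Map<String, String> withoutManifest(Map<String, String> scanned) {
        // Copy so the caller's map is untouched, then drop the entry we never
        // want downstream; remove is a no-op if the manifest was never scanned.
        HashMap<String, String> files = new HashMap<>(scanned);
        files.remove("META-INF/MANIFEST.MF");
        return files;
    }
}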

From source file:com.negaheno.android.NotificationsController.java

public void showWearNotifications(boolean notifyAboutLast) {
    if (Build.VERSION.SDK_INT < 19) {
        return;
    }
    ArrayList<Long> sortedDialogs = new ArrayList<>();
    HashMap<Long, ArrayList<MessageObject>> messagesByDialogs = new HashMap<>();
    for (MessageObject messageObject : pushMessages) {
        long dialog_id = messageObject.getDialogId();
        if ((int) dialog_id == 0) {
            continue;
        }

        ArrayList<MessageObject> arrayList = messagesByDialogs.get(dialog_id);
        if (arrayList == null) {
            arrayList = new ArrayList<>();
            messagesByDialogs.put(dialog_id, arrayList);
            sortedDialogs.add(0, dialog_id);
        }
        arrayList.add(messageObject);
    }

    HashMap<Long, Integer> oldIds = new HashMap<>();
    oldIds.putAll(wearNoticationsIds);
    wearNoticationsIds.clear();

    for (long dialog_id : sortedDialogs) {
        ArrayList<MessageObject> messageObjects = messagesByDialogs.get(dialog_id);
        int max_id = messageObjects.get(0).messageOwner.id;
        TLRPC.Chat chat = null;
        TLRPC.User user = null;
        String name = null;
        if (dialog_id > 0) {
            user = MessagesController.getInstance().getUser((int) dialog_id);
            if (user == null) {
                continue;
            }
        } else {
            chat = MessagesController.getInstance().getChat(-(int) dialog_id);
            if (chat == null) {
                continue;
            }
        }
        if (chat != null) {
            name = chat.title;
        } else {
            name = ContactsController.formatName(user.first_name, user.last_name);
        }

        Integer notificationId = oldIds.get(dialog_id);
        if (notificationId == null) {
            notificationId = wearNotificationId++;
        } else {
            oldIds.remove(dialog_id);
        }

        Intent replyIntent = new Intent(ApplicationLoader.applicationContext, WearReplyReceiver.class);
        replyIntent.putExtra("dialog_id", dialog_id);
        replyIntent.putExtra("max_id", max_id);
        PendingIntent replyPendingIntent = PendingIntent.getBroadcast(ApplicationLoader.applicationContext,
                notificationId, replyIntent, PendingIntent.FLAG_UPDATE_CURRENT);
        RemoteInput remoteInput = new RemoteInput.Builder(EXTRA_VOICE_REPLY)
                .setLabel(LocaleController.getString("Reply", R.string.Reply)).build();
        String replyToString;
        if (chat != null) {
            replyToString = LocaleController.formatString("ReplyToGroup", R.string.ReplyToGroup, name);
        } else {
            replyToString = LocaleController.formatString("ReplyToUser", R.string.ReplyToUser, name);
        }
        NotificationCompat.Action action = new NotificationCompat.Action.Builder(R.drawable.ic_reply_icon,
                replyToString, replyPendingIntent).addRemoteInput(remoteInput).build();

        String text = "";
        for (MessageObject messageObject : messageObjects) {
            String message = getStringForMessage(messageObject, false);
            if (message == null) {
                continue;
            }
            if (chat != null) {
                message = message.replace(" @ " + name, "");
            } else {
                message = message.replace(name + ": ", "").replace(name + " ", "");
            }
            if (text.length() > 0) {
                text += "\n\n";
            }
            text += message;
        }

        Intent intent = new Intent(ApplicationLoader.applicationContext, LaunchActivity.class);
        intent.setAction("com.tmessages.openchat" + Math.random() + Integer.MAX_VALUE);
        intent.setFlags(32768);
        if (chat != null) {
            intent.putExtra("chatId", chat.id);
        } else if (user != null) {
            intent.putExtra("userId", user.id);
        }
        PendingIntent contentIntent = PendingIntent.getActivity(ApplicationLoader.applicationContext, 0, intent,
                PendingIntent.FLAG_ONE_SHOT);

        NotificationCompat.Builder builder = new NotificationCompat.Builder(
                ApplicationLoader.applicationContext).setContentTitle(name)
                        .setSmallIcon(R.drawable.notification).setGroup("messages").setContentText(text)
                        .setGroupSummary(false).setContentIntent(contentIntent)
                        .extend(new NotificationCompat.WearableExtender().addAction(action))
                        .setCategory(NotificationCompat.CATEGORY_MESSAGE);

        if (chat == null && user != null && user.phone != null && user.phone.length() > 0) {
            builder.addPerson("tel:+" + user.phone);
        }

        notificationManager.notify(notificationId, builder.build());
        wearNoticationsIds.put(dialog_id, notificationId);
    }

    for (HashMap.Entry<Long, Integer> entry : oldIds.entrySet()) {
        notificationManager.cancel(entry.getValue());
    }
}
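
The oldIds map implements a small mark-and-sweep: it starts as a copy of the previous round's notification ids, every dialog that is still shown removes its entry, and whatever remains belongs to dialogs that disappeared, so those notifications are cancelled. Distilled, with hypothetical names:

import java.util.HashMap;
import java.util.Set;

class StaleSweep {
    // Mark-and-sweep over tracked ids: entries removed during the pass are
    // still live; whatever survives in the copy is stale and is reported.
    static void sweep(HashMap<Long, Integer> tracked, Set<Long> stillLive) {
        HashMap<Long, Integer> old = new HashMap<>(tracked);
        for (Long key : stillLive) {
            old.remove(key); // still in use: not stale
        }
        for (Integer staleId : old.values()) {
            System.out.println("cancel notification " + staleId);
        }
    }
}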

From source file:it.eng.spagobi.commons.presentation.tags.ListTag.java

/**
 * Builds Table list columns, reading all request information.
 *
 * @throws JspException If any Exception occurs.
 */

protected void makeColumns() throws JspException {

    SourceBean captionSB = (SourceBean) _layout.getAttribute("CAPTIONS");
    List captions = captionSB.getContainedSourceBeanAttributes();
    int numCaps = captions.size();
    String columnFilter = (String) _serviceRequest.getAttribute("columnFilter");
    String typeFilter = (String) _serviceRequest.getAttribute("typeFilter");
    String typeValueFilter = (String) _serviceRequest.getAttribute("typeValueFilter");
    String valueFilter = (String) _serviceRequest.getAttribute("valueFilter");

    _htmlStream.append("<TABLE class='list' style='width:100%;margin-top:1px'>\n");
    _htmlStream.append("   <TR>\n");

    for (int i = 0; i < _columns.size(); i++) {
        String nameColumn = (String) ((SourceBean) _columns.elementAt(i)).getAttribute("NAME");
        String labelColumnCode = (String) ((SourceBean) _columns.elementAt(i)).getAttribute("LABEL");
        String labelColumn = "";
        if (labelColumnCode != null)
            labelColumn = msgBuilder.getMessage(labelColumnCode, _bundle, httpRequest);
        else
            labelColumn = nameColumn;
        // if a horizontal-align is specified it is used; otherwise the default is align='left'
        String align = (String) ((SourceBean) _columns.elementAt(i)).getAttribute("horizontal-align");
        String orderButtons = (String) ((SourceBean) _columns.elementAt(i)).getAttribute("order_buttons");
        boolean hideOrderButtons = false;
        if (orderButtons != null && orderButtons.equalsIgnoreCase("false"))
            hideOrderButtons = true;

        if (align == null || align.trim().equals(""))
            align = "left";
        // defines the order URL for dynamic ordering

        HashMap orderParamsMap = new HashMap();
        orderParamsMap.putAll(_providerUrlMap);
        orderParamsMap.put("FIELD_ORDER", nameColumn);
        orderParamsMap.put("TYPE_ORDER", " ASC");
        if (columnFilter != null && typeFilter != null && typeValueFilter != null) {
            orderParamsMap.put("columnFilter", columnFilter);
            orderParamsMap.put("typeFilter", typeFilter);
            orderParamsMap.put("typeValueFilter", typeValueFilter);
            orderParamsMap.put("valueFilter", valueFilter);
        }
        String orderUrlAsc = createUrl(orderParamsMap);
        orderUrlAsc = StringEscapeUtils.escapeHtml(orderUrlAsc);

        orderParamsMap.remove("TYPE_ORDER");
        orderParamsMap.put("TYPE_ORDER", " DESC");

        List _makeTitleButton = (List) _titleButton.elementAt(i);

        //orderParamsMap.put("MESSAGEDET",SpagoBIConstants.MESSAGE_ORDER_JOB_LIST);

        String orderUrlDesc = createUrl(orderParamsMap);
        orderUrlDesc = StringEscapeUtils.escapeHtml(orderUrlDesc);

        _htmlStream.append("<TD class='portlet-section-header' style='vertical-align:middle;text-align:" + align
                + ";'  >");
        _htmlStream.append(labelColumn);
        if (!hideOrderButtons) {
            if (!nameColumn.equalsIgnoreCase("INSTANCES")) {
                _htmlStream.append("   <A href=\"" + orderUrlAsc + "\">\n");
                _htmlStream.append("      <img  src='"
                        + urlBuilder.getResourceLinkByTheme(httpRequest, "/img/commons/ArrowUp.gif", currTheme)
                        + "'/>\n");
                _htmlStream.append("   </A>\n");
                _htmlStream.append("   <A href=\"" + orderUrlDesc + "\">\n");
                _htmlStream.append("      <img  src='" + urlBuilder.getResourceLinkByTheme(httpRequest,
                        "/img/commons/ArrowDown.gif", currTheme) + "'/>\n");
                _htmlStream.append("   </A>\n");
            }
        }

        if (_makeTitleButton.size() > 0) {

            _htmlStream.append(makeTitleButton(_makeTitleButton) + "\n");

        }

        _htmlStream.append("</TD>\n");
    }
    for (int i = 0; i < numCaps; i++) {
        _htmlStream.append("<TD class='portlet-section-header' style='text-align:center'>&nbsp;</TD>\n");
    }
    _htmlStream.append("</TR>\n");
}
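
A small simplification is available in the ordering code above: the remove("TYPE_ORDER") before the second put is unnecessary, because put already replaces an existing mapping (and returns the old value). The pair is equivalent to the single call sketched here:

import java.util.HashMap;

class OverwriteDemo {
    public static void main(String[] args) {
        HashMap<String, String> params = new HashMap<>();
        params.put("TYPE_ORDER", " ASC");
        // remove-then-put and a bare put end in the same state:
        String old = params.put("TYPE_ORDER", " DESC"); // returns " ASC"
        System.out.println(old + " -> " + params.get("TYPE_ORDER"));
    }
}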