Example usage for java.util Hashtable keySet

List of usage examples for java.util Hashtable keySet

Introduction

On this page you can find example usage for java.util Hashtable keySet.

Prototype

Set<K> keySet()

To view the source code for java.util Hashtable keySet, click the Source Link.

Document

Each of these fields is initialized to contain an instance of the appropriate view the first time the view is requested.
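As a quick illustration of the view behavior, here is a minimal sketch (not taken from the source files below): removing a key through the set returned by keySet also removes the mapping from the backing table.

import java.util.Hashtable;
import java.util.Set;

public class KeySetViewDemo {
    public static void main(String[] args) {
        Hashtable<String, Integer> table = new Hashtable<String, Integer>();
        table.put("a", 1);
        table.put("b", 2);

        // keySet() returns a live view backed by the table
        Set<String> keys = table.keySet();
        keys.remove("a"); // removing from the view also removes from the table
        System.out.println(table.containsKey("a")); // prints: false
    }
}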

Usage

From source file:examples.javarpc.CustomMapping.java

public Map parseToMap(Object obj) {
    try {

        if (Hashtable.class.isInstance(obj)) {
            Hashtable table = (Hashtable) obj;
            mLogger.debug("Is Table");
            Map m = new HashMap();
            for (Iterator iter = table.keySet().iterator(); iter.hasNext();) {
                Object key = iter.next();
                m.put(key, table.get(key));
            }

            return m;
        } else {
            mLogger.debug("Is No Table: " + obj);
        }

    } catch (Exception err) {
        mLogger.error("[parseToMap]", err);
    }
    return null;
}
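Each table.get(key) in the loop above performs a second synchronized lookup. A minimal alternative sketch that copies the same data via entrySet, with one lookup per entry (copyToMap is a name chosen for illustration, keeping the example's raw-type style):

Map copyToMap(Hashtable table) {
    Map m = new HashMap();
    for (Iterator iter = table.entrySet().iterator(); iter.hasNext();) {
        Map.Entry entry = (Map.Entry) iter.next();
        m.put(entry.getKey(), entry.getValue());
    }
    return m;
}

The copy constructor new HashMap(table) would achieve the same result in a single call.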

From source file:org.alfresco.filesys.NfsServerNodeMonitor.java

/**
 * Searches for {@link ShareDetails} to access the NFS server cache for a specific device name (e.g. 'Alfresco')
 *
 * @param fileId - {@link Integer} value which contains the <code>fileId</code> specific to the device
 * @return the {@link ShareDetails} instance whose cache contains the <code>fileId</code> key, or <code>null</code> if no such instance was found
 */
private ShareDetails findShareDetailsForId(int fileId) {
    if ((null == nfsServer) || (null == nfsServer.getShareDetails())) {
        return null;
    }

    Hashtable<Integer, ShareDetails> details = nfsServer.getShareDetails().getShareDetails();
    for (Integer key : details.keySet()) {
        ShareDetails shareDetails = details.get(key);
        if (null != shareDetails.getFileIdCache().findPath(fileId)) {
            return shareDetails;
        }
    }

    return null;
}
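Only the values are used inside this loop, so iterating details.values() would avoid the extra lookup per key; a sketch of the equivalent loop (same behavior assumed):

for (ShareDetails shareDetails : details.values()) {
    if (null != shareDetails.getFileIdCache().findPath(fileId)) {
        return shareDetails;
    }
}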

From source file:org.apache.hadoop.mapred.FIFOWorkflowScheduler.java

/**
 * This will be called by the JobTracker. Here I put together all jobs
 * and wjobs.
 */
@Override
public synchronized Collection<JobInProgress> getJobs(String queueName) {
    ArrayList<JobInProgress> jobs = new ArrayList<JobInProgress>();

    Collection<Object> queue = fifoWorkflowListener.getQueue();
    Hashtable<JobID, JobInProgress> wjobs = fifoWorkflowListener.getWJobs();
    for (Object obj : queue) {
        if (obj instanceof JobInProgress) {
            jobs.add((JobInProgress) obj);
        } else {
            WorkflowInProgress wip = (WorkflowInProgress) obj;
            Hashtable<JobID, String> idToName = wip.getStatus().getIDToName();
            for (JobID id : idToName.keySet()) {
                jobs.add(wjobs.get(id));
            }
        }
    }

    return jobs;
}
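The keySet iteration here looks each id up in a different table (wjobs) than the one being iterated (idToName), so entrySet offers no shortcut. If the two tables can drift apart, wjobs.get(id) may return null and a null entry would land in the result list; a guarded variant (a sketch, not part of the original scheduler):

for (JobID id : idToName.keySet()) {
    JobInProgress jip = wjobs.get(id); // null if wjobs lacks this id
    if (jip != null) {
        jobs.add(jip);
    }
}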

From source file:org.apache.hadoop.mapred.EDFWorkflowScheduler.java

/**
 * This will be called by the JobTracker. Here I put together all jobs
 * and wjobs.
 */
@Override
public synchronized Collection<JobInProgress> getJobs(String queueName) {
    ArrayList<JobInProgress> jobs = new ArrayList<JobInProgress>();

    Collection<Object> queue = edfWorkflowListener.getQueue();
    Hashtable<JobID, JobInProgress> wjobs = edfWorkflowListener.getWJobs();
    for (Object obj : queue) {
        if (obj instanceof JobInProgress) {
            jobs.add((JobInProgress) obj);
        } else {
            WorkflowInProgress wip = (WorkflowInProgress) obj;
            Hashtable<JobID, String> idToName = wip.getStatus().getIDToName();
            for (JobID id : idToName.keySet()) {
                jobs.add(wjobs.get(id));
            }
        }
    }

    return jobs;
}

From source file:org.cocos2dx.plugin.AnalyticsUmeng.java

private HashMap<String, String> changeTableToMap(Hashtable<String, String> param) {
    HashMap<String, String> retParam = new HashMap<String, String>();
    for (Iterator<String> it = param.keySet().iterator(); it.hasNext();) {
        String key = it.next();
        String value = param.get(key);

        retParam.put(key, value);
    }

    return retParam;
}
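Because the method copies every mapping unchanged, the HashMap copy constructor performs the same iteration internally; a one-line equivalent sketch:

private HashMap<String, String> changeTableToMap(Hashtable<String, String> param) {
    // the copy constructor iterates param's entries for us
    return new HashMap<String, String>(param);
}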

From source file:org.nees.rpi.vis.ui.XYChartPanelProxy.java

private String processLabelfromUnits(Hashtable<String, Integer> h, String defaultLabel) {
    String axisLabel = "";

    ArrayList<String> units = new ArrayList<String>();
    for (String key : h.keySet())
        if (h.get(key) > 0)
            units.add(key);

    for (String s : units)
        axisLabel += s + ", ";

    if (axisLabel.length() == 0)
        axisLabel = defaultLabel;
    else
        axisLabel = axisLabel.substring(0, axisLabel.length() - 2);

    return axisLabel;
}
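The concatenate-then-trim idiom above can be expressed without the substring step. A sketch of the same logic using entrySet and String.join (Java 8+), with the method name chosen for illustration:

private String buildAxisLabel(Hashtable<String, Integer> h, String defaultLabel) {
    ArrayList<String> units = new ArrayList<String>();
    for (Map.Entry<String, Integer> entry : h.entrySet()) {
        if (entry.getValue() > 0) {
            units.add(entry.getKey()); // keep only units that actually occur
        }
    }
    return units.isEmpty() ? defaultLabel : String.join(", ", units);
}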

From source file:com.globalsight.everest.webapp.pagehandler.projects.workflows.JobControlInProgressHandler.java

/**
 * Invokes this EntryPageHandler object.
 * <p>
 * 
 * @param p_thePageDescriptor
 *            the description of the page to be produced.
 * @param p_request
 *            original request sent from the browser.
 * @param p_response
 *            original response object.
 * @param p_context
 *            the Servlet context.
 */
public void myInvokePageHandler(WebPageDescriptor p_thePageDescriptor, HttpServletRequest p_request,
        HttpServletResponse p_response, ServletContext p_context)
        throws ServletException, IOException, RemoteException, EnvoyServletException {
    HttpSession session = p_request.getSession(false);
    SessionManager sessionMgr = (SessionManager) session.getAttribute(SESSION_MANAGER);
    boolean stateMatch = false;
    if (Job.DISPATCHED.equals((String) sessionMgr.getMyjobsAttribute("lastState")))
        stateMatch = true;
    String action = p_request.getParameter(ACTION_STRING);
    if (StringUtil.isNotEmpty(action) && "removeJobFromGroup".equals(action)) {
        removeJobFromGroup(p_request);
    }
    setJobSearchFilters(sessionMgr, p_request, stateMatch);

    HashMap beanMap = invokeJobControlPage(p_thePageDescriptor, p_request, BASE_BEAN);
    p_request.setAttribute("searchType", p_request.getParameter("searchType"));

    // since an instance of a page handler is used by different clients,
    // this instance variable needs to be set only once. There's no need
    // to synchronize this section as the value of export url is always the
    // same.
    if (m_exportUrl == null) {
        m_exportUrl = ((NavigationBean) beanMap.get(EXPORT_BEAN)).getPageURL();
    }

    if (p_request.getParameter("checkIsUploadingForExport") != null) {
        long jobId = Long.parseLong(p_request.getParameter("jobId"));
        Job job = WorkflowHandlerHelper.getJobById(jobId);
        String result = "";
        for (Workflow workflow : job.getWorkflows()) {
            if (result.length() > 0)
                break;
            Hashtable<Long, Task> tasks = workflow.getTasks();
            for (Long taskKey : tasks.keySet()) {
                if (tasks.get(taskKey).getIsUploading() == 'Y') {
                    result = "uploading";
                    break;
                }
            }
        }
        PrintWriter out = p_response.getWriter();
        p_response.setContentType("text/html");
        out.write(result);
        out.close();
        return;
    } else if (p_request.getParameter("action") != null
            && "checkDownloadQAReport".equals(p_request.getParameter("action"))) {
        ServletOutputStream out = p_response.getOutputStream();
        String jobIds = p_request.getParameter("jobIds");
        boolean checkQA = checkQAReport(sessionMgr, jobIds);
        String download = "";
        if (checkQA) {
            download = "success";
        } else {
            download = "fail";
        }
        Map<String, Object> returnValue = new HashMap<String, Object>();
        returnValue.put("download", download);
        out.write((JsonUtil.toObjectJson(returnValue)).getBytes("UTF-8"));
        return;
    } else if (p_request.getParameter("action") != null
            && "downloadQAReport".equals(p_request.getParameter("action"))) {
        Set<Long> jobIdSet = (Set<Long>) sessionMgr.getAttribute("jobIdSet");
        Set<File> exportFilesSet = (Set<File>) sessionMgr.getAttribute("exportFilesSet");
        Set<String> localesSet = (Set<String>) sessionMgr.getAttribute("localesSet");
        long companyId = (Long) sessionMgr.getAttribute("companyId");
        WorkflowHandlerHelper.zippedFolder(p_request, p_response, companyId, jobIdSet, exportFilesSet,
                localesSet);
        sessionMgr.removeElement("jobIdSet");
        sessionMgr.removeElement("exportFilesSet");
        sessionMgr.removeElement("localesSet");
        return;
    }

    performAppropriateOperation(p_request);

    sessionMgr.setMyjobsAttribute("lastState", Job.DISPATCHED);
    JobVoInProgressSearcher searcher = new JobVoInProgressSearcher();
    searcher.setJobVos(p_request, true);
    p_request.setAttribute(EXPORT_URL_PARAM, m_exportUrl);
    p_request.setAttribute(JOB_ID, JOB_ID);
    p_request.setAttribute(JOB_LIST_START_PARAM, p_request.getParameter(JOB_LIST_START_PARAM));
    p_request.setAttribute(PAGING_SCRIPTLET,
            getPagingText(p_request, ((NavigationBean) beanMap.get(BASE_BEAN)).getPageURL(), Job.DISPATCHED));
    try {
        Company company = ServerProxy.getJobHandler()
                .getCompanyById(CompanyWrapper.getCurrentCompanyIdAsLong());
        p_request.setAttribute("company", company);
    } catch (Exception e) {
        e.printStackTrace();
    }

    // Set the EXPORT_INIT_PARAM in the sessionMgr so we can bring
    // the user back here after they Export
    sessionMgr.setAttribute(JobManagementHandler.EXPORT_INIT_PARAM, BASE_BEAN);
    sessionMgr.setAttribute("destinationPage", "inprogress");
    // clear the session for download job from joblist page
    sessionMgr.setAttribute(DownloadFileHandler.DOWNLOAD_JOB_LOCALES, null);
    sessionMgr.setAttribute(DownloadFileHandler.DESKTOP_FOLDER, null);
    setJobProjectsLocales(sessionMgr, session);

    // turn on cache. do both. "pragma" for the older browsers.
    p_response.setHeader("Pragma", "yes-cache"); // HTTP 1.0
    p_response.setHeader("Cache-Control", "yes-cache"); // HTTP 1.1
    p_response.addHeader("Cache-Control", "yes-store"); // tell proxy not to cache

    // forward to the jsp page.
    RequestDispatcher dispatcher = p_context.getRequestDispatcher(p_thePageDescriptor.getJspURL());
    dispatcher.forward(p_request, p_response);
}
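In the checkIsUploadingForExport branch above, the inner loop needs only the task values, and the outer loop stops via a result.length() check. A labeled break over tasks.values() expresses the same early exit directly (a sketch, not the handler's original code):

String result = "";
search: for (Workflow workflow : job.getWorkflows()) {
    for (Task task : workflow.getTasks().values()) {
        if (task.getIsUploading() == 'Y') {
            result = "uploading";
            break search; // stop at the first uploading task
        }
    }
}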

From source file:org.apache.hadoop.mapred.DAPWorkflowScheduler.java

/**
 * This will be called by the JobTracker. Here I put together all jobs
 * and wjobs.
 */
@Override
public synchronized Collection<JobInProgress> getJobs(String queueName) {
    ArrayList<JobInProgress> jobs = new ArrayList<JobInProgress>();

    Collection<Object> queue = dapWorkflowListener.getQueue();
    Hashtable<JobID, JobInProgress> wjobs = dapWorkflowListener.getWJobs();
    for (Object obj : queue) {
        if (obj instanceof JobInProgress) {
            jobs.add((JobInProgress) obj);
        } else {
            WorkflowInProgress wip = (WorkflowInProgress) obj;
            Hashtable<JobID, String> idToName = wip.getStatus().getIDToName();
            for (JobID id : idToName.keySet()) {
                jobs.add(wjobs.get(id));
            }
        }
    }

    return jobs;
}

From source file:org.apache.hadoop.mapred.PlanWorkflowScheduler.java

/**
 * This will be called by the JobTracker. Here I put together all jobs
 * and wjobs.
 */
@Override
public synchronized Collection<JobInProgress> getJobs(String queueName) {
    ArrayList<JobInProgress> jobs = new ArrayList<JobInProgress>();

    Collection<Object> queue = planWorkflowListener.getQueue();
    Hashtable<JobID, JobInProgress> wjobs = planWorkflowListener.getWJobs();
    for (Object obj : queue) {
        if (obj instanceof JobInProgress) {
            jobs.add((JobInProgress) obj);
        } else {
            WorkflowInProgress wip = (WorkflowInProgress) obj;
            Hashtable<JobID, String> idToName = wip.getStatus().getIDToName();
            for (JobID id : idToName.keySet()) {
                jobs.add(wjobs.get(id));
            }
        }
    }

    return jobs;
}

From source file:org.apache.hadoop.mapred.FairWorkflowScheduler.java

/**
 * This will be called by the JobTracker. Here I put together all jobs
 * and wjobs.
 */
@Override
public synchronized Collection<JobInProgress> getJobs(String queueName) {
    ArrayList<JobInProgress> jobs = new ArrayList<JobInProgress>();

    Collection<Object> queue = fairWorkflowListener.getMapQueue();
    Hashtable<JobID, JobInProgress> wjobs = fairWorkflowListener.getWJobs();
    for (Object obj : queue) {
        if (obj instanceof JobInProgress) {
            jobs.add((JobInProgress) obj);
        } else {
            WorkflowInProgress wip = (WorkflowInProgress) obj;
            Hashtable<JobID, String> idToName = wip.getStatus().getIDToName();
            for (JobID id : idToName.keySet()) {
                jobs.add(wjobs.get(id));
            }
        }
    }

    return jobs;
}