Example usage for java.util Properties entrySet

Introduction

On this page you can find example usage for java.util.Properties.entrySet.

Prototype

@Override
public Set<Map.Entry<Object, Object>> entrySet()
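
Before the real-world examples below, here is a minimal, self-contained sketch of the call (hypothetical property names, standard java.util only):

import java.util.Map;
import java.util.Properties;

public class EntrySetDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("host", "localhost"); // hypothetical example values
        props.setProperty("port", "8080");
        // entrySet() is typed Set<Map.Entry<Object, Object>>, so keys and
        // values must be cast or converted before use as Strings.
        for (Map.Entry<Object, Object> entry : props.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }
    }
}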

Usage

From source file:com.streamsets.datacollector.cluster.BaseClusterProvider.java

private List<URL> findJars(String name, URLClassLoader cl, @Nullable String stageClazzName) throws IOException {
    Properties properties = readDataCollectorProperties(cl);
    List<String> blacklist = new ArrayList<>();
    for (Map.Entry<Object, Object> entry : properties.entrySet()) {
        String key = (String) entry.getKey();
        if (stageClazzName != null && key.equals(CLUSTER_MODE_JAR_BLACKLIST + stageClazzName)) {
            String value = (String) entry.getValue();
            blacklist.addAll(Splitter.on(",").trimResults().omitEmptyStrings().splitToList(value));
        } else if (key.equals(CLUSTER_MODE_JAR_BLACKLIST + ALL_STAGES)) {
            String value = (String) entry.getValue();
            blacklist.addAll(Splitter.on(",").trimResults().omitEmptyStrings().splitToList(value));
        }
    }
    if (isIsTraceEnabled()) {
        getLog().trace("Blacklist for '{}': '{}'", name, blacklist);
    }
    List<URL> urls = new ArrayList<>();
    for (URL url : cl.getURLs()) {
        if (blacklist.isEmpty()) {
            urls.add(url);
        } else {
            if (exclude(blacklist, FilenameUtils.getName(url.getPath()))) {
                getLog().trace("Skipping '{}' for '{}' due to '{}'", url, name, blacklist);
            } else {
                urls.add(url);
            }
        }
    }
    return urls;
}

From source file:org.traccar.web.server.model.DataServiceImpl.java

@Override
public HashMap<String, String> getCustomLayers() {
    String fileName = "/WEB-INF/custom.properties";
    HashMap<String, String> map = new HashMap<String, String>();
    Properties customLayerFile = new Properties();
    try {
        customLayerFile.load(super.getServletContext().getResourceAsStream(fileName));
        if (!customLayerFile.isEmpty()) {
            for (Entry<Object, Object> e : customLayerFile.entrySet()) {
                String layerName = e.getKey().toString();
                String layerUrl = e.getValue().toString();
                map.put(layerName, layerUrl);
            }
        }
    } catch (IOException e) {
        // FileNotFoundException is an IOException, so a single catch suffices;
        // an empty map is returned if the file cannot be read.
        e.printStackTrace();
    }
    return map;
}
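
Because entrySet() is typed Set<Map.Entry<Object, Object>>, callers like the example above fall back on toString() or casts. Where only string properties matter, stringPropertyNames() (also standard java.util.Properties) is a type-safe alternative; a minimal sketch with a hypothetical helper name:

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

// Type-safe variant of the loop above: stringPropertyNames() returns
// Set<String> and, unlike entrySet(), also consults the defaults chain.
static Map<String, String> toMap(Properties props) {
    Map<String, String> map = new HashMap<>();
    for (String name : props.stringPropertyNames()) {
        map.put(name, props.getProperty(name));
    }
    return map;
}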

From source file:io.amient.yarn1.YarnClient.java

/**
 * This method should be called by the implementing application static main
 * method. It does all the work around creating a yarn application and
 * submitting the request to the yarn resource manager. The class given in
 * the appClass argument will be run inside the yarn-allocated master
 * container.
 */
public static void submitApplicationMaster(Properties appConfig, Class<? extends YarnMaster> masterClass,
        String[] args, Boolean awaitCompletion) throws Exception {
    log.info("Yarn1 App Configuration:");
    for (Object param : appConfig.keySet()) {
        log.info(param.toString() + " = " + appConfig.get(param).toString());
    }
    String yarnConfigPath = appConfig.getProperty("yarn1.site", "/etc/hadoop");
    String masterClassName = masterClass.getName();
    appConfig.setProperty("yarn1.master.class", masterClassName);
    String applicationName = appConfig.getProperty("yarn1.application.name", masterClassName);
    log.info("--------------------------------------------------------------");

    if (Boolean.valueOf(appConfig.getProperty("yarn1.local.mode", "false"))) {
        YarnMaster.run(appConfig, args);
        return;
    }

    int masterPriority = Integer.valueOf(
            appConfig.getProperty("yarn1.master.priority", String.valueOf(YarnMaster.DEFAULT_MASTER_PRIORITY)));
    int masterMemoryMb = Integer.valueOf(appConfig.getProperty("yarn1.master.memory.mb",
            String.valueOf(YarnMaster.DEFAULT_MASTER_MEMORY_MB)));
    int masterNumCores = Integer.valueOf(
            appConfig.getProperty("yarn1.master.num.cores", String.valueOf(YarnMaster.DEFAULT_MASTER_CORES)));
    String queue = appConfig.getProperty("yarn1.queue");

    Configuration yarnConfig = new YarnConfiguration();
    yarnConfig.addResource(new FileInputStream(yarnConfigPath + "/core-site.xml"));
    yarnConfig.addResource(new FileInputStream(yarnConfigPath + "/hdfs-site.xml"));
    yarnConfig.addResource(new FileInputStream(yarnConfigPath + "/yarn-site.xml"));
    for (Map.Entry<Object, Object> entry : appConfig.entrySet()) {
        yarnConfig.set(entry.getKey().toString(), entry.getValue().toString());
    }

    final org.apache.hadoop.yarn.client.api.YarnClient yarnClient = org.apache.hadoop.yarn.client.api.YarnClient
            .createYarnClient();
    yarnClient.init(yarnConfig);
    yarnClient.start();

    for (NodeReport report : yarnClient.getNodeReports(NodeState.RUNNING)) {
        log.debug("Node report:" + report.getNodeId() + " @ " + report.getHttpAddress() + " | "
                + report.getCapability());
    }

    log.info("Submitting application master class " + masterClassName);

    YarnClientApplication app = yarnClient.createApplication();
    GetNewApplicationResponse appResponse = app.getNewApplicationResponse();
    final ApplicationId appId = appResponse.getApplicationId();
    if (appId == null) {
        System.exit(111);
    } else {
        appConfig.setProperty("am.timestamp", String.valueOf(appId.getClusterTimestamp()));
        appConfig.setProperty("am.id", String.valueOf(appId.getId()));
    }

    YarnClient.distributeResources(yarnConfig, appConfig, applicationName);

    String masterJvmArgs = appConfig.getProperty("yarn1.master.jvm.args", "");
    YarnContainerContext masterContainer = new YarnContainerContext(yarnConfig, appConfig, masterJvmArgs,
            masterPriority, masterMemoryMb, masterNumCores, applicationName, YarnMaster.class, args);

    ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
    appContext.setApplicationName(masterClassName);
    appContext.setResource(masterContainer.capability);
    appContext.setPriority(masterContainer.priority);
    appContext.setQueue(queue);
    appContext.setApplicationType(appConfig.getProperty("yarn1.application.type", "YARN"));
    appContext.setAMContainerSpec(masterContainer.createContainerLaunchContext());

    log.info("Master container spec: " + masterContainer.capability);

    yarnClient.submitApplication(appContext);

    ApplicationReport report = yarnClient.getApplicationReport(appId);
    log.info("Tracking URL: " + report.getTrackingUrl());

    if (awaitCompletion) {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                if (!yarnClient.isInState(Service.STATE.STOPPED)) {
                    log.info("Killing yarn application in shutdown hook");
                    try {
                        yarnClient.killApplication(appId);
                    } catch (Throwable e) {
                        log.error("Failed to kill yarn application - please check YARN Resource Manager", e);
                    }
                }
            }
        });

        float lastProgress = -1.0f; // sentinel so the first progress report is always logged (-0.0f == 0.0f in Java)
        while (true) {
            try {
                Thread.sleep(10000);
                report = yarnClient.getApplicationReport(appId);
                if (lastProgress != report.getProgress()) {
                    lastProgress = report.getProgress();
                    log.info(report.getApplicationId() + " " + (report.getProgress() * 100.00) + "% "
                            + (System.currentTimeMillis() - report.getStartTime()) + "(ms) "
                            + report.getDiagnostics());
                }
                if (!report.getFinalApplicationStatus().equals(FinalApplicationStatus.UNDEFINED)) {
                    log.info(report.getApplicationId() + " " + report.getFinalApplicationStatus());
                    log.info("Tracking url: " + report.getTrackingUrl());
                    log.info("Finish time: " + ((System.currentTimeMillis() - report.getStartTime()) / 1000)
                            + "(s)");
                    break;
                }
            } catch (Throwable e) {
                log.error("Master Heart Beat Error - terminating", e);
                yarnClient.killApplication(appId);
                Thread.sleep(2000);
            }
        }
        yarnClient.stop();

        if (!report.getFinalApplicationStatus().equals(FinalApplicationStatus.SUCCEEDED)) {
            System.exit(112);
        }
    }
    yarnClient.stop();
}
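
The entrySet() loop in the middle of this method overlays every application property onto the Hadoop Configuration. A sketch of just that step, assuming hadoop-common on the classpath and a hypothetical helper name:

import java.util.Map;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;

// Overrides any values loaded from the *-site.xml resources with the
// application's own properties.
static Configuration overlay(Configuration yarnConfig, Properties appConfig) {
    for (Map.Entry<Object, Object> entry : appConfig.entrySet()) {
        yarnConfig.set(entry.getKey().toString(), entry.getValue().toString());
    }
    return yarnConfig;
}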

From source file:org.ala.spatial.web.services.SitesBySpeciesWSControllerTabulated.java

@RequestMapping(value = { "sxs/add", "sxs/sxs/add" }, method = { RequestMethod.GET, RequestMethod.POST })
public ModelAndView sxsAdd(HttpServletRequest req, HttpServletResponse resp) throws IOException {
    String speciesquery = req.getParameter("speciesquery");
    String layers = req.getParameter("layers");
    String bs = URLEncoder.encode(AlaspatialProperties.getBiocacheWsURL(), "UTF-8");
    String gridsize = req.getParameter("gridsize");

    String minuncertainty = req.getParameter("minuncertainty") == null ? ""
            : req.getParameter("minuncertainty");
    String maxuncertainty = req.getParameter("maxuncertainty") == null ? ""
            : req.getParameter("maxuncertainty");
    String nulluncertainty = req.getParameter("nulluncertainty") == null ? "false"
            : req.getParameter("nulluncertainty");

    String min = minuncertainty.length() == 0 ? "*" : minuncertainty;
    String max = maxuncertainty.length() == 0 ? "*" : maxuncertainty;

    if (nulluncertainty.equals("true")) {
        speciesquery = "(" + speciesquery + ")%20AND%20coordinate_uncertainty:%5B" + min + "%20TO%20" + max
                + "%5D";
    } else if (minuncertainty.length() + maxuncertainty.length() > 0) {
        speciesquery = "(" + speciesquery
                + ")%20AND%20-(coordinate_uncertainty:*%20AND%20-coordinate_uncertainty:%5B" + min + "%20TO%20"
                + max + "%5D)";
    }

    String url = req.getParameter("u");

    try {
        if (url == null) {
            url = "q=" + speciesquery + "&gridsize=" + gridsize + "&layers=" + layers;
        }

        String pth = AlaspatialProperties.getAnalysisWorkingDir() + File.separator + "sxs" + File.separator;

        initListProperties();
        Properties p = new Properties();
        p.load(new FileReader(pth + "list.properties"));

        synchronized (lockProperties) {
            FileWriter fw = new FileWriter(pth + "list.properties", true);
            if (!p.containsValue(url)) {
                for (int i = 1; i < Integer.MAX_VALUE; i++) {
                    if (!p.containsKey(String.valueOf(i))) {
                        fw.write("\n" + i + "=" + url);
                        new File(pth + i).delete();
                        break;
                    }
                }
            }
            fw.flush();
            fw.close();
        }

        for (Entry<Object, Object> entry : p.entrySet()) {
            if (((String) entry.getValue()).equals(url)) {
                File f = new File(pth + ((String) entry.getKey()));
                if (f.exists()) {
                    f.delete();
                }
            }
        }

        run();

    } catch (Exception e) {
        e.printStackTrace();
    }

    return new ModelAndView("redirect:" + AlaspatialProperties.getAlaspatialUrl() + "/sxs");
}

From source file:com.sshtools.common.configuration.SshToolsConnectionProfile.java

/**
 * Serializes this connection profile to XML.
 *
 * @return the profile as an XML string.
 */
public String toString() {
    String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n";
    xml += ("<SshToolsConnectionProfile Hostname=\"" + host + "\" Port=\"" + String.valueOf(port)
            + "\" Username=\"" + username + "\"" + " Provider=\"" + getTransportProviderString() + "\">");
    xml += ("   <PreferedCipher Client2Server=\"" + prefEncryption + "\" Server2Client=\"" + prefDecryption
            + "\"/>\n");
    xml += ("   <PreferedMac Client2Server=\"" + prefRecvMac + "\" Server2Client=\"" + prefSendMac + "\"/>\n");
    xml += ("   <PreferedCompression Client2Server=\"" + prefRecvComp + "\" Server2Client=\"" + prefSendComp
            + "\"/>\n");
    xml += ("   <PreferedPublicKey Name=\"" + prefPK + "\"/>\n");
    xml += ("   <PreferedKeyExchange Name=\"" + prefKex + "\"/>\n");
    xml += "   <OnceAuthenticated value=\"" + String.valueOf(onceAuthenticated) + "\"/>\n";

    if (onceAuthenticated == EXECUTE_COMMANDS) {
        xml += "    <ExecuteCommands>" + executeCommands + "</ExecuteCommands>\n";
    }

    Iterator it = authMethods.entrySet().iterator();
    Map.Entry entry;
    Properties properties;

    while (it.hasNext()) {
        entry = (Map.Entry) it.next();
        xml += ("   <AuthenticationMethod Name=\"" + entry.getKey() + "\">\n");

        SshAuthenticationClient auth = (SshAuthenticationClient) entry.getValue();
        properties = auth.getPersistableProperties();

        Iterator it2 = properties.entrySet().iterator();

        while (it2.hasNext()) {
            entry = (Map.Entry) it2.next();
            xml += ("      <AuthenticationProperty Name=\"" + entry.getKey() + "\" Value=\"" + entry.getValue()
                    + "\"/>\n");
        }

        xml += "   </AuthenticationMethod>\n";
    }

    it = applicationProperties.entrySet().iterator();

    while (it.hasNext()) {
        entry = (Map.Entry) it.next();
        xml += ("   <ApplicationProperty Name=\"" + entry.getKey() + "\" Value=\"" + entry.getValue()
                + "\"/>\n");
    }

    // Write the SFTP Favorite entries to file
    it = sftpFavorites.entrySet().iterator();

    while (it.hasNext()) {
        entry = (Map.Entry) it.next();
        xml += ("   <SftpFavorite Name=\"" + entry.getKey() + "\" Value=\"" + entry.getValue() + "\"/>\n");
    }

    it = localForwardings.values().iterator();

    xml += "   <ForwardingAutoStart value=\"" + String.valueOf(forwardingAutoStart) + "\"/>\n";

    while (it.hasNext()) {
        ForwardingConfiguration config = (ForwardingConfiguration) it.next();
        xml += ("   <LocalPortForwarding Name=\"" + config.getName() + "\" AddressToBind=\""
                + config.getAddressToBind() + "\" PortToBind=\"" + String.valueOf(config.getPortToBind())
                + "\" AddressToConnect=\"" + config.getHostToConnect() + "\" PortToConnect=\""
                + String.valueOf(config.getPortToConnect()) + "\"/>\n");
    }

    it = remoteForwardings.values().iterator();

    while (it.hasNext()) {
        ForwardingConfiguration config = (ForwardingConfiguration) it.next();
        xml += ("   <RemotePortForwarding Name=\"" + config.getName() + "\" AddressToBind=\""
                + config.getAddressToBind() + "\" PortToBind=\"" + String.valueOf(config.getPortToBind())
                + "\" AddressToConnect=\"" + config.getHostToConnect() + "\" PortToConnect=\""
                + String.valueOf(config.getPortToConnect()) + "\"/>\n");
    }

    xml += "</SshToolsConnectionProfile>";

    return xml;
}

From source file:edu.kit.dama.dataworkflow.AbstractExecutionEnvironmentHandler.java

/**
 * Check if the scheduled ingest has finished. Therefore, the associated
 * ingest stored in the object-transfer-map of pTask will be checked for the
 * status {@link INGEST_STATUS#INGEST_FINISHED}. If the ingest status is
 * {@link INGEST_STATUS#PRE_INGEST_FINISHED} or
 * {@link INGEST_STATUS#INGEST_RUNNING} FALSE is returned. If the ingest
 * status is {@link INGEST_STATUS#INGEST_FINISHED} or
 * {@link INGEST_STATUS#INGEST_FAILED} then TRUE is returned as the ingest
 * is basically finished because it won't change its state without further
 * interaction.
 *
 * @param pTask The task for which the ingest will be checked.
 *
 * @return TRUE the ingest has finished (successfully or not), FALSE if the
 * ingest is not yet or still running.
 *
 * @throws IngestException If the ingest has failed.
 */
private boolean isIngestFinished(DataWorkflowTask pTask) throws IngestException {
    boolean ingestFinished = true;
    try {
        Properties dataMap = pTask.getObjectTransferMapAsObject();
        Set<Entry<Object, Object>> entries = dataMap.entrySet();
        IAuthorizationContext ctx = DataWorkflowHelper.getTaskContext(pTask);
        for (Entry<Object, Object> entry : entries) {
            String objectId = (String) entry.getKey();
            Long transferId = Long.parseLong((String) entry.getValue());
            LOGGER.debug("Checking ingest status for object {} with ingest id {}", objectId, transferId);
            IngestInformation result = IngestInformationServiceLocal.getSingleton()
                    .getIngestInformationById(transferId, ctx);
            if (result == null) {
                setTaskStatus(pTask, TASK_STATUS.INGEST_FAILED);
                throw new IngestException("No ingest information found for id " + transferId
                        + ". Processing of task " + pTask.getId() + " cannot be continued.");
            }
            if (INGEST_STATUS.INGEST_FINISHED.equals(result.getStatusEnum())) {
                LOGGER.debug("Ingest for object {} with ingest id {} has finished.", objectId, transferId);
                setTaskStatus(pTask, TASK_STATUS.INGEST_FINISHED);
            } else if (INGEST_STATUS.INGEST_RUNNING.equals(result.getStatusEnum())
                    || INGEST_STATUS.PRE_INGEST_FINISHED.equals(result.getStatusEnum())) {
                LOGGER.debug("Ingest for object {} with ingest id {} is still in progress with status {}.",
                        objectId, transferId, result.getStatusEnum());
                ingestFinished = false;
                break;
            } else {
                setTaskStatus(pTask, TASK_STATUS.INGEST_FAILED);
                throw new IngestException("Ingest for object " + objectId + " with ingest id " + pTask.getId()
                        + " has failed with status " + result.getStatusEnum());
            }
        }
    } catch (IOException ex) {
        throw new IngestException("Failed to obtain object-transfer mapping for task with id " + pTask.getId(),
                ex);
    }

    return ingestFinished;
}

From source file:org.apache.hadoop.minikdc.MiniKdc.java

/**
 * Creates a MiniKdc.
 *
 * @param conf MiniKdc configuration.
 * @param workDir working directory; it should be the build directory. Under
 * this directory an ApacheDS working directory will be created; it will be
 * deleted when the MiniKdc stops.
 * @throws Exception thrown if the MiniKdc could not be created.
 */
public MiniKdc(Properties conf, File workDir) throws Exception {
    if (!conf.keySet().containsAll(PROPERTIES)) {
        Set<String> missingProperties = new HashSet<String>(PROPERTIES);
        missingProperties.removeAll(conf.keySet());
        throw new IllegalArgumentException("Missing configuration properties: " + missingProperties);
    }
    this.workDir = new File(workDir, Long.toString(System.currentTimeMillis()));
    if (!workDir.exists() && !workDir.mkdirs()) {
        throw new RuntimeException("Cannot create directory " + workDir);
    }
    LOG.info("Configuration:");
    LOG.info("---------------------------------------------------------------");
    for (Map.Entry<?, ?> entry : conf.entrySet()) {
        LOG.info("  {}: {}", entry.getKey(), entry.getValue());
    }
    LOG.info("---------------------------------------------------------------");
    this.conf = conf;
    port = Integer.parseInt(conf.getProperty(KDC_PORT));
    String orgName = conf.getProperty(ORG_NAME);
    String orgDomain = conf.getProperty(ORG_DOMAIN);
    realm = orgName.toUpperCase(Locale.ENGLISH) + "." + orgDomain.toUpperCase(Locale.ENGLISH);
}
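
The required-key check at the top of this constructor is a reusable pattern; a standalone sketch (hypothetical method name, plain java.util):

import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

// Throws if any required key is absent from the configuration.
static void requireKeys(Properties conf, Set<String> required) {
    Set<String> missing = new HashSet<>(required);
    missing.removeAll(conf.keySet()); // keys set via setProperty are Strings, so equals matches
    if (!missing.isEmpty()) {
        throw new IllegalArgumentException("Missing configuration properties: " + missing);
    }
}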

From source file:edu.kit.dama.dataworkflow.AbstractExecutionEnvironmentHandler.java

/**
 * Stage all data needed for pTask to the storage location accessible by the
 * processing environment. The staging phase can only be entered if pTask is
 * in status {@link DataWorkflowTask.TASK_STATUS#PREPARATION_FINISHED} or
 * for checks if the status is
 * {@link DataWorkflowTask.TASK_STATUS#STAGING}, {@link DataWorkflowTask.TASK_STATUS#STAGING_FINISHED}
 * or {@link DataWorkflowTask.TASK_STATUS#STAGING_FAILED}. Otherwise, a
 * StagingPreparationException will be thrown. If this method is called for
 * pTask for the first time, downloads will be scheduled for all digital
 * objects associated with pTask. The ids of the downloads will be stored in
 * the task, the task status will change to
 * {@link DataWorkflowTask.TASK_STATUS#STAGING} and FALSE will be returned.
 * If scheduling fails, the status will change to
 * {@link DataWorkflowTask.TASK_STATUS#STAGING_FAILED} and a
 * StagingPreparationException will be thrown.
 *
 * In subsequent calls, the transfer ids are used to query for the staging
 * status of all transfers. As long as not all transfers have finished, this
 * method will return FALSE and the status will remain
 * {@link DataWorkflowTask.TASK_STATUS#STAGING}. As soon as all data is
 * staged, the task status will switch to
 * {@link DataWorkflowTask.TASK_STATUS#STAGING_FINISHED} and TRUE is
 * returned. If any transfer fails, the status will switch to
 * {@link DataWorkflowTask.TASK_STATUS#STAGING_FAILED} and TRUE will be
 * returned in subsequent calls to indicate that the staging is in a final
 * state.
 *
 * In that case the task has to be reset to
 * {@link DataWorkflowTask.TASK_STATUS#PREPARATION_FINISHED} in order to
 * reattempt the staging process.
 *
 * @param pTask The task for which the transfers will be created/checked.
 *
 * @return TRUE if the staging process has finished (successful or not),
 * FALSE if at least one download is still unfinished.
 *
 * @throws StagingPreparationException if at least one associated object has
 * no valid transfer or if any download is in a failure state.
 */
private boolean performStaging(DataWorkflowTask pTask) throws StagingPreparationException {
    if (!DataWorkflowTask.TASK_STATUS.isStagingPhase(pTask.getStatus())) {
        throw new StagingPreparationException("Task with id " + pTask.getId() + " is in state "
                + pTask.getStatus() + ". Staging not possible.");
    }

    boolean stagingFinished = true;
    if (!DataWorkflowTask.TASK_STATUS.STAGING_FINISHED.equals(pTask.getStatus())) {
        //only enter as long as staging is not finished
        if (DataWorkflowTask.TASK_STATUS.PREPARATION_FINISHED.equals(pTask.getStatus())) {//schedule staging
            LOGGER.debug("Schedule staging for task with id {}", pTask.getId());
            stagingFinished = false;
            boolean statusUpdated = false;
            try {
                Properties objectTransferMap = DataWorkflowHelper.scheduleStaging(pTask);
                pTask.setObjectTransferMapAsObject(objectTransferMap);
                setTaskStatus(pTask, DataWorkflowTask.TASK_STATUS.STAGING);
                statusUpdated = true;
            } catch (StagingPreparationException | IOException ex) {
                setTaskStatus(pTask, DataWorkflowTask.TASK_STATUS.STAGING_FAILED);
                statusUpdated = true;
                throw new StagingPreparationException("Failed to schedule staging of input objects.", ex);
            } finally {
                if (!statusUpdated) {
                    //try to update manually again
                    setTaskStatus(pTask, DataWorkflowTask.TASK_STATUS.STAGING_FAILED);
                }
            }
        } else if (DataWorkflowTask.TASK_STATUS.STAGING.equals(pTask.getStatus())) {//status is STAGING, monitor status
            try {
                boolean wasStatusChangedToError = false;
                //update status
                LOGGER.debug("Checking staging status. Obtaining object-transfer map.");
                Properties dataMap = pTask.getObjectTransferMapAsObject();
                Set<Entry<Object, Object>> entries = dataMap.entrySet();
                LOGGER.debug("Checking {} transfer(s).", entries.size());
                IAuthorizationContext ctx = DataWorkflowHelper.getTaskContext(pTask);
                for (Entry<Object, Object> entry : entries) {
                    String objectId = (String) entry.getKey();
                    Long transferId = Long.parseLong((String) entry.getValue());
                    LOGGER.debug("Checking download status for object {} with download id {}", objectId,
                            transferId);
                    DownloadInformation result = DownloadInformationServiceLocal.getSingleton()
                            .getDownloadInformationById(transferId, ctx);
                    if (result == null) {//failed due to missing download entry
                        setTaskStatus(pTask, DataWorkflowTask.TASK_STATUS.STAGING_FAILED);
                        LOGGER.error("No download information found for id " + transferId
                                + ". Processing of task " + pTask.getId() + " cannot be continued.");
                        //"stagingFinished" remains 'TRUE' as the staging process itself ends here
                        wasStatusChangedToError = true;
                    } else if (DOWNLOAD_STATUS.DOWNLOAD_READY.equals(result.getStatusEnum())) {
                        LOGGER.debug("Download for object {} with download id {} is ready.", objectId,
                                transferId);
                    } else if (DOWNLOAD_STATUS.SCHEDULED.equals(result.getStatusEnum())
                            || DOWNLOAD_STATUS.PREPARING.equals(result.getStatusEnum())) {
                        LOGGER.debug(
                                "Download for object {} with download id {} is still in preparation with status {}.",
                                objectId, transferId, result.getStatusEnum());
                        stagingFinished = false;
                        //cancel loop over downloads
                        break;
                    } else {//transfer has failed normally
                        setTaskStatus(pTask, DataWorkflowTask.TASK_STATUS.STAGING_FAILED);
                        LOGGER.error("Download for object " + objectId + " with download id " + pTask.getId()
                                + " has failed with status " + result.getStatusEnum());
                        //"stagingFinished" remains 'TRUE' as the staging process itself ends here
                        wasStatusChangedToError = true;
                    }
                }
                if (stagingFinished && !wasStatusChangedToError) {
                    //if stagingFinished is still true, all downloads have been done
                    setTaskStatus(pTask, DataWorkflowTask.TASK_STATUS.STAGING_FINISHED);
                }

            } catch (IOException ex) {
                setTaskStatus(pTask, DataWorkflowTask.TASK_STATUS.STAGING_FAILED);
                LOGGER.error("Failed to obtain object-transfer mapping for task with id " + pTask.getId(), ex);
                //"stagingFinished" remains 'TRUE' as the staging process itself ends here
            }
        } else {//status should be STAGING_FAILED...just set dataAvailable to false
            stagingFinished = false;
        }
    } //status is STAGING_FINISHED, just return TRUE
    return stagingFinished;
}
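
Both DataWorkflow examples decode the same object-transfer map: a Properties whose entries are objectId → transferId, both stored as Strings. A sketch of that decode step in isolation (hypothetical method name):

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

// Converts the persisted objectId -> transferId mapping into typed form.
static Map<String, Long> decodeTransferMap(Properties transferMap) {
    Map<String, Long> decoded = new HashMap<>();
    for (Map.Entry<Object, Object> entry : transferMap.entrySet()) {
        decoded.put((String) entry.getKey(), Long.parseLong((String) entry.getValue()));
    }
    return decoded;
}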

From source file:edu.umd.cs.submit.CommandLineSubmit.java

/**
 * Assembles a multipart POST request containing the given files as a
 * single zip archive, plus the submission properties as parameters.
 *
 * @param p submission configuration, including the submitURL property.
 * @param find locator providing the root path of the files to submit.
 * @param files the files to package into the zip archive.
 * @param userProps per-user properties merged into the request.
 * @return the assembled multipart POST request.
 * @throws IOException if a file cannot be read.
 * @throws FileNotFoundException if a file to submit does not exist.
 */
public static MultipartPostMethod createFilePost(Properties p, FindAllFiles find, Collection<File> files,
        Properties userProps) throws IOException, FileNotFoundException {
    // ========================== assemble zip file in byte array
    // ==============================
    String loginName = userProps.getProperty("loginName");
    String classAccount = userProps.getProperty("classAccount");
    String from = classAccount;
    if (loginName != null && !loginName.equals(classAccount))
        from += "/" + loginName;
    System.out.println(" submitted by " + from);
    System.out.println();
    System.out.println("Submitting the following files");
    ByteArrayOutputStream bytes = new ByteArrayOutputStream(4096);
    byte[] buf = new byte[4096];
    ZipOutputStream zipfile = new ZipOutputStream(bytes);
    zipfile.setComment("zipfile for CommandLineTurnin, version " + VERSION);
    for (File resource : files) {
        if (resource.isDirectory())
            continue;
        String relativePath = resource.getCanonicalPath().substring(find.rootPathLength + 1);
        System.out.println(relativePath);
        ZipEntry entry = new ZipEntry(relativePath);
        entry.setTime(resource.lastModified());

        zipfile.putNextEntry(entry);
        InputStream in = new FileInputStream(resource);
        try {
            while (true) {
                int n = in.read(buf);
                if (n < 0)
                    break;
                zipfile.write(buf, 0, n);
            }
        } finally {
            in.close();
        }
        zipfile.closeEntry();

    } // for each file
    zipfile.close();

    MultipartPostMethod filePost = new MultipartPostMethod(p.getProperty("submitURL"));

    p.putAll(userProps);
    // add properties
    for (Map.Entry<?, ?> e : p.entrySet()) {
        String key = (String) e.getKey();
        String value = (String) e.getValue();
        if (!key.equals("submitURL"))
            filePost.addParameter(key, value);
    }
    filePost.addParameter("submitClientTool", "CommandLineTool");
    filePost.addParameter("submitClientVersion", VERSION);
    byte[] allInput = bytes.toByteArray();
    filePost.addPart(new FilePart("submittedFiles", new ByteArrayPartSource("submit.zip", allInput)));
    return filePost;
}