Java tutorial

The listing below is ManagedCLIJob, an abstract base class from the com.archivas.clienttools.arcmover.cli package. It defines the command-line options shared by the copy, delete, and metadata jobs, dispatches to the correct job in main(), runs the job while printing progress, builds the job summary, exports result lists, and handles the --rerun and --resume options.
// Copyright 2007 Hitachi Data Systems
// All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.

package com.archivas.clienttools.arcmover.cli;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.archivas.clienttools.arcutils.api.*;
import com.archivas.clienttools.arcutils.api.jobs.ManagedJobSummary;
import com.archivas.clienttools.arcutils.impl.adapter.ConnectionTestException;
import com.archivas.clienttools.arcutils.impl.adapter.StorageAdapter;
import com.archivas.clienttools.arcutils.impl.adapter.StorageAdapterMgr;
import com.archivas.clienttools.arcutils.profile.AbstractProfileBase;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang.StringUtils;
import com.archivas.clienttools.arcutils.api.jobs.ManagedJob;
import com.archivas.clienttools.arcutils.config.ConfigurationHelper;
import com.archivas.clienttools.arcutils.impl.jobs.FileStats;
import com.archivas.clienttools.arcutils.impl.jobs.ManagedJobImpl;
import com.archivas.clienttools.arcutils.model.LoadSchedule;
import com.archivas.clienttools.arcutils.utils.FileUtil;
import com.archivas.clienttools.arcutils.utils.database.DatabaseException;
import javax.net.ssl.SSLPeerUnverifiedException;

public abstract class ManagedCLIJob extends AbstractArcCli {

    public static final String PACKAGE_NAME = ArcCopy.class.getPackage().getName();
    public static final String CLASS_FULL_NAME = ArcCopy.class.getName();
    public static final String CLASS_NAME = CLASS_FULL_NAME.substring(PACKAGE_NAME.length() + 1);
    public static Logger LOG = Logger.getLogger(CLASS_FULL_NAME);

    // This is 1 if arccopy was called and 2 if it was called via the outer batch file
    protected int numCmdLineArgs;

    // Help constants
    protected static String HELP_USAGE_LINE;
    protected static String HELP_HEADER = "";
    protected static String HELP_FOOTER = ""; // We don't have one at this time
    protected static String HELP_OPTION = "help";

    // Option names
    protected static String MAX_CONNECTIONS = "max-sys-ops";
    protected static String MAX_NODE_CONNECTIONS = "max-node-ops";
    protected static String REDUCED_MAX_CONNECTIONS = "reduced-max-sys-ops";
    protected static String REDUCED_MAX_NODE_CONNECTIONS = "reduced-max-node-ops";
    protected static String REDUCED_START = "reduced-start";
    protected static String REDUCED_END = "reduced-end";
    protected static String EXPORT_RESULTS_TYPE = "export-results";
    protected static String EXPORT_RESULTS_PATH = "export-results-path";
    protected static String EXPORT_RESULTS_PREFIX = "export-results-prefix";
    protected static String IGNORE_CONFLICTS = "ignore-conflicts";
    protected static String JOB_NAME = "job-name";
    protected static String RERUN = "rerun";
    protected static String RESUME = "resume";

    public static final String JOB_TYPE_METADATA = "Metadata";
    public static final String JOB_TYPE_DELETE = "Delete";
    public static final String JOB_TYPE_COPY = "Copy";

    ArcMoverEngine arcMover = ArcMoverFactory.getInstance();

    // Program options
    ManagedJobImpl managedJobImpl = null;
    ManagedJob managedJob = null;
    boolean printHelp = false;

    // Command data
    boolean isFirstStatusLine = true;
    protected static SimpleDateFormat timeFormat = new SimpleDateFormat("HH:mm");
    ExportResults exportResults;

    public ManagedCLIJob(String[] args, int numCmdLineArgs) {
        super(args);
        this.numCmdLineArgs = numCmdLineArgs;
    }

    public String getHelpFooter() {
        return HELP_FOOTER;
    }

    public String getHelpHeader() {
        return HELP_HEADER;
    }

    public String getHelpUsageLine() {
        return HELP_USAGE_LINE;
    }

    protected boolean shouldPrintHelp() {
        return printHelp;
    }

    public enum ExportListType {
        ALL, SUCCESS, FAILURE, JOBLIST, CONFLICT
    }

    @SuppressWarnings({ "UseOfSystemOutOrSystemErr" })
    public static void main(String[] args) {
        if (LOG.isLoggable(Level.FINE)) {
            StringBuffer sb = new StringBuffer();
            sb.append("Program Arguments").append(NEWLINE);
            for (int i = 0; i < args.length; i++) {
                sb.append(" ").append(i).append(": ").append(args[i]);
                sb.append(NEWLINE);
            }
            LOG.log(Level.FINE, sb.toString());
        }

        ConfigurationHelper.validateLaunchOK();

        ManagedCLIJob arcCmd = null;
        try {
            // Dispatch on the sub-command name passed by the wrapper script
            if (args[0].equals("copy")) {
                arcCmd = new ArcCopy(args, 2);
            } else if (args[0].equals("delete")) {
                arcCmd = new ArcDelete(args, 2);
            } else if (args[0].equals("metadata")) {
                arcCmd = new ArcMetadata(args, 2);
            } else {
                throw new RuntimeException("Unsupported operation: " + args[0]);
            }

            arcCmd.parseArgs();
            if (arcCmd.shouldPrintHelp()) {
                System.out.println(arcCmd.helpScreen());
            } else {
                arcCmd.execute(new PrintWriter(System.out), new PrintWriter(System.err));
            }
        } catch (ParseException e) {
            System.out.println("Error: " + e.getMessage());
            System.out.println();
            System.out.println(arcCmd.helpScreen());
            arcCmd.setExitCode(EXIT_CODE_OPTION_PARSE_ERROR);
        } catch (Exception e) {
            LOG.log(Level.SEVERE, e.getMessage(), e);
            System.out.println();
            System.err.println("Job failed. " + e.getMessage());
            if (arcCmd != null) {
                arcCmd.setExitCode(EXIT_CODE_DM_ERROR);
            }
        } finally {
            if (arcCmd != null) {
                arcCmd.exit();
            }
        }
    }

    @SuppressWarnings({ "static-access", "AccessStaticViaInstance" })
    protected void getSharedOptions(Options options) {
        options.addOption(OptionBuilder.withArgName("results_path").hasArg().withDescription(
                "Directory in which to store the exported results lists. If omitted, defaults to the current directory.")
                .withLongOpt(EXPORT_RESULTS_PATH).create());
        options.addOption(OptionBuilder.withArgName("string").hasArg().withDescription(
                "Prefix for the results lists file names. If omitted, results lists file names have no prefix.")
                .withLongOpt(EXPORT_RESULTS_PREFIX).create());
        options.addOption(OptionBuilder.withArgName("integer").hasArg().withDescription(
                "Maximum number of connections per HCP system node. Valid range is from 1 to 100. If omitted, defaults to 25.")
                .withLongOpt(MAX_NODE_CONNECTIONS).create());
        options.addOption(OptionBuilder.withArgName("integer").hasArg().withDescription(
                "Maximum number of connections to the HCP system. Valid range is from 2 to 1000. If omitted, defaults to 200.")
                .withLongOpt(MAX_CONNECTIONS).create());
        options.addOption(OptionBuilder.withArgName("integer").hasArg().withDescription(
                "Maximum number of connections per HCP system node during reduced load. Valid range is from 1 to 100. If omitted, defaults to 4.")
                .withLongOpt(REDUCED_MAX_NODE_CONNECTIONS).create());
        options.addOption(OptionBuilder.withArgName("integer").hasArg().withDescription(
                "Maximum number of connections to the HCP system during reduced load. Valid range is from 2 to 1000. If omitted, defaults to 50.")
                .withLongOpt(REDUCED_MAX_CONNECTIONS).create());
        options.addOption(OptionBuilder.withArgName("hh:mm").hasArg()
                .withDescription("Start time for the reduced load. If omitted, defaults to 08:00. If " + REDUCED_END
                        + " is also omitted, reduced load is off.")
                .withLongOpt(REDUCED_START).create());
        options.addOption(OptionBuilder.withArgName("hh:mm").hasArg()
                .withDescription("End time for the reduced load. If omitted, defaults to 20:00. If " + REDUCED_START
                        + " is also omitted, reduced load is off.")
                .withLongOpt(REDUCED_END).create());
        options.addOption(OptionBuilder.withArgName("job_name").hasArg().withDescription("The name of the job.")
                .withLongOpt(JOB_NAME).create());
        options.addOption(getInsecureSSLOption());
    }

    /**
     * Entry point for running the managed job: validates the profiles, starts the job,
     * prints progress until it finishes, then prints the summary and exports results.
     */
    public void execute(PrintWriter out, PrintWriter err) throws Exception {
        // We have to at least have a job and a source profile
        if (managedJob == null || managedJob.getSourceProfile() == null) {
            throw new ParseException("Job not created during parsing of the command line");
        }

        // Make sure we can access the two profiles
        try {
            testConnection(managedJob.getSourceProfile());
            testConnection(managedJob.getTargetProfile());
        } catch (Exception e) {
            try {
                arcMover.removeManagedJob(managedJobImpl.getJobId(), managedJob.getJobType());
            } catch (Exception ex) {
                LOG.log(Level.WARNING, "An error occurred removing job "
                        + managedJobImpl.getJob().getJobName() + ": " + ex.getMessage(), ex);
            } finally {
                managedJobImpl = null;
                throw e;
            }
        }

        if (managedJob.getJobType() == ManagedJob.Type.COPY && managedJob.getSourceProfile().getType()
                .isLesserApiVersion(managedJob.getTargetProfile().getType())) {
            out.println(StringUtils.repeat("*", 80));
            out.println("Copying objects to an earlier release of HCP results in loss of metadata that is");
            out.println("not supported by the earlier release.");
            out.println(StringUtils.repeat("*", 80));
            out.println();
        }

        ManagedJobStats jobStats;
        arcMover.startManagedJob(managedJobImpl);

        // If we are rerunning the job there could be a small window where
        // arcMover.getStatus(managedJobImpl).isFinished() is true. If that is the case,
        // sleep for a second and then continue.
        if (managedJobImpl.getStatus().isFinished()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                /* Ignore it */
            }
        }

        // Poll once a second and rewrite the progress line until the job finishes
        while (!managedJobImpl.getStatus().isFinished()) {
            jobStats = arcMover.getManagedJobStats(managedJobImpl);
            out.print(buildProgressString(jobStats));
            out.flush();
            LOG.fine("Job Stats : " + jobStats);
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // do nothing
            }
        }
        jobStats = arcMover.getManagedJobStats(managedJobImpl);
        out.print(buildProgressString(jobStats));
        LOG.fine("Job Stats : " + jobStats);

        String details = generateDetails(arcMover.getManagedJobDetails(managedJobImpl, true));
        String summary = generateSummary(arcMover.getManagedJobStats(managedJobImpl), managedJob.getJobType());
        LOG.fine(details);
        LOG.info(summary);
        out.println(summary);

        // Export results synchronously
        if (exportResults.shouldExportResults()) {
            out.println("Exporting results");
            exportResults.exportResults();
        }

        // Set the error code if we have failures
        if (jobStats.areErrors()) {
            setExitCode(EXIT_CODE_DM_FAILED_FILES);
        }

        // Delete the job if it completed and we have no failed files
        if (jobStats.getStatus().equals(JobStatus.COMPLETED) && !jobStats.areErrors()) {
            try {
                arcMover.removeManagedJob(jobStats.getJobId(), managedJob.getJobType());
            } catch (Exception e) {
                LOG.log(Level.WARNING, "An error occurred removing job "
                        + managedJobImpl.getJob().getJobName() + ": " + e.getMessage(), e);
            } finally {
                managedJobImpl = null;
            }
        }

        // Flush the CLI
        out.flush();
        err.flush();
    }

    protected void setUpExportListThread(CommandLine cmdLine) throws ParseException {
        List<ExportListType> exportTypeList = new ArrayList<ExportListType>();
        String exportListPath = "";
        String exportListPrefix = "";

        if (cmdLine.hasOption(EXPORT_RESULTS_TYPE)) {
            String resultsListStr = cmdLine.getOptionValue(EXPORT_RESULTS_TYPE);
            if (resultsListStr.contains("ALL")) {
                for (ExportListType type : ExportListType.values()) {
                    if (type == ExportListType.CONFLICT && !supportsConflictReports()) {
                        continue;
                    }
                    exportTypeList.add(type);
                }
            } else {
                String[] resultsList = resultsListStr.split(",");
                for (String type : resultsList) {
                    try {
                        ExportListType listType = ExportListType.valueOf(type.trim());
                        if (listType == ExportListType.CONFLICT && !supportsConflictReports()) {
                            throw new IllegalArgumentException();
                        }
                        exportTypeList.add(listType);
                    } catch (IllegalArgumentException e) {
                        throw new ParseException("Values provided for export-results do not match the expected values.");
                    }
                }
            }
        }

        if (cmdLine.hasOption(EXPORT_RESULTS_PATH)) {
            exportListPath = cmdLine.getOptionValue(EXPORT_RESULTS_PATH);
        }
        if (cmdLine.hasOption(EXPORT_RESULTS_PREFIX)) {
            exportListPrefix = cmdLine.getOptionValue(EXPORT_RESULTS_PREFIX);
        }

        try {
            exportResults = new ExportResults(exportListPath, exportListPrefix,
                    exportTypeList.contains(ExportListType.FAILURE), exportTypeList.contains(ExportListType.SUCCESS),
                    exportTypeList.contains(ExportListType.JOBLIST), exportTypeList.contains(ExportListType.CONFLICT));
        } catch (IOException e) {
            LOG.log(Level.WARNING,
                    "IOException caught when trying to create export files. Make sure path is valid and writable", e);
            throw new ParseException(
                    "IOException caught when trying to create export files. Make sure path is valid and writable");
        }
    }

    protected void getLoadSchedule(CommandLine cmdLine, LoadSchedule result) throws ParseException {
        if (result == null) {
            return;
        }
        if (cmdLine.hasOption(MAX_NODE_CONNECTIONS)) {
            result.setNormalLoadMaxConnPerNodeCount(Integer.valueOf(cmdLine.getOptionValue(MAX_NODE_CONNECTIONS)));
        }
        if (cmdLine.hasOption(MAX_CONNECTIONS)) {
            result.setNormalLoadMaxConnCount(Integer.valueOf(cmdLine.getOptionValue(MAX_CONNECTIONS)));
        }
        if (cmdLine.hasOption(REDUCED_MAX_NODE_CONNECTIONS)) {
            result.setLowLoadMaxConnPerNodeCount(Integer.valueOf(cmdLine.getOptionValue(REDUCED_MAX_NODE_CONNECTIONS)));
            result.setLowLoadEnabled(true);
        }
        if (cmdLine.hasOption(REDUCED_MAX_CONNECTIONS)) {
            result.setLowLoadMaxConnCount(Integer.valueOf(cmdLine.getOptionValue(REDUCED_MAX_CONNECTIONS)));
            result.setLowLoadEnabled(true);
        }
        if (cmdLine.hasOption(REDUCED_START)) {
            try {
                result.setLowLoadStartTime(timeFormat.parse(cmdLine.getOptionValue(REDUCED_START)));
                result.setLowLoadEnabled(true);
            } catch (java.text.ParseException e) {
                throw new ParseException("Invalid format for start time. Expected HH:mm");
            }
        }
        if (cmdLine.hasOption(REDUCED_END)) {
            try {
                result.setLowLoadEndTime(timeFormat.parse(cmdLine.getOptionValue(REDUCED_END)));
                result.setLowLoadEnabled(true);
            } catch (java.text.ParseException e) {
                throw new ParseException("Invalid format for end time. Expected HH:mm");
            }
        }
    }

    private String buildProgressString(ManagedJobStats stats) {
        StringBuilder sb = new StringBuilder();
        if (isFirstStatusLine) {
            isFirstStatusLine = false;
        } else {
            // Carriage return so the progress line is rewritten in place
            sb.append("\r");
        }
        sb.append(getFormattedJobStats(stats));
        return sb.toString();
    }

    private String generateSummary(ManagedJobStats jobStats, ManagedJob.Type jobType) {
        String totalObjectsLabel = "Total Objects to " + jobType.getUiName();
        if (JOB_TYPE_METADATA.equals(jobType.getUiName())) {
            totalObjectsLabel = "Total Objects to Change";
        }
        String longestLabel = totalObjectsLabel;

        // Construct a padding string so all labels line up
        StringBuilder pb = new StringBuilder();
        for (int i = 0; i < longestLabel.length(); i++) {
            pb.append(" ");
        }
        String padding = pb.toString();

        StringBuilder sb = new StringBuilder();
        String newLine = NEWLINE;
        String spaces = String.format("%2s", " ");

        sb.append(newLine);
        sb.append(newLine);
        sb.append("Job Summary").append(newLine);
        sb.append(newLine);

        appendLabel(sb, "Status", padding);
        sb.append(jobStats.getStatus());
        sb.append(newLine);

        appendLabel(sb, "Objects Found", padding);
        sb.append(jobStats.getDiscoveredObjectCount());
        sb.append(newLine);

        appendLabel(sb, totalObjectsLabel, padding);
        sb.append(jobStats.getTotalObjectCount());
        // Do not report size information for delete and metadata jobs
        if (JOB_TYPE_COPY.equals(jobType.getUiName())) {
            sb.append(spaces + ", ");
            sb.append(jobStats.getTotalBytesInKB());
            sb.append(" KB");
        }
        sb.append(newLine);

        appendLabel(sb, "Successful", padding);
        sb.append(jobStats.getCompletedObjectCount());
        if (JOB_TYPE_COPY.equals(jobType.getUiName())) {
            sb.append(spaces + ", ");
            sb.append(jobStats.getCompletedBytesInKB());
            sb.append(" KB");
        }
        sb.append(newLine);

        appendLabel(sb, "Errors", padding);
        sb.append(jobStats.getErroredObjectCount());
        if (JOB_TYPE_COPY.equals(jobType.getUiName())) {
            sb.append(spaces + ", ");
            sb.append(jobStats.getErroredBytesInKB());
            sb.append(" KB");
        }
        sb.append(newLine);

        appendAdditionalOutput(jobStats, sb, padding);

        appendLabel(sb, "Total Time", padding);
        sb.append(jobStats.getRunTimeMs() / 1000);
        sb.append(" seconds");
        sb.append(newLine);

        if (jobStats.getJobException() != null) {
            sb.append(newLine);
            appendLabel(sb, "Exception", padding);
            sb.append(jobStats.getJobException());
            StringWriter sw = new StringWriter();
            jobStats.getJobException().printStackTrace(new PrintWriter(sw));
            sb.append(newLine);
            sb.append(sw.toString());
            // sb.append(newLine);
        }

        return sb.toString();

        /*
         * Sample rise output:
         * Run summary:
         *   Total Operations: 1000   Errors: 0   Error rate: 0.00%
         *   Breakdown: Writes: 1000 (100.0%)  Reads: 0 (0.0%)  Exists: 0 (0.0%)  Deletes: 0 (0.0%)
         *   Request Breakdown: Writes: 0 (nan%)  Reads: 0 (nan%)  Exists: 0 (nan%)  Deletes: 0 (nan%)
         *   I/O Rate: 13.87 operations/second   Throughput: 13.87 KB/second
         */
    }

    protected static final StringBuilder appendLabel(StringBuilder sb, String label, String padding) {
        return sb.append(label).append(padding.substring(label.length())).append(" : ");
    }

    protected abstract void appendAdditionalOutput(ManagedJobStats jobStats, StringBuilder sb, String padding);

    protected abstract String getFormattedJobStats(ManagedJobStats jobStats);

    protected abstract String getErrorMessage();

    protected abstract ManagedJob.Type getJobType();

    protected abstract JobId getLastJobID() throws NumberFormatException;

    protected abstract boolean supportsConflictReports();

    private String generateDetails(ManagedJobImpl.JobDetails jobDetails) {
        String newLine = NEWLINE;
        StringBuilder sb = new StringBuilder();
        List<FileStats> errors = jobDetails.getObjectsFailed();
        for (FileStats fileStats : errors) {
            sb.append(fileStats.getDisplayUriRelativeToBasePath());
            sb.append(getErrorMessage());
            Throwable e = fileStats.getException();
            if (e != null) {
                sb.append(e.getMessage());
                sb.append(newLine);
            }
        }
        if (sb.length() > 0) {
            sb.insert(0, newLine).insert(0, "Job Details ").insert(0, newLine);
        }
        return sb.toString();
    }

    protected class ExportResults {
        private File copyListFile;
        private File successesFile;
        private File failuresFile;
        private File conflictsFile;
        boolean exportResults = false;

        public ExportResults(String path, String prefix, boolean exportFailures, boolean exportSuccesses,
                boolean exportJobList, boolean exportConflicts) throws IOException {
            if (exportFailures) {
                // If no path was provided we want to stay in the current working directory,
                // so we don't want a file separator. FileUtil.resolvePath will add one, so we
                // don't call that function in this case.
                if (path == null || path.equals("")) {
                    failuresFile = new File(prefix + ManagedJobImpl.FAILURES_SUFFIX);
                } else {
                    failuresFile = new File(
                            FileUtil.resolvePath(path, prefix + ManagedJobImpl.FAILURES_SUFFIX, File.separator));
                }
                if (!failuresFile.exists()) {
                    failuresFile.createNewFile();
                }
            }
            if (exportSuccesses) {
                // See above comment
                if (path == null || path.equals("")) {
                    successesFile = new File(prefix + ManagedJobImpl.SUCCESSES_SUFFIX);
                } else {
                    successesFile = new File(
                            FileUtil.resolvePath(path, prefix + ManagedJobImpl.SUCCESSES_SUFFIX, File.separator));
                }
                if (!successesFile.exists()) {
                    successesFile.createNewFile();
                }
            }
            if (exportJobList) {
                // See above comment
                if (path == null || path.equals("")) {
                    copyListFile = new File(prefix + ManagedJobImpl.COPY_LIST_SUFFIX);
                } else {
                    copyListFile = new File(
                            FileUtil.resolvePath(path, prefix + ManagedJobImpl.COPY_LIST_SUFFIX, File.separator));
                }
                if (!copyListFile.exists()) {
                    copyListFile.createNewFile();
                }
            }
            if (exportConflicts) {
                // See above comment
                if (path == null || path.equals("")) {
                    conflictsFile = new File(prefix + ManagedJobImpl.CONFLICTS_SUFFIX);
                } else {
                    conflictsFile = new File(
                            FileUtil.resolvePath(path, prefix + ManagedJobImpl.CONFLICTS_SUFFIX, File.separator));
                }
                if (!conflictsFile.exists()) {
                    conflictsFile.createNewFile();
                }
            }
            exportResults = copyListFile != null || successesFile != null || failuresFile != null
                    || conflictsFile != null;
        }

        public boolean shouldExportResults() {
            return exportResults;
        }

        public void exportResults() {
            try {
                managedJobImpl.exportResults(copyListFile, successesFile, failuresFile, conflictsFile);
            } catch (DatabaseException e) {
                LOG.log(Level.INFO, "Export Results Thread Stopping: DatabaseException", e);
            }
        }
    }

    protected boolean handleRerunAndResume(CommandLine cmdLine, LoadSchedule schedule) throws ParseException {
        boolean result = false;
        String whichCmd = null;

        if (cmdLine.hasOption(RERUN) && cmdLine.hasOption(RESUME)) {
            throw new ParseException(
                    "Cannot use both --resume and --rerun. Please use just one. Consult your manual if you have questions.");
        } else if (cmdLine.hasOption(RERUN)) {
            whichCmd = RERUN;
        } else if (cmdLine.hasOption(RESUME)) {
            whichCmd = RESUME;
        }

        if (whichCmd != null) {
            try {
                managedJobImpl = getManagedJobImplFromCmdLine(cmdLine, whichCmd);

                // Error handling:
                // Rerun only works on completed jobs that have failed objects.
                // Resume only works on jobs that have not completed.
                if (whichCmd.equals(RERUN)) {
                    if (managedJobImpl.getStatus() != JobStatus.COMPLETED) {
                        throw new ParseException(
                                "The job with this job name: \"" + managedJobImpl.getJob().getJobName()
                                        + "\" has not completed. Please run this job using --resume.");
                    }
                    if (managedJobImpl.getFailedObjectCount() == 0) {
                        throw new ParseException(
                                "The job with this job name: \"" + managedJobImpl.getJob().getJobName()
                                        + "\" has already run to completion and has no failed files.");
                    }
                } else if (whichCmd.equals(RESUME)) {
                    if (managedJobImpl.getStatus() == JobStatus.COMPLETED) {
                        throw new ParseException("The job with this job name: \""
                                + managedJobImpl.getJob().getJobName()
                                + "\" has already run to completion. Please run that job with --rerun if there were failed files.");
                    }
                }

                managedJob = managedJobImpl.getJob();
                if (cmdLine.hasOption(RERUN) || cmdLine.hasOption(RESUME)) {
                    LoadSchedule rerunResumeSchedule = managedJob.getLoadSchedule();
                    getLoadSchedule(cmdLine, rerunResumeSchedule);
                    managedJob.setLoadSchedule(rerunResumeSchedule);
                } else {
                    managedJob.setLoadSchedule(schedule);
                }

                // Flush the stats
                arcMover.getManagedJobDetails(managedJobImpl, true);
                result = true;
            } catch (JobNotFoundException e) {
                throw new ParseException(
                        "JobNotFoundException while trying to " + whichCmd + " a job. Msg: " + e.getMessage());
            } catch (DatabaseException e) {
                throw new ParseException(
                        "DatabaseException trying to get job to " + whichCmd + " a job. Msg: " + e.getMessage());
            } catch (JobException e) {
                throw new ParseException(
                        "JobException trying to initialize job to " + whichCmd + ". Msg: " + e.getMessage());
            }
        }
        return result;
    }

    private ManagedJobImpl getManagedJobImplFromCmdLine(CommandLine cmdLine, String whichCmd)
            throws ParseException, DatabaseException, JobException {
        JobId reRunJobID = null;
        String jobName = cmdLine.getOptionValue(whichCmd);

        // If no job name was provided, get the last job run
        if (jobName == null) {
            try {
                reRunJobID = getLastJobID();
            } catch (NumberFormatException e) {
                throw new ParseException(
                        "Last job not found. Either no jobs have been run or the job was deleted.");
            }
            if (reRunJobID == null) {
                throw new ParseException("Error determining the last job run.");
            }
        } else {
            List<ManagedJobSummary> jobList = arcMover.getAllManagedJobs();
            for (ManagedJobSummary job : jobList) {
                if (job.getJobName().equals(jobName)) {
                    if (!job.getJobType().equals(getJobType())) {
                        throw new ParseException(
                                "Job name: " + jobName + " provided is not of type " + getJobType().getUiName());
                    }
                    reRunJobID = job.getJobId();
                    break;
                }
            }
            if (reRunJobID == null) {
                throw new ParseException("Job name not found in job list.");
            }
        }
        return arcMover.loadManagedJob(reRunJobID, getJobType());
    }

    private void testConnection(AbstractProfileBase profile) throws JobException {
        boolean result = false;
        if (profile != null) {
            try {
                StorageAdapter adapter = StorageAdapterMgr.getStorageAdapter(profile, null);
                result = adapter.testConnection();
            } catch (ConnectionTestException e) {
                String msg = null;
                if (e.getMessage() != null) {
                    msg = e.getMessage();
                }
                if (e.getCause() != null && e.getCause() instanceof SSLPeerUnverifiedException) {
                    msg += " If SSL is enabled make sure you are using the --insecure flag.";
                }
                if (msg != null) {
                    LOG.log(Level.WARNING,
                            "Exception while connecting to profile: " + profile.getName() + ": " + msg, e);
                    throw new JobException(msg);
                }
            }
            if (!result) {
                throw new JobException("Cannot connect to profile: " + profile.getName() + ".");
            }
        }
    }
}
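ManagedCLIJob leaves several members abstract, so each command (copy, delete, metadata) supplies its own job type, progress line, and error text. The sketch below is a hypothetical, minimal subclass meant only to show which members a concrete job must implement. It is not part of the product code base: it assumes the inherited AbstractArcCli pieces (parseArgs(), helpScreen(), and so on) are concrete, and it assumes ManagedJob.Type exposes a DELETE constant analogous to the COPY constant used above. The real ArcCopy, ArcDelete, and ArcMetadata classes are more involved.

package com.archivas.clienttools.arcmover.cli;

import com.archivas.clienttools.arcutils.api.*;
import com.archivas.clienttools.arcutils.api.jobs.ManagedJob;

// Hypothetical example class for illustration only.
public class ExampleDeleteJob extends ManagedCLIJob {

    public ExampleDeleteJob(String[] args, int numCmdLineArgs) {
        super(args, numCmdLineArgs);
    }

    @Override
    protected void appendAdditionalOutput(ManagedJobStats jobStats, StringBuilder sb, String padding) {
        // No command-specific lines are added to the job summary in this sketch.
    }

    @Override
    protected String getFormattedJobStats(ManagedJobStats jobStats) {
        // One-line progress string; buildProgressString() rewrites it in place with "\r".
        return "Deleted " + jobStats.getCompletedObjectCount() + " of "
                + jobStats.getTotalObjectCount() + " objects";
    }

    @Override
    protected String getErrorMessage() {
        // Separator printed between a failed object path and its exception message.
        return " could not be deleted: ";
    }

    @Override
    protected ManagedJob.Type getJobType() {
        return ManagedJob.Type.DELETE; // assumption: a DELETE constant exists alongside COPY
    }

    @Override
    protected JobId getLastJobID() throws NumberFormatException {
        // The real commands look up the last job id in local state; returning null here
        // simply makes --rerun/--resume without a job name fail with a parse error.
        return null;
    }

    @Override
    protected boolean supportsConflictReports() {
        return false; // in this sketch, conflict reports are treated as a copy-only feature
    }
}

With a subclass like this wired into the dispatch in main(), the sub-command name on the command line selects the job, and everything else (the progress loop, summary, result export, and --rerun/--resume handling) comes from ManagedCLIJob.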