List of usage examples for java.lang.IndexOutOfBoundsException.getMessage(), drawn from open-source projects
public String getMessage()
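Before the project excerpts below, a minimal self-contained sketch of the call itself. The exact message text is supplied by whatever code throws the exception and varies across JVM versions, so the wording in the comments is illustrative only.

import java.util.Arrays;
import java.util.List;

public class GetMessageDemo {
    public static void main(String[] args) {
        List<String> items = Arrays.asList("a", "b");
        try {
            items.get(3); // index 3 is out of range for a list of length 2
        } catch (IndexOutOfBoundsException e) {
            // getMessage() returns the detail message the thrower supplied,
            // e.g. "Index 3 out of bounds for length 2" on recent JDKs.
            System.err.println("Lookup failed: " + e.getMessage());
        }
    }
}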
From source file: org.apache.hadoop.hive.ql.exec.ExecDriver.java

public static void main(String[] args) throws IOException, HiveException {
    String planFileName = null;
    ArrayList<String> jobConfArgs = new ArrayList<String>();
    boolean noLog = false;
    String files = null;
    boolean localtask = false;

    try {
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-plan")) {
                planFileName = args[++i];
            } else if (args[i].equals("-jobconf")) {
                jobConfArgs.add(args[++i]);
            } else if (args[i].equals("-nolog")) {
                noLog = true;
            } else if (args[i].equals("-files")) {
                files = args[++i];
            } else if (args[i].equals("-localtask")) {
                localtask = true;
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }

    JobConf conf;
    if (localtask) {
        conf = new JobConf(MapredLocalTask.class);
    } else {
        conf = new JobConf(ExecDriver.class);
    }

    StringBuilder sb = new StringBuilder("JobConf:\n");
    for (String one : jobConfArgs) {
        int eqIndex = one.indexOf('=');
        if (eqIndex != -1) {
            try {
                String key = one.substring(0, eqIndex);
                String value = URLDecoder.decode(one.substring(eqIndex + 1), "UTF-8");
                conf.set(key, value);
                sb.append(key).append("=").append(value).append("\n");
            } catch (UnsupportedEncodingException e) {
                System.err.println("Unexpected error " + e.getMessage() + " while encoding "
                        + one.substring(eqIndex + 1));
                System.exit(3);
            }
        }
    }

    if (files != null) {
        conf.set("tmpfiles", files);
    }

    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);

    if (noLog) {
        // If started from main(), and noLog is on, we should not output
        // any logs. To turn the log on, please set -Dtest.silent=false
        BasicConfigurator.resetConfiguration();
        BasicConfigurator.configure(new NullAppender());
    } else {
        setupChildLog4j(conf);
    }

    Log LOG = LogFactory.getLog(ExecDriver.class.getName());
    LogHelper console = new LogHelper(LOG, isSilent);

    if (planFileName == null) {
        console.printError("Must specify Plan File Name");
        printUsage();
    }

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections
            .list((Enumeration<Appender>) LogManager.getRootLogger().getAllAppenders())) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
        }
    }

    // log the list of job conf parameters for reference
    LOG.info(sb.toString());

    // the plan file should always be in local directory
    Path p = new Path(planFileName);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream pathData = fs.open(p);

    // this is workaround for hadoop-17 - libjars are not added to classpath of the
    // child process. so we add it here explicitly
    String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
    String addedJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEADDEDJARS);
    try {
        // see also - code in CliDriver.java
        ClassLoader loader = conf.getClassLoader();
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        if (StringUtils.isNotBlank(addedJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(addedJars, ","));
        }
        conf.setClassLoader(loader);
        // Also set this to the Thread ContextClassLoader, so new threads will inherit
        // this class loader, and propagate into newly created Configurations by those
        // new threads.
        Thread.currentThread().setContextClassLoader(loader);
    } catch (Exception e) {
        throw new HiveException(e.getMessage(), e);
    }

    int ret;
    if (localtask) {
        memoryMXBean = ManagementFactory.getMemoryMXBean();
        MapredLocalWork plan = Utilities.deserializeMapRedLocalWork(pathData, conf);
        MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
        ret = ed.executeFromChildJVM(new DriverContext());
    } else {
        MapredWork plan = Utilities.deserializeMapRedWork(pathData, conf);
        ExecDriver ed = new ExecDriver(plan, conf, isSilent);
        ret = ed.execute(new DriverContext());
    }

    if (ret != 0) {
        System.exit(2);
    }
}
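The code that actually exercises IndexOutOfBoundsException in this driver (and in the other Hive entry points below) is the small argument loop at the top: reading args[++i] for an option that arrives without its value runs past the end of the array and throws ArrayIndexOutOfBoundsException, a subclass of IndexOutOfBoundsException. A stripped-down sketch of just that idiom; the option name and the handling are placeholders, not the real Hive entry point:

public class OptionParseSketch {
    public static void main(String[] args) {
        String planFileName = null;
        try {
            for (int i = 0; i < args.length; i++) {
                if (args[i].equals("-plan")) {
                    // Throws ArrayIndexOutOfBoundsException when "-plan"
                    // is the last argument on the command line.
                    planFileName = args[++i];
                }
            }
        } catch (IndexOutOfBoundsException e) {
            // getMessage() carries the offending index; the exact wording
            // ("1" vs. "Index 1 out of bounds for length 1") is JVM-dependent.
            System.err.println("Missing argument to option: " + e.getMessage());
        }
        System.out.println("plan = " + planFileName);
    }
}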
From source file: org.apache.hadoop.hive.ql.io.rcfile.merge.BlockMergeTask.java

public static void main(String[] args) {
    ArrayList<String> jobConfArgs = new ArrayList<String>();
    String inputPathStr = null;
    String outputDir = null;

    try {
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-input")) {
                inputPathStr = args[++i];
            } else if (args[i].equals("-jobconf")) {
                jobConfArgs.add(args[++i]);
            } else if (args[i].equals("-outputDir")) {
                outputDir = args[++i];
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }

    if (inputPathStr == null || outputDir == null || outputDir.trim().equals("")) {
        printUsage();
    }

    List<String> inputPaths = new ArrayList<String>();
    String[] paths = inputPathStr.split(INPUT_SEPERATOR);
    if (paths == null || paths.length == 0) {
        printUsage();
    }

    FileSystem fs = null;
    JobConf conf = new JobConf(BlockMergeTask.class);
    for (String path : paths) {
        try {
            Path pathObj = new Path(path);
            if (fs == null) {
                fs = FileSystem.get(pathObj.toUri(), conf);
            }
            FileStatus fstatus = fs.getFileStatus(pathObj);
            if (fstatus.isDir()) {
                FileStatus[] fileStatus = fs.listStatus(pathObj);
                for (FileStatus st : fileStatus) {
                    inputPaths.add(st.getPath().toString());
                }
            } else {
                inputPaths.add(fstatus.getPath().toString());
            }
        } catch (IOException e) {
            e.printStackTrace(System.err);
        }
    }

    StringBuilder sb = new StringBuilder("JobConf:\n");
    for (String one : jobConfArgs) {
        int eqIndex = one.indexOf('=');
        if (eqIndex != -1) {
            try {
                String key = one.substring(0, eqIndex);
                String value = URLDecoder.decode(one.substring(eqIndex + 1), "UTF-8");
                conf.set(key, value);
                sb.append(key).append("=").append(value).append("\n");
            } catch (UnsupportedEncodingException e) {
                System.err.println("Unexpected error " + e.getMessage() + " while encoding "
                        + one.substring(eqIndex + 1));
                System.exit(3);
            }
        }
    }

    HiveConf hiveConf = new HiveConf(conf, BlockMergeTask.class);

    Log LOG = LogFactory.getLog(BlockMergeTask.class.getName());
    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections
            .list((Enumeration<Appender>) LogManager.getRootLogger().getAllAppenders())) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
        }
    }

    // log the list of job conf parameters for reference
    LOG.info(sb.toString());

    MergeWork mergeWork = new MergeWork(inputPaths, outputDir);
    DriverContext driverCxt = new DriverContext();
    BlockMergeTask taskExec = new BlockMergeTask();
    taskExec.initialize(hiveConf, null, driverCxt);
    taskExec.setWork(mergeWork);
    int ret = taskExec.execute(driverCxt);

    if (ret != 0) {
        System.exit(2);
    }
}
From source file: org.apache.hadoop.hive.ql.exec.mr2.MR2ExecDriver.java

@SuppressWarnings("unchecked")
public static void main(String[] args) throws IOException, HiveException {
    String planFileName = null;
    String jobConfFileName = null;
    boolean noLog = false;
    String files = null;
    boolean localtask = false;

    try {
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-plan")) {
                planFileName = args[++i];
            } else if (args[i].equals("-jobconffile")) {
                jobConfFileName = args[++i];
            } else if (args[i].equals("-nolog")) {
                noLog = true;
            } else if (args[i].equals("-files")) {
                files = args[++i];
            } else if (args[i].equals("-localtask")) {
                localtask = true;
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }

    JobConf conf;
    if (localtask) {
        conf = new JobConf(MapredLocalTask.class);
    } else {
        conf = new JobConf(MR2ExecDriver.class);
    }

    if (jobConfFileName != null) {
        conf.addResource(new Path(jobConfFileName));
    }

    if (files != null) {
        conf.set("tmpfiles", files);
    }

    if (UserGroupInformation.isSecurityEnabled()) {
        String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        if (hadoopAuthToken != null) {
            conf.set("mapreduce.job.credentials.binary", hadoopAuthToken);
        }
    }

    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);

    String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID, "").trim();
    if (queryId.isEmpty()) {
        queryId = "unknown-" + System.currentTimeMillis();
    }
    System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);

    if (noLog) {
        // If started from main(), and noLog is on, we should not output
        // any logs. To turn the log on, please set -Dtest.silent=false
        BasicConfigurator.resetConfiguration();
        BasicConfigurator.configure(new NullAppender());
    } else {
        setupChildLog4j(conf);
    }

    Log LOG = LogFactory.getLog(MR2ExecDriver.class.getName());
    LogHelper console = new LogHelper(LOG, isSilent);

    if (planFileName == null) {
        console.printError("Must specify Plan File Name");
        printUsage();
    }

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : Collections
            .list((Enumeration<Appender>) LogManager.getRootLogger().getAllAppenders())) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
        }
    }

    // the plan file should always be in local directory
    Path p = new Path(planFileName);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream pathData = fs.open(p);

    // this is workaround for hadoop-17 - libjars are not added to classpath of the
    // child process. so we add it here explicitly
    String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
    String addedJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEADDEDJARS);
    try {
        // see also - code in CliDriver.java
        ClassLoader loader = conf.getClassLoader();
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        if (StringUtils.isNotBlank(addedJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(addedJars, ","));
        }
        conf.setClassLoader(loader);
        // Also set this to the Thread ContextClassLoader, so new threads will inherit
        // this class loader, and propagate into newly created Configurations by those
        // new threads.
        Thread.currentThread().setContextClassLoader(loader);
    } catch (Exception e) {
        throw new HiveException(e.getMessage(), e);
    }

    int ret;
    if (localtask) {
        memoryMXBean = ManagementFactory.getMemoryMXBean();
        MapredLocalWork plan = Utilities.deserializePlan(pathData, MapredLocalWork.class, conf);
        MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
        ret = ed.executeInProcess(new DriverContext());
    } else {
        MR2Work plan = Utilities.deserializePlan(pathData, MR2Work.class, conf);
        MR2ExecDriver ed = new MR2ExecDriver(plan, conf, isSilent);
        ret = ed.execute(new DriverContext());
    }

    if (ret != 0) {
        System.exit(ret);
    }
}
From source file: org.apache.hadoop.hive.ql.exec.mr.ExecDriver.java

@SuppressWarnings("unchecked")
public static void main(String[] args) throws IOException, HiveException {
    String planFileName = null;
    String jobConfFileName = null;
    boolean noLog = false;
    String files = null;
    String libjars = null;
    boolean localtask = false;

    try {
        for (int i = 0; i < args.length; i++) {
            if (args[i].equals("-plan")) {
                planFileName = args[++i];
            } else if (args[i].equals("-jobconffile")) {
                jobConfFileName = args[++i];
            } else if (args[i].equals("-nolog")) {
                noLog = true;
            } else if (args[i].equals("-files")) {
                files = args[++i];
            } else if (args[i].equals("-libjars")) {
                libjars = args[++i];
            } else if (args[i].equals("-localtask")) {
                localtask = true;
            }
        }
    } catch (IndexOutOfBoundsException e) {
        System.err.println("Missing argument to option");
        printUsage();
    }

    JobConf conf;
    if (localtask) {
        conf = new JobConf(MapredLocalTask.class);
    } else {
        conf = new JobConf(ExecDriver.class);
    }

    if (jobConfFileName != null) {
        conf.addResource(new Path(jobConfFileName));
    }

    // Initialize the resources from command line
    if (files != null) {
        conf.set("tmpfiles", files);
    }

    if (libjars != null) {
        conf.set("tmpjars", libjars);
    }

    if (UserGroupInformation.isSecurityEnabled()) {
        String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
        if (hadoopAuthToken != null) {
            conf.set("mapreduce.job.credentials.binary", hadoopAuthToken);
        }
    }

    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);

    String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID, "").trim();
    if (queryId.isEmpty()) {
        queryId = "unknown-" + System.currentTimeMillis();
        HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYID, queryId);
    }
    System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);

    LogUtils.registerLoggingContext(conf);

    if (noLog) {
        // If started from main(), and noLog is on, we should not output
        // any logs. To turn the log on, please set -Dtest.silent=false
        org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
        NullAppender appender = NullAppender.createNullAppender();
        appender.addToLogger(logger.getName(), Level.ERROR);
        appender.start();
    } else {
        setupChildLog4j(conf);
    }

    Logger LOG = LoggerFactory.getLogger(ExecDriver.class.getName());
    LogHelper console = new LogHelper(LOG, isSilent);

    if (planFileName == null) {
        console.printError("Must specify Plan File Name");
        printUsage();
    }

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger())
            .getAppenders().values()) {
        if (appender instanceof FileAppender) {
            console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
        } else if (appender instanceof RollingFileAppender) {
            console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
        }
    }

    // the plan file should always be in local directory
    Path p = new Path(planFileName);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream pathData = fs.open(p);

    // this is workaround for hadoop-17 - libjars are not added to classpath of the
    // child process. so we add it here explicitly
    try {
        // see also - code in CliDriver.java
        ClassLoader loader = conf.getClassLoader();
        if (StringUtils.isNotBlank(libjars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(libjars, ","));
        }
        conf.setClassLoader(loader);
        // Also set this to the Thread ContextClassLoader, so new threads will inherit
        // this class loader, and propagate into newly created Configurations by those
        // new threads.
        Thread.currentThread().setContextClassLoader(loader);
    } catch (Exception e) {
        throw new HiveException(e.getMessage(), e);
    }

    int ret;
    if (localtask) {
        memoryMXBean = ManagementFactory.getMemoryMXBean();
        MapredLocalWork plan = SerializationUtilities.deserializePlan(pathData, MapredLocalWork.class);
        MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
        ret = ed.executeInProcess(new DriverContext());
    } else {
        MapredWork plan = SerializationUtilities.deserializePlan(pathData, MapredWork.class);
        ExecDriver ed = new ExecDriver(plan, conf, isSilent);
        ret = ed.execute(new DriverContext());
    }

    if (ret != 0) {
        System.exit(ret);
    }
}
From source file: org.thaliproject.p2p.btconnectorlib.internal.bluetooth.BluetoothUtils.java

/**
 * Checks the validity of the received handshake message.
 *
 * @param handshakeMessage The received handshake message as a byte array.
 * @param handshakeMessageLength The length of the handshake message.
 * @param bluetoothSocketOfSender The Bluetooth socket of the sender.
 * @return The resolved peer properties of the sender, if the handshake was valid. Null otherwise.
 */
public static PeerProperties validateReceivedHandshakeMessage(byte[] handshakeMessage,
        int handshakeMessageLength, BluetoothSocket bluetoothSocketOfSender) {
    String handshakeMessageAsString = new String(handshakeMessage, StandardCharsets.UTF_8);
    PeerProperties peerProperties = null;
    boolean receivedHandshakeMessageValidated = false;

    if (!handshakeMessageAsString.isEmpty()) {
        if (handshakeMessageLength == SIMPLE_HANDSHAKE_MESSAGE_AS_BYTE_ARRAY.length) {
            // This must be the simple handshake message
            try {
                handshakeMessageAsString = handshakeMessageAsString.substring(0,
                        SIMPLE_HANDSHAKE_MESSAGE_AS_STRING.length());
            } catch (IndexOutOfBoundsException e) {
                Log.e(TAG, "validateReceivedHandshakeMessage: " + e.getMessage(), e);
            }

            if (handshakeMessageAsString.equals(SIMPLE_HANDSHAKE_MESSAGE_AS_STRING)) {
                String bluetoothMacAddress = getBluetoothMacAddressFromSocket(bluetoothSocketOfSender);

                if (isValidBluetoothMacAddress(bluetoothMacAddress)) {
                    receivedHandshakeMessageValidated = true;
                    peerProperties = new PeerProperties(bluetoothMacAddress);
                }
            }
        } else {
            // Long handshake message with peer name and Bluetooth MAC address
            peerProperties = new PeerProperties();

            try {
                receivedHandshakeMessageValidated = AbstractBluetoothConnectivityAgent
                        .getPropertiesFromIdentityString(handshakeMessageAsString, peerProperties);
            } catch (JSONException e) {
                Log.e(TAG, "validateReceivedHandshakeMessage: Failed to resolve peer properties: "
                        + e.getMessage(), e);
            }

            if (receivedHandshakeMessageValidated) {
                String bluetoothMacAddress = BluetoothUtils
                        .getBluetoothMacAddressFromSocket(bluetoothSocketOfSender);

                if (bluetoothMacAddress == null
                        || !bluetoothMacAddress.equals(peerProperties.getBluetoothMacAddress())) {
                    Log.e(TAG, "validateReceivedHandshakeMessage: Bluetooth MAC address mismatch: Got \""
                            + peerProperties.getBluetoothMacAddress() + "\", but was expecting \""
                            + bluetoothMacAddress + "\"");
                    receivedHandshakeMessageValidated = false;
                }
            }
        }
    }

    return receivedHandshakeMessageValidated ? peerProperties : null;
}
From source file: org.kuali.rice.testtools.selenium.AutomatedFunctionalTestUtils.java

protected static String incidentReportMessage(String contents, String linkLocator, String message) {
    if (incidentReported(contents)) {
        try {
            return processIncidentReport(contents, linkLocator, message);
        } catch (IndexOutOfBoundsException e) {
            return "\nIncident report detected " + message
                    + " but there was an exception during processing: " + e.getMessage()
                    + "\nStack Trace from processing exception" + stackTrace(e)
                    + "\nContents that triggered exception: " + deLinespace(contents);
        }
    }

    if (contents.contains("HTTP Status 404")) {
        return "HTTP Status 404 contents: " + contents;
    }

    if (contents.contains("HTTP Status 500")) {
        return "\nHTTP Status 500 stacktrace: " + extract500Exception(contents);
    }

    // freemarker exception
    if (contents.contains("Java backtrace for programmers:")
            || contents.contains("Java stack trace (for programmers):")
            || contents.contains("FreeMarker template error:")) {
        try {
            return freemarkerExceptionMessage(contents, linkLocator, message);
        } catch (IndexOutOfBoundsException e) {
            return "\nFreemarker exception detected " + message
                    + " but there was an exception during processing: " + e.getMessage()
                    + "\nStack Trace from processing exception" + stackTrace(e)
                    + "\nContents that triggered exception: " + deLinespace(contents);
        }
    }

    if (contents.contains("Document Expired")) { // maybe Firefox specific
        return "Document Expired message.";
    }

    return null;
}
From source file: edu.samplu.common.ITUtil.java

protected static void checkForIncidentReport(String contents, String linkLocator, Failable failable,
        String message) {
    if (contents == null) { // guard clause
        return;
    }

    if (incidentReported(contents)) {
        try {
            processIncidentReport(contents, linkLocator, failable, message);
        } catch (IndexOutOfBoundsException e) {
            failable.fail("\nIncident report detected " + message
                    + " but there was an exception during processing: " + e.getMessage()
                    + "\nStack Trace from processing exception" + stackTrace(e)
                    + "\nContents that triggered exception: " + deLinespace(contents));
        }
    }

    if (contents.contains("HTTP Status 404")) {
        failable.fail("\nHTTP Status 404 " + linkLocator + " " + message + " " + "\ncontents:" + contents);
    }

    if (contents.contains("Java backtrace for programmers:")) { // freemarker exception
        try {
            processFreemarkerException(contents, linkLocator, failable, message);
        } catch (IndexOutOfBoundsException e) {
            failable.fail("\nFreemarker exception detected " + message
                    + " but there was an exception during processing: " + e.getMessage()
                    + "\nStack Trace from processing exception" + stackTrace(e)
                    + "\nContents that triggered exception: " + deLinespace(contents));
        }
    }

    if (contents.contains("Document Expired")) { // maybe Firefox specific
        failable.fail("Document Expired message.");
    }
}
From source file: org.wso2.carbon.inbound.salesforce.poll.SoapLoginUtil.java

public static void login(HttpClient client, String username, String password)
        throws IOException, InterruptedException, SAXException, ParserConfigurationException {
    try {
        ContentExchange exchange = new ContentExchange();
        exchange.setMethod("POST");
        exchange.setURL(getSoapURL());
        exchange.setRequestContentSource(new ByteArrayInputStream(soapXmlForLogin(username, password)));
        exchange.setRequestHeader("Content-Type", "text/xml");
        exchange.setRequestHeader("SOAPAction", "''");
        exchange.setRequestHeader("PrettyPrint", "Yes");
        client.send(exchange);
        exchange.waitForDone();

        try {
            String response = exchange.getResponseContent();
            String tagSession = "<sessionId>";
            String tagServerUrl = "<serverUrl>";
            String serverUrl = response.substring(response.indexOf(tagServerUrl) + tagServerUrl.length(),
                    response.indexOf("</serverUrl>"));
            sessionId = response.substring(response.indexOf(tagSession) + tagSession.length(),
                    response.indexOf("</sessionId>"));
            LoginUrl = serverUrl.substring(0, serverUrl.indexOf("/services"));
        } catch (IndexOutOfBoundsException e) {
            log.error("Login credentials of Salesforce is wrong....");
            throw new SynapseException("Login credentials of Salesforce is wrong....", e);
        }
    } catch (MalformedURLException e) {
        log.error("Error while building URL", e);
    } catch (InterruptedException e) {
        log.error("Error in exchange the asynchronous message", e);
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    } catch (IOException e) {
        log.error("Error while login to Salesforce" + e.getMessage(), e);
    }
}
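This example trips a different subclass: when the response lacks the expected tag, indexOf returns -1 and the index arithmetic hands substring an invalid range, producing a StringIndexOutOfBoundsException. A reduced sketch of that failure mode, using a made-up response string:

public class SubstringSketch {
    public static void main(String[] args) {
        // A fault response that contains no <sessionId> element.
        String response = "<faultstring>INVALID_LOGIN</faultstring>";
        String tagSession = "<sessionId>";
        try {
            String sessionId = response.substring(
                    response.indexOf(tagSession) + tagSession.length(), // -1 + 11 = 10
                    response.indexOf("</sessionId>"));                  // -1
            System.out.println(sessionId);
        } catch (IndexOutOfBoundsException e) {
            // StringIndexOutOfBoundsException extends IndexOutOfBoundsException;
            // getMessage() reports the bad range, e.g. "begin 10, end -1, length 40"
            // (the wording varies by JVM version).
            System.err.println("Malformed login response: " + e.getMessage());
        }
    }
}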
From source file: de.sub.goobi.metadaten.copier.DataCopyrule.java

/**
 * Factory method to create a class implementing the metadata copy rule
 * referenced by a given command string.
 *
 * @param command
 *            A space-separated string consisting of subject (aka. patiens),
 *            operator (aka. agens) and (optional) objects (depending on
 *            what objects the operator requires).
 * @return a class implementing the metadata copy rule referenced
 * @throws ConfigurationException
 *             if the operator cannot be resolved or the number of arguments
 *             doesn't match
 */
public static DataCopyrule createFor(String command) throws ConfigurationException {
    List<String> arguments = Arrays.asList(command.split("\\s+"));
    String operator;
    try {
        operator = arguments.get(1);
    } catch (IndexOutOfBoundsException e) {
        throw new ConfigurationException("Missing operator (second argument) in line: " + command);
    }
    Class<? extends DataCopyrule> ruleClass = AVAILABLE_RULES.get(operator);
    if (ruleClass == null) {
        throw new ConfigurationException("Unknown operator: " + operator);
    }
    DataCopyrule ruleImplementation;
    try {
        ruleImplementation = ruleClass.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        throw new RuntimeException(e.getMessage(), e);
    }
    ruleImplementation.setSubject(arguments.get(0));
    if (ruleImplementation.getMaxObjects() > 0) {
        List<String> objects = arguments.subList(2, arguments.size());
        if (objects.size() < ruleImplementation.getMinObjects()) {
            throw new ConfigurationException("Too few arguments in line: " + command);
        }
        if (objects.size() > ruleImplementation.getMaxObjects()) {
            throw new ConfigurationException("Too many arguments in line: " + command);
        }
        ruleImplementation.setObjects(objects);
    }
    return ruleImplementation;
}
From source file: org.kitodo.production.metadata.copier.DataCopyrule.java

/**
 * Factory method to create a class implementing the metadata copy rule
 * referenced by a given command string.
 *
 * @param command
 *            A space-separated string consisting of subject (aka. patiens),
 *            operator (aka. agens) and (optional) objects (depending on
 *            what objects the operator requires).
 * @return a class implementing the metadata copy rule referenced
 * @throws ConfigurationException
 *             if the operator cannot be resolved or the number of arguments
 *             doesn't match
 */
public static DataCopyrule createFor(String command) throws ConfigurationException {
    List<String> arguments = Arrays.asList(command.split("\\s+"));
    String operator;
    try {
        operator = arguments.get(1);
    } catch (IndexOutOfBoundsException e) {
        throw new ConfigurationException("Missing operator (second argument) in line: " + command);
    }
    Class<? extends DataCopyrule> ruleClass = AVAILABLE_RULES.get(operator);
    if (ruleClass == null) {
        throw new ConfigurationException("Unknown operator: " + operator);
    }
    DataCopyrule ruleImplementation;
    try {
        ruleImplementation = ruleClass.newInstance();
    } catch (InstantiationException | IllegalAccessException e) {
        throw new MetadataException(e.getMessage(), e);
    }
    ruleImplementation.setSubject(arguments.get(0));
    if (ruleImplementation.getMaxObjects() > 0) {
        List<String> objects = arguments.subList(2, arguments.size());
        if (objects.size() < ruleImplementation.getMinObjects()) {
            throw new ConfigurationException("Too few arguments in line: " + command);
        }
        if (objects.size() > ruleImplementation.getMaxObjects()) {
            throw new ConfigurationException("Too many arguments in line: " + command);
        }
        ruleImplementation.setObjects(objects);
    }
    return ruleImplementation;
}
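Both copy-rule factories use the same move with List.get(1): index into the token list optimistically, then translate the caught IndexOutOfBoundsException into a configuration error phrased in the user's terms rather than exposing the raw getMessage() text. A compact sketch, with a hypothetical ConfigurationException stand-in:

import java.util.Arrays;
import java.util.List;

public class CopyRuleParseSketch {
    /** Hypothetical stand-in for the checked exception used above. */
    static class ConfigurationException extends Exception {
        ConfigurationException(String message) {
            super(message);
        }
    }

    static String operatorOf(String command) throws ConfigurationException {
        List<String> arguments = Arrays.asList(command.split("\\s+"));
        try {
            return arguments.get(1); // subject operator [objects...]
        } catch (IndexOutOfBoundsException e) {
            // The raw e.getMessage() (e.g. "Index 1 out of bounds for length 1")
            // is replaced with a message that points at the offending rule line.
            throw new ConfigurationException("Missing operator (second argument) in line: " + command);
        }
    }

    public static void main(String[] args) throws ConfigurationException {
        System.out.println(operatorOf("title =copy value")); // prints "=copy"
        operatorOf("title");                                 // throws ConfigurationException
    }
}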