Example usage for java.net URISyntaxException getMessage

Introduction

On this page you can find example usages of java.net.URISyntaxException.getMessage().

Prototype

public String getMessage() 

Document

Returns a string describing the parse error.
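
Before the project examples, here is a minimal self-contained sketch (not taken from any of the projects below) of what getMessage() reports when URI parsing fails; the example URI and the quoted output are illustrative only.

import java.net.URI;
import java.net.URISyntaxException;

public class UriSyntaxExceptionDemo {
    public static void main(String[] args) {
        try {
            // The space makes the path invalid, so the constructor throws URISyntaxException.
            new URI("http://example.com/bad path");
        } catch (URISyntaxException e) {
            // getMessage() combines the reason, the error index, and the offending input,
            // e.g. "Illegal character in path at index 22: http://example.com/bad path"
            System.err.println(e.getMessage());
        }
    }
}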

Usage

From source file:org.dataconservancy.access.connector.HttpDcsConnector.java

@Override
public URL depositSIP(Dcp dcp) throws DcsClientFault {
    HttpPost post;

    try {
        post = new HttpPost(config.getDepositSipUrl().toURI());
    } catch (URISyntaxException e) {
        final String msg = "Malformed deposit sip endpoint URL " + config.getUploadFileUrl() + ": "
                + e.getMessage();
        log.debug(msg, e);
        throw new DcsClientFault(msg, e);
    }

    post.setHeader("Content-Type", "application/xml");
    post.setHeader("X-Packaging", "http://dataconservancy.org/schemas/dcp/1.0");

    ByteArrayOutputStream dcp_buf = new ByteArrayOutputStream();
    mb.buildSip(dcp, dcp_buf);

    ByteArrayEntity data = new ByteArrayEntity(dcp_buf.toByteArray());
    post.setEntity(data);

    HttpResponse resp = execute(post, HttpStatus.SC_ACCEPTED);

    // Parse atom feed and pull out <link href> containing sip status url

    Document doc = null;
    ByteArrayOutputStream atom_buf = null;

    try {
        atom_buf = new ByteArrayOutputStream();
        resp.getEntity().writeTo(atom_buf);

        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true);
        DocumentBuilder db = dbf.newDocumentBuilder();
        doc = db.parse(new ByteArrayInputStream(atom_buf.toByteArray()));
    } catch (IOException e) {
        throw new DcsClientFault("Error reading server response", e);
    } catch (ParserConfigurationException e) {
        throw new DcsClientFault("Error parsing atom: " + atom_buf, e);
    } catch (SAXException e) {
        throw new DcsClientFault("Error parsing atom: " + atom_buf, e);
    }
    NodeList nl = doc.getElementsByTagNameNS("http://www.w3.org/2005/Atom", "link");

    if (nl.getLength() == 0) {
        throw new DcsClientFault("Could not parse atom: " + atom_buf);
    }

    String status_url = ((Element) nl.item(0)).getAttribute("href");

    if (status_url == null) {
        throw new DcsClientFault("Could not parse atom: " + atom_buf);
    }

    try {
        return new URL(status_url);
    } catch (MalformedURLException e) {
        throw new DcsClientFault("Malformed status url: " + status_url, e);
    }
}

From source file:org.apache.hadoop.registry.cli.RegistryCli.java

public int bind(String[] args) {
    Option rest = OptionBuilder.withArgName("rest").hasArg().withDescription("rest Option").create("rest");
    Option webui = OptionBuilder.withArgName("webui").hasArg().withDescription("webui Option").create("webui");
    Option inet = OptionBuilder.withArgName("inet").withDescription("inet Option").create("inet");
    Option port = OptionBuilder.withArgName("port").hasArg().withDescription("port to listen on [9999]")
            .create("p");
    Option host = OptionBuilder.withArgName("host").hasArg().withDescription("host name").create("h");
    Option apiOpt = OptionBuilder.withArgName("api").hasArg().withDescription("api").create("api");
    Options inetOption = new Options();
    inetOption.addOption(inet);
    inetOption.addOption(port);
    inetOption.addOption(host);
    inetOption.addOption(apiOpt);

    Options webuiOpt = new Options();
    webuiOpt.addOption(webui);
    webuiOpt.addOption(apiOpt);

    Options restOpt = new Options();
    restOpt.addOption(rest);
    restOpt.addOption(apiOpt);

    CommandLineParser parser = new GnuParser();
    ServiceRecord sr = new ServiceRecord();
    CommandLine line;
    if (args.length <= 1) {
        return usageError("Invalid syntax ", BIND_USAGE);
    }
    if (args[1].equals("-inet")) {
        int portNum;
        String hostName;
        String api;

        try {
            line = parser.parse(inetOption, args);
        } catch (ParseException exp) {
            return usageError("Invalid syntax " + exp.getMessage(), BIND_USAGE);
        }
        if (line.hasOption("inet") && line.hasOption("p") && line.hasOption("h") && line.hasOption("api")) {
            try {
                portNum = Integer.parseInt(line.getOptionValue("p"));
            } catch (NumberFormatException exp) {
                return usageError("Invalid Port - int required" + exp.getMessage(), BIND_USAGE);
            }
            hostName = line.getOptionValue("h");
            api = line.getOptionValue("api");
            sr.addExternalEndpoint(inetAddrEndpoint(api, ProtocolTypes.PROTOCOL_HADOOP_IPC, hostName, portNum));

        } else {
            return usageError("Missing options: must have host, port and api", BIND_USAGE);
        }

    } else if (args[1].equals("-webui")) {
        try {
            line = parser.parse(webuiOpt, args);
        } catch (ParseException exp) {
            return usageError("Invalid syntax " + exp.getMessage(), BIND_USAGE);
        }
        if (line.hasOption("webui") && line.hasOption("api")) {
            URI theUri;
            try {
                theUri = new URI(line.getOptionValue("webui"));
            } catch (URISyntaxException e) {
                return usageError("Invalid URI: " + e.getMessage(), BIND_USAGE);
            }
            sr.addExternalEndpoint(webEndpoint(line.getOptionValue("api"), theUri));

        } else {
            return usageError("Missing options: must have value for uri and api", BIND_USAGE);
        }
    } else if (args[1].equals("-rest")) {
        try {
            line = parser.parse(restOpt, args);
        } catch (ParseException exp) {
            return usageError("Invalid syntax " + exp.getMessage(), BIND_USAGE);
        }
        if (line.hasOption("rest") && line.hasOption("api")) {
            URI theUri = null;
            try {
                theUri = new URI(line.getOptionValue("rest"));
            } catch (URISyntaxException e) {
                return usageError("Invalid URI: " + e.getMessage(), BIND_USAGE);
            }
            sr.addExternalEndpoint(restEndpoint(line.getOptionValue("api"), theUri));

        } else {
            return usageError("Missing options: must have value for uri and api", BIND_USAGE);
        }

    } else {
        return usageError("Invalid syntax", BIND_USAGE);
    }
    @SuppressWarnings("unchecked")
    List<String> argsList = line.getArgList();
    if (argsList.size() != 2) {
        return usageError("bind requires exactly one path argument", BIND_USAGE);
    }
    if (!validatePath(argsList.get(1))) {
        return -1;
    }

    try {
        registry.bind(argsList.get(1), sr, BindFlags.OVERWRITE);
        return 0;
    } catch (Exception e) {
        syserr.println(analyzeException("bind", e, argsList));
    }

    return -1;
}

From source file:com.vmware.identity.openidconnect.client.ClientRegistrationHelper.java

private ClientInformation convertToClientInformation(JSONObject jsonObject) throws OIDCClientException {
    Validate.notEmpty(jsonObject, "jsonObject");

    try {
        JSONObject oidcclientMetadataDTO = (JSONObject) jsonObject.get("oidcclientMetadataDTO");

        Set<URI> redirectUriSet = new HashSet<URI>();
        JSONArray jsonArray = (JSONArray) oidcclientMetadataDTO.get("redirectUris");
        for (Object uri : jsonArray) {
            redirectUriSet.add(new URI((String) uri));
        }

        Set<URI> postLogoutRedirectUriSet = null;
        jsonArray = (JSONArray) oidcclientMetadataDTO.get("postLogoutRedirectUris");
        if (jsonArray != null) {
            postLogoutRedirectUriSet = new HashSet<URI>();
            for (Object uri : jsonArray) {
                postLogoutRedirectUriSet.add(new URI((String) uri));
            }
        }

        return new ClientInformation(new ClientID((String) jsonObject.get("clientId")), redirectUriSet,
                ClientAuthenticationMethod.getClientAuthenticationMethod(
                        (String) oidcclientMetadataDTO.get("tokenEndpointAuthMethod")),
                postLogoutRedirectUriSet,
                (oidcclientMetadataDTO.get("logoutUri") == null) ? null
                        : new URI((String) oidcclientMetadataDTO.get("logoutUri")),
                (String) oidcclientMetadataDTO.get("certSubjectDN"));
    } catch (URISyntaxException e) {
        throw new OIDCClientException(
                "Exception caught during converting client information: " + e.getMessage(), e);
    }
}

From source file:com.trickl.crawler.protocol.http.HttpProtocol.java

@Override
public boolean isAllowed(URI uri) throws IOException {
    if (forceAllow) {
        return forceAllow;
    }

    URI baseURI;
    try {
        baseURI = new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), uri.getPort(), "/", null, null);
    } catch (URISyntaxException ex) {
        log.error("Unable to determine base URI for " + uri);
        return false;
    }

    NoRobotClient nrc = new NoRobotClient(contentLoader, userAgent);
    try {
        nrc.parse(baseURI);
    } catch (NoRobotException ex) {
        log.error("Failure parsing robots.txt: " + ex.getMessage());
        return false;
    }
    boolean test = nrc.isUrlAllowed(uri);
    if (log.isInfoEnabled()) {
        log.info(uri + " is " + (test ? "allowed" : "denied"));
    }
    return test;
}

From source file:org.drftpd.protocol.speedtest.net.slave.SpeedTestHandler.java

private float getDownloadSpeed(String url) {
    long totalTime = 0L;
    long totalBytes = 0L;

    long startTime = System.currentTimeMillis();

    RequestConfig requestConfig = RequestConfig.custom().setSocketTimeout(60000).setConnectTimeout(5000)
            .setConnectionRequestTimeout(5000).build();

    HttpGet httpGet = new HttpGet();
    httpGet.setConfig(requestConfig);

    SpeedTestCallable[] speedTestCallables = new SpeedTestCallable[_downThreads];
    for (int i = 0; i < _downThreads; i++) {
        speedTestCallables[i] = new SpeedTestCallable();
    }

    ExecutorService executor = Executors.newFixedThreadPool(_downThreads);
    List<Future<Long>> threadList;
    Set<Callable<Long>> callables = new HashSet<Callable<Long>>();

    url = url.substring(0, url.lastIndexOf('/') + 1) + "random";

    StopWatch watch = new StopWatch();

    for (int size : _sizes) { // Measure dl speed for each size in _sizes
        if ((System.currentTimeMillis() - startTime) > _downTime) {
            break;
        }

        String tmpURL = url + size + "x" + size + ".jpg";
        try {
            httpGet.setURI(new URI(tmpURL));
        } catch (URISyntaxException e) {
            logger.error("URI syntax error for " + tmpURL + " :: " + e.getMessage());
            close(executor, callables);
            return 0;
        }

        callables.clear();
        for (int k = 0; k < _downThreads; k++) {
            speedTestCallables[k].setHttpGet(httpGet);
            callables.add(speedTestCallables[k]);
        }

        for (int j = 0; j < _sizeLoop; j++) {
            try {
                watch.reset();
                Thread.sleep(_sleep);
                watch.start();
                threadList = executor.invokeAll(callables);
                for (Future<Long> fut : threadList) {
                    Long bytes = fut.get();
                    totalBytes += bytes;
                }
                watch.stop();
                totalTime += watch.getTime();
            } catch (InterruptedException e) {
                logger.error(e.getMessage());
                close(executor, callables);
                return 0;
            } catch (ExecutionException e) {
                logger.error(e.getMessage());
                close(executor, callables);
                return 0;
            }
            if ((System.currentTimeMillis() - startTime) > _downTime) {
                break;
            }
        }
    }

    if (totalBytes == 0L || totalTime == 0L) {
        close(executor, callables);
        return 0;
    }

    close(executor, callables);

    return (float) (((totalBytes * 8) / totalTime) * 1000) / 1000000;
}

From source file:edu.jhu.pha.vospace.node.ContainerNode.java

@Override
public void copy(VospaceId newLocationId, boolean keepBytes) {
    if (!isStoredMetadata())
        throw new NotFoundException("NodeNotFound");

    if (getMetastore().isStored(newLocationId)) {
        throw new ForbiddenException("DestinationNodeExists");
    }

    if (newLocationId.getNodePath().isParent(this.getUri().getNodePath())) {
        throw new ForbiddenException("Forbidden to copy into itself");
    }

    Node newDataNode = NodeFactory.createNode(newLocationId, owner, this.getType());
    newDataNode.setNode(null);
    newDataNode.getStorage().updateNodeInfo(newLocationId.getNodePath(), newDataNode.getNodeInfo());
    newDataNode.getMetastore().storeInfo(newLocationId, newDataNode.getNodeInfo());
    newDataNode.getMetastore().updateUserProperties(newLocationId, getNodeMeta(PropertyType.property));

    NodesList childrenList = getDirectChildren(false, 0, -1);
    List<Node> children = childrenList.getNodesList();

    for (Node child : children) {
        Node childNode = NodeFactory.getNode(child.getUri(), owner);
        String relativePath = childNode.getUri().getNodePath()
                .getParentRelativePath(this.getUri().getNodePath());
        try {
            VospaceId newChildId = newLocationId.appendPath(new NodePath(relativePath));
            logger.debug("Copying child " + childNode.getUri() + " with relpath " + relativePath + " to "
                    + newChildId.toString());
            childNode.copy(newChildId, keepBytes);
        } catch (URISyntaxException e) {
            logger.error("Error copying child " + childNode.getUri().toString() + ": " + e.getMessage());
        }
    }

    if (!keepBytes) {
        getMetastore().remove(this.getUri());

        if (this.getUri().getNodePath().getNodeStoragePathArray().length == 1) { // moving first-level container to another one
            getStorage().remove(this.getUri().getNodePath(), false);
        }
    }

    QueueConnector.goAMQP("movedNode", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {

            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);

            Map<String, Object> nodeData = new HashMap<String, Object>();
            nodeData.put("uri", getUri().toString());
            nodeData.put("owner", getOwner());
            nodeData.put("container", getUri().getNodePath().getParentPath().getNodeStoragePath());

            byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
            channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, jobSer);

            return true;
        }
    });
}

From source file:org.eclipse.aether.transport.http.HttpTransporter.java

HttpTransporter(RemoteRepository repository, RepositorySystemSession session) throws NoTransporterException {
    if (!"http".equalsIgnoreCase(repository.getProtocol())
            && !"https".equalsIgnoreCase(repository.getProtocol())) {
        throw new NoTransporterException(repository);
    }
    try {
        baseUri = new URI(repository.getUrl()).parseServerAuthority();
        if (baseUri.isOpaque()) {
            throw new URISyntaxException(repository.getUrl(), "URL must not be opaque");
        }
        server = URIUtils.extractHost(baseUri);
        if (server == null) {
            throw new URISyntaxException(repository.getUrl(), "URL lacks host name");
        }
    } catch (URISyntaxException e) {
        throw new NoTransporterException(repository, e.getMessage(), e);
    }
    proxy = toHost(repository.getProxy());

    repoAuthContext = AuthenticationContext.forRepository(session, repository);
    proxyAuthContext = AuthenticationContext.forProxy(session, repository);

    state = new LocalState(session, repository, new SslConfig(session, repoAuthContext));

    headers = ConfigUtils.getMap(session, Collections.emptyMap(),
            ConfigurationProperties.HTTP_HEADERS + "." + repository.getId(),
            ConfigurationProperties.HTTP_HEADERS);

    DefaultHttpClient client = new DefaultHttpClient(state.getConnectionManager());

    configureClient(client.getParams(), session, repository, proxy);

    client.setCredentialsProvider(toCredentialsProvider(server, repoAuthContext, proxy, proxyAuthContext));

    this.client = new DecompressingHttpClient(client);
}

From source file:edu.jhu.pha.vospace.node.Node.java

public void copy(VospaceId newLocationId, final boolean keepBytes) {
    if (!isStoredMetadata())
        throw new NotFoundException("NodeNotFound");

    if (getMetastore().isStored(newLocationId))
        throw new ForbiddenException("DestinationNodeExists");

    getStorage().copyBytes(getUri().getNodePath(), newLocationId.getNodePath(), keepBytes);

    if (!keepBytes) {
        // update node's container size metadata
        try {
            ContainerNode contNode = (ContainerNode) NodeFactory.getNode(
                    new VospaceId(new NodePath(getUri().getNodePath().getContainerName())), getOwner());
            getStorage().updateNodeInfo(contNode.getUri().getNodePath(), contNode.getNodeInfo());
            getMetastore().storeInfo(contNode.getUri(), contNode.getNodeInfo());
        } catch (URISyntaxException e) {
            logger.error("Updating root node size failed: " + e.getMessage());
        }
    }

    final Node newDataNode = NodeFactory.createNode(newLocationId, owner, this.getType());
    newDataNode.setNode(null);
    newDataNode.getStorage().updateNodeInfo(newLocationId.getNodePath(), newDataNode.getNodeInfo());
    newDataNode.getMetastore().storeInfo(newLocationId, newDataNode.getNodeInfo());
    newDataNode.getMetastore().updateUserProperties(newLocationId, getNodeMeta(PropertyType.property));

    // Update chunks table to point to the new node if the node is chunked
    // copy with keepBytes=true is prohibited for chunked files by swift storage
    if (null != this.getNodeInfo().getChunkedName()) {
        VoSyncMetaStore vosyncMeta = new VoSyncMetaStore(this.owner);
        vosyncMeta.mapChunkedToNode(newDataNode.getUri(), this.getNodeInfo().getChunkedName());
    }

    if (!keepBytes)
        newDataNode.getMetastore().remove(this.getUri());

    QueueConnector.goAMQP("copyNode", new QueueConnector.AMQPWorker<Boolean>() {
        @Override
        public Boolean go(com.rabbitmq.client.Connection conn, com.rabbitmq.client.Channel channel)
                throws IOException {

            channel.exchangeDeclare(conf.getString("vospace.exchange.nodechanged"), "fanout", false);
            channel.exchangeDeclare(conf.getString("process.exchange.nodeprocess"), "fanout", true);

            Map<String, Object> nodeData = new HashMap<String, Object>();
            nodeData.put("uri", newDataNode.getUri().toString());
            nodeData.put("owner", getOwner());
            nodeData.put("container", newDataNode.getUri().getNodePath().getParentPath().getNodeStoragePath());

            byte[] jobSer = (new ObjectMapper()).writeValueAsBytes(nodeData);
            channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, jobSer);
            channel.basicPublish(conf.getString("process.exchange.nodeprocess"), "",
                    MessageProperties.PERSISTENT_TEXT_PLAIN, jobSer);

            if (!keepBytes) {
                Map<String, Object> oldNodeData = new HashMap<String, Object>();
                oldNodeData.put("uri", getUri().toString());
                oldNodeData.put("owner", getOwner());
                oldNodeData.put("container", getUri().getNodePath().getParentPath().getNodeStoragePath());

                byte[] oldNodejobSer = (new ObjectMapper()).writeValueAsBytes(oldNodeData);
                channel.basicPublish(conf.getString("vospace.exchange.nodechanged"), "", null, oldNodejobSer);
            }
            return true;
        }
    });
}

From source file:ezbake.deployer.publishers.EzAzkabanPublisher.java

/**
 * This will publish the artifact to Azkaban for scheduled running.  The artifact should be of the format
 * <p/>
 * <p/>
 * The artifact at this point in time will already have included the SSL certs.
 * <p/>
 * It's up to the publisher to reorganize the tar file if needed for its PaaS
 *
 * @param artifact    The artifact to deploy
 * @param callerToken - The token of the user or application that initiated this call
 * @throws DeploymentException - On any exceptions
 */
@Override
public void publish(DeploymentArtifact artifact, EzSecurityToken callerToken) throws DeploymentException {
    File unzippedPack = null;
    File azkabanZip = null;
    ZipOutputStream zipOutputStream = null;
    String flowName;
    final BatchJobInfo jobInfo = artifact.getMetadata().getManifest().getBatchJobInfo();

    // Get the Azkaban authentication token
    final AuthenticationResult authenticatorResult;
    try {
        authenticatorResult = new AuthenticationManager(new URI(azConf.getAzkabanUrl()), azConf.getUsername(),
                azConf.getPassword()).login();
    } catch (URISyntaxException e) {
        throw new DeploymentException(e.getMessage());
    }

    if (authenticatorResult.hasError()) {
        log.error("Could not log into Azkaban: " + authenticatorResult.getError());
        throw new DeploymentException(authenticatorResult.getError());
    }

    log.info("Successfully logged into Azkaban. Now creating .zip to upload");

    try {
        // Unzip the artifact
        unzippedPack = UnzipUtil.unzip(new File(unzipDir), ByteBuffer.wrap(artifact.getArtifact()));
        log.info("Unzipped artifact to: " + unzippedPack.getAbsolutePath());

        // Create a .zip file to submit to Azkaban
        azkabanZip = File.createTempFile("ezbatch_", ".zip");
        log.info("Created temporary zip file: " + azkabanZip.getCanonicalPath());
        zipOutputStream = new ZipOutputStream(new FileOutputStream(azkabanZip));

        // Copy the configs from the artifact to the top level of the zip.  This should contain the Azkaban
        // .jobs and .properties
        final String configDir = UnzipUtil.getConfDirectory(unzippedPack).get();
        final File configDirFile = new File(configDir);
        for (File f : FileUtils.listFiles(configDirFile, TrueFileFilter.TRUE, TrueFileFilter.TRUE)) {
            zipOutputStream.putNextEntry(new ZipArchiveEntry(f.getCanonicalPath().replaceFirst(configDir, "")));
            IOUtils.copy(new FileInputStream(f), zipOutputStream);
            zipOutputStream.closeEntry();
        }
        log.info("Copied configs to the .zip");

        // Copy the jars from bin/ in the artifact to lib/ in the .zip file and other things to the jar as needed
        final String dirPrefix = unzippedPack.getAbsolutePath() + "/bin/";
        for (File f : FileUtils.listFiles(new File(dirPrefix), TrueFileFilter.TRUE, TrueFileFilter.TRUE)) {
            zipOutputStream
                    .putNextEntry(new ZipArchiveEntry(f.getCanonicalPath().replaceFirst(dirPrefix, "lib/")));

            final JarInputStream jarInputStream = new JarInputStream(new FileInputStream(f));
            final JarOutputStream jarOutputStream = new JarOutputStream(zipOutputStream);

            JarEntry je;
            while ((je = jarInputStream.getNextJarEntry()) != null) {
                jarOutputStream.putNextEntry(je);
                IOUtils.copy(jarInputStream, jarOutputStream);
                jarOutputStream.closeEntry();
            }
            log.info("Created Jar file");

            // Add the SSL certs to the jar
            final String sslPath = UnzipUtil.getSSLPath(configDirFile).get();
            for (File sslFile : FileUtils.listFiles(new File(sslPath), TrueFileFilter.TRUE,
                    TrueFileFilter.TRUE)) {
                if (sslFile.isFile()) {
                    jarOutputStream.putNextEntry(new JarArchiveEntry("ssl/" + sslFile.getName()));
                    IOUtils.copy(new FileInputStream(sslFile), jarOutputStream);
                    jarOutputStream.closeEntry();
                }
            }
            log.info("Added SSL certs to jar");

            // Add the application.properties to the jar file so the jobs can read it
            final File appProps = new File(configDir, "application.properties");
            final Properties adjustedProperties = new Properties();
            adjustedProperties.load(new FileInputStream(appProps));
            adjustedProperties.setProperty("ezbake.security.ssl.dir", "/ssl/");
            jarOutputStream.putNextEntry(new JarArchiveEntry("application.properties"));
            adjustedProperties.store(jarOutputStream, null);
            jarOutputStream.closeEntry();

            jarOutputStream.finish();
            zipOutputStream.closeEntry();
        }

        // Check to see if there are any .job files.  If there aren't, this is an external job and we need to create
        // one for the .zip file
        final Collection<File> jobFiles = FileUtils.listFiles(configDirFile, new String[] { "job" }, false);
        if (jobFiles.isEmpty()) {
            // If there are no job files present then we need to create one for the user
            final StringBuilder sb = new StringBuilder(
                    "type=hadoopJava\n" + "job.class=ezbatch.amino.api.EzFrameworkDriver\n"
                            + "classpath=./lib/*\n" + "main.args=-d /ezbatch/amino/config");

            for (File xmlConfig : FileUtils.listFiles(configDirFile, new String[] { "xml" }, false)) {
                sb.append(" -c ").append(xmlConfig.getName());
            }

            zipOutputStream.putNextEntry(new ZipEntry("Analytic.job"));
            IOUtils.copy(new StringReader(sb.toString()), zipOutputStream);
            zipOutputStream.closeEntry();
            log.info("There was no .job file so one was created for the .zip");
            flowName = "Analytic";
        } else {
            flowName = jobInfo.getFlowName();
            if (flowName == null) {
                log.warn("Manifest did not contain flow_name. Guessing what it should be");
                flowName = FilenameUtils.getBaseName(jobFiles.toArray(new File[jobFiles.size()])[0].getName());
                log.info("Guessing the flow name should be:" + flowName);
            }
        }

        zipOutputStream.finish();
        log.info("Finished creating .zip");

        // Now that we've created the zip to upload, attempt to create a project for it to be uploaded to. Every .zip
        // file needs to be uploaded to a project, and the project may or may not already exist.
        final String projectName = ArtifactHelpers.getAppId(artifact) + "_"
                + ArtifactHelpers.getServiceId(artifact);
        final ProjectManager projectManager = new ProjectManager(authenticatorResult.getSessionId(),
                new URI(azConf.getAzkabanUrl()));
        final ManagerResult managerResult = projectManager.createProject(projectName, "EzBatch Deployed");

        // If the project already exists, it will return an error, but really it's not a problem
        if (managerResult.hasError()) {
            if (!managerResult.getMessage().contains("already exists")) {
                log.error("Could not create project: " + managerResult.getMessage());
                throw new DeploymentException(managerResult.getMessage());
            } else {
                log.info("Reusing the existing project: " + projectName);
            }
        } else {
            log.info("Created new project: " + projectName);
            log.info("Path: " + managerResult.getPath());
        }

        // Upload the .zip file to the project
        final UploadManager uploader = new UploadManager(authenticatorResult.getSessionId(),
                azConf.getAzkabanUrl(), projectName, azkabanZip);
        final UploaderResult uploaderResult = uploader.uploadZip();

        if (uploaderResult.hasError()) {
            log.error("Could not upload the zip file: " + uploaderResult.getError());
            throw new DeploymentException(uploaderResult.getError());
        }

        log.info("Successfully submitted zip file to Azkaban");

        // Schedule the jar to run.  If the start times aren't provided, it will run in 2 minutes

        final ScheduleManager scheduler = new ScheduleManager(authenticatorResult.getSessionId(),
                new URI(azConf.getAzkabanUrl()));

        // Add the optional parameters if they are present
        if (jobInfo.isSetStartDate()) {
            scheduler.setScheduleDate(jobInfo.getStartDate());
        }
        if (jobInfo.isSetStartTime()) {
            scheduler.setScheduleTime(jobInfo.getStartTime());
        }
        if (jobInfo.isSetRepeat()) {
            scheduler.setPeriod(jobInfo.getRepeat());
        }

        final SchedulerResult schedulerResult = scheduler.scheduleFlow(projectName, flowName,
                uploaderResult.getProjectId());
        if (schedulerResult.hasError()) {
            log.error("Failure to schedule job: " + schedulerResult.getError());
            throw new DeploymentException(schedulerResult.getError());
        }

        log.info("Successfully scheduled flow: " + flowName);

    } catch (Exception ex) {
        log.error("No Nos!", ex);
        throw new DeploymentException(ex.getMessage());
    } finally {
        IOUtils.closeQuietly(zipOutputStream);
        FileUtils.deleteQuietly(azkabanZip);
        FileUtils.deleteQuietly(unzippedPack);
    }
}

From source file:com.espertech.esper.client.ConfigurationParser.java

private static void handlePlugInEventRepresentation(Configuration configuration, Element element) {
    DOMElementIterator nodeIterator = new DOMElementIterator(element.getChildNodes());
    String uri = getRequiredAttribute(element, "uri");
    String className = getRequiredAttribute(element, "class-name");
    String initializer = null;
    while (nodeIterator.hasNext()) {
        Element subElement = nodeIterator.next();
        if (subElement.getNodeName().equals("initializer")) {
            DOMElementIterator nodeIter = new DOMElementIterator(subElement.getChildNodes());
            if (!nodeIter.hasNext()) {
                throw new ConfigurationException("Error handling initializer for plug-in event representation '"
                        + uri + "', no child node found under initializer element, expecting an element node");
            }

            StringWriter output = new StringWriter();
            try {
                TransformerFactory.newInstance().newTransformer().transform(new DOMSource(nodeIter.next()),
                        new StreamResult(output));
            } catch (TransformerException e) {
                throw new ConfigurationException("Error handling initializer for plug-in event representation '"
                        + uri + "' :" + e.getMessage(), e);
            }
            initializer = output.toString();
        }
    }

    URI uriParsed;
    try {
        uriParsed = new URI(uri);
    } catch (URISyntaxException ex) {
        throw new ConfigurationException(
                "Error parsing URI '" + uri + "' as a valid java.net.URI string:" + ex.getMessage(), ex);
    }
    configuration.addPlugInEventRepresentation(uriParsed, className, initializer);
}