Example usage for java.net MalformedURLException printStackTrace

Introduction

This page collects example usages of java.net.MalformedURLException.printStackTrace() taken from open-source projects.

Prototype

public void printStackTrace() 

Documentation

Prints this throwable and its backtrace to the standard error stream.
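
A minimal, self-contained sketch of the pattern used throughout the examples below: a java.net.URL is constructed, the checked MalformedURLException is caught, and printStackTrace() reports it on standard error. The URL string here is only an illustration.

import java.net.MalformedURLException;
import java.net.URL;

public class Main {
    public static void main(String[] args) {
        try {
            // "htp" is not a registered protocol, so this constructor throws.
            URL url = new URL("htp://example.com/index.html");
            System.out.println("url: " + url);
        } catch (MalformedURLException e) {
            // Prints the exception and its backtrace to the standard error stream.
            e.printStackTrace();
        }
    }
}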

Usage

From source file:Main.java

public static void main(String[] args) {
    File f = new File("c:/a/b/a/test.txt");

    URL url;
    try {
        url = f.toURL();
        System.out.println("url: " + url);
    } catch (MalformedURLException e) {
        e.printStackTrace();
    }
}
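
Note that File.toURL() has been deprecated since Java 6 because it does not escape characters that are illegal in URLs; a sketch of the same example using the recommended File.toURI().toURL() conversion instead:

File f = new File("c:/a/b/a/test.txt");
try {
    // toURI() escapes illegal characters before the URL is created
    URL url = f.toURI().toURL();
    System.out.println("url: " + url);
} catch (MalformedURLException e) {
    e.printStackTrace();
}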

From source file:Main.java

public static void main(String[] args) {
    try {
        // get the java lang package
        Package pack = Package.getPackage("java.lang");

        // check if this package is sealed
        URL url = new URL("http://www.oracle.com");
        System.out.println("" + pack.isSealed(url));
    } catch (MalformedURLException ex) {
        ex.printStackTrace();
    }

}

From source file:MainClass.java

public static void main(String[] args) {
    try {

        URL url = new URL("http://www.java2s.com/");
        URLConnection urlConnection = url.openConnection();
        Map<String, List<String>> headers = urlConnection.getHeaderFields();
        Set<Map.Entry<String, List<String>>> entrySet = headers.entrySet();
        for (Map.Entry<String, List<String>> entry : entrySet) {
            String headerName = entry.getKey();
            System.out.println("Header Name:" + headerName);
            List<String> headerValues = entry.getValue();
            for (String value : headerValues) {
                System.out.print("Header value:" + value);
            }
            System.out.println();
            System.out.println();
        }
        InputStream inputStream = urlConnection.getInputStream();
        BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream));
        String line = bufferedReader.readLine();
        while (line != null) {
            System.out.println(line);
            line = bufferedReader.readLine();
        }
        bufferedReader.close();
    } catch (MalformedURLException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
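
Since Java 7, the reading part of the example above can be written with try-with-resources so the reader (and the underlying stream) is closed even if reading fails; a minimal sketch of that variant:

try {
    URL url = new URL("http://www.java2s.com/");
    URLConnection urlConnection = url.openConnection();
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(urlConnection.getInputStream()))) {
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
        }
    } // reader closed automatically here
} catch (MalformedURLException e) {
    e.printStackTrace();
} catch (IOException e) {
    e.printStackTrace();
}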

From source file:icevaluation.BingAPIAccess.java

public static void main(String[] args) {
    String searchText = "arts site:wikipedia.org";
    searchText = searchText.replaceAll(" ", "%20");
    // String accountKey="jTRIJt9d8DR2QT/Z3BJCAvY1BfoXj0zRYgSZ8deqHHo";
    String accountKey = "JfeJSA3x6CtsyVai0+KEP0A6CYEUBT8VWhZmm9CS738";

    byte[] accountKeyBytes = Base64.encodeBase64((accountKey + ":" + accountKey).getBytes());
    String accountKeyEnc = new String(accountKeyBytes);
    URL url;
    try {
        url = new URL("https://api.datamarket.azure.com/Bing/Search/v1/Composite?Sources=%27Web%27&Query=%27"
                + searchText + "%27&$format=JSON");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Authorization", "Basic " + accountKeyEnc);

        BufferedReader br = new BufferedReader(new InputStreamReader((conn.getInputStream())));
        StringBuilder sb = new StringBuilder();
        String output;
        System.out.println("Output from Server .... \n");
        // read the full JSON response into sb
        while ((output = br.readLine()) != null) {
            System.out.println("Output is: " + output);
            sb.append(output);
        }

        conn.disconnect();
        //find webtotal among output      
        int find = sb.indexOf("\"WebTotal\":\"");
        int startindex = find + 12;
        System.out.println("Find: " + find);

        int lastindex = sb.indexOf("\",\"WebOffset\"");

        System.out.println(sb.substring(startindex, lastindex));

    } catch (MalformedURLException e1) {
        e1.printStackTrace();
    } catch (IOException e) {

        e.printStackTrace();
    }

}
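
Building the query with replaceAll(" ", "%20") only handles spaces; java.net.URLEncoder covers the other reserved characters as well. A small, self-contained sketch (the EncodeQuery class name is just for illustration; URLEncoder targets form encoding, so its '+' for spaces is mapped back to %20 here):

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class EncodeQuery {
    public static void main(String[] args) throws UnsupportedEncodingException {
        String encoded = URLEncoder.encode("arts site:wikipedia.org", "UTF-8")
                .replace("+", "%20");
        System.out.println(encoded); // arts%20site%3Awikipedia.org
    }
}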

From source file:org.javaee7.ejb.stateless.remote.AccountSessionBeanWithInterface.java

public static void main(String[] args) {
    try {
        ObjectMapper mapper = new ObjectMapper();
        URL url = new URL("http://localhost:8180/account-1.0-SNAPSHOT/statistics");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        conn.setRequestProperty("Accept", "application/json");

        if (conn.getResponseCode() != 200) {
            throw new RuntimeException("Failed : HTTP error code : " + conn.getResponseCode());
        }

        BufferedReader br = new BufferedReader(new InputStreamReader((conn.getInputStream())));

        String output;
        String result = "";
        while ((output = br.readLine()) != null) {
            result = result + output;
        }
        conn.disconnect();
        // print the response body returned by the service
        System.out.println(result);

    } catch (MalformedURLException e) {

        e.printStackTrace();

    } catch (IOException e) {

        e.printStackTrace();

    }
}

From source file:eu.planets_project.ifr.core.servreg.utils.client.PlanetsCommand.java

/**
 * @param args
 */
public static void main(String[] args) {

    /* FIXME, point to log4j.properties instead of doing this? */
    /*
    java.util.logging.Logger.getLogger("com.sun.xml.ws.model").setLevel(java.util.logging.Level.WARNING); 
    java.util.logging.Logger.getAnonymousLogger().setLevel(java.util.logging.Level.WARNING);
    Logger sunlogger = Logger.getLogger("com.sun.xml.ws.model");
    sunlogger.setLevel(Level.WARNING);
    java.util.logging.Logger.getLogger( com.sun.xml.ws.util.Constants.LoggingDomain).setLevel(java.util.logging.Level.WARNING);
    */
    /* Lots of info please: */
    java.util.logging.Logger.getAnonymousLogger().setLevel(java.util.logging.Level.FINEST);
    java.util.logging.Logger.getLogger(com.sun.xml.ws.util.Constants.LoggingDomain)
            .setLevel(java.util.logging.Level.FINEST);
    // TODO See https://jax-ws.dev.java.net/guide/Logging.html for info on more logging to set up.
    //System.setProperty("com.sun.xml.ws.transport.local.LocalTransportPipe.dump","true");
    //System.setProperty("com.sun.xml.ws.util.pipe.StandaloneTubeAssembler.dump","true");
    //System.setProperty("com.sun.xml.ws.transport.http.HttpAdapter.dump","true");
    // Doing this KILLS STREAMING. Log that.
    //System.setProperty("com.sun.xml.ws.transport.http.client.HttpTransportPipe.dump","true");

    URL wsdl;
    try {
        wsdl = new URL(args[0]);
    } catch (MalformedURLException e) {
        e.printStackTrace();
        return;
    }

    PlanetsServiceExplorer pse = new PlanetsServiceExplorer(wsdl);

    System.out.println(".describe(): " + pse.getServiceDescription());

    Service service = Service.create(wsdl, pse.getQName());
    //service.addPort(portName, SOAPBinding.SOAP11HTTP_MTOM_BINDING, endpointAddress)
    PlanetsService ps = (PlanetsService) service.getPort(pse.getServiceClass());

    // TODO The client wrapper code should enforce this stuff:
    SOAPBinding binding = (SOAPBinding) ((BindingProvider) ps).getBinding();
    System.out.println("Logging MTOM=" + binding.isMTOMEnabled());
    ((BindingProvider) ps).getRequestContext().put(JAXWSProperties.MTOM_THRESHOLOD_VALUE, 8192);
    ((BindingProvider) ps).getRequestContext().put(JAXWSProperties.HTTP_CLIENT_STREAMING_CHUNK_SIZE, 8192);
    System.out.println("Logging MTOM=" + binding.isMTOMEnabled());
    binding.setMTOMEnabled(true);
    System.out.println("Logging MTOM=" + binding.isMTOMEnabled());
    //System.out.println("Logging MTOM="+((BindingProvider)ps).getBinding().getBindingID()+" v. "+SOAPBinding.SOAP11HTTP_MTOM_BINDING);

    /* 
     * The different services are invoked in different ways...
     */
    if (pse.getQName().equals(Migrate.QNAME)) {
        System.out.println("Is a Migrate service. ");
        Migrate s = MigrateWrapper.createWrapper(wsdl);

        DigitalObject dobIn = new DigitalObject.Builder(Content.byReference(new File(args[1]))).build();

        MigrateResult result = s.migrate(dobIn, URI.create(args[2]), URI.create(args[3]), null);

        System.out.println("ServiceReport: " + result.getReport());

        DigitalObjectUtils.toFile(result.getDigitalObject(), new File("output"));

    } else if (pse.getQName().equals(Identify.QNAME)) {
        System.out.println("Is an Identify service. ");
        Identify s = new IdentifyWrapper(wsdl);

        DigitalObject dobIn = new DigitalObject.Builder(Content.byReference(new File(args[1]))).build();

        IdentifyResult result = s.identify(dobIn, null);

        System.out.println("ServiceReport: " + result.getReport());
    }
}

From source file:edu.uci.ics.crawler4j.examples.login.LoginCrawlController.java

public static void main(String[] args) throws Exception {
    //        if (args.length != 2) {
    //            System.out.println("Needed parameters: ");
    //            System.out.println("\t rootFolder (it will contain intermediate crawl data)");
    //            System.out.println("\t numberOfCralwers (number of concurrent threads)");
    //            return;
    //        }

    /*
     * crawlStorageFolder is a folder where intermediate crawl data is
     * stored.
     */
    String crawlStorageFolder = "/tmp/test_crawler/";

    /*
     * numberOfCrawlers shows the number of concurrent threads that should
     * be initiated for crawling.
     */
    int numberOfCrawlers = 1;

    CrawlConfig config = new CrawlConfig();

    config.setCrawlStorageFolder(crawlStorageFolder);

    /*
     * Be polite: Make sure that we don't send more than 1 request per
     * second (1000 milliseconds between requests).
     */
    config.setPolitenessDelay(1000);

    /*
     * You can set the maximum crawl depth here. The default value is -1 for
     * unlimited depth
     */
    config.setMaxDepthOfCrawling(0);

    /*
     * You can set the maximum number of pages to crawl. The default value
     * is -1 for unlimited number of pages
     */
    config.setMaxPagesToFetch(1000);

    /*
     * Do you need to set a proxy? If so, you can use:
     * config.setProxyHost("proxyserver.example.com");
     * config.setProxyPort(8080);
     *
     * If your proxy also needs authentication:
     * config.setProxyUsername(username); config.setProxyPassword(password);
     */

    /*
     * This config parameter can be used to set your crawl to be resumable
     * (meaning that you can resume the crawl from a previously
     * interrupted/crashed crawl). Note: if you enable resuming feature and
     * want to start a fresh crawl, you need to delete the contents of
     * rootFolder manually.
     */
    config.setResumableCrawling(false);

    config.setIncludeHttpsPages(true);
    HttpClient client = new DefaultHttpClient();
    HttpResponse response = client.execute(new HttpGet("http://58921.com/user/login"));

    HttpEntity entity = response.getEntity();
    String content = EntityUtils.toString(entity, HTTP.UTF_8);
    Document doc = Jsoup.parse(content);
    Elements elements = doc.getElementById("user_login_form").children();
    Element tokenEle = elements.last();
    String token = tokenEle.val();
    System.out.println(token);
    LoginConfiguration somesite;
    try {
        somesite = new LoginConfiguration("58921.com", new URL("http://58921.com/user/login"),
                new URL("http://58921.com/user/login/ajax?ajax=submit&__q=user/login"));
        somesite.addParam("form_id", "user_login_form");
        somesite.addParam("mail", "paxbeijing@gmail.com");
        somesite.addParam("pass", "cetas123");
        somesite.addParam("submit", "");
        somesite.addParam("form_token", token);
        config.addLoginConfiguration(somesite);
    } catch (MalformedURLException e) {
        e.printStackTrace();
    }

    /*
     * Instantiate the controller for this crawl.
     */
    PageFetcher pageFetcher = new PageFetcher(config);
    RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
    robotstxtConfig.setEnabled(false);
    RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);
    CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);

    /*
     * For each crawl, you need to add some seed urls. These are the first
     * URLs that are fetched and then the crawler starts following links
     * which are found in these pages
     */

    controller.addSeed("http://58921.com/alltime?page=60");

    /*
     * Start the crawl. This is a blocking operation, meaning that your code
     * will reach the line after this only when crawling is finished.
     */
    controller.start(LoginCrawler.class, numberOfCrawlers);

    controller.env.close();
}

From source file:com.sme.SmePoliceCheck.java

public static void main(String[] args) throws IOException, JSONException {

    // This API is for SME
    // After creating Police Check you should Upload documents and then submit the police check
    // 1-Create Police Check
    // 2-Upload Documents for Police Check ID
    // 3-Submit Police Check to Intercheck

    final String apiEndPoint = "https://secure.policecheckexpress.com.au/pce/api/portalCheckSme/new";
    final String apiToken = "secure token";
    try {

        DefaultHttpClient httpClient = new DefaultHttpClient();
        HttpPost postRequest = new HttpPost(apiEndPoint);

        //filling Portal Check with sample Data

        SmePortalCheck smePortalCheck = fillSampleData();
        String parameters = fillParameters(smePortalCheck, apiToken);
        StringEntity input = new StringEntity(parameters);
        input.setContentType("application/json");
        postRequest.setEntity(input);
        HttpResponse response = httpClient.execute(postRequest);
        BufferedReader br = new BufferedReader(new InputStreamReader((response.getEntity().getContent())));
        String jsonText = readAll(br);
        JSONArray json = new JSONArray("[" + jsonText + "]");
        JSONObject obj = (JSONObject) json.get(0);
        if (!(Boolean) obj.get("error")) {

            System.out.println(obj.get("message"));
            System.out.println("Invitation Id = " + obj.get("id"));
        } else {
            System.out.println("++++++++++++++++++++++++++");
            System.out.println("Error  = " + obj.get("message"));
            System.out.println("++++++++++++++++++++++++++");
        }

        httpClient.getConnectionManager().shutdown();

    } catch (MalformedURLException e) {

        e.printStackTrace();

    } catch (IOException e) {

        e.printStackTrace();

    }

}

From source file:com.direct.PortalCheckDirect.java

public static void main(String[] args) throws IOException, JSONException {

    //This API is for Direct Business
    final String apiEndPoint = "https://secure.policecheckexpress.com.au/pce/api/portalCheckDirect/new";
    final String apiToken = "secure Token";
    try {

        DefaultHttpClient httpClient = new DefaultHttpClient();
        HttpPost postRequest = new HttpPost(apiEndPoint);

        //filling Portal Check with Sample Data
        DirectPortalCheck directPortalCheck = fillSampleData();
        String parameters = fillParameters(directPortalCheck, apiToken);
        StringEntity input = new StringEntity(parameters);
        input.setContentType("application/json");
        postRequest.setEntity(input);
        HttpResponse response = httpClient.execute(postRequest);
        BufferedReader br = new BufferedReader(new InputStreamReader((response.getEntity().getContent())));

        String jsonText = readAll(br);
        JSONArray json = new JSONArray("[" + jsonText + "]");
        JSONObject obj = (JSONObject) json.get(0);
        if (!(Boolean) obj.get("error")) {

            System.out.println(obj.get("message"));
            System.out.println("Invitation Id = " + obj.get("id"));
        } else {
            System.out.println("++++++++++++++++++++++++++");
            System.out.println("Error  = " + obj.get("message"));
            System.out.println("++++++++++++++++++++++++++");

        }

        httpClient.getConnectionManager().shutdown();

    } catch (MalformedURLException e) {

        e.printStackTrace();

    } catch (IOException e) {

        e.printStackTrace();

    }

}

From source file:Viewer3D.java

/**
 * The main method of the application takes one argument in the args array:
 * the filename that you want to load. Note that the file must be reachable
 * from the directory in which you're running this application.
 */
public static void main(String args[]) {
    java.net.URL url = null;
    java.net.URL pathUrl = null;
    if (args.length > 0) {
        try {
            if ((args[0].indexOf("file:") == 0) || (args[0].indexOf("http") == 0)) {
                url = new java.net.URL(args[0]);
            } else if (args[0].charAt(0) != '/') {
                url = new java.net.URL("file:./" + args[0]);
            } else {
                url = new java.net.URL("file:" + args[0]);
            }
        } catch (java.net.MalformedURLException ex) {
            System.err.println(ex.getMessage());
            ex.printStackTrace();
            System.exit(1);
        }
    } else {
        // the path to the image for an application
        try {
            url = new java.net.URL("file:./ballcone.lws");
        } catch (java.net.MalformedURLException ex) {
            System.err.println(ex.getMessage());
            ex.printStackTrace();
            System.exit(1);
        }
    }
    new MainFrame(new Viewer3D(url), 500, 500);
}