Example usage for org.apache.commons.io IOUtils toString

Introduction

This page collects example usages of org.apache.commons.io IOUtils.toString from real source files.

Prototype

public static String toString(InputStream input, String encoding) throws IOException 

Document

Get the contents of an InputStream as a String using the specified character encoding.
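
For orientation, a minimal sketch of the call (the file name example.txt and the UTF-8 charset are placeholders, not part of the library):

try (InputStream in = new FileInputStream("example.txt")) {
    String content = IOUtils.toString(in, "UTF-8"); // whole stream decoded as UTF-8 text
    System.out.println(content);
}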

Usage

From source file:com.cloudbees.tftwoway.Client.java

public static void main(String[] args) throws Exception {
    URL url = new URL(SERVER_ADDRESS);
    HttpsURLConnection connection = (HttpsURLConnection) url.openConnection();

    SSLContext sslContext = createSSLContext();
    connection.setSSLSocketFactory(sslContext.getSocketFactory());

    connection.connect();

    int responseCode = connection.getResponseCode();
    // note: getContentEncoding() returns the Content-Encoding header (e.g. "gzip"), not a charset
    String response = IOUtils.toString(connection.getInputStream(), "UTF-8");
    System.out.println(responseCode);
    System.out.println(response);
}
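
Note: URLConnection.getContentEncoding() returns the Content-Encoding header (for example "gzip"), not a character set, so it is not a safe second argument for IOUtils.toString. A sketch of deriving the charset from the Content-Type header instead (the helper name charsetOf and its simplified parsing are illustrative only):

static String charsetOf(URLConnection connection) {
    // crude extraction from e.g. "text/html; charset=ISO-8859-1"
    String contentType = connection.getContentType();
    if (contentType != null) {
        for (String param : contentType.split(";")) {
            param = param.trim();
            if (param.regionMatches(true, 0, "charset=", 0, 8)) {
                return param.substring(8);
            }
        }
    }
    return "UTF-8"; // fallback when no charset parameter is present
}

Used as: IOUtils.toString(connection.getInputStream(), charsetOf(connection)).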

From source file:net.cloudkit.relaxation.HttpClientTest.java

public static void main(String[] args) throws Exception {

    InetAddress[] addresses = InetAddress.getAllByName("google.com");
    for (InetAddress address : addresses) {
        System.out.println(address);
    }

    CloseableHttpClient httpclient = HttpClients.createDefault();

    String __VIEWSTATE = "";
    String __EVENTVALIDATION = "";

    HttpGet httpGet = new HttpGet("http://query.customs.gov.cn/MNFTQ/MRoadQuery.aspx?" + Math.random() * 1000);
    httpGet.setHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8");
    httpGet.setHeader("Accept-Encoding", "gzip, deflate, sdch");
    httpGet.setHeader("Accept-Language", "zh-CN,zh;q=0.8,en;q=0.6");
    httpGet.setHeader("Cache-Control", "no-cache");
    // httpGet.setHeader("Connection", "keep-alive");
    httpGet.setHeader("Host", "query.customs.gov.cn");
    httpGet.setHeader("Pragma", "no-cache");
    httpGet.setHeader("Upgrade-Insecure-Requests", "1");
    httpGet.setHeader("User-Agent",
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36");

    HttpClientContext context = HttpClientContext.create();
    // CloseableHttpResponse response1 = httpclient.execute(httpGet, context);
    CloseableHttpResponse response1 = httpclient.execute(httpGet);
    // Header[] headers = response1.getHeaders(HttpHeaders.CONTENT_TYPE);
    // System.out.println("context cookies:" + context.getCookieStore().getCookies());
    // String setCookie = response1.getFirstHeader("Set-Cookie").getValue();
    // System.out.println("context cookies:" + setCookie);

    try {
        System.out.println(response1.getStatusLine());
        HttpEntity entity1 = response1.getEntity();
        // do something useful with the response body and ensure it is fully consumed

        String result = IOUtils.toString(entity1.getContent(), "GBK");
        // System.out.println(result);

        Matcher m1 = Pattern.compile(
                "<input type=\"hidden\" name=\"__VIEWSTATE\" id=\"__VIEWSTATE\" value=\"(.*)\" />")
                .matcher(result);
        __VIEWSTATE = m1.find() ? m1.group(1) : "";
        Matcher m2 = Pattern.compile(
                "<input type=\"hidden\" name=\"__EVENTVALIDATION\" id=\"__EVENTVALIDATION\" value=\"(.*)\" />")
                .matcher(result);
        __EVENTVALIDATION = m2.find() ? m2.group(1) : "";

        System.out.println(__VIEWSTATE);
        System.out.println(__EVENTVALIDATION);

        /*
        File storeFile = new File("D:\\customs\\customs"+ i +".jpg");
        FileOutputStream output = new FileOutputStream(storeFile);
        IOUtils.copy(input, output);
        output.close();
        */
        EntityUtils.consume(entity1);
    } finally {
        response1.close();
    }

    HttpPost httpPost = new HttpPost(
            "http://query.customs.gov.cn/MNFTQ/MRoadQuery.aspx?" + Math.random() * 1000);
    httpPost.setHeader("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8");
    httpPost.setHeader("Accept-Encoding", "gzip, deflate");
    httpPost.setHeader("Accept-Language", "zh-CN,zh;q=0.8,en;q=0.6");
    httpPost.setHeader("Cache-Control", "no-cache");
    // httpPost.setHeader("Connection", "keep-alive");
    httpPost.setHeader("Content-Type", "application/x-www-form-urlencoded");
    httpPost.setHeader("Cookie", "ASP.NET_SessionId=t1td453hcuy4oqiplekkqe55");
    httpPost.setHeader("Host", "query.customs.gov.cn");
    httpPost.setHeader("Origin", "http://query.customs.gov.cn");
    httpPost.setHeader("Pragma", "no-cache");
    httpPost.setHeader("Referer", "http://query.customs.gov.cn/MNFTQ/MRoadQuery.aspx");
    httpPost.setHeader("Upgrade-Insecure-Requests", "1");
    httpPost.setHeader("User-Agent",
            "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/45.0.2454.101 Safari/537.36");

    List<NameValuePair> nvps = new ArrayList<NameValuePair>();
    nvps.add(new BasicNameValuePair("__VIEWSTATE", __VIEWSTATE));
    nvps.add(new BasicNameValuePair("__EVENTVALIDATION", __EVENTVALIDATION));
    nvps.add(new BasicNameValuePair("ScrollTop", ""));
    nvps.add(new BasicNameValuePair("__essVariable", ""));
    nvps.add(new BasicNameValuePair("MRoadQueryCtrl1$txtManifestID", "5100312462240"));
    nvps.add(new BasicNameValuePair("MRoadQueryCtrl1$txtBillNo", "7PH650021105"));
    nvps.add(new BasicNameValuePair("MRoadQueryCtrl1$txtCode", "a778"));
    nvps.add(new BasicNameValuePair("MRoadQueryCtrl1$btQuery", "   "));
    nvps.add(new BasicNameValuePair("select", ""));
    nvps.add(new BasicNameValuePair("select1", ""));
    nvps.add(new BasicNameValuePair("select2", ""));
    nvps.add(new BasicNameValuePair("select3", ""));
    nvps.add(new BasicNameValuePair("select4", ""));
    nvps.add(new BasicNameValuePair("select5", "??"));
    nvps.add(new BasicNameValuePair("select6", ""));
    nvps.add(new BasicNameValuePair("select7", ""));
    nvps.add(new BasicNameValuePair("select8", ""));

    httpPost.setEntity(new UrlEncodedFormEntity(nvps, "GBK"));

    CloseableHttpResponse response2 = httpclient.execute(httpPost);

    try {
        System.out.println(response2.getStatusLine());
        HttpEntity entity2 = response2.getEntity();
        // do something useful with the response body
        // and ensure it is fully consumed
        // System.out.println(entity2.getContent());
        System.out.println(IOUtils.toString(response2.getEntity().getContent(), "GBK"));

        EntityUtils.consume(entity2);
    } finally {
        response2.close();
    }

}
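
A side note on resource handling: the snippet closes each response in a finally block but never closes the CloseableHttpClient itself. A minimal sketch of the same lifecycle with try-with-resources (the URL is a placeholder and the request setup is elided):

try (CloseableHttpClient client = HttpClients.createDefault();
        CloseableHttpResponse response = client.execute(new HttpGet("http://example.com/"))) {
    System.out.println(IOUtils.toString(response.getEntity().getContent(), "UTF-8"));
}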

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkScalaInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> logger = new SetOnce<>();

    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        logger.set(context.getLogger(context.getBucket()));

        // Optional: make really sure the process exits after the specified test timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        //INFO:
        System.out.println("Starting SparkScalaInterpreterTopology logging=" + logger.optional().isPresent());

        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.INFO,
                    ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                            "Starting SparkScalaInterpreterTopology.{0}",
                            Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
        });

        final SparkTopologyConfigBean job_config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();
        final String scala_script = Optional.ofNullable(job_config.script()).orElse("");

        final String wrapper_script = IOUtils.toString(
                SparkScalaInterpreterTopology.class.getClassLoader().getResourceAsStream("ScriptRunner.scala"),
                "UTF-8");
        final String to_compile = wrapper_script.replace("USER_SCRIPT", scala_script);
        final SparkCompilerService scs = new SparkCompilerService();
        final Tuple2<ClassLoader, Object> o = scs.buildClass(to_compile, "ScriptRunner", logger.optional());

        Thread.currentThread().setContextClassLoader(o._1());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        SparkConf spark_context = new SparkConf().setAppName("SparkPassthroughTopology");

        final long streaming_batch_interval = (long) spark_context
                .getInt(SparkTopologyConfigBean.STREAMING_BATCH_INTERVAL, 10);

        // MAIN PROCESSING

        final Method m = o._2().getClass().getMethod("runScript", SparkScriptEngine.class);

        //DEBUG
        //final boolean test_mode = test_spec.isPresent(); // (serializable thing i can pass into the map)

        boolean is_streaming = context.getJob().map(j -> j.analytic_type())
                .map(t -> MasterEnrichmentType.streaming == t).orElse(false);
        final Either<JavaSparkContext, JavaStreamingContext> jsc = Lambdas.get(() -> {
            return is_streaming
                    ? Either.<JavaSparkContext, JavaStreamingContext>right(new JavaStreamingContext(
                            spark_context, Durations.seconds(streaming_batch_interval)))
                    : Either.<JavaSparkContext, JavaStreamingContext>left(new JavaSparkContext(spark_context));
        });
        try {
            final JavaSparkContext jsc_batch = jsc.either(l -> l, r -> r.sparkContext());

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc_batch, Collections.emptySet());

            final Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>> streaming_inputs = jsc
                    .<Multimap<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>>either(
                            l -> HashMultimap
                                    .<String, JavaPairDStream<String, Tuple2<Long, IBatchRecord>>>create(),
                            r -> SparkTechnologyUtils.buildStreamingSparkInputs(context, test_spec, r,
                                    Collections.emptySet()));

            final SparkScriptEngine script_engine_bridge = new SparkScriptEngine(context, inputs,
                    streaming_inputs, test_spec, jsc_batch, jsc.either(l -> null, r -> r), job_config);

            // Add driver and generated JARs to path:
            jsc_batch.addJar(LiveInjector.findPathJar(o._2().getClass()));

            m.invoke(o._2(), script_engine_bridge);

            jsc.either(l -> {
                l.stop();
                return null;
            }, r -> {
                r.stop();
                return null;
            });

            logger.optional().ifPresent(l -> {
                l.inefficientLog(Level.INFO,
                        ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology", "main",
                                "Stopping SparkScalaInterpreterTopology.{0}",
                                Optionals.of(() -> context.getJob().get().name()).orElse("no_name")));
            });

            //INFO:
            System.out.println("Finished interpreter");
        } finally {
            jsc.either(l -> {
                l.close();
                return null;
            }, r -> {
                r.close();
                return null;
            });
        }
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
    } catch (Throwable t) {
        logger.optional().ifPresent(l -> {
            l.inefficientLog(Level.ERROR, ErrorUtils.buildSuccessMessage("SparkScalaInterpreterTopology",
                    "main",
                    ErrorUtils.getLongForm("Error executing SparkScalaInterpreterTopology.unknown: {0}", t)));
        });

        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));
        logger.optional().ifPresent(Lambdas.wrap_consumer_u(l -> l.flush().get(10, TimeUnit.SECONDS)));
        System.exit(-1);
    }
}

From source file:com.ikanow.aleph2.analytics.spark.services.SparkCompilerService.java

public static void main(String[] args) throws Exception {

    final String scala_script = "";
    final String wrapper_script = IOUtils.toString(
            SparkScalaInterpreterTopology.class.getClassLoader().getResourceAsStream("ScriptRunner.scala"),
            "UTF-8");
    final String to_compile = wrapper_script.replace("USER_SCRIPT", scala_script);

    final SparkCompilerService scs = new SparkCompilerService();
    final Tuple2<ClassLoader, Object> t2 = scs.buildClass(to_compile, "ScriptRunner", Optional.empty());
    System.out.println(t2._2().getClass().getClassLoader());

    // Note: java -cp "jar1;jar2;..." works here, but java -cp "./;jar1;./jar2" does *not*

    java.net.URLClassLoader cl = ((java.net.URLClassLoader) t2._1());
    System.out.println(t2._2().getClass().getClassLoader());

    cl.loadClass("ScriptRunner$$anon$1");
}

From source file:com.ikanow.aleph2.analytics.spark.assets.SparkJsInterpreterTopology.java

public static void main(String[] args)
        throws InstantiationException, IllegalAccessException, ClassNotFoundException {

    final SetOnce<IBucketLogger> bucket_logger = new SetOnce<>();
    final SetOnce<String> job_name = new SetOnce<>(); // (the string we'll use in logging activities)
    try {
        final Tuple2<IAnalyticsContext, Optional<ProcessingTestSpecBean>> aleph2_tuple = SparkTechnologyUtils
                .initializeAleph2(args);
        final IAnalyticsContext context = aleph2_tuple._1();
        final Optional<ProcessingTestSpecBean> test_spec = aleph2_tuple._2();

        bucket_logger.set(context.getLogger(context.getBucket()));
        job_name.set(context.getJob().map(j -> j.name()).orElse("no_name"));

        // Optional: make really sure the process exits after the specified test timeout
        SparkTechnologyUtils.registerTestTimeout(test_spec, () -> {
            System.exit(0);
        });

        final SparkTopologyConfigBean config = BeanTemplateUtils
                .from(context.getJob().map(job -> job.config()).orElse(Collections.emptyMap()),
                        SparkTopologyConfigBean.class)
                .get();

        final String js_script = Optional.ofNullable(config.script()).orElse("");

        //INFO:
        System.out.println("Starting " + job_name.get());

        SparkConf spark_context = new SparkConf().setAppName(job_name.get());

        test_spec.ifPresent(spec -> System.out
                .println("OPTIONS: test_spec = " + BeanTemplateUtils.toJson(spec).toString()));

        try (final JavaSparkContext jsc = new JavaSparkContext(spark_context)) {

            final Multimap<String, JavaPairRDD<Object, Tuple2<Long, IBatchRecord>>> inputs = SparkTechnologyUtils
                    .buildBatchSparkInputs(context, test_spec, jsc, Collections.emptySet());
            final JavaPairRDD<Object, Tuple2<Long, IBatchRecord>> all_inputs = inputs.values().stream()
                    .reduce((acc1, acc2) -> acc1.union(acc2)).orElse(null);

            // Load globals:
            ScriptEngineManager manager = new ScriptEngineManager();
            ScriptEngine engine = manager.getEngineByName("JavaScript");
            engine.put("_a2_global_context", context);
            engine.put("_a2_global_bucket", context.getBucket().get());
            engine.put("_a2_global_job", context.getJob().get());
            engine.put("_a2_global_config",
                    BeanTemplateUtils.configureMapper(Optional.empty()).convertValue(config, JsonNode.class));
            engine.put("_a2_global_mapper", BeanTemplateUtils.configureMapper(Optional.empty()));
            //TODO (until bucket logger is serializable, don't allow anywhere)
            //engine.put("_a2_bucket_logger", bucket_logger.optional().orElse(null));
            engine.put("_a2_enrichment_name", job_name.get());
            engine.put("_a2_spark_inputs", inputs);
            engine.put("_a2_spark_inputs_all", all_inputs);
            engine.put("_a2_spark_context", jsc);

            Stream.concat(config.uploaded_lang_files().stream(),
                    Stream.of("aleph2_sparkjs_globals_before.js", ""))
                    .flatMap(Lambdas.flatWrap_i(import_path -> {
                        try {
                            if (import_path.equals("")) { // also import the user script just before here
                                return js_script;
                            } else
                                return IOUtils.toString(SparkJsInterpreterTopology.class.getClassLoader()
                                        .getResourceAsStream(import_path), "UTF-8");
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (script {1}): {2}",
                                                            job_name.get(), import_path, e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    })).forEach(Lambdas.wrap_consumer_i(script -> {
                        try {
                            engine.eval(script);
                        } catch (Throwable e) {
                            bucket_logger.optional()
                                    .ifPresent(l -> l.log(Level.ERROR,
                                            ErrorUtils.lazyBuildMessage(false,
                                                    () -> SparkJsInterpreterTopology.class.getSimpleName(),
                                                    () -> job_name.get() + ".main", () -> null,
                                                    () -> ErrorUtils.get(
                                                            "Error initializing stage {0} (main script): {1}",
                                                            job_name.get(), e.getMessage()),
                                                    () -> ImmutableMap.<String, Object>of("full_error",
                                                            ErrorUtils.getLongForm("{0}", e)))));

                            System.out.println(ErrorUtils.getLongForm("onStageInitialize: {0}", e));
                            throw e; // ignored
                        }
                    }));

            jsc.stop();

            //INFO:
            System.out.println("Finished " + job_name.get());
        }
    } catch (Throwable t) {
        System.out.println(ErrorUtils.getLongForm("ERROR: {0}", t));

        bucket_logger.optional().ifPresent(l -> l.log(Level.ERROR, ErrorUtils.lazyBuildMessage(false,
                () -> SparkJsInterpreterTopology.class.getSimpleName()
                        + job_name.optional().map(j -> "." + j).orElse(""),
                () -> job_name.optional().orElse("global") + ".main", () -> null,
                () -> ErrorUtils.get("Error on batch in job {0}: {1}",
                        job_name.optional().orElse("global") + ".main", t.getMessage()),
                () -> ImmutableMap.<String, Object>of("full_error", ErrorUtils.getLongForm("{0}", t)))));
    }
}

From source file:BusinessLogic.CodesImporter.java

public static Set<CodeAward> importCodes() throws MalformedURLException, IOException {
    u = new URL(Code.codeUrl);
    InputStream in = u.openStream();
    String myString = IOUtils.toString(in, "UTF-8");
    String[] lines = myString.split("\n");

    Set<CodeAward> codeAwards = new HashSet<>();
    for (String line : lines) {
        String[] fields = line.split(",");
        if (fields.length > 2) {
            try {
                String code = fields[0].toLowerCase().trim();
                code = !code.startsWith("#") ? "#" + code : code;
                codeAwards.add(new CodeAward(code, fields[1].trim(), Integer.parseInt(fields[2].trim())));
            } catch (Exception e) {
                System.out.println("Error when importing codes from dropbox");
            }
        }
    }
    return codeAwards;

}
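
IOUtils.toString does not close the stream passed to it, so the URL stream above is left open. A sketch of the download step wrapped in try-with-resources (the same applies to the icon import further down):

String myString;
try (InputStream in = u.openStream()) {
    myString = IOUtils.toString(in, "UTF-8"); // closed even if toString throws
}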

From source file:bookstore.BookUnmarshaller.java

public static Book[] BooksFromCsv(String url) {
    InputStream in = null;
    Book[] books;
    try {
        in = new URL(url).openStream();
        books = BooksFromString(IOUtils.toString(in, Charset.forName("utf-8")));
    } catch (IOException | ParseException | NumberFormatException e) {
        return null;
    } finally {
        IOUtils.closeQuietly(in);
    }
    return books;
}

From source file:inm5151.FileReader.java

public static String loadFileIntoString(String filePath, String fileEncoding)
        throws FileNotFoundException, IOException {

    return IOUtils.toString(new FileInputStream(filePath), fileEncoding);
}
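
The FileInputStream above is likewise never closed, because IOUtils.toString leaves that to the caller. A leak-free sketch of the same helper:

public static String loadFileIntoString(String filePath, String fileEncoding)
        throws FileNotFoundException, IOException {
    try (FileInputStream in = new FileInputStream(filePath)) {
        return IOUtils.toString(in, fileEncoding);
    }
}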

From source file:Model.MappingCodeAwards.java

public static void initializeMapping() throws MalformedURLException, IOException {
    u = new URL("https://dl.dropboxusercontent.com/u/28091845/codapps_icons.txt");
    InputStream in = u.openStream();
    String myString = IOUtils.toString(in, "UTF-8");
    String[] lines = myString.split("\n");
    mapCategoryToFontIcon = new HashMap<>();

    for (String line : lines) {
        String[] fields = line.split(",");
        if (fields.length > 1) {
            try {
                mapCategoryToFontIcon.put(fields[0].toLowerCase().trim(), fields[1].toLowerCase().trim());
            } catch (Exception e) {
                System.out.println("Error when importing icons from dropbox");
            }
        }
    }
}

From source file:com.hp.mqm.client.ResourceUtils.java

public static String readContent(String fileName) throws IOException {
    return IOUtils.toString(MqmRestClientImplTest.class.getResourceAsStream(fileName), "UTF-8");
}
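
One caveat with this pattern: Class.getResourceAsStream returns null for a missing resource, which surfaces as a NullPointerException inside IOUtils.toString. A defensive sketch (try-with-resources tolerates a null resource; the error message is illustrative):

public static String readContent(String fileName) throws IOException {
    try (InputStream in = MqmRestClientImplTest.class.getResourceAsStream(fileName)) {
        if (in == null) {
            throw new IOException("resource not found: " + fileName);
        }
        return IOUtils.toString(in, "UTF-8");
    }
}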