Example usage for java.io DataInputStream DataInputStream

Introduction

This page collects example usages of the java.io DataInputStream(InputStream) constructor, drawn from open-source projects.

Prototype

public DataInputStream(InputStream in) 

Document

Creates a DataInputStream that uses the specified underlying InputStream.
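
For orientation, here is a minimal, self-contained sketch of the constructor in action (the file name data.bin and the values written are illustrative, not taken from any of the projects below): it writes a few primitives with a DataOutputStream, then wraps the underlying FileInputStream in a DataInputStream and reads them back in the same order.

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

public class DataInputStreamDemo {
    public static void main(String[] args) throws IOException {
        // Write a few primitives so there is something to read back.
        try (DataOutputStream out = new DataOutputStream(new FileOutputStream("data.bin"))) {
            out.writeInt(42);
            out.writeUTF("hello");
        }

        // Wrap the underlying InputStream in a DataInputStream and
        // read the values back in the order they were written.
        try (DataInputStream in = new DataInputStream(new FileInputStream("data.bin"))) {
            int n = in.readInt();    // 42
            String s = in.readUTF(); // "hello"
            System.out.println(n + " " + s);
        }
    }
}

Because DataInputStream only adds primitive decoding on top of whatever stream it wraps, it is routinely layered over file, buffered, compressed, or network streams, as the examples below show.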

Usage

From source file:edu.indiana.d2i.htrc.util.MemcachedValidation.java

@Override
public int run(String[] args) throws Exception {
    String idDir = args[0];
    String memhostsPath = args[1];

    Configuration conf = getConf();
    MemCachedUtil.configHelper(conf, memhostsPath);
    ThreadedMemcachedClient client = ThreadedMemcachedClient.getThreadedMemcachedClient(conf);
    MemcachedClient cache = client.getCache();
    Transcoder<VectorWritable> vectorTranscoder = new HadoopWritableTranscoder<VectorWritable>(conf,
            VectorWritable.class);

    // id list
    FileSystem fs = FileSystem.get(conf);
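    // Note: fs.open() already returns an FSDataInputStream, which extends DataInputStream, so this extra wrapper is redundant but harmless.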
    DataInputStream fsinput = new DataInputStream(fs.open(new Path(idDir)));
    Iterator<Text> idIterator = new IDList(fsinput).iterator();
    List<String> idlist = new ArrayList<String>();
    while (idIterator.hasNext()) {
        Text id = idIterator.next();
        idlist.add(id.toString());
    }

    BufferedWriter writer = new BufferedWriter(new FileWriter("memdebug.txt"));
    String namespace = "";
    for (String id : idlist) {
        VectorWritable vec = cache.get(namespace + id, vectorTranscoder);
        if (vec == null) {
            System.out.println(id);
            writer.write(id + "\n");
        }
    }
    writer.close();

    return 0;
}

From source file:io.druid.data.input.impl.prefetch.RetryingInputStreamTest.java

@Before
public void setup() throws IOException {
    testFile = temporaryFolder.newFile();

    try (FileOutputStream fis = new FileOutputStream(testFile);
            GZIPOutputStream gis = new GZIPOutputStream(fis);
            DataOutputStream dis = new DataOutputStream(gis)) {
        for (int i = 0; i < 10000; i++) {
            dis.writeInt(i);
        }
    }

    throwError = false;

    final InputStream retryingInputStream = new RetryingInputStream<>(testFile, new ObjectOpenFunction<File>() {
        @Override
        public InputStream open(File object) throws IOException {
            return new TestInputStream(new FileInputStream(object));
        }

        @Override
        public InputStream open(File object, long start) throws IOException {
            final FileInputStream fis = new FileInputStream(object);
            Preconditions.checkState(fis.skip(start) == start);
            return new TestInputStream(fis);
        }
    }, e -> e instanceof IOException, MAX_RETRY);

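    // Layer a DataInputStream over the gzip decompressor so the test can read the ints back through the retrying stream.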
    inputStream = new DataInputStream(new GZIPInputStream(retryingInputStream));

    throwError = true;
}

From source file:com.cloudera.recordbreaker.hive.RegExpSerDe.java

/**
 * <code>initDeserializer</code> sets up the RegExp-specific
 * parts of the SerDe.  In particular, it loads in the regular
 * expressions and corresponding schema descriptions.
 *
 * The patternPayloadJSONFile parameter is a serialized JSON
 * object that contains the schema and regexp info.
 */
void initDeserializer(String patternPayloadJSONFile) {
    try {
        DataInputStream in = new DataInputStream(new FileInputStream(new File(patternPayloadJSONFile)));
        byte[] buf = new byte[8096];
        StringBuffer payload = new StringBuffer();
        try {
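            // Read the payload in buf-sized chunks; new String(buf, 0, numBytes) decodes with the platform default charset.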
            int numBytes = in.read(buf);
            while (numBytes >= 0) {
                payload.append(new String(buf, 0, numBytes));
                numBytes = in.read(buf);
            }
        } finally {
            in.close();
        }
        String payloadStr = payload.toString();
        JSONObject jobj = new JSONObject(payloadStr);

        this.patterns = new ArrayList<Pattern>();
        JSONArray patternArray = jobj.getJSONArray("patterns");
        for (int i = 0; i < patternArray.length(); i++) {
            String patternStr = patternArray.getString(i);
            this.patterns.add(Pattern.compile(patternStr));
        }

        this.schemaOptions = new ArrayList<Schema>();
        JSONArray schemaOptionArray = jobj.getJSONArray("schemaoptions");
        for (int i = 0; i < schemaOptionArray.length(); i++) {
            String schemaStr = schemaOptionArray.getString(i);
            this.schemaOptions.add(Schema.parse(schemaStr));
        }
    } catch (UnsupportedEncodingException uee) {
        uee.printStackTrace();
    } catch (IOException iex) {
        iex.printStackTrace();
    } catch (JSONException jse) {
        jse.printStackTrace();
    }
}
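
Judging from the parsing code above, the payload file is a JSON object holding two parallel string arrays, one of regular expressions and one of Avro schema descriptions (the concrete values here are illustrative):

{
  "patterns": ["(\\d+),(\\w+)"],
  "schemaoptions": ["{\"type\":\"record\",\"name\":\"Row\",\"fields\":[...]}"]
}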

From source file:com.wso2telco.services.bw.FileUtil.java

public static String ReadFullyIntoVar(String fullpath) {

    String result = "";

    try {
        FileInputStream file = new FileInputStream(fullpath);
        DataInputStream in = new DataInputStream(file);
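        // Caution: available() is only an estimate of how many bytes can be read without blocking; it usually equals the remaining length of a local file, but the contract does not guarantee it.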
        byte[] b = new byte[in.available()];
        in.readFully(b);
        in.close();
        result = new String(b, 0, b.length, "Cp850");
    } catch (Exception e) {
        e.printStackTrace();
    }
    return result;
}

From source file:edu.indiana.d2i.htrc.io.index.solr.SequentialVectorFromSolr.java

@Override
public int run(String[] args) throws Exception {
    if (args.length != 4) {
        printUsage();
    }

    String solrURL = args[0];
    String dictionaryFile = args[1];
    String idsFile = args[2];
    String outputFile = args[3];

    logger.info("SequentialVectorFromSolr ");
    logger.info(" - solrURL: " + solrURL);
    logger.info(" - dictionaryFile: " + dictionaryFile);
    logger.info(" - idsFile: " + idsFile); // on HDFS
    logger.info(" - outputFile: " + outputFile); // on HDFS

    Configuration conf = getConf();
    //      conf.set(HTRCConstants.SOLR_MAIN_URL, solrURL);
    conf.set("htrc.solr.url", solrURL);
    conf.set(HTRCConstants.DICTIONARY_PATH, dictionaryFile);

    SolrClient client = new SolrClient(conf, true);
    FileSystem fs = FileSystem.get(conf);

    SequenceFile.Writer writer = new SequenceFile.Writer(fs, conf, new Path(outputFile), Text.class,
            VectorWritable.class);

    long t0 = System.nanoTime();
    DataInputStream fsinput = new DataInputStream(fs.open(new Path(idsFile)));
    BufferedReader reader = new BufferedReader(new InputStreamReader(fsinput));
    String line = null;
    String[] ids = new String[1];
    VectorWritable value = new VectorWritable();
    Text key = new Text();
    int count = 0;
    while ((line = reader.readLine()) != null) {
        ids[0] = line;
        Iterable<NamedVector> termVectors = client.getTermVectors(ids);
        for (NamedVector namedVector : termVectors) {
            value.set(namedVector);
            key.set(namedVector.getName());
            writer.append(key, value);
            count++;
        }
        if (count % 1000 == 0)
            System.out.println("Finish " + count + " volumes.");
    }
    long t1 = System.nanoTime();
    System.out.println("Takes " + (t1 - t0) / 1e9 + " seconds");

    writer.close();
    reader.close();

    return 0;
}

From source file:hudson.cli.Connection.java

public Connection(InputStream in, OutputStream out) {
    this.in = in;
    this.out = out;
    this.din = new DataInputStream(in);
    this.dout = new DataOutputStream(out);
}

From source file:J2MESearchMixedRecordDataTypeExample.java

public void commandAction(Command command, Displayable displayable) {
    if (command == exit) {
        destroyApp(true);
        notifyDestroyed();
    } else if (command == start) {
        try {
            recordstore = RecordStore.openRecordStore("myRecordStore", true);
            byte[] outputRecord;
            String outputString[] = { "A", "B", "M" };
            int outputInteger[] = { 15, 10, 5 };
            ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
            DataOutputStream outputDataStream = new DataOutputStream(outputStream);
            for (int x = 0; x < 3; x++) {
                outputDataStream.writeUTF(outputString[x]);
                outputDataStream.writeInt(outputInteger[x]);
                outputDataStream.flush();
                outputRecord = outputStream.toByteArray();
                recordstore.addRecord(outputRecord, 0, outputRecord.length);
                outputStream.reset();
            }
            outputStream.close();
            outputDataStream.close();
            String inputString;
            byte[] byteInputData = new byte[300];
            ByteArrayInputStream inputStream = new ByteArrayInputStream(byteInputData);
            DataInputStream inputDataStream = new DataInputStream(inputStream);
            if (recordstore.getNumRecords() > 0) {
                filter = new Filter("Mary");
                recordEnumeration = recordstore.enumerateRecords(filter, null, false);
                while (recordEnumeration.hasNextElement()) {
                    recordstore.getRecord(recordEnumeration.nextRecordId(), byteInputData, 0);
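                    // Note: inputDataStream is never reset, so its read position only lines up with byteInputData for the first matching record.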
                    inputString = inputDataStream.readUTF() + " " + inputDataStream.readInt();
                    alert = new Alert("Reading", inputString, null, AlertType.WARNING);
                    alert.setTimeout(Alert.FOREVER);
                    display.setCurrent(alert);
                }
            }
            inputStream.close();
            recordstore.closeRecordStore();
            if (RecordStore.listRecordStores() != null) {
                RecordStore.deleteRecordStore("myRecordStore");
                filter.filterClose();
                recordEnumeration.destroy();
            }
        } catch (Exception error) {
            alert = new Alert("Error Removing", error.toString(), null, AlertType.WARNING);
            alert.setTimeout(Alert.FOREVER);
            display.setCurrent(alert);
        }
    }
}

From source file:edu.indiana.d2i.htrc.io.dataapi.IDInputFormat.java

@Override
public List<InputSplit> getSplits(JobContext job) throws IOException {
    int numIdsInSplit = job.getConfiguration().getInt(HTRCConstants.MAX_IDNUM_SPLIT, (int) 1e6);
    String hostStr = job.getConfiguration().get(HTRCConstants.HOSTS_SEPARATEDBY_COMMA,
            HTRCConstants.DATA_API_DEFAULT_URL);
    if (hostStr == null)
        throw new RuntimeException("Cannot find hosts of HTRC Data Storage.");
    String[] hosts = hostStr.split(",");

    IDInputSplit split = new IDInputSplit(hosts);
    List<InputSplit> splits = new ArrayList<InputSplit>();
    Path[] dirs = getInputPaths(job);
    try {
        for (int i = 0; i < dirs.length; i++) {
            FileSystem fs = dirs[i].getFileSystem(job.getConfiguration());
            DataInputStream fsinput = new DataInputStream(fs.open(dirs[i]));
            Iterator<Text> idlist = new IDList(fsinput).iterator();
            while (idlist.hasNext()) {
                Text id = idlist.next();
                split.addID(id.toString());
                if (split.getLength() >= numIdsInSplit) {
                    splits.add(split);
                    split = new IDInputSplit(hosts);
                }
            }

            //            LineReader reader = new LineReader(fsinput);
            //            Text line = new Text();
            //            while (reader.readLine(line) > 0) {
            //               split.addID(line.toString());
            //               if (split.getLength() >= numIdsInSplit) {
            //                  splits.add(split);
            //                  split = new IDInputSplit(hosts);
            //               }
            //            }
            //            reader.close();
        }
        if (split != null && split.getLength() != 0)
            splits.add(split);
    } catch (InterruptedException e) {
        logger.error(e);
    }

    logger.info("#Splits " + splits.size());
    return splits;
}

From source file:com.igormaznitsa.jhexed.swing.editor.filecontainer.FileContainer.java

private List<FileContainerSection> loadFromStream(final InputStream in) throws IOException {
    final DataInputStream din = in instanceof DataInputStream ? (DataInputStream) in : new DataInputStream(in);

    if (din.readInt() != MAGIC) {
        throw new IOException("Wrong format, can't find magic");
    }

    final int version = din.readShort() & 0xFFFF;

    if (version > FORMAT_VERSION) {
        throw new IllegalArgumentException("Detected unsupported version [" + version + ']');
    }

    final int sectionNumber = din.readUnsignedShort();

    final List<FileContainerSection> result = new ArrayList<FileContainerSection>(Math.max(5, sectionNumber));

    for (int i = 0; i < sectionNumber; i++) {
        final FileContainerSection s = new FileContainerSection(in);
        result.add(s);
    }
    if (din.readInt() != MAGIC) {
        throw new IOException("Can't detecte the end MAGIC");
    }

    return result;
}

From source file:org.kurento.repository.test.RangePutTests.java

protected void uploadFileWithSeqPUTs(RepositoryHttpRecorder recorder, File fileToUpload,
        RepositoryItem repositoryItem) throws Exception {

    recorder.setAutoTerminationTimeout(500000);
    String url = recorder.getURL();

    DataInputStream is = null;

    try {

        is = new DataInputStream(new FileInputStream(fileToUpload));
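        // Only read(byte[]) is used below, so the DataInputStream wrapper adds nothing here; a plain FileInputStream would behave the same.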

        int sentBytes = 0;

        byte[] info = new byte[40000];

        int readBytes;

        int numRequest = 0;

        while ((readBytes = is.read(info)) != -1) {

            ResponseEntity<String> response = putContent(url, Arrays.copyOf(info, readBytes), sentBytes);

            sentBytes += readBytes;

            log.info(numRequest + ": " + response.toString());

            assertEquals("Returned response: " + response.getBody(), HttpStatus.OK, response.getStatusCode());

            if (numRequest == 3) {

                // Simulating retry

                response = putContent(url, Arrays.copyOf(info, readBytes), sentBytes - readBytes);

                log.info(numRequest + ": " + response.toString());

                assertEquals("Returned response: " + response.getBody(), HttpStatus.OK,
                        response.getStatusCode());

            } else if (numRequest == 4) {

                // Simulating retry with new data

                byte[] newInfo = new byte[500];
                int newReadBytes = is.read(newInfo);

                response = putContent(url,
                        concat(Arrays.copyOf(info, readBytes), Arrays.copyOf(newInfo, newReadBytes)),
                        sentBytes - readBytes);

                sentBytes += newReadBytes;

                log.info(numRequest + ": " + response.toString());

                assertEquals("Returned response: " + response.getBody(), HttpStatus.OK,
                        response.getStatusCode());

            } else if (numRequest == 5) {

                // Simulating send ahead data

                response = putContent(url, Arrays.copyOf(info, readBytes), sentBytes + 75000);

                log.info(numRequest + ": " + response.toString());

                assertEquals("Returned response: " + response.getBody(), HttpStatus.NOT_IMPLEMENTED,
                        response.getStatusCode());

            }

            numRequest++;
        }

    } finally {
        if (is != null) {
            try {
                is.close();
            } catch (IOException e) {
            }
        }

        recorder.stop();
    }
}