List of usage examples for the java.io.DataInputStream constructor:
public DataInputStream(InputStream in)
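Before the examples from real projects, a minimal self-contained sketch of the constructor itself: DataInputStream wraps any InputStream and adds typed read methods such as readInt, readLong, and readUTF. The file name and values below are illustrative only.

    import java.io.DataInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;

    public class DataInputStreamSketch {
        public static void main(String[] args) throws IOException {
            // try-with-resources closes the DataInputStream and the wrapped FileInputStream
            try (DataInputStream in = new DataInputStream(new FileInputStream("data.bin"))) {
                long timestamp = in.readLong();   // 8 bytes, big-endian
                int count = in.readInt();         // 4 bytes, big-endian
                System.out.println(timestamp + " / " + count);
            }
        }
    }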
From source file:com.csipsimple.backup.SipProfilesHelper.java
@Override
public void performBackup(ParcelFileDescriptor oldState, BackupDataOutput data, ParcelFileDescriptor newState) {
    boolean forceBackup = (oldState == null);
    long fileModified = databaseFile.lastModified();
    try {
        if (!forceBackup) {
            FileInputStream instream = new FileInputStream(oldState.getFileDescriptor());
            DataInputStream in = new DataInputStream(instream);
            long lastModified = in.readLong();
            in.close();
            if (lastModified < fileModified) {
                forceBackup = true;
            }
        }
    } catch (IOException e) {
        Log.e(THIS_FILE, "Cannot manage previous local backup state", e);
        forceBackup = true;
    }

    Log.d(THIS_FILE, "Will backup profiles ? " + forceBackup);

    if (forceBackup) {
        JSONArray accountsSaved = SipProfileJson.serializeSipProfiles(mContext);
        try {
            writeData(data, accountsSaved.toString());
        } catch (IOException e) {
            Log.e(THIS_FILE, "Cannot manage remote backup", e);
        }
    }

    try {
        FileOutputStream outstream = new FileOutputStream(newState.getFileDescriptor());
        DataOutputStream out = new DataOutputStream(outstream);
        out.writeLong(fileModified);
        out.close();
    } catch (IOException e) {
        Log.e(THIS_FILE, "Cannot manage final local backup state", e);
    }
}
From source file:au.org.ala.spatial.util.RecordsSmall.java
public RecordsSmall(String dir) throws IOException {
    this.filename = dir + File.separator;

    // look for a small file
    File smallFile = new File(filename + "records.csv.small.species");
    if (!smallFile.exists() && new File(filename + "records.csv").exists()) {
        try {
            makeSmallFile(filename);
            makeUniquePoints();
        } catch (Exception e) {
            logger.error("failed to make small records files", e);
        }
    }

    // read species
    if (smallFile.exists()) {
        try {
            // open points and pointsToSpecies
            points = new RandomAccessFile(filename + "records.csv.small.points", "r");
            pointsToSpecies = new RandomAccessFile(filename + "records.csv.small.pointsToSpecies", "r");

            maxPoints = new File(filename + "records.csv.small.pointsToSpecies").length() / 4;

            pointsDis = new DataInputStream(
                    new BufferedInputStream(new FileInputStream(filename + "records.csv.small.points")));
            pointsToSpeciesDis = new DataInputStream(new BufferedInputStream(
                    new FileInputStream(filename + "records.csv.small.pointsToSpecies")));

            lsids = FileUtils.readLines(new File(filename + "records.csv.small.species"));

            getUniquePointsAll();
        } catch (Exception e) {
            logger.error("failed to open small records file", e);
        }
    }
}
From source file:genepi.db.DatabaseUpdater.java
public static String readFileAsString(String filename) throws java.io.IOException, URISyntaxException {
    InputStream is = new FileInputStream(filename);
    DataInputStream in = new DataInputStream(is);
    BufferedReader br = new BufferedReader(new InputStreamReader(in));

    String strLine;
    StringBuilder builder = new StringBuilder();
    while ((strLine = br.readLine()) != null) {
        // builder.append("\n");
        builder.append(strLine);
    }
    in.close();

    return builder.toString();
}
From source file:org.eclipse.swt.examples.clipboard.MyTypeTransfer.java
@Override
public Object nativeToJava(TransferData transferData) {
    if (isSupportedType(transferData)) {
        byte[] buffer = (byte[]) super.nativeToJava(transferData);
        if (buffer == null)
            return null;

        MyType[] myData = new MyType[0];
        try {
            ByteArrayInputStream in = new ByteArrayInputStream(buffer);
            try (DataInputStream readIn = new DataInputStream(in)) {
                while (readIn.available() > 20) {
                    MyType datum = new MyType();
                    int size = readIn.readInt();
                    byte[] name = new byte[size];
                    readIn.read(name);
                    datum.firstName = new String(name);
                    size = readIn.readInt();
                    name = new byte[size];
                    readIn.read(name);
                    datum.lastName = new String(name);
                    MyType[] newMyData = new MyType[myData.length + 1];
                    System.arraycopy(myData, 0, newMyData, 0, myData.length);
                    newMyData[myData.length] = datum;
                    myData = newMyData;
                }
            }
        } catch (IOException ex) {
            return null;
        }
        return myData;
    }

    return null;
}
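A note on this example: readIn.read(name) may return before the whole length-prefixed field has been filled. DataInputStream.readFully blocks until the array is filled (or throws EOFException), which is the usual choice for length-prefixed fields like these. A minimal self-contained sketch of that pattern; the class and method names are illustrative, not from the source:

    import java.io.DataInputStream;
    import java.io.IOException;

    final class LengthPrefixed {
        // Reads a 4-byte length followed by exactly that many bytes.
        static String readPrefixedString(DataInputStream in) throws IOException {
            int size = in.readInt();
            byte[] bytes = new byte[size];
            in.readFully(bytes);   // unlike read(), guarantees all 'size' bytes are read
            return new String(bytes);
        }
    }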
From source file:de.tu_berlin.dima.aim3.naivebayes.io.BinaryInputFormat.java
private boolean getNextReader() {
    if (!files.isEmpty()) {
        FileStatus fileStatus = files.poll();
        FSDataInputStream fsDataInputStream;
        try {
            LOG.info("Opening reader :" + fileStatus.getPath());
            fsDataInputStream = fs.open(fileStatus.getPath());
            dataInputStream = new DataInputStream(fsDataInputStream);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
            return false;
        }
        return true;
    } else {
        return false;
    }
}
From source file:com.evilco.license.client.decoder.CompressedLicenseDecoder.java
/**
 * {@inheritDoc}
 */
@Override
public <T extends ILicense> T decode(@Nonnull String input, @Nonnull Class<T> licenseType) throws LicenseDecoderException {
    // define streams
    ByteArrayInputStream inputStream = null;
    GZIPInputStream gzipInputStream = null;
    DataInputStream dataInputStream = null;

    // read data
    try {
        // remove newlines
        input = CharMatcher.anyOf("\n\r").removeFrom(input);

        // decode data
        byte[] data = BaseEncoding.base64().decode(input);

        // create streams
        inputStream = new ByteArrayInputStream(data);
        gzipInputStream = new GZIPInputStream(inputStream);
        dataInputStream = new DataInputStream(gzipInputStream);

        // decode data
        return ((T) this.childDecoder.decode(dataInputStream, licenseType));
    } catch (IOException ex) {
        throw new LicenseDecoderException(ex.getMessage(), ex);
    } finally {
        IOUtils.closeQuietly(dataInputStream);
        IOUtils.closeQuietly(gzipInputStream);
        IOUtils.closeQuietly(inputStream);
    }
}
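The decoder above reverses a compress-then-encode pipeline. The producing side is not shown in the source; a minimal JDK-only sketch of what it could look like, using java.util.Base64 and GZIPOutputStream instead of the project's Guava and Commons IO helpers, with purely illustrative fields:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.Base64;
    import java.util.zip.GZIPOutputStream;

    public class LicenseEncodeSketch {
        // Fields are illustrative, not the real license layout.
        public static String encode(long issuedAt, String holder) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(new GZIPOutputStream(bytes))) {
                out.writeLong(issuedAt);
                out.writeUTF(holder);
            }
            // closing the DataOutputStream finishes the GZIP stream before the bytes are encoded
            return Base64.getEncoder().encodeToString(bytes.toByteArray());
        }
    }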
From source file:com.lyonsdensoftware.vanitymirror.PythonConnectionThread.java
@Override
public void run() {
    // Try connecting to the master server
    try {
        // Create a socket to connect to the server
        this.socket = new Socket(this.mainWindow.getConfigFile().getHostname(),
                this.mainWindow.getConfigFile().getPort());

        // Note it in the log
        log.info("LOG", "Connect to MasterServer at IP: " + socket.getInetAddress().getHostAddress()
                + " on PORT: " + socket.getPort());

        connectedToServer = true;

        // Create an input stream to receive data from the server
        mainWindow.setFromServer(new DataInputStream(socket.getInputStream()));

        // Create an output stream to send data to the server
        mainWindow.setToServer(new DataOutputStream(socket.getOutputStream()));

        // Main Loop
        while (runThread) {
            // Check for data from the server
            BufferedReader in = new BufferedReader(new InputStreamReader(mainWindow.getFromServer()));

            if (in.ready()) {
                // Convert in data to json
                Gson gson = new Gson();
                JSONObject json = new JSONObject(gson.fromJson(in.readLine(), JSONObject.class));

                if (json.keys().hasNext()) {
                    // handle the data
                    handleDataFromServer(json);
                } else {
                    System.out.println(json.toString());
                }
            }
        }

        // Loop not running now so close connection
        socket.close();
        mainWindow.getFromServer().close();
        mainWindow.getToServer().close();
    } catch (IOException ex) {
        log.error("ERROR", ex.toString());
    }
}
From source file:net.mybox.mybox.ServerClientConnection.java
public ServerClientConnection(Server _server, Socket _socket) {
    server = _server;
    socket = _socket;
    handle = socket.getPort();

    try {
        inStream = socket.getInputStream();
        outStream = socket.getOutputStream();
    } catch (Exception e) {
        System.out.println("Error setting socket streams");
    }

    dataInStream = new DataInputStream(inStream);
    dataOutStream = new DataOutputStream(outStream);

    // load the sqlite-JDBC driver using the current class loader
    try {
        Class.forName("org.sqlite.JDBC");
        System.out.println(String.format("SQLiteJDBC is running in %s mode",
                org.sqlite.SQLiteJDBCLoader.isNativeMode() ? "OS native" : "pure-java"));
    } catch (Exception e) {
        System.out.println("Unable to load sqlite driver " + e.getMessage());
        System.exit(1);
    }

    startServerInputListenerThread();
}
From source file:com.fluidops.iwb.cache.ImageFileCache.java
/**
 * Return image mapping stored in one file using cache
 * if file has already been loaded.
 *
 * @param file
 * @return The defined mappings
 */
public Map<String, String> getImageMappings(String file) {
    Map<String, String> mapping = cache.get(file);

    // if not defined load
    if (mapping == null) {
        mapping = new HashMap<String, String>();
        BufferedReader br = null;
        try {
            FileInputStream fstream = new FileInputStream(file);
            DataInputStream in = new DataInputStream(fstream);
            br = new BufferedReader(new InputStreamReader(in));
            String line;
            while ((line = br.readLine()) != null) {
                String[] spl = line.split("=");
                if (spl.length == 2 && spl[0] != null && spl[1] != null)
                    mapping.put(spl[0].trim(), spl[1].trim());
            }
        } catch (Exception e) {
            logger.warn(e.getMessage());
        } finally {
            IOUtils.closeQuietly(br);
        }

        // store in cache
        cache.put(file, mapping);
    }
    return mapping;
}
From source file:com.aliyun.openservices.tablestore.hadoop.Endpoint.java
public static Endpoint deserialize(String in) {
    if (in == null) {
        return null;
    }
    byte[] buf = Base64.decodeBase64(in);
    ByteArrayInputStream is = new ByteArrayInputStream(buf);
    DataInputStream din = new DataInputStream(is);
    try {
        return read(din);
    } catch (IOException ex) {
        return null;
    }
}
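Several of the examples above (the backup helper, the socket connections, and this Endpoint deserializer) pair DataInputStream with DataOutputStream so that each read call matches a prior write call in the same order. A minimal in-memory round trip showing that symmetry; the field values are illustrative only:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    public class RoundTripSketch {
        public static void main(String[] args) throws IOException {
            // write primitives in a fixed order
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            try (DataOutputStream out = new DataOutputStream(bytes)) {
                out.writeLong(System.currentTimeMillis());
                out.writeUTF("example-endpoint");   // illustrative value
            }

            // read them back in the same order
            try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
                long timestamp = in.readLong();
                String name = in.readUTF();
                System.out.println(timestamp + " " + name);
            }
        }
    }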