List of usage examples for java.nio.channels.FileChannel.close()
public final void close() throws IOException
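A FileChannel must be closed when it is no longer needed; per the Channel contract, close() is idempotent, so closing an already-closed channel has no effect. Before the project examples below, here is a minimal sketch (the file name data.bin is hypothetical) showing an explicit close() in a finally block and the equivalent try-with-resources form, which calls close() automatically:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class CloseExample {
    public static void main(String[] args) throws IOException {
        // Explicit close() in a finally block
        FileChannel channel = FileChannel.open(Paths.get("data.bin"), StandardOpenOption.READ);
        try {
            ByteBuffer buffer = ByteBuffer.allocate(1024);
            channel.read(buffer);
        } finally {
            channel.close(); // idempotent: closing an already-closed channel is a no-op
        }

        // try-with-resources closes the channel automatically, even on exceptions
        try (FileChannel ch = FileChannel.open(Paths.get("data.bin"), StandardOpenOption.READ)) {
            ByteBuffer buffer = ByteBuffer.allocate(1024);
            ch.read(buffer);
        }
    }
}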
From source file:net.librec.data.convertor.appender.SocialDataAppender.java
/**
 * Read data from the data file. Note that duplicated lines are not handled.
 *
 * @param inputDataPath the path of the data file
 * @throws IOException if an I/O error occurs during reading
 */
private void readData(String inputDataPath) throws IOException {
    // Table {row-id, col-id, rate}
    Table<Integer, Integer, Double> dataTable = HashBasedTable.create();
    // Map {col-id, multiple row-id}: used to fast build a rating matrix
    Multimap<Integer, Integer> colMap = HashMultimap.create();
    // BiMap {raw id, inner id} userIds, itemIds
    final List<File> files = new ArrayList<File>();
    final ArrayList<Long> fileSizeList = new ArrayList<Long>();
    SimpleFileVisitor<Path> finder = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            fileSizeList.add(file.toFile().length());
            files.add(file.toFile());
            return super.visitFile(file, attrs);
        }
    };
    Files.walkFileTree(Paths.get(inputDataPath), finder);
    long allFileSize = 0;
    for (Long everyFileSize : fileSizeList) {
        allFileSize = allFileSize + everyFileSize.longValue();
    }
    // loop over every data file collected by walkFileTree
    for (File dataFile : files) {
        FileInputStream fis = new FileInputStream(dataFile);
        FileChannel fileRead = fis.getChannel();
        ByteBuffer buffer = ByteBuffer.allocate(BSIZE);
        int len;
        String bufferLine = new String();
        byte[] bytes = new byte[BSIZE];
        while ((len = fileRead.read(buffer)) != -1) {
            buffer.flip();
            buffer.get(bytes, 0, len);
            bufferLine = bufferLine.concat(new String(bytes, 0, len)).replaceAll("\r", "\n");
            String[] bufferData = bufferLine.split("(\n)+");
            boolean isComplete = bufferLine.endsWith("\n");
            int loopLength = isComplete ? bufferData.length : bufferData.length - 1;
            for (int i = 0; i < loopLength; i++) {
                String line = new String(bufferData[i]);
                String[] data = line.trim().split("[ \t,]+");
                String userA = data[0];
                String userB = data[1];
                Double rate = (data.length >= 3) ? Double.valueOf(data[2]) : 1.0;
                if (userIds.containsKey(userA) && userIds.containsKey(userB)) {
                    int row = userIds.get(userA);
                    int col = userIds.get(userB);
                    dataTable.put(row, col, rate);
                    colMap.put(col, row);
                }
            }
            if (!isComplete) {
                // keep the trailing partial line for the next read
                bufferLine = bufferData[bufferData.length - 1];
            }
            buffer.clear();
        }
        fileRead.close();
        fis.close();
    }
    int numRows = userIds.size(), numCols = userIds.size();
    // build rating matrix
    userSocialMatrix = new SparseMatrix(numRows, numCols, dataTable, colMap);
    // release memory of data table
    dataTable = null;
}
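In the example above, fileRead.close() and fis.close() are skipped if read() throws mid-loop. As a sketch only (assuming the same dataFile and BSIZE from the surrounding method), the channel can instead be closed on all paths with try-with-resources:

ByteBuffer buffer = ByteBuffer.allocate(BSIZE);
try (FileInputStream fis = new FileInputStream(dataFile);
     FileChannel fileRead = fis.getChannel()) {
    int len;
    while ((len = fileRead.read(buffer)) != -1) {
        buffer.flip();
        // ... consume len bytes from the buffer ...
        buffer.clear();
    }
} // both the channel and the stream are closed here, even if read() throws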
From source file:org.mule.transport.file.FileMessageReceiver.java
/**
 * Try to acquire a lock on a file and release it immediately. Usually used as a
 * quick check to see if another process is still holding onto the file, e.g. a
 * large file (more than 100MB) is still being written to.
 *
 * @param sourceFile file to check
 * @return <code>true</code> if the file can be locked
 */
protected boolean attemptFileLock(final File sourceFile) throws MuleException {
    // check if the file can be processed, be sure that it's not still being written;
    // if the file can't be locked don't process it yet, since creating
    // a new FileInputStream() will throw an exception
    FileLock lock = null;
    FileChannel channel = null;
    boolean fileCanBeLocked = false;
    try {
        channel = new RandomAccessFile(sourceFile, "rw").getChannel();
        // Try acquiring the lock without blocking. This method returns
        // null or throws an exception if the file is already locked.
        lock = channel.tryLock();
    } catch (FileNotFoundException fnfe) {
        throw new DefaultMuleException(FileMessages.fileDoesNotExist(sourceFile.getName()));
    } catch (IOException e) {
        // Unable to create a lock. This exception should only be thrown when
        // the file is already locked. No sense in repeating the message over
        // and over.
    } finally {
        if (lock != null) {
            // if lock is null the file is locked by another process
            fileCanBeLocked = true;
            try {
                // Release the lock
                lock.release();
            } catch (IOException e) {
                // ignore
            }
        }
        if (channel != null) {
            try {
                // Close the file
                channel.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
    return fileCanBeLocked;
}
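Since Java 7, FileLock implements AutoCloseable (close() delegates to release()), so the same probe can be written with try-with-resources. A sketch only, not a drop-in replacement: it folds the FileNotFoundException case into the false branch rather than rethrowing a MuleException:

static boolean canLock(final File sourceFile) {
    try (FileChannel channel = new RandomAccessFile(sourceFile, "rw").getChannel();
         FileLock lock = channel.tryLock()) {
        return lock != null; // null means another process already holds the lock
    } catch (IOException e) {
        return false; // could not open or lock the file; treat it as not lockable yet
    }
}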
From source file:eu.stratosphere.nephele.taskmanager.runtime.EnvelopeConsumptionLog.java
private void loadNextOutstandingEnvelopes() {
    final int pos = this.outstandingEnvelopesAsIntBuffer.position();
    if (pos > 0) {
        final int rem = this.outstandingEnvelopesAsIntBuffer.remaining();
        for (int i = 0; i < rem; ++i) {
            this.outstandingEnvelopesAsIntBuffer.put(i, this.outstandingEnvelopesAsIntBuffer.get(i + pos));
        }
        this.outstandingEnvelopesAsIntBuffer.position(0);
        this.outstandingEnvelopesAsIntBuffer.limit(rem);
    }
    if (this.numberOfEntriesReadFromLog == this.numberOfInitialLogEntries) {
        return;
    }
    FileChannel fc = null;
    try {
        this.outstandingEnvelopesAsByteBuffer
                .position(this.outstandingEnvelopesAsIntBuffer.limit() * SIZE_OF_INTEGER);
        this.outstandingEnvelopesAsByteBuffer.limit(this.outstandingEnvelopesAsByteBuffer.capacity());
        fc = new FileInputStream(this.logFile).getChannel();
        fc.position(this.numberOfEntriesReadFromLog * SIZE_OF_INTEGER);
        int totalBytesRead = 0;
        while (this.outstandingEnvelopesAsByteBuffer.hasRemaining()) {
            final int bytesRead = fc.read(this.outstandingEnvelopesAsByteBuffer);
            if (bytesRead == -1) {
                break;
            }
            totalBytesRead += bytesRead;
        }
        if (totalBytesRead % SIZE_OF_INTEGER != 0) {
            LOG.error("Read " + totalBytesRead + " from " + this.logFile.getAbsolutePath()
                    + ", file may be corrupt");
        }
        final int numberOfNewEntries = totalBytesRead / SIZE_OF_INTEGER;
        this.outstandingEnvelopesAsIntBuffer
                .limit(this.outstandingEnvelopesAsIntBuffer.limit() + numberOfNewEntries);
        this.numberOfEntriesReadFromLog += numberOfNewEntries;
        fc.close();
    } catch (IOException ioe) {
        LOG.error(StringUtils.stringifyException(ioe));
    } finally {
        if (fc != null) {
            try {
                fc.close(); // safe even if already closed above: close() is idempotent
            } catch (IOException ioe) {
            }
        }
    }
}
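The method above seeks with fc.position() before reading and relies on the finally block for cleanup. A minimal stand-alone sketch of the same positioned-read pattern, with try-with-resources handling the close; the names logFile, dst, and the 4-byte record size are placeholders:

static int readRecordsFrom(File logFile, ByteBuffer dst, long recordsAlreadyRead) throws IOException {
    final int SIZE_OF_INTEGER = 4; // assumption: log entries are 32-bit integers
    try (FileChannel fc = new FileInputStream(logFile).getChannel()) {
        fc.position(recordsAlreadyRead * SIZE_OF_INTEGER); // skip entries read earlier
        int total = 0;
        while (dst.hasRemaining()) {
            int n = fc.read(dst);
            if (n == -1) {
                break; // end of file
            }
            total += n;
        }
        return total;
    } // channel closed here on every path
}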
From source file:com.rapidminer.tools.Tools.java
public static void copy(File srcPath, File dstPath) throws IOException {
    if (srcPath.isDirectory()) {
        if (!dstPath.exists()) {
            boolean result = dstPath.mkdir();
            if (!result) {
                throw new IOException("Unable to create directory: " + dstPath);
            }
        }
        String[] files = srcPath.list();
        for (String file : files) {
            copy(new File(srcPath, file), new File(dstPath, file));
        }
    } else {
        if (srcPath.exists()) {
            FileChannel in = null;
            FileChannel out = null;
            try (FileInputStream fis = new FileInputStream(srcPath);
                    FileOutputStream fos = new FileOutputStream(dstPath)) {
                in = fis.getChannel();
                out = fos.getChannel();
                long size = in.size();
                MappedByteBuffer buf = in.map(FileChannel.MapMode.READ_ONLY, 0, size);
                out.write(buf);
            } finally {
                if (in != null) {
                    in.close();
                }
                if (out != null) {
                    out.close();
                }
            }
        }
    }
}
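One caveat about the copy above: a single out.write(buf) is not guaranteed to drain the whole mapped buffer, since WritableByteChannel.write() may write fewer bytes than remain. A sketch of the file branch with a write loop, assuming the same srcPath/dstPath parameters; closing the streams also closes their channels, so the extra finally block becomes unnecessary:

static void copyFile(File srcPath, File dstPath) throws IOException {
    try (FileInputStream fis = new FileInputStream(srcPath);
         FileOutputStream fos = new FileOutputStream(dstPath)) {
        FileChannel in = fis.getChannel();
        FileChannel out = fos.getChannel();
        MappedByteBuffer buf = in.map(FileChannel.MapMode.READ_ONLY, 0, in.size());
        while (buf.hasRemaining()) { // write() may need several calls to drain the buffer
            out.write(buf);
        }
    } // closing the streams closes their channels too
}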
From source file:com.ehdev.chronos.lib.Chronos.java
@Override
public void onUpgrade(SQLiteDatabase db, ConnectionSource connectionSource, int oldVersion, int newVersion) {
    try {
        Log.w(TAG, "Upgrading database, this will drop tables and recreate.");
        Log.w(TAG, "oldVersion: " + oldVersion + "\tnewVersion: " + newVersion);

        // Back up the database
        try {
            File sd = Environment.getExternalStorageDirectory();
            File data = Environment.getDataDirectory();
            if (sd.canWrite()) {
                String currentDBPath = "/data/com.kopysoft.chronos/databases/" + DATABASE_NAME;
                String backupDBPath = DATABASE_NAME + ".db";
                File currentDB = new File(data, currentDBPath);
                File backupDB = new File(sd, backupDBPath);
                if (currentDB.exists()) {
                    FileChannel src = new FileInputStream(currentDB).getChannel();
                    FileChannel dst = new FileOutputStream(backupDB).getChannel();
                    dst.transferFrom(src, 0, src.size());
                    src.close();
                    dst.close();
                }
            }
        } catch (Exception e) {
            Log.e(TAG, "ERROR: Can not move file");
        }

        /*
        db.execSQL("CREATE TABLE " + TABLE_NAME_CLOCK
                + " ( _id INTEGER PRIMARY KEY NOT NULL, time LONG NOT NULL, actionReason INTEGER NOT NULL )");
        db.execSQL("CREATE TABLE " + TABLE_NAME_NOTE
                + " ( _id LONG PRIMARY KEY, note_string TEXT NOT NULL, time LONG NOT NULL )");
        */

        if (oldVersion < 15) {
            DateTime jobMidnight = DateTime.now().withDayOfWeek(7).minusWeeks(1).toDateMidnight().toDateTime()
                    .withZone(DateTimeZone.getDefault());
            Job currentJob = new Job("", 10, jobMidnight, PayPeriodDuration.TWO_WEEKS);
            SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(gContext);
            currentJob.setPayRate(Float.valueOf(pref.getString("normal_pay", "7.25")));
            currentJob.setOvertime(Float.valueOf(pref.getString("over_time_threshold", "40")));
            currentJob.setDoubletimeThreshold(Float.valueOf(pref.getString("double_time_threshold", "60")));

            SharedPreferences.Editor edit = pref.edit();
            edit.remove("8_or_40_hours"); // moved from string to boolean
            edit.commit();

            String date[] = pref.getString("date", "2011.1.17").split("\\p{Punct}");
            jobMidnight = new DateTime(Integer.parseInt(date[0]), Integer.parseInt(date[1]),
                    Integer.parseInt(date[2]), 0, 0);
            currentJob.setStartOfPayPeriod(jobMidnight.withZone(DateTimeZone.getDefault()));

            List<Punch> punches = new LinkedList<Punch>();
            List<Task> tasks = new LinkedList<Task>();
            List<Note> notes = new LinkedList<Note>();
            Task newTask;

            // Basic elements
            newTask = new Task(currentJob, 0, "Regular");
            tasks.add(newTask);
            newTask = new Task(currentJob, 1, "Lunch Break");
            newTask.setEnablePayOverride(true);
            newTask.setPayOverride(-7.25f);
            tasks.add(newTask);
            newTask = new Task(currentJob, 2, "Other Break");
            newTask.setEnablePayOverride(true);
            newTask.setPayOverride(-7.25f);
            tasks.add(newTask);
            newTask = new Task(currentJob, 3, "Travel");
            tasks.add(newTask);
            newTask = new Task(currentJob, 4, "Admin");
            tasks.add(newTask);
            newTask = new Task(currentJob, 5, "Sick Leave");
            tasks.add(newTask);
            newTask = new Task(currentJob, 6, "Personal Time");
            tasks.add(newTask);
            newTask = new Task(currentJob, 7, "Other");
            tasks.add(newTask);
            newTask = new Task(currentJob, 8, "Holiday Pay");
            tasks.add(newTask);

            Cursor cursor = db.query("clockactions", null, null, null, null, null, "_id desc");
            final int colTime = cursor.getColumnIndex("time");
            final int colAR = cursor.getColumnIndex("actionReason");
            if (cursor.moveToFirst()) {
                do {
                    long time = cursor.getLong(colTime);
                    Task type = tasks.get(0);
                    if (colAR != -1) {
                        type = tasks.get(cursor.getInt(colAR));
                    }
                    punches.add(new Punch(currentJob, type, new DateTime(time)));
                } while (cursor.moveToNext());
            }
            if (cursor != null && !cursor.isClosed()) {
                cursor.close();
            }

            cursor = db.query("notes", null, null, null, null, null, "_id desc");
            final int colInsertTime = cursor.getColumnIndex("time");
            final int colText = cursor.getColumnIndex("note_string");
            if (cursor.moveToFirst()) {
                do {
                    long time = cursor.getLong(colInsertTime);
                    String note = cursor.getString(colText);
                    notes.add(new Note(new DateTime(time), currentJob, note));
                } while (cursor.moveToNext());
            }
            if (cursor != null && !cursor.isClosed()) {
                cursor.close();
            }

            db.execSQL("DROP TABLE IF EXISTS clockactions");
            db.execSQL("DROP TABLE IF EXISTS notes");
            db.execSQL("DROP TABLE IF EXISTS misc");

            // Recreate DB
            TableUtils.createTable(connectionSource, Punch.class); // Punch - create table
            TableUtils.createTable(connectionSource, Task.class);  // Task - create table
            TableUtils.createTable(connectionSource, Job.class);   // Job - create table
            TableUtils.createTable(connectionSource, Note.class);  // Note - create table

            // Recreate entries
            Dao<Task, String> taskDAO = getTaskDao();
            Dao<Job, String> jobDAO = getJobDao();
            Dao<Note, String> noteDAO = getNoteDao();
            Dao<Punch, String> punchDOA = getPunchDao();
            jobDAO.create(currentJob);
            for (Task t : tasks) {
                taskDAO.create(t);
            }
            for (Note n : notes) {
                noteDAO.create(n);
            }
            for (Punch p : punches) {
                punchDOA.create(p);
            }
            // "CREATE TABLE " + TABLE_NAME_NOTE + " ( _id LONG PRIMARY KEY, note_string TEXT NOT NULL, time LONG NOT NULL )"
        } else if (oldVersion == 15) {
            // DB - 15
            //TableUtils.dropTable(connectionSource, Punch.class, true); // Punch - drop all
            //TableUtils.dropTable(connectionSource, Task.class, true);  // Task - drop all
            //TableUtils.dropTable(connectionSource, Job.class, true);   // Job - drop all
            //TableUtils.dropTable(connectionSource, Note.class, true);  // Note - drop all
            Dao<Task, String> taskDAO = getTaskDao();
            List<Task> tasks = taskDAO.queryForAll();
            db.execSQL("DROP TABLE IF EXISTS tasks");
            // create
            TableUtils.createTable(connectionSource, Task.class); // Task - create table
            for (Task t : tasks) {
                taskDAO.create(t);
            }
        } else if (oldVersion == 16) {
            //TableUtils.dropTable(connectionSource, Punch.class, true); // Punch - drop all
            //TableUtils.dropTable(connectionSource, Task.class, true);  // Task - drop all
            //TableUtils.dropTable(connectionSource, Job.class, true);   // Job - drop all
            TableUtils.dropTable(connectionSource, Note.class, true); // Note - drop all
            // create
            TableUtils.createTable(connectionSource, Note.class); // Note - create table
        } else if (oldVersion == 17) {
            // update db from old version
            Dao<Job, String> dao = getJobDao();
            dao.executeRaw("ALTER TABLE `jobs` ADD COLUMN fourtyHourWeek BOOLEAN DEFAULT 1;");
        } else if (oldVersion == 18) {
            Dao<Task, String> taskDAO = getTaskDao();
            List<Task> tasks = taskDAO.queryForAll();
            Job currentJob = getAllJobs().get(0);
            if (tasks.size() == 0) {
                Task newTask;
                // Basic elements
                newTask = new Task(currentJob, 0, "Regular");
                tasks.add(newTask);
                newTask = new Task(currentJob, 1, "Lunch Break");
                newTask.setEnablePayOverride(true);
                newTask.setPayOverride(-7.25f);
                tasks.add(newTask);
                newTask = new Task(currentJob, 2, "Other Break");
                newTask.setEnablePayOverride(true);
                newTask.setPayOverride(-7.25f);
                tasks.add(newTask);
                newTask = new Task(currentJob, 3, "Travel");
                tasks.add(newTask);
                newTask = new Task(currentJob, 4, "Admin");
                tasks.add(newTask);
                newTask = new Task(currentJob, 5, "Sick Leave");
                tasks.add(newTask);
                newTask = new Task(currentJob, 6, "Personal Time");
                tasks.add(newTask);
                newTask = new Task(currentJob, 7, "Other");
                tasks.add(newTask);
                newTask = new Task(currentJob, 8, "Holiday Pay");
                tasks.add(newTask);
                for (Task t : tasks) {
                    taskDAO.createOrUpdate(t);
                }
            }
        } else if (oldVersion == 19) {
            try {
                TableUtils.dropTable(connectionSource, Job.class, true); // Job - drop table
                TableUtils.createTable(connectionSource, Job.class);     // Job - create table
                DateTime jobMidnight = new DateMidnight().toDateTime().minusWeeks(1)
                        .withZone(DateTimeZone.getDefault());
                Job thisJob = new Job("", 7.25f, jobMidnight, PayPeriodDuration.TWO_WEEKS);
                SharedPreferences pref = PreferenceManager.getDefaultSharedPreferences(gContext);
                try {
                    thisJob.setPayRate(Float.valueOf(pref.getString("normal_pay", "7.25")));
                } catch (NumberFormatException e) {
                    thisJob.setPayRate(7.25f);
                    Log.d(TAG, e.getMessage());
                }
                try {
                    thisJob.setOvertime(Float.valueOf(pref.getString("over_time_threshold", "40")));
                } catch (NumberFormatException e) {
                    thisJob.setOvertime(40f);
                    Log.d(TAG, e.getMessage());
                }
                try {
                    thisJob.setDoubletimeThreshold(Float.valueOf(pref.getString("double_time_threshold", "60")));
                } catch (NumberFormatException e) {
                    thisJob.setDoubletimeThreshold(60f);
                    Log.d(TAG, e.getMessage());
                }
                String date[] = pref.getString("date", "2011.1.17").split("\\p{Punct}");
                String time[] = pref.getString("time", "00:00").split("\\p{Punct}");
                thisJob.setStartOfPayPeriod(new DateTime(Integer.parseInt(date[0]), Integer.parseInt(date[1]),
                        Integer.parseInt(date[2]), Integer.parseInt(time[0]), Integer.parseInt(time[1])));
                switch (Integer.parseInt(pref.getString("len_of_month", "2"))) {
                case 1:
                    thisJob.setDuration(PayPeriodDuration.ONE_WEEK);
                    break;
                case 2:
                    thisJob.setDuration(PayPeriodDuration.TWO_WEEKS);
                    break;
                case 3:
                    thisJob.setDuration(PayPeriodDuration.THREE_WEEKS);
                    break;
                case 4:
                    thisJob.setDuration(PayPeriodDuration.FOUR_WEEKS);
                    break;
                case 5:
                    thisJob.setDuration(PayPeriodDuration.FULL_MONTH);
                    break;
                case 6:
                    thisJob.setDuration(PayPeriodDuration.FIRST_FIFTEENTH);
                    break;
                default:
                    thisJob.setDuration(PayPeriodDuration.TWO_WEEKS);
                    break;
                }
                getJobDao().create(thisJob);
            } catch (SQLException e1) {
                e1.printStackTrace();
            }
        } else if (oldVersion == 20) {
            getJobDao().executeRaw(
                    "ALTER TABLE 'jobs' ADD COLUMN '" + Job.OVERTIME_OPTIONS + "' VARCHAR default 'NONE';");
            getJobDao().executeRaw("ALTER TABLE 'jobs' ADD COLUMN '" + Job.SATURDAY_OVERRIDE_FIELD
                    + "' VARCHAR default 'NONE';");
            getJobDao().executeRaw("ALTER TABLE 'jobs' ADD COLUMN '" + Job.SUNDAY_OVERRIDE_FIELD
                    + "' VARCHAR default 'NONE';");
            List<Job> jobList = getAllJobs();
            for (Job job : jobList) {
                GenericRawResults<String[]> rawResults = getJobDao().queryRaw(
                        "select fourtyHourWeek,overTimeEnabled from jobs where job_id = " + job.getID());
                String[] results = rawResults.getResults().get(0);
                // NOTE: comparing strings with == is fragile; equals() would be safer
                if (results[0] == "0") {
                    job.setOvertimeOptions(OvertimeOptions.NONE);
                } else {
                    if (results[1] == "0") {
                        job.setOvertimeOptions(OvertimeOptions.DAY);
                    } else if (results[1] == "1") { // being paranoid
                        job.setOvertimeOptions(OvertimeOptions.WEEK);
                    }
                }
            }
            // delete the old columns
            getJobDao().executeRaw("ALTER TABLE 'jobs' DROP COLUMN 'fourtyHourWeek';");
            getJobDao().executeRaw("ALTER TABLE 'jobs' DROP COLUMN 'overTimeEnabled';");
        }
    } catch (SQLException e) {
        e.printStackTrace();
        Log.e(TAG, "Could not upgrade the table for Thing", e);
    }
}
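In the backup step near the top of this method, src.close() and dst.close() are skipped if transferFrom() throws, and transferFrom() itself may transfer fewer bytes than requested. A sketch of that step (hypothetical helper name) that closes both channels on every path and loops until the copy is complete:

static void backupDatabase(File currentDB, File backupDB) throws IOException {
    try (FileChannel src = new FileInputStream(currentDB).getChannel();
         FileChannel dst = new FileOutputStream(backupDB).getChannel()) {
        long position = 0;
        long size = src.size();
        while (position < size) { // transferFrom() may copy fewer bytes than requested
            position += dst.transferFrom(src, position, size - position);
        }
    } // both channels are closed here, even if the copy fails midway
}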
From source file:imageuploader.ImgWindow.java
private void generatedImages(String code, File[] files)
        throws IOException, IllegalStateException, FTPIllegalReplyException, FTPException, FtpException {
    int ubicacion;
    File directory = new File(code);
    ArrayList<File> imageList = new ArrayList();
    DefaultListModel mod = (DefaultListModel) jL_Info.getModel();
    if (!directory.exists()) {
        boolean result = directory.mkdir();
        if (!result) {
            JOptionPane.showMessageDialog(rootPane, "Directory -- Error");
        } else {
            File dir, img;
            boolean rst;
            FileChannel source = null;
            FileChannel dest = null;
            FtpCredentials.getInstancia().connect();
            for (int i = 0; i < files.length; i++) {
                int val = 1 + i;
                // Create the Angle directory
                dir = new File(directory, "Angle" + val);
                rst = dir.mkdir();
                // Copy images
                //DefaultListModel mod = (DefaultListModel) jL_Info.getModel();
                for (int j = 0; j < mod.getSize(); j++) {
                    img = new File(dir, code + "~" + mod.getElementAt(j).toString() + ".jpg");
                    rst = img.createNewFile();
                    imageList.add(img);
                    source = new RandomAccessFile(files[i], "rw").getChannel();
                    dest = new RandomAccessFile(img, "rw").getChannel();
                    long position = 0;
                    long count = source.size();
                    source.transferTo(position, count, dest);
                    if (source != null) {
                        source.close();
                    }
                    if (dest != null) {
                        dest.close();
                    }
                }
                ubicacion = i + 1;
                // Using the private library
                if (jCHBox_MY.isSelected()) {
                    FtpCredentials.getInstancia().getClient().setDir("/Myron/angle" + ubicacion + "Flash");
                    FtpCredentials.getInstancia().copyImage(imageList);
                }
                if (jCHB_CA.isSelected()) {
                    FtpCredentials.getInstancia().getClient().setDir("/canada/angle" + ubicacion + "Flash");
                    FtpCredentials.getInstancia().copyImage(imageList);
                }
                if (jCHB_AZ.isSelected()) {
                    FtpCredentials.getInstancia().getClient().setDir("/australia/angle" + ubicacion + "Flash");
                    FtpCredentials.getInstancia().copyImage(imageList);
                }
                imageList.clear();
            }
            mod.removeAllElements();
            jTF_StyleCode.setText("");
            //jL_Info.removeAll();
            //list.removeAllElements();
            JOptionPane.showMessageDialog(rootPane, "Images uploaded");
        }
    } else {
        JOptionPane.showMessageDialog(rootPane, "There is a folder with the same name in the same location");
    }
}
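In the inner loop above, source.close() and dest.close() are skipped if transferTo() throws, leaking two channels per failed image. A per-file helper (hypothetical name; the source is opened read-only since it is never written) with try-with-resources, as a sketch:

static void copyImage(File from, File to) throws IOException {
    try (FileChannel source = new RandomAccessFile(from, "r").getChannel();
         FileChannel dest = new RandomAccessFile(to, "rw").getChannel()) {
        long position = 0;
        long count = source.size();
        while (position < count) { // transferTo() may copy fewer bytes than requested
            position += source.transferTo(position, count - position, dest);
        }
    } // both channels are closed here on every path
}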
From source file:eu.optimis.vc.api.IsoCreator.IsoImageCreation.java
private void storeAgents(File scriptsDirectory) {
    if (virtualMachine.isHasIPS() || virtualMachine.isHasBTKey() || virtualMachine.isHasVPNKey()) {
        LOGGER.debug("Adding agents to ISO");

        // Add the agent tar ball
        String agentsTarBallName = "vpn.tar.gz";

        // Agents tar ball source
        File agentsTarBallFileSource = new File(
                configuration.getAgentsDirectory() + File.separator + agentsTarBallName);
        LOGGER.debug("agentsTarBallFileSource is: " + agentsTarBallFileSource.getPath());

        // Destination folder
        File agentsIsoDirectory = new File(isoDataDirectory + File.separator + "agents");
        agentsIsoDirectory.mkdirs();
        LOGGER.debug("agentsIsoDirectory is: " + agentsIsoDirectory.getPath());

        // Agents tar ball destination
        File agentsTarBallFileDestination = new File(agentsIsoDirectory + File.separator + agentsTarBallName);
        LOGGER.debug("agentsTarBallFileDestination is: " + agentsTarBallFileDestination.getPath());

        // Copy the file to the ISO directory
        LOGGER.debug("Copying agent file to ISO directory...");
        FileChannel source = null;
        FileChannel destination = null;
        try {
            if (!agentsTarBallFileDestination.exists()) {
                agentsTarBallFileDestination.createNewFile();
            }
            source = new FileInputStream(agentsTarBallFileSource).getChannel();
            destination = new FileOutputStream(agentsTarBallFileDestination).getChannel();
            destination.transferFrom(source, 0, source.size());
            LOGGER.debug("Copied agent file to ISO directory, size is : "
                    + agentsTarBallFileDestination.length());
            agentsTarBallFileDestination.getTotalSpace();
        } catch (IOException e) {
            LOGGER.error("Failed to create agents tar ball with path: "
                    + agentsTarBallFileDestination.getPath(), e);
        } finally {
            if (source != null) {
                try {
                    source.close();
                } catch (IOException e) {
                    LOGGER.error("Failed to close source agents tar ball file with path: "
                            + agentsTarBallFileSource.getPath(), e);
                }
            }
            if (destination != null) {
                try {
                    destination.close();
                } catch (IOException e) {
                    LOGGER.error("Failed to close destination agents tar ball file with path: "
                            + agentsTarBallFileDestination.getPath(), e);
                }
            }
        }

        // Add the agent script
        File agentsFile = new File(scriptsDirectory + File.separator + "agents.sh");
        try {
            LOGGER.debug(ATTEMPTING_TO_CREATE_FILE + agentsFile.getPath());
            agentsFile.createNewFile();
            LOGGER.debug(CREATED_FILE + agentsFile.getPath());

            // TODO: This should be stored somewhere else and not hardcoded.
            // The mount location is currently hard coded in the init.d scripts
            // of the base VM: /mnt/context/
            String agentsScript = "#!/bin/bash\n" + "#Setup environment\n"
                    + "touch /var/lock/subsys/local\n" + "source /etc/profile\n" + "\n"
                    + "#Extract the agent from the ISO agent directory to /opt/optimis/vpn/\n"
                    + "mkdir -p /opt/optimis\n"
                    + "tar zxvf /mnt/context/agents/" + agentsTarBallName + " -C /opt/optimis/\n"
                    + "chmod -R 777 /opt/optimis/vpn\n" + "\n"
                    + "#Install and start the agents\n" + "\n";

            // Add IPS install and init script to /mnt/context/scripts/agents.sh
            if (virtualMachine.isHasIPS()) {
                agentsScript += "#IPS\n" + "/opt/optimis/vpn/IPS_Meta.sh\n"
                        + "/bin/date > /opt/optimis/vpn/dsa.log\n" + "\n";
            }

            // Add KMS install and init script to /mnt/context/scripts/agents.sh
            if (virtualMachine.isHasBTKey()) {
                agentsScript += "#KMS\n" + "/opt/optimis/vpn/KMS_Meta.sh\n"
                        + "/bin/date >> /opt/optimis/vpn/scagent.log\n" + "\n";
            }

            // Add VPN install and init script to /mnt/context/scripts/agents.sh
            if (virtualMachine.isHasVPNKey()) {
                agentsScript += "#VPN\n" + "/opt/optimis/vpn/VPN_Meta.sh\n";
            }

            // Write out the agents file
            FileOutputStream fos = new FileOutputStream(agentsFile.getPath());
            fos.write(agentsScript.getBytes());
            fos.close();
            LOGGER.debug("Writing agents script complete!");
        } catch (IOException e) {
            LOGGER.error("Failed to create agents script file with path: " + agentsFile.getPath(), e);
        }
    } else {
        LOGGER.debug("Agents not needed by service!");
    }
}
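Assuming Java 7+, the manual channel copy in storeAgents() can be replaced by java.nio.file.Files.copy, which opens and closes its own channels internally; a sketch with a hypothetical helper name:

import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

static void copyTarBall(File source, File destination) throws IOException {
    // Files.copy manages (and closes) the underlying channels itself
    Files.copy(source.toPath(), destination.toPath(), StandardCopyOption.REPLACE_EXISTING);
}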
From source file:com.owncloud.android.operations.ChunkedUploadFileOperation.java
@Override
protected int uploadFile(WebdavClient client) throws HttpException, IOException {
    int status = -1;
    FileChannel channel = null;
    RandomAccessFile raf = null;
    try {
        File file = new File(getStoragePath());
        raf = new RandomAccessFile(file, "r");
        channel = raf.getChannel();
        mEntity = new ChunkFromFileChannelRequestEntity(channel, getMimeType(), CHUNK_SIZE, file);
        ((ProgressiveDataTransferer) mEntity).addDatatransferProgressListeners(getDataTransferListeners());
        long offset = 0;
        String uriPrefix = client.getBaseUri() + WebdavUtils.encodePath(getRemotePath()) + "-chunking-"
                + Math.abs((new Random()).nextInt(9000) + 1000) + "-";
        long chunkCount = (long) Math.ceil((double) file.length() / CHUNK_SIZE);
        for (int chunkIndex = 0; chunkIndex < chunkCount; chunkIndex++, offset += CHUNK_SIZE) {
            if (mPutMethod != null) {
                mPutMethod.releaseConnection(); // let the connection available for other methods
            }
            mPutMethod = new PutMethod(uriPrefix + chunkCount + "-" + chunkIndex);
            mPutMethod.addRequestHeader(OC_CHUNKED_HEADER, OC_CHUNKED_HEADER);
            ((ChunkFromFileChannelRequestEntity) mEntity).setOffset(offset);
            mPutMethod.setRequestEntity(mEntity);
            status = client.executeMethod(mPutMethod);
            client.exhaustResponse(mPutMethod.getResponseBodyAsStream());
            Log_OC.d(TAG, "Upload of " + getStoragePath() + " to " + getRemotePath() + ", chunk index "
                    + chunkIndex + ", count " + chunkCount + ", HTTP result status " + status);
            if (!isSuccess(status))
                break;
        }
    } finally {
        if (channel != null)
            channel.close();
        if (raf != null)
            raf.close();
        if (mPutMethod != null)
            mPutMethod.releaseConnection(); // let the connection available for other methods
    }
    return status;
}
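Two details make the finally block above safe: a channel obtained from a RandomAccessFile shares its underlying file, so closing either is believed to close the other, and close() is idempotent, so the channel/raf pair can be released in either order without error. A sketch of the same cleanup with try-with-resources (the chunk-upload body is elided; getStoragePath() and mPutMethod come from the surrounding class):

try (RandomAccessFile raf = new RandomAccessFile(new File(getStoragePath()), "r");
     FileChannel channel = raf.getChannel()) {
    // ... build the request entity from the channel and send the chunks ...
} finally {
    if (mPutMethod != null)
        mPutMethod.releaseConnection(); // the channel and file are already closed by this point
}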
From source file:com.stimulus.archiva.domain.Volume.java
public void save() throws ConfigurationException {
    try {
        Config.getConfig().getConfigAutoLoadService().block();
        if (getStatus() != Status.NEW && isEjected())
            return;
        if (getStatus() == Status.EJECTED)
            return;
        if (getStatus() == Status.NEW) {
            setStatus(Status.UNUSED);
            readyFileSystem();
            if (getStatus() == Status.UNUSED) {
                try {
                    calculateSpace();
                } catch (Exception e) {
                    logger.error("failed to retrieve disk space {" + toString() + "}", e);
                }
            }
        }
        synchronized (volumeinfoLock) {
            logger.debug("save() volumeinfo");
            RandomAccessFile randomAccessFile = null;
            try {
                randomAccessFile = getRandomAccessFile("rw");
            } catch (FileNotFoundException fnfe) {
                logger.error("failed to write to volumeinfo:" + fnfe.getMessage(), fnfe);
                logger.warn("ensure mailarchiva service is running under account with sufficient privileges");
                closeVolInfo(randomAccessFile);
                return;
            }
            logger.debug("open volumeinfo file for write {file='" + getPath() + File.separator + INFO_FILE + "'");
            FileChannel channel = randomAccessFile.getChannel();
            /*
            FileLock fileLock = null;
            try {
                fileLock = channel.lock();
            } catch (IOException io) {
                logger.error("failed to obtain lock to volumeinfo file:" + io.getMessage()
                        + " {" + toString() + "}");
                closeVolInfo(randomAccessFile);
                return;
            } catch (OverlappingFileLockException ofle) {
                logger.error("failed to obtain lock to volumeinfo file:" + ofle.getMessage()
                        + " {" + toString() + "}");
                closeVolInfo(randomAccessFile);
                return;
            }
            */
            writeVolumeInfoLines(randomAccessFile);
            /*
            try {
                fileLock.release();
            } catch (IOException io) {
                logger.error("failed to release the write lock on volumeinfo file:" + io.getMessage()
                        + " {" + toString() + "}");
            }
            */
            try {
                channel.close();
            } catch (IOException io) {
                logger.error("failed to close volumeinfo file:" + io.getMessage() + " {" + toString() + "}");
            }
            closeVolInfo(randomAccessFile);
        }
    } finally {
        Config.getConfig().getConfigAutoLoadService().unblock();
    }
}
From source file:com.eucalyptus.blockstorage.DASManager.java
public void dupFile(String oldFileName, String newFileName) {
    FileOutputStream fileOutputStream = null;
    FileChannel out = null;
    FileInputStream fileInputStream = null;
    FileChannel in = null;
    try {
        fileOutputStream = new FileOutputStream(new File(newFileName));
        out = fileOutputStream.getChannel();
        fileInputStream = new FileInputStream(new File(oldFileName));
        in = fileInputStream.getChannel();
        in.transferTo(0, in.size(), out);
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        if (fileOutputStream != null) {
            try {
                out.close();
                fileOutputStream.close();
            } catch (IOException e) {
                LOG.error(e);
            }
        }
        if (fileInputStream != null) {
            try {
                in.close();
                fileInputStream.close();
            } catch (IOException e) {
                LOG.error(e);
            }
        }
    }
}
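The finally block above guards on the streams but closes the channels, a fragile pairing, and the single transferTo() call may copy fewer bytes than requested. A sketch of the same duplication using NIO.2 FileChannel.open (Java 7+), where try-with-resources closes both channels in reverse order on every path:

import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public void dupFile(String oldFileName, String newFileName) throws IOException {
    try (FileChannel in = FileChannel.open(Paths.get(oldFileName), StandardOpenOption.READ);
         FileChannel out = FileChannel.open(Paths.get(newFileName),
                 StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)) {
        long position = 0;
        long size = in.size();
        while (position < size) { // transferTo() may copy fewer bytes than requested
            position += in.transferTo(position, size - position, out);
        }
    }
}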