List of usage examples for java.nio.file StandardOpenOption APPEND
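StandardOpenOption.APPEND opens a file for writing with every write directed to the end of the file, so existing content is preserved rather than truncated. It is typically paired with StandardOpenOption.CREATE so the file is created on the first write. Before the real-world examples below, here is a minimal, self-contained sketch of the same pattern (the file name and content are illustrative only, not taken from any of the projects listed):

import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class AppendExample {
    public static void main(String[] args) throws Exception {
        // Illustrative path; CREATE makes the file if it does not exist,
        // APPEND positions every write at the end of the file.
        Path log = Paths.get("app.log");
        Files.write(log, "first line\n".getBytes(StandardCharsets.UTF_8),
                StandardOpenOption.CREATE, StandardOpenOption.APPEND);
        Files.write(log, "second line\n".getBytes(StandardCharsets.UTF_8),
                StandardOpenOption.CREATE, StandardOpenOption.APPEND);
    }
}

Running the sketch twice keeps adding lines instead of overwriting the file, which is the behaviour the examples below rely on.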
From source file: de.appsolve.padelcampus.utils.HtmlResourceUtil.java

private String concatenateCss(ServletContext context, Path path, Path outFile)
        throws FileNotFoundException, IOException {
    DirectoryStream<Path> cssFiles = Files.newDirectoryStream(path, "*.css");
    if (Files.exists(outFile)) {
        Files.delete(outFile);
    }
    List<Path> sortedFiles = new ArrayList<>();
    for (Path cssFile : cssFiles) {
        sortedFiles.add(cssFile);
    }
    Collections.sort(sortedFiles, new PathByFileNameComparator());
    for (Path cssFile : sortedFiles) {
        // CREATE the combined file on the first write, then APPEND each stylesheet to it
        Files.write(outFile, Files.readAllBytes(cssFile), StandardOpenOption.CREATE, StandardOpenOption.APPEND);
    }
    byte[] cssData;
    if (Files.exists(outFile)) {
        cssData = Files.readAllBytes(outFile);
    } else {
        // read from classpath
        InputStream is = context.getResourceAsStream(ALL_MIN_CSS);
        cssData = IOUtils.toByteArray(is);
    }
    String css = new String(cssData, Constants.UTF8);
    return css;
}
From source file: net.dv8tion.jda.utils.SimpleLog.java

private static void logToFiles(String msg, Level level) {
    Set<File> files = collectFiles(level);
    for (File file : files) {
        try {
            Files.write(file.toPath(), (msg + '\n').getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE, StandardOpenOption.APPEND);
        } catch (IOException e) {
            JDAImpl.LOG.fatal("Could not write log to logFile...");
            JDAImpl.LOG.log(e);
        }
    }
}
From source file: net.dv8tion.jda.core.utils.SimpleLog.java

private static void logToFiles(String msg, Level level) {
    Set<File> files = collectFiles(level);
    for (File file : files) {
        try {
            Files.write(file.toPath(), (msg + '\n').getBytes(StandardCharsets.UTF_8),
                    StandardOpenOption.CREATE, StandardOpenOption.APPEND);
        } catch (IOException e) {
            e.printStackTrace();
            // JDAImpl.LOG.fatal("Could not write log to logFile...");
            // JDAImpl.LOG.log(e);
        }
    }
}
From source file: org.apache.hadoop.hive.ql.MetaStoreDumpUtility.java

public static void setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(HiveConf conf, String tmpBaseDir) {
    Connection conn = null;
    try {
        Properties props = new Properties(); // connection properties
        props.put("user", conf.get("javax.jdo.option.ConnectionUserName"));
        props.put("password", conf.get("javax.jdo.option.ConnectionPassword"));
        String url = conf.get("javax.jdo.option.ConnectionURL");
        conn = DriverManager.getConnection(url, props);
        ResultSet rs = null;
        Statement s = conn.createStatement();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Connected to metastore database ");
        }
        String mdbPath = HiveTestEnvSetup.HIVE_ROOT + "/data/files/tpcds-perf/metastore_export/";
        // Setup the table column stats
        BufferedReader br = new BufferedReader(new FileReader(new File(
                HiveTestEnvSetup.HIVE_ROOT + "/metastore/scripts/upgrade/derby/022-HIVE-11107.derby.sql")));
        String command;
        s.execute("DROP TABLE APP.TABLE_PARAMS");
        s.execute("DROP TABLE APP.TAB_COL_STATS");
        // Create the column stats table
        while ((command = br.readLine()) != null) {
            if (!command.endsWith(";")) {
                continue;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Going to run command : " + command);
            }
            PreparedStatement psCommand = conn.prepareStatement(command.substring(0, command.length() - 1));
            psCommand.execute();
            psCommand.close();
            if (LOG.isDebugEnabled()) {
                LOG.debug("successfully completed " + command);
            }
        }
        br.close();
        java.nio.file.Path tabColStatsCsv = FileSystems.getDefault().getPath(mdbPath, "csv", "TAB_COL_STATS.txt.bz2");
        java.nio.file.Path tabParamsCsv = FileSystems.getDefault().getPath(mdbPath, "csv", "TABLE_PARAMS.txt.bz2");
        // Set up the foreign key constraints properly in the TAB_COL_STATS data
        java.nio.file.Path tmpFileLoc1 = FileSystems.getDefault().getPath(tmpBaseDir, "TAB_COL_STATS.txt");
        java.nio.file.Path tmpFileLoc2 = FileSystems.getDefault().getPath(tmpBaseDir, "TABLE_PARAMS.txt");

        class MyComp implements Comparator<String> {
            @Override
            public int compare(String str1, String str2) {
                if (str2.length() != str1.length()) {
                    return str2.length() - str1.length();
                }
                return str1.compareTo(str2);
            }
        }

        final SortedMap<String, Integer> tableNameToID = new TreeMap<String, Integer>(new MyComp());
        rs = s.executeQuery("SELECT * FROM APP.TBLS");
        while (rs.next()) {
            String tblName = rs.getString("TBL_NAME");
            Integer tblId = rs.getInt("TBL_ID");
            tableNameToID.put(tblName, tblId);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Resultset : " + tblName + " | " + tblId);
            }
        }

        final Map<String, Map<String, String>> data = new HashMap<>();
        rs = s.executeQuery("select TBLS.TBL_NAME, a.COLUMN_NAME, a.TYPE_NAME from "
                + "(select COLUMN_NAME, TYPE_NAME, SDS.SD_ID from APP.COLUMNS_V2 join APP.SDS on SDS.CD_ID = COLUMNS_V2.CD_ID) a"
                + " join APP.TBLS on TBLS.SD_ID = a.SD_ID");
        while (rs.next()) {
            String tblName = rs.getString(1);
            String colName = rs.getString(2);
            String typeName = rs.getString(3);
            Map<String, String> cols = data.get(tblName);
            if (null == cols) {
                cols = new HashMap<>();
            }
            cols.put(colName, typeName);
            data.put(tblName, cols);
        }

        BufferedReader reader = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabColStatsCsv, StandardOpenOption.READ))));
        Stream<String> replaced = reader.lines().parallel().map(str -> {
            String[] splits = str.split(",");
            String tblName = splits[0];
            String colName = splits[1];
            Integer tblID = tableNameToID.get(tblName);
            StringBuilder sb = new StringBuilder(
                    "default@" + tblName + "@" + colName + "@" + data.get(tblName).get(colName) + "@");
            for (int i = 2; i < splits.length; i++) {
                sb.append(splits[i] + "@");
            }
            // Add tbl_id and empty bitvector
            return sb.append(tblID).append("@").toString();
        });
        Files.write(tmpFileLoc1, (Iterable<String>) replaced::iterator);
        replaced.close();
        reader.close();

        BufferedReader reader2 = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabParamsCsv, StandardOpenOption.READ))));
        final Map<String, String> colStats = new ConcurrentHashMap<>();
        Stream<String> replacedStream = reader2.lines().parallel().map(str -> {
            String[] splits = str.split("_@");
            String tblName = splits[0];
            Integer tblId = tableNameToID.get(tblName);
            Map<String, String> cols = data.get(tblName);
            StringBuilder sb = new StringBuilder();
            sb.append("{\"COLUMN_STATS\":{");
            for (String colName : cols.keySet()) {
                sb.append("\"" + colName + "\":\"true\",");
            }
            sb.append("},\"BASIC_STATS\":\"true\"}");
            colStats.put(tblId.toString(), sb.toString());
            return tblId.toString() + "@" + splits[1];
        });
        Files.write(tmpFileLoc2, (Iterable<String>) replacedStream::iterator);
        Files.write(tmpFileLoc2, (Iterable<String>) colStats.entrySet().stream()
                .map(map -> map.getKey() + "@COLUMN_STATS_ACCURATE@" + map.getValue())::iterator,
                StandardOpenOption.APPEND);
        replacedStream.close();
        reader2.close();

        // Load the column stats and table params with 30 TB scale
        String importStatement1 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TAB_COL_STATS" + "', '"
                + tmpFileLoc1.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";
        String importStatement2 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TABLE_PARAMS" + "', '"
                + tmpFileLoc2.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";
        PreparedStatement psImport1 = conn.prepareStatement(importStatement1);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Going to execute : " + importStatement1);
        }
        psImport1.execute();
        psImport1.close();
        if (LOG.isDebugEnabled()) {
            LOG.debug("successfully completed " + importStatement1);
        }
        PreparedStatement psImport2 = conn.prepareStatement(importStatement2);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Going to execute : " + importStatement2);
        }
        psImport2.execute();
        psImport2.close();
        if (LOG.isDebugEnabled()) {
            LOG.debug("successfully completed " + importStatement2);
        }
        s.execute("ALTER TABLE APP.TAB_COL_STATS ADD COLUMN CAT_NAME VARCHAR(256)");
        s.execute("update APP.TAB_COL_STATS set CAT_NAME = '" + Warehouse.DEFAULT_CATALOG_NAME + "'");
        s.close();
        conn.close();
    } catch (Exception e) {
        throw new RuntimeException("error while loading tpcds metastore dump", e);
    }
}
From source file: de.digiway.rapidbreeze.server.model.download.Download.java

/**
 * Starts this {@linkplain Download}.
 */
void start() {
    switch (statusHandler.getCurrentStatus()) {
    case RUNNING:
        return;
    case PAUSE:
        statusHandler.newStatus(DownloadStatus.RUNNING);
        return;
    }
    try {
        long startAt = 0;
        if (Files.exists(tempFile)) {
            try {
                startAt = Files.size(tempFile);
            } catch (IOException ex) {
                // File might be removed in the meantime
                startAt = 0;
            }
        }
        StorageProviderDownloadClient storageDownload = getDownloadClient();
        throttledInputStream = new ThrottledInputStream(storageDownload.start(url, startAt));
        throttledInputStream.setThrottle(throttleMaxBytesPerSecond);
        sourceChannel = Channels.newChannel(throttledInputStream);
        targetChannel = FileChannel.open(tempFile, StandardOpenOption.WRITE, StandardOpenOption.APPEND,
                StandardOpenOption.CREATE);
        targetChannel.position(startAt);
    } catch (IOException | RuntimeException ex) {
        LOG.log(Level.SEVERE, "An exception occured during data transfer setup for "
                + Download.class.getSimpleName() + ":" + this, ex);
        closeChannels();
        cachedUrlStatus = null;
        statusHandler.newException(ex);
        return;
    }
    done = false;
    statusHandler.newStatus(DownloadStatus.RUNNING);
}
From source file: com.streamsets.datacollector.publicrestapi.CredentialsDeploymentResource.java

private void deployDPMToken(CredentialsBeanJson credentialsBeanJson) throws IOException {
    LOG.info("Deploying DPM token");
    File dpmProperties = new File(runtimeInfo.getConfigDir(), "dpm.properties");
    Configuration conf = new Configuration();
    Files.write(Paths.get(runtimeInfo.getConfigDir(), "application-token.txt"),
            credentialsBeanJson.getToken().getBytes(Charsets.UTF_8), CREATE, WRITE);
    try (FileReader reader = new FileReader(dpmProperties)) {
        conf.load(reader);
    }
    conf.unset(RemoteSSOService.DPM_BASE_URL_CONFIG);
    conf.set(RemoteSSOService.DPM_ENABLED, true);
    conf.set(RemoteSSOService.SECURITY_SERVICE_APP_AUTH_TOKEN_CONFIG,
            Configuration.FileRef.PREFIX + APPLICATION_TOKEN_TXT + Configuration.FileRef.SUFFIX);
    conf.set(RemoteSSOService.DPM_DEPLOYMENT_ID, credentialsBeanJson.getDeploymentId());
    runtimeInfo.setDeploymentId(credentialsBeanJson.getDeploymentId());
    if (!CollectionUtils.isEmpty(credentialsBeanJson.getLabels())) {
        String labelsString = StringUtils.join(credentialsBeanJson.getLabels().toArray(), ",");
        LOG.info("SDC will have the following Labels: {}", labelsString);
        conf.set(RemoteEventHandlerTask.REMOTE_JOB_LABELS, labelsString);
    }
    try (FileWriter writer = new FileWriter(dpmProperties)) {
        conf.save(writer);
    }
    Files.write(Paths.get(dpmProperties.getPath()),
            (RemoteSSOService.DPM_BASE_URL_CONFIG + "=" + credentialsBeanJson.getDpmUrl()).getBytes(),
            StandardOpenOption.APPEND);
    runtimeInfo.setDPMEnabled(true);
    LOG.info("DPM token deployed");
}
From source file: org.linagora.linshare.webservice.userv2.impl.FlowDocumentUploaderRestServiceImpl.java

@Path("/")
@POST
@Consumes("multipart/form-data")
@Override
public FlowDto uploadChunk(@Multipart(CHUNK_NUMBER) long chunkNumber, @Multipart(TOTAL_CHUNKS) long totalChunks,
        @Multipart(CHUNK_SIZE) long chunkSize, @Multipart(CURRENT_CHUNK_SIZE) long currentChunkSize,
        @Multipart(TOTAL_SIZE) long totalSize, @Multipart(IDENTIFIER) String identifier,
        @Multipart(FILENAME) String filename, @Multipart(RELATIVE_PATH) String relativePath,
        @Multipart(FILE) InputStream file, MultipartBody body,
        @Multipart(value = WORK_GROUP_UUID, required = false) String workGroupUuid,
        @Multipart(value = WORK_GROUP_FOLDER_UUID, required = false) String workGroupFolderUuid,
        @Multipart(value = ASYNC_TASK, required = false) boolean async) throws BusinessException {
    logger.debug("upload chunk number : " + chunkNumber);
    identifier = cleanIdentifier(identifier);
    boolean isValid = FlowUploaderUtils.isValid(chunkNumber, chunkSize, totalSize, identifier, filename);
    Validate.isTrue(isValid);
    checkIfMaintenanceIsEnabled();
    FlowDto flow = new FlowDto(chunkNumber);
    try {
        logger.debug("writing chunk number : " + chunkNumber);
        java.nio.file.Path tempFile = FlowUploaderUtils.getTempFile(identifier, chunkedFiles);
        ChunkedFile currentChunkedFile = chunkedFiles.get(identifier);
        if (!currentChunkedFile.hasChunk(chunkNumber)) {
            FileChannel fc = FileChannel.open(tempFile, StandardOpenOption.CREATE, StandardOpenOption.APPEND);
            ByteArrayOutputStream output = new ByteArrayOutputStream();
            IOUtils.copy(file, output);
            fc.write(ByteBuffer.wrap(output.toByteArray()), (chunkNumber - 1) * chunkSize);
            fc.close();
            if (sizeValidation) {
                if (output.size() != currentChunkSize) {
                    String msg = String.format("File size does not match, found : %1$d, announced : %2$d",
                            output.size(), currentChunkSize);
                    logger.error(msg);
                    flow.setChunkUploadSuccess(false);
                    flow.setErrorMessage(msg);
                    return flow;
                }
            }
            currentChunkedFile.addChunk(chunkNumber);
        } else {
            logger.error("currentChunkedFile.hasChunk(chunkNumber) !!! " + currentChunkedFile);
            logger.error("chunkedNumber skipped : " + chunkNumber);
        }
        logger.debug("nb uploading files : " + chunkedFiles.size());
        logger.debug("current chuckedfile uuid : " + identifier);
        logger.debug("current chuckedfiles" + chunkedFiles.toString());
        if (FlowUploaderUtils.isUploadFinished(identifier, chunkSize, totalSize, chunkedFiles)) {
            flow.setLastChunk(true);
            logger.debug("upload finished : " + chunkNumber + " : " + identifier);
            InputStream inputStream = Files.newInputStream(tempFile, StandardOpenOption.READ);
            File tempFile2 = getTempFile(inputStream, "rest-flowuploader", filename);
            if (sizeValidation) {
                long currSize = tempFile2.length();
                if (currSize != totalSize) {
                    String msg = String.format("File size does not match, found : %1$d, announced : %2$d",
                            currSize, totalSize);
                    logger.error(msg);
                    flow.setChunkUploadSuccess(false);
                    flow.setErrorMessage(msg);
                    return flow;
                }
            }
            EntryDto uploadedDocument = new EntryDto();
            flow.setIsAsync(async);
            boolean isWorkGroup = !Strings.isNullOrEmpty(workGroupUuid);
            if (async) {
                logger.debug("Async mode is used");
                // Asynchronous mode
                AccountDto actorDto = documentFacade.getAuthenticatedAccountDto();
                AsyncTaskDto asyncTask = null;
                try {
                    if (isWorkGroup) {
                        ThreadEntryTaskContext threadEntryTaskContext = new ThreadEntryTaskContext(actorDto,
                                actorDto.getUuid(), workGroupUuid, tempFile2, filename, workGroupFolderUuid);
                        asyncTask = asyncTaskFacade.create(totalSize, getTransfertDuration(identifier), filename,
                                null, AsyncTaskType.THREAD_ENTRY_UPLOAD);
                        ThreadEntryUploadAsyncTask task = new ThreadEntryUploadAsyncTask(threadEntryAsyncFacade,
                                threadEntryTaskContext, asyncTask);
                        taskExecutor.execute(task);
                        flow.completeAsyncTransfert(asyncTask);
                    } else {
                        DocumentTaskContext documentTaskContext = new DocumentTaskContext(actorDto,
                                actorDto.getUuid(), tempFile2, filename, null, null);
                        asyncTask = asyncTaskFacade.create(totalSize, getTransfertDuration(identifier), filename,
                                null, AsyncTaskType.DOCUMENT_UPLOAD);
                        DocumentUploadAsyncTask task = new DocumentUploadAsyncTask(documentAsyncFacade,
                                documentTaskContext, asyncTask);
                        taskExecutor.execute(task);
                        flow.completeAsyncTransfert(asyncTask);
                    }
                } catch (Exception e) {
                    logAsyncFailure(asyncTask, e);
                    deleteTempFile(tempFile2);
                    ChunkedFile remove = chunkedFiles.remove(identifier);
                    Files.deleteIfExists(remove.getPath());
                    throw e;
                }
            } else {
                try {
                    if (isWorkGroup) {
                        uploadedDocument = threadEntryFacade.create(null, workGroupUuid, workGroupFolderUuid,
                                tempFile2, filename);
                    } else {
                        uploadedDocument = documentFacade.create(tempFile2, filename, "", null);
                    }
                    flow.completeTransfert(uploadedDocument);
                } finally {
                    deleteTempFile(tempFile2);
                    ChunkedFile remove = chunkedFiles.remove(identifier);
                    if (remove != null) {
                        Files.deleteIfExists(remove.getPath());
                    } else {
                        logger.error("Should not happen !!!");
                        logger.error("chunk number: " + chunkNumber);
                        logger.error("chunk identifier: " + identifier);
                        logger.error("chunk filename: " + filename);
                        logger.error("chunks : " + chunkedFiles.toString());
                    }
                }
            }
            return flow;
        } else {
            logger.debug("upload pending ");
            flow.setChunkUploadSuccess(true);
        }
    } catch (BusinessException e) {
        logger.error(e.getMessage());
        logger.debug("Exception : ", e);
        flow.setChunkUploadSuccess(false);
        flow.setErrorMessage(e.getMessage());
        flow.setErrCode(e.getErrorCode().getCode());
    } catch (Exception e) {
        logger.error(e.getMessage());
        logger.debug("Exception : ", e);
        flow.setChunkUploadSuccess(false);
        flow.setErrorMessage(e.getMessage());
    }
    return flow;
}
From source file: io.github.swagger2markup.Swagger2MarkupConverter.java

/**
 * Converts the Swagger specification to the given {@code outputFile}.<br>
 * An extension identifying the markup language will be automatically added to the file name.
 *
 * @param outputFile the output file
 */
public void toFile(Path outputFile) {
    Validate.notNull(outputFile, "outputFile must not be null");
    applyOverviewDocument().writeToFile(outputFile, StandardCharsets.UTF_8);
    applyPathsDocument().writeToFile(outputFile, StandardCharsets.UTF_8, StandardOpenOption.APPEND);
    applyDefinitionsDocument().writeToFile(outputFile, StandardCharsets.UTF_8, StandardOpenOption.APPEND);
    applySecurityDocument().writeToFile(outputFile, StandardCharsets.UTF_8, StandardOpenOption.APPEND);
}
From source file: de.huberlin.wbi.cuneiform.core.cre.LocalThread.java

@Override
public void run() {
    Path scriptFile, location, successMarker, reportFile, callLocation, stdErrFile, stdOutFile;
    // Path lockMarker;
    Process process;
    int exitValue;
    Set<JsonReportEntry> report;
    JsonReportEntry entry;
    String line;
    StringBuffer buf;
    Path srcPath, destPath;
    ProcessBuilder processBuilder;
    Ticket ticket;
    String script, stdOut, stdErr;
    long tic, toc;
    JSONObject obj;
    Message msg;
    Charset cs;
    int trial;
    boolean suc;
    Exception ex;

    if (log.isDebugEnabled())
        log.debug("Starting up local thread for ticket " + invoc.getTicketId() + ".");

    if (invoc == null)
        throw new NullPointerException("Invocation must not be null.");

    ticket = invoc.getTicket();
    process = null;
    stdOut = null;
    stdErr = null;
    // lockMarker = null;
    script = null;
    successMarker = null;
    cs = Charset.forName("UTF-8");

    try {
        callLocation = Paths.get(System.getProperty("user.dir"));
        location = buildDir.resolve(String.valueOf(invoc.getTicketId()));
        // lockMarker = location.resolve( Invocation.LOCK_FILENAME );
        successMarker = location.resolve(Invocation.SUCCESS_FILENAME);
        reportFile = location.resolve(Invocation.REPORT_FILENAME);
        script = invoc.toScript();

        // if( Files.exists( lockMarker ) )
        //     throw new IOException( "Lock held on ticket "+invoc.getTicketId() );

        if (!Files.exists(successMarker)) {
            deleteIfExists(location);
            Files.createDirectories(location);
            // Files.createFile( lockMarker );

            scriptFile = invoc.getExecutablePath(location);
            Files.createFile(scriptFile,
                    PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxr-x---")));

            // write executable script
            try (BufferedWriter writer = Files.newBufferedWriter(scriptFile, cs, StandardOpenOption.CREATE)) {
                writer.write(script);
            }

            // write executable log entry
            try (BufferedWriter writer = Files.newBufferedWriter(reportFile, cs, StandardOpenOption.CREATE)) {
                writer.write(ticket.getExecutableLogEntry().toString());
                writer.write('\n');
            }

            for (String filename : invoc.getStageInList()) {
                if (filename.charAt(0) == '/')
                    throw new UnsupportedOperationException("Absolute path encountered '" + filename + "'.");
                srcPath = centralRepo.resolve(filename);
                destPath = location.resolve(filename);
                if (!Files.exists(srcPath)) {
                    srcPath = callLocation.resolve(filename);
                    if (log.isTraceEnabled())
                        log.trace("Resolving relative path '" + srcPath + "'.");
                } else if (log.isTraceEnabled())
                    log.trace("Resolving path to central repository '" + srcPath + "'.");
                if (log.isTraceEnabled())
                    log.trace("Trying to create symbolic link from '" + srcPath + "' to '" + destPath + "'.");
                if (!Files.exists(destPath.getParent()))
                    Files.createDirectories(destPath.getParent());
                Files.createSymbolicLink(destPath, srcPath);
            }

            // run script
            processBuilder = new ProcessBuilder(invoc.getCmd());
            processBuilder.directory(location.toFile());
            stdOutFile = location.resolve(Invocation.STDOUT_FILENAME);
            stdErrFile = location.resolve(Invocation.STDERR_FILENAME);
            processBuilder.redirectOutput(stdOutFile.toFile());
            processBuilder.redirectError(stdErrFile.toFile());

            trial = 1;
            suc = false;
            ex = null;
            tic = System.currentTimeMillis();
            do {
                try {
                    process = processBuilder.start();
                    suc = true;
                } catch (IOException e) {
                    ex = e;
                    if (log.isWarnEnabled())
                        log.warn("Unable to start process on trial " + (trial++) + " Waiting " + WAIT_INTERVAL
                                + "ms: " + e.getMessage());
                    Thread.sleep(WAIT_INTERVAL);
                }
            } while (suc == false && trial <= MAX_TRIALS);

            if (process == null) {
                ticketSrc.sendMsg(new TicketFailedMsg(cre, ticket, ex, script, null, null));
                // Files.delete( lockMarker );
                return;
            }

            exitValue = process.waitFor();
            toc = System.currentTimeMillis();

            try (BufferedWriter writer = Files.newBufferedWriter(reportFile, cs, StandardOpenOption.APPEND)) {
                obj = new JSONObject();
                obj.put(JsonReportEntry.LABEL_REALTIME, toc - tic);
                entry = invoc.createJsonReportEntry(tic, JsonReportEntry.KEY_INVOC_TIME, obj);
                writer.write(entry.toString());
                writer.write('\n');
                try (BufferedReader reader = Files.newBufferedReader(stdOutFile, cs)) {
                    buf = new StringBuffer();
                    while ((line = reader.readLine()) != null)
                        buf.append(line).append('\n');
                    stdOut = buf.toString();
                    if (!stdOut.isEmpty()) {
                        entry = invoc.createJsonReportEntry(JsonReportEntry.KEY_INVOC_STDOUT, stdOut);
                        writer.write(entry.toString());
                        writer.write('\n');
                    }
                }
                try (BufferedReader reader = Files.newBufferedReader(stdErrFile, cs)) {
                    buf = new StringBuffer();
                    while ((line = reader.readLine()) != null)
                        buf.append(line).append('\n');
                    stdErr = buf.toString();
                    if (!stdErr.isEmpty()) {
                        entry = invoc.createJsonReportEntry(JsonReportEntry.KEY_INVOC_STDERR, stdErr);
                        writer.write(entry.toString());
                        writer.write('\n');
                    }
                }
                if (exitValue == 0)
                    Files.createFile(successMarker);
                else {
                    ticketSrc.sendMsg(new TicketFailedMsg(cre, ticket, null, script, stdOut, stdErr));
                    // Files.delete( lockMarker );
                    return;
                }
            }
        }

        // gather report
        report = new HashSet<>();
        try (BufferedReader reader = Files.newBufferedReader(reportFile, cs)) {
            while ((line = reader.readLine()) != null) {
                line = line.trim();
                if (line.isEmpty())
                    continue;
                entry = new JsonReportEntry(line);
                // If the report comes from the hard cache then the run id
                // is different from the run id of this invocation. This is
                // corrected here.
                entry.setRunId(invoc.getRunId());
                report.add(entry);
            }
        }
        invoc.evalReport(report);

        // create link in central data repository
        for (String f : invoc.getStageOutList()) {
            srcPath = location.resolve(f);
            destPath = centralRepo.resolve(f);
            if (Files.exists(destPath))
                continue;
            if (log.isTraceEnabled())
                log.trace("Creating link from " + srcPath + " to " + destPath + ".");
            Files.createSymbolicLink(destPath, srcPath);
        }

        ticketSrc.sendMsg(new TicketFinishedMsg(cre, invoc.getTicket(), report));

        if (log.isTraceEnabled())
            log.trace("Local thread ran through without exception.");

        // Files.deleteIfExists( lockMarker );
    } catch (InterruptedException e) {
        if (log.isTraceEnabled())
            log.trace("Local thread has been interrupted.");
    } catch (Exception e) {
        if (log.isTraceEnabled())
            log.trace("Something went wrong. Deleting success marker if present.");
        if (successMarker != null)
            try {
                Files.deleteIfExists(successMarker);
            } catch (IOException e1) {
                e1.printStackTrace();
            }
        msg = new TicketFailedMsg(cre, ticket, e, script, stdOut, stdErr);
        ticketSrc.sendMsg(msg);
    } finally {
        if (process != null) {
            if (log.isDebugEnabled())
                log.debug("Stopping local thread for ticket " + invoc.getTicketId() + ".");
            process.destroy();
        }
    }
}
From source file: io.github.swagger2markup.Swagger2MarkupConverter.java

/**
 * Converts the Swagger specification to the given {@code outputFile}.
 *
 * @param outputFile the output file
 */
public void toFileWithoutExtension(Path outputFile) {
    Validate.notNull(outputFile, "outputFile must not be null");
    applyOverviewDocument().writeToFileWithoutExtension(outputFile, StandardCharsets.UTF_8);
    applyPathsDocument().writeToFileWithoutExtension(outputFile, StandardCharsets.UTF_8,
            StandardOpenOption.APPEND);
    applyDefinitionsDocument().writeToFileWithoutExtension(outputFile, StandardCharsets.UTF_8,
            StandardOpenOption.APPEND);
    applySecurityDocument().writeToFileWithoutExtension(outputFile, StandardCharsets.UTF_8,
            StandardOpenOption.APPEND);
}