List of usage examples for the java.io.UncheckedIOException constructor
public UncheckedIOException(IOException cause)
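Before the real-world usages below, here is a minimal, self-contained sketch of the typical pattern (the class name and file paths are placeholders, not taken from any of the projects listed): the constructor wraps a checked IOException so it can propagate out of a lambda, such as a Stream.map callback, that is not allowed to declare checked exceptions.

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Collectors;

public class UncheckedIOExceptionExample {

    public static void main(String[] args) {
        // Placeholder paths; in a real program these would point at existing files.
        List<Path> files = List.of(Path.of("a.txt"), Path.of("b.txt"));

        // Stream.map does not allow checked exceptions, so the IOException from
        // Files.readString is rethrown wrapped in UncheckedIOException(IOException cause).
        List<String> contents = files.stream()
                .map(path -> {
                    try {
                        return Files.readString(path);
                    } catch (IOException e) {
                        throw new UncheckedIOException(e);
                    }
                })
                .collect(Collectors.toList());

        System.out.println("Read " + contents.size() + " files");
    }
}

Callers can still recover the original checked exception later via getCause(), which returns the wrapped IOException.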
From source file: org.haiku.haikudepotserver.pkg.job.PkgScreenshotImportArchiveJobRunner.java

/**
 * <p>Goes through the archive and captures information about each screenshot.</p>
 */
private void collectScreenshotMetadataFromArchive(Map<String, ScreenshotImportMetadatas> data,
        ArchiveInputStream archiveInputStream, ArchiveEntry archiveEntry, String pkgName, int order) {

    ScreenshotImportMetadatas metadatas = data.get(pkgName);

    if (null == metadatas) {
        metadatas = new ScreenshotImportMetadatas();
        ObjectContext context = serverRuntime.newContext();
        Optional<Pkg> pkgOptional = Pkg.tryGetByName(context, pkgName);

        if (!pkgOptional.isPresent()) {
            metadatas.setNotFound();
        }

        data.put(pkgName, metadatas);
    }

    if (!metadatas.isNotFound()) {
        HashingInputStream hashingInputStream = new HashingInputStream(HASH_FUNCTION, archiveInputStream);

        try {
            ByteStreams.copy(hashingInputStream, ByteStreams.nullOutputStream());
        } catch (IOException ioe) {
            throw new UncheckedIOException(ioe);
        }

        metadatas.add(new FromArchiveScreenshotMetadata(order, archiveEntry.getSize(),
                hashingInputStream.hash(), archiveEntry.getName()));
    }
}
From source file: no.imr.stox.functions.utils.RUtils.java

/**
 * Calculate accurate polygons by use of R in a separate process.
 *
 * @param polygons
 * @return
 */
static public PolygonAreaMatrix getAccuratePolygons(String rFolder, MatrixBO polygons, String tempRScriptFileName) {
    PolygonAreaMatrix res = new PolygonAreaMatrix();
    if (tempRScriptFileName == null) {
        tempRScriptFileName = "area.txt";
    }
    String fileName = getTmpDir() + tempRScriptFileName;
    try (PrintWriter pw = new PrintWriter(fileName)) {
        pw.println("Sys.setenv(JAVA_HOME = \"\")");
        pw.println("library(Rstox)");
        // pw.println("source('" + ProjectUtils.getSystemRFolder() + "/rstox_spatial.r" + "')");
        pw.print("pol <- cbind(");
        List<String> rowKeys = polygons.getRowKeys();
        for (int i = 0; i < rowKeys.size(); i++) {
            String strata = rowKeys.get(i); //polygons.getRowKeys()
            pw.print("c('" + strata + "', '"
                    + getPolygonWKT((Geometry) polygons.getRowColValue(strata, Functions.COL_POLVAR_POLYGON)) + "')");
            if (i < rowKeys.size() - 1) {
                pw.print(",");
            } else {
                pw.print(")");
            }
            pw.println();
        }
        pw.println("invisible(apply(cbind(cbind(pol[1,], apply(cbind(pol[2,]), MARGIN=1,");
        pw.println("FUN=function(p) polyArea(p)))), MARGIN=1,");
        pw.println("FUN=function(x) cat(x[1],':', x[2],sep='', ';')))");
        pw.println("quit()");
    } catch (FileNotFoundException ex) {
        throw new UncheckedIOException(ex);
    }
    fileName = fileName.replace("\\", "/");
    String fileNameOut = fileName + ".out";
    String fileName2 = fileName + ".call";
    try (PrintWriter pw = new PrintWriter(fileName2)) {
        pw.println("sink('" + fileNameOut + "')");
        pw.println("source('" + fileName + "')");
    } catch (FileNotFoundException ex) {
        throw new UncheckedIOException(ex);
    }
    callR(rFolder, fileName2, false);
    try {
        Files.lines(Paths.get(fileNameOut)).forEach(s -> {
            String[] strs = s.split(";");
            Arrays.stream(strs).forEach(str_1 -> {
                String[] str = str_1.split(":");
                res.getData().setRowValue(str[0], Double.valueOf(str[1]));
            });
        });
    } catch (IOException ex) {
        Logger.getLogger(RUtils.class.getName()).log(Level.SEVERE, null, ex);
    }
    /*
    java.io.InputStream is = proc.getInputStream();
    java.util.Scanner s = new java.util.Scanner(is).useDelimiter(";");
    while (s.hasNext()) {
        String[] str = s.next().split(":");
        res.getData().setRowValue(str[0], Double.valueOf(str[1]));
        //System.out.println(s.next());
    }
    */
    return res;
}
From source file: org.mitre.mpf.wfm.service.component.StartupComponentRegistrationServiceImpl.java

private static String getPackageTld(Path componentPackage) {
    try (TarArchiveInputStream inputStream = new TarArchiveInputStream(
            new GZIPInputStream(Files.newInputStream(componentPackage)))) {
        TarArchiveEntry tarEntry;
        while ((tarEntry = inputStream.getNextTarEntry()) != null) {
            Path entryPath = Paths.get(tarEntry.getName());
            if (entryPath.getNameCount() > 0) {
                return entryPath.getName(0).toString();
            }
        }
        return null;
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
From source file: com.taobao.android.builder.dependency.parser.DependencyLocationManager.java

public static File getExploreDir(Project project, MavenCoordinates mavenCoordinates, File bundle, String type,
        String path) {
    if (!bundle.exists()) {
        project.getLogger().info("missing " + mavenCoordinates.toString());
    }
    Optional<FileCache> buildCache = AndroidGradleOptions.getBuildCache(project);
    File explodedDir;
    if (shouldUseBuildCache(project, mavenCoordinates, bundle, buildCache)) { //&& !"awb".equals(type)
        try {
            explodedDir = buildCache.get().getFileInCache(PrepareLibraryTask.getBuildCacheInputs(bundle));
            return explodedDir;
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    } else {
        Preconditions.checkState(!AndroidGradleOptions.isImprovedDependencyResolutionEnabled(project),
                "Improved dependency resolution must be used with " + "build cache.");
        return FileUtils.join(project.getBuildDir(), FD_INTERMEDIATES, "exploded-" + type, path);
    }
    //throw new GradleException("set explored dir exception");
}
From source file: org.opencb.opencga.app.cli.analysis.VariantCommandExecutor.java

private void query() throws Exception {
    AnalysisCliOptionsParser.QueryVariantCommandOptions cliOptions = variantCommandOptions.queryVariantCommandOptions;

    Map<Long, String> studyIds = getStudyIds(sessionId);
    Query query = VariantQueryCommandUtils.parseQuery(cliOptions, studyIds);
    QueryOptions queryOptions = VariantQueryCommandUtils.parseQueryOptions(cliOptions);

    VariantFetcher variantFetcher = new VariantFetcher(catalogManager, storageManagerFactory);

    if (cliOptions.count) {
        QueryResult<Long> result = variantFetcher.count(query, sessionId);
        System.out.println("Num. results\t" + result.getResult().get(0));
    } else if (StringUtils.isNotEmpty(cliOptions.groupBy)) {
        ObjectMapper objectMapper = new ObjectMapper();
        QueryResult groupBy = variantFetcher.groupBy(query, queryOptions, cliOptions.groupBy, sessionId);
        System.out.println("rank = " + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(groupBy));
    } else if (StringUtils.isNotEmpty(cliOptions.rank)) {
        ObjectMapper objectMapper = new ObjectMapper();
        QueryResult rank = variantFetcher.rank(query, queryOptions, cliOptions.rank, sessionId);
        System.out.println("rank = " + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(rank));
    } else {
        final String outputFormat;
        if (StringUtils.isNotEmpty(cliOptions.outputFormat)) {
            outputFormat = cliOptions.outputFormat.toLowerCase();
        } else {
            outputFormat = "vcf";
        }

        try (OutputStream outputStream = VariantQueryCommandUtils.getOutputStream(cliOptions);
                VariantDBIterator iterator = variantFetcher.iterator(query, queryOptions, sessionId)) {
            StudyConfiguration studyConfiguration;
            final DataWriter<Variant> exporter;

            switch (VariantQueryCommandUtils.VariantOutputFormat.safeValueOf(outputFormat)) {
                case VCF:
                    // StudyConfigurationManager studyConfigurationManager = variantDBAdaptor.getStudyConfigurationManager();
                    // Map<Long, List<Sample>> samplesMetadata = variantFetcher.getSamplesMetadata(studyId, query, queryOptions, sessionId);
                    // QueryResult<StudyConfiguration> studyConfigurationResult = studyConfigurationManager.getStudyConfiguration(
                    //         query.getAsStringList(RETURNED_STUDIES.key()).get(0), null);
                    studyConfiguration = variantFetcher.getStudyConfiguration(
                            query.getAsIntegerList(RETURNED_STUDIES.key()).get(0), null, sessionId);
                    if (studyConfiguration != null) {
                        // Samples to be returned
                        if (query.containsKey(RETURNED_SAMPLES.key())) {
                            queryOptions.put(RETURNED_SAMPLES.key(), query.get(RETURNED_SAMPLES.key()));
                        }

                        // options.add("includeAnnotations", queryVariantsCommandOptions.includeAnnotations);
                        if (cliOptions.annotations != null) {
                            queryOptions.add("annotations", cliOptions.annotations);
                        }
                        // VariantVcfExporter.htsExport(iterator, studyConfiguration, outputStream, queryOptions);
                        long studyId = variantFetcher.getMainStudyId(query);
                        VariantSourceDBAdaptor sourceDBAdaptor = variantFetcher.getSourceDBAdaptor((int) studyId, sessionId);
                        exporter = new VariantVcfExporter(studyConfiguration, sourceDBAdaptor, outputStream, queryOptions);
                    } else {
                        throw new IllegalArgumentException(
                                "No study found named " + query.getAsStringList(RETURNED_STUDIES.key()).get(0));
                    }
                    break;
                case JSON:
                    // we know that it is JSON, otherwise we have not reached this point
                    exporter = batch -> {
                        batch.forEach(variant -> {
                            try {
                                outputStream.write(variant.toJson().getBytes());
                                outputStream.write('\n');
                            } catch (IOException e) {
                                throw new UncheckedIOException(e);
                            }
                        });
                        return true;
                    };
                    break;
                case AVRO:
                    String codecName = "";
                    if (VariantQueryCommandUtils.VariantOutputFormat.isGzip(outputFormat)) {
                        codecName = "gzip";
                    }
                    if (outputFormat.endsWith("snappy")) {
                        codecName = "snappy";
                    }
                    exporter = new VariantAvroWriter(VariantAvro.getClassSchema(), codecName, outputStream);
                    break;
                case STATS:
                    studyConfiguration = variantFetcher.getStudyConfiguration(
                            query.getAsIntegerList(RETURNED_STUDIES.key()).get(0), null, sessionId);
                    List<String> cohorts = new ArrayList<>(studyConfiguration.getCohortIds().keySet());
                    cohorts.sort(String::compareTo);
                    exporter = new VariantStatsTsvExporter(outputStream, studyConfiguration.getStudyName(), cohorts);
                    break;
                case CELLBASE:
                    exporter = new VariantStatsPopulationFrequencyExporter(outputStream);
                    break;
                default:
                    throw new ParameterException("Unknown output format " + outputFormat);
            }

            ParallelTaskRunner.Task<Variant, Variant> progressTask;
            ExecutorService executor;
            if (VariantQueryCommandUtils.isStandardOutput(cliOptions)) {
                progressTask = batch -> batch;
                executor = null;
            } else {
                executor = Executors.newSingleThreadExecutor();
                Future<Long> future = executor.submit(() -> {
                    Long count = variantFetcher.count(query, sessionId).first();
                    count = Math.min(queryOptions.getLong(QueryOptions.LIMIT, Long.MAX_VALUE),
                            count - queryOptions.getLong(QueryOptions.SKIP, 0));
                    return count;
                });
                executor.shutdown();
                ProgressLogger progressLogger = new ProgressLogger("Export variants", future, 200);
                progressTask = batch -> {
                    progressLogger.increment(batch.size());
                    return batch;
                };
            }

            ParallelTaskRunner.Config config = ParallelTaskRunner.Config.builder().setNumTasks(1)
                    .setBatchSize(10).setAbortOnFail(true).build();
            ParallelTaskRunner<Variant, Variant> ptr = new ParallelTaskRunner<>(batchSize -> {
                List<Variant> variants = new ArrayList<>(batchSize);
                while (iterator.hasNext() && variants.size() < batchSize) {
                    variants.add(iterator.next());
                }
                return variants;
            }, progressTask, exporter, config);

            ptr.run();
            if (executor != null) {
                executor.shutdownNow();
            }

            logger.info("Time fetching data: " + iterator.getTimeFetching(TimeUnit.MILLISECONDS) / 1000.0 + "s");
            logger.info("Time converting data: " + iterator.getTimeConverting(TimeUnit.MILLISECONDS) / 1000.0 + "s");
        }
    }
}
From source file: cn.edu.zjnu.acm.judge.core.Judger.java

private void judgeInternal(RunRecord runRecord) {
    Path workDirectory = judgeConfiguration.getWorkDirectory(runRecord.getSubmissionId());
    runRecord.setWorkDirectory(workDirectory);
    try {
        if (compile(runRecord)) {
            runProcess(runRecord);
        }
        judgeServerService.delete(workDirectory);
    } catch (IOException ex) {
        throw new UncheckedIOException(ex);
    }
}
From source file: com.github.anba.es6draft.util.Resources.java

private static <T extends TestInfo> Function<Path, T> mapper(BiFunction<Path, Iterator<String>, T> fn) {
    return file -> {
        try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
            return fn.apply(file, new LineIterator(reader));
        } catch (IOException e) {
            // Function.apply cannot throw a checked exception, so rethrow it unchecked.
            throw new UncheckedIOException(e);
        }
    };
}
From source file: org.apache.hadoop.hbase.client.TestAsyncTable.java

@Test
public void testCheckAndMutate() throws InterruptedException, ExecutionException {
    AsyncTableBase table = getTable.get();
    int count = 10;
    CountDownLatch putLatch = new CountDownLatch(count + 1);
    table.put(new Put(row).addColumn(FAMILY, QUALIFIER, VALUE)).thenRun(() -> putLatch.countDown());
    IntStream.range(0, count)
            .forEach(i -> table.put(new Put(row).addColumn(FAMILY, concat(QUALIFIER, i), VALUE))
                    .thenRun(() -> putLatch.countDown()));
    putLatch.await();

    AtomicInteger successCount = new AtomicInteger(0);
    AtomicInteger successIndex = new AtomicInteger(-1);
    CountDownLatch mutateLatch = new CountDownLatch(count);
    IntStream.range(0, count).forEach(i -> {
        RowMutations mutation = new RowMutations(row);
        try {
            mutation.add(new Delete(row).addColumn(FAMILY, QUALIFIER));
            mutation.add(new Put(row).addColumn(FAMILY, concat(QUALIFIER, i), concat(VALUE, i)));
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        table.checkAndMutate(row, FAMILY, QUALIFIER, VALUE, mutation).thenAccept(x -> {
            if (x) {
                successCount.incrementAndGet();
                successIndex.set(i);
            }
            mutateLatch.countDown();
        });
    });
    mutateLatch.await();
    assertEquals(1, successCount.get());

    Result result = table.get(new Get(row)).get();
    IntStream.range(0, count).forEach(i -> {
        if (i == successIndex.get()) {
            assertArrayEquals(concat(VALUE, i), result.getValue(FAMILY, concat(QUALIFIER, i)));
        } else {
            assertArrayEquals(VALUE, result.getValue(FAMILY, concat(QUALIFIER, i)));
        }
    });
}
From source file: org.nuxeo.ecm.core.io.download.DownloadServiceImpl.java

protected void transferBlobWithByteRange(Blob blob, ByteRange byteRange, HttpServletResponse response)
        throws UncheckedIOException {
    transferBlobWithByteRange(blob, byteRange, () -> {
        try {
            return response.getOutputStream();
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    });
    try {
        response.flushBuffer();
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
From source file: com.joyent.manta.client.multipart.JobsMultipartManagerIT.java

public void canReturnEmptyMultipartList() throws IOException {
    final List<MantaMultipartUpload> list;
    try (Stream<MantaMultipartUpload> inProgress = multipart.listInProgress()) {
        list = inProgress.collect(Collectors.toList());
    }

    if (!list.isEmpty()) {
        System.err.println("List should be empty. Actually had " + list.size() + " elements");
        list.forEach(element -> {
            System.err.println(element.getPath());

            if (element instanceof JobsMultipartUpload) {
                JobsMultipartUpload jobsUpload = (JobsMultipartUpload) element;

                try (Stream<MantaMultipartUploadPart> innerStream = multipart.listParts(jobsUpload)) {
                    innerStream.forEach(part -> System.err.println("  " + part.getObjectPath()));
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            }
        });
        throw new SkipException("List should be empty. Actually had " + list.size() + " elements");
    } else {
        assertTrue(true);
    }
}