Usage examples for java.nio.file.Files.newInputStream
public static InputStream newInputStream(Path path, OpenOption... options) throws IOException
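The method opens the file located by the given path and returns an InputStream for reading it; passing StandardOpenOption.READ is equivalent to passing no options at all. A minimal, self-contained sketch (the file path below is purely illustrative):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class NewInputStreamSketch {
    public static void main(String[] args) throws IOException {
        // Hypothetical path, for illustration only
        Path path = Paths.get("data/example.txt");
        // try-with-resources closes the underlying channel when done
        try (InputStream in = Files.newInputStream(path, StandardOpenOption.READ)) {
            byte[] buffer = new byte[8192];
            int count;
            while ((count = in.read(buffer)) != -1) {
                System.out.write(buffer, 0, count);
            }
            System.out.flush();
        }
    }
}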
From source file:org.apache.marmotta.platform.ldp.services.LdpBinaryStoreServiceImpl.java
@Override
public InputStream read(String resource) throws IOException {
    try {
        Path file = getFile(resource);
        if (Files.exists(file)) {
            return Files.newInputStream(file, StandardOpenOption.READ);
        } else {
            log.warn("{} not found in binary storage ({})", resource, file);
            return null;
        }
    } catch (URISyntaxException e) {
        log.error("Error reading resource {}: {}", resource, e.getMessage());
        return null;
    }
}
From source file:at.tfr.securefs.xnio.MessageHandlerImpl.java
@Override
public void handleMessage(String json, MessageSender messageSender) throws IOException {
    log.debug("handleMessage: " + json);
    final Message message = objectMapper.readValue(json, Message.class);
    Path path = configuration.getBasePath().resolve(message.getPath());
    if (!path.relativize(configuration.getBasePath()).toString().equals("..")) {
        throw new SecurityException("invalid path spec: " + message.getPath());
    }
    try {
        final String uniqueKey = message.getUniqueKey();
        // find the Channel for this data stream:
        StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>> info =
                activeStreams.getStreams().get(uniqueKey);
        if (message.getType() == MessageType.OPEN && info != null) {
            log.warn("illegal state on Open stream: " + message);
            IoUtils.safeClose(info.getStream().getRightSide());
            messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
        }
        switch (message.getType()) {
        case ERROR:
            log.info("error from Client: " + json);
        case CLOSE: {
            if (info != null) {
                IoUtils.safeClose(info.getStream().getRightSide());
            }
        }
            break;
        case OPEN: {
            switch (message.getSubType()) {
            case READ: {
                final InputStream is = Files.newInputStream(path, StandardOpenOption.READ);
                final InputStream cis = new CipherInputStream(is, getCipher(message, Cipher.DECRYPT_MODE));
                final ChannelPipe<StreamSourceChannel, StreamSinkChannel> pipe =
                        xnioWorker.createHalfDuplexPipe();
                pipe.getLeftSide().getReadSetter().set(new SecureChannelWriterBase(message) {
                    @Override
                    protected void write(Message message) {
                        try {
                            messageSender.send(message);
                        } catch (Exception e) {
                            log.warn("cannot write message=" + message + " : " + e, e);
                        }
                    }
                });
                pipe.getLeftSide().getCloseSetter().set(new ChannelListener<StreamSourceChannel>() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        activeStreams.getStreams().remove(uniqueKey);
                        messageSender.send(new Message(MessageType.CLOSE, message.getPath()).key(uniqueKey));
                    }
                });
                pipe.getRightSide().getWriteSetter().set(new ChannelListener<StreamSinkChannel>() {
                    private byte[] bytes = new byte[Constants.BUFFER_SIZE];

                    @Override
                    public void handleEvent(StreamSinkChannel channel) {
                        try {
                            int count = 0;
                            while ((count = cis.read(bytes, 0, bytes.length)) > 0) {
                                if (count > 0) {
                                    Channels.writeBlocking(pipe.getRightSide(),
                                            ByteBuffer.wrap(bytes, 0, count));
                                }
                                if (count < 0) {
                                    pipe.getRightSide().close();
                                } else {
                                    channel.resumeWrites();
                                }
                            }
                        } catch (Exception e) {
                            log.warn("cannot read from cypher: " + e, e);
                            IoUtils.safeClose(channel);
                        }
                    }
                });
                activeStreams.getStreams().put(uniqueKey,
                        new StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>>(pipe,
                                message.getPath()));
                // start sending data:
                pipe.getLeftSide().resumeReads();
                pipe.getRightSide().resumeWrites();
            }
                break;
            case WRITE: {
                Files.createDirectories(path.getParent());
                OutputStream os = Files.newOutputStream(path, StandardOpenOption.CREATE,
                        StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE);
                OutputStream cos = new CipherOutputStream(os, getCipher(message, Cipher.ENCRYPT_MODE));
                ChannelPipe<StreamSourceChannel, StreamSinkChannel> pipe = xnioWorker.createHalfDuplexPipe();
                pipe.getLeftSide().getReadSetter().set(new SecureChannelReaderBase() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        readChannel(message, cos, pipe, channel);
                    }
                });
                pipe.getLeftSide().getCloseSetter().set(new SecureChannelReaderBase() {
                    @Override
                    public void handleEvent(StreamSourceChannel channel) {
                        try {
                            cos.close();
                            activeStreams.getStreams().remove(pipe.toString());
                            messageSender.send(new Message(MessageType.CLOSE, message.getPath()).key(uniqueKey));
                            log.info("closed channel: " + pipe.toString());
                        } catch (IOException e) {
                            log.warn("cannot close stream: message=" + message + " : " + e, e);
                        }
                    }
                });
                activeStreams.getStreams().put(uniqueKey,
                        new StreamInfo<ChannelPipe<StreamSourceChannel, StreamSinkChannel>>(pipe,
                                message.getPath()));
                // start receiving data:
                pipe.getLeftSide().resumeReads();
            }
                break;
            default:
                messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
                break;
            }
        }
            break;
        case DATA: {
            if (info != null) {
                Channels.writeBlocking(info.getStream().getRightSide(), ByteBuffer.wrap(message.getBytes()));
            } else {
                messageSender.send(new Message(MessageType.ERROR, message.getPath()).key(uniqueKey));
            }
        }
            break;
        }
    } catch (IOException e) {
        log.warn("cannot handle message: " + message + " : " + e, e);
        throw e;
    } catch (Exception e) {
        log.warn("cannot handle message: " + message + " : " + e, e);
        throw new IOException("cannot handle message: " + message + " : " + e, e);
    }
}
From source file:org.carcv.core.model.file.FileCarImage.java
/**
 * Loads a part corresponding to the rectangular region from the image into memory. Calls
 * {@link #loadFragment(InputStream, Rectangle)} internally.
 *
 * @param rect specifies the rectangular region to load as the image
 * @throws IOException if an error during loading occurs
 */
public void loadFragment(Rectangle rect) throws IOException {
    InputStream inStream = Files.newInputStream(persistablePath.getPath(), StandardOpenOption.READ);
    loadFragment(inStream, rect);
}
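The snippet above leaves closing the stream to the delegate method. A minimal sketch of the same call with explicit cleanup, assuming loadFragment(InputStream, Rectangle) does not need the stream after it returns:

public void loadFragment(Rectangle rect) throws IOException {
    // sketch only: close the stream here instead of relying on the delegate
    try (InputStream inStream = Files.newInputStream(persistablePath.getPath(), StandardOpenOption.READ)) {
        loadFragment(inStream, rect);
    }
}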
From source file:io.redlink.solrlib.embedded.EmbeddedCoreContainer.java
@Override
@SuppressWarnings({ "squid:S3725", "squid:S3776" })
protected synchronized void init(ExecutorService executorService) throws IOException {
    Preconditions.checkState(Objects.isNull(coreContainer), "Already initialized!");
    if (solrHome == null) {
        solrHome = Files.createTempDirectory("solr-home");
        log.debug("No solr-home set, using temp directory {}", solrHome);
        deleteOnShutdown = true;
    }
    final Path absoluteSolrHome = this.solrHome.toAbsolutePath();
    if (Files.isDirectory(absoluteSolrHome)) {
        log.trace("solr-home exists: {}", absoluteSolrHome);
    } else {
        Files.createDirectories(absoluteSolrHome);
        log.debug("Created solr-home: {}", absoluteSolrHome);
    }
    final Path lib = absoluteSolrHome.resolve("lib");
    if (Files.isDirectory(lib)) {
        log.trace("lib-directory exists: {}", lib);
    } else {
        Files.createDirectories(lib);
        log.debug("Created solr-lib directory: {}", lib);
    }
    final Path solrXml = absoluteSolrHome.resolve("solr.xml");
    if (!Files.exists(solrXml)) {
        log.info("no solr.xml found, creating new at {}", solrXml);
        try (PrintStream writer = new PrintStream(Files.newOutputStream(solrXml, StandardOpenOption.CREATE))) {
            writer.printf("<!-- Generated by %s on %tF %<tT -->%n", getClass().getSimpleName(), new Date());
            writer.println("<solr>");
            writer.printf(" <str name=\"%s\">%s</str>%n", "sharedLib", absoluteSolrHome.relativize(lib));
            writer.println("</solr>");
        }
    } else {
        log.trace("found solr.xml: {}", solrXml);
    }
    for (SolrCoreDescriptor coreDescriptor : coreDescriptors) {
        final String coreName = coreDescriptor.getCoreName();
        if (availableCores.containsKey(coreName)) {
            log.warn("CoreName-Clash: {} already initialized. Skipping {}", coreName,
                    coreDescriptor.getClass());
            continue;
        }
        final Path coreDir = absoluteSolrHome.resolve(coreName);
        Files.createDirectories(coreDir);
        coreDescriptor.initCoreDirectory(coreDir, lib);

        final Properties coreProperties = new Properties();
        final Path corePropertiesFile = coreDir.resolve("core.properties");
        if (Files.exists(corePropertiesFile)) {
            // note: CREATE is an unusual option for newInputStream; READ was probably
            // intended, and the file is known to exist at this point anyway
            try (InputStream inStream = Files.newInputStream(corePropertiesFile, StandardOpenOption.CREATE)) {
                coreProperties.load(inStream);
            }
            log.debug("core.properties for {} found, updating", coreName);
        } else {
            log.debug("Creating new core {} in {}", coreName, coreDir);
        }
        coreProperties.setProperty("name", coreName);
        try (OutputStream outputStream = Files.newOutputStream(corePropertiesFile)) {
            coreProperties.store(outputStream, null);
        }
        if (coreDescriptor.getNumShards() > 1 || coreDescriptor.getReplicationFactor() > 1) {
            log.warn("Deploying {} to EmbeddedCoreContainer, ignoring config of shards={},replication={}",
                    coreName, coreDescriptor.getNumShards(), coreDescriptor.getReplicationFactor());
        }
        availableCores.put(coreName, coreDescriptor);
    }
    log.info("Starting {} in solr-home '{}'", getClass().getSimpleName(), absoluteSolrHome);
    coreContainer = CoreContainer.createAndLoad(absoluteSolrHome, solrXml);
    availableCores.values().forEach(coreDescriptor -> {
        final String coreName = coreDescriptor.getCoreName();
        try (SolrClient solrClient = createSolrClient(coreName)) {
            final NamedList<Object> coreStatus = CoreAdminRequest.getStatus(coreName, solrClient)
                    .getCoreStatus(coreName);
            final NamedList<Object> indexStatus = coreStatus == null ? null
                    : (NamedList<Object>) coreStatus.get("index");
            // lastModified is null if there was never an update
            final Object lastModified = indexStatus == null ? null : indexStatus.get("lastModified");
            scheduleCoreInit(executorService, coreDescriptor, lastModified == null);
        } catch (SolrServerException | IOException e) {
            if (log.isDebugEnabled()) {
                log.error("Error initializing core {}", coreName, e);
            }
            //noinspection ThrowableResultOfMethodCallIgnored
            coreInitExceptions.put(coreName, e);
        }
    });
}
From source file:org.apache.nifi.controller.service.ControllerServiceLoader.java
public List<ControllerServiceNode> loadControllerServices(final ControllerServiceProvider provider)
        throws IOException {
    final SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
    final DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance();
    InputStream fis = null;
    BufferedInputStream bis = null;
    documentBuilderFactory.setNamespaceAware(true);

    final List<ControllerServiceNode> services = new ArrayList<>();
    try {
        final URL configurationResource = this.getClass().getResource("/ControllerServiceConfiguration.xsd");
        if (configurationResource == null) {
            throw new NullPointerException("Unable to load XML Schema for ControllerServiceConfiguration");
        }
        final Schema schema = schemaFactory.newSchema(configurationResource);
        documentBuilderFactory.setSchema(schema);
        final DocumentBuilder builder = documentBuilderFactory.newDocumentBuilder();
        builder.setErrorHandler(new org.xml.sax.ErrorHandler() {
            @Override
            public void fatalError(final SAXParseException err) throws SAXException {
                logger.error("Config file line " + err.getLineNumber() + ", col " + err.getColumnNumber()
                        + ", uri " + err.getSystemId() + " :message: " + err.getMessage());
                if (logger.isDebugEnabled()) {
                    logger.error("Error Stack Dump", err);
                }
                throw err;
            }

            @Override
            public void error(final SAXParseException err) throws SAXParseException {
                logger.error("Config file line " + err.getLineNumber() + ", col " + err.getColumnNumber()
                        + ", uri " + err.getSystemId() + " :message: " + err.getMessage());
                if (logger.isDebugEnabled()) {
                    logger.error("Error Stack Dump", err);
                }
                throw err;
            }

            @Override
            public void warning(final SAXParseException err) throws SAXParseException {
                logger.warn(" Config file line " + err.getLineNumber() + ", uri " + err.getSystemId()
                        + " : message : " + err.getMessage());
                if (logger.isDebugEnabled()) {
                    logger.warn("Warning stack dump", err);
                }
                throw err;
            }
        });

        // if controllerService.xml does not exist, create an empty file...
        fis = Files.newInputStream(this.serviceConfigXmlPath, StandardOpenOption.READ);
        bis = new BufferedInputStream(fis);
        if (Files.size(this.serviceConfigXmlPath) > 0) {
            final Document document = builder.parse(bis);
            final NodeList servicesNodes = document.getElementsByTagName("services");
            final Element servicesElement = (Element) servicesNodes.item(0);

            final List<Element> serviceNodes = DomUtils.getChildElementsByTagName(servicesElement, "service");
            for (final Element serviceElement : serviceNodes) {
                // get properties for the specific controller task - id, name, class,
                // and schedulingPeriod must be set
                final String serviceId = DomUtils.getChild(serviceElement, "identifier").getTextContent().trim();
                final String serviceClass = DomUtils.getChild(serviceElement, "class").getTextContent().trim();

                // set the class to be used for the configured controller task
                final ControllerServiceNode serviceNode = provider.createControllerService(serviceClass,
                        serviceId, false);

                // optional task-specific properties
                for (final Element optionalProperty : DomUtils.getChildElementsByTagName(serviceElement,
                        "property")) {
                    final String name = optionalProperty.getAttribute("name").trim();
                    final String value = optionalProperty.getTextContent().trim();
                    serviceNode.setProperty(name, value);
                }

                services.add(serviceNode);
                provider.enableControllerService(serviceNode);
            }
        }
    } catch (SAXException | ParserConfigurationException sxe) {
        throw new IOException(sxe);
    } finally {
        FileUtils.closeQuietly(fis);
        FileUtils.closeQuietly(bis);
    }

    return services;
}
From source file:it.greenvulcano.configuration.BaseConfigurationManager.java
@Override
public byte[] extract(String name, String entry) {
    Path configurationArchivePath = getConfigurationPath(name);
    try (ZipInputStream configurationArchive = new ZipInputStream(
            Files.newInputStream(configurationArchivePath, StandardOpenOption.READ))) {
        ZipEntry zipEntry = null;
        while ((zipEntry = configurationArchive.getNextEntry()) != null) {
            if (zipEntry.getName().equals(entry)) {
                byte[] entryData = IOUtils.toByteArray(configurationArchive);
                return entryData;
            }
        }
    } catch (Exception e) {
        LOG.error("Failed to extract entry " + entry + " from archive " + configurationArchivePath, e);
    }
    return new byte[] {};
}
From source file:org.apache.hadoop.hive.ql.MetaStoreDumpUtility.java
public static void setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(HiveConf conf, String tmpBaseDir) {
    Connection conn = null;
    try {
        Properties props = new Properties(); // connection properties
        props.put("user", conf.get("javax.jdo.option.ConnectionUserName"));
        props.put("password", conf.get("javax.jdo.option.ConnectionPassword"));
        String url = conf.get("javax.jdo.option.ConnectionURL");
        conn = DriverManager.getConnection(url, props);
        ResultSet rs = null;
        Statement s = conn.createStatement();

        if (LOG.isDebugEnabled()) {
            LOG.debug("Connected to metastore database ");
        }

        String mdbPath = HiveTestEnvSetup.HIVE_ROOT + "/data/files/tpcds-perf/metastore_export/";

        // Setup the table column stats
        BufferedReader br = new BufferedReader(new FileReader(new File(
                HiveTestEnvSetup.HIVE_ROOT + "/metastore/scripts/upgrade/derby/022-HIVE-11107.derby.sql")));
        String command;

        s.execute("DROP TABLE APP.TABLE_PARAMS");
        s.execute("DROP TABLE APP.TAB_COL_STATS");
        // Create the column stats table
        while ((command = br.readLine()) != null) {
            if (!command.endsWith(";")) {
                continue;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Going to run command : " + command);
            }
            PreparedStatement psCommand = conn.prepareStatement(command.substring(0, command.length() - 1));
            psCommand.execute();
            psCommand.close();
            if (LOG.isDebugEnabled()) {
                LOG.debug("successfully completed " + command);
            }
        }
        br.close();

        java.nio.file.Path tabColStatsCsv = FileSystems.getDefault().getPath(mdbPath, "csv",
                "TAB_COL_STATS.txt.bz2");
        java.nio.file.Path tabParamsCsv = FileSystems.getDefault().getPath(mdbPath, "csv",
                "TABLE_PARAMS.txt.bz2");

        // Set up the foreign key constraints properly in the TAB_COL_STATS data
        java.nio.file.Path tmpFileLoc1 = FileSystems.getDefault().getPath(tmpBaseDir, "TAB_COL_STATS.txt");
        java.nio.file.Path tmpFileLoc2 = FileSystems.getDefault().getPath(tmpBaseDir, "TABLE_PARAMS.txt");

        class MyComp implements Comparator<String> {
            @Override
            public int compare(String str1, String str2) {
                if (str2.length() != str1.length()) {
                    return str2.length() - str1.length();
                }
                return str1.compareTo(str2);
            }
        }

        final SortedMap<String, Integer> tableNameToID = new TreeMap<String, Integer>(new MyComp());

        rs = s.executeQuery("SELECT * FROM APP.TBLS");
        while (rs.next()) {
            String tblName = rs.getString("TBL_NAME");
            Integer tblId = rs.getInt("TBL_ID");
            tableNameToID.put(tblName, tblId);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Resultset : " + tblName + " | " + tblId);
            }
        }

        final Map<String, Map<String, String>> data = new HashMap<>();
        rs = s.executeQuery("select TBLS.TBL_NAME, a.COLUMN_NAME, a.TYPE_NAME from "
                + "(select COLUMN_NAME, TYPE_NAME, SDS.SD_ID from APP.COLUMNS_V2 join APP.SDS on SDS.CD_ID = COLUMNS_V2.CD_ID) a"
                + " join APP.TBLS on TBLS.SD_ID = a.SD_ID");
        while (rs.next()) {
            String tblName = rs.getString(1);
            String colName = rs.getString(2);
            String typeName = rs.getString(3);
            Map<String, String> cols = data.get(tblName);
            if (null == cols) {
                cols = new HashMap<>();
            }
            cols.put(colName, typeName);
            data.put(tblName, cols);
        }

        BufferedReader reader = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabColStatsCsv, StandardOpenOption.READ))));

        Stream<String> replaced = reader.lines().parallel().map(str -> {
            String[] splits = str.split(",");
            String tblName = splits[0];
            String colName = splits[1];
            Integer tblID = tableNameToID.get(tblName);
            StringBuilder sb = new StringBuilder(
                    "default@" + tblName + "@" + colName + "@" + data.get(tblName).get(colName) + "@");
            for (int i = 2; i < splits.length; i++) {
                sb.append(splits[i] + "@");
            }
            // Add tbl_id and empty bitvector
            return sb.append(tblID).append("@").toString();
        });

        Files.write(tmpFileLoc1, (Iterable<String>) replaced::iterator);
        replaced.close();
        reader.close();

        BufferedReader reader2 = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabParamsCsv, StandardOpenOption.READ))));
        final Map<String, String> colStats = new ConcurrentHashMap<>();

        Stream<String> replacedStream = reader2.lines().parallel().map(str -> {
            String[] splits = str.split("_@");
            String tblName = splits[0];
            Integer tblId = tableNameToID.get(tblName);
            Map<String, String> cols = data.get(tblName);

            StringBuilder sb = new StringBuilder();
            sb.append("{\"COLUMN_STATS\":{");
            for (String colName : cols.keySet()) {
                sb.append("\"" + colName + "\":\"true\",");
            }
            sb.append("},\"BASIC_STATS\":\"true\"}");
            colStats.put(tblId.toString(), sb.toString());

            return tblId.toString() + "@" + splits[1];
        });

        Files.write(tmpFileLoc2, (Iterable<String>) replacedStream::iterator);
        Files.write(tmpFileLoc2, (Iterable<String>) colStats.entrySet().stream()
                .map(map -> map.getKey() + "@COLUMN_STATS_ACCURATE@" + map.getValue())::iterator,
                StandardOpenOption.APPEND);

        replacedStream.close();
        reader2.close();

        // Load the column stats and table params with 30 TB scale
        String importStatement1 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TAB_COL_STATS" + "', '"
                + tmpFileLoc1.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";
        String importStatement2 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TABLE_PARAMS" + "', '"
                + tmpFileLoc2.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";

        PreparedStatement psImport1 = conn.prepareStatement(importStatement1);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Going to execute : " + importStatement1);
        }
        psImport1.execute();
        psImport1.close();
        if (LOG.isDebugEnabled()) {
            LOG.debug("successfully completed " + importStatement1);
        }

        PreparedStatement psImport2 = conn.prepareStatement(importStatement2);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Going to execute : " + importStatement2);
        }
        psImport2.execute();
        psImport2.close();
        if (LOG.isDebugEnabled()) {
            LOG.debug("successfully completed " + importStatement2);
        }

        s.execute("ALTER TABLE APP.TAB_COL_STATS ADD COLUMN CAT_NAME VARCHAR(256)");
        s.execute("update APP.TAB_COL_STATS set CAT_NAME = '" + Warehouse.DEFAULT_CATALOG_NAME + "'");

        s.close();
        conn.close();
    } catch (Exception e) {
        throw new RuntimeException("error while loading tpcds metastore dump", e);
    }
}
From source file:gaffer.graph.hook.OperationAuthoriser.java
private static Properties readProperties(final Path propFileLocation) {
    Properties props;
    if (null != propFileLocation) {
        try {
            props = readProperties(Files.newInputStream(propFileLocation, StandardOpenOption.READ));
        } catch (IOException e) {
            throw new IllegalArgumentException(e);
        }
    } else {
        props = new Properties();
    }
    return props;
}
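The overload above delegates to an InputStream-based readProperties that is not shown here. A minimal self-contained sketch of the same idea, with the stream closed via try-with-resources (the class and helper names are hypothetical, not part of the Gaffer source):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Properties;

final class PropertiesLoader {

    // Hypothetical helper for illustration only
    static Properties loadProperties(final Path propFileLocation) {
        final Properties props = new Properties();
        if (propFileLocation != null) {
            try (InputStream in = Files.newInputStream(propFileLocation, StandardOpenOption.READ)) {
                props.load(in);
            } catch (IOException e) {
                throw new IllegalArgumentException(e);
            }
        }
        return props;
    }
}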
From source file:io.anserini.index.IndexUtils.java
public InputStream getReadFileStream(String path) throws IOException {
    InputStream fin = Files.newInputStream(Paths.get(path), StandardOpenOption.READ);
    BufferedInputStream in = new BufferedInputStream(fin);
    if (path.endsWith(".bz2")) {
        BZip2CompressorInputStream bzIn = new BZip2CompressorInputStream(in);
        return bzIn;
    } else if (path.endsWith(".gz")) {
        GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in);
        return gzIn;
    } else if (path.endsWith(".zip")) {
        // note: this branch wraps the .zip file in a gzip decompressor, which cannot
        // read ZIP archives; a ZipInputStream positioned at an entry would be needed
        GzipCompressorInputStream zipIn = new GzipCompressorInputStream(in);
        return zipIn;
    }
    return in;
}
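If real ZIP support is needed, one option (a sketch using only JDK classes, not the Anserini code; the .bz2 branch is omitted because it requires Commons Compress) is to advance a ZipInputStream to its first entry so the returned stream yields that entry's bytes:

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.zip.GZIPInputStream;
import java.util.zip.ZipInputStream;

public class CompressedFileStreams {

    // Sketch only: returns a stream over the (possibly compressed) file contents.
    // For .zip archives it exposes just the first entry, which may not suit every use case.
    public static InputStream open(String path) throws IOException {
        InputStream in = new BufferedInputStream(
                Files.newInputStream(Paths.get(path), StandardOpenOption.READ));
        if (path.endsWith(".gz")) {
            return new GZIPInputStream(in);
        } else if (path.endsWith(".zip")) {
            ZipInputStream zipIn = new ZipInputStream(in);
            if (zipIn.getNextEntry() == null) {
                zipIn.close();
                throw new IOException("empty zip archive: " + path);
            }
            return zipIn;
        }
        return in;
    }
}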
From source file:com.ignorelist.kassandra.steam.scraper.FileCache.java
private InputStream getIfPresentNonBlocking(Object key) {
    if (isExpired(key.toString())) {
        return null;
    }
    try {
        return new GZIPInputStream(
                Files.newInputStream(buildCacheFile(key.toString()), StandardOpenOption.READ));
    } catch (IOException ex) {
        LOG.log(Level.WARNING, "failed to open InputStream", ex);
        return null;
    }
}
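For symmetry, a cache read like this implies that entries were written as GZIP-compressed files. A minimal sketch of that write path (not taken from the FileCache source; it reuses the buildCacheFile helper shown above and assumes java.util.zip.GZIPOutputStream plus java.io.OutputStream are imported):

// Sketch only: complementary write path, assuming entries are stored GZIP-compressed
private void putNonBlocking(Object key, byte[] value) {
    try (OutputStream out = new GZIPOutputStream(
            Files.newOutputStream(buildCacheFile(key.toString())))) {
        out.write(value);
    } catch (IOException ex) {
        LOG.log(Level.WARNING, "failed to write cache entry", ex);
    }
}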