List of usage examples for java.nio.file.StandardOpenOption.READ
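All of the project examples below open files for reading. As a baseline, here is a minimal, self-contained sketch of the two styles they use: a stream via Files.newInputStream and a channel via FileChannel.open. The file name is hypothetical; note that READ is already the default when either method is called with no options, so passing it is explicit documentation rather than a behavior change.

import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ReadOptionDemo {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("example.txt"); // hypothetical input file

        // Stream-oriented read; READ is explicit here but matches the default
        try (InputStream in = Files.newInputStream(path, StandardOpenOption.READ)) {
            System.out.println("first byte: " + in.read());
        }

        // Channel-oriented read; the channel is positionable and read-only
        try (FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) {
            ByteBuffer buffer = ByteBuffer.allocate(64);
            int n = channel.read(buffer);
            System.out.println("read " + n + " bytes");
        }
    }
}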
From source file:org.apache.hadoop.hive.ql.MetaStoreDumpUtility.java
public static void setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(HiveConf conf, String tmpBaseDir) {
    Connection conn = null;
    try {
        Properties props = new Properties(); // connection properties
        props.put("user", conf.get("javax.jdo.option.ConnectionUserName"));
        props.put("password", conf.get("javax.jdo.option.ConnectionPassword"));
        String url = conf.get("javax.jdo.option.ConnectionURL");
        conn = DriverManager.getConnection(url, props);
        ResultSet rs = null;
        Statement s = conn.createStatement();

        if (LOG.isDebugEnabled()) {
            LOG.debug("Connected to metastore database ");
        }

        String mdbPath = HiveTestEnvSetup.HIVE_ROOT + "/data/files/tpcds-perf/metastore_export/";

        // Setup the table column stats
        BufferedReader br = new BufferedReader(new FileReader(new File(
                HiveTestEnvSetup.HIVE_ROOT + "/metastore/scripts/upgrade/derby/022-HIVE-11107.derby.sql")));
        String command;

        s.execute("DROP TABLE APP.TABLE_PARAMS");
        s.execute("DROP TABLE APP.TAB_COL_STATS");

        // Create the column stats table
        while ((command = br.readLine()) != null) {
            if (!command.endsWith(";")) {
                continue;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Going to run command : " + command);
            }
            PreparedStatement psCommand = conn.prepareStatement(command.substring(0, command.length() - 1));
            psCommand.execute();
            psCommand.close();
            if (LOG.isDebugEnabled()) {
                LOG.debug("successfully completed " + command);
            }
        }
        br.close();

        java.nio.file.Path tabColStatsCsv = FileSystems.getDefault().getPath(mdbPath, "csv",
                "TAB_COL_STATS.txt.bz2");
        java.nio.file.Path tabParamsCsv = FileSystems.getDefault().getPath(mdbPath, "csv",
                "TABLE_PARAMS.txt.bz2");

        // Set up the foreign key constraints properly in the TAB_COL_STATS data
        java.nio.file.Path tmpFileLoc1 = FileSystems.getDefault().getPath(tmpBaseDir, "TAB_COL_STATS.txt");
        java.nio.file.Path tmpFileLoc2 = FileSystems.getDefault().getPath(tmpBaseDir, "TABLE_PARAMS.txt");

        class MyComp implements Comparator<String> {
            @Override
            public int compare(String str1, String str2) {
                if (str2.length() != str1.length()) {
                    return str2.length() - str1.length();
                }
                return str1.compareTo(str2);
            }
        }

        final SortedMap<String, Integer> tableNameToID = new TreeMap<String, Integer>(new MyComp());

        rs = s.executeQuery("SELECT * FROM APP.TBLS");
        while (rs.next()) {
            String tblName = rs.getString("TBL_NAME");
            Integer tblId = rs.getInt("TBL_ID");
            tableNameToID.put(tblName, tblId);

            if (LOG.isDebugEnabled()) {
                LOG.debug("Resultset : " + tblName + " | " + tblId);
            }
        }

        final Map<String, Map<String, String>> data = new HashMap<>();
        rs = s.executeQuery("select TBLS.TBL_NAME, a.COLUMN_NAME, a.TYPE_NAME from "
                + "(select COLUMN_NAME, TYPE_NAME, SDS.SD_ID from APP.COLUMNS_V2 join APP.SDS on SDS.CD_ID = COLUMNS_V2.CD_ID) a"
                + " join APP.TBLS on TBLS.SD_ID = a.SD_ID");
        while (rs.next()) {
            String tblName = rs.getString(1);
            String colName = rs.getString(2);
            String typeName = rs.getString(3);
            Map<String, String> cols = data.get(tblName);
            if (null == cols) {
                cols = new HashMap<>();
            }
            cols.put(colName, typeName);
            data.put(tblName, cols);
        }

        BufferedReader reader = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabColStatsCsv, StandardOpenOption.READ))));

        Stream<String> replaced = reader.lines().parallel().map(str -> {
            String[] splits = str.split(",");
            String tblName = splits[0];
            String colName = splits[1];
            Integer tblID = tableNameToID.get(tblName);
            StringBuilder sb = new StringBuilder(
                    "default@" + tblName + "@" + colName + "@" + data.get(tblName).get(colName) + "@");
            for (int i = 2; i < splits.length; i++) {
                sb.append(splits[i] + "@");
            }
            // Add tbl_id and empty bitvector
            return sb.append(tblID).append("@").toString();
        });

        Files.write(tmpFileLoc1, (Iterable<String>) replaced::iterator);
        replaced.close();
        reader.close();

        BufferedReader reader2 = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabParamsCsv, StandardOpenOption.READ))));

        final Map<String, String> colStats = new ConcurrentHashMap<>();

        Stream<String> replacedStream = reader2.lines().parallel().map(str -> {
            String[] splits = str.split("_@");
            String tblName = splits[0];
            Integer tblId = tableNameToID.get(tblName);
            Map<String, String> cols = data.get(tblName);

            StringBuilder sb = new StringBuilder();
            sb.append("{\"COLUMN_STATS\":{");
            for (String colName : cols.keySet()) {
                sb.append("\"" + colName + "\":\"true\",");
            }
            sb.append("},\"BASIC_STATS\":\"true\"}");
            colStats.put(tblId.toString(), sb.toString());

            return tblId.toString() + "@" + splits[1];
        });

        Files.write(tmpFileLoc2, (Iterable<String>) replacedStream::iterator);
        Files.write(tmpFileLoc2, (Iterable<String>) colStats.entrySet().stream()
                .map(map -> map.getKey() + "@COLUMN_STATS_ACCURATE@" + map.getValue())::iterator,
                StandardOpenOption.APPEND);

        replacedStream.close();
        reader2.close();

        // Load the column stats and table params with 30 TB scale
        String importStatement1 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TAB_COL_STATS" + "', '"
                + tmpFileLoc1.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";
        String importStatement2 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TABLE_PARAMS" + "', '"
                + tmpFileLoc2.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";

        PreparedStatement psImport1 = conn.prepareStatement(importStatement1);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Going to execute : " + importStatement1);
        }
        psImport1.execute();
        psImport1.close();
        if (LOG.isDebugEnabled()) {
            LOG.debug("successfully completed " + importStatement1);
        }
        PreparedStatement psImport2 = conn.prepareStatement(importStatement2);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Going to execute : " + importStatement2);
        }
        psImport2.execute();
        psImport2.close();
        if (LOG.isDebugEnabled()) {
            LOG.debug("successfully completed " + importStatement2);
        }

        s.execute("ALTER TABLE APP.TAB_COL_STATS ADD COLUMN CAT_NAME VARCHAR(256)");
        s.execute("update APP.TAB_COL_STATS set CAT_NAME = '" + Warehouse.DEFAULT_CATALOG_NAME + "'");

        s.close();
        conn.close();
    } catch (Exception e) {
        throw new RuntimeException("error while loading tpcds metastore dump", e);
    }
}
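The READ usage in this long test fixture is easy to miss: each bz2 dump is opened with Files.newInputStream(path, StandardOpenOption.READ), decompressed with commons-compress, transformed line by line, and written back out with Files.write via a Stream adapted to Iterable. A stripped-down sketch of just that pipeline; the paths and the trailing-@ transformation are placeholders, not the Hive logic:

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.stream.Stream;

import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;

public class Bz2TransformSketch {
    public static void main(String[] args) throws IOException {
        Path source = Paths.get("TAB_COL_STATS.txt.bz2"); // hypothetical compressed dump
        Path target = Paths.get("TAB_COL_STATS.txt");     // hypothetical output

        try (BufferedReader reader = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(source, StandardOpenOption.READ)),
                StandardCharsets.UTF_8));
                Stream<String> lines = reader.lines().map(line -> line + "@")) {
            // Files.write accepts any Iterable<CharSequence>, so a lazy Stream can be
            // adapted with a method reference, exactly as the Hive code does
            Files.write(target, (Iterable<String>) lines::iterator);
        }
    }
}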
From source file:com.spectralogic.ds3cli.command.PutObject.java
private void transfer(final Ds3ClientHelpers helpers, final Ds3Object ds3Obj)
        throws IOException, XmlProcessingException {
    final WriteJobOptions writeJobOptions = WriteJobOptions.create();
    writeJobOptions.setForce(force);
    if (priority != null) {
        writeJobOptions.withPriority(priority);
    }
    final Ds3ClientHelpers.Job putJob = helpers
            .startWriteJob(this.bucketName, Lists.newArrayList(ds3Obj), writeJobOptions)
            .withMaxParallelRequests(this.numberOfThreads);
    if (!Guard.isMapNullOrEmpty(metadata)) {
        putJob.withMetadata(new MetadataAccess() {
            @Override
            public Map<String, String> getMetadataValue(final String s) {
                return new ImmutableMap.Builder<String, String>()
                        .putAll(MetadataUtils.getMetadataValues(objectPath)).putAll(metadata).build();
            }
        });
    }
    putJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
        @Override
        public SeekableByteChannel buildChannel(final String s) throws IOException {
            return FileChannel.open(objectPath, StandardOpenOption.READ);
        }
    });
}
From source file:gaffer.graph.hook.OperationAuthoriser.java
private static Properties readProperties(final Path propFileLocation) {
    Properties props;
    if (null != propFileLocation) {
        try {
            props = readProperties(Files.newInputStream(propFileLocation, StandardOpenOption.READ));
        } catch (IOException e) {
            throw new IllegalArgumentException(e);
        }
    } else {
        props = new Properties();
    }
    return props;
}
From source file:io.anserini.index.IndexUtils.java
public InputStream getReadFileStream(String path) throws IOException {
    InputStream fin = Files.newInputStream(Paths.get(path), StandardOpenOption.READ);
    BufferedInputStream in = new BufferedInputStream(fin);
    if (path.endsWith(".bz2")) {
        BZip2CompressorInputStream bzIn = new BZip2CompressorInputStream(in);
        return bzIn;
    } else if (path.endsWith(".gz")) {
        GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in);
        return gzIn;
    } else if (path.endsWith(".zip")) {
        // a .zip archive is not gzip data: open a zip stream and position it
        // at the first entry (assumes a single-entry archive)
        ZipInputStream zipIn = new ZipInputStream(in);
        zipIn.getNextEntry();
        return zipIn;
    }
    return in;
}
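Because every branch layers a decompressor over the same StandardOpenOption.READ stream, callers can read any supported file uniformly. A hedged usage sketch, assuming the usual java.io/java.nio imports; "utils" is an instance of the class above and the five-line limit is arbitrary:

// Hypothetical caller: prints the first few lines of a possibly-compressed file.
void printHead(IndexUtils utils, String path) throws IOException {
    try (BufferedReader br = new BufferedReader(
            new InputStreamReader(utils.getReadFileStream(path), StandardCharsets.UTF_8))) {
        br.lines().limit(5).forEach(System.out::println);
    }
}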
From source file:uk.gov.gchq.gaffer.graph.hook.OperationAuthoriser.java
private static Properties readProperties(final Path propFileLocation) {
    Properties props;
    if (null != propFileLocation) {
        try {
            props = readProperties(Files.newInputStream(propFileLocation, StandardOpenOption.READ));
        } catch (final IOException e) {
            throw new IllegalArgumentException(e);
        }
    } else {
        props = new Properties();
    }
    return props;
}
From source file:com.ignorelist.kassandra.steam.scraper.FileCache.java
private InputStream getIfPresentNonBlocking(Object key) {
    if (isExpired(key.toString())) {
        return null;
    }
    try {
        return new GZIPInputStream(
                Files.newInputStream(buildCacheFile(key.toString()), StandardOpenOption.READ));
    } catch (IOException ex) {
        LOG.log(Level.WARNING, "failed to open InputStream", ex);
        return null;
    }
}
From source file:com.github.jinahya.verbose.codec.BinaryCodecTest.java
protected final void encodeDecode(final Path expectedPath) throws IOException {
    if (expectedPath == null) {
        throw new NullPointerException("null expectedPath");
    }
    try (final FileChannel expectedChannel = FileChannel.open(expectedPath, StandardOpenOption.READ)) {
        encodeDecode(expectedChannel);
    }
}
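One property of a channel opened with READ alone that this test relies on implicitly: write attempts fail fast rather than corrupting the expected data. A small hypothetical check; the file name is made up:

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.NonWritableChannelException;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class ReadOnlyChannelDemo {
    public static void main(String[] args) throws IOException {
        // Hypothetical file; the point is the failure mode, not the content.
        try (FileChannel ch = FileChannel.open(Paths.get("expected.bin"), StandardOpenOption.READ)) {
            ch.write(ByteBuffer.wrap(new byte[] { 1 })); // not permitted on a read-only channel
        } catch (NonWritableChannelException expected) {
            System.out.println("channel is read-only, as requested");
        }
    }
}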
From source file:it.greenvulcano.configuration.BaseConfigurationManager.java
@Override
public Properties getXMLConfigProperties() throws FileNotFoundException, IOException {
    Path xmlConfigPath = Paths.get(XMLConfig.getBaseConfigPath(), "XMLConfig.properties");
    if (Files.exists(xmlConfigPath)) {
        Properties properties = new Properties();
        properties.load(Files.newInputStream(xmlConfigPath, StandardOpenOption.READ));
        return properties;
    } else {
        throw new FileNotFoundException("XMLConfig.properties");
    }
}
From source file:de.elomagic.mag.AbstractTest.java
protected Future<byte[]> createFileExistsFuture(Path file) {
    ExecutorService executor = Executors.newFixedThreadPool(2);
    FutureTask<byte[]> futureTask = new FutureTask<>(() -> {
        byte[] result = null;
        do {
            if (Files.exists(file)) {
                InputStream in = Files.newInputStream(file, StandardOpenOption.READ);
                // in.available() is trusted to report the whole file length;
                // that holds for the small local test files used here, but is
                // not guaranteed by the InputStream contract in general
                result = IOUtils.readFully(in, in.available());
            }
            Thread.sleep(100);
        } while (result == null);
        return result;
    });
    executor.execute(futureTask);
    return futureTask;
}
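A minimal sketch of the same wait-then-read loop without the available() caveat, using InputStream.readAllBytes (Java 9+) so the length never matters. The method name is hypothetical and, as in the original, the executor is left running for the test's lifetime:

// Hypothetical variant: same polling loop, but reads the entire file
// without relying on InputStream.available().
protected Future<byte[]> createFileExistsFutureSafe(Path file) {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    FutureTask<byte[]> task = new FutureTask<>(() -> {
        while (!Files.exists(file)) {
            Thread.sleep(100); // poll until the file shows up
        }
        try (InputStream in = Files.newInputStream(file, StandardOpenOption.READ)) {
            return in.readAllBytes(); // reads to EOF regardless of available()
        }
    });
    executor.execute(task);
    return task;
}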
From source file:org.apache.nifi.processors.kite.TestInferAvroSchema.java
@Test
public void inferAvroSchemaFromCSVFile() throws Exception {
    runner.assertValid();

    // Read in the header
    StringWriter writer = new StringWriter();
    IOUtils.copy(
            (Files.newInputStream(Paths.get("src/test/resources/ShapesHeader.csv"), StandardOpenOption.READ)),
            writer, "UTF-8");
    runner.setProperty(InferAvroSchema.CSV_HEADER_DEFINITION, writer.toString());
    runner.setProperty(InferAvroSchema.GET_CSV_HEADER_DEFINITION_FROM_INPUT, "false");

    Map<String, String> attributes = new HashMap<>();
    attributes.put(CoreAttributes.MIME_TYPE.key(), "text/csv");
    runner.enqueue(new File("src/test/resources/Shapes_NoHeader.csv").toPath(), attributes);
    runner.run();

    runner.assertTransferCount(InferAvroSchema.REL_UNSUPPORTED_CONTENT, 0);
    runner.assertTransferCount(InferAvroSchema.REL_FAILURE, 0);
    runner.assertTransferCount(InferAvroSchema.REL_ORIGINAL, 1);
    runner.assertTransferCount(InferAvroSchema.REL_SUCCESS, 1);

    MockFlowFile data = runner.getFlowFilesForRelationship(InferAvroSchema.REL_SUCCESS).get(0);
    data.assertContentEquals(
            unix2PlatformSpecificLineEndings(new File("src/test/resources/Shapes_header.csv.avro")));
    data.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/avro-binary");
}