List of usage examples for the java.io.Flushable interface
From source file org.apache.streams.hbase.HbasePersistWriter.java
import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.HTablePool;
import org.apache.hadoop.hbase.client.Put;
import org.slf4j.Logger;
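Those HBase client imports suggest the writer batches Put operations through an HTableInterface. Below is a minimal sketch of how such a class might satisfy Flushable, assuming the pre-1.0 HBase client API (where HTableInterface exposes flushCommits()); the class and field names are illustrative, not taken from the Streams source.

import java.io.Flushable;
import java.io.IOException;

import org.apache.hadoop.hbase.client.HTableInterface;

// Hypothetical sketch: flush() pushes any client-side buffered Puts to the region servers.
public class HBaseFlushingWriter implements Flushable {

    private final HTableInterface table; // assumed to be created with autoFlush disabled

    public HBaseFlushingWriter(HTableInterface table) {
        this.table = table;
    }

    @Override
    public void flush() throws IOException {
        // flushCommits() sends the write buffer accumulated while autoFlush is off
        table.flushCommits();
    }
}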
From source file fabrice.CSVPrinter.java
/**
* Prints values in a CSV format.
*
* Taken from <b>org.apache.commons.csv.CSVPrinter</b>
*
* <groupId>org.apache.commons</groupId>
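Since the class is adapted from org.apache.commons.csv.CSVPrinter, its flush() most likely just forwards to the underlying Appendable when that target is itself Flushable. A hedged sketch of that pattern (SimplePrinter and its print method are invented for illustration):

import java.io.Flushable;
import java.io.IOException;

// Illustrative only: a printer that writes to an arbitrary Appendable and
// forwards flush() when the target supports it.
public class SimplePrinter implements Flushable {

    private final Appendable out;

    public SimplePrinter(Appendable out) {
        this.out = out;
    }

    public void print(CharSequence value) throws IOException {
        out.append(value).append(',');
    }

    @Override
    public void flush() throws IOException {
        if (out instanceof Flushable) {
            ((Flushable) out).flush();
        }
    }
}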
From source file com.healthmarketscience.jackcess.PageChannel.java
/**
 * Reads and writes individual pages in a database file
 * @author Tim McCune
 */
public class PageChannel implements Channel, Flushable {
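A class implementing both Channel and Flushable commonly wraps a FileChannel and forwards both concerns to it. The sketch below shows only that delegation and omits the page-level buffering the real PageChannel performs; the wrapper name is invented.

import java.io.Flushable;
import java.io.IOException;
import java.nio.channels.Channel;
import java.nio.channels.FileChannel;

// Sketch: Channel methods and flush() are delegated to an underlying FileChannel.
public class FileBackedChannel implements Channel, Flushable {

    private final FileChannel delegate;

    public FileBackedChannel(FileChannel delegate) {
        this.delegate = delegate;
    }

    @Override
    public boolean isOpen() {
        return delegate.isOpen();
    }

    @Override
    public void close() throws IOException {
        delegate.close();
    }

    @Override
    public void flush() throws IOException {
        // force(true) flushes both file content and metadata to the storage device
        delegate.force(true);
    }
}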
From source file com.magnet.tools.tests.CukeCommandExecutor.java
/**
 * Executor for commands within cucumber test
 */
class CukeCommandExecutor extends DefaultExecutor implements Closeable, Flushable {

    private final String command;
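DefaultExecutor here is Apache Commons Exec. A class like this typically captures process output and exposes flush()/close() to drain it; the simplified sketch below ignores command execution and only illustrates the Closeable/Flushable pairing (all names are made up).

import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
import java.io.OutputStream;

// Sketch: an executor-like wrapper that owns an output sink for captured process
// output and honours both flush() and close() on that sink.
class OutputCapturingExecutor implements Closeable, Flushable {

    private final String command;
    private final OutputStream capturedOutput;

    OutputCapturingExecutor(String command, OutputStream capturedOutput) {
        this.command = command;
        this.capturedOutput = capturedOutput;
    }

    @Override
    public void flush() throws IOException {
        capturedOutput.flush();
    }

    @Override
    public void close() throws IOException {
        // flush before closing so no captured output is lost
        flush();
        capturedOutput.close();
    }
}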
From source file org.springframework.integration.metadata.PropertiesPersistingMetadataStore.java
/**
 * Properties file-based implementation of {@link MetadataStore}. To avoid conflicts,
 * each instance should be constructed with a unique key from which a unique file name
 * will be generated.
* By default, the properties file will be
* {@code 'java.io.tmpdir' + "/spring-integration/metadata-store.properties"},
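The store keeps metadata in an in-memory java.util.Properties and only persists it when flushed. A rough sketch of that idea, with the class name and write-path handling invented for illustration:

import java.io.Flushable;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Properties;

// Sketch: metadata lives in memory and is written out as a properties file on flush().
public class PropertiesBackedStore implements Flushable {

    private final Properties metadata = new Properties();
    private final Path file = Paths.get(System.getProperty("java.io.tmpdir"),
            "spring-integration", "metadata-store.properties");

    public void put(String key, String value) {
        metadata.setProperty(key, value);
    }

    @Override
    public void flush() throws IOException {
        Files.createDirectories(file.getParent());
        try (OutputStream out = Files.newOutputStream(file)) {
            metadata.store(out, "metadata store");
        }
    }
}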
From source file com.healthmarketscience.jackcess.impl.PageChannel.java
/**
 * Reads and writes individual pages in a database file
 * @author Tim McCune
 */
public class PageChannel implements Channel, Flushable {
From source file de.hpi.fgis.hdrs.client.TripleOutputStream.java
/**
* This class is used by client applications to write
 * triples into an HDRS store.
* @author hefenbrock
*
*/
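A triple output stream of this kind generally buffers triples client-side, with flush() forcing the buffered batch over the network. The HDRS client API is not shown in the excerpt, so the sketch below stands in a generic, hypothetical sendBatch step:

import java.io.Flushable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

// Sketch: triples are collected in a local buffer; flush() ships the batch and clears it.
public class BufferingTripleWriter implements Flushable {

    private final List<String> buffer = new ArrayList<>();

    public void write(String subject, String predicate, String object) throws IOException {
        buffer.add(subject + " " + predicate + " " + object);
        if (buffer.size() >= 1000) {
            flush(); // keep the client-side buffer bounded
        }
    }

    @Override
    public void flush() throws IOException {
        if (buffer.isEmpty()) {
            return;
        }
        sendBatch(buffer); // hypothetical transport call into the store
        buffer.clear();
    }

    private void sendBatch(List<String> batch) throws IOException {
        // placeholder for the actual network transport; intentionally left empty here
    }
}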
From source file org.apache.streams.hdfs.WebHdfsPersistWriter.java
import java.net.URI;
import java.net.URISyntaxException;
import java.security.PrivilegedExceptionAction;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
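For an HDFS-backed writer, flush() typically drains the local character buffer and then calls hflush() on the FSDataOutputStream so the data becomes visible to readers. A hedged sketch assuming the standard Hadoop FileSystem API; the class and field names are illustrative:

import java.io.Flushable;
import java.io.IOException;
import java.io.OutputStreamWriter;

import org.apache.hadoop.fs.FSDataOutputStream;

// Sketch: flush the character-level buffer first, then push bytes out to the datanodes.
public class HdfsFlushingWriter implements Flushable {

    private final FSDataOutputStream hdfsStream;
    private final OutputStreamWriter writer;

    public HdfsFlushingWriter(FSDataOutputStream hdfsStream) {
        this.hdfsStream = hdfsStream;
        this.writer = new OutputStreamWriter(hdfsStream);
    }

    public void write(String line) throws IOException {
        writer.write(line);
        writer.write('\n');
    }

    @Override
    public void flush() throws IOException {
        writer.flush();      // drain the character encoder's buffer
        hdfsStream.hflush(); // make the bytes visible to new readers
    }
}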
From source file org.apache.hadoop.yarn.client.api.impl.TimelineWriter.java
/**
 * Base writer class to write the Timeline data.
 */
@Private
@Unstable
public abstract class TimelineWriter implements Flushable {
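An abstract base writer can fix the Flushable contract while leaving flush() abstract, so each concrete timeline writer decides how buffered entities are pushed. A minimal sketch of that shape (everything except flush() is invented):

import java.io.Flushable;
import java.io.IOException;

// Sketch: the base class defines the write/flush contract; subclasses supply the transport.
public abstract class BaseTimelineWriter implements Flushable {

    // hypothetical write hook; concrete writers buffer the entity however they like
    public abstract void write(String entity) throws IOException;

    // concrete writers push whatever they have buffered when flush() is called
    @Override
    public abstract void flush() throws IOException;
}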
From source file org.apache.streams.mongo.MongoPersistWriter.java
public class MongoPersistWriter implements StreamsPersistWriter, Runnable, Flushable, Closeable {

    public static final String STREAMS_ID = "MongoPersistWriter";

    private static final Logger LOGGER = LoggerFactory.getLogger(MongoPersistWriter.class);
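A persist writer that is Runnable, Flushable, and Closeable usually lets a background thread drain an internal queue, with flush() forcing an immediate drain. The sketch below assumes the MongoDB Java driver's MongoCollection/Document API and invents the queue handling:

import java.io.Closeable;
import java.io.Flushable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentLinkedQueue;

import com.mongodb.client.MongoCollection;
import org.bson.Document;

// Sketch: documents are queued by callers; the background run() loop and flush()
// both drain the queue into a single insertMany call.
public class QueueingMongoWriter implements Runnable, Flushable, Closeable {

    private final MongoCollection<Document> collection;
    private final ConcurrentLinkedQueue<Document> queue = new ConcurrentLinkedQueue<>();
    private volatile boolean running = true;

    public QueueingMongoWriter(MongoCollection<Document> collection) {
        this.collection = collection;
    }

    public void write(Document doc) {
        queue.offer(doc);
    }

    @Override
    public void run() {
        while (running) {
            drain();
            try {
                Thread.sleep(100); // simple polling interval for the sketch
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    @Override
    public void flush() throws IOException {
        drain();
    }

    @Override
    public void close() throws IOException {
        running = false;
        flush();
    }

    private void drain() {
        List<Document> batch = new ArrayList<>();
        Document doc;
        while ((doc = queue.poll()) != null) {
            batch.add(doc);
        }
        if (!batch.isEmpty()) {
            collection.insertMany(batch);
        }
    }
}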