Example usage for java.util EnumSet of

List of usage examples for java.util EnumSet of

Introduction

On this page you can find example usages of java.util.EnumSet.of(E).

Prototype

public static <E extends Enum<E>> EnumSet<E> of(E e) 

Source Link

Document

Creates an enum set initially containing the specified element.

Usage

From source file:com.caseystella.analytics.outlier.streaming.mad.SketchyMovingMADIntegrationTest.java

/**
 * Runs the outlier detector over every labelled benchmark data file, writes a
 * per-file plot of the scored points, and prints per-file and overall
 * confusion matrices plus global expected-outlier score statistics.
 *
 * @throws IOException if the label file or a benchmark data file cannot be read
 */
@Test
public void runAccuracyBenchmark() throws IOException {
    // Map of data-file name -> labelled outlier timestamps (as strings).
    Map<String, List<String>> benchmarks = JSONUtil.INSTANCE.load(
            new FileInputStream(new File(new File(benchmarkRoot), "combined_labels.json")),
            new TypeReference<Map<String, List<String>>>() {
            });
    Assert.assertTrue(benchmarks.size() > 0);
    Map<ConfusionMatrix.ConfusionEntry, Long> overallConfusionMatrix = new HashMap<>();
    DescriptiveStatistics globalExpectedScores = new DescriptiveStatistics();
    long total = 0;
    for (Map.Entry<String, List<String>> kv : benchmarks.entrySet()) {
        File dataFile = new File(new File(benchmarkRoot), kv.getKey());
        File plotFile = new File(new File(benchmarkRoot), kv.getKey() + ".dat");
        Assert.assertTrue(dataFile.exists());
        Set<Long> expectedOutliers = Sets.newHashSet(Iterables.transform(kv.getValue(), STR_TO_TS));
        OutlierRunner runner = new OutlierRunner(outlierConfig, extractorConfigStr);
        // Single-element arrays so the anonymous callback below can mutate them.
        final long[] numObservations = { 0L };
        final long[] lastTimestamp = { Long.MIN_VALUE };
        final DescriptiveStatistics timeDiffStats = new DescriptiveStatistics();
        final Map<Long, Outlier> outlierMap = new HashMap<>();
        final List<Outlier> outliers;
        // try-with-resources: the plot file is closed even if runner.run(...) throws.
        try (PrintWriter pw = new PrintWriter(plotFile)) {
            outliers = runner.run(dataFile, 1, EnumSet.of(Severity.SEVERE_OUTLIER),
                    new Function<Map.Entry<DataPoint, Outlier>, Void>() {
                        @Nullable
                        @Override
                        public Void apply(@Nullable Map.Entry<DataPoint, Outlier> kv) {
                            DataPoint dataPoint = kv.getKey();
                            Outlier outlier = kv.getValue();
                            // One plot line per observation: timestamp, value, classification.
                            pw.println(dataPoint.getTimestamp() + " " + outlier.getDataPoint().getValue() + " "
                                    + ((outlier.getSeverity() == Severity.SEVERE_OUTLIER) ? "outlier" : "normal"));
                            outlierMap.put(dataPoint.getTimestamp(), outlier);
                            numObservations[0] += 1;
                            // Track inter-arrival times to characterize the series' sampling rate.
                            if (lastTimestamp[0] != Long.MIN_VALUE) {
                                timeDiffStats.addValue(dataPoint.getTimestamp() - lastTimestamp[0]);
                            }
                            lastTimestamp[0] = dataPoint.getTimestamp();
                            return null;
                        }
                    });
        }
        total += numObservations[0];
        Set<Long> calculatedOutliers = Sets
                .newHashSet(Iterables.transform(outliers, OutlierRunner.OUTLIER_TO_TS));
        double stdDevDiff = Math.sqrt(timeDiffStats.getVariance());
        System.out.println("Running data from " + kv.getKey() + " - E[time delta]: "
                + ConfusionMatrix.timeConversion((long) timeDiffStats.getMean()) + ", StdDev[time delta]: "
                + ConfusionMatrix.timeConversion((long) stdDevDiff) + " mean: " + runner.getMean());
        Map<ConfusionMatrix.ConfusionEntry, Long> confusionMatrix = ConfusionMatrix.getConfusionMatrix(
                expectedOutliers, calculatedOutliers, numObservations[0], (long) timeDiffStats.getMean(), 3 //stdDevDiff > 30000?0:3
                , outlierMap, globalExpectedScores);

        ConfusionMatrix.printConfusionMatrix(confusionMatrix);
        overallConfusionMatrix = ConfusionMatrix.merge(overallConfusionMatrix, confusionMatrix);
    }
    System.out.println("Really ran " + total);
    ConfusionMatrix.printConfusionMatrix(overallConfusionMatrix);
    ConfusionMatrix.printStats("Global Expected Outlier Scores", globalExpectedScores);
}

From source file:ua.com.rocketlv.spb.Application.java

@Bean
public ServletContextInitializer servletContextInitializer() {
    return servletContext -> {
        // Interpret request/response bodies as UTF-8, but do not override an
        // encoding that is already set (forceEncoding = false).
        final CharacterEncodingFilter encodingFilter = new CharacterEncodingFilter();
        encodingFilter.setEncoding("UTF-8");
        encodingFilter.setForceEncoding(false);

        servletContext.addFilter("characterEncodingFilter", encodingFilter)
                .addMappingForUrlPatterns(EnumSet.of(DispatcherType.REQUEST), false, "/*");
    };
}

From source file:com.microsoft.windowsazure.services.media.samples.contentprotection.playreadywidevine.Program.java

/**
 * Creates a Media Services asset, uploads the named classpath resource into it
 * as a block blob, and registers the uploaded file with Media Services.
 * The temporary upload locator and access policy are deleted before returning.
 *
 * @param fileName name of the local resource to upload; also used as the asset name
 * @return the created asset
 * @throws ServiceException        if a Media Services call fails
 * @throws FileNotFoundException   if the local file cannot be opened
 * @throws NoSuchAlgorithmException if blob upload hashing is unavailable
 */
private static AssetInfo uploadFileAndCreateAsset(String fileName)
        throws ServiceException, FileNotFoundException, NoSuchAlgorithmException {
    WritableBlobContainerContract uploader;
    AssetInfo resultAsset;
    AccessPolicyInfo uploadAccessPolicy;
    LocatorInfo uploadLocator = null;

    // Create an Asset
    resultAsset = mediaService.create(Asset.create().setName(fileName).setAlternateId("altId"));
    System.out.println("Created Asset " + fileName);

    // Create an AccessPolicy that provides Write access for 15 minutes
    uploadAccessPolicy = mediaService
            .create(AccessPolicy.create("uploadAccessPolicy", 15.0, EnumSet.of(AccessPolicyPermission.WRITE)));

    // Create a Locator using the AccessPolicy and Asset
    uploadLocator = mediaService
            .create(Locator.create(uploadAccessPolicy.getId(), resultAsset.getId(), LocatorType.SAS));

    // Create the Blob Writer using the Locator
    uploader = mediaService.createBlobWriter(uploadLocator);

    // The local file that will be uploaded to your Media Services account
    InputStream input = new FileInputStream(
            new File(Program.class.getClassLoader().getResource("").getPath() + fileName));

    System.out.println("Uploading " + fileName);

    try {
        // Upload the local file to the asset
        uploader.createBlockBlob(fileName, input);
    } finally {
        // Always release the file handle; the declared throws clause does not
        // include IOException, so a close failure is logged as best-effort only.
        try {
            input.close();
        } catch (java.io.IOException ignored) {
            // best-effort close; upload outcome has already been determined
        }
    }

    // Inform Media Services about the uploaded files
    mediaService.action(AssetFile.createFileInfos(resultAsset.getId()));
    System.out.println("Uploaded Asset File " + fileName);

    // Clean up the temporary upload locator and access policy.
    mediaService.delete(Locator.delete(uploadLocator.getId()));
    mediaService.delete(AccessPolicy.delete(uploadAccessPolicy.getId()));

    return resultAsset;
}

From source file:com.couchbase.lite.AttachmentsTest.java

/**
 * Exercises the attachment lifecycle across three revisions of one document:
 * insert, stub/inline dict representations, copy to a new revision, replace in
 * a third revision, and finally verify the blob store before and after compaction.
 *
 * @throws Exception on any database failure
 */
@SuppressWarnings("unchecked")
public void testAttachments() throws Exception {

    String testAttachmentName = "test_attachment";

    BlobStore attachments = database.getAttachments();

    // The blob store starts out empty.
    Assert.assertEquals(0, attachments.count());
    Assert.assertEquals(new HashSet<Object>(), attachments.allKeys());

    Status status = new Status();
    Map<String, Object> rev1Properties = new HashMap<String, Object>();
    rev1Properties.put("foo", 1);
    rev1Properties.put("bar", false);
    RevisionInternal rev1 = database.putRevision(new RevisionInternal(rev1Properties, database), null, false,
            status);

    Assert.assertEquals(Status.CREATED, status.getCode());

    byte[] attach1 = "This is the body of attach1".getBytes();
    database.insertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach1), rev1.getSequence(),
            testAttachmentName, "text/plain", rev1.getGeneration());
    Assert.assertEquals(Status.CREATED, status.getCode());

    //We must set the no_attachments column for the rev to false, as we are using an internal
    //private API call above (database.insertAttachmentForSequenceWithNameAndType) which does
    //not set the no_attachments column on revs table
    try {
        ContentValues args = new ContentValues();
        // FIX: the ContentValues key is the bare column name; the previous key
        // "no_attachments=" (trailing '=') would render as "no_attachments= = ?"
        // in the generated UPDATE statement and fail with a SQL syntax error.
        args.put("no_attachments", false);
        database.getDatabase().update("revs", args, "sequence=?",
                new String[] { String.valueOf(rev1.getSequence()) });
    } catch (SQLException e) {
        Log.e(Database.TAG, "Error setting rev1 no_attachments to false", e);
        throw new CouchbaseLiteException(Status.INTERNAL_SERVER_ERROR);
    }

    // Round-trip the attachment body through the store.
    Attachment attachment = database.getAttachmentForSequence(rev1.getSequence(), testAttachmentName);
    Assert.assertEquals("text/plain", attachment.getContentType());
    InputStream is = attachment.getContent();
    byte[] data = IOUtils.toByteArray(is);
    is.close();
    Assert.assertTrue(Arrays.equals(attach1, data));

    // Expected metadata for the attachment when returned as a stub.
    Map<String, Object> innerDict = new HashMap<String, Object>();
    innerDict.put("content_type", "text/plain");
    innerDict.put("digest", "sha1-gOHUOBmIMoDCrMuGyaLWzf1hQTE=");
    innerDict.put("length", 27);
    innerDict.put("stub", true);
    innerDict.put("revpos", 1);
    Map<String, Object> attachmentDict = new HashMap<String, Object>();
    attachmentDict.put(testAttachmentName, innerDict);

    Map<String, Object> attachmentDictForSequence = database.getAttachmentsDictForSequenceWithContent(
            rev1.getSequence(), EnumSet.noneOf(Database.TDContentOptions.class));
    Assert.assertEquals(attachmentDict, attachmentDictForSequence);

    RevisionInternal gotRev1 = database.getDocumentWithIDAndRev(rev1.getDocId(), rev1.getRevId(),
            EnumSet.noneOf(Database.TDContentOptions.class));
    Map<String, Object> gotAttachmentDict = (Map<String, Object>) gotRev1.getProperties().get("_attachments");
    Assert.assertEquals(attachmentDict, gotAttachmentDict);

    // Check the attachment dict, with attachments included:
    innerDict.remove("stub");
    innerDict.put("data", Base64.encodeBytes(attach1));
    attachmentDictForSequence = database.getAttachmentsDictForSequenceWithContent(rev1.getSequence(),
            EnumSet.of(Database.TDContentOptions.TDIncludeAttachments));
    Assert.assertEquals(attachmentDict, attachmentDictForSequence);

    gotRev1 = database.getDocumentWithIDAndRev(rev1.getDocId(), rev1.getRevId(),
            EnumSet.of(Database.TDContentOptions.TDIncludeAttachments));
    gotAttachmentDict = (Map<String, Object>) gotRev1.getProperties().get("_attachments");
    Assert.assertEquals(attachmentDict, gotAttachmentDict);

    // Add a second revision that doesn't update the attachment:
    Map<String, Object> rev2Properties = new HashMap<String, Object>();
    rev2Properties.put("_id", rev1.getDocId());
    rev2Properties.put("foo", 2);
    rev2Properties.put("bazz", false);
    RevisionInternal rev2 = database.putRevision(new RevisionInternal(rev2Properties, database),
            rev1.getRevId(), false, status);
    Assert.assertEquals(Status.CREATED, status.getCode());

    database.copyAttachmentNamedFromSequenceToSequence(testAttachmentName, rev1.getSequence(),
            rev2.getSequence());

    // Add a third revision of the same document:
    Map<String, Object> rev3Properties = new HashMap<String, Object>();
    rev3Properties.put("_id", rev2.getDocId());
    rev3Properties.put("foo", 2);
    rev3Properties.put("bazz", false);
    RevisionInternal rev3 = database.putRevision(new RevisionInternal(rev3Properties, database),
            rev2.getRevId(), false, status);
    Assert.assertEquals(Status.CREATED, status.getCode());

    byte[] attach2 = "<html>And this is attach2</html>".getBytes();
    // NOTE(review): passes rev2's generation while attaching to rev3's
    // sequence — confirm this is intentional.
    database.insertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach2), rev3.getSequence(),
            testAttachmentName, "text/html", rev2.getGeneration());

    // Check the 2nd revision's attachment:
    Attachment attachment2 = database.getAttachmentForSequence(rev2.getSequence(), testAttachmentName);

    Assert.assertEquals("text/plain", attachment2.getContentType());
    InputStream is2 = attachment2.getContent();
    data = IOUtils.toByteArray(is2);
    is2.close();
    Assert.assertTrue(Arrays.equals(attach1, data));

    // Check the 3rd revision's attachment:
    Attachment attachment3 = database.getAttachmentForSequence(rev3.getSequence(), testAttachmentName);
    Assert.assertEquals("text/html", attachment3.getContentType());
    InputStream is3 = attachment3.getContent();
    data = IOUtils.toByteArray(is3);
    is3.close();
    Assert.assertTrue(Arrays.equals(attach2, data));

    // A 'follows' key must not appear at all in the stub dict for rev3.
    Map<String, Object> attachmentDictForRev3 = (Map<String, Object>) database
            .getAttachmentsDictForSequenceWithContent(rev3.getSequence(),
                    EnumSet.noneOf(Database.TDContentOptions.class))
            .get(testAttachmentName);
    if (attachmentDictForRev3.containsKey("follows")) {
        if (((Boolean) attachmentDictForRev3.get("follows")).booleanValue() == true) {
            throw new RuntimeException("Did not expected attachment dict 'follows' key to be true");
        } else {
            throw new RuntimeException("Did not expected attachment dict to have 'follows' key");
        }
    }

    // Examine the attachment store:
    Assert.assertEquals(2, attachments.count());
    Set<BlobKey> expected = new HashSet<BlobKey>();
    expected.add(BlobStore.keyForBlob(attach1));
    expected.add(BlobStore.keyForBlob(attach2));

    Assert.assertEquals(expected, attachments.allKeys());

    database.compact(); // This clears the body of the first revision
    Assert.assertEquals(1, attachments.count());

    Set<BlobKey> expected2 = new HashSet<BlobKey>();
    expected2.add(BlobStore.keyForBlob(attach2));
    Assert.assertEquals(expected2, attachments.allKeys());
}

From source file:fr.ritaly.dungeonmaster.ai.CreatureManager.java

/**
 * Returns the directions whose pair of sectors is entirely unoccupied.
 * A direction (e.g. NORTH) is free only when both of its sectors
 * (NORTH_EAST and NORTH_WEST) hold no creature. When several candidates
 * exist, ties are broken randomly; with no creatures at all, a random
 * opposing pair (E/W or N/S) is returned.
 */
private EnumSet<Direction> getFreeDirections() {
    if (creatures == null) {
        // No creature map at all: pick a random pair of opposite directions.
        if (RandomUtils.nextBoolean()) {
            return EnumSet.of(Direction.EAST, Direction.WEST);
        } else {
            return EnumSet.of(Direction.NORTH, Direction.SOUTH);
        }
    }

    final boolean ne = !creatures.containsKey(Sector.NORTH_EAST);
    final boolean nw = !creatures.containsKey(Sector.NORTH_WEST);
    final boolean se = !creatures.containsKey(Sector.SOUTH_EAST);
    final boolean sw = !creatures.containsKey(Sector.SOUTH_WEST);

    final int freeSectors = (ne ? 1 : 0) + (nw ? 1 : 0) + (se ? 1 : 0) + (sw ? 1 : 0);

    switch (freeSectors) {
    case 0:
    case 1:
        // Fewer than two free sectors can never free a whole direction.
        return EnumSet.noneOf(Direction.class);
    case 2:
        // At most one direction can be free; staggered sectors free none.
        if (ne && nw) {
            return EnumSet.of(Direction.NORTH);
        }
        if (se && sw) {
            return EnumSet.of(Direction.SOUTH);
        }
        if (nw && sw) {
            return EnumSet.of(Direction.WEST);
        }
        if (ne && se) {
            return EnumSet.of(Direction.EAST);
        }
        return EnumSet.noneOf(Direction.class);
    case 3: {
        // One direction is certainly free, possibly two: collect then pick.
        final List<Direction> candidates = new ArrayList<Direction>(2);

        if (ne && nw) {
            candidates.add(Direction.NORTH);
        }
        if (se && sw) {
            candidates.add(Direction.SOUTH);
        }
        if (nw && sw) {
            candidates.add(Direction.WEST);
        }
        if (ne && se) {
            candidates.add(Direction.EAST);
        }

        if (candidates.size() > 1) {
            // Break the tie randomly between the two free directions.
            Collections.shuffle(candidates);
        }
        return EnumSet.of(candidates.get(0));
    }
    case 4:
        // Everything is free: choose a random pair of opposite directions.
        if (RandomUtils.nextBoolean()) {
            return EnumSet.of(Direction.EAST, Direction.WEST);
        } else {
            return EnumSet.of(Direction.NORTH, Direction.SOUTH);
        }
    default:
        throw new RuntimeException("Unexpected count <" + freeSectors + ">");
    }
}

From source file:com.mellanox.r4h.TestHFlush.java

/**
 * Calls {@link #doTheJob(Configuration, String, long, short, boolean, EnumSet)}
 * while requiring the semantics of {@link SyncFlag#UPDATE_LENGTH}.
 */
@Test
public void hSyncUpdateLength_01() throws IOException {
    doTheJob(new HdfsConfiguration(), fName, MiniDFSClusterBridge.getAppendTestUtil_BLOCK_SIZE(), (short) 2,
            true, EnumSet.of(SyncFlag.UPDATE_LENGTH));
}

From source file:ch.cyberduck.core.ftp.FTPStatListServiceTest.java

@Test
public void testParseEgnyte() throws Exception {
    // Raw STAT reply lines as returned by an Egnyte FTP server.
    final List<String> statLines = Arrays.asList("200-drwx------   0 - -            0 Jun 17 07:59 core",
            "200 -rw-------   0 David-Kocher -          529 Jun 17 07:59 App.config");
    final FTPFileEntryParser entryParser = new LaxUnixFTPEntryParser();
    final String[] reply = statLines.toArray(new String[statLines.size()]);
    final List<String> stripped = new FTPStatListService(null, entryParser).parse(200, reply);
    assertEquals(2, stripped.size());
    // The "200-"/"200 " reply-code prefixes must have been stripped.
    assertTrue(stripped.contains("drwx------   0 - -            0 Jun 17 07:59 core"));
    assertTrue(stripped.contains("-rw-------   0 David-Kocher -          529 Jun 17 07:59 App.config"));
    final Path workdir = new Path("/cyberduck", EnumSet.of(Path.Type.directory));
    final AttributedList<Path> entries = new FTPListResponseReader(entryParser, true).read(workdir, stripped,
            new DisabledListProgressListener());
    assertEquals(2, entries.size());
}

From source file:net.solarnetwork.node.dao.jdbc.test.JdbcSettingsDaoTests.java

@Test
public void updateVolatile() {
    // Seed a volatile setting, then clear any expectations that recorded.
    insertVolatile();
    EasyMock.reset(eventAdminMock);
    replay(eventAdminMock);

    // Store an update to the same key/type, flagged as volatile.
    final Setting updated = new Setting();
    updated.setKey(TEST_KEY);
    updated.setType(TEST_TYPE);
    updated.setValue("foo");
    updated.setFlags(EnumSet.of(SettingFlag.Volatile));
    settingDao.storeSetting(updated);

    // Verify the event admin saw no unexpected interactions.
    verify(eventAdminMock);
}

From source file:com.couchbase.touchdb.testapp.tests.HeavyAttachments.java

/**
 * Builds a three-revision document exercising the attachment store (insert,
 * stub/inline dicts, copy between revisions, replacement, compaction), then
 * optionally attaches a batch of images and/or pushes the database to the
 * remote replication target.
 *
 * @param heavyAttachments when true, attaches images to the last revision and
 *                         re-reads the enlarged document
 * @param pushAttachments  when true, replicates to the remote URL and waits
 *                         for the push to finish
 * @throws Exception on any database or replication failure
 */
@SuppressWarnings("unchecked")
public void buildAttachments(boolean heavyAttachments, boolean pushAttachments) throws Exception {

    TDBlobStore attachments = database.getAttachments();

    // The blob store starts out empty.
    Assert.assertEquals(0, attachments.count());
    Assert.assertEquals(new HashSet<Object>(), attachments.allKeys());

    TDStatus status = new TDStatus();
    Map<String, Object> rev1Properties = new HashMap<String, Object>();
    rev1Properties.put("foo", 1);
    rev1Properties.put("bar", false);
    TDRevision rev1 = database.putRevision(new TDRevision(rev1Properties), null, false, status);

    Assert.assertEquals(TDStatus.CREATED, status.getCode());

    byte[] attach1 = "This is the body of attach1".getBytes();
    status = database.insertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach1),
            rev1.getSequence(), "attach", "text/plain", rev1.getGeneration());
    Assert.assertEquals(TDStatus.CREATED, status.getCode());

    // Round-trip the attachment body through the store.
    TDAttachment attachment = database.getAttachmentForSequence(rev1.getSequence(), "attach", status);
    Assert.assertEquals(TDStatus.OK, status.getCode());
    Assert.assertEquals("text/plain", attachment.getContentType());
    byte[] data = IOUtils.toByteArray(attachment.getContentStream());
    Assert.assertTrue(Arrays.equals(attach1, data));

    // Expected stub metadata for the attachment.
    Map<String, Object> innerDict = new HashMap<String, Object>();
    innerDict.put("content_type", "text/plain");
    innerDict.put("digest", "sha1-gOHUOBmIMoDCrMuGyaLWzf1hQTE=");
    innerDict.put("length", 27);
    innerDict.put("stub", true);
    innerDict.put("revpos", 1);
    Map<String, Object> attachmentDict = new HashMap<String, Object>();
    attachmentDict.put("attach", innerDict);

    Map<String, Object> attachmentDictForSequence = database
            .getAttachmentsDictForSequenceWithContent(rev1.getSequence(), false);
    Assert.assertEquals(attachmentDict, attachmentDictForSequence);

    TDRevision gotRev1 = database.getDocumentWithIDAndRev(rev1.getDocId(), rev1.getRevId(),
            EnumSet.noneOf(TDDatabase.TDContentOptions.class));
    Map<String, Object> gotAttachmentDict = (Map<String, Object>) gotRev1.getProperties().get("_attachments");
    Assert.assertEquals(attachmentDict, gotAttachmentDict);

    // Check the attachment dict, with attachments included:
    innerDict.remove("stub");
    innerDict.put("data", Base64.encodeBytes(attach1));
    attachmentDictForSequence = database.getAttachmentsDictForSequenceWithContent(rev1.getSequence(), true);
    Assert.assertEquals(attachmentDict, attachmentDictForSequence);

    gotRev1 = database.getDocumentWithIDAndRev(rev1.getDocId(), rev1.getRevId(),
            EnumSet.of(TDDatabase.TDContentOptions.TDIncludeAttachments));
    gotAttachmentDict = (Map<String, Object>) gotRev1.getProperties().get("_attachments");
    Assert.assertEquals(attachmentDict, gotAttachmentDict);

    // Add a second revision that doesn't update the attachment:
    Map<String, Object> rev2Properties = new HashMap<String, Object>();
    rev2Properties.put("_id", rev1.getDocId());
    rev2Properties.put("foo", 2);
    rev2Properties.put("bazz", false);
    TDRevision rev2 = database.putRevision(new TDRevision(rev2Properties), rev1.getRevId(), false, status);
    Assert.assertEquals(TDStatus.CREATED, status.getCode());

    status = database.copyAttachmentNamedFromSequenceToSequence("attach", rev1.getSequence(),
            rev2.getSequence());
    Assert.assertEquals(TDStatus.OK, status.getCode());

    // Add a third revision of the same document:
    Map<String, Object> rev3Properties = new HashMap<String, Object>();
    rev3Properties.put("_id", rev2.getDocId());
    rev3Properties.put("foo", 2);
    rev3Properties.put("bazz", false);
    TDRevision rev3 = database.putRevision(new TDRevision(rev3Properties), rev2.getRevId(), false, status);
    Assert.assertEquals(TDStatus.CREATED, status.getCode());

    byte[] attach2 = "<html>And this is attach2</html>".getBytes();
    // NOTE(review): uses rev2's generation while attaching to rev3's
    // sequence — confirm this is intentional.
    status = database.insertAttachmentForSequenceWithNameAndType(new ByteArrayInputStream(attach2),
            rev3.getSequence(), "attach", "text/html", rev2.getGeneration());
    Assert.assertEquals(TDStatus.CREATED, status.getCode());

    // Check the 2nd revision's attachment:
    TDAttachment attachment2 = database.getAttachmentForSequence(rev2.getSequence(), "attach", status);
    Assert.assertEquals(TDStatus.OK, status.getCode());
    Assert.assertEquals("text/plain", attachment2.getContentType());
    data = IOUtils.toByteArray(attachment2.getContentStream());
    Assert.assertTrue(Arrays.equals(attach1, data));

    // Check the 3rd revision's attachment:
    TDAttachment attachment3 = database.getAttachmentForSequence(rev3.getSequence(), "attach", status);
    Assert.assertEquals(TDStatus.OK, status.getCode());
    Assert.assertEquals("text/html", attachment3.getContentType());
    data = IOUtils.toByteArray(attachment3.getContentStream());
    Assert.assertTrue(Arrays.equals(attach2, data));

    // Examine the attachment store:
    Assert.assertEquals(2, attachments.count());
    Set<TDBlobKey> expected = new HashSet<TDBlobKey>();
    expected.add(TDBlobStore.keyForBlob(attach1));
    expected.add(TDBlobStore.keyForBlob(attach2));

    Assert.assertEquals(expected, attachments.allKeys());

    status = database.compact(); // This clears the body of the first revision
    Assert.assertEquals(TDStatus.OK, status.getCode());
    Assert.assertEquals(1, attachments.count());

    Set<TDBlobKey> expected2 = new HashSet<TDBlobKey>();
    expected2.add(TDBlobStore.keyForBlob(attach2));
    Assert.assertEquals(expected2, attachments.allKeys());
    TDRevision lastRev = rev3;

    if (heavyAttachments) {
        // Image counts that were found workable on devices with the given RAM;
        // only the 1024MB figure is actually used below.
        int numberOfImagesOkayOn512Ram = 4;
        int numberOfImagesOkayOn768Ram = 6;
        int numberOfImagesOkayOn1024Ram = 8;

        lastRev = attachImages(numberOfImagesOkayOn1024Ram, rev3);
        /* query the db for that doc */
        TDRevision largerRev = database.getDocumentWithIDAndRev(lastRev.getDocId(), lastRev.getRevId(),
                EnumSet.noneOf(TDDatabase.TDContentOptions.class));
        attachmentDict = (Map<String, Object>) largerRev.getProperties().get("_attachments");
        Assert.assertNotNull(attachmentDict);
    }
    if (pushAttachments) {
        // Push the local database (create-target mode) to a freshly-wiped remote.
        URL remote = getReplicationURL();
        deleteRemoteDB(remote);
        final TDReplicator repl = database.getReplicator(remote, true, false, server.getWorkExecutor());
        ((TDPusher) repl).setCreateTarget(true);
        try {
            runTestOnUiThread(new Runnable() {

                @Override
                public void run() {
                    // Push them to the remote:
                    repl.start();
                    Assert.assertTrue(repl.isRunning());
                }
            });
        } catch (Throwable e) {
            e.printStackTrace();
        }

        // Poll until the push replication completes.
        while (repl.isRunning()) {
            Log.i(TAG, "Waiting for replicator to finish");
            Thread.sleep(1000);
        }

        /* Ensure that the last version of the doc is the last thing replicated. */
        Assert.assertTrue(lastRev.getRevId().startsWith(repl.getLastSequence()));

    }

}

From source file:ch.cyberduck.core.cryptomator.SFTPMoveFeatureTest.java

@Test
public void testMoveToDifferentFolderCryptomator() throws Exception {
    // Connect to the test SFTP server with credentials from system properties.
    final Host host = new Host(new SFTPProtocol(), "test.cyberduck.ch",
            new Credentials(System.getProperties().getProperty("sftp.user"),
                    System.getProperties().getProperty("sftp.password")));
    final SFTPSession session = new SFTPSession(host);
    session.open(new DisabledHostKeyCallback());
    session.login(new DisabledPasswordStore(), new DisabledLoginCallback(), new DisabledCancelCallback());
    final Path home = new SFTPHomeDirectoryService(session).find();
    // Randomized paths: a vault, a source file, and a target file in a new subfolder.
    final Path vault = new Path(home, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory));
    final Path source = new Path(vault, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    final Path targetFolder = new Path(vault, UUID.randomUUID().toString(), EnumSet.of(Path.Type.directory));
    final Path target = new Path(targetFolder, UUID.randomUUID().toString(), EnumSet.of(Path.Type.file));
    // Create the Cryptomator vault and register it so vault-aware features apply.
    final CryptoVault cryptomator = new CryptoVault(vault, new DisabledPasswordStore());
    cryptomator.create(session, null, new VaultCredentials("test"));
    session.withRegistry(
            new DefaultVaultRegistry(new DisabledPasswordStore(), new DisabledPasswordCallback(), cryptomator));
    // Create the encrypted source file and the encrypted target folder.
    new CryptoTouchFeature<Void>(session,
            new DefaultTouchFeature<Void>(new DefaultUploadFeature<Void>(new SFTPWriteFeature(session))),
            new SFTPWriteFeature(session), cryptomator).touch(source, new TransferStatus());
    assertTrue(new CryptoFindFeature(session, new DefaultFindFeature(session), cryptomator).find(source));
    new CryptoDirectoryFeature<Void>(session, new SFTPDirectoryFeature(session), new SFTPWriteFeature(session),
            cryptomator).mkdir(targetFolder, null, new TransferStatus());
    assertTrue(new CryptoFindFeature(session, new DefaultFindFeature(session), cryptomator).find(targetFolder));
    // Move the file into the other folder inside the vault.
    new CryptoMoveFeature(session, new SFTPMoveFeature(session), new SFTPDeleteFeature(session), cryptomator)
            .move(source, target, new TransferStatus(), new Delete.Callback() {
                @Override
                public void delete(final Path file) {
                    //
                }
            }, new DisabledConnectionCallback());
    // After the move, the source is gone and the target exists.
    assertFalse(new CryptoFindFeature(session, new DefaultFindFeature(session), cryptomator).find(source));
    assertTrue(new CryptoFindFeature(session, new DefaultFindFeature(session), cryptomator).find(target));
    // Clean up: delete the target, its folder and the vault, then disconnect.
    new CryptoDeleteFeature(session, new SFTPDeleteFeature(session), cryptomator).delete(
            Arrays.asList(target, targetFolder, vault), new DisabledLoginCallback(),
            new Delete.DisabledCallback());
    session.close();
}