Example usage for java.util Collections EMPTY_MAP

List of usage examples for java.util Collections EMPTY_MAP

Introduction

On this page you can find usage examples for java.util.Collections.EMPTY_MAP, drawn from open-source projects.

Prototype

public static final Map EMPTY_MAP

Document

The empty map (immutable).
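
Before the project examples below, here is a minimal, self-contained sketch of the common pattern of returning Collections.EMPTY_MAP as a null-safe default; the class and method names are illustrative and not taken from any of the projects listed. Because EMPTY_MAP is a raw Map, assigning it to a parameterized type triggers an unchecked warning, which callers typically suppress or sidestep by using the generified Collections.emptyMap() factory instead.

import java.util.Collections;
import java.util.Map;

public class EmptyMapExample {

    // Returning EMPTY_MAP instead of null spares callers a null check,
    // mirroring the getProperties()/getEvents() examples further down.
    @SuppressWarnings("unchecked")
    static Map<String, String> orEmpty(Map<String, String> configured) {
        return configured != null ? configured : Collections.EMPTY_MAP;
    }

    public static void main(String[] args) {
        Map<String, String> settings = orEmpty(null);
        System.out.println(settings.isEmpty()); // prints: true

        // Type-safe alternative added in Java 5; no unchecked warning.
        Map<String, String> typed = Collections.emptyMap();
        System.out.println(typed.size()); // prints: 0
    }
}

The returned map is an immutable singleton, so any attempt to put an entry throws UnsupportedOperationException.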

Usage

From source file:com.vuze.android.remote.rpc.TransmissionRPC.java

private void sendRequest(final String id, final Map data, final ReplyMapReceivedListener l) {
    if (id == null || data == null) {
        if (AndroidUtils.DEBUG_RPC) {
            Log.e(TAG, "sendRequest(" + id + "," + JSONUtils.encodeToJSON(data) + "," + l + ")");
        }
        return;
    }
    new Thread(new Runnable() {
        @SuppressWarnings("unchecked")
        @Override
        public void run() {
            data.put("random", Integer.toHexString(cacheBuster++));
            try {
                Map reply = RestJsonClient.connect(id, rpcURL, data, headers, creds, supportsGZIP);

                String result = MapUtils.getMapString(reply, "result", "");
                if (l != null) {
                    if (result.equals("success")) {
                        l.rpcSuccess(id, MapUtils.getMapMap(reply, "arguments", Collections.EMPTY_MAP));
                    } else {
                        if (AndroidUtils.DEBUG_RPC) {
                            Log.d(TAG, id + "] rpcFailure: " + result);
                        }
                        // clean up things like:
                        // org.gudy.azureus2.plugins.utils.resourcedownloader.ResourceDownloaderException: http://foo.torrent: I/O Exception while downloading 'http://foo.torrent', Operation timed out
                        result = result.replaceAll("org\\.[a-z.]+:", "");
                        result = result.replaceAll("com\\.[a-z.]+:", "");
                        l.rpcFailure(id, result);
                    }
                }
            } catch (RPCException e) {
                HttpResponse httpResponse = e.getHttpResponse();
                if (httpResponse != null && httpResponse.getStatusLine().getStatusCode() == 409) {
                    if (AndroidUtils.DEBUG_RPC) {
                        Log.d(TAG, "409: retrying");
                    }
                    Header header = httpResponse.getFirstHeader("X-Transmission-Session-Id");
                    headers = new Header[] { header };
                    sendRequest(id, data, l);
                    return;
                }

                Throwable cause = e.getCause();
                if (sessionInfo != null && (cause instanceof HttpHostConnectException)) {
                    RemoteProfile remoteProfile = sessionInfo.getRemoteProfile();
                    if (remoteProfile != null && remoteProfile.getRemoteType() == RemoteProfile.TYPE_CORE) {
                        VuzeRemoteApp.waitForCore(sessionInfo.getCurrentActivity(), 10000);
                        sendRequest(id, data, l);
                        return;
                    }
                }
                if (l != null) {
                    l.rpcError(id, e);
                }
                // TODO: trigger a generic error listener, so we can put a "Could not connect" status text somewhere
            }
        }
    }, "sendRequest" + id).start();
}

From source file:org.apache.nifi.processors.standard.ListenTCPRecord.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final SocketChannelRecordReader socketRecordReader = pollForSocketRecordReader();
    if (socketRecordReader == null) {
        return;
    }

    if (socketRecordReader.isClosed()) {
        getLogger().warn("Unable to read records from {}, socket already closed",
                new Object[] { getRemoteAddress(socketRecordReader) });
        IOUtils.closeQuietly(socketRecordReader); // still need to call close so the overall count is decremented
        return;
    }

    final int recordBatchSize = context.getProperty(RECORD_BATCH_SIZE).asInteger();
    final String readerErrorHandling = context.getProperty(READER_ERROR_HANDLING_STRATEGY).getValue();
    final RecordSetWriterFactory recordSetWriterFactory = context.getProperty(RECORD_WRITER)
            .asControllerService(RecordSetWriterFactory.class);

    // synchronize to ensure there are no stale values in the underlying SocketChannel
    synchronized (socketRecordReader) {
        FlowFile flowFile = session.create();
        try {
            // lazily creating the record reader here b/c we need a flow file, eventually shouldn't have to do this
            RecordReader recordReader = socketRecordReader.getRecordReader();
            if (recordReader == null) {
                recordReader = socketRecordReader.createRecordReader(flowFile, getLogger());
            }

            Record record;
            try {
                record = recordReader.nextRecord();
            } catch (final Exception e) {
                boolean timeout = false;

                // some of the underlying record libraries wrap the real exception in RuntimeException, so check each
                // throwable (starting with the current one) to see if it's a SocketTimeoutException
                Throwable cause = e;
                while (cause != null) {
                    if (cause instanceof SocketTimeoutException) {
                        timeout = true;
                        break;
                    }
                    cause = cause.getCause();
                }

                if (timeout) {
                    getLogger().debug("Timeout reading records, will try again later", e);
                    socketReaders.offer(socketRecordReader);
                    session.remove(flowFile);
                    return;
                } else {
                    throw e;
                }
            }

            if (record == null) {
                getLogger().debug("No records available from {}, closing connection",
                        new Object[] { getRemoteAddress(socketRecordReader) });
                IOUtils.closeQuietly(socketRecordReader);
                session.remove(flowFile);
                return;
            }

            String mimeType = null;
            WriteResult writeResult = null;

            final RecordSchema recordSchema = recordSetWriterFactory.getSchema(Collections.EMPTY_MAP,
                    record.getSchema());
            try (final OutputStream out = session.write(flowFile);
                    final RecordSetWriter recordWriter = recordSetWriterFactory.createWriter(getLogger(),
                            recordSchema, out)) {

                // start the record set and write the first record from above
                recordWriter.beginRecordSet();
                writeResult = recordWriter.write(record);

                while (record != null && writeResult.getRecordCount() < recordBatchSize) {
                    // handle a read failure according to the strategy selected...
                    // if discarding then bounce to the outer catch block which will close the connection and remove the flow file
                    // if keeping then null out the record to break out of the loop, which will transfer what we have and close the connection
                    try {
                        record = recordReader.nextRecord();
                    } catch (final SocketTimeoutException ste) {
                        getLogger().debug("Timeout reading records, will try again later", ste);
                        break;
                    } catch (final Exception e) {
                        if (ERROR_HANDLING_DISCARD.getValue().equals(readerErrorHandling)) {
                            throw e;
                        } else {
                            record = null;
                        }
                    }

                    if (record != null) {
                        writeResult = recordWriter.write(record);
                    }
                }

                writeResult = recordWriter.finishRecordSet();
                recordWriter.flush();
                mimeType = recordWriter.getMimeType();
            }

            // if we didn't write any records then we need to remove the flow file
            if (writeResult.getRecordCount() <= 0) {
                getLogger().debug("Removing flow file, no records were written");
                session.remove(flowFile);
            } else {
                final String sender = getRemoteAddress(socketRecordReader);

                final Map<String, String> attributes = new HashMap<>(writeResult.getAttributes());
                attributes.put(CoreAttributes.MIME_TYPE.key(), mimeType);
                attributes.put("tcp.sender", sender);
                attributes.put("tcp.port", String.valueOf(port));
                attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
                flowFile = session.putAllAttributes(flowFile, attributes);

                final String senderHost = sender.startsWith("/") && sender.length() > 1 ? sender.substring(1)
                        : sender;
                final String transitUri = new StringBuilder().append("tcp").append("://").append(senderHost)
                        .append(":").append(port).toString();
                session.getProvenanceReporter().receive(flowFile, transitUri);

                session.transfer(flowFile, REL_SUCCESS);
            }

            getLogger().debug("Re-queuing connection for further processing...");
            socketReaders.offer(socketRecordReader);

        } catch (Exception e) {
            getLogger().error("Error processing records: " + e.getMessage(), e);
            IOUtils.closeQuietly(socketRecordReader);
            session.remove(flowFile);
            return;
        }
    }
}

From source file:org.apache.atlas.web.resources.EntityJerseyResourceIT.java

@Test(dependsOnMethods = "testSubmitEntity")
public void testAddReferenceProperty() throws Exception {
    //Create new db instance
    Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN);
    String dbName = randomString();
    databaseInstance.set(NAME, dbName);
    databaseInstance.set(QUALIFIED_NAME, dbName);
    databaseInstance.set(CLUSTER_NAME, randomString());
    databaseInstance.set("description", "new database");
    databaseInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName);
    databaseInstance.set("owner", "user1");
    databaseInstance.set(CLUSTER_NAME, "cl1");
    databaseInstance.set("parameters", Collections.EMPTY_MAP);
    databaseInstance.set("location", "/tmp");

    Id dbInstance = createInstance(databaseInstance);
    String dbId = dbInstance._getId();

    //Add reference property
    final String guid = tableId._getId();
    addProperty(guid, "db", dbId);
}

From source file:eu.openanalytics.rsb.RestJobsITCase.java

@SuppressWarnings("unchecked")
private Document doSubmitXmlJob(final String applicationName, final InputStream xmlJob)
        throws IOException, SAXException, XpathException {
    return doTestSubmitXmlJobWithXmlAck(applicationName, xmlJob, Collections.EMPTY_MAP);
}

From source file:org.apache.atlas.web.integration.EntityJerseyResourceIT.java

@Test
public void testAddReferenceProperty() throws Exception {
    String dbName = "db" + randomString();
    String tableName = "table" + randomString();
    Referenceable hiveDBInstance = createHiveDBInstanceBuiltIn(dbName);
    Id dbId = createInstance(hiveDBInstance);
    Referenceable hiveTableInstance = createHiveTableInstanceBuiltIn(dbName, tableName, dbId);
    Id id = createInstance(hiveTableInstance);

    final String guid = id._getId();
    try {
        Assert.assertNotNull(UUID.fromString(guid));
    } catch (IllegalArgumentException e) {
        Assert.fail("Response is not a guid, " + guid);
    }

    //Create new db instance
    dbName = "db" + randomString();
    Referenceable databaseInstance = new Referenceable(DATABASE_TYPE_BUILTIN);
    databaseInstance.set(NAME, dbName);
    databaseInstance.set(QUALIFIED_NAME, dbName);
    databaseInstance.set(CLUSTER_NAME, randomString());
    databaseInstance.set("description", "new database");
    databaseInstance.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, dbName);
    databaseInstance.set("owner", "user1");
    databaseInstance.set(CLUSTER_NAME, "cl1");
    databaseInstance.set("parameters", Collections.EMPTY_MAP);
    databaseInstance.set("location", "/tmp");

    Id dbInstance = createInstance(databaseInstance);
    String newDBId = dbInstance._getId();

    //Add reference property
    addProperty(guid, "db", newDBId);
}

From source file:org.codehaus.groovy.grails.web.servlet.mvc.AbstractGrailsControllerHelper.java

@SuppressWarnings("rawtypes")
private Map initChainModel(HttpServletRequest request) {
    FlashScope fs = grailsAttributes.getFlashScope(request);
    if (fs.containsKey(PROPERTY_CHAIN_MODEL)) {
        Map chainModel = (Map) fs.get(PROPERTY_CHAIN_MODEL);
        if (chainModel == null) {
            chainModel = Collections.EMPTY_MAP;
        }
        return chainModel;
    }
    return Collections.EMPTY_MAP;
}

From source file:com.zenesis.qx.remote.ProxyTypeImpl.java

@Override
public Map<String, ProxyProperty> getProperties() {
    if (properties == null)
        return Collections.EMPTY_MAP;
    return properties;
}

From source file:org.geoserver.data.test.MockData.java

/**
 * Adds a property file as a feature type in a property datastore.
 *
 * @param name
 *            the fully qualified name of the feature type. The prefix and
 *            namespace URI will be used to create a namespace, the prefix
 *            will be used as the datastore name, the local name will become
 *            the feature type name
 * @param properties
 *            a URL to the property file backing the feature type. If null,
 *            an empty property file will be used
 * @param extraParams
 *            a map from extra configurable keys to their values (a hypothetical call is sketched after this listing)
 * @throws IOException
 */
public void addPropertiesType(QName name, URL properties, Map extraParams) throws IOException {
    // sanitize input params
    if (extraParams == null)
        extraParams = Collections.EMPTY_MAP;

    // setup the type directory if needed
    File directory = new File(data, name.getPrefix());
    if (!directory.exists()) {
        directory.mkdir();
    }

    // create the properties file
    File f = new File(directory, name.getLocalPart() + ".properties");

    // copy over the contents
    InputStream propertiesContents;
    if (properties == null)
        propertiesContents = new ByteArrayInputStream("-=".getBytes());
    else
        propertiesContents = properties.openStream();
    IOUtils.copy(propertiesContents, f);

    // write the info file
    info(name, extraParams);

    // setup the meta information to be written in the catalog 
    namespaces.put(name.getPrefix(), name.getNamespaceURI());
    dataStoreNamepaces.put(name.getPrefix(), name.getPrefix());
    Map params = new HashMap();
    params.put(PropertyDataStoreFactory.DIRECTORY.key, directory);
    params.put(PropertyDataStoreFactory.NAMESPACE.key, name.getNamespaceURI());
    dataStores.put(name.getPrefix(), params);
}
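
As a complement to the javadoc above, here is a hedged sketch of how a test might call addPropertiesType with Collections.EMPTY_MAP; the QName, resource name, and MockData instance are illustrative assumptions rather than code from the GeoServer project.

import java.io.IOException;
import java.util.Collections;
import javax.xml.namespace.QName;
import org.geoserver.data.test.MockData;

public class MockDataUsageSketch {

    // Registers a hypothetical "cite:Buildings" property file. Passing
    // Collections.EMPTY_MAP (or null, which addPropertiesType normalizes
    // to EMPTY_MAP) means no extra configuration values for the type.
    static void registerBuildings(MockData mockData) throws IOException {
        QName buildings = new QName("http://www.opengis.net/cite", "Buildings", "cite");
        mockData.addPropertiesType(buildings,
                MockDataUsageSketch.class.getResource("Buildings.properties"),
                Collections.EMPTY_MAP);
    }
}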

From source file:org.geotools.data.complex.config.AppSchemaDataAccessConfigurator.java

/**
 * Builds the map of client properties declared in the given attribute mapping, parsing each CQL string into an Expression keyed by its qualified attribute name.
 * @param dto
 * @return Map&lt;Name, Expression&gt; with the values per qualified name (attribute name in the
 *         mapping)
 * @throws DataSourceException
 */
private Map getClientProperties(org.geotools.data.complex.config.AttributeMapping dto)
        throws DataSourceException {

    if (dto.getClientProperties().size() == 0) {
        return Collections.EMPTY_MAP;
    }

    Map clientProperties = new HashMap();
    for (Iterator it = dto.getClientProperties().entrySet().iterator(); it.hasNext();) {
        Map.Entry entry = (Map.Entry) it.next();
        String name = (String) entry.getKey();
        Name qName = Types.degloseName(name, namespaces);
        String cqlExpression = (String) entry.getValue();
        final Expression expression = parseOgcCqlExpression(cqlExpression);
        clientProperties.put(qName, expression);
    }
    return clientProperties;
}

From source file:com.zenesis.qx.remote.ProxyTypeImpl.java

@Override
public Map<String, ProxyEvent> getEvents() {
    if (events == null)
        return Collections.EMPTY_MAP;
    return events;
}