Example usage for java.util Collections synchronizedMap

List of usage examples for java.util Collections synchronizedMap

Introduction

On this page you can find example usages of java.util Collections synchronizedMap.

Prototype

public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m) 

Document

Returns a synchronized (thread-safe) map backed by the specified map.
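
A minimal, self-contained sketch of the typical pattern (the class name SynchronizedMapExample is illustrative only): the wrapper serializes individual calls, but iteration over the returned map must still be synchronized manually on the map itself, as the Javadoc requires.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedMapExample {
    public static void main(String[] args) {
        // Wrap a plain HashMap; individual calls on the wrapper are thread-safe.
        Map<String, Integer> counts = Collections.synchronizedMap(new HashMap<String, Integer>());
        counts.put("requests", 1);

        // Iteration is not atomic: synchronize on the returned map while iterating.
        synchronized (counts) {
            for (Map.Entry<String, Integer> entry : counts.entrySet()) {
                System.out.println(entry.getKey() + " = " + entry.getValue());
            }
        }
    }
}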

Usage

From source file:org.apache.gora.couchdb.store.CouchDBStore.java

/**
 * Persist an object into the store.
 *
 * @param key identifier of the object in the store
 * @param obj the object to be inserted
 */
@Override
public void put(K key, T obj) throws GoraException {
    final Map<String, Object> buffer = Collections.synchronizedMap(new LinkedHashMap<String, Object>());
    buffer.put("_id", key);

    Schema schema = obj.getSchema();

    List<Field> fields = schema.getFields();
    for (int i = 0; i < fields.size(); i++) {
        if (!obj.isDirty(i)) {
            continue;
        }
        Field field = fields.get(i);
        Object fieldValue = obj.get(field.pos());

        Schema fieldSchema = field.schema();

        // check if field has a nested structure (array, map, record or union)
        fieldValue = toDBObject(fieldSchema, fieldValue);
        buffer.put(field.name(), fieldValue);
    }
    bulkDocs.add(buffer);

}

From source file:gov.va.isaac.gui.preferences.PreferencesViewController.java

private void saveAndExitIfSuccessful() {
    logger.debug("performing save...");

    final Map<PreferencesPluginViewI, Exception> caughtExceptions = Collections
            .synchronizedMap(new WeakHashMap<>());

    for (PreferencesPluginViewI plugin : plugins_) {
        try {
            plugin.save();
        } catch (IOException e) {
            caughtExceptions.put(plugin, e);
        }
    }

    if (caughtExceptions.size() > 0) {
        String msg = "Caught " + caughtExceptions.size() + " exceptions performing save";
        StringBuilder builder = new StringBuilder();
        for (Map.Entry<PreferencesPluginViewI, Exception> entry : caughtExceptions.entrySet()) {
            builder.append("\n" + "Plugin " + entry.getKey().getName() + " ("
                    + entry.getKey().getClass().getName() + ")" + " threw "
                    + entry.getValue().getClass().getName() + " " + entry.getValue().getLocalizedMessage());

            logger.error("Plugin " + entry.getKey().getName() + " (" + entry.getKey().getClass().getName() + ")"
                    + " threw " + entry.getValue().getClass().getName() + " "
                    + entry.getValue().getLocalizedMessage());
            entry.getValue().printStackTrace();
        }

        AppContext.getCommonDialogs().showErrorDialog("Preferences Save Error", msg, builder.toString(),
                stage_);
    } else {
        stage_.close();
    }
}

From source file:edu.cornell.mannlib.vitro.webapp.utils.jena.JenaIngestUtils.java

public Map<String, LinkedList<String>> generatePropertyMap(List<Model> sourceModels, ModelMaker maker) {
    Map<String, LinkedList<String>> propertyMap = Collections
            .synchronizedMap(new HashMap<String, LinkedList<String>>());
    Set<String> doneList = new HashSet<String>();
    for (Model model : sourceModels) {
        ClosableIterator cItr = model.listSubjects();
        while (cItr.hasNext()) {
            Resource res = (Resource) cItr.next();
            if (res.isAnon() && !doneList.contains(res.getId().toString())) {
                doneList.add(res.getId().toString());
                StmtIterator stmtItr = model.listStatements(res, (Property) null, (RDFNode) null);
                while (stmtItr.hasNext()) {
                    Statement stmt = stmtItr.next();
                    if (!stmt.getObject().isResource()) {
                        if (propertyMap.containsKey(stmt.getPredicate().getURI())) {
                            LinkedList<String> linkList = propertyMap.get(stmt.getPredicate().getURI());
                            linkList.add(stmt.getObject().toString());
                        } else {
                            propertyMap.put(stmt.getPredicate().getURI(), new LinkedList<String>());
                            LinkedList<String> linkList = propertyMap.get(stmt.getPredicate().getURI());
                            linkList.add(stmt.getObject().toString());
                        }
                    }
                }
            }
        }
        cItr = model.listObjects();
        while (cItr.hasNext()) {
            RDFNode rdfn = (RDFNode) cItr.next();
            if (rdfn.isResource()) {
                Resource res = (Resource) rdfn;
                if (res.isAnon() && !doneList.contains(res.getId().toString())) {
                    doneList.add(res.getId().toString());
                    StmtIterator stmtItr = model.listStatements(res, (Property) null, (RDFNode) null);
                    while (stmtItr.hasNext()) {
                        Statement stmt = stmtItr.next();
                        if (!stmt.getObject().isResource()) {
                            if (propertyMap.containsKey(stmt.getPredicate().getURI())) {
                                LinkedList<String> linkList = propertyMap.get(stmt.getPredicate().getURI());
                                linkList.add(stmt.getObject().toString());
                            } else {
                                propertyMap.put(stmt.getPredicate().getURI(), new LinkedList<String>());
                                LinkedList<String> linkList = propertyMap.get(stmt.getPredicate().getURI());
                                linkList.add(stmt.getObject().toString());
                            }
                        }
                    }
                }
            }
        }
        cItr.close();
    }
    return propertyMap;
}

From source file:com.taobao.itest.util.XlsUtil.java

/**
 * Reads Excel data into a Map structure.
 *
 * @param excelDir
 *            excel file path, such as abc.xls; the file is expected in the
 *            same-level directory
 *
 * @return the converted Map
 */
public static Map<String, List<Map<String, Object>>> readData(String excelDir) {

    Map<String, List<Map<String, Object>>> allData = new HashMap<String, List<Map<String, Object>>>();

    List<Map<String, Object>> sheet = null;

    String excelRealPath = getExcelRealPath(excelDir);
    IDataSet dataSet;
    try {
        dataSet = new XlsDataSet(ResourceUtils.getFile(excelRealPath));

        // traverse all sheets
        String[] allDataTable = dataSet.getTableNames();

        ITable dataTable = null;
        ITableMetaData meta = null;
        Column[] columns = null;

        Map<String, Object> row = null;
        String columnName = null;
        Object obj = null;

        // read every Sheet
        for (int d = 0; d < allDataTable.length; d++) {
            dataTable = dataSet.getTable(allDataTable[d]);
            meta = dataTable.getTableMetaData();
            columns = meta.getColumns();

            sheet = new ArrayList<Map<String, Object>>();
            // read every line
            for (int k = 0; k < dataTable.getRowCount(); k++) {
                row = Collections.synchronizedMap(new CamelCasingHashMap<String, Object>());
                for (int i = 0; i < columns.length; i++) {
                    columnName = columns[i].getColumnName();
                    obj = dataTable.getValue(k, columnName);
                    row.put(columnName, obj);
                }
                sheet.add(k, row);
            }

            allData.put(allDataTable[d], sheet);
        }
    } catch (Throwable e) {
        e.printStackTrace();
    }

    return allData;

}

From source file:org.apache.abdera.util.AbderaConfiguration.java

/**
 * Registers NamedParser implementations using the /META-INF/services/org.apache.abdera.writer.NamedParser file
 */
private Map<String, NamedParser> initNamedParsers() {
    Map<String, NamedParser> parsers = null;
    Iterable<NamedParser> _parsers = Discover.locate(NAMED_PARSER);
    parsers = Collections.synchronizedMap(new HashMap<String, NamedParser>());
    for (NamedParser parser : _parsers) {
        parsers.put(parser.getName().toLowerCase(), parser);
    }
    return parsers;
}

From source file:eu.itesla_project.modules.validation.OverloadValidationTool.java

@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    RulesDbClientFactory rulesDbClientFactory = config.getRulesDbClientFactoryClass().newInstance();
    String caseFormat = line.getOptionValue("case-format");
    Path caseDir = Paths.get(line.getOptionValue("case-dir"));
    String workflowId = line.getOptionValue("workflow");
    Path outputDir = Paths.get(line.getOptionValue("output-dir"));
    double purityThreshold = line.hasOption("purity-threshold")
            ? Double.parseDouble(line.getOptionValue("purity-threshold"))
            : DEFAULT_PURITY_THRESHOLD;

    ContingenciesAndActionsDatabaseClient contingencyDb = config.getContingencyDbClientFactoryClass()
            .newInstance().create();
    LoadFlowFactory loadFlowFactory = config.getLoadFlowFactoryClass().newInstance();

    try (ComputationManager computationManager = new LocalComputationManager();
            RulesDbClient rulesDb = rulesDbClientFactory.create(rulesDbName)) {

        Importer importer = Importers.getImporter(caseFormat, computationManager);
        if (importer == null) {
            throw new RuntimeException("Format " + caseFormat + " not supported");
        }

        Map<String, Map<String, OverloadStatus>> statusPerContingencyPerCase = Collections
                .synchronizedMap(new TreeMap<>());

        Set<String> contingencyIds = Collections.synchronizedSet(new LinkedHashSet<>());

        Importers.importAll(caseDir, importer, true, network -> {
            try {
                List<Contingency> contingencies = contingencyDb.getContingencies(network);
                contingencyIds
                        .addAll(contingencies.stream().map(Contingency::getId).collect(Collectors.toList()));

                System.out.println("running security analysis...");

                SecurityAnalysis securityAnalysis = new SecurityAnalysisImpl(network, computationManager,
                        loadFlowFactory);
                SecurityAnalysisResult securityAnalysisResult = securityAnalysis
                        .runAsync(network1 -> contingencies).join();

                System.out.println("checking rules...");

                Map<String, Map<SecurityIndexType, SecurityRuleCheckStatus>> offlineRuleCheckPerContingency = SecurityRuleUtil
                        .checkRules(network, rulesDb, workflowId, RuleAttributeSet.MONTE_CARLO,
                                EnumSet.of(SecurityIndexType.TSO_OVERLOAD), null, purityThreshold);

                Map<String, OverloadStatus> statusPerContingency = new HashMap<>();

                for (PostContingencyResult postContingencyResult : securityAnalysisResult
                        .getPostContingencyResults()) {
                    Contingency contingency = postContingencyResult.getContingency();
                    boolean lfOk = postContingencyResult.isComputationOk()
                            && postContingencyResult.getLimitViolations().isEmpty();
                    Map<SecurityIndexType, SecurityRuleCheckStatus> offlineRuleCheck = offlineRuleCheckPerContingency
                            .get(contingency.getId());
                    boolean offlineRuleOk = offlineRuleCheck != null && offlineRuleCheck
                            .get(SecurityIndexType.TSO_OVERLOAD) == SecurityRuleCheckStatus.OK;
                    statusPerContingency.put(contingency.getId(), new OverloadStatus(offlineRuleOk, lfOk));
                }

                statusPerContingencyPerCase.put(network.getId(), statusPerContingency);
            } catch (Exception e) {
                LOGGER.error(e.toString(), e);
            }
        }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + " ..."));

        writeCsv(contingencyIds, statusPerContingencyPerCase, outputDir);
    }
}

From source file:com.alibaba.wasp.master.AssignmentManager.java

/**
 * Constructs a new assignment manager.
 *
 * @param server
 * @param serverManager
 * @param service
 * @param metricsMaster
 * @throws org.apache.zookeeper.KeeperException
 * @throws java.io.IOException
 */
public AssignmentManager(Server server, FServerManager serverManager, final LoadBalancer balancer,
        final ExecutorService service, MetricsMaster metricsMaster) throws KeeperException, IOException {
    super(server.getZooKeeper());
    this.server = server;
    this.serverManager = serverManager;
    this.executorService = service;
    this.entityGroupsToReopen = Collections.synchronizedMap(new HashMap<String, EntityGroupInfo>());
    Configuration conf = server.getConfiguration();
    this.timeoutMonitor = new TimeoutMonitor(conf.getInt("wasp.master.assignment.timeoutmonitor.period", 30000),
            server, serverManager, conf.getInt("wasp.master.assignment.timeoutmonitor.timeout", 600000));
    this.timerUpdater = new TimerUpdater(conf.getInt("wasp.master.assignment.timerupdater.period", 10000),
            server);
    Threads.setDaemonThreadRunning(timerUpdater.getThread(), server.getServerName() + ".timerUpdater");
    this.zkTable = new ZKTable(this.watcher);
    this.maximumAttempts = this.server.getConfiguration().getInt("wasp.assignment.maximum.attempts", 10);
    this.balancer = balancer;
    int maxThreads = conf.getInt("wasp.assignment.threads.max", 30);
    this.threadPoolExecutorService = Threads.getBoundedCachedThreadPool(maxThreads, 60L, TimeUnit.SECONDS,
            newDaemonThreadFactory("hbase-am"));
    this.metricsMaster = metricsMaster;// can be null only with tests.
    this.entityGroupStates = new EntityGroupStates(server, serverManager);

    int workers = conf.getInt("wasp.assignment.zkevent.workers", 5);
    zkEventWorkers = new java.util.concurrent.ExecutorService[workers];
    ThreadFactory threadFactory = newDaemonThreadFactory("am-zkevent-worker");
    for (int i = 0; i < workers; i++) {
        zkEventWorkers[i] = Threads.getBoundedCachedThreadPool(1, 60L, TimeUnit.SECONDS, threadFactory);
    }
}

From source file:nl.tue.gale.ae.processor.xmlmodule.AdaptLinkModule.java

@SuppressWarnings("unchecked")
private void storeInSession(GaleContext gale, String guid, String exec) {
    Map<String, String[]> lru = (Map<String, String[]>) gale.req().getSession().getAttribute("ExecPlugin:map");
    if (lru == null) {
        lru = Collections.synchronizedMap((Map<String, String[]>) new LRUMap(50));
        gale.req().getSession().setAttribute("ExecPlugin:map", lru);
    }
    lru.put(guid, new String[] { gale.conceptUri().toString(), exec });
}

From source file:it.polimi.geinterface.GroupEntityManager.java

/**
 *
 * @param ctx - {@link Context} of the application using the framework.
 * @param self - {@link Entity} representing selfEntity
 * @param secureMgr - {@link SecurityManager} used to set security policies.
 * @param connCallback - {@link ConnectionStateCallback} used to set callback functions for 
 * network events (disconnection, connection failed, successful connection)
 */
private GroupEntityManager(Context ctx, Entity self, SecurityManager secureMgr,
        final ConnectionStateCallback connCallback) {
    _instance = this;
    appCtx = ctx;
    selfEntity = self;

    LoggerService.changeMode(ctx, LogMod.silent);

    Log.e(TAG, ctx.getPackageName());
    if (secureMgr == null)
        //set default security configuration
        securityManager = new SecurityManager.Builder(ctx).build();
    else
        securityManager = secureMgr;

    techManager = new TechnologyManager(appCtx);

    techManager.startProximiyUpdates();

    proximityDataList = new ArrayList<ProximityData>();
    proximitySubscriptionList = new ArrayList<Subscription>();
    groupSubscriptionList = new ArrayList<Subscription>();
    geofenceSubscriptionList = new ArrayList<Subscription>();

    lastSeenBeacons = new ArrayList<Entity>();

    waitingForCheckInTasks = Collections.synchronizedMap(new HashMap<String, TimerTask>());

    msgHandler = new MessageHandler();

    checkInTimer = new Timer(true);

    this.connStateCallback = connCallback;

    networkClient = new MQTTPahoClient(appCtx, self, securityManager, connCallback);

    networkClient.setMessageArrivedCallback(new MessageCallback() {

        @Override
        public void onMessageReceived(String m) {

            //timestamp used for logging
            long timestamp = Calendar.getInstance().getTimeInMillis() + LoggerService.NTP_DELAY;

            String senderID = MessageUtils.getSenderID(m);
            MessageType type = MessageUtils.getMsgType(m);

            //skip messages from myself
            if (senderID.equalsIgnoreCase(selfEntity.getEntityID()))
                return;

            Log.i(TAG, "Message received from " + senderID);

            /*
             * 
             * The following lines of code are used only for logging.
             * 
             */
            String log, topicReply, logId;
            JSONObject status = LogMessageUtils.buildStatus(proximitySubscriptionList.size(),
                    groupSubscriptionList.size(), geofenceSubscriptionList.size());

            if (type.equals(MessageType.SYNC_RESP))
                topicReply = MessageUtils.getRequestTopicFromMessage(m);
            else {
                topicReply = null;
            }

            if (type.equals(MessageType.CHECK_OUT)) {
                if (MessageUtils.getValidBitFromMessage(m))
                    logId = selfEntity.getEntityID() + timestamp;
                else {
                    logId = MessageUtils.getSenderID(m);
                }
                log = LogMessageUtils.buildMessageReceivedLog(logId, selfEntity.getEntityID(), Type.DEVICE,
                        type, topicReply, status, timestamp);
                m = MessageUtils.addLogField(m, logId);
            } else {
                log = LogMessageUtils.buildMessageReceivedLog(MessageUtils.getLogIdFromMessage(m),
                        selfEntity.getEntityID(), Type.DEVICE, type, topicReply, status, timestamp);
            }

            if (!senderID.equals(selfEntity.getEntityID()))
                LoggerService.writeToFile(appCtx, log);
            /*
             * 
             * End of logging code
             * 
             */

            msgHandler.messageConsumer(m);
        }
    });

    scheduler = new Scheduler();
    scheduler.resume();
}

From source file:cn.openwatch.internal.http.loopj.AsyncHttpClient.java

/**
 * Creates a new AsyncHttpClient.
 *
 * @param schemeRegistry SchemeRegistry to be used
 */
public AsyncHttpClient(SchemeRegistry schemeRegistry) {

    BasicHttpParams httpParams = new BasicHttpParams();

    ConnManagerParams.setTimeout(httpParams, connectTimeout);
    ConnManagerParams.setMaxConnectionsPerRoute(httpParams, new ConnPerRouteBean(maxConnections));
    ConnManagerParams.setMaxTotalConnections(httpParams, DEFAULT_MAX_CONNECTIONS);

    HttpConnectionParams.setSoTimeout(httpParams, responseTimeout);
    HttpConnectionParams.setConnectionTimeout(httpParams, connectTimeout);
    HttpConnectionParams.setTcpNoDelay(httpParams, true);
    HttpConnectionParams.setSocketBufferSize(httpParams, DEFAULT_SOCKET_BUFFER_SIZE);

    HttpProtocolParams.setVersion(httpParams, HttpVersion.HTTP_1_1);

    ClientConnectionManager cm = createConnectionManager(schemeRegistry, httpParams);
    Utils.asserts(cm != null,
            "Custom implementation of #createConnectionManager(SchemeRegistry, BasicHttpParams) returned null");

    threadPool = getDefaultThreadPool();
    requestMap = Collections.synchronizedMap(new WeakHashMap<Context, List<RequestHandle>>());
    clientHeaderMap = new HashMap<String, String>();

    httpContext = new SyncBasicHttpContext(new BasicHttpContext());
    httpClient = new DefaultHttpClient(cm, httpParams);
    httpClient.addRequestInterceptor(new HttpRequestInterceptor() {
        @Override
        public void process(HttpRequest request, HttpContext context) {
            if (!request.containsHeader(HEADER_ACCEPT_ENCODING)) {
                request.addHeader(HEADER_ACCEPT_ENCODING, ENCODING_GZIP);
            }
            for (String header : clientHeaderMap.keySet()) {
                if (request.containsHeader(header)) {
                    Header overwritten = request.getFirstHeader(header);

                    // remove the overwritten header
                    request.removeHeader(overwritten);
                }
                request.addHeader(header, clientHeaderMap.get(header));
            }
        }
    });

    httpClient.addResponseInterceptor(new HttpResponseInterceptor() {
        @Override
        public void process(HttpResponse response, HttpContext context) {
            final HttpEntity entity = response.getEntity();
            if (entity == null) {
                return;
            }
            final Header encoding = entity.getContentEncoding();
            if (encoding != null) {
                for (HeaderElement element : encoding.getElements()) {
                    if (element.getName().equalsIgnoreCase(ENCODING_GZIP)) {
                        response.setEntity(new InflatingEntity(entity));
                        break;
                    }
                }
            }
        }
    });

    httpClient.addRequestInterceptor(new HttpRequestInterceptor() {
        @Override
        public void process(final HttpRequest request, final HttpContext context)
                throws HttpException, IOException {
            AuthState authState = (AuthState) context.getAttribute(ClientContext.TARGET_AUTH_STATE);
            CredentialsProvider credsProvider = (CredentialsProvider) context
                    .getAttribute(ClientContext.CREDS_PROVIDER);
            HttpHost targetHost = (HttpHost) context.getAttribute(ExecutionContext.HTTP_TARGET_HOST);

            if (authState.getAuthScheme() == null) {
                AuthScope authScope = new AuthScope(targetHost.getHostName(), targetHost.getPort());
                Credentials creds = credsProvider.getCredentials(authScope);
                if (creds != null) {
                    authState.setAuthScheme(new BasicScheme());
                    authState.setCredentials(creds);
                }
            }
        }
    }, 0);

    httpClient
            .setHttpRequestRetryHandler(new RetryHandler(DEFAULT_MAX_RETRIES, DEFAULT_RETRY_SLEEP_TIME_MILLIS));
}