List of usage examples for java.util.Collections.synchronizedMap
public static <K, V> Map<K, V> synchronizedMap(Map<K, V> m)
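Before the project examples, a minimal self-contained sketch (not taken from any of the projects below) of the basic pattern: wrap a plain HashMap so that individual operations are thread-safe, and synchronize manually on the returned map while iterating, as the Collections.synchronizedMap Javadoc requires.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedMapExample {
    public static void main(String[] args) {
        // Individual put/get/remove calls on the wrapper are thread-safe.
        Map<String, Integer> counts = Collections.synchronizedMap(new HashMap<>());
        counts.put("a", 1);
        counts.put("b", 2);

        // Iteration is NOT atomic: callers must hold the map's own monitor while
        // iterating, otherwise behavior is undefined under concurrent modification.
        synchronized (counts) {
            for (Map.Entry<String, Integer> e : counts.entrySet()) {
                System.out.println(e.getKey() + " -> " + e.getValue());
            }
        }
    }
}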
From source file:eu.itesla_project.modules.rules.CheckSecurityTool.java
@Override
public void run(CommandLine line) throws Exception {
    OfflineConfig config = OfflineConfig.load();
    Path caseFile = Paths.get(line.getOptionValue("case-file"));
    Objects.requireNonNull(caseFile);
    String rulesDbName = line.hasOption("rules-db-name") ? line.getOptionValue("rules-db-name")
            : OfflineConfig.DEFAULT_RULES_DB_NAME;
    RulesDbClientFactory rulesDbClientFactory = config.getRulesDbClientFactoryClass().newInstance();
    String workflowId = line.getOptionValue("workflow");
    RuleAttributeSet attributeSet = RuleAttributeSet.valueOf(line.getOptionValue("attribute-set"));
    double purityThreshold = line.hasOption("purity-threshold")
            ? Double.parseDouble(line.getOptionValue("purity-threshold"))
            : CheckSecurityCommand.DEFAULT_PURITY_THRESHOLD;
    Path outputCsvFile = null;
    if (line.hasOption("output-csv-file")) {
        outputCsvFile = Paths.get(line.getOptionValue("output-csv-file"));
    }
    Set<SecurityIndexType> securityIndexTypes = line.hasOption("security-index-types")
            ? Arrays.stream(line.getOptionValue("security-index-types").split(","))
                    .map(SecurityIndexType::valueOf).collect(Collectors.toSet())
            : EnumSet.allOf(SecurityIndexType.class);
    final Set<String> contingencies = line.hasOption("contingencies")
            ? Arrays.stream(line.getOptionValue("contingencies").split(",")).collect(Collectors.toSet())
            : null;
    try (RulesDbClient rulesDb = rulesDbClientFactory.create(rulesDbName)) {
        if (Files.isRegularFile(caseFile)) {
            System.out.println("loading case " + caseFile + "...");
            // load the network
            Network network = Importers.loadNetwork(caseFile);
            if (network == null) {
                throw new RuntimeException("Case '" + caseFile + "' not found");
            }
            network.getStateManager().allowStateMultiThreadAccess(true);
            System.out.println("checking rules...");
            Map<String, Map<SecurityIndexType, SecurityRuleCheckStatus>> checkStatusPerContingency = SecurityRuleUtil
                    .checkRules(network, rulesDb, workflowId, attributeSet, securityIndexTypes, contingencies,
                            purityThreshold);
            if (outputCsvFile == null) {
                prettyPrint(checkStatusPerContingency, securityIndexTypes);
            } else {
                writeCsv(checkStatusPerContingency, securityIndexTypes, outputCsvFile);
            }
        } else if (Files.isDirectory(caseFile)) {
            if (outputCsvFile == null) {
                throw new RuntimeException(
                        "In case of multiple impact security checks, only output to csv file is supported");
            }
            Map<String, Map<SecurityIndexId, SecurityRuleCheckStatus>> checkStatusPerBaseCase = Collections
                    .synchronizedMap(new LinkedHashMap<>());
            Importers.loadNetworks(caseFile, true, network -> {
                try {
                    Map<String, Map<SecurityIndexType, SecurityRuleCheckStatus>> checkStatusPerContingency = SecurityRuleUtil
                            .checkRules(network, rulesDb, workflowId, attributeSet, securityIndexTypes,
                                    contingencies, purityThreshold);
                    Map<SecurityIndexId, SecurityRuleCheckStatus> checkStatusMap = new HashMap<>();
                    for (Map.Entry<String, Map<SecurityIndexType, SecurityRuleCheckStatus>> entry : checkStatusPerContingency
                            .entrySet()) {
                        String contingencyId = entry.getKey();
                        for (Map.Entry<SecurityIndexType, SecurityRuleCheckStatus> entry1 : entry.getValue()
                                .entrySet()) {
                            SecurityIndexType type = entry1.getKey();
                            SecurityRuleCheckStatus status = entry1.getValue();
                            checkStatusMap.put(new SecurityIndexId(contingencyId, type), status);
                        }
                    }
                    checkStatusPerBaseCase.put(network.getId(), checkStatusMap);
                } catch (Exception e) {
                    LOGGER.error(e.toString(), e);
                }
            }, dataSource -> System.out.println("loading case " + dataSource.getBaseName() + "..."));
            writeCsv2(checkStatusPerBaseCase, outputCsvFile);
        }
    }
}
From source file:org.zenoss.zep.impl.TriggerPlugin.java
@Override
public void start(Map<String, String> properties) {
    int triggerRuleCacheSize = this.getTriggerRuleCacheSize();
    logger.info("TriggerPlugin trigger rule cache size: {}", triggerRuleCacheSize);
    Map<String, TriggerRuleCache> boundedMap = ZepUtils.createBoundedMap(triggerRuleCacheSize);
    this.triggerRuleCache = Collections.synchronizedMap(boundedMap);
    super.start(properties);
    scheduleSpool();
}
From source file:edu.cuny.cat.server.ConnectionAdaptor.java
public ConnectionAdaptor(final ConnectionManager manager, final Connection<CatpMessage> conn) {
    eventEngine = Galaxy.getInstance().getDefaultTyped(EventEngine.class);
    this.manager = manager;
    controller = GameController.getInstance();
    clock = controller.getClock();
    shoutValidator = controller.getShoutValidator();
    transactionValidator = controller.getTransactionValidator();
    chargeValidator = controller.getChargeValidator();
    registry = controller.getRegistry();
    timeController = controller.getTimeController();
    proactiveSessions = BufferUtils.synchronizedBuffer(new UnboundedFifoBuffer<CatpProactiveSession>());
    pendingRequestSessions = Collections.synchronizedMap(new HashMap<String, ShoutFromTraderSession>());
    connection = ListenableConnection.makeReactiveConnection(conn);
    setExpectedReactiveSessions(
            new CatpReactiveSession[] { new CheckInSession(), new OracleSession("BeforeCheckIn") });
    openConnection();
    setState(ClientState.READY, ClientState.getCodeDesc(ClientState.READY) + " for checking in");
}
From source file:org.apache.hadoop.yarn.server.applicationhistoryservice.timeline.LeveldbTimelineStore.java
@Override @SuppressWarnings("unchecked") protected void serviceInit(Configuration conf) throws Exception { Options options = new Options(); options.createIfMissing(true);/*w w w. j a v a2s .c o m*/ options.cacheSize(conf.getLong(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE, YarnConfiguration.DEFAULT_TIMELINE_SERVICE_LEVELDB_READ_CACHE_SIZE)); JniDBFactory factory = new JniDBFactory(); String path = conf.get(YarnConfiguration.TIMELINE_SERVICE_LEVELDB_PATH); File p = new File(path); if (!p.exists()) { if (!p.mkdirs()) { throw new IOException("Couldn't create directory for leveldb " + "timeline store " + path); } } LOG.info("Using leveldb path " + path); db = factory.open(new File(path, FILENAME), options); startTimeWriteCache = Collections.synchronizedMap(new LRUMap(getStartTimeWriteCacheSize(conf))); startTimeReadCache = Collections.synchronizedMap(new LRUMap(getStartTimeReadCacheSize(conf))); if (conf.getBoolean(YarnConfiguration.TIMELINE_SERVICE_TTL_ENABLE, true)) { deletionThread = new EntityDeletionThread(conf); deletionThread.start(); } super.serviceInit(conf); }
From source file:org.apache.hadoop.mapred.JobInitializationPoller.java
public JobInitializationPoller(JobQueuesManager mgr, CapacitySchedulerConf rmConf, Set<String> queue,
        TaskTrackerManager ttm) {
    initializedJobs = new HashMap<JobID, JobInProgress>();
    this.jobQueueManager = mgr;
    threadsToQueueMap = Collections.synchronizedMap(new HashMap<String, JobInitializationThread>());
    super.setName("JobInitializationPollerThread");
    running = true;
    this.ttm = ttm;
}
From source file:org.apache.hadoop.mapred.WorkflowJobInitializationPoller.java
public WorkflowJobInitializationPoller(WorkflowJobQueuesManager mgr, WorkflowSchedulerConf rmConf,
        Set<String> queue, TaskTrackerManager ttm) {
    initializedJobs = new HashMap<JobID, JobInProgress>();
    this.jobQueueManager = mgr;
    threadsToQueueMap = Collections.synchronizedMap(new HashMap<String, JobInitializationThread>());
    super.setName("JobInitializationPollerThread");
    running = true;
    this.ttm = ttm;
}
From source file:org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer.java
public RemoteInterpreterServer(String intpEventServerHost, int intpEventServerPort, String portRange,
        String interpreterGroupId, boolean isTest) throws TTransportException, IOException {
    logger.info("Starting remote interpreter server on port {}, intpEventServerAddress: {}:{}", port,
            intpEventServerHost, intpEventServerPort);
    if (null != intpEventServerHost) {
        this.intpEventServerHost = intpEventServerHost;
        if (!isTest) {
            TTransport transport = new TSocket(intpEventServerHost, intpEventServerPort);
            transport.open();
            TProtocol protocol = new TBinaryProtocol(transport);
            intpEventServiceClient = new RemoteInterpreterEventService.Client(protocol);
            intpEventClient = new RemoteInterpreterEventClient(intpEventServiceClient);
        }
    } else {
        // DevInterpreter
        this.port = intpEventServerPort;
    }
    this.isTest = isTest;
    this.interpreterGroupId = interpreterGroupId;
    RemoteInterpreterService.Processor<RemoteInterpreterServer> processor = new RemoteInterpreterService.Processor<>(
            this);
    TServerSocket serverTransport;
    if (null == intpEventServerHost) {
        // Dev Interpreter
        serverTransport = new TServerSocket(intpEventServerPort);
    } else {
        serverTransport = RemoteInterpreterUtils.createTServerSocket(portRange);
        this.port = serverTransport.getServerSocket().getLocalPort();
        this.host = RemoteInterpreterUtils.findAvailableHostAddress();
        logger.info("Launching ThriftServer at " + this.host + ":" + this.port);
    }
    server = new TThreadPoolServer(new TThreadPoolServer.Args(serverTransport).processor(processor));
    remoteWorksResponsePool = Collections.synchronizedMap(new HashMap<String, Object>());
}
From source file:mil.navy.med.dzreg.dao.RegistriesManagerDAO.java
/**
 * Register a new registry profile.
 * @param profile
 * @return
 * @throws Exception
 */
public AckType register(PersonRegistryProfileType profile) {
    EntityManager em = null;
    PersistentServiceFactory psf = null;
    StringBuffer exceptionMsg = new StringBuffer();
    AckType ack = new AckType();
    ack.setResponseCode(_APPLICATION_ERROR);
    if (profile != null && profile.getPerson() != null && profile.getRegistry() != null
            && !profile.getRegistry().isEmpty() && profile.getDataSource() != null) {
        //------------------------------------------------------------------------
        // Registry type must be valid.
        //------------------------------------------------------------------------
        Map<Integer, DzType> toBeRegisterDzTypes = Collections.synchronizedMap(new HashMap<Integer, DzType>());
        for (RegistryType r : profile.getRegistry()) {
            try {
                DzType dzType = this.validRegistryType(r);
                if (dzType != null) {
                    toBeRegisterDzTypes.put(Integer.valueOf(dzType.getDztypeId()), dzType);
                } else {
                    exceptionMsg.append("Invalid/Unknown registy type specified - " + r.getRegistryId() + ";");
                }
            } catch (Exception ex) {
                exceptionMsg.append(ex.getMessage() + ";");
            }
        }
        //----------------------------------------------------------------------
        // Person info must have following elements:
        // 1. Identifer
        // 2. Name
        // 3. Date of birth
        // 4. Data Source
        //----------------------------------------------------------------------
        PersonType person = profile.getPerson();
        if (person.getName() == null || person.getName().isEmpty()
                || (person.getDataSource() == null && profile.getDataSource() == null)) {
            ack.setDetectedIssueText("Missing required metadata (person identifier or name or data source);");
            return ack;
        }
        //------------------------------------------------------------------------
        // Check to see if this person already registered.
        //------------------------------------------------------------------------
        DzPatients registeredPatient = null;
        try {
            registeredPatient = validPerson(person);
        } catch (javax.persistence.NoResultException nre) {
        } catch (Exception ex) {
            ack.setDetectedIssueText("Failed to register patient - " + ex.getMessage());
            return ack;
        }
        try {
            psf = PersistentServiceFactory.getInstance(REGISTRY_MANAGER_PU);
            em = psf.getEntityManager();
            em.getTransaction().begin();
            // Get the date today using Calendar object.
            Calendar cal = Calendar.getInstance();
            Timestamp today = new Timestamp(cal.getTimeInMillis());
            //----------------------------------------------------------------------
            // If yes, only need to add a record to table DZ_REG for each new
            // registry type.
            //----------------------------------------------------------------------
            if (registeredPatient != null) {
                // remove any registry type (from request) that is already assigned to
                // this person
                Collection<DzReg> registries = registeredPatient.getDzRegCollection();
                for (DzReg r : registries) {
                    Integer intDzTypeId = Integer.valueOf(r.getDzType().getDztypeId());
                    if (toBeRegisterDzTypes.containsKey(intDzTypeId)) {
                        toBeRegisterDzTypes.remove(intDzTypeId);
                        log.debug("Already registered in Registry " + intDzTypeId + "!");
                        exceptionMsg.append("Already registered in Registry " + intDzTypeId + ";");
                    }
                }
                // what we have left is new registry type to be add to person registry
                // profile
                Collection<DzType> toBeRegisterColl = toBeRegisterDzTypes.values();
                for (DzType d : toBeRegisterColl) {
                    // only need to add a record to table DZ_REG
                    DzRegPK pk = new DzRegPK(person.getId(), d.getDztypeId());
                    DzReg newDzreg = new DzReg();
                    newDzreg.setDzRegPK(pk);
                    newDzreg.setActive(_ACTIVE);
                    newDzreg.setDataSource(person.getDataSource());
                    newDzreg.setRegisteredDt(today);
                    newDzreg.setInsertedDt(today);
                    em.persist(newDzreg);
                }
            }
            //----------------------------------------------------------------------
            // If no, need to insert a new record in DZ_PATIENTS table and a new
            // record in table DZ_REG for each new registry type.
            //----------------------------------------------------------------------
            else {
                DzPatients newDzPatient = map(person);
                newDzPatient.setInsertedDt(today);
                newDzPatient.setUpdatedDt(today);
                if (person.getDataSource() == null) {
                    if (profile.getDataSource() != null) {
                        newDzPatient.setDataSource(profile.getDataSource());
                    } else {
                        // cannot insert record
                        throw new Exception("Missing required metadata (data source);");
                    }
                } else {
                    newDzPatient.setDataSource(profile.getDataSource());
                }
                Collection<DzType> dzTypes = toBeRegisterDzTypes.values();
                Collection<DzReg> newDzregList = new ArrayList<DzReg>(dzTypes.size());
                for (DzType dzType : dzTypes) {
                    DzRegPK pk = new DzRegPK(person.getId(), dzType.getDztypeId());
                    DzReg newDzreg = new DzReg();
                    newDzreg.setDzRegPK(pk);
                    newDzreg.setActive(_ACTIVE);
                    newDzreg.setRegisteredDt(today);
                    newDzreg.setInsertedDt(today);
                    if (person.getDataSource() == null) {
                        newDzreg.setDataSource(profile.getDataSource());
                    } else {
                        newDzreg.setDataSource(person.getDataSource());
                    }
                    newDzregList.add(newDzreg);
                }
                newDzPatient.setDzRegCollection(newDzregList);
                em.persist(newDzPatient);
            }
            em.getTransaction().commit();
            ack.setResponseCode(_OK);
            ack.setDetectedIssueText(exceptionMsg.toString());
            return ack;
        } catch (Exception ex) {
            ex.printStackTrace();
            em.getTransaction().rollback();
            log.error("Failed to create new records in table DZ_PATIENTS/DZ_REG for profile=" + profile);
            ack.setDetectedIssueText(
                    "Failed to register patient " + profile.getPerson().getId() + "-" + ex.getMessage());
            return ack;
        } finally {
            em.close();
        }
    } else {
        ack.setDetectedIssueText("Invalid registry profile");
        return ack;
    }
}
From source file:org.opcfoundation.ua.transport.https.HttpsServer.java
public HttpsServer(Application application) throws ServiceResultException {
    super(CloseableObjectState.Closed, CloseableObjectState.Closed);
    this.application = application;
    this.ioConfig = new IOReactorConfig();
    this.securityPolicies = application.getHttpsSettings().getHttpsSecurityPolicies();

    // Disable Nagle's
    ioConfig.setTcpNoDelay(false);

    HttpProcessor httpproc = new ImmutableHttpProcessor(new HttpResponseInterceptor[] {
            // Use standard server-side protocol interceptors
            new ResponseDate(), new ResponseServer(), new ResponseContent(), new ResponseConnControl() });

    // Create request handler registry
    registry = new RequestResolver();

    // Register the default handler for all URIs
    final Map<NHttpServerConnection, HttpsServerConnection> connMap = Collections
            .synchronizedMap(new HashMap<NHttpServerConnection, HttpsServerConnection>());

    // Create connection re-use strategy
    connectionReuseStrategy = new DefaultConnectionReuseStrategy();

    // Create server-side HTTP protocol handler
    protocolHandler = new HttpAsyncService(httpproc, connectionReuseStrategy, registry, getHttpParams()) {

        @Override
        public void connected(final NHttpServerConnection conn) {
            NHttpConnectionBase conn2 = (NHttpConnectionBase) conn;
            log.info("connected: {} {}<-> {} context={} socketTimeout={}",
                    HttpsServer.this.getBoundSocketAddresses(), conn2.getLocalAddress(),
                    conn2.getRemoteAddress(), conn2.getContext(), conn2.getSocketTimeout());
            HttpsServerConnection httpsConnection = new HttpsServerConnection(HttpsServer.this, conn);
            connMap.put(conn, httpsConnection);
            connections.addConnection(httpsConnection);
            super.connected(conn);
        }

        @Override
        public void closed(final NHttpServerConnection conn) {
            NHttpConnectionBase conn2 = (NHttpConnectionBase) conn;
            log.info("closed: {} {}<-> {} context={} socketTimeout={}",
                    HttpsServer.this.getBoundSocketAddresses(), conn2.getLocalAddress(),
                    conn2.getRemoteAddress(), conn2.getContext(), conn2.getSocketTimeout());
            HttpsServerConnection conn3 = connMap.remove(conn);
            connections.removeConnection(conn3);
            super.closed(conn);
        }
    };

    // Create a service server for connections that query endpoints (url = "")
    discoveryServer = new Server(application);
    discoveryServer.setEndpointBindings(endpointBindings);
    EndpointBinding discoveryBinding = new EndpointBinding(this, discoveryEndpoint, discoveryServer);
    discoveryHandler = new HttpsServerEndpointHandler(discoveryBinding);
}
From source file:org.pentaho.di.core.util.StringUtil.java
/**
 * Substitutes variables in <code>aString</code> with the environment values in the system properties
 *
 * @param aString
 *          the string on which to apply the substitution.
 * @param systemProperties
 *          the system properties to use
 * @return the string with the substitution applied.
 */
public static final synchronized String environmentSubstitute(String aString,
        Map<String, String> systemProperties) {
    Map<String, String> sysMap = new HashMap<String, String>();
    synchronized (sysMap) {
        sysMap.putAll(Collections.synchronizedMap(systemProperties));
        aString = substituteWindows(aString, sysMap);
        aString = substituteUnix(aString, sysMap);
        aString = substituteHex(aString);
        return aString;
    }
}