List of usage examples for java.util.logging Level FINE
Level FINE
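Before the project snippets, here is a minimal, self-contained sketch of how Level.FINE is typically enabled and used with java.util.logging. The class name, logger, and console-handler setup below are illustrative assumptions and are not taken from the examples on this page. FINE sits below the default INFO threshold, so both the logger and its handler must be lowered for FINE output to appear.

import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FineLoggingDemo {
    private static final Logger LOG = Logger.getLogger(FineLoggingDemo.class.getName());

    public static void main(String[] args) {
        // Lower both the logger and a handler to FINE; the default ConsoleHandler stays at INFO.
        ConsoleHandler handler = new ConsoleHandler();
        handler.setLevel(Level.FINE);
        LOG.addHandler(handler);
        LOG.setUseParentHandlers(false);
        LOG.setLevel(Level.FINE);

        // Guard message construction so it is skipped when FINE is disabled,
        // and use a parameterized message instead of string concatenation.
        if (LOG.isLoggable(Level.FINE)) {
            LOG.log(Level.FINE, "Processing item {0} of {1}", new Object[] { 3, 10 });
        }
    }
}

The isLoggable(Level.FINE) guard and the parameterized {0}-style messages mirror the patterns used in several of the snippets below. The same levels can also be set externally in a logging.properties file (logger and ConsoleHandler entries) rather than in code.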
From source file: com.mobilehelix.appserver.push.PushManager.java

public void addSession(String client, String userID, String password, String deviceType, Long appID,
        Integer appGenID) throws AppserverSystemException {
    ApplicationSettings as = appRegistry.getSettingsForAppID(client, appID, appGenID);
    if (as == null) {
        /* The registration does not tell us the app type. Hence we may get
         * normal web apps in our ID list. We just need to skip these ... */
        return;
    }
    if (as.getPushReceiver() == null) {
        // App does not support push.
        return;
    }
    LOG.log(Level.FINE, "Create or refresh push session for app {0}", as.getAppName());

    // See if we have a push receiver for client/user/app
    boolean found = false;
    String combinedUser = this.getCombinedUser(client, userID);
    ConcurrentLinkedQueue<PushReceiver> receivers = this.userPushMap.get(combinedUser);
    if (receivers != null && !receivers.isEmpty()) {
        for (PushReceiver receiver : receivers) {
            if (receiver.matches(client, userID, appID)) {
                found = true;
                LOG.log(Level.INFO, "Refreshing push session for {0}", combinedUser);
                receiver.refresh(userID, password, as, true);
            }
        }
    }

    try {
        if (!found) {
            LOG.log(Level.INFO, "Creating push session for {0}", combinedUser);
            String uniqueID = this.getUniqueID(client, userID, appID);
            PushReceiver newReceiver = as.getPushReceiver();
            PushCompletion pushAccepted = new PushCompletion(this.userPushMap, this.idMap, uniqueID,
                    combinedUser, newReceiver);
            pushInit.doInit(newReceiver, asHostPlusPort, uniqueID, combinedUser, client, userID, password,
                    deviceType, appID, as, pushAccepted);
            /*
            if (newReceiver.create(asHostPlusPort, uniqueID, newSess.getClient(), newSess.getUserID(),
                    newSess.getPassword(), newSess.getDeviceType(), as)) {
                LOG.log(Level.FINE, "Created push session for {0}, ID {1}",
                        new Object[] { combinedUser, uniqueID });
            }
            */
        }
    } catch (NoSuchAlgorithmException | UnsupportedEncodingException ex) {
        LOG.log(Level.SEVERE, "Failed to create push session.", ex);
        throw new AppserverSystemException("Failed to create push session.", "FailedToCreatePushSession",
                new String[] { ex.getMessage() });
    }
}
From source file: org.globus.security.stores.ResourceSecurityWrapperStore.java

private boolean load(Resource resource, Set<V> currentRoots, Map<String, T> newWrapperMap)
        throws ResourceStoreException {
    if (!resource.isReadable()) {
        throw new ResourceStoreException("Cannot read file");
    }
    try {
        if (resource.getFile().isDirectory()) {
            File directory = resource.getFile();
            currentRoots.addAll(addCredentials(directory));
            return true;
        }
    } catch (IOException e) {
        // This is ok, it just means the resource is not a filesystem resource.
        logger.log(Level.FINE, "Not a filesystem resource", e);
    }
    try {
        String resourceUri = resource.getURL().toExternalForm();
        T fbo = this.wrapperMap.get(resourceUri);
        if (fbo == null) {
            fbo = create(resource);
        }
        V target = fbo.create(resource);
        newWrapperMap.put(resourceUri, fbo);
        currentRoots.add(target);
        return true;
    } catch (IOException e) {
        throw new ResourceStoreException(e);
    }
}
From source file: magma.agent.worldmodel.impl.ThisPlayer.java

/**
 * @return the homePosition
 */
public Vector3D getHomePosition(String playmode) {
    float[] startPos = { 0.0f, 0.0f, 0.0f };
    logger.log(Level.FINE, "playmode: {0}", new Object[] { playmode });
    if ((homePosition == null) || (side == IMagmaConstants.LEFT_SIDE)) {
        if (playmode.equalsIgnoreCase(IServerConfigFilesConstants.PLAYMODE_GOAL_RIGHT)
                || playmode.equalsIgnoreCase(IServerConfigFilesConstants.PLAYMODE_BEFORE_KICK_OFF)) {
            startPos = IMagmaConstants.startPositionsOwnKickoff[getID()];
        } else {
            startPos = IMagmaConstants.startPositionsOtherKickoff[getID()];
        }
        homePosition = new Vector3D(startPos[0], startPos[1], startPos[2]);
    } else {
        if (playmode.equalsIgnoreCase(IServerConfigFilesConstants.PLAYMODE_GOAL_LEFT)) {
            startPos = IMagmaConstants.startPositionsOwnKickoff[getID()];
        } else {
            startPos = IMagmaConstants.startPositionsOtherKickoff[getID()];
        }
        homePosition = new Vector3D(startPos[0], startPos[1], startPos[2]);
    }
    return homePosition;
}
From source file: com.wareninja.android.commonutils.foursquareV2.http.HttpApiWithOAuthV2.java

public void setOAuthTokenWithSecret(String token, String tokenSecret) {
    verifyConsumer();
    if (token == null && tokenSecret == null) {
        if (DEBUG)
            LOG.log(Level.FINE, "Resetting consumer due to null token/secret.");
        String consumerKey = mConsumer.getConsumerKey();
        String consumerSecret = mConsumer.getConsumerSecret();
        mConsumer = new CommonsHttpOAuthConsumer(consumerKey, consumerSecret, SignatureMethod.HMAC_SHA1);
    } else {
        mConsumer.setTokenWithSecret(token, tokenSecret);
    }
}
From source file: org.openspaces.rest.space.SpacePojoReadAPIController.java

/**
 * REST COUNT - Unrestricted for now
 *
 * TODO: add query body
 */
@RequestMapping(value = "/count", method = RequestMethod.GET)
public ModelAndView count(@RequestParam String spaceName, @RequestParam String locators,
        @RequestParam String classname, HttpServletResponse response) {
    if (logger.isLoggable(Level.FINE))
        logger.fine("creating read query with type: " + classname);

    Object template;
    try {
        template = Class.forName(classname).newInstance();
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    GigaSpace gigaSpace = ControllerUtils.xapCache.get(spaceName, locators);
    Integer cnt = gigaSpace.count(template);

    ModelAndView mv = new ModelAndView("jsonView");
    if (cnt != null) {
        mv.addObject("count", cnt);
    }
    response.setHeader("Access-Control-Allow-Origin", "*");
    return mv;
}
From source file: com.qualogy.qafe.service.DocumentServiceImpl.java

private DocumentOutput handleCSV(DocumentParameter parameter) {
    DocumentOutput out = null;
    String uuid = UUIDHelper.generateUUID();
    try {
        CSVReader reader = null;
        if (parameter.getDelimiter() != null) {
            char separator = parameter.getDelimiter().length() > 0 ? parameter.getDelimiter().charAt(0) : ',';
            reader = new CSVReader(new InputStreamReader(new ByteArrayInputStream(parameter.getData())),
                    separator);
        } else {
            reader = new CSVReader(new InputStreamReader(new ByteArrayInputStream(parameter.getData())));
        }
        List<String[]> sheetData = reader.readAll();
        out = handleCSVData(sheetData, parameter.isFirstFieldHeader());
    } catch (Exception e) {
        LOG.log(Level.FINE, e.getMessage(), e);
        out = new DocumentOutput();
    }
    out.setUuid(uuid);
    return out;
}
From source file: com.google.enterprise.connector.salesforce.storetype.DBStore.java

public DBStore(BaseConnector connector) {
    Connection connection = null;
    logger = Logger.getLogger(this.getClass().getPackage().getName());
    logger.log(Level.INFO, "Initialize DBStore ");
    this.connector = connector;
    // each connector instance has its own table in the same database
    this.instance_table = "i_" + connector.getInstanceName();
    Statement Stmt = null;
    ResultSet RS = null;
    DatabaseMetaData dbm = null;
    boolean table_exists = false;
    try {
        // check if the datasource/database exists
        Context initCtx = new InitialContext();
        Context envCtx = (Context) initCtx.lookup("java:comp/env");
        ds = (DataSource) envCtx.lookup(BaseConstants.CONNECTOR_DATASOURCE);
        connection = ds.getConnection();
        connection.setAutoCommit(true);
        dbm = connection.getMetaData();
        logger.log(Level.INFO, "Connected to databaseType " + dbm.getDatabaseProductName());
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Exception initializing Store Datasource " + ex);
        connection = null;
        return;
    }

    try {
        if (dbm.getDatabaseProductName().equals("MySQL")) {
            // check if the per-connector table exists
            logger.log(Level.FINE, "Checking to see if connector DB exists...");
            Stmt = connection.createStatement();
            RS = Stmt.executeQuery("desc " + instance_table);
            ResultSetMetaData rsMetaData = RS.getMetaData();
            if (rsMetaData.getColumnCount() > 0)
                table_exists = true;
            RS.close();
            Stmt.close();
        } else {
            logger.log(Level.SEVERE, "Unsupported DATABASE TYPE..." + dbm.getDatabaseProductName());
        }
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Exception initializing Store " + ex);
    }

    try {
        // if the per-instance table doesn't exist, create it
        if (!table_exists) {
            logger.log(Level.INFO, "Creating Instance Table " + instance_table);
            if (dbm.getDatabaseProductName().equals("MySQL")) {
                Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
                        ResultSet.CONCUR_READ_ONLY);
                String create_stmt = "";
                create_stmt = "CREATE TABLE `" + this.instance_table + "` ("
                        + "`crawl_set` decimal(19,5) NOT NULL,"
                        + "`insert_timestamp` timestamp NOT NULL default CURRENT_TIMESTAMP on update CURRENT_TIMESTAMP,"
                        + "`crawl_data` MEDIUMTEXT default NULL,"
                        + "PRIMARY KEY (`crawl_set`),"
                        + "KEY `set_index` (`crawl_set`)"
                        + ") ENGINE=MyISAM;";
                statement.addBatch(create_stmt);
                statement.executeBatch();
                statement.close();
            } else {
                logger.log(Level.INFO, "Instance Table " + instance_table + " already exists");
                // connection.close();
                // TODO: somehow figure out if we should delete this table here
            }
        }

        boolean qrtz_table_exists = false;
        if (dbm.getDatabaseProductName().equals("MySQL")) {
            // check if the global Quartz tables exist
            logger.log(Level.FINE, "Checking to see if quartz tables exists...");
            Stmt = connection.createStatement();
            try {
                RS = Stmt.executeQuery("desc QRTZ_JOB_DETAILS");
                ResultSetMetaData rsMetaData = RS.getMetaData();
                if (rsMetaData.getColumnCount() > 0)
                    qrtz_table_exists = true;
            } catch (Exception ex) {
                logger.log(Level.INFO, "Could not find Quartz Tables...creating now..");
            }
            RS.close();
            Stmt.close();
        } else {
            logger.log(Level.SEVERE, "Unsupported DATABASE TYPE..." + dbm.getDatabaseProductName());
        }

        if (!qrtz_table_exists) {
            logger.log(Level.INFO, "Creating Global Quartz Table ");
            // the quartz db setup scripts are at quartz-1.8.0/docs/dbTables/tables_mysql.sql
            // one set of Quartz tables can handle any number of triggers/crons
            Statement statement = connection.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            String create_stmt = "CREATE TABLE QRTZ_JOB_DETAILS (JOB_NAME VARCHAR(200) NOT NULL,JOB_GROUP VARCHAR(200) NOT NULL,DESCRIPTION VARCHAR(250) NULL,JOB_CLASS_NAME VARCHAR(250) NOT NULL,IS_DURABLE VARCHAR(1) NOT NULL,IS_VOLATILE VARCHAR(1) NOT NULL,IS_STATEFUL VARCHAR(1) NOT NULL,REQUESTS_RECOVERY VARCHAR(1) NOT NULL,JOB_DATA BLOB NULL,PRIMARY KEY (JOB_NAME,JOB_GROUP));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_JOB_LISTENERS ( JOB_NAME VARCHAR(200) NOT NULL, JOB_GROUP VARCHAR(200) NOT NULL, JOB_LISTENER VARCHAR(200) NOT NULL, PRIMARY KEY (JOB_NAME,JOB_GROUP,JOB_LISTENER), FOREIGN KEY (JOB_NAME,JOB_GROUP) REFERENCES QRTZ_JOB_DETAILS(JOB_NAME,JOB_GROUP));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_FIRED_TRIGGERS ( ENTRY_ID VARCHAR(95) NOT NULL, TRIGGER_NAME VARCHAR(200) NOT NULL, TRIGGER_GROUP VARCHAR(200) NOT NULL, IS_VOLATILE VARCHAR(1) NOT NULL, INSTANCE_NAME VARCHAR(200) NOT NULL, FIRED_TIME BIGINT(13) NOT NULL, PRIORITY INTEGER NOT NULL, STATE VARCHAR(16) NOT NULL, JOB_NAME VARCHAR(200) NULL, JOB_GROUP VARCHAR(200) NULL, IS_STATEFUL VARCHAR(1) NULL, REQUESTS_RECOVERY VARCHAR(1) NULL, PRIMARY KEY (ENTRY_ID));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_TRIGGERS ( TRIGGER_NAME VARCHAR(200) NOT NULL, TRIGGER_GROUP VARCHAR(200) NOT NULL, JOB_NAME VARCHAR(200) NOT NULL, JOB_GROUP VARCHAR(200) NOT NULL, IS_VOLATILE VARCHAR(1) NOT NULL, DESCRIPTION VARCHAR(250) NULL, NEXT_FIRE_TIME BIGINT(13) NULL, PREV_FIRE_TIME BIGINT(13) NULL, PRIORITY INTEGER NULL, TRIGGER_STATE VARCHAR(16) NOT NULL, TRIGGER_TYPE VARCHAR(8) NOT NULL, START_TIME BIGINT(13) NOT NULL, END_TIME BIGINT(13) NULL, CALENDAR_NAME VARCHAR(200) NULL, MISFIRE_INSTR SMALLINT(2) NULL, JOB_DATA BLOB NULL, PRIMARY KEY (TRIGGER_NAME,TRIGGER_GROUP), FOREIGN KEY (JOB_NAME,JOB_GROUP) REFERENCES QRTZ_JOB_DETAILS(JOB_NAME,JOB_GROUP));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_SIMPLE_TRIGGERS ( TRIGGER_NAME VARCHAR(200) NOT NULL, TRIGGER_GROUP VARCHAR(200) NOT NULL, REPEAT_COUNT BIGINT(7) NOT NULL, REPEAT_INTERVAL BIGINT(12) NOT NULL, TIMES_TRIGGERED BIGINT(10) NOT NULL, PRIMARY KEY (TRIGGER_NAME,TRIGGER_GROUP), FOREIGN KEY (TRIGGER_NAME,TRIGGER_GROUP) REFERENCES QRTZ_TRIGGERS(TRIGGER_NAME,TRIGGER_GROUP));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_CRON_TRIGGERS ( TRIGGER_NAME VARCHAR(200) NOT NULL, TRIGGER_GROUP VARCHAR(200) NOT NULL, CRON_EXPRESSION VARCHAR(200) NOT NULL, TIME_ZONE_ID VARCHAR(80), PRIMARY KEY (TRIGGER_NAME,TRIGGER_GROUP), FOREIGN KEY (TRIGGER_NAME,TRIGGER_GROUP) REFERENCES QRTZ_TRIGGERS(TRIGGER_NAME,TRIGGER_GROUP));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_BLOB_TRIGGERS ( TRIGGER_NAME VARCHAR(200) NOT NULL, TRIGGER_GROUP VARCHAR(200) NOT NULL, BLOB_DATA BLOB NULL, PRIMARY KEY (TRIGGER_NAME,TRIGGER_GROUP), FOREIGN KEY (TRIGGER_NAME,TRIGGER_GROUP) REFERENCES QRTZ_TRIGGERS(TRIGGER_NAME,TRIGGER_GROUP));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_TRIGGER_LISTENERS ( TRIGGER_NAME VARCHAR(200) NOT NULL, TRIGGER_GROUP VARCHAR(200) NOT NULL, TRIGGER_LISTENER VARCHAR(200) NOT NULL, PRIMARY KEY (TRIGGER_NAME,TRIGGER_GROUP,TRIGGER_LISTENER), FOREIGN KEY (TRIGGER_NAME,TRIGGER_GROUP) REFERENCES QRTZ_TRIGGERS(TRIGGER_NAME,TRIGGER_GROUP));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_CALENDARS ( CALENDAR_NAME VARCHAR(200) NOT NULL, CALENDAR BLOB NOT NULL, PRIMARY KEY (CALENDAR_NAME));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS ( TRIGGER_GROUP VARCHAR(200) NOT NULL, PRIMARY KEY (TRIGGER_GROUP));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_SCHEDULER_STATE ( INSTANCE_NAME VARCHAR(200) NOT NULL, LAST_CHECKIN_TIME BIGINT(13) NOT NULL, CHECKIN_INTERVAL BIGINT(13) NOT NULL, PRIMARY KEY (INSTANCE_NAME));";
            statement.addBatch(create_stmt);
            create_stmt = "CREATE TABLE QRTZ_LOCKS ( LOCK_NAME VARCHAR(40) NOT NULL, PRIMARY KEY (LOCK_NAME));";
            statement.addBatch(create_stmt);
            create_stmt = "INSERT INTO QRTZ_LOCKS values('TRIGGER_ACCESS');";
            statement.addBatch(create_stmt);
            create_stmt = "INSERT INTO QRTZ_LOCKS values('JOB_ACCESS');";
            statement.addBatch(create_stmt);
            create_stmt = "INSERT INTO QRTZ_LOCKS values('CALENDAR_ACCESS');";
            statement.addBatch(create_stmt);
            create_stmt = "INSERT INTO QRTZ_LOCKS values('STATE_ACCESS');";
            statement.addBatch(create_stmt);
            create_stmt = "INSERT INTO QRTZ_LOCKS values('MISFIRE_ACCESS');";
            statement.addBatch(create_stmt);
            statement.executeBatch();
            statement.close();
        } else {
            logger.log(Level.INFO, "Global Quartz Table already exists ");
        }
        connection.close();
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Exception Creating StoreTable " + ex);
    }
}
From source file: com.cedarsoft.couchdb.DesignDocumentsUpdater.java

/**
 * Returns the current revision (if there is one) or null
 *
 * @param path the path
 * @return the revision or null if there is no revision
 */
@Nullable
private static Revision getRevision(@Nonnull WebResource path) throws ActionFailedException, IOException {
    if (LOG.isLoggable(Level.FINE)) {
        LOG.fine("HEAD: " + path.toString());
    }

    ClientResponse response = path.get(ClientResponse.class);
    try {
        if (LOG.isLoggable(Level.FINE)) {
            LOG.fine("\tStatus: " + response.getStatus());
        }

        if (response.getClientResponseStatus() == ClientResponse.Status.NOT_FOUND) {
            return null;
        }

        ActionResponseSerializer.verifyNoError(response);
        if (response.getClientResponseStatus() != ClientResponse.Status.OK) {
            throw new IllegalStateException(
                    "Invalid response: " + response.getStatus() + ": " + response.getEntity(String.class));
        }

        JsonFactory jsonFactory = JacksonSupport.getJsonFactory();
        try (InputStream entityInputStream = response.getEntityInputStream()) {
            JsonParser parser = jsonFactory.createJsonParser(entityInputStream);
            JacksonParserWrapper wrapper = new JacksonParserWrapper(parser);
            wrapper.nextToken(JsonToken.START_OBJECT);
            wrapper.nextFieldValue("_id");
            wrapper.nextFieldValue("_rev");
            return new Revision(wrapper.getText());
        }
    } finally {
        response.close();
    }
}
From source file: com.cloudbees.jenkins.plugins.amazonecs.ECSService.java

AmazonECSClient getAmazonECSClient() {
    final AmazonECSClient client;

    ProxyConfiguration proxy = Jenkins.getInstance().proxy;
    ClientConfiguration clientConfiguration = new ClientConfiguration();
    if (proxy != null) {
        clientConfiguration.setProxyHost(proxy.name);
        clientConfiguration.setProxyPort(proxy.port);
        clientConfiguration.setProxyUsername(proxy.getUserName());
        clientConfiguration.setProxyPassword(proxy.getPassword());
    }

    AmazonWebServicesCredentials credentials = getCredentials(credentialsId);
    if (credentials == null) {
        // no credentials provided, rely on com.amazonaws.auth.DefaultAWSCredentialsProviderChain
        // to use the IAM Role defined at the EC2 instance level ...
        client = new AmazonECSClient(clientConfiguration);
    } else {
        if (LOGGER.isLoggable(Level.FINE)) {
            String awsAccessKeyId = credentials.getCredentials().getAWSAccessKeyId();
            String obfuscatedAccessKeyId = StringUtils.left(awsAccessKeyId, 4)
                    + StringUtils.repeat("*", awsAccessKeyId.length() - (2 * 4))
                    + StringUtils.right(awsAccessKeyId, 4);
            LOGGER.log(Level.FINE, "Connect to Amazon ECS with IAM Access Key {0}",
                    new Object[] { obfuscatedAccessKeyId });
        }
        client = new AmazonECSClient(credentials, clientConfiguration);
    }
    client.setRegion(getRegion(regionName));
    LOGGER.log(Level.FINE, "Selected Region: {0}", regionName);
    return client;
}
From source file: de.dal33t.powerfolder.util.ConfigurationLoader.java

/**
 * Processes/Handles a configuration (re-)load request.
 *
 * @param controller
 * @param clr
 */
public static void processMessage(final Controller controller, ConfigurationLoadRequest clr) {
    Reject.ifNull(controller, "Controller");
    Reject.ifNull(clr, "Message");
    try {
        LOG.info("Processing message: " + clr);
        if (StringUtils.isBlank(clr.getConfigURL())) {
            // Single key=value option
            if (clr.isKeyValue()) {
                boolean hasValue = controller.getConfig().containsKey(clr.getKey());
                if (clr.isReplaceExisting() == null || clr.isReplaceExisting()
                        || (!hasValue && !clr.isReplaceExisting())) {
                    if (clr.getValue() == null) {
                        controller.getConfig().remove(clr.getKey());
                    } else {
                        controller.getConfig().put(clr.getKey(), clr.getValue());
                    }
                    // Seems to be valid, store.
                    controller.saveConfig();
                    LOG.log(Level.INFO, "Update configuration " + clr.getKey() + "=" + clr.getValue());
                }
            }
        } else {
            Properties preConfig = ConfigurationLoader.loadPreConfiguration(clr.getConfigURL());
            if (preConfig != null) {
                boolean overwrite;
                if (clr.isReplaceExisting() != null) {
                    overwrite = clr.isReplaceExisting();
                } else {
                    overwrite = overwriteConfigEntries(preConfig);
                }
                if (dropFolderSettings(preConfig)) {
                    Set<String> entryIds = FolderSettings.loadEntryIds(controller.getConfig());
                    for (String entryId : entryIds) {
                        FolderSettings.removeEntries(controller.getConfig(), entryId);
                    }
                }
                int i = ConfigurationLoader.merge(preConfig, controller.getConfig(),
                        controller.getPreferences(), overwrite);
                LOG.log(Level.FINE, "Loaded/Merged " + i + " config/prefs entries from: " + clr.getConfigURL());
                ConfigurationEntry.CONFIG_URL.setValue(controller, clr.getConfigURL());
                // Seems to be valid, store.
                controller.saveConfig();
            } else {
                LOG.log(Level.WARNING, "Unable to load config from " + clr.getConfigURL());
            }
        }
        if (clr.isRestartRequired() && controller.isStarted()) {
            if (controller.getUptime() < 10000L) {
                controller.schedule(new Runnable() {
                    public void run() {
                        controller.shutdownAndRequestRestart();
                    }
                }, 10000L);
            } else {
                controller.shutdownAndRequestRestart();
            }
        }
    } catch (IOException e) {
        LOG.log(Level.SEVERE, "Unable to reload configuration: " + clr + ". " + e, e);
    }
}