List of usage examples for java.util.Properties.remove
@Override public synchronized Object remove(Object key)
From source file: org.lsc.jndi.JndiServices.java
private void initConnection() throws NamingException, IOException { // log new connection with it's details logConnectingTo(connProps);//from w w w . j a v a 2 s .c om /* should we negotiate TLS? */ if (connProps.get(TLS_CONFIGURATION) != null && (Boolean) connProps.get(TLS_CONFIGURATION)) { /* if we're going to do TLS, we mustn't BIND before the STARTTLS operation * so we remove credentials from the properties to stop JNDI from binding */ /* duplicate properties to avoid changing them (they are used as a cache key in getInstance() */ Properties localConnProps = new Properties(); localConnProps.putAll(connProps); String jndiContextAuthentication = localConnProps.getProperty(Context.SECURITY_AUTHENTICATION); String jndiContextPrincipal = localConnProps.getProperty(Context.SECURITY_PRINCIPAL); String jndiContextCredentials = localConnProps.getProperty(Context.SECURITY_CREDENTIALS); localConnProps.remove(Context.SECURITY_AUTHENTICATION); localConnProps.remove(Context.SECURITY_PRINCIPAL); localConnProps.remove(Context.SECURITY_CREDENTIALS); /* open the connection */ ctx = new InitialLdapContext(localConnProps, null); /* initiate the STARTTLS extended operation */ try { tlsResponse = (StartTlsResponse) ctx.extendedOperation(new StartTlsRequest()); tlsResponse.negotiate(); } catch (IOException e) { LOGGER.error("Error starting TLS encryption on connection to {}", localConnProps.getProperty(Context.PROVIDER_URL)); LOGGER.debug(e.toString(), e); throw e; } catch (NamingException e) { LOGGER.error("Error starting TLS encryption on connection to {}", localConnProps.getProperty(Context.PROVIDER_URL)); LOGGER.debug(e.toString(), e); throw e; } /* now we add the credentials back to the context, to BIND once TLS is started */ ctx.addToEnvironment(Context.SECURITY_AUTHENTICATION, jndiContextAuthentication); ctx.addToEnvironment(Context.SECURITY_PRINCIPAL, jndiContextPrincipal); ctx.addToEnvironment(Context.SECURITY_CREDENTIALS, jndiContextCredentials); } else { /* don't start 
TLS, just connect normally (this can be on ldap:// or ldaps://) */ ctx = new InitialLdapContext(connProps, null); } /* get LDAP naming context */ try { namingContext = new LdapUrl((String) ctx.getEnvironment().get(Context.PROVIDER_URL)); } catch (LdapURLEncodingException e) { LOGGER.error(e.toString()); LOGGER.debug(e.toString(), e); throw new NamingException(e.getMessage()); } /* handle options */ contextDn = namingContext.getDn() != null ? namingContext.getDn() : null; String pageSizeStr = (String) ctx.getEnvironment().get("java.naming.ldap.pageSize"); if (pageSizeStr != null) { pageSize = Integer.parseInt(pageSizeStr); } else { pageSize = -1; } sortedBy = (String) ctx.getEnvironment().get("java.naming.ldap.sortedBy"); String recursiveDeleteStr = (String) ctx.getEnvironment().get("java.naming.recursivedelete"); if (recursiveDeleteStr != null) { recursiveDelete = Boolean.parseBoolean(recursiveDeleteStr); } else { recursiveDelete = false; } /* Load SyncRepl response control */ LdapApiService ldapApiService = LdapApiServiceFactory.getSingleton(); ControlFactory<?> factory = new SyncStateValueFactory(ldapApiService); ldapApiService.registerControl(factory); /* Load Persistent Search response control */ factory = new PersistentSearchFactory(ldapApiService); ldapApiService.registerControl(factory); }
From source file: com.streamsets.datacollector.cluster.BaseClusterProvider.java
@VisibleForTesting void rewriteProperties(File sdcPropertiesFile, List<File> additionalPropFiles, File etcStagingDir, Map<String, String> sourceConfigs, Map<String, String> sourceInfo, String clusterToken, Optional<String> mesosURL) throws IOException { InputStream sdcInStream = null; OutputStream sdcOutStream = null; Properties sdcProperties = new Properties(); try {/*from w w w. j a v a2 s . c o m*/ sdcInStream = new FileInputStream(sdcPropertiesFile); sdcProperties.load(sdcInStream); for (File propFiles : additionalPropFiles) { sdcInStream = new FileInputStream(propFiles); sdcProperties.load(sdcInStream); } copyDpmTokenIfRequired(sdcProperties, etcStagingDir); sdcProperties.setProperty(RuntimeModule.PIPELINE_EXECUTION_MODE_KEY, ExecutionMode.SLAVE.name()); sdcProperties.setProperty(WebServerTask.REALM_FILE_PERMISSION_CHECK, "false"); // Remove always problematical properties for (String property : SDC_CONFIGS_TO_ALWAYS_REMOVE) { sdcProperties.remove(property); } // Remove additional properties that user might need to String propertiesToRemove = sdcProperties.getProperty(CONFIG_ADDITIONAL_CONFIGS_TO_REMOVE); if (propertiesToRemove != null) { for (String property : propertiesToRemove.split(",")) { sdcProperties.remove(property); } } if (runtimeInfo != null) { if (runtimeInfo.getSSLContext() != null) { sdcProperties.setProperty(WebServerTask.HTTP_PORT_KEY, "-1"); sdcProperties.setProperty(WebServerTask.HTTPS_PORT_KEY, "0"); } else { sdcProperties.setProperty(WebServerTask.HTTP_PORT_KEY, "0"); sdcProperties.setProperty(WebServerTask.HTTPS_PORT_KEY, "-1"); } String id = String.valueOf(runtimeInfo.getId()); sdcProperties.setProperty(Constants.SDC_ID, id); sdcProperties.setProperty(Constants.PIPELINE_CLUSTER_TOKEN_KEY, clusterToken); sdcProperties.setProperty(Constants.CALLBACK_SERVER_URL_KEY, runtimeInfo.getClusterCallbackURL()); } if (mesosURL.isPresent()) { sdcProperties.setProperty(Constants.MESOS_JAR_URL, mesosURL.get()); } addClusterConfigs(sourceConfigs, 
sdcProperties); addClusterConfigs(sourceInfo, sdcProperties); sdcOutStream = new FileOutputStream(sdcPropertiesFile); sdcProperties.store(sdcOutStream, null); getLog().debug("sourceConfigs = {}", sourceConfigs); getLog().debug("sourceInfo = {}", sourceInfo); getLog().debug("sdcProperties = {}", sdcProperties); sdcOutStream.flush(); sdcOutStream.close(); } finally { if (sdcInStream != null) { IOUtils.closeQuietly(sdcInStream); } if (sdcOutStream != null) { IOUtils.closeQuietly(sdcOutStream); } } }
From source file: org.sakaiproject.blti.tool.LTIAdminTool.java
/**
 * Handles the create/update form post for an LTI tool definition. Requires
 * maintain permission; re-encrypts a changed secret, inserts or updates the
 * tool via the LTI service, and switches back to the tool-system panel on
 * success (or redisplays the form with an alert on failure).
 *
 * @param data the Jetspeed request data carrying form parameters
 * @param context the velocity context (unused here, required by the dispatch signature)
 */
public void doToolPut(RunData data, Context context) {
    String peid = ((JetspeedRunData) data).getJs_peid();
    SessionState state = ((JetspeedRunData) data).getPortletSessionState(peid);
    if (!ltiService.isMaintain()) {
        addAlert(state, rb.getString("error.maintain.delete"));
        switchPanel(state, "Error");
        return;
    }
    Properties reqProps = data.getParameters().getProperties();
    // The form shows a placeholder instead of the stored secret; if it comes
    // back unchanged the user did not edit it, so drop it to keep the old one.
    String newSecret = reqProps.getProperty(LTIService.LTI_SECRET);
    if (SECRET_HIDDEN.equals(newSecret)) {
        reqProps.remove(LTIService.LTI_SECRET);
        newSecret = null;
    }
    if (newSecret != null) {
        newSecret = SakaiBLTIUtil.encryptSecret(newSecret.trim());
        reqProps.setProperty(LTIService.LTI_SECRET, newSecret);
    }
    String id = data.getParameters().getString(LTIService.LTI_ID);
    String success = null;
    Object retval = null;
    if (id == null) {
        retval = ltiService.insertTool(reqProps);
        success = rb.getString("success.created");
    } else {
        Long key = Long.valueOf(id); // was `new Long(id)` — deprecated boxing constructor
        retval = ltiService.updateTool(key, reqProps);
        success = rb.getString("success.updated");
    }
    // The service signals a validation/persistence error by returning a String.
    if (retval instanceof String) {
        state.setAttribute(STATE_POST, reqProps);
        addAlert(state, (String) retval);
        state.setAttribute(STATE_ID, id);
        return;
    }
    state.setAttribute(STATE_SUCCESS, success);
    switchPanel(state, "ToolSystem");
}
From source file: org.eclipse.gyrex.cloud.internal.preferences.ZooKeeperBasedPreferences.java
/** * Updates the local node properties with properties from ZooKeeper. * <p>//w w w . j a v a 2 s . c o m * This method is called by {@link ZooKeeperPreferencesService} when * properties have been loaded from ZooKeeper. * <p> * The local properties will be completely replaced with the properties * loaded from the specified bytes. Properties that exist locally but not * remotely will be removed locally. Properties that exist remotely but not * locally will be added locally. Proper events will be fired. * </p> * <p> * The replace strategy relies on the node version provided by ZooKeeper. * The version of a ZooKeeper node is used as the properties version. When * writing properties to ZooKeeper we'll receive a response * </p> * * @param remotePropertyBytes * @param propertiesVersion * @throws IOException */ final void loadProperties(final byte[] remotePropertyBytes, final int propertiesVersion) throws IOException { // don't do anything if removed if (removed) { return; } // collect events final List<PreferenceChangeEvent> events = new ArrayList<PreferenceChangeEvent>(); // prevent concurrent property modification (eg. 
remote _and_ local flush) propertiesModificationLock.lock(); try { if (removed) { return; } if (CloudDebug.zooKeeperPreferences) { LOG.debug("Loading properties for node {} (version {})", this, propertiesVersion); } // load remote properties // (note, can be null if there is a node in ZooKeeper but without data) final Properties loadedProps = new Properties(); if (remotePropertyBytes != null) { loadedProps.load(new ByteArrayInputStream(remotePropertyBytes)); // check version final Object formatVersion = loadedProps.remove(VERSION_KEY); if ((formatVersion == null) || !VERSION_VALUE.equals(formatVersion)) { // ignore for now LOG.warn("Properties with incompatible storage format version ({}) found for node {}.", formatVersion, this); return; } } // update properties version (after they were de-serialized successfully) this.propertiesVersion = propertiesVersion; propertiesLoadTimestamp = System.currentTimeMillis(); // collect all property names final Set<String> propertyNames = new HashSet<String>(); propertyNames.addAll(loadedProps.stringPropertyNames()); propertyNames.addAll(properties.stringPropertyNames()); // note, the policy here is very simple: we completely // replace the local properties with the loaded properties; // this keeps the implementation simple and also delegates // the coordination of concurrent updates in a distributed // system a layer higher to the clients of preferences API // discover new, updated and removed properties for (final String key : propertyNames) { final String newValue = loadedProps.getProperty(key); final String oldValue = properties.getProperty(key); if (newValue == null) { // does not exists in ZooKeeper, assume removed properties.remove(key); if (CloudDebug.zooKeeperPreferences) { LOG.debug("Node {} property removed: {}", this, key); } // create event events.add(new PreferenceChangeEvent(this, key, oldValue, newValue)); } else if ((oldValue == null) || !oldValue.equals(newValue)) { // assume added or updated in ZooKeeper 
properties.put(key, newValue); if (CloudDebug.zooKeeperPreferences) { if (oldValue == null) { LOG.debug("Node {} property added: {}={}", new Object[] { this, key, newValue }); } else { LOG.debug("Node {} property updated: {}={}", new Object[] { this, key, newValue }); } } // create event events.add(new PreferenceChangeEvent(this, key, oldValue, newValue)); } } if (CloudDebug.zooKeeperPreferences) { LOG.debug("Loaded properties for node {} (now at version {})", this, propertiesVersion); } } finally { propertiesModificationLock.unlock(); } // fire events outside of lock // TODO we need to understand event ordering better (eg. concurrent remote updates) // (this may result in sending events asynchronously through an ordered queue, but for now we do it directly) for (final PreferenceChangeEvent event : events) { firePreferenceEvent(event); } }
From source file: com.jaspersoft.studio.statistics.UsageManager.java
/** * Send the statistics to the defined server. They are read from the properties filed and converted into a JSON * string. Then this string is sent to the server as a post parameter named data *///from ww w . jav a2s. c om protected void sendStatistics() { BufferedReader responseReader = null; DataOutputStream postWriter = null; try { if (!STATISTICS_SERVER_URL.trim().isEmpty()) { URL obj = new URL(STATISTICS_SERVER_URL); HttpURLConnection con = (HttpURLConnection) obj.openConnection(); // add request header con.setRequestMethod("POST"); //$NON-NLS-1$ con.setRequestProperty("User-Agent", "Mozilla/5.0"); //$NON-NLS-1$ //$NON-NLS-2$ con.setRequestProperty("Accept-Language", "en-US,en;q=0.5"); //$NON-NLS-1$ //$NON-NLS-2$ // Read and convert the statistics into a JSON string UsagesContainer container = new UsagesContainer(getAppDataFolder().getName()); boolean fileChanged = false; synchronized (UsageManager.this) { Properties prop = getStatisticsContainer(); for (Object key : new ArrayList<Object>(prop.keySet())) { try { String[] id_category = key.toString().split(Pattern.quote(ID_CATEGORY_SEPARATOR)); String value = prop.getProperty(key.toString(), "0"); int usageNumber = Integer.parseInt(value); //$NON-NLS-1$ String version = getVersion(); //Check if the id contains the version if (id_category.length == 3) { version = id_category[2]; } else { //Old structure, remove the old entry and insert the new fixed one //this is a really limit case and should almost never happen prop.remove(key); String fixed_key = id_category[0] + ID_CATEGORY_SEPARATOR + id_category[1] + ID_CATEGORY_SEPARATOR + version; prop.setProperty(fixed_key, value); fileChanged = true; } container.addStat( new UsageStatistic(id_category[0], id_category[1], version, usageNumber)); } catch (Exception ex) { //if a key is invalid remove it ex.printStackTrace(); prop.remove(key); fileChanged = true; } } } if (fileChanged) { //The statistics file was changed, maybe a fix or an invalid property removed 
//write it corrected on the disk writeStatsToDisk.cancel(); writeStatsToDisk.setPriority(Job.SHORT); writeStatsToDisk.schedule(MINIMUM_WAIT_TIME); } ObjectMapper mapper = new ObjectMapper(); String serializedData = mapper.writeValueAsString(container); // Send post request with the JSON string as the data parameter String urlParameters = "data=" + serializedData; //$NON-NLS-1$ con.setDoOutput(true); postWriter = new DataOutputStream(con.getOutputStream()); postWriter.writeBytes(urlParameters); postWriter.flush(); int responseCode = con.getResponseCode(); responseReader = new BufferedReader(new InputStreamReader(con.getInputStream())); String inputLine; StringBuffer response = new StringBuffer(); while ((inputLine = responseReader.readLine()) != null) { response.append(inputLine); } // Update the upload time if (responseCode == 200 && ModelUtils.safeEquals(response.toString(), "ok")) { setInstallationInfo(TIMESTAMP_INFO, String.valueOf(getCurrentTime())); } else { //print result System.out.println("Response error: " + response.toString()); } } } catch (Exception ex) { ex.printStackTrace(); JaspersoftStudioPlugin.getInstance().logError(Messages.UsageManager_errorStatUpload, ex); } finally { FileUtils.closeStream(postWriter); FileUtils.closeStream(responseReader); } }
From source file: org.apache.phoenix.query.BaseTest.java
private static void deletePriorSchemas(long ts, String url) throws Exception { Properties props = new Properties(); props.put(QueryServices.QUEUE_SIZE_ATTRIB, Integer.toString(1024)); if (ts != HConstants.LATEST_TIMESTAMP) { props.setProperty(CURRENT_SCN_ATTRIB, Long.toString(ts)); }/*from w ww . ja v a2s . co m*/ try (Connection conn = DriverManager.getConnection(url, props)) { DatabaseMetaData dbmd = conn.getMetaData(); ResultSet rs = dbmd.getSchemas(); while (rs.next()) { String schemaName = rs.getString(PhoenixDatabaseMetaData.TABLE_SCHEM); if (schemaName.equals(PhoenixDatabaseMetaData.SYSTEM_SCHEMA_NAME)) { continue; } schemaName = SchemaUtil.getEscapedArgument(schemaName); String ddl = "DROP SCHEMA " + schemaName; conn.createStatement().executeUpdate(ddl); } rs.close(); } // Make sure all schemas have been dropped props.remove(CURRENT_SCN_ATTRIB); try (Connection seeLatestConn = DriverManager.getConnection(url, props)) { DatabaseMetaData dbmd = seeLatestConn.getMetaData(); ResultSet rs = dbmd.getSchemas(); boolean hasSchemas = rs.next(); if (hasSchemas) { String schemaName = rs.getString(PhoenixDatabaseMetaData.TABLE_SCHEM); if (schemaName.equals(PhoenixDatabaseMetaData.SYSTEM_SCHEMA_NAME)) { hasSchemas = rs.next(); } } if (hasSchemas) { fail("The following schemas are not dropped that should be:" + getSchemaNames(rs)); } } }
From source file: org.webdavaccess.LocalFileSystemStorage.java
/** * Delete properties for given resource/* ww w .j av a 2s . c o m*/ * * @param resourceUri for which to delete properties */ public void deleteProperties(String resourceUri, Properties propertiesToDelete) { resourceUri = normalize(resourceUri); // Try cache first Properties props = (Properties) mPropertiesCache.get(resourceUri); if (props != null) mPropertiesCache.remove(resourceUri); File file = getPropertiesFile(resourceUri); if (file == null || !file.exists()) { return; } InputStream in = null; Properties persisted = new Properties(); try { in = new FileInputStream(file); persisted.loadFromXML(in); } catch (Exception e) { log.warn("Failed to get properties from cache for " + resourceUri); return; } finally { if (in != null) try { in.close(); } catch (Exception e) { } } boolean changed = false; Enumeration en = persisted.keys(); HashMap toRemove = new HashMap(); while (en.hasMoreElements()) { String key = (String) en.nextElement(); if (isResourceProperty(resourceUri, key)) { if (propertiesToDelete != null) { String newKey = getPropertyKey(resourceUri, key); if (propertiesToDelete.getProperty(newKey) != null) toRemove.put(key, persisted.getProperty(key)); } else { toRemove.put(key, persisted.getProperty(key)); } } } changed = !toRemove.isEmpty(); for (Iterator it = toRemove.keySet().iterator(); it.hasNext();) { String key = (String) it.next(); persisted.remove(key); } if (changed) { // Store the updates properties OutputStream os = null; try { os = new FileOutputStream(file); persisted.storeToXML(os, ""); } catch (Exception e) { log.warn("Failed to store properties for " + resourceUri); } finally { if (os != null) try { os.close(); } catch (Exception e) { } } } }
From source file: ca.hec.commons.utils.MergePropertiesUtils.java
/** * Merge newProps to mainProps./*from w ww . j a v a 2 s . com*/ * NOTE: The idea is that we want to write out the properties in exactly the same order, for future comparison purposes. * * @param newPropertiesFile * @param newProps * @return nb of properties from main props file. */ public static void merge(File newPropertiesFile, Properties updatedProps) throws Exception { /** * 1) Read line by line of the new PropertiesFile (Sakai 2.9.1) * For each line: extract property * check if we have a match one in the propToMerge * - if no, then rewrite the same line * - if yes: - if same value, rewrite the same line * - if different value, rewrite the prop with the new value * - For both cases, delete the key from newProperties. * * 2) At the end, write the remaining list of propToMerge at * the end of mainProp */ try { int nbPropsInMain = 0; int nbPropsChanged = 0; int nbPropsSimilar = 0; int nbPropsNotInNew = 0; // Open the file that is the first // command line parameter FileInputStream fstream = new FileInputStream(newPropertiesFile); // Get the object of DataInputStream DataInputStream in = new DataInputStream(fstream); BufferedReader br = new BufferedReader(new InputStreamReader(in)); LineWithContinuation lineIn; // Read File Line By Line while ((lineIn = LineWithContinuation.readLineWithContinuation(br)) != null) { KeyValue keyValue = extractKeyValue(lineIn.getFullLine()); // May be a comment line, or blank line, or not a line containing key & property pair (we expect "key = value" line). // Simply echo the line back. 
if (keyValue == null) { System.out.println(lineIn.getFullLine()); continue; } nbPropsInMain++; String key = keyValue.key; //System.out.println(key); String newValue = updatedProps.getProperty(key); String valueEscaped = unescapeJava(keyValue.value); if (newValue != null) { if (!newValue.equals(valueEscaped)) { String newLine = composeNewPropLine(key, StringEscapeUtils.escapeJava(newValue)); System.out.println(newLine); nbPropsChanged++; } else { System.out.println(lineIn.getLineWithReturn()); nbPropsSimilar++; } // remove the key from newProps because it is used updatedProps.remove(key); } else { System.out.println(lineIn.getLineWithReturn()); nbPropsNotInNew++; } } // Close the input stream in.close(); System.out.println("\n\n### " + nbPropsInMain + " properties in SAKAI 11 (" + nbPropsChanged + " changed, " + nbPropsSimilar + " props with same value in both versions, " + nbPropsNotInNew + " not in 2.9.1)"); } catch (Exception e) {// Catch exception if any System.err.println("Error: " + e.getMessage()); throw e; } }
From source file: org.wso2.carbon.clustering.hazelcast.util.MemberUtils.java
public static org.apache.axis2.clustering.Member getLocalMember(String domain, String localMemberHost, int localMemberPort) { if (!isInitialized) { throw new IllegalStateException("MemberUtils not initialized. Call MemberUtils.init() first"); }/*from w ww . j a v a 2s .c om*/ org.apache.axis2.clustering.Member member = new org.apache.axis2.clustering.Member(localMemberHost, localMemberPort); Properties memberInfo = new Properties(); AxisConfiguration axisConfig = configurationContext.getAxisConfiguration(); TransportInDescription httpTransport = axisConfig.getTransportIn("http"); int portOffset = 0; Parameter param = getParameter(ClusteringConstants.Parameters.AVOID_INITIATION); if (param != null && !JavaUtils.isTrueExplicitly(param.getValue())) { //AvoidInitialization = false, Hence we set the portOffset if (System.getProperty("portOffset") != null) { portOffset = Integer.parseInt(System.getProperty("portOffset")); } } if (httpTransport != null) { Parameter port = httpTransport.getParameter("port"); if (port != null) { int httpPort = Integer.valueOf((String) port.getValue()) + portOffset; member.setHttpPort(httpPort); } } TransportInDescription httpsTransport = axisConfig.getTransportIn("https"); if (httpsTransport != null) { Parameter port = httpsTransport.getParameter("port"); if (port != null) { int httpsPort = Integer.valueOf((String) port.getValue()) + portOffset; member.setHttpsPort(httpsPort); } } Parameter isActiveParam = getParameter(ClusteringConstants.Parameters.IS_ACTIVE); if (isActiveParam != null) { memberInfo.setProperty(ClusteringConstants.Parameters.IS_ACTIVE, (String) isActiveParam.getValue()); } if (localMemberHost != null) { memberInfo.setProperty("hostName", localMemberHost); } Parameter propsParam = getParameter("properties"); if (propsParam != null) { OMElement paramEle = propsParam.getParameterElement(); for (Iterator iter = paramEle.getChildrenWithLocalName("property"); iter.hasNext();) { OMElement propEle = (OMElement) iter.next(); 
OMAttribute nameAttrib = propEle.getAttribute(new QName("name")); if (nameAttrib != null) { String attribName = nameAttrib.getAttributeValue(); attribName = replaceProperty(attribName, memberInfo); OMAttribute valueAttrib = propEle.getAttribute(new QName("value")); if (valueAttrib != null) { String attribVal = valueAttrib.getAttributeValue(); attribVal = replaceProperty(attribVal, memberInfo); memberInfo.setProperty(attribName, attribVal); } } } } memberInfo.remove("hostName"); // this was needed only to populate other properties. No need to send it. member.setProperties(memberInfo); member.setDomain(domain); return member; }
From source file: com.googlecode.fascinator.HarvestClient.java
/** * Process each objects//from w w w . j a v a2 s. c om * * @param oid Object Id * @param commit Flag to commit after indexing * @throws StorageException If storage is not found * @throws TransformerException If transformer fail to transform the object * @throws MessagingException If the object could not be queue'd */ private void processObject(String oid, boolean commit) throws TransformerException, StorageException, MessagingException { // get the object DigitalObject object = storage.getObject(oid); String isNew = "false"; String isModified = "false"; // update object metadata Properties props = object.getMetadata(); // TODO - objectId is redundant now? props.setProperty("objectId", object.getId()); props.setProperty("scriptType", config.getString(null, "indexer", "script", "type")); // Set our config and rules data as properties on the object props.setProperty("rulesOid", rulesObject.getId()); props.setProperty("rulesPid", rulesObject.getSourceId()); props.setProperty("jsonConfigOid", configObject.getId()); props.setProperty("jsonConfigPid", configObject.getSourceId()); if (fileOwner != null) { props.setProperty("owner", fileOwner); } JsonObject params = config.getObject("indexer", "params"); for (Object key : params.keySet()) { props.setProperty(key.toString(), params.get(key).toString()); } // check this object's status (i.e. new or modified) and count if (props.containsKey("isNew") && Boolean.parseBoolean(props.getProperty("isNew"))) { isNew = "true"; } else if (props.containsKey("isModified")) { if (Boolean.parseBoolean(props.getProperty("isModified"))) { isModified = "true"; } } // now remove these properties. 
We don't need them anymore props.remove("isNew"); props.remove("isModified"); // done with the object object.close(); // put in event log Map<String, String> msgs = new LinkedHashMap<String, String>(); msgs.put("harvestId", harvestId); msgs.put("isNew", isNew); msgs.put("isModified", isModified); msgs.put("repository_type", repoType); msgs.put("repository_name", repoName); sentMessage(oid, "modify", msgs); // queue the object for indexing queueHarvest(oid, configFile, commit); }