List of usage examples for java.util.SortedMap.keySet()
Set<K> keySet();
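The signature alone does not show the two properties the examples below rely on: the returned Set is a view backed by the map, and its iterator walks the keys in the map's sort order. A minimal, self-contained sketch (class and variable names here are illustrative only, not taken from the examples below):

import java.util.SortedMap;
import java.util.TreeMap;

public class KeySetDemo {
    public static void main(String[] args) {
        SortedMap<String, Integer> ports = new TreeMap<>();
        ports.put("ssh", 22);
        ports.put("http", 80);
        ports.put("dns", 53);

        // keySet() iterates in the map's key order: dns, http, ssh
        // under the natural String ordering used by this TreeMap.
        for (String name : ports.keySet()) {
            System.out.println(name + " -> " + ports.get(name));
        }

        // The set is a view backed by the map: removing a key through
        // the view also removes the corresponding entry from the map.
        ports.keySet().remove("dns");
        System.out.println(ports); // {http=80, ssh=22}
    }
}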
From source file: org.apache.accumulo.tserver.Tablet.java
/** * yet another constructor - this one allows us to avoid costly lookups into the Metadata table if we already know the files we need - as at split time *///from w w w. java 2 s .c o m private Tablet(final TabletServer tabletServer, final Text location, final KeyExtent extent, final TabletResourceManager trm, final Configuration conf, final VolumeManager fs, final List<LogEntry> logEntries, final SortedMap<FileRef, DataFileValue> datafiles, String time, final TServerInstance lastLocation, Set<FileRef> scanFiles, long initFlushID, long initCompactID) throws IOException { Path locationPath; if (location.find(":") >= 0) { locationPath = new Path(location.toString()); } else { locationPath = fs.getFullPath(FileType.TABLE, extent.getTableId().toString() + location.toString()); } locationPath = DirectoryDecommissioner.checkTabletDirectory(tabletServer, fs, extent, locationPath); this.location = locationPath; this.lastLocation = lastLocation; this.tabletDirectory = location.toString(); this.conf = conf; this.acuTableConf = tabletServer.getTableConfiguration(extent); this.fs = fs; this.extent = extent; this.tabletResources = trm; this.lastFlushID = initFlushID; this.lastCompactID = initCompactID; if (extent.isRootTablet()) { long rtime = Long.MIN_VALUE; for (FileRef ref : datafiles.keySet()) { Path path = ref.path(); FileSystem ns = fs.getFileSystemByPath(path); FileSKVIterator reader = FileOperations.getInstance().openReader(path.toString(), true, ns, ns.getConf(), tabletServer.getTableConfiguration(extent)); long maxTime = -1; try { while (reader.hasTop()) { maxTime = Math.max(maxTime, reader.getTopKey().getTimestamp()); reader.next(); } } finally { reader.close(); } if (maxTime > rtime) { time = TabletTime.LOGICAL_TIME_ID + "" + maxTime; rtime = maxTime; } } } if (time == null && datafiles.isEmpty() && extent.equals(RootTable.OLD_EXTENT)) { // recovery... 
old root tablet has no data, so time doesn't matter: time = TabletTime.LOGICAL_TIME_ID + "" + Long.MIN_VALUE; } this.tabletServer = tabletServer; this.logId = tabletServer.createLogId(extent); this.timer = new TabletStatsKeeper(); setupDefaultSecurityLabels(extent); tabletMemory = new TabletMemory(); tabletTime = TabletTime.getInstance(time); persistedTime = tabletTime.getTime(); acuTableConf.addObserver(configObserver = new ConfigurationObserver() { private void reloadConstraints() { constraintChecker.set(new ConstraintChecker(acuTableConf)); } @Override public void propertiesChanged() { reloadConstraints(); try { setupDefaultSecurityLabels(extent); } catch (Exception e) { log.error("Failed to reload default security labels for extent: " + extent.toString()); } } @Override public void propertyChanged(String prop) { if (prop.startsWith(Property.TABLE_CONSTRAINT_PREFIX.getKey())) reloadConstraints(); else if (prop.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) { try { log.info("Default security labels changed for extent: " + extent.toString()); setupDefaultSecurityLabels(extent); } catch (Exception e) { log.error("Failed to reload default security labels for extent: " + extent.toString()); } } } @Override public void sessionExpired() { log.debug("Session expired, no longer updating per table props..."); } }); acuTableConf.getNamespaceConfiguration().addObserver(configObserver); // Force a load of any per-table properties configObserver.propertiesChanged(); if (!logEntries.isEmpty()) { log.info("Starting Write-Ahead Log recovery for " + this.extent); final long[] count = new long[2]; final CommitSession commitSession = tabletMemory.getCommitSession(); count[1] = Long.MIN_VALUE; try { Set<String> absPaths = new HashSet<String>(); for (FileRef ref : datafiles.keySet()) absPaths.add(ref.path().toString()); tabletServer.recover(this.tabletServer.getFileSystem(), extent, acuTableConf, logEntries, absPaths, new MutationReceiver() { @Override public void receive(Mutation m) { // LogReader.printMutation(m); Collection<ColumnUpdate> muts = m.getUpdates(); for (ColumnUpdate columnUpdate : muts) { if (!columnUpdate.hasTimestamp()) { // if it is not a user set timestamp, it must have been set // by the system count[1] = Math.max(count[1], columnUpdate.getTimestamp()); } } tabletMemory.mutate(commitSession, Collections.singletonList(m)); count[0]++; } }); if (count[1] != Long.MIN_VALUE) { tabletTime.useMaxTimeFromWALog(count[1]); } commitSession.updateMaxCommittedTime(tabletTime.getTime()); if (count[0] == 0) { MetadataTableUtil.removeUnusedWALEntries(extent, logEntries, tabletServer.getLock()); logEntries.clear(); } } catch (Throwable t) { if (acuTableConf.getBoolean(Property.TABLE_FAILURES_IGNORE)) { log.warn("Error recovering from log files: ", t); } else { throw new RuntimeException(t); } } // make some closed references that represent the recovered logs currentLogs = new HashSet<DfsLogger>(); for (LogEntry logEntry : logEntries) { for (String log : logEntry.logSet) { currentLogs.add(new DfsLogger(tabletServer.getServerConfig(), log)); } } log.info("Write-Ahead Log recovery complete for " + this.extent + " (" + count[0] + " mutations applied, " + tabletMemory.getNumEntries() + " entries created)"); } String contextName = acuTableConf.get(Property.TABLE_CLASSPATH); if (contextName != null && !contextName.equals("")) { // initialize context classloader, instead of possibly waiting for it to initialize for a scan // TODO this could hang, causing other tablets to fail to load - 
ACCUMULO-1292 AccumuloVFSClassLoader.getContextManager().getClassLoader(contextName); } // do this last after tablet is completely setup because it // could cause major compaction to start datafileManager = new DatafileManager(datafiles); computeNumEntries(); datafileManager.removeFilesAfterScan(scanFiles); // look for hints of a failure on the previous tablet server if (!logEntries.isEmpty() || needsMajorCompaction(MajorCompactionReason.NORMAL)) { // look for any temp files hanging around removeOldTemporaryFiles(); } log.log(TLevel.TABLET_HIST, extent + " opened"); }
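The keySet() calls are easy to miss in this constructor: datafiles is a SortedMap keyed by file references, and the code iterates datafiles.keySet() twice, once to open each root-tablet file and once to collect absolute path strings for write-ahead log recovery. A stripped-down sketch of the second pattern, using plain String paths and Long sizes as stand-ins for Accumulo's FileRef and DataFileValue (those types, and the recover() call that consumes the resulting set, are omitted here):

import java.util.HashSet;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

public class DatafilePaths {
    // Collect the absolute path of every datafile, visiting the entries
    // in sorted key order, mirroring the absPaths loop in the constructor.
    static Set<String> absolutePaths(SortedMap<String, Long> datafiles) {
        Set<String> absPaths = new HashSet<>();
        for (String ref : datafiles.keySet()) {
            absPaths.add(ref); // the real code calls ref.path().toString()
        }
        return absPaths;
    }

    public static void main(String[] args) {
        SortedMap<String, Long> datafiles = new TreeMap<>();
        datafiles.put("hdfs://nn/accumulo/tables/1/t-0001/F0001.rf", 1024L);
        datafiles.put("hdfs://nn/accumulo/tables/1/t-0001/F0002.rf", 2048L);
        System.out.println(absolutePaths(datafiles));
    }
}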
From source file: org.apache.accumulo.tserver.tablet.Tablet.java
public Tablet(final TabletServer tabletServer, final KeyExtent extent, final TabletResourceManager trm, TabletData data) throws IOException { this.tabletServer = tabletServer; this.extent = extent; this.tabletResources = trm; this.lastLocation = data.getLastLocation(); this.lastFlushID = data.getFlushID(); this.lastCompactID = data.getCompactID(); this.splitCreationTime = data.getSplitTime(); this.tabletTime = TabletTime.getInstance(data.getTime()); this.persistedTime = tabletTime.getTime(); this.logId = tabletServer.createLogId(extent); TableConfiguration tblConf = tabletServer.getTableConfiguration(extent); if (null == tblConf) { Tables.clearCache(tabletServer.getInstance()); tblConf = tabletServer.getTableConfiguration(extent); requireNonNull(tblConf, "Could not get table configuration for " + extent.getTableId()); }/*from w ww . j a v a 2s . c om*/ this.tableConfiguration = tblConf; // translate any volume changes VolumeManager fs = tabletServer.getFileSystem(); boolean replicationEnabled = ReplicationConfigurationUtil.isEnabled(extent, this.tableConfiguration); TabletFiles tabletPaths = new TabletFiles(data.getDirectory(), data.getLogEntris(), data.getDataFiles()); tabletPaths = VolumeUtil.updateTabletVolumes(tabletServer, tabletServer.getLock(), fs, extent, tabletPaths, replicationEnabled); // deal with relative path for the directory Path locationPath; if (tabletPaths.dir.contains(":")) { locationPath = new Path(tabletPaths.dir); } else { locationPath = tabletServer.getFileSystem().getFullPath(FileType.TABLE, extent.getTableId() + tabletPaths.dir); } this.location = locationPath; this.tabletDirectory = tabletPaths.dir; for (Entry<Long, List<FileRef>> entry : data.getBulkImported().entrySet()) { this.bulkImported.put(entry.getKey(), new CopyOnWriteArrayList<FileRef>(entry.getValue())); } setupDefaultSecurityLabels(extent); final List<LogEntry> logEntries = tabletPaths.logEntries; final SortedMap<FileRef, DataFileValue> datafiles = tabletPaths.datafiles; tableConfiguration.addObserver(configObserver = new ConfigurationObserver() { private void reloadConstraints() { log.debug("Reloading constraints for extent: " + extent); constraintChecker.set(new ConstraintChecker(tableConfiguration)); } @Override public void propertiesChanged() { reloadConstraints(); try { setupDefaultSecurityLabels(extent); } catch (Exception e) { log.error("Failed to reload default security labels for extent: " + extent.toString()); } } @Override public void propertyChanged(String prop) { if (prop.startsWith(Property.TABLE_CONSTRAINT_PREFIX.getKey())) reloadConstraints(); else if (prop.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) { try { log.info("Default security labels changed for extent: " + extent.toString()); setupDefaultSecurityLabels(extent); } catch (Exception e) { log.error("Failed to reload default security labels for extent: " + extent.toString()); } } } @Override public void sessionExpired() { log.debug("Session expired, no longer updating per table props..."); } }); tableConfiguration.getNamespaceConfiguration().addObserver(configObserver); tabletMemory = new TabletMemory(this); // Force a load of any per-table properties configObserver.propertiesChanged(); if (!logEntries.isEmpty()) { log.info("Starting Write-Ahead Log recovery for " + this.extent); final AtomicLong entriesUsedOnTablet = new AtomicLong(0); // track max time from walog entries without timestamps final AtomicLong maxTime = new AtomicLong(Long.MIN_VALUE); final CommitSession commitSession = 
getTabletMemory().getCommitSession(); try { Set<String> absPaths = new HashSet<String>(); for (FileRef ref : datafiles.keySet()) absPaths.add(ref.path().toString()); tabletServer.recover(this.getTabletServer().getFileSystem(), extent, tableConfiguration, logEntries, absPaths, new MutationReceiver() { @Override public void receive(Mutation m) { // LogReader.printMutation(m); Collection<ColumnUpdate> muts = m.getUpdates(); for (ColumnUpdate columnUpdate : muts) { if (!columnUpdate.hasTimestamp()) { // if it is not a user set timestamp, it must have been set // by the system maxTime.set(Math.max(maxTime.get(), columnUpdate.getTimestamp())); } } getTabletMemory().mutate(commitSession, Collections.singletonList(m)); entriesUsedOnTablet.incrementAndGet(); } }); if (maxTime.get() != Long.MIN_VALUE) { tabletTime.useMaxTimeFromWALog(maxTime.get()); } commitSession.updateMaxCommittedTime(tabletTime.getTime()); if (entriesUsedOnTablet.get() == 0) { log.debug("No replayed mutations applied, removing unused entries for " + extent); MetadataTableUtil.removeUnusedWALEntries(getTabletServer(), extent, logEntries, tabletServer.getLock()); // No replication update to be made because the fact that this tablet didn't use any mutations // from the WAL implies nothing about use of this WAL by other tablets. Do nothing. logEntries.clear(); } else if (ReplicationConfigurationUtil.isEnabled(extent, tabletServer.getTableConfiguration(extent))) { // The logs are about to be re-used by this tablet, we need to record that they have data for this extent, // but that they may get more data. logEntries is not cleared which will cause the elements // in logEntries to be added to the currentLogs for this Tablet below. // // This update serves the same purpose as an update during a MinC. We know that the WAL was defined // (written when the WAL was opened) but this lets us know there are mutations written to this WAL // that could potentially be replicated. Because the Tablet is using this WAL, we can be sure that // the WAL isn't closed (WRT replication Status) and thus we're safe to update its progress. 
Status status = StatusUtil.openWithUnknownLength(); for (LogEntry logEntry : logEntries) { log.debug("Writing updated status to metadata table for " + logEntry.filename + " " + ProtobufUtil.toString(status)); ReplicationTableUtil.updateFiles(tabletServer, extent, logEntry.filename, status); } } } catch (Throwable t) { if (tableConfiguration.getBoolean(Property.TABLE_FAILURES_IGNORE)) { log.warn("Error recovering from log files: ", t); } else { throw new RuntimeException(t); } } // make some closed references that represent the recovered logs currentLogs = new ConcurrentSkipListSet<DfsLogger>(); for (LogEntry logEntry : logEntries) { currentLogs.add(new DfsLogger(tabletServer.getServerConfig(), logEntry.filename, logEntry.getColumnQualifier().toString())); } log.info("Write-Ahead Log recovery complete for " + this.extent + " (" + entriesUsedOnTablet.get() + " mutations applied, " + getTabletMemory().getNumEntries() + " entries created)"); } String contextName = tableConfiguration.get(Property.TABLE_CLASSPATH); if (contextName != null && !contextName.equals("")) { // initialize context classloader, instead of possibly waiting for it to initialize for a scan // TODO this could hang, causing other tablets to fail to load - ACCUMULO-1292 AccumuloVFSClassLoader.getContextManager().getClassLoader(contextName); } // do this last after tablet is completely setup because it // could cause major compaction to start datafileManager = new DatafileManager(this, datafiles); computeNumEntries(); getDatafileManager().removeFilesAfterScan(data.getScanFiles()); // look for hints of a failure on the previous tablet server if (!logEntries.isEmpty() || needsMajorCompaction(MajorCompactionReason.NORMAL)) { // look for any temp files hanging around removeOldTemporaryFiles(); } log.log(TLevel.TABLET_HIST, extent + " opened"); }
From source file: com.google.gwt.emultest.java.util.TreeMapTest.java
public void testHeadMapLjava_lang_ObjectZL() { K[] keys = getSortedKeys();// w w w .j a v a2 s . c o m V[] values = getSortedValues(); NavigableMap<K, V> map = createNavigableMap(); for (int i = 0; i < keys.length; i++) { map.put(keys[i], values[i]); } // normal case SortedMap<K, V> subMap = map.headMap(keys[2], true); assertEquals(3, subMap.size()); subMap = map.headMap(keys[3], true); assertEquals(4, subMap.size()); for (int i = 0; i < 4; i++) { assertEquals(values[i], subMap.get(keys[i])); } subMap = map.headMap(keys[2], false); assertEquals(2, subMap.size()); assertNull(subMap.get(keys[3])); // Exceptions assertEquals(0, map.headMap(keys[0], false).size()); try { map.headMap(null, true); assertTrue("expected exception", useNullKey()); } catch (NullPointerException e) { assertFalse("unexpected NPE", useNullKey()); } try { map.headMap(null, false); assertTrue("expected exception", useNullKey()); } catch (NullPointerException e) { assertFalse("unexpected NPE", useNullKey()); } subMap = map.headMap(keys[2]); assertEquals(2, subMap.size()); try { subMap.put(keys[2], values[2]); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException expected) { } assertEquals(keys.length, map.size()); subMap = map.headMap(keys[2], true); assertEquals(3, subMap.size()); subMap.remove(keys[1]); assertFalse(subMap.containsKey(keys[1])); assertFalse(subMap.containsValue(values[1])); assertFalse(map.containsKey(keys[1])); assertFalse(map.containsValue(values[1])); assertEquals(2, subMap.size()); assertEquals(keys.length - 1, map.size()); subMap.put(keys[1], values[1]); try { subMap.subMap(keys[1], keys[3]); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException expected) { } try { subMap.subMap(keys[3], keys[1]); fail("should throw IllegalArgumentException"); } catch (IllegalArgumentException expected) { } if (useNullKey() && useNullValue()) { map.put(null, null); subMap = map.headMap(null, true); assertEquals(1, subMap.size()); assertTrue(subMap.containsValue(null)); assertNull(subMap.get(null)); subMap = map.subMap(null, false, keys[2], true); assertEquals(3, subMap.size()); Set<K> keySet = subMap.keySet(); assertEquals(3, keySet.size()); Set<Map.Entry<K, V>> entrySet = subMap.entrySet(); assertEquals(3, entrySet.size()); Collection<V> valueCollection = subMap.values(); assertEquals(3, valueCollection.size()); map.remove(null); } // head map of head map NavigableMap<K, V> headMap = map.headMap(keys[3], true); assertEquals(4, headMap.size()); headMap = headMap.headMap(keys[3], false); assertEquals(3, headMap.size()); headMap = headMap.headMap(keys[2], false); assertEquals(2, headMap.size()); headMap = headMap.tailMap(keys[0], false); assertEquals(1, headMap.size()); headMap = headMap.tailMap(keys[1], false); assertEquals(0, headMap.size()); }
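The keySet() assertion in this test checks that the key set of a subMap view reports the view's size, not the size of the whole map, and the surrounding assertions show that changes made through the view reach the backing map. A compact sketch of the same behaviour against a plain TreeMap, with concrete String/Integer types in place of the test's generic K/V:

import java.util.NavigableMap;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

public class SubMapKeySetDemo {
    public static void main(String[] args) {
        NavigableMap<String, Integer> map = new TreeMap<>();
        map.put("a", 1);
        map.put("b", 2);
        map.put("c", 3);
        map.put("d", 4);

        // headMap(toKey) is a view of the entries whose keys are strictly
        // less than toKey, and its keySet() has the view's size.
        SortedMap<String, Integer> head = map.headMap("c");
        Set<String> keys = head.keySet();
        System.out.println(keys.size()); // 2 ("a" and "b"), not 4

        // The view is backed by the map, so removing through the view's
        // key set also removes the entry from the underlying TreeMap.
        keys.remove("a");
        System.out.println(map.containsKey("a")); // false
    }
}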
From source file: com.aurel.track.exchange.excel.ExcelImportBL.java
/** * Prepare the conflicts//from w w w. j av a2s. c o m * * @param workItemBeansList * @param presentFieldIDs * @param personID * @param locale * @return */ static SortedMap<Integer, SortedMap<Integer, Map<Integer, Object>>> conflictResolutionWorkItems( Collection<TWorkItemBean> workItemBeansList, Set<Integer> presentFieldIDs, Map<Integer, WorkItemContext> existingIssueContextsMap, Map<Integer, Map<Integer, Map<Integer, TFieldConfigBean>>> projectsIssueTypesFieldConfigsMap, Map<Integer, String> columnIndexToColumNameMap, Map<Integer, Integer> fieldIDToColumnIndexMap, Integer personID, Locale locale, Map<String, Boolean> overwriteMap) { Map<Integer, Map<Integer, Boolean>> workItemAndFieldBasedMap = getWorkItemAndFieldBasedMap(overwriteMap); SortedMap<Integer, SortedMap<Integer, Map<Integer, Object>>> conflictsMap = new TreeMap<Integer, SortedMap<Integer, Map<Integer, Object>>>(); Iterator<TWorkItemBean> itrWorkItemBean = workItemBeansList.iterator(); Set<Integer> hardcodedExplicitHistoryFields = HistorySaverBL.getHardCodedExplicitHistoryFields(); int row = 1; Calendar calendar = Calendar.getInstance(); while (itrWorkItemBean.hasNext()) { TWorkItemBean workItemBean = itrWorkItemBean.next(); Integer workItemID = workItemBean.getObjectID(); row++; Map<Integer, TFieldConfigBean> fieldConfigsMap = projectsIssueTypesFieldConfigsMap .get(workItemBean.getProjectID()).get(workItemBean.getListTypeID()); WorkItemContext workItemContext = existingIssueContextsMap.get(workItemID); if (workItemContext != null) { // conflicts can happen only for existing workItems TWorkItemBean workItemBeanOriginal = workItemContext.getWorkItemBeanOriginal(); Date excelLastEdited = null; if (fieldIDToColumnIndexMap.get(SystemFields.INTEGER_LASTMODIFIEDDATE) != null) { // it was overwritten in workItemBean from excel only if the // field was mapped excelLastEdited = workItemBean.getLastEdit(); } if (workItemBeanOriginal != null) { List<Integer> changedFields = getFieldsChanged(workItemBean, workItemBeanOriginal, presentFieldIDs); if (changedFields == null || changedFields.isEmpty()) { // no field change at all -> no conflict continue; } SortedMap<Integer, Map<Integer, HistoryValues>> workItemHistoryChanges = null; Map<Integer, Boolean> fieldForWorkItemOverwrite = workItemAndFieldBasedMap.get(workItemID); /* * if (fieldForWorkItemOverwrite!=null) { //after submitting * the overwrite map (the conflict handling is done by the * user) for (Iterator<Integer> iterator = * changedFields.iterator(); iterator.hasNext();) { Integer * fieldID = iterator.next(); Boolean overwrite = * fieldForWorkItemOverwrite.get(fieldID); if * (overwrite==null || !overwrite.booleanValue()) { * //overwrite==null there was no conflict at all (no * checkbox was rendered) //if user decided to leave the * track+ value change back to original * workItemBean.setAttribute(fieldID, * workItemBeanOriginal.getAttribute(fieldID)); } } //once * fieldOverwrite is already specified no further conflict * processing is needed continue; } */ if (excelLastEdited == null) { // no last edit field specified, no usable history data // available at all: each field change means conflict for (Iterator<Integer> itrField = changedFields.iterator(); itrField.hasNext();) { Integer fieldID = itrField.next(); addAsConfict(conflictsMap, row, fieldID, workItemBean, workItemBeanOriginal, columnIndexToColumNameMap, fieldIDToColumnIndexMap, overwriteMap, fieldForWorkItemOverwrite, locale); if (LOGGER.isDebugEnabled()) { LOGGER.debug("WorkItem " + workItemBean.getObjectID() + " row " + 
row + " fieldID " + fieldID + " has conficts (no lastEdit specified)"); } } } else { // last edited is specified in excel: search the history // changed fields are either with explicit history or // not List<Integer> changedFieldsWithExplicitHistory = new LinkedList<Integer>(); List<Integer> changedFieldsWithoutExplicitHistory = new LinkedList<Integer>(); for (Iterator<Integer> iterator = changedFields.iterator(); iterator.hasNext();) { Integer fieldID = iterator.next(); TFieldConfigBean fieldConfigBean = fieldConfigsMap.get(fieldID); if (fieldConfigBean.isHistoryString() || hardcodedExplicitHistoryFields.contains(fieldID)) { changedFieldsWithExplicitHistory.add(fieldID); } else { changedFieldsWithoutExplicitHistory.add(fieldID); } } if (!changedFieldsWithoutExplicitHistory.isEmpty()) { // if at least one changed field hat no explicit // history then take the commons history field also changedFieldsWithExplicitHistory.add(TFieldChangeBean.COMPOUND_HISTORY_FIELD); } Integer[] changedFieldIDs = GeneralUtils.createIntegerArrFromIntArr( GeneralUtils.createIntArrFromIntegerList(changedFieldsWithExplicitHistory)); if (changedFieldsWithExplicitHistory != null && !changedFieldsWithExplicitHistory.isEmpty()) { // get the changes for fields since excelLastEdited: // explicit fields and // and the common field if at least one field hasn't // explicit history // TODO not really correct to add a minute but: // the last edited date from excel // (DateFormat.SHORT) doesn't contain seconds, // while the last history entry from the Genji // issue contains even milliseconds. // Consequently the history entry's date from Genji // is after the one exported to excel // even if in track+ no further history entry exists // (no further change was made). // Consequently we would generate false conflicts. 
// If we add this extra minute then we get rid of // those false conflicts but there is a small risk // that if also another change was made in the same // minute, it will be overwritten by excel value // without conflict warning calendar.setTime(excelLastEdited); calendar.add(Calendar.MINUTE, 1); workItemHistoryChanges = HistoryLoaderBL.getWorkItemRawHistory( workItemBean.getObjectID(), changedFieldIDs, null, calendar.getTime(), null); } if (workItemHistoryChanges != null) { // there is some history data for (Iterator<Integer> itrField = changedFieldsWithExplicitHistory.iterator(); itrField .hasNext();) { IFieldTypeRT fieldTypeRT = null; Integer fieldID = itrField.next(); if (!fieldID.equals(TFieldChangeBean.COMPOUND_HISTORY_FIELD)) { fieldTypeRT = FieldTypeManager.getFieldTypeRT(fieldID); } HistoryValues historyValues = null; for (Iterator<Integer> itrTransaction = workItemHistoryChanges.keySet() .iterator(); itrTransaction.hasNext();) { // get the first entry from the history // after excelLastEdited Integer transactionID = itrTransaction.next(); Map<Integer, HistoryValues> historyValuesMap = workItemHistoryChanges .get(transactionID); if (historyValuesMap.containsKey(fieldID)) { historyValues = historyValuesMap.get(fieldID); break; } } // changedFields contains fields with explicit // history, without explicit history and commons // field if (historyValues != null) {// if no history // value, no // conflict if (fieldTypeRT != null) {// explicit // history // the actual excel value differs from // the first oldValue -> the field was // probably changed in excel also if (fieldTypeRT.valueModified(workItemBean.getAttribute(fieldID), historyValues.getOldValue())) { // field with explicit history // changed in excel and track+: that // is a conflict addAsConfict(conflictsMap, row, fieldID, workItemBean, workItemBeanOriginal, columnIndexToColumNameMap, fieldIDToColumnIndexMap, overwriteMap, fieldForWorkItemOverwrite, locale); if (LOGGER.isDebugEnabled()) { LOGGER.debug("WorkItem " + workItemBean.getObjectID() + " row " + row + " fieldID " + fieldID + " has conficts (explicit history found)"); } } else { // the excel value is the same with // the first old value from the // history // since the excel last modified // date: the value was modified only // in track+, // leave the track+ version without // conflict resolution workItemBean.setAttribute(fieldID, workItemBeanOriginal.getAttribute(fieldID)); if (LOGGER.isDebugEnabled()) { LOGGER.debug("WorkItem " + workItemBean.getObjectID() + " row " + row + " fieldID " + fieldID + "no conflict (value changed only in Genji)"); } } } else { // common history: history for fields // without explicit history // add a conflict for each field without // explicit history for (Iterator<Integer> iterator = changedFieldsWithoutExplicitHistory .iterator(); iterator.hasNext();) { Integer fieldWithoutExplicitHistory = iterator.next(); addAsConfict(conflictsMap, row, fieldWithoutExplicitHistory, workItemBean, workItemBeanOriginal, columnIndexToColumNameMap, fieldIDToColumnIndexMap, overwriteMap, fieldForWorkItemOverwrite, locale); if (LOGGER.isDebugEnabled()) { LOGGER.debug("WorkItem " + workItemBean.getObjectID() + " row " + row + " fieldID " + fieldID + " has conficts (common history found)"); } } } } else { // no history entry found for field: the // field was modified only in excel, no // conflict handling needed if (LOGGER.isDebugEnabled()) { LOGGER.debug("WorkItem " + workItemBean.getObjectID() + " row " + row + " fieldID " + fieldID + " no conflict: no 
fieldID history found, value changed only in Excel)"); } } } } else { // no history entry found for the entire workItem: // the workItem was modified only in excel, no // conflict handling needed if (LOGGER.isDebugEnabled()) { LOGGER.debug("WorkItem " + workItemBean.getObjectID() + " row " + row + " no conflict: no workItem history found, value changed only in Excel"); } } } } } } return conflictsMap; }
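Buried in this method is a common SortedMap.keySet() idiom: workItemHistoryChanges is keyed by transaction ID, and the loop iterates keySet() in ascending order and breaks on the first transaction that contains the field, i.e. the earliest history entry recorded after the Excel export. A minimal sketch of that "first matching key in sort order" pattern, with Integer transaction IDs and String values standing in for the HistoryValues objects used by the real code:

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class FirstMatchingTransaction {
    // Return the value recorded for fieldID in the earliest transaction
    // that mentions it, or null if no transaction does.
    static String firstChange(SortedMap<Integer, Map<Integer, String>> history, Integer fieldID) {
        for (Integer transactionID : history.keySet()) { // ascending transaction order
            Map<Integer, String> changes = history.get(transactionID);
            if (changes.containsKey(fieldID)) {
                return changes.get(fieldID);
            }
        }
        return null;
    }

    public static void main(String[] args) {
        SortedMap<Integer, Map<Integer, String>> history = new TreeMap<>();
        history.put(20, Map.of(5, "second change"));            // Map.of: Java 9+
        history.put(10, Map.of(5, "first change", 7, "other")); // earlier transaction
        System.out.println(firstChange(history, 5)); // "first change"
    }
}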
From source file: org.torproject.ernie.web.ExoneraTorServlet.java
public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException { /* Start writing response. */ PrintWriter out = response.getWriter(); writeHeader(out);/*from w w w. j a va 2s. c om*/ /* Look up first and last consensus in the database. */ long firstValidAfter = -1L, lastValidAfter = -1L; try { Connection conn = this.ds.getConnection(); Statement statement = conn.createStatement(); String query = "SELECT MIN(validafter) AS first, " + "MAX(validafter) AS last FROM consensus"; ResultSet rs = statement.executeQuery(query); if (rs.next()) { firstValidAfter = rs.getTimestamp(1).getTime(); lastValidAfter = rs.getTimestamp(2).getTime(); } rs.close(); statement.close(); conn.close(); } catch (SQLException e) { /* Looks like we don't have any consensuses. */ } if (firstValidAfter < 0L || lastValidAfter < 0L) { out.println("<p><font color=\"red\"><b>Warning: </b></font>This " + "server doesn't have any relay lists available. If this " + "problem persists, please " + "<a href=\"mailto:tor-assistants@freehaven.net\">let us " + "know</a>!</p>\n"); writeFooter(out); return; } out.println("<a name=\"relay\"></a><h3>Was there a Tor relay running " + "on this IP address?</h3>"); /* Parse IP parameter. */ Pattern ipAddressPattern = Pattern .compile("^([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." + "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." + "([01]?\\d\\d?|2[0-4]\\d|25[0-5])\\." + "([01]?\\d\\d?|2[0-4]\\d|25[0-5])$"); String ipParameter = request.getParameter("ip"); String relayIP = "", ipWarning = ""; if (ipParameter != null && ipParameter.length() > 0) { Matcher ipParameterMatcher = ipAddressPattern.matcher(ipParameter); if (ipParameterMatcher.matches()) { String[] ipParts = ipParameter.split("\\."); relayIP = Integer.parseInt(ipParts[0]) + "." + Integer.parseInt(ipParts[1]) + "." + Integer.parseInt(ipParts[2]) + "." + Integer.parseInt(ipParts[3]); } else { ipWarning = "\"" + (ipParameter.length() > 20 ? ipParameter.substring(0, 20) + "[...]" : ipParameter) + "\" is not a valid IP address."; } } /* Parse timestamp parameter. */ String timestampParameter = request.getParameter("timestamp"); long timestamp = 0L; String timestampStr = "", timestampWarning = ""; SimpleDateFormat shortDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm"); shortDateTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC")); if (timestampParameter != null && timestampParameter.length() > 0) { try { timestamp = shortDateTimeFormat.parse(timestampParameter).getTime(); timestampStr = shortDateTimeFormat.format(timestamp); if (timestamp < firstValidAfter || timestamp > lastValidAfter) { timestampWarning = "Please pick a value between \"" + shortDateTimeFormat.format(firstValidAfter) + "\" and \"" + shortDateTimeFormat.format(lastValidAfter) + "\"."; } } catch (ParseException e) { /* We have no way to handle this exception, other than leaving timestampStr at "". */ timestampWarning = "\"" + (timestampParameter.length() > 20 ? timestampParameter.substring(0, 20) + "[...]" : timestampParameter) + "\" is not a valid timestamp."; } } /* If either IP address or timestamp is provided, the other one must * be provided, too. */ if (relayIP.length() < 1 && timestampStr.length() > 0 && ipWarning.length() < 1) { ipWarning = "Please provide an IP address."; } if (relayIP.length() > 0 && timestampStr.length() < 1 && timestampWarning.length() < 1) { timestampWarning = "Please provide a timestamp."; } /* Parse target IP parameter. 
*/ String targetIP = "", targetPort = "", target = ""; String[] targetIPParts = null; String targetAddrParameter = request.getParameter("targetaddr"); String targetAddrWarning = ""; if (targetAddrParameter != null && targetAddrParameter.length() > 0) { Matcher targetAddrParameterMatcher = ipAddressPattern.matcher(targetAddrParameter); if (targetAddrParameterMatcher.matches()) { String[] targetAddrParts = targetAddrParameter.split("\\."); targetIP = Integer.parseInt(targetAddrParts[0]) + "." + Integer.parseInt(targetAddrParts[1]) + "." + Integer.parseInt(targetAddrParts[2]) + "." + Integer.parseInt(targetAddrParts[3]); target = targetIP; targetIPParts = targetIP.split("\\."); } else { targetAddrWarning = "\"" + (targetAddrParameter.length() > 20 ? timestampParameter.substring(0, 20) + "[...]" : timestampParameter) + "\" is not a valid IP address."; } } /* Parse target port parameter. */ String targetPortParameter = request.getParameter("targetport"); String targetPortWarning = ""; if (targetPortParameter != null && targetPortParameter.length() > 0) { Pattern targetPortPattern = Pattern.compile("\\d+"); if (targetPortParameter.length() < 5 && targetPortPattern.matcher(targetPortParameter).matches() && !targetPortParameter.equals("0") && Integer.parseInt(targetPortParameter) < 65536) { targetPort = targetPortParameter; if (target != null) { target += ":" + targetPort; } else { target = targetPort; } } else { targetPortWarning = "\"" + (targetPortParameter.length() > 8 ? targetPortParameter.substring(0, 8) + "[...]" : targetPortParameter) + "\" is not a valid TCP port."; } } /* If target port is provided, a target address must be provided, * too. */ if (targetPort.length() > 0 && targetIP.length() < 1 && targetAddrWarning.length() < 1) { targetAddrWarning = "Please provide an IP address."; } /* Write form with IP address and timestamp. */ out.println(" <form action=\"exonerator.html#relay\">\n" + " <input type=\"hidden\" name=\"targetaddr\" " + (targetIP.length() > 0 ? " value=\"" + targetIP + "\"" : "") + ">\n" + " <input type=\"hidden\" name=\"targetPort\"" + (targetPort.length() > 0 ? " value=\"" + targetPort + "\"" : "") + ">\n" + " <table>\n" + " <tr>\n" + " <td align=\"right\">IP address in question:" + "</td>\n" + " <td><input type=\"text\" name=\"ip\"" + (relayIP.length() > 0 ? " value=\"" + relayIP + "\"" : "") + ">" + (ipWarning.length() > 0 ? "<br><font color=\"red\">" + ipWarning + "</font>" : "") + "</td>\n" + " <td><i>(Ex.: 1.2.3.4)</i></td>\n" + " </tr>\n" + " <tr>\n" + " <td align=\"right\">Timestamp, in UTC:</td>\n" + " <td><input type=\"text\" name=\"timestamp\"" + (timestampStr.length() > 0 ? " value=\"" + timestampStr + "\"" : "") + ">" + (timestampWarning.length() > 0 ? "<br><font color=\"red\">" + timestampWarning + "</font>" : "") + "</td>\n" + " <td><i>(Ex.: 2010-01-01 12:00)</i></td>\n" + " </tr>\n" + " <tr>\n" + " <td></td>\n" + " <td>\n" + " <input type=\"submit\">\n" + " <input type=\"reset\">\n" + " </td>\n" + " <td></td>\n" + " </tr>\n" + " </table>\n" + " </form>\n"); if (relayIP.length() < 1 || timestampStr.length() < 1) { writeFooter(out); return; } /* Look up relevant consensuses. */ long timestampTooOld = timestamp - 15L * 60L * 60L * 1000L; long timestampFrom = timestamp - 3L * 60L * 60L * 1000L; long timestampTooNew = timestamp + 12L * 60L * 60L * 1000L; out.printf( "<p>Looking up IP address %s in the relay lists published " + "between %s and %s. 
" + "Clients could have used any of these relay lists to " + "select relays for their paths and build circuits using them. " + "You may follow the links to relay lists and relay descriptors " + "to grep for the lines printed below and confirm that results " + "are correct.<br>", relayIP, shortDateTimeFormat.format(timestampFrom), timestampStr); SimpleDateFormat validAfterTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); validAfterTimeFormat.setTimeZone(TimeZone.getTimeZone("UTC")); String fromValidAfter = validAfterTimeFormat.format(timestampTooOld); String toValidAfter = validAfterTimeFormat.format(timestampTooNew); SortedMap<Long, String> tooOldConsensuses = new TreeMap<Long, String>(); SortedMap<Long, String> relevantConsensuses = new TreeMap<Long, String>(); SortedMap<Long, String> tooNewConsensuses = new TreeMap<Long, String>(); try { Connection conn = this.ds.getConnection(); Statement statement = conn.createStatement(); String query = "SELECT validafter, rawdesc FROM consensus " + "WHERE validafter >= '" + fromValidAfter + "' AND validafter <= '" + toValidAfter + "'"; ResultSet rs = statement.executeQuery(query); while (rs.next()) { long consensusTime = rs.getTimestamp(1).getTime(); String rawConsensusString = new String(rs.getBytes(2), "US-ASCII"); if (consensusTime < timestampFrom) { tooOldConsensuses.put(consensusTime, rawConsensusString); } else if (consensusTime > timestamp) { tooNewConsensuses.put(consensusTime, rawConsensusString); } else { relevantConsensuses.put(consensusTime, rawConsensusString); } } rs.close(); statement.close(); conn.close(); } catch (SQLException e) { /* Looks like we don't have any consensuses in the requested interval. */ } SortedMap<Long, String> allConsensuses = new TreeMap<Long, String>(); allConsensuses.putAll(tooOldConsensuses); allConsensuses.putAll(relevantConsensuses); allConsensuses.putAll(tooNewConsensuses); if (allConsensuses.isEmpty()) { out.println(" <p>No relay lists found!</p>\n" + " <p>Result is INDECISIVE!</p>\n" + " <p>We cannot make any statement whether there was " + "a Tor relay running on IP address " + relayIP + " at " + timestampStr + "! We " + "did not find any relevant relay lists preceding the given " + "time. If you think this is an error on our side, please " + "<a href=\"mailto:tor-assistants@freehaven.net\">contact " + "us</a>!</p>\n"); writeFooter(out); return; } /* Parse consensuses to find descriptors belonging to the IP address. 
*/ SortedSet<Long> positiveConsensusesNoTarget = new TreeSet<Long>(); Set<String> addressesInSameNetwork = new HashSet<String>(); SortedMap<String, Set<Long>> relevantDescriptors = new TreeMap<String, Set<Long>>(); SimpleDateFormat validAfterUrlFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss"); validAfterUrlFormat.setTimeZone(TimeZone.getTimeZone("UTC")); for (Map.Entry<Long, String> e : allConsensuses.entrySet()) { long consensus = e.getKey(); if (relevantConsensuses.containsKey(consensus)) { long validAfterTime = -1L; String validAfterDatetime = validAfterTimeFormat.format(consensus); String validAfterString = validAfterUrlFormat.format(consensus); out.println(" <br><tt>valid-after <b>" + "<a href=\"consensus?valid-after=" + validAfterString + "\" target=\"_blank\">" + validAfterDatetime + "</b></a></tt><br>"); } String rawConsensusString = e.getValue(); BufferedReader br = new BufferedReader(new StringReader(rawConsensusString)); String line = null; while ((line = br.readLine()) != null) { if (!line.startsWith("r ")) { continue; } String[] parts = line.split(" "); String address = parts[6]; if (address.equals(relayIP)) { String hex = String.format("%040x", new BigInteger(1, Base64.decodeBase64(parts[3] + "=="))); if (!relevantDescriptors.containsKey(hex)) { relevantDescriptors.put(hex, new HashSet<Long>()); } relevantDescriptors.get(hex).add(consensus); positiveConsensusesNoTarget.add(consensus); if (relevantConsensuses.containsKey(consensus)) { out.println(" <tt>r " + parts[1] + " " + parts[2] + " " + "<a href=\"serverdesc?desc-id=" + hex + "\" " + "target=\"_blank\">" + parts[3] + "</a> " + parts[4] + " " + parts[5] + " <b>" + parts[6] + "</b> " + parts[7] + " " + parts[8] + "</tt><br>"); } } else { if (relayIP.startsWith(address.substring(0, address.lastIndexOf(".")))) { addressesInSameNetwork.add(address); } } } br.close(); } if (relevantDescriptors.isEmpty()) { out.printf( " <p>None found!</p>\n" + " <p>Result is NEGATIVE with moderate certainty!</p>\n" + " <p>We did not find IP " + "address " + relayIP + " in any of the relay lists that were " + "published between %s and %s.\n\nA possible " + "reason for false negatives is that the relay is using a " + "different IP address when generating a descriptor than for " + "exiting to the Internet. We hope to provide better checks " + "for this case in the future.</p>\n", shortDateTimeFormat.format(timestampTooOld), shortDateTimeFormat.format(timestampTooNew)); if (!addressesInSameNetwork.isEmpty()) { out.println(" <p>The following other IP addresses of Tor " + "relays were found in the mentioned relay lists that " + "are in the same /24 network and that could be related to " + "IP address " + relayIP + ":</p>\n"); for (String s : addressesInSameNetwork) { out.println(" <p>" + s + "</p>\n"); } } writeFooter(out); return; } /* Print out result. */ Set<Long> matches = positiveConsensusesNoTarget; if (matches.contains(relevantConsensuses.lastKey())) { out.println(" <p>Result is POSITIVE with high certainty!" 
+ "</p>\n" + " <p>We found one or more relays on IP address " + relayIP + " in the most recent relay list preceding " + timestampStr + " that clients were likely to know.</p>\n"); } else { boolean inOtherRelevantConsensus = false, inTooOldConsensuses = false, inTooNewConsensuses = false; for (long match : matches) { if (relevantConsensuses.containsKey(match)) { inOtherRelevantConsensus = true; } else if (tooOldConsensuses.containsKey(match)) { inTooOldConsensuses = true; } else if (tooNewConsensuses.containsKey(match)) { inTooNewConsensuses = true; } } if (inOtherRelevantConsensus) { out.println(" <p>Result is POSITIVE " + "with moderate certainty!</p>\n"); out.println("<p>We found one or more relays on IP address " + relayIP + ", but not in the relay list immediately " + "preceding " + timestampStr + ". A possible reason for the " + "relay being missing in the last relay list preceding the " + "given time might be that some of the directory " + "authorities had difficulties connecting to the relay. " + "However, clients might still have used the relay.</p>\n"); } else { out.println(" <p>Result is NEGATIVE " + "with high certainty!</p>\n"); out.println(" <p>We did not find any relay on IP address " + relayIP + " in the relay lists 3 hours preceding " + timestampStr + ".</p>\n"); if (inTooOldConsensuses || inTooNewConsensuses) { if (inTooOldConsensuses && !inTooNewConsensuses) { out.println(" <p>Note that we found a matching relay " + "in relay lists that were published between 5 and 3 " + "hours before " + timestampStr + ".</p>\n"); } else if (!inTooOldConsensuses && inTooNewConsensuses) { out.println(" <p>Note that we found a matching relay " + "in relay lists that were published up to 2 hours " + "after " + timestampStr + ".</p>\n"); } else { out.println(" <p>Note that we found a matching relay " + "in relay lists that were published between 5 and 3 " + "hours before and in relay lists that were published " + "up to 2 hours after " + timestampStr + ".</p>\n"); } out.println("<p>Make sure that the timestamp you provided is " + "in the correct timezone: UTC (or GMT).</p>"); } writeFooter(out); return; } } /* Second part: target */ out.println("<br><a name=\"exit\"></a><h3>Was this relay configured " + "to permit exiting to a given target?</h3>"); out.println(" <form action=\"exonerator.html#exit\">\n" + " <input type=\"hidden\" name=\"timestamp\"\n" + " value=\"" + timestampStr + "\">\n" + " <input type=\"hidden\" name=\"ip\" " + "value=\"" + relayIP + "\">\n" + " <table>\n" + " <tr>\n" + " <td align=\"right\">Target address:</td>\n" + " <td><input type=\"text\" name=\"targetaddr\"" + (targetIP.length() > 0 ? " value=\"" + targetIP + "\"" : "") + "\">" + (targetAddrWarning.length() > 0 ? "<br><font color=\"red\">" + targetAddrWarning + "</font>" : "") + "</td>\n" + " <td><i>(Ex.: 4.3.2.1)</i></td>\n" + " </tr>\n" + " <tr>\n" + " <td align=\"right\">Target port:</td>\n" + " <td><input type=\"text\" name=\"targetport\"" + (targetPort.length() > 0 ? " value=\"" + targetPort + "\"" : "") + ">" + (targetPortWarning.length() > 0 ? "<br><font color=\"red\">" + targetPortWarning + "</font>" : "") + "</td>\n" + " <td><i>(Ex.: 80)</i></td>\n" + " </tr>\n" + " <tr>\n" + " <td></td>\n" + " <td>\n" + " <input type=\"submit\">\n" + " <input type=\"reset\">\n" + " </td>\n" + " <td></td>\n" + " </tr>\n" + " </table>\n" + " </form>\n"); if (targetIP.length() < 1) { writeFooter(out); return; } /* Parse router descriptors to check exit policies. 
*/ out.println("<p>Searching the relay descriptors published by the " + "relay on IP address " + relayIP + " to find out whether this " + "relay permitted exiting to " + target + ". You may follow the " + "links above to the relay descriptors and grep them for the " + "lines printed below to confirm that results are correct.</p>"); SortedSet<Long> positiveConsensuses = new TreeSet<Long>(); Set<String> missingDescriptors = new HashSet<String>(); Set<String> descriptors = relevantDescriptors.keySet(); for (String descriptor : descriptors) { byte[] rawDescriptor = null; try { Connection conn = this.ds.getConnection(); Statement statement = conn.createStatement(); String query = "SELECT rawdesc FROM descriptor " + "WHERE descriptor = '" + descriptor + "'"; ResultSet rs = statement.executeQuery(query); if (rs.next()) { rawDescriptor = rs.getBytes(1); } rs.close(); statement.close(); conn.close(); } catch (SQLException e) { /* Consider this descriptors as 'missing'. */ continue; } if (rawDescriptor != null && rawDescriptor.length > 0) { missingDescriptors.remove(descriptor); String rawDescriptorString = new String(rawDescriptor, "US-ASCII"); try { BufferedReader br = new BufferedReader(new StringReader(rawDescriptorString)); String line = null, routerLine = null, publishedLine = null; StringBuilder acceptRejectLines = new StringBuilder(); boolean foundMatch = false; while ((line = br.readLine()) != null) { if (line.startsWith("router ")) { routerLine = line; } else if (line.startsWith("published ")) { publishedLine = line; } else if (line.startsWith("reject ") || line.startsWith("accept ")) { if (foundMatch) { out.println("<tt> " + line + "</tt><br>"); continue; } boolean ruleAccept = line.split(" ")[0].equals("accept"); String ruleAddress = line.split(" ")[1].split(":")[0]; if (!ruleAddress.equals("*")) { if (!ruleAddress.contains("/") && !ruleAddress.equals(targetIP)) { /* IP address does not match. */ acceptRejectLines.append("<tt> " + line + "</tt><br>\n"); continue; } String[] ruleIPParts = ruleAddress.split("/")[0].split("\\."); int ruleNetwork = ruleAddress.contains("/") ? Integer.parseInt(ruleAddress.split("/")[1]) : 32; for (int i = 0; i < 4; i++) { if (ruleNetwork == 0) { break; } else if (ruleNetwork >= 8) { if (ruleIPParts[i].equals(targetIPParts[i])) { ruleNetwork -= 8; } else { break; } } else { int mask = 255 ^ 255 >>> ruleNetwork; if ((Integer.parseInt(ruleIPParts[i]) & mask) == (Integer.parseInt(targetIPParts[i]) & mask)) { ruleNetwork = 0; } break; } } if (ruleNetwork > 0) { /* IP address does not match. */ acceptRejectLines.append("<tt> " + line + "</tt><br>\n"); continue; } } String rulePort = line.split(" ")[1].split(":")[1]; if (targetPort.length() < 1 && !ruleAccept && !rulePort.equals("*")) { /* With no port given, we only consider reject :* rules as matching. */ acceptRejectLines.append("<tt> " + line + "</tt><br>\n"); continue; } if (targetPort.length() > 0 && !rulePort.equals("*") && rulePort.contains("-")) { int fromPort = Integer.parseInt(rulePort.split("-")[0]); int toPort = Integer.parseInt(rulePort.split("-")[1]); int targetPortInt = Integer.parseInt(targetPort); if (targetPortInt < fromPort || targetPortInt > toPort) { /* Port not contained in interval. */ continue; } } if (targetPort.length() > 0) { if (!rulePort.equals("*") && !rulePort.contains("-") && !targetPort.equals(rulePort)) { /* Ports do not match. 
*/ acceptRejectLines.append("<tt> " + line + "</tt><br>\n"); continue; } } boolean relevantMatch = false; for (long match : relevantDescriptors.get(descriptor)) { if (relevantConsensuses.containsKey(match)) { relevantMatch = true; } } if (relevantMatch) { String[] routerParts = routerLine.split(" "); out.println("<br><tt>" + routerParts[0] + " " + routerParts[1] + " <b>" + routerParts[2] + "</b> " + routerParts[3] + " " + routerParts[4] + " " + routerParts[5] + "</tt><br>"); String[] publishedParts = publishedLine.split(" "); out.println("<tt>" + publishedParts[0] + " <b>" + publishedParts[1] + " " + publishedParts[2] + "</b></tt><br>"); out.println(acceptRejectLines.toString()); out.println("<tt><b>" + line + "</b></tt><br>"); foundMatch = true; } if (ruleAccept) { positiveConsensuses.addAll(relevantDescriptors.get(descriptor)); } } } br.close(); } catch (IOException e) { /* Could not read descriptor string. */ continue; } } } /* Print out result. */ matches = positiveConsensuses; if (matches.contains(relevantConsensuses.lastKey())) { out.println(" <p>Result is POSITIVE with high certainty!</p>" + "\n" + " <p>We found one or more relays on IP address " + relayIP + " permitting exit to " + target + " in the most recent relay list preceding " + timestampStr + " that clients were likely to know.</p>\n"); writeFooter(out); return; } boolean resultIndecisive = target.length() > 0 && !missingDescriptors.isEmpty(); if (resultIndecisive) { out.println(" <p>Result is INDECISIVE!</p>\n" + " <p>At least one referenced descriptor could not be " + "found. This is a rare case, but one that (apparently) " + "happens. We cannot make any good statement about exit " + "relays without these descriptors. The following descriptors " + "are missing:</p>"); for (String desc : missingDescriptors) out.println(" <p>" + desc + "</p>\n"); } boolean inOtherRelevantConsensus = false, inTooOldConsensuses = false, inTooNewConsensuses = false; for (long match : matches) { if (relevantConsensuses.containsKey(match)) { inOtherRelevantConsensus = true; } else if (tooOldConsensuses.containsKey(match)) { inTooOldConsensuses = true; } else if (tooNewConsensuses.containsKey(match)) { inTooNewConsensuses = true; } } if (inOtherRelevantConsensus) { if (!resultIndecisive) { out.println(" <p>Result is POSITIVE " + "with moderate certainty!</p>\n"); } out.println("<p>We found one or more relays on IP address " + relayIP + " permitting exit to " + target + ", but not in " + "the relay list immediately preceding " + timestampStr + ". A possible reason for the relay being missing in the last " + "relay list preceding the given time might be that some of " + "the directory authorities had difficulties connecting to " + "the relay. 
However, clients might still have used the " + "relay.</p>\n"); } else { if (!resultIndecisive) { out.println(" <p>Result is NEGATIVE " + "with high certainty!</p>\n"); } out.println(" <p>We did not find any relay on IP address " + relayIP + " permitting exit to " + target + " in the relay list 3 hours preceding " + timestampStr + ".</p>\n"); if (inTooOldConsensuses || inTooNewConsensuses) { if (inTooOldConsensuses && !inTooNewConsensuses) { out.println(" <p>Note that we found a matching relay in " + "relay lists that were published between 5 and 3 " + "hours before " + timestampStr + ".</p>\n"); } else if (!inTooOldConsensuses && inTooNewConsensuses) { out.println(" <p>Note that we found a matching relay in " + "relay lists that were published up to 2 hours after " + timestampStr + ".</p>\n"); } else { out.println(" <p>Note that we found a matching relay in " + "relay lists that were published between 5 and 3 " + "hours before and in relay lists that were published up " + "to 2 hours after " + timestampStr + ".</p>\n"); } out.println("<p>Make sure that the timestamp you provided is " + "in the correct timezone: UTC (or GMT).</p>"); } } if (target != null) { if (positiveConsensuses.isEmpty() && !positiveConsensusesNoTarget.isEmpty()) { out.println(" <p>Note that although the found relay(s) did " + "not permit exiting to " + target + ", there have been one " + "or more relays running at the given time.</p>"); } } /* Finish writing response. */ writeFooter(out); }
From source file: edu.cmu.tetrad.search.Lofs2.java
private void resolveOneEdgeMax2(Graph graph, Node x, Node y, boolean strong) { TetradLogger.getInstance().log("info", "\nEDGE " + x + " --- " + y); SortedMap<Double, String> scoreReports = new TreeMap<Double, String>(); List<Node> neighborsx = new ArrayList<Node>(); for (Node _node : graph.getAdjacentNodes(x)) { if (!knowledge.isForbidden(_node.getName(), x.getName())) { // if (!knowledge.edgeForbidden(x.getName(), _node.getName())) { neighborsx.add(_node);/*from ww w. j a v a2 s.c om*/ } } // neighborsx.remove(y); double max = Double.NEGATIVE_INFINITY; boolean left = false; boolean right = false; DepthChoiceGenerator genx = new DepthChoiceGenerator(neighborsx.size(), neighborsx.size()); int[] choicex; while ((choicex = genx.next()) != null) { List<Node> condxMinus = GraphUtils.asList(choicex, neighborsx); if (condxMinus.contains(y)) continue; List<Node> condxPlus = new ArrayList<Node>(condxMinus); condxPlus.add(y); double xPlus = score(x, condxPlus); double xMinus = score(x, condxMinus); double p = pValue(x, condxPlus); if (p > alpha) { continue; } double p2 = pValue(x, condxMinus); if (p2 > alpha) { continue; } List<Node> neighborsy = new ArrayList<Node>(); for (Node _node : graph.getAdjacentNodes(y)) { if (!knowledge.isForbidden(_node.getName(), y.getName())) { neighborsy.add(_node); } } DepthChoiceGenerator geny = new DepthChoiceGenerator(neighborsy.size(), neighborsy.size()); int[] choicey; while ((choicey = geny.next()) != null) { List<Node> condyMinus = GraphUtils.asList(choicey, neighborsy); if (condyMinus.contains(x)) continue; List<Node> condyPlus = new ArrayList<Node>(condyMinus); condyPlus.add(x); double yPlus = score(y, condyPlus); double yMinus = score(y, condyMinus); double p3 = pValue(y, condyPlus); if (p3 > alpha) { continue; } double p4 = pValue(y, condyMinus); if (p4 > alpha) { continue; } boolean forbiddenLeft = knowledge.isForbidden(y.getName(), x.getName()); boolean forbiddenRight = knowledge.isForbidden(x.getName(), y.getName()); double delta = 0.0; if (strong) { if (yPlus <= xPlus + delta && xMinus <= yMinus + delta) { double score = combinedScore(xPlus, yMinus); if ((yPlus <= yMinus + delta && xMinus <= xPlus + delta) || forbiddenRight) { StringBuilder builder = new StringBuilder(); builder.append("\nStrong ").append(y).append("->").append(x).append(" ").append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); if (score > max) { max = score; left = true; right = false; } } else { StringBuilder builder = new StringBuilder(); builder.append("\nNo directed edge ").append(x).append("--").append(y).append(" ") .append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); } } else if ((xPlus <= yPlus + delta && yMinus <= xMinus + delta) || forbiddenLeft) { double score = combinedScore(yPlus, xMinus); if (yMinus <= yPlus + delta && xPlus <= xMinus + delta) { StringBuilder builder = new StringBuilder(); builder.append("\nStrong ").append(x).append("->").append(y).append(" ").append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); if (score > max) { max = score; left = false; right = true; } } else { StringBuilder builder = 
new StringBuilder(); builder.append("\nNo directed edge ").append(x).append("--").append(y).append(" ") .append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); } } else if (yPlus <= xPlus + delta && yMinus <= xMinus + delta) { double score = combinedScore(yPlus, xMinus); StringBuilder builder = new StringBuilder(); builder.append("\nNo directed edge ").append(x).append("--").append(y).append(" ") .append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); } else if (xPlus <= yPlus + delta && xMinus <= yMinus + delta) { double score = combinedScore(yPlus, xMinus); StringBuilder builder = new StringBuilder(); builder.append("\nNo directed edge ").append(x).append("--").append(y).append(" ") .append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); } } else { if ((yPlus <= xPlus + delta && xMinus <= yMinus + delta) || forbiddenRight) { double score = combinedScore(xPlus, yMinus); StringBuilder builder = new StringBuilder(); builder.append("\nWeak ").append(y).append("->").append(x).append(" ").append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); if (score > max) { max = score; left = true; right = false; } } else if ((xPlus <= yPlus + delta && yMinus <= xMinus + delta) || forbiddenLeft) { double score = combinedScore(yPlus, xMinus); StringBuilder builder = new StringBuilder(); builder.append("\nWeak ").append(x).append("->").append(y).append(" ").append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); if (score > max) { max = score; left = false; right = true; } } else if (yPlus <= xPlus + delta && yMinus <= xMinus + delta) { double score = combinedScore(yPlus, xMinus); StringBuilder builder = new StringBuilder(); builder.append("\nNo directed edge ").append(x).append("--").append(y).append(" ") .append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); } else if (xPlus <= yPlus + delta && xMinus <= yMinus + delta) { double score = combinedScore(yPlus, xMinus); StringBuilder builder = new StringBuilder(); builder.append("\nNo directed edge ").append(x).append("--").append(y).append(" ") .append(score); builder.append("\n Parents(").append(x).append(") = ").append(condxMinus); builder.append("\n Parents(").append(y).append(") = ").append(condyMinus); scoreReports.put(-score, builder.toString()); } } } } for (double score : scoreReports.keySet()) { TetradLogger.getInstance().log("info", scoreReports.get(score)); } graph.removeEdges(x, y); if (left) { graph.addDirectedEdge(y, x); } if (right) { graph.addDirectedEdge(x, y); } if (!graph.isAdjacentTo(x, y)) { graph.addUndirectedEdge(x, y); } }
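The reporting at the end of this method leans on a small ordering trick: each report is stored under the negated score (scoreReports.put(-score, ...)), so iterating scoreReports.keySet() at the end logs the reports from highest to lowest score. A standalone sketch of that pattern; on a TreeMap the same effect can also be obtained without negation via descendingMap() or descendingKeySet():

import java.util.SortedMap;
import java.util.TreeMap;

public class ScoreReportOrdering {
    public static void main(String[] args) {
        // Keys are negated scores, so the natural ascending key order of the
        // TreeMap yields the reports in descending score order when read back.
        SortedMap<Double, String> scoreReports = new TreeMap<>();
        scoreReports.put(-3.2, "edge A -> B, score 3.2");
        scoreReports.put(-7.9, "edge C -> D, score 7.9");
        scoreReports.put(-0.4, "edge E -- F, score 0.4");

        for (double negScore : scoreReports.keySet()) {
            System.out.println(scoreReports.get(negScore));
        }
        // Prints the 7.9 report first, then 3.2, then 0.4.
    }
}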