List of usage examples for java.lang Long MIN_VALUE
long MIN_VALUE
To view the source code for java.lang Long.MIN_VALUE, click the Source Link below.
From source file:com.planetmayo.debrief.satc_rcp.views.MaintainContributionsView.java
protected void redoOwnshipStates() { if (legPlot == null) return;// w ww . j av a2 s. c o m boolean showCourses = true; if (showOSCourse != null) showCourses = showOSCourse.getSelection(); java.awt.Color courseCol = java.awt.Color.blue.darker().darker(); java.awt.Color speedCol = java.awt.Color.blue.brighter().brighter(); // ok, now loop through and set them long startTime = Long.MAX_VALUE; long endTime = Long.MIN_VALUE; // clear any datasets legPlot.setDataset(0, null); legPlot.setDataset(1, null); // hmm, actually we have to remove any target leg markers @SuppressWarnings("unchecked") Collection<IntervalMarker> markers = legPlot.getDomainMarkers(Layer.BACKGROUND); if (markers != null) { ArrayList<IntervalMarker> markersToDelete = new ArrayList<IntervalMarker>(markers); Iterator<IntervalMarker> mIter = markersToDelete.iterator(); while (mIter.hasNext()) { IntervalMarker im = mIter.next(); legPlot.removeDomainMarker(im); } } // hey, does it have any ownship legs? TimeSeriesCollection tscC = new TimeSeriesCollection(); TimeSeriesCollection tscS = new TimeSeriesCollection(); TimeSeriesCollection tscCLegs = new TimeSeriesCollection(); TimeSeriesCollection tscSLegs = new TimeSeriesCollection(); TimeSeries courses = new TimeSeries("Course"); TimeSeries bearings = new TimeSeries("Bearings"); TimeSeries speeds = new TimeSeries("Speed"); TimeSeries courseLegs = new TimeSeries("Course (leg)"); TimeSeries speedLegs = new TimeSeries("Speed (leg)"); Iterator<BaseContribution> conts = activeSolver.getContributions().iterator(); while (conts.hasNext()) { BaseContribution baseC = conts.next(); if (baseC.isActive()) if (baseC instanceof BearingMeasurementContribution) { BearingMeasurementContribution bmc = (BearingMeasurementContribution) baseC; Iterator<LegOfData> lIter = null; LegOfData thisLeg = null; if (bmc.getOwnshipLegs() != null) { lIter = bmc.getOwnshipLegs().iterator(); thisLeg = lIter.next(); } List<HostState> hostStates = bmc.getHostState(); if (hostStates != null) { 
Iterator<HostState> stateIter = hostStates.iterator(); while (stateIter.hasNext()) { BearingMeasurementContribution.HostState hostState = stateIter.next(); long thisTime = hostState.time; double thisCourse = hostState.courseDegs; if (showCourses) courses.add(new FixedMillisecond(thisTime), thisCourse); double thisSpeed = hostState.speedKts; speeds.add(new FixedMillisecond(thisTime), thisSpeed); startTime = Math.min(thisTime, startTime); endTime = Math.max(thisTime, endTime); // sort out if this is in a leg or not if (thisLeg != null) { if (thisTime > thisLeg.getEnd() && lIter.hasNext()) { thisLeg = lIter.next(); } else { if (thisTime >= thisLeg.getStart()) { speedLegs.add(new FixedMillisecond(thisTime), thisSpeed); if (showCourses) courseLegs.add(new FixedMillisecond(thisTime), thisCourse); } } } } } // also, we wish to show the bearings from the BMC Iterator<BMeasurement> cuts = bmc.getMeasurements().iterator(); while (cuts.hasNext()) { BearingMeasurementContribution.BMeasurement measurement = cuts.next(); if (measurement.isActive()) { long thisT = measurement.getDate().getTime(); bearings.add(new FixedMillisecond(thisT), Math.toDegrees(Math.abs(measurement.getBearingRads()))); } } } } // HEY, also shade the ownship legs conts = activeSolver.getContributions().iterator(); while (conts.hasNext()) { BaseContribution baseC = conts.next(); if (baseC.isActive()) { if (baseC instanceof BearingMeasurementContribution) { BearingMeasurementContribution bmc = (BearingMeasurementContribution) baseC; Iterator<LegOfData> lIter = null; if (bmc.getOwnshipLegs() != null) { int ctr = 1; lIter = bmc.getOwnshipLegs().iterator(); while (lIter.hasNext()) { LegOfData thisL = lIter.next(); long thisStart = thisL.getStart(); long thisFinish = thisL.getEnd(); java.awt.Color transCol = new java.awt.Color(0, 0, 255, 12); final Marker bst = new IntervalMarker(thisStart, thisFinish, transCol, new BasicStroke(2.0f), null, null, 1.0f); bst.setLabel("O/S-" + ctr++); 
bst.setLabelAnchor(RectangleAnchor.TOP_LEFT); bst.setLabelFont(new Font("SansSerif", Font.ITALIC + Font.BOLD, 10)); bst.setLabelTextAnchor(TextAnchor.TOP_LEFT); legPlot.addDomainMarker(bst, Layer.BACKGROUND); } } } } } tscS.addSeries(speeds); tscSLegs.addSeries(speedLegs); tscC.addSeries(bearings); if (showCourses) { tscC.addSeries(courses); tscCLegs.addSeries(courseLegs); } legPlot.setDataset(0, null); legPlot.setDataset(1, null); legPlot.setDataset(2, null); legPlot.setDataset(3, null); legPlot.setDataset(0, tscC); legPlot.setDataset(1, tscS); legPlot.setDataset(2, tscCLegs); legPlot.setDataset(3, tscSLegs); final NumberAxis axis2 = new NumberAxis("Speed (Kts)"); legPlot.setRangeAxis(1, axis2); legPlot.mapDatasetToRangeAxis(1, 1); legPlot.mapDatasetToRangeAxis(3, 1); legPlot.getRangeAxis(0).setLabel("Crse/Brg (Degs)"); legPlot.mapDatasetToRangeAxis(0, 0); legPlot.mapDatasetToRangeAxis(2, 0); final XYLineAndShapeRenderer lineRenderer1 = new XYLineAndShapeRenderer(true, true); lineRenderer1.setSeriesPaint(1, courseCol); lineRenderer1.setSeriesShape(1, ShapeUtilities.createDiamond(0.1f)); lineRenderer1.setSeriesPaint(0, java.awt.Color.RED); lineRenderer1.setSeriesShape(0, ShapeUtilities.createDiamond(2f)); final XYLineAndShapeRenderer lineRenderer2 = new XYLineAndShapeRenderer(true, false); lineRenderer2.setSeriesPaint(0, speedCol); final XYLineAndShapeRenderer lineRenderer3 = new XYLineAndShapeRenderer(false, true); lineRenderer3.setSeriesPaint(0, courseCol); lineRenderer3.setSeriesShape(0, ShapeUtilities.createUpTriangle(2f)); final XYLineAndShapeRenderer lineRenderer4 = new XYLineAndShapeRenderer(false, true); lineRenderer4.setSeriesPaint(0, speedCol); lineRenderer4.setSeriesShape(0, ShapeUtilities.createDownTriangle(2f)); // ok, and store them legPlot.setRenderer(0, lineRenderer1); legPlot.setRenderer(1, lineRenderer2); legPlot.setRenderer(2, lineRenderer3); legPlot.setRenderer(3, lineRenderer4); if (startTime != Long.MAX_VALUE) 
legPlot.getDomainAxis().setRange(startTime, endTime); // ok - get the straight legs to sort themselves out // redoStraightLegs(); }
From source file:gov.va.isaac.util.OTFUtility.java
public static RefexVersionBI<?> getLatestRefexVersion( @SuppressWarnings("rawtypes") Collection<? extends RefexVersionBI> collection) { RefexVersionBI<?> newest = null; ;//from w ww.j a va 2 s.c om long newestTime = Long.MIN_VALUE; for (RefexVersionBI<?> x : collection) { if (x.getTime() > newestTime) { newest = x; newestTime = x.getTime(); } } return newest; }
From source file:gov.va.isaac.util.OTFUtility.java
public static RefexDynamicVersionBI<?> getLatestDynamicRefexVersion( @SuppressWarnings("rawtypes") Collection<? extends RefexDynamicVersionBI> collection) { RefexDynamicVersionBI<?> newest = null; ;// w w w. j a v a 2 s. com long newestTime = Long.MIN_VALUE; for (RefexDynamicVersionBI<?> x : collection) { if (x.getTime() > newestTime) { newest = x; newestTime = x.getTime(); } } return newest; }
From source file:alter.vitro.vgw.service.query.SimpleQueryHandler.java
private Vector<ReqResultOverData> findAggrAttrValue(String pQueryDefId, Vector<QueriedMoteAndSensors> pMotesAndTheirSensorAndFunctsVec, Vector<ReqFunctionOverData> reqFunctionVec, List<String> serviceDeployStatusStr, List<String[]> localReplacedResources) { //// w w w. java 2 s . co m // ADDED CODE -- OPTIMIZATION PENDING +++++ // // --------------- SERVICE CONTINUATION PREP // TODO: SERVICE CONTINUATION PREP //service Continuation Additions: //String serviceDeployStatusStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_UNKNOWN; serviceDeployStatusStr.add(ResponseAggrMsg.DEPLOY_STATUS_SERVICE_UNKNOWN); // deploy status flags boolean serviceDeployAllNodesAvailable = true; boolean serviceDeployContinuationEmployed = false; boolean serviceDeployPartiallyPossible = false; boolean serviceDeployImpossible = false; // [0] is the original nodeId, [1] the replacing node id and [2] the capability //List<String[]> localReplacedResources = new ArrayList<String[]>(); // // // TODO: 1.Use the motesAndTheirSensorAndFunctVec to get the requested motes and the requested capabilities. // TODO: 2.Check wth Continuation Service and Resource Availability Service. // TODO. 2a. If all nodes are available then Deploy_Status = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_POSSIBLE. // 2b. If a node in the requested motes is unavailable (or future: a requested resource is unavailable) // Check the equivalent nodes for matches for this capability. // If a match is found, replace the node in the motesAndTheirSensorAndFunctsVec with the replacement node // and keep this replacing tracked/stored locally (as well as the cache of the continuationService) // when the results are found, replace the original mote back, but also send the extra xml that says that the values from that node for that capability are from the replacement node // TODO: Careful! a node could be replaced by more than one nodes, based on the capabilities requested! TEST THIS CASE! // TODO: Careful! 
a node could be replaced for one capability, but not for another! // Also set the flag serviceContinuationEmployed to true. // if at the end only this flag is set then update the Deploy_Status to ResponseAggrMsg.DEPLOY_STATUS_SERVICE_CONTINUATION // If a match is not found then remove this node from the results. // Also set the flag servicePartiallyPossible to true. // if at the end only this flag is set then update the Deploy_Status ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL // If a the end both flags serviceContinuationEmployed and servicePartiallyPossible are true // and not the serviceImpossible flag then update the Deploy_Status to ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_CONT_COMBO // // Finally if NO nodes are available for the service set the serviceImpossible flag to true and // update the deploy_status to ResponseAggrMsg.DEPLOY_STATUS_SERVICE_IMPOSSIBLE // END: SERVICE CONTINUATION PREP Vector<QueriedMoteAndSensors> originalMotesAndTheirSensorAndFunctsVec = pMotesAndTheirSensorAndFunctsVec; Vector<QueriedMoteAndSensors> newMotesAndTheirSensorAndFunctsVec = new Vector<QueriedMoteAndSensors>(); List<String> allInvolvedMoteIdsList = new ArrayList<String>(); for (QueriedMoteAndSensors aMoteAndSensors : originalMotesAndTheirSensorAndFunctsVec) { allInvolvedMoteIdsList.add(aMoteAndSensors.getMoteid()); } logger.debug("Queried motes and sensors:"); for (QueriedMoteAndSensors aMoteAndSensors : originalMotesAndTheirSensorAndFunctsVec) { logger.debug("Mote Id: " + aMoteAndSensors.getMoteid()); if (aMoteAndSensors.getQueriedSensorIdsAndFuncVec() != null && !aMoteAndSensors.getQueriedSensorIdsAndFuncVec().isEmpty()) { HashMap<String, Vector<Integer>> functionsForCapabilityOfThisMoteHM = new HashMap<String, Vector<Integer>>(); for (ReqSensorAndFunctions sensAndFuncts : aMoteAndSensors.getQueriedSensorIdsAndFuncVec()) { logger.debug(" Capabilities: " + sensAndFuncts.getSensorModelid()); // TODO: we could probably acquire the friendly name too from some map 
//TODO: this isNodeResourceAvailable could be also done ideally within the ContinuationOfProvisionService within the findNextEquivalaneNode funciton (also could be synchronized) //logger.debug("DDDDD Size of functs:"+ Integer.toString(sensAndFuncts.getFunctionsOverSensorModelVec().size())); //{ // int smid = sensAndFuncts.getSensorModelIdInt(); // //logger.debug("For mote "+fullMoteId +" and sensor "+Integer.toString(smid) + " function vector size is "+reqFunctionVec.size()); // for (Integer inFunctVec : sensAndFuncts.getFunctionsOverSensorModelVec()) { // logger.debug("Fid: " + inFunctVec); // } // } functionsForCapabilityOfThisMoteHM.put(sensAndFuncts.getSensorModelid(), sensAndFuncts.getFunctionsOverSensorModelVec()); if (!ResourceAvailabilityService.getInstance().isNodeResourceAvailable(pQueryDefId, aMoteAndSensors.getMoteid(), sensAndFuncts.getSensorModelid())) { logger.debug("Node id: " + aMoteAndSensors.getMoteid() + " unavailable for: " + sensAndFuncts.getSensorModelid()); String[] replacementInfo = ContinuationOfProvisionService.getInstance() .findNextEquivalentNode(pQueryDefId, allInvolvedMoteIdsList, aMoteAndSensors.getMoteid(), sensAndFuncts.getSensorModelid()); if (replacementInfo == null) { // logger.debug("Could not find replacement node for " + sensAndFuncts.getSensorModelid() + " vsn id: " + pQueryDefId); serviceDeployPartiallyPossible = true; } else { logger.debug("Found replacement node " + replacementInfo[1] + " for node " + replacementInfo[0] + " for " + replacementInfo[2] + " vsn id: " + pQueryDefId); serviceDeployContinuationEmployed = true; // to prevent duplicates (though there really should not be such case) addToLocalReplacementInfoList(localReplacedResources, replacementInfo); } } //end if: node capability is not available else { //capability is available // add self as a replacement (locally) // a node could be available for some capabilities but not for others String[] replacementInfo = { aMoteAndSensors.getMoteid(), 
aMoteAndSensors.getMoteid(), sensAndFuncts.getSensorModelid() }; logger.debug("Adding self to local cache"); addToLocalReplacementInfoList(localReplacedResources, replacementInfo); } } //end for loop for this node's capability //loop through the localReplacedResources for this node and update the newMotesAndTheirSensorAndFunctsVec List<String> consideredReplacementNodes = new ArrayList<String>(); for (String[] entryLocal : localReplacedResources) { //logger.debug("Checking localReplacedResources for: " + entryLocal[0]); if (entryLocal[0].compareToIgnoreCase(aMoteAndSensors.getMoteid()) == 0) { String idOfOneReplacingNode = entryLocal[1]; if (!consideredReplacementNodes.contains(idOfOneReplacingNode)) { //logger.debug("INNER Checking localReplacedResources for: " + idOfOneReplacingNode); consideredReplacementNodes.add(idOfOneReplacingNode); Vector<ReqSensorAndFunctions> replacementNodeSensorAndFuncts = new Vector<ReqSensorAndFunctions>(); QueriedMoteAndSensors replacementMoteAndSensors = new QueriedMoteAndSensors( idOfOneReplacingNode, replacementNodeSensorAndFuncts); // inner loop again to find all capabilities that this node (idOfOneReplacingNode) is a replacement for for (String[] entryLocalInner : localReplacedResources) { if (entryLocalInner[0].compareToIgnoreCase(aMoteAndSensors.getMoteid()) == 0 && entryLocalInner[1].compareToIgnoreCase(idOfOneReplacingNode) == 0) { //logger.debug("INNER MATCh FOUND for: " + entryLocalInner[1] + " capability: " + entryLocalInner[2] ); String capabilityToAdd = entryLocalInner[2]; int capabilityToAddInt = ReqSensorAndFunctions.invalidSensModelId; try { capabilityToAddInt = Integer.valueOf(capabilityToAdd); } catch (Exception ex33) { logger.error( "Could not convert capability id to int for replacement capability: " + capabilityToAdd); } //logger.error("CAP TO ADD" + capabilityToAdd); if (functionsForCapabilityOfThisMoteHM.containsKey(capabilityToAdd) && functionsForCapabilityOfThisMoteHM.get(capabilityToAdd) != null && 
!functionsForCapabilityOfThisMoteHM.get(capabilityToAdd).isEmpty()) { //logger.error("FOUND IN HASHMAP!!!"); Vector<Integer> funcsOverThisCapability = functionsForCapabilityOfThisMoteHM .get(capabilityToAdd); //int smid = capabilityToAddInt; //logger.debug("DEB DEB For mote "+aMoteAndSensors.getMoteid() +" and sensor "+Integer.toString(smid) + " function vector size is "+reqFunctionVec.size()); //for (Integer inFunctVec : funcsOverThisCapability) { // logger.debug("DEB DEB Fid: " + inFunctVec); //} ReqSensorAndFunctions thisSensorAndFuncts = new ReqSensorAndFunctions( capabilityToAddInt, funcsOverThisCapability); //thisSensorAndFuncts.getSensorModelid(); //thisSensorAndFuncts.getFunctionsOverSensorModelVec().size(); //logger.debug("DEB DEB 333 For sensor "+ thisSensorAndFuncts.getSensorModelid()+ " function vector size is "+ thisSensorAndFuncts.getFunctionsOverSensorModelVec().size()); //for (Integer inFunctVec : funcsOverThisCapability) { // logger.debug("DEB DEB 333 Fid: " + inFunctVec); //} replacementNodeSensorAndFuncts.addElement(thisSensorAndFuncts); } } } if (!replacementNodeSensorAndFuncts.isEmpty()) { //logger.error("ADDING ELEMENT TO NEW MOTES LIST!!!" 
+ replacementMoteAndSensors.getMoteid() + ":: " + Integer.toString(replacementMoteAndSensors.getQueriedSensorIdsAndFuncVec().size())); replacementMoteAndSensors .setQueriedSensorIdsAndFuncVec(replacementNodeSensorAndFuncts); newMotesAndTheirSensorAndFunctsVec.addElement(replacementMoteAndSensors); } } } } //functionsForCapabilityOfThisMoteHM.clear(); } } //end for loop for this node of queried motes if (newMotesAndTheirSensorAndFunctsVec == null || newMotesAndTheirSensorAndFunctsVec.isEmpty()) { serviceDeployImpossible = true; logger.debug("Service Deploy is impossible for vsn id: " + pQueryDefId); } // decide status String statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_UNKNOWN; if (serviceDeployImpossible) { statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_IMPOSSIBLE; } else if (serviceDeployContinuationEmployed && serviceDeployPartiallyPossible) { statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_CONT_COMBO; } else if (serviceDeployContinuationEmployed) { statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_CONTINUATION; } else if (serviceDeployPartiallyPossible) { statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL; } else if (serviceDeployAllNodesAvailable && !serviceDeployImpossible && !serviceDeployContinuationEmployed && !serviceDeployPartiallyPossible) { statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_POSSIBLE; } serviceDeployStatusStr.set(0, statusDecidedStr); logger.debug("Decided DEPLOY STATUS WAS: " + serviceDeployStatusStr.get(0)); // We proceed here because even if service deploy is not possible, a reply will be sent with the status and empty lists (TODO consider) // However we also send (near the end of this method, alert messages for the deploy status if <> OK // // // TODO: To skip redundant queries in network // TODO: Count the reqFunction in reqFunction Vec (Debug print them) (also check that they are executed even if gateway level for each node-which should not happen) // TODO: Verify 
that if a function is gateway level and its removed(?) from the reqFunctionVec then it's not executed by the wsi adapter! // // // TODO: handle conditions for aggregate (gateway level functions). // //clone the reqFunctionsVec . TODO. this is not cloning though, we pass references to the added elements Vector<ReqFunctionOverData> onlyNodeReqFunctVec = new Vector<ReqFunctionOverData>(); Vector<ReqFunctionOverData> onlyGwLevelReqFunctVec = new Vector<ReqFunctionOverData>(); for (int i = 0; i < reqFunctionVec.size(); i++) { if (ReqFunctionOverData.isValidGatewayReqFunct(reqFunctionVec.elementAt(i).getfuncName())) onlyGwLevelReqFunctVec.addElement(reqFunctionVec.elementAt(i)); else { onlyNodeReqFunctVec.addElement(reqFunctionVec.elementAt(i)); } } // // get the involved capabilities per gatewaylevel function, and then remove the function id from those sensorModels! // // Produce a hashmap of gwLevel function name to Vector of capabilities (sensorModelId from the query/request) HashMap<String, Vector<String>> gwLevelFunctToCapsList = new HashMap<String, Vector<String>>(); // todo: IMPORTANT later we should group sensormodelIds per capability they belong to, but for now sensormodelid == capability! 
Iterator<ReqFunctionOverData> gwLevelFunctsIter = onlyGwLevelReqFunctVec.iterator(); while (gwLevelFunctsIter.hasNext()) { Vector<String> myInvolvedCaps = new Vector<String>(); ReqFunctionOverData tmpGwLevelFunct = gwLevelFunctsIter.next(); // new change to new Vector of motes (19/04) Iterator<QueriedMoteAndSensors> onMotesSensFunctsVecIter = newMotesAndTheirSensorAndFunctsVec .iterator(); while (onMotesSensFunctsVecIter.hasNext()) { QueriedMoteAndSensors tmpMoteAndSenAndFuncts = onMotesSensFunctsVecIter.next(); Iterator<ReqSensorAndFunctions> sensAndFunctsIter = tmpMoteAndSenAndFuncts .getQueriedSensorIdsAndFuncVec().iterator(); while (sensAndFunctsIter.hasNext()) { ReqSensorAndFunctions sensAndFuncts = sensAndFunctsIter.next(); //Vector<Integer> sensfunctsVector = sensAndFuncts.getFunctionsOverSensorModelVec(); int initSize = sensAndFuncts.getFid().size(); for (int k = initSize - 1; k >= 0; k--) { int sensfid = sensAndFuncts.getFid().get(k).intValue(); if (sensfid == tmpGwLevelFunct.getfuncId()) { if (!myInvolvedCaps.contains(sensAndFuncts.getSensorModelid())) { myInvolvedCaps.addElement(sensAndFuncts.getSensorModelid()); } // TODO: WHY??? 
(NOT NEEDED ANYMORE because we use the onlyNodeReqFunctVec to query the sensor and that filters out the functions in the adapter) ::here we should also delete the fid from the sensor model (but the simple way does not work for some reason, so it is left for future) //List tmpList = removeElementAt(sensAndFuncts.getFid(),k); //sensAndFuncts.getFid().clear(); //sensAndFuncts.getFid().addAll(tmpList); //sensAndFuncts.getFunctionsOverSensorModelVec().clear(); } } } } gwLevelFunctToCapsList.put(tmpGwLevelFunct.getfuncName(), myInvolvedCaps); } // // // Vector<ReqResultOverData> allResultsRead = new Vector<ReqResultOverData>(); //WsiAdapterCon myDCon = WsiAdapterConFactory.createMiddleWCon("uberdust", DbConInfoFactory.createConInfo("restHttp")); // DONE: The translateAggrQuery should not be executed for gateway level functions (skip them here or in the adapter con class.(?) // new changed to the new vector of motes : 19/04 logger.debug("Submitting query to the network"); // ASK ONLY FOR NODE LEVEL FUNCTIONS (TODO: Essentially for now, only last value is a node level function sent from the VSP, although other node level functions are supported) allResultsRead = myDCon.translateAggrQuery(newMotesAndTheirSensorAndFunctsVec, onlyNodeReqFunctVec); logger.debug("After Submitting query to the network"); // // // TODO: All gateway level functions reference a node level function at some point (either directly eg max or two hops eg "IF MAX " // // // Handle gateway level functions // first order of business, delete everything within them (some connectors could put latest values of all nodes, but we want to do it the more proper way) // then get the values of the referenced function(s) // aggregate the values and produce a single result. TODO: here UOMs of different sensor models could come into play. Handle this in the future! // // // 1. we create a new derived structure with unique fid keyed entries for required Result over data. 
Vector<ReqResultOverData> allUniqueFunctionsWithResults = new Vector<ReqResultOverData>(); Iterator<ReqResultOverData> messyResultsIter = allResultsRead.iterator(); // Loop over all resultOverData. They are keyed by fid, but there can be multiple of the same fid! // So here we merge those of same fid. while (messyResultsIter.hasNext()) //OUTER loop { ReqResultOverData tmpResStructFromMessyVec = messyResultsIter.next(); //ReqResultOverData tmpResStructMatched = null; boolean foundTheFid = false; Iterator<ReqResultOverData> uniqueFuncResultsIter = allUniqueFunctionsWithResults.iterator(); while (uniqueFuncResultsIter.hasNext()) //for the first pass of the OUTER loop the allUniqueFunctionsWithResults is empty { ReqResultOverData uniqueFunctResult = uniqueFuncResultsIter.next(); if (uniqueFunctResult.getFidInt() == tmpResStructFromMessyVec.getFidInt()) { foundTheFid = true; uniqueFunctResult.getOut().addAll(tmpResStructFromMessyVec.getAllResultsforFunct()); break; } } if (!foundTheFid) { allUniqueFunctionsWithResults.addElement(new ReqResultOverData(tmpResStructFromMessyVec.getFidInt(), tmpResStructFromMessyVec.getAllResultsforFunct())); } } // // Repeat this process slightly altered to add the unique Gw level functions // Iterator<ReqFunctionOverData> gwfunctIter = onlyGwLevelReqFunctVec.iterator(); while (gwfunctIter.hasNext()) //OUTER loop { ReqFunctionOverData tmpReqGwFunct = gwfunctIter.next(); //ReqResultOverData tmpResStructMatched = null; boolean foundTheFid = false; Iterator<ReqResultOverData> uniqueFuncResultsIter = allUniqueFunctionsWithResults.iterator(); while (uniqueFuncResultsIter.hasNext()) //for the first pass of the OUTER loop the allUniqueFunctionsWithResults is empty { ReqResultOverData uniqueFunctResult = uniqueFuncResultsIter.next(); if (uniqueFunctResult.getFidInt() == tmpReqGwFunct.getfuncId()) { foundTheFid = true; break; } } if (!foundTheFid) { allUniqueFunctionsWithResults.addElement( new ReqResultOverData(tmpReqGwFunct.getfuncId(), new 
Vector<ResultAggrStruct>())); } } // end of 1. // // 2. Go through all the gateway level functions (all of which are missing values right now). // For each gateway level function, go through all the results for this function. // gwfunctIter = onlyGwLevelReqFunctVec.iterator(); while (gwfunctIter.hasNext()) { ReqFunctionOverData tmpGwFunct = gwfunctIter.next(); Iterator<ReqResultOverData> resultsIter = allUniqueFunctionsWithResults.iterator(); // loop over all resultOverData for this specific function (matching is made in the next two lines) while (resultsIter.hasNext()) { ReqResultOverData tmpResForGWFunct = resultsIter.next(); if (tmpResForGWFunct.getFidInt() == tmpGwFunct.getfuncId()) { // descriptionTokens[0] : GW LEVEL PREFIX // descriptionTokens[1] : FUNCTION NAME // descriptionTokens[2] : REFERENCED FUNCTION ID String[] descriptionTokens = tmpGwFunct.getfuncName() .split(ReqFunctionOverData.GW_LEVEL_SEPARATOR); // // 3. Handle min, max and avg gateway level functions. (IF THEN FUNCTIONS ARE HANDLED AS ANOTHER CASE - THEY ARE ONE HOP HIGHER) // MIN, MAX, and AVG are all one hop (reference) away from a node level function (last value) if (descriptionTokens != null && descriptionTokens.length > 2 && (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.maxFunc) || descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.minFunc) || descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.avgFunc))) { logger.debug("Clearing up values for gw funct name: " + tmpGwFunct.getfuncName()); // cleanup of output list (it should however be already empty now that we rightfully only poll the WSI for node level functions) tmpResForGWFunct.getOut().clear(); tmpResForGWFunct.getAllResultsforFunct().clear(); //after cleanup of output list logger.debug("Filling up values for gw funct name: " + tmpGwFunct.getfuncName()); if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.maxFunc)) { // MAX FUNCTION ======================================= int aggregatedValues = 0; 
int refFunct = ReqFunctionOverData.unknownFuncId; try { refFunct = Integer.valueOf(descriptionTokens[2]); } catch (Exception exfrtm) { logger.error("Reference function id was set as unknown!"); } HashMap<String, Long> capToTsFromMinLong = new HashMap<String, Long>(); HashMap<String, Long> capToTsToMaxLong = new HashMap<String, Long>(); HashMap<String, Long> capToMaxValueLong = new HashMap<String, Long>(); // Iterator<ReqResultOverData> resultsIter002 = allUniqueFunctionsWithResults.iterator(); // INNER LOOP THROUGH FUNCTIONS with results, searching for the referenced NODE level function while (resultsIter002.hasNext()) { ReqResultOverData tmpRes = resultsIter002.next(); if (tmpRes.getFidInt() == refFunct) { // for every GENERIC capability requested( the generic capability is coded as hashcode() ) for (String currCapSidStr : gwLevelFunctToCapsList .get(tmpGwFunct.getfuncName())) { if (!capToMaxValueLong.containsKey(currCapSidStr)) { capToMaxValueLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE)); capToTsFromMinLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsToMaxLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE)); } Iterator<OutType> tmpOutItemIter = tmpRes.getOut().iterator(); while (tmpOutItemIter.hasNext()) { ResultAggrStruct tmpOutItem = new ResultAggrStruct( tmpOutItemIter.next()); if (currCapSidStr.trim().equalsIgnoreCase(tmpOutItem.getSid().trim())) { try { long longValToCompare = Long.parseLong(tmpOutItem.getVal()); if (longValToCompare > capToMaxValueLong.get(currCapSidStr) .longValue()) { capToMaxValueLong.put(currCapSidStr, Long.valueOf(longValToCompare)); } if (capToTsFromMinLong.get(currCapSidStr) .longValue() > tmpOutItem.getTis().getFromTimestamp() .getTime()) { capToTsFromMinLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getFromTimestamp().getTime())); } if (capToTsToMaxLong.get(currCapSidStr).longValue() < tmpOutItem .getTis().getToTimestamp().getTime()) { capToTsToMaxLong.put(currCapSidStr, Long.valueOf( 
tmpOutItem.getTis().getToTimestamp().getTime())); } aggregatedValues += 1; } catch (Exception e) { logger.error("Invalid format to aggregate"); } } } ResultAggrStruct thisAggrResult = new ResultAggrStruct( ResultAggrStruct.MidSpecialForAggregateMultipleValues, Integer.valueOf(currCapSidStr), Long.toString(capToMaxValueLong.get(currCapSidStr)), aggregatedValues, new TimeIntervalStructure( new Timestamp(capToTsFromMinLong.get(currCapSidStr)), new Timestamp(capToTsToMaxLong.get(currCapSidStr)))); tmpResForGWFunct.getOut().add(thisAggrResult); } } } } else if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.minFunc)) { // MIN FUNCTION ======================================= int aggregatedValues = 0; int refFunct = ReqFunctionOverData.unknownFuncId; try { refFunct = Integer.valueOf(descriptionTokens[2]); } catch (Exception exfrtm) { logger.error("Reference function id was set as unknown!"); } HashMap<String, Long> capToTsFromMinLong = new HashMap<String, Long>(); HashMap<String, Long> capToTsToMaxLong = new HashMap<String, Long>(); HashMap<String, Long> capToMinValueLong = new HashMap<String, Long>(); // Iterator<ReqResultOverData> resultsIter002 = allUniqueFunctionsWithResults.iterator(); while (resultsIter002.hasNext()) { ReqResultOverData tmpRes = resultsIter002.next(); if (tmpRes.getFidInt() == refFunct) { // for every GENERIC capability requested( the genereic capability is coded as hashcode() ) for (String currCapSidStr : gwLevelFunctToCapsList .get(tmpGwFunct.getfuncName())) { if (!capToMinValueLong.containsKey(currCapSidStr)) { capToMinValueLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsFromMinLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsToMaxLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE)); } Iterator<OutType> tmpOutItemIter = tmpRes.getOut().iterator(); while (tmpOutItemIter.hasNext()) { ResultAggrStruct tmpOutItem = new ResultAggrStruct( tmpOutItemIter.next()); if 
(currCapSidStr.trim().equalsIgnoreCase(tmpOutItem.getSid().trim())) { try { long longValToCompare = Long.parseLong(tmpOutItem.getVal()); if (longValToCompare < capToMinValueLong.get(currCapSidStr) .longValue()) { capToMinValueLong.put(currCapSidStr, Long.valueOf(longValToCompare)); } if (capToTsFromMinLong.get(currCapSidStr) .longValue() > tmpOutItem.getTis().getFromTimestamp() .getTime()) { capToTsFromMinLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getFromTimestamp().getTime())); } if (capToTsToMaxLong.get(currCapSidStr).longValue() < tmpOutItem .getTis().getToTimestamp().getTime()) { capToTsToMaxLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getToTimestamp().getTime())); } aggregatedValues += 1; } catch (Exception e) { logger.error("Invalid format to aggregate"); } } } ResultAggrStruct thisAggrResult = new ResultAggrStruct( ResultAggrStruct.MidSpecialForAggregateMultipleValues, Integer.valueOf(currCapSidStr), Long.toString(capToMinValueLong.get(currCapSidStr)), aggregatedValues, new TimeIntervalStructure( new Timestamp(capToTsFromMinLong.get(currCapSidStr)), new Timestamp(capToTsToMaxLong.get(currCapSidStr)))); logger.debug("Adding a result"); tmpResForGWFunct.getOut().add(thisAggrResult); logger.debug("Added a result"); } } } } else if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.avgFunc)) { // AVG FUNCTION ======================================= int aggregatedValues = 0; int refFunct = ReqFunctionOverData.unknownFuncId; try { refFunct = Integer.valueOf(descriptionTokens[2]); } catch (Exception exfrtm) { logger.error("Reference function id was set as unknown!"); } HashMap<String, Long> capToTsFromMinLong = new HashMap<String, Long>(); HashMap<String, Long> capToTsToMaxLong = new HashMap<String, Long>(); HashMap<String, Long> capToAvgValueLong = new HashMap<String, Long>(); // Iterator<ReqResultOverData> resultsIter002 = allUniqueFunctionsWithResults.iterator(); while (resultsIter002.hasNext()) { ReqResultOverData tmpRes = 
resultsIter002.next(); /*System.out.println("LLLLLLLL TEST 3"); StringBuilder tmpRsOD = new StringBuilder(); tmpRsOD.append("resf fid:"); tmpRsOD.append(tmpRes.getFidInt()); tmpRsOD.append(" AND ref funct:"); tmpRsOD.append(refFunct); System.out.println("OOOOOOOOOOOOOO TEST 3B" + tmpRsOD.toString());*/ if (tmpRes.getFidInt() == refFunct) { // for every GENERIC capability requested( the genereic capability is coded as hashcode() ) for (String currCapSidStr : gwLevelFunctToCapsList .get(tmpGwFunct.getfuncName())) { if (!capToAvgValueLong.containsKey(currCapSidStr)) { capToAvgValueLong.put(currCapSidStr, Long.valueOf(0)); capToTsFromMinLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsToMaxLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE)); } Iterator<OutType> tmpOutItemIter = tmpRes.getOut().iterator(); while (tmpOutItemIter.hasNext()) { ResultAggrStruct tmpOutItem = new ResultAggrStruct( tmpOutItemIter.next()); if (currCapSidStr.trim().equalsIgnoreCase(tmpOutItem.getSid().trim())) { try { long longValOfSensor = Long.parseLong(tmpOutItem.getVal()); long valPrevious = capToAvgValueLong.get(currCapSidStr) .longValue(); long newVal = valPrevious + longValOfSensor; capToAvgValueLong.put(currCapSidStr, Long.valueOf(newVal)); // if (capToTsFromMinLong.get(currCapSidStr) .longValue() > tmpOutItem.getTis().getFromTimestamp() .getTime()) { capToTsFromMinLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getFromTimestamp().getTime())); } if (capToTsToMaxLong.get(currCapSidStr).longValue() < tmpOutItem .getTis().getToTimestamp().getTime()) { capToTsToMaxLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getToTimestamp().getTime())); } aggregatedValues += 1; } catch (Exception e) { logger.error("Invalid format to aggregate"); } } } Double avgVal = Double .valueOf(capToAvgValueLong.get(currCapSidStr).longValue()) / Double.valueOf(aggregatedValues); /*StringBuilder tmpRs = new StringBuilder(); tmpRs.append("Result:"); tmpRs.append(avgVal); 
tmpRs.append(" aggr vals:"); tmpRs.append(aggregatedValues); System.out.println("OOOOOOOOOOOOOO TEST 3C" + tmpRs.toString());*/ ResultAggrStruct thisAggrResult = new ResultAggrStruct( ResultAggrStruct.MidSpecialForAggregateMultipleValues, Integer.valueOf(currCapSidStr), Double.toString(avgVal), aggregatedValues, new TimeIntervalStructure( new Timestamp(capToTsFromMinLong.get(currCapSidStr)), new Timestamp(capToTsToMaxLong.get(currCapSidStr)))); tmpResForGWFunct.getOut().add(thisAggrResult); //System.out.println("OOOOOOOOOOOOOO TEST 3D" + tmpRs.toString()); } } } } } } } } // end of while loop on ONE HOP REFERENCE GW FUNCTIONs (MIN, MAX, AVG // Start of while loop on 2nd HOP reference GW function (need the one hops already filled in) // TODO: we don't handle/anticipate the case where the IF_THEN function references another IF_THEN function (even repeatedly). More flexibility could be implemented!! gwfunctIter = onlyGwLevelReqFunctVec.iterator(); // gets a NEW iterator while (gwfunctIter.hasNext()) { ReqFunctionOverData tmpGwFunct = gwfunctIter.next(); Iterator<ReqResultOverData> resultsIter = allUniqueFunctionsWithResults.iterator(); // loop over all resultOverData for this specific function (matching is made in the next two lines) while (resultsIter.hasNext()) { ReqResultOverData tmpResForGWFunct = resultsIter.next(); if (tmpResForGWFunct.getFidInt() == tmpGwFunct.getfuncId()) { // descriptionTokens[0] : GW LEVEL PREFIX // descriptionTokens[1] : FUNCTION NAME // descriptionTokens[2] : REFERENCED FUNCTION ID String[] descriptionTokens = tmpGwFunct.getfuncName() .split(ReqFunctionOverData.GW_LEVEL_SEPARATOR); if (descriptionTokens != null && descriptionTokens.length > 2 && (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.ruleRuleBinaryAndFunc) || descriptionTokens[1] .equalsIgnoreCase(ReqFunctionOverData.ruleRuleIfThenFunc))) { logger.debug("Clearing up values for gw funct name: " + tmpGwFunct.getfuncName()); // cleanup of output list (it should however be 
already empty now that we rightfully only poll the WSI for node level functions) tmpResForGWFunct.getOut().clear(); tmpResForGWFunct.getAllResultsforFunct().clear(); //after cleanup of output list logger.debug("Filling values for funct name: " + tmpGwFunct.getfuncName()); if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.ruleRuleBinaryAndFunc)) { //TODO: handle a binary rule (condition1 and condition2) } else if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.ruleRuleIfThenFunc)) { logger.debug("Filling values for funct name: " + tmpGwFunct.getfuncName()); //handle a binary rule (condition1 then do 3) // 1: check if the referenced function has results that meet the conditions in its threshold int consideredValues = 0; int refFunct = ReqFunctionOverData.unknownFuncId; try { refFunct = Integer.valueOf(descriptionTokens[2]); } catch (Exception exfrtm) { logger.error("Reference function id was set as unknown!"); } HashMap<String, Long> capToTsFromMinLong = new HashMap<String, Long>(); HashMap<String, Long> capToTsToMaxLong = new HashMap<String, Long>(); HashMap<String, Long> capToConditionValueLong = new HashMap<String, Long>(); // Iterator<ReqResultOverData> resultsIter002 = allUniqueFunctionsWithResults.iterator(); while (resultsIter002.hasNext()) { ReqResultOverData tmpRes = resultsIter002.next(); if (tmpRes.getFidInt() == refFunct) { // for every GENERIC capability requested( the genereic capability is coded as hashcode() ) for (String currCapSidStr : gwLevelFunctToCapsList .get(tmpGwFunct.getfuncName())) { if (!capToConditionValueLong.containsKey(currCapSidStr)) { capToTsFromMinLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsToMaxLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE)); capToConditionValueLong.put(currCapSidStr, Long.valueOf(0)); } Iterator<OutType> tmpOutItemIter = tmpRes.getOut().iterator(); while (tmpOutItemIter.hasNext()) { ResultAggrStruct tmpOutItem = new ResultAggrStruct( tmpOutItemIter.next()); if 
(currCapSidStr.trim().equalsIgnoreCase(tmpOutItem.getSid().trim())) { try { // TODO: Actually here we need to find in the original ReqFunctVec (that contains the full function definitions, not just the function id) // the thresholds set. Before we search for the thresholds in the referenced function but now (better) we get them from this function (If_then) boolean foundTheCurrentFunctionInTheOriginalReqFunctionVec = false; long longValOfSensor = Long.parseLong(tmpOutItem.getVal()); ReqFunctionOverData currentFunctionInCondition = null; for (int kx1 = 0; kx1 < reqFunctionVec.size(); kx1++) { if (reqFunctionVec.elementAt(kx1) .getfuncId() == tmpResForGWFunct.getFidInt()) { currentFunctionInCondition = reqFunctionVec .elementAt(kx1); foundTheCurrentFunctionInTheOriginalReqFunctionVec = true; break; } } // but also find the reference function in the condition to include details in the notification boolean foundTheReferencedFunctionInTheOriginalReqFunctionVec = false; ReqFunctionOverData referencedFunctionInCondition = null; for (int kx1 = 0; kx1 < reqFunctionVec.size(); kx1++) { if (reqFunctionVec.elementAt(kx1) .getfuncId() == tmpResForGWFunct.getFidInt()) { referencedFunctionInCondition = reqFunctionVec .elementAt(kx1); foundTheReferencedFunctionInTheOriginalReqFunctionVec = true; break; } } if (foundTheCurrentFunctionInTheOriginalReqFunctionVec) // the referred function here must have a threshold field because it's an evaluation of a condition { if (currentFunctionInCondition != null && currentFunctionInCondition .getThresholdField() != null && !currentFunctionInCondition.getThresholdField() .isEmpty()) { logger.debug( "-------- INTO EVALUATING CONDITION NOW! 
"); ThresholdStructure requiredThresholds = new ThresholdStructure( currentFunctionInCondition.getThresholdField()); if (requiredThresholds.getLowerBound() != null && !requiredThresholds.getLowerBound() .isEmpty()) { logger.debug("Condition low parameter: " + requiredThresholds.getLowerBound() .trim()); // TODO: handle other conditions for services (lower than, equals, between) long lowbound = Long.parseLong( requiredThresholds.getLowerBound()); if (longValOfSensor >= lowbound) { logger.debug("Sensor: " + tmpOutItem.getMid() + ". Condition is met: " + Long.toString(longValOfSensor) + " >= " + requiredThresholds .getLowerBound().trim()); consideredValues = 1; ResultAggrStruct thisAggrResult = new ResultAggrStruct( tmpOutItem.getMid(), Integer.valueOf(currCapSidStr), Long.toString(longValOfSensor), consideredValues, new TimeIntervalStructure(new Timestamp( Long.valueOf(tmpOutItem.getTis() .getFromTimestamp() .getTime())), new Timestamp(Long.valueOf( tmpOutItem.getTis() .getToTimestamp() .getTime())))); tmpResForGWFunct.getOut().add(thisAggrResult); // DONE: Send an alert notification NotificationsFromVSNs newNotify = new NotificationsFromVSNs(); newNotify.setQueryDefId(pQueryDefId); newNotify.setVgwID(myPeerId); // get continuation info. 
Careful, we have not yet replaced the replacemntIDs with the original nodes in the measurements here (it's done later) // but we have to set the MoteId to the Original Id and the replacementId to the replacement node String[] replaceItem = getLocalReplacemntInfoListItem( localReplacedResources, tmpOutItem.getMid(), tmpOutItem.getSid()); if (replaceItem != null && replaceItem[0] != null && !replaceItem[0].isEmpty() && replaceItem[0].compareToIgnoreCase( replaceItem[1]) != 0) { newNotify.setMoteID(replaceItem[0]); newNotify.setReplacmntID( tmpOutItem.getMid()); } else { newNotify.setMoteID(tmpOutItem.getMid()); newNotify.setReplacmntID(""); } newNotify.setValue(longValOfSensor); if (tmpOutItem.getTis() != null && tmpOutItem .getTis().isTimestampFromDefined()) newNotify.setValueTimestamp( Long.toString(tmpOutItem.getTis() .getFromTimestamp() .getTime())); newNotify.setBoundValue(lowbound); newNotify.setRefFunctName( referencedFunctionInCondition .getfuncName()); newNotify.setRefFunctTriggerSign("gt"); //default for lower bound conditions newNotify.setCapabilityCode( tmpOutItem.getSid().trim()); newNotify.setTimestamp(Long .toString(System.currentTimeMillis())); newNotify.setType( NotificationsFromVSNs.CRITICAL_TYPE); newNotify.setLevel( NotificationsFromVSNs.GATEWAY_LEVEL); newNotify.setRefFunctId( referencedFunctionInCondition .getfuncId()); newNotify.setMessage( "Condition was met for node id: " + newNotify.getMoteID() + " value: " + longValOfSensor + " capability code:__" + tmpOutItem.getSid().trim()); // Send the response to the requesting end user //System.out.println("Sending Notification!"); String notifMsgToSend = NotificationsFromVSNs .getAlertDelimitedString(newNotify); this.sendResponse(notifMsgToSend); } else { logger.debug("Sensor: " + tmpOutItem.getMid() + " with value: " + Long.toString(longValOfSensor) + " does not meet Condition!"); } } } } } catch (Exception e) { logger.error("Invalid format to aggregate"); } } } // // } } } } } } } } // Add trailing 
section for service deployability and replacements list // Careful! for the replacements list, skip the entries where the node replaces itself // DONE: RECONSTRUCT the Vector<ReqResultOverData> allUniqueFunctionsWithResults for the original nodes! // // logger.debug("BEFORE RECONSTRUCTION"); if (allUniqueFunctionsWithResults != null) { logger.debug("IN RECONSTRUCTION"); for (ReqResultOverData aResultOverData : allUniqueFunctionsWithResults) { String functionId = aResultOverData.getFid(); // replacing is needed only for node level functions and possibly for if then functions referring to last values of sensors (not for aggregate GW level or if_then over aggregates) // &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&+++++++++++++++++++++++++++++++++++== /* boolean isGwLevel = false; Iterator<ReqFunctionOverData> gwfunctIterLocal = onlyGwLevelReqFunctVec.iterator(); while(gwfunctIterLocal.hasNext()) //OUTER loop { ReqFunctionOverData tmpReqGwFunct = gwfunctIterLocal.next(); if(Integer.toString(tmpReqGwFunct.getfuncId()).equalsIgnoreCase(functionId)){ isGwLevel = true; break; } } if(!isGwLevel) { */ logger.debug("FID:: " + functionId); if (aResultOverData.getAllResultsforFunct() != null) { if (aResultOverData.getAllResultsforFunct().isEmpty()) { logger.debug("has no results!!"); } else { logger.debug("found results!!"); } Vector<ResultAggrStruct> newReconstructedResultVec = null; boolean foundAtLeastOneResultForSpecificMoteId = false; for (ResultAggrStruct thisResult : aResultOverData.getAllResultsforFunct()) { if (thisResult.getMid() .compareToIgnoreCase(ResultAggrStruct.MidSpecialForAggregateMultipleValues) != 0) { if (!foundAtLeastOneResultForSpecificMoteId) { foundAtLeastOneResultForSpecificMoteId = true; newReconstructedResultVec = new Vector<ResultAggrStruct>(); } String[] replaceItem = getLocalReplacemntInfoListItem(localReplacedResources, thisResult.getMid(), thisResult.getSid()); if (replaceItem != null && replaceItem[0] != null && 
!replaceItem[0].isEmpty()) { logger.debug("Back to replacing node :" + thisResult.getMid() + " with original node: " + replaceItem[0]); thisResult.setMid(replaceItem[0]); newReconstructedResultVec.addElement(thisResult); } } } if (foundAtLeastOneResultForSpecificMoteId) { aResultOverData.setAllResultsforFunct(newReconstructedResultVec); } } /* } */ } } // // DEBUG: logger.debug("The gateway has collected results and is ready to send them!"); //return allResultsRead; // Support for various data types is added by the DataTypeAdapter class // ********************** COAP MESSAGES BACK TO GATEWAY ******************************* // ALSO SEND ANY SECURITY MESSAGES // TODO: we could clean the cache after sending these messages (?) if (!VitroGatewayService.getVitroGatewayService().isWsiTrustCoapMessagingSupport()) { logger.debug("No SUPPORT FOR SENDING TRUST SECURITY INFO back to VSP!"); } if (!VitroGatewayService.getVitroGatewayService().isTrustRoutingCoapMessagingActive()) { logger.debug("No ACTIVATION FOR SENDING TRUST SECURITY INFO back to VSP!"); } if (VitroGatewayService.getVitroGatewayService().isWsiTrustCoapMessagingSupport() && VitroGatewayService.getVitroGatewayService().isTrustRoutingCoapMessagingActive()) { logger.debug("Attempting to send TRUST SECURITY INFO back to VSP!"); HashMap<String, InfoOnTrustRouting> cacheTrustCoapCopy = new HashMap<String, InfoOnTrustRouting>( TrustRoutingQueryService.getInstance().getCachedDirectoryOfTrustRoutingInfo()); String aRefCapCode = ""; int aRefFunctId = 1;// last value is always in the request if (originalMotesAndTheirSensorAndFunctsVec != null) { try { aRefCapCode = originalMotesAndTheirSensorAndFunctsVec.firstElement() .getQueriedSensorIdsAndFuncVec().get(0).getSensorModelid(); } catch (Exception e339) { logger.error("Could not acquire sample capability id for security TRUST alert "); } try { aRefFunctId = originalMotesAndTheirSensorAndFunctsVec.firstElement() 
.getQueriedSensorIdsAndFuncVec().get(0).getFunctionsOverSensorModelVec().firstElement(); } catch (Exception e339) { logger.error("Could not acquire sample function id for security TRUST alert "); } } if (cacheTrustCoapCopy != null) { for (String sourceNodeId : cacheTrustCoapCopy.keySet()) { InfoOnTrustRouting tmpInfoOnTrust = cacheTrustCoapCopy.get(sourceNodeId); HashMap<String, Integer> tmpParentIdToPFiHM = tmpInfoOnTrust.getParentIdsToPFI(); for (String parentNodeId : tmpParentIdToPFiHM.keySet()) { // TODO: Send a SECURITY notification NotificationsFromVSNs newNotify = new NotificationsFromVSNs(); newNotify.setQueryDefId(pQueryDefId); newNotify.setVgwID(myPeerId); newNotify.setMoteID(sourceNodeId); newNotify.setValue(tmpParentIdToPFiHM.get(parentNodeId)); // TODO: Demo: change to current timestamp which is more reliable newNotify.setValueTimestamp(Long.toString(System.currentTimeMillis())); // the time stamp for the PFI value newNotify.setTimestamp(Long.toString(System.currentTimeMillis())); //the time stamp of the notification //newNotify.setTimestamp(tmpInfoOnTrust.getTimestamp() ); //newNotify.setValueTimestamp(tmpInfoOnTrust.getTimestamp()); newNotify.setType(NotificationsFromVSNs.SECURITY_TYPE); newNotify.setLevel(NotificationsFromVSNs.GATEWAY_LEVEL); // we need sample valid funct ids and capability codes related to this VSN , to associate it at the VSP level with a partial service! newNotify.setRefFunctId(aRefFunctId); newNotify.setCapabilityCode(aRefCapCode); // the message field is here used to store the parent ID. 
newNotify.setMessage(parentNodeId); // Send the response to the requesting end user //System.out.println("Sending Notification!"); String notifMsgToSend = NotificationsFromVSNs.getAlertDelimitedString(newNotify); try { this.sendResponse(notifMsgToSend); logger.debug("Sent one TRUST SECURITY INFO back to VSP!"); } catch (Exception securSendExc) { logger.error("Could not send Security Type notification", securSendExc); } } } } // /* logger.debug("Sending a dummy message security for TRUST-DEBUG"); { //--------------------------------------------------------------------- // TODO: Send a SECURITY notification NotificationsFromVSNs newNotify = new NotificationsFromVSNs(); newNotify.setQueryDefId(pQueryDefId); newNotify.setVgwID(myPeerId); newNotify.setMoteID("urn:wisebed:ctitestbed:0xca2"); newNotify.setValue(400); newNotify.setValueTimestamp(Long.toString(new Date().getTime())); newNotify.setTimestamp(Long.toString(new Date().getTime())); newNotify.setType(NotificationsFromVSNs.SECURITY_TYPE); newNotify.setLevel(NotificationsFromVSNs.GATEWAY_LEVEL); newNotify.setRefFunctId(aRefFunctId); newNotify.setCapabilityCode(aRefCapCode); // the message field is here used to store the parent ID. 
newNotify.setMessage("urn:wisebed:ctitestbed:0xCC"); // Send the response to the requesting end user //System.out.println("Sending Notification!"); String notifMsgToSend = NotificationsFromVSNs.getAlertDelimitedString(newNotify); try{ this.sendResponse(notifMsgToSend); logger.debug("Sent one TRUST SECURITY INFO back to VSP!"); }catch(Exception securSendExc){ logger.error("Could not send Security Type notification" , securSendExc); } //--------------------------------------------------------------------- } */ } //end of if we have to send the security Coap Routing Trust Messages // %%%%%%%%%% DIRECTLY INFORM THE GATEWAY OF PROBLEMATIC DEPLOY STATUS: if (serviceDeployImpossible || serviceDeployContinuationEmployed || serviceDeployPartiallyPossible) { String aRefMote = ""; String aRefCapCode = ""; int aRefFunctId = 1;// last value is always in the request if (originalMotesAndTheirSensorAndFunctsVec != null) { try { aRefMote = originalMotesAndTheirSensorAndFunctsVec.firstElement().getMoteid(); } catch (Exception e339) { logger.error("Could not acquire sample ref node it for DEPLOY ABILITY STATUS alert "); } try { aRefCapCode = originalMotesAndTheirSensorAndFunctsVec.firstElement() .getQueriedSensorIdsAndFuncVec().get(0).getSensorModelid(); } catch (Exception e339) { logger.error("Could not acquire sample capability for DEPLOY ABILITY STATUS alert "); } try { aRefFunctId = originalMotesAndTheirSensorAndFunctsVec.firstElement() .getQueriedSensorIdsAndFuncVec().get(0).getFunctionsOverSensorModelVec().firstElement(); } catch (Exception e339) { logger.error("Could not acquire sample function id for DEPLOY ABILITY STATUS alert "); } } String strMessage = ""; long deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_POSSIBLE_INT; if (serviceDeployImpossible) { strMessage = "The requested VSN cannot be supported by this island: " + myPeerId; // case ResponseAggrMsg.DEPLOY_STATUS_SERVICE_IMPOSSIBLE; deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_IMPOSSIBLE_INT; } else if 
(serviceDeployContinuationEmployed && serviceDeployPartiallyPossible) { // case ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_CONT_COMBO; strMessage = "The requested VSN is partially supported using service continuation on this island: " + myPeerId; deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_CONT_COMBO_INT; } else if (serviceDeployContinuationEmployed) { // case ResponseAggrMsg.DEPLOY_STATUS_SERVICE_CONTINUATION; strMessage = "The requested VSN is supported using service continuation on this island: " + myPeerId; deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_CONTINUATION_INT; } else if (serviceDeployPartiallyPossible) { // case ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL; strMessage = "The requested VSN is partially supported on this island: " + myPeerId; deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_INT; } // SEND THE NOTIFICATION:: // TODO: Send a DEPLOY_STATUS_TYPE notification NotificationsFromVSNs newNotify = new NotificationsFromVSNs(); newNotify.setQueryDefId(pQueryDefId); newNotify.setVgwID(myPeerId); newNotify.setMoteID(aRefMote); newNotify.setValue(deployValue); // TODO: Demo: change to current timestamp which is more reliable newNotify.setValueTimestamp(Long.toString(System.currentTimeMillis())); // the time stamp for the PFI value newNotify.setTimestamp(Long.toString(System.currentTimeMillis())); //the time stamp of the notification //newNotify.setTimestamp(tmpInfoOnTrust.getTimestamp() ); //newNotify.setValueTimestamp(tmpInfoOnTrust.getTimestamp()); newNotify.setType(NotificationsFromVSNs.DEPLOY_STATUS_TYPE); newNotify.setLevel(NotificationsFromVSNs.GATEWAY_LEVEL); // we need sample valid funct ids and capability codes related to this VSN , to associate it at the VSP level with a partial service! newNotify.setRefFunctId(aRefFunctId); newNotify.setCapabilityCode(aRefCapCode); // the message field is here used to store the parent ID. 
newNotify.setMessage(strMessage); // Send the response to the requesting end user //System.out.println("Sending Notification!"); String notifMsgToSend = NotificationsFromVSNs.getAlertDelimitedString(newNotify); try { this.sendResponse(notifMsgToSend); logger.debug("Sent one DEPLOY STATUS info back to VSP!"); } catch (Exception securSendExc) { logger.error("Could not send DEPLOY STATUS notification", securSendExc); } } return allUniqueFunctionsWithResults; }
From source file:de.innovationgate.webgate.api.jdbc.WGDatabaseImpl.java
/** * @throws WGAPIException /*w w w .j a v a2 s . c o m*/ * @see de.innovationgate.webgate.api.WGDatabaseCore#getRevision() */ public Comparable getRevision() throws WGAPIException { try { List result; if (_ddlVersion >= WGDatabase.CSVERSION_WGA5) { //Sequence seq = (Sequence) getSession().get(Sequence.class, "historylog_id"); //if (seq != null) { //return (seq.getValue() - 1); //} result = getSession().createQuery("select max(entry.id) from LogEntry as entry").list(); if (result.size() > 0) { Long id = (Long) result.get(0); if (id != null) { return id; } } return Long.MIN_VALUE; } else { result = getSession().createQuery("select max(entry.logtime) from LogEntry as entry").list(); Date lcDate = null; if (result.size() > 0) { lcDate = (Date) result.get(0); } if (lcDate != null) { return lcDate; } else { return new Date(Long.MIN_VALUE); } } } catch (HibernateException e) { throw new WGBackendException("Error retrieving historylog of database '" + getTitle() + "'", e); } }
From source file:org.apache.accumulo.server.tabletserver.Tablet.java
/**
 * Yet another constructor - this one allows us to avoid costly lookups into the Metadata
 * table if we already know the files we need - as at split time.
 *
 * <p>Besides wiring up the tablet's fields, this constructor: derives the logical time for
 * the root tablet from its data files, registers a configuration observer, replays any
 * write-ahead-log entries into tablet memory, and finally initializes the datafile manager.
 *
 * @param tabletServer   the owning tablet server
 * @param location       tablet directory; either a full path (contains ":") or a suffix
 *                       resolved against the table directory
 * @param extent         key extent this tablet covers
 * @param trm            per-tablet resource manager
 * @param conf           Hadoop configuration
 * @param fs             volume manager used to resolve paths and file systems
 * @param logEntries     WAL entries to recover; empty when no recovery is needed
 * @param datafiles      known data files with their metadata values (avoids a metadata scan)
 * @param time           tablet time marker; may be overridden for the root tablet below
 * @param lastLocation   previous location of this tablet, if any
 * @param scanFiles      files from a prior scan to remove once setup completes
 * @param initFlushID    initial flush id
 * @param initCompactID  initial compaction id
 * @throws IOException if reading data files or recovering the WAL fails
 */
private Tablet(final TabletServer tabletServer, final Text location, final KeyExtent extent,
        final TabletResourceManager trm, final Configuration conf, final VolumeManager fs,
        final List<LogEntry> logEntries, final SortedMap<FileRef, DataFileValue> datafiles, String time,
        final TServerInstance lastLocation, Set<FileRef> scanFiles, long initFlushID, long initCompactID)
        throws IOException {
    // A location containing ":" is already a full path (e.g. scheme-qualified);
    // otherwise it is a suffix under the table's directory.
    Path locationPath;
    if (location.find(":") >= 0) {
        locationPath = new Path(location.toString());
    } else {
        locationPath = fs.getFullPath(FileType.TABLE, extent.getTableId().toString() + location.toString());
    }
    this.location = locationPath.makeQualified(fs.getFileSystemByPath(locationPath));
    this.lastLocation = lastLocation;
    this.tabletDirectory = location.toString();
    this.conf = conf;
    this.acuTableConf = tabletServer.getTableConfiguration(extent);
    this.fs = fs;
    this.extent = extent;
    this.tabletResources = trm;
    this.lastFlushID = initFlushID;
    this.lastCompactID = initCompactID;
    if (extent.isRootTablet()) {
        // Root tablet: scan every data file for the largest key timestamp and use it as
        // the logical time. rtime tracks the max seen so far, starting below any real value.
        long rtime = Long.MIN_VALUE;
        for (FileRef ref : datafiles.keySet()) {
            Path path = ref.path();
            FileSystem ns = fs.getFileSystemByPath(path);
            FileSKVIterator reader = FileOperations.getInstance().openReader(path.toString(), true, ns,
                    ns.getConf(), tabletServer.getTableConfiguration(extent));
            long maxTime = -1;
            try {
                while (reader.hasTop()) {
                    maxTime = Math.max(maxTime, reader.getTopKey().getTimestamp());
                    reader.next();
                }
            } finally {
                // Always release the file reader, even if iteration throws.
                reader.close();
            }
            if (maxTime > rtime) {
                time = TabletTime.LOGICAL_TIME_ID + "" + maxTime;
                rtime = maxTime;
            }
        }
    }
    this.tabletServer = tabletServer;
    this.logId = tabletServer.createLogId(extent);
    this.timer = new TabletStatsKeeper();
    setupDefaultSecurityLabels(extent);
    tabletMemory = new TabletMemory();
    tabletTime = TabletTime.getInstance(time);
    persistedTime = tabletTime.getTime();
    // React to per-table property changes: reload constraints and default security labels.
    acuTableConf.addObserver(configObserver = new ConfigurationObserver() {
        private void reloadConstraints() {
            constraintChecker.set(new ConstraintChecker(getTableConfiguration()));
        }

        @Override
        public void propertiesChanged() {
            reloadConstraints();
            try {
                setupDefaultSecurityLabels(extent);
            } catch (Exception e) {
                log.error("Failed to reload default security labels for extent: " + extent.toString());
            }
        }

        @Override
        public void propertyChanged(String prop) {
            if (prop.startsWith(Property.TABLE_CONSTRAINT_PREFIX.getKey()))
                reloadConstraints();
            else if (prop.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) {
                try {
                    log.info("Default security labels changed for extent: " + extent.toString());
                    setupDefaultSecurityLabels(extent);
                } catch (Exception e) {
                    log.error("Failed to reload default security labels for extent: " + extent.toString());
                }
            }
        }

        @Override
        public void sessionExpired() {
            log.debug("Session expired, no longer updating per table props...");
        }
    });
    // Force a load of any per-table properties
    configObserver.propertiesChanged();
    tabletResources.setTablet(this, acuTableConf);
    if (!logEntries.isEmpty()) {
        // Write-Ahead Log recovery: replay mutations into tablet memory.
        // count[0] = number of mutations applied; count[1] = max system-assigned timestamp seen.
        log.info("Starting Write-Ahead Log recovery for " + this.extent);
        final long[] count = new long[2];
        final CommitSession commitSession = tabletMemory.getCommitSession();
        count[1] = Long.MIN_VALUE;
        try {
            Set<String> absPaths = new HashSet<String>();
            for (FileRef ref : datafiles.keySet())
                absPaths.add(ref.path().toString());
            tabletServer.recover(this.tabletServer.getFileSystem(), this, logEntries, absPaths,
                    new MutationReceiver() {
                        @Override
                        public void receive(Mutation m) {
                            Collection<ColumnUpdate> muts = m.getUpdates();
                            for (ColumnUpdate columnUpdate : muts) {
                                if (!columnUpdate.hasTimestamp()) {
                                    // if it is not a user set timestamp, it must have been set
                                    // by the system
                                    count[1] = Math.max(count[1], columnUpdate.getTimestamp());
                                }
                            }
                            tabletMemory.mutate(commitSession, Collections.singletonList(m));
                            count[0]++;
                        }
                    });
            if (count[1] != Long.MIN_VALUE) {
                // At least one system timestamp was recovered; advance tablet time past it.
                tabletTime.useMaxTimeFromWALog(count[1]);
            }
            commitSession.updateMaxCommittedTime(tabletTime.getTime());
            tabletMemory.updateMemoryUsageStats();
            if (count[0] == 0) {
                // Nothing was replayed: the WAL entries are unused and can be dropped.
                MetadataTableUtil.removeUnusedWALEntries(extent, logEntries, tabletServer.getLock());
                logEntries.clear();
            }
        } catch (Throwable t) {
            // Recovery failure is fatal unless the table is configured to ignore failures.
            if (acuTableConf.getBoolean(Property.TABLE_FAILURES_IGNORE)) {
                log.warn("Error recovering from log files: ", t);
            } else {
                throw new RuntimeException(t);
            }
        }
        // make some closed references that represent the recovered logs
        currentLogs = new HashSet<DfsLogger>();
        for (LogEntry logEntry : logEntries) {
            for (String log : logEntry.logSet) {
                String[] parts = log.split("/", 2);
                Path file = fs.getFullPath(FileType.WAL, parts[1]);
                currentLogs.add(new DfsLogger(tabletServer.getServerConfig(), logEntry.server, file));
            }
        }
        log.info("Write-Ahead Log recovery complete for " + this.extent + " (" + count[0]
                + " mutations applied, " + tabletMemory.getNumEntries() + " entries created)");
    }
    String contextName = acuTableConf.get(Property.TABLE_CLASSPATH);
    if (contextName != null && !contextName.equals("")) {
        // initialize context classloader, instead of possibly waiting for it to initialize for a scan
        // TODO this could hang, causing other tablets to fail to load - ACCUMULO-1292
        AccumuloVFSClassLoader.getContextManager().getClassLoader(contextName);
    }
    // do this last after tablet is completely setup because it
    // could cause major compaction to start
    datafileManager = new DatafileManager(datafiles);
    computeNumEntries();
    datafileManager.removeFilesAfterScan(scanFiles);
    // look for hints of a failure on the previous tablet server
    if (!logEntries.isEmpty() || needsMajorCompaction(MajorCompactionReason.NORMAL)) {
        // look for any temp files hanging around
        removeOldTemporaryFiles();
    }
    log.log(TLevel.TABLET_HIST, extent + " opened ");
}
From source file:org.apache.accumulo.tserver.Tablet.java
/**
 * Yet another constructor - this one allows us to avoid costly lookups into the Metadata table
 * if we already know the files we need - as at split time.
 *
 * This variant additionally runs the tablet directory through
 * {@link DirectoryDecommissioner#checkTabletDirectory} (volume migration), handles the legacy
 * old-root-tablet extent with no data, recovers against {@code extent}/{@code acuTableConf}
 * rather than {@code this}, and also observes the namespace configuration.
 *
 * @param tabletServer owning server; used for table config, log ids, locks and WAL recovery
 * @param location     tablet directory; absolute when it contains ':', else resolved under the table dir
 * @param extent       key extent this tablet serves
 * @param trm          resource manager for this tablet
 * @param conf         Hadoop configuration
 * @param fs           volume manager used to resolve paths and open readers
 * @param logEntries   write-ahead logs to recover; cleared if recovery applied no mutations
 * @param datafiles    known data files (avoids the metadata lookup this constructor exists to skip)
 * @param time         tablet time string; may be recomputed below for root tablets
 * @param lastLocation previous server location hint
 * @param scanFiles    files only referenced by scans; removed once the tablet is set up
 * @param initFlushID  last persisted flush id
 * @param initCompactID last persisted compaction id
 * @throws IOException if reading data files or recovering logs fails
 */
private Tablet(final TabletServer tabletServer, final Text location, final KeyExtent extent,
        final TabletResourceManager trm, final Configuration conf, final VolumeManager fs,
        final List<LogEntry> logEntries, final SortedMap<FileRef, DataFileValue> datafiles, String time,
        final TServerInstance lastLocation, Set<FileRef> scanFiles, long initFlushID, long initCompactID)
        throws IOException {
    // Resolve the tablet directory: a ':' means it is already a full URI.
    Path locationPath;
    if (location.find(":") >= 0) {
        locationPath = new Path(location.toString());
    } else {
        locationPath = fs.getFullPath(FileType.TABLE, extent.getTableId().toString() + location.toString());
    }
    // May relocate the directory if its volume is being decommissioned.
    locationPath = DirectoryDecommissioner.checkTabletDirectory(tabletServer, fs, extent, locationPath);
    this.location = locationPath;
    this.lastLocation = lastLocation;
    this.tabletDirectory = location.toString();
    this.conf = conf;
    this.acuTableConf = tabletServer.getTableConfiguration(extent);
    this.fs = fs;
    this.extent = extent;
    this.tabletResources = trm;
    this.lastFlushID = initFlushID;
    this.lastCompactID = initCompactID;

    if (extent.isRootTablet()) {
        // The root tablet derives its logical time from the max timestamp
        // actually present in its data files, overriding the passed-in time.
        long rtime = Long.MIN_VALUE;
        for (FileRef ref : datafiles.keySet()) {
            Path path = ref.path();
            FileSystem ns = fs.getFileSystemByPath(path);
            FileSKVIterator reader = FileOperations.getInstance().openReader(path.toString(), true, ns,
                    ns.getConf(), tabletServer.getTableConfiguration(extent));
            long maxTime = -1;
            try {
                while (reader.hasTop()) {
                    maxTime = Math.max(maxTime, reader.getTopKey().getTimestamp());
                    reader.next();
                }
            } finally {
                reader.close();
            }
            if (maxTime > rtime) {
                time = TabletTime.LOGICAL_TIME_ID + "" + maxTime;
                rtime = maxTime;
            }
        }
    }
    if (time == null && datafiles.isEmpty() && extent.equals(RootTable.OLD_EXTENT)) {
        // recovery... old root tablet has no data, so time doesn't matter:
        time = TabletTime.LOGICAL_TIME_ID + "" + Long.MIN_VALUE;
    }

    this.tabletServer = tabletServer;
    this.logId = tabletServer.createLogId(extent);
    this.timer = new TabletStatsKeeper();
    setupDefaultSecurityLabels(extent);
    tabletMemory = new TabletMemory();
    tabletTime = TabletTime.getInstance(time);
    persistedTime = tabletTime.getTime();

    // Observer keeps constraints and default security labels in sync with
    // per-table property changes; registered before the initial forced load below.
    acuTableConf.addObserver(configObserver = new ConfigurationObserver() {
        private void reloadConstraints() {
            constraintChecker.set(new ConstraintChecker(acuTableConf));
        }

        @Override
        public void propertiesChanged() {
            reloadConstraints();
            try {
                setupDefaultSecurityLabels(extent);
            } catch (Exception e) {
                log.error("Failed to reload default security labels for extent: " + extent.toString());
            }
        }

        @Override
        public void propertyChanged(String prop) {
            if (prop.startsWith(Property.TABLE_CONSTRAINT_PREFIX.getKey()))
                reloadConstraints();
            else if (prop.equals(Property.TABLE_DEFAULT_SCANTIME_VISIBILITY.getKey())) {
                try {
                    log.info("Default security labels changed for extent: " + extent.toString());
                    setupDefaultSecurityLabels(extent);
                } catch (Exception e) {
                    log.error("Failed to reload default security labels for extent: " + extent.toString());
                }
            }
        }

        @Override
        public void sessionExpired() {
            log.debug("Session expired, no longer updating per table props...");
        }
    });
    // Namespace-level property changes must trigger the same reloads.
    acuTableConf.getNamespaceConfiguration().addObserver(configObserver);
    // Force a load of any per-table properties
    configObserver.propertiesChanged();

    if (!logEntries.isEmpty()) {
        // Replay write-ahead logs into tablet memory. count[0] = mutations applied,
        // count[1] = max system-assigned timestamp seen (Long.MIN_VALUE sentinel = none seen).
        log.info("Starting Write-Ahead Log recovery for " + this.extent);
        final long[] count = new long[2];
        final CommitSession commitSession = tabletMemory.getCommitSession();
        count[1] = Long.MIN_VALUE;
        try {
            Set<String> absPaths = new HashSet<String>();
            for (FileRef ref : datafiles.keySet())
                absPaths.add(ref.path().toString());
            tabletServer.recover(this.tabletServer.getFileSystem(), extent, acuTableConf, logEntries,
                    absPaths, new MutationReceiver() {
                        @Override
                        public void receive(Mutation m) {
                            // LogReader.printMutation(m);
                            Collection<ColumnUpdate> muts = m.getUpdates();
                            for (ColumnUpdate columnUpdate : muts) {
                                if (!columnUpdate.hasTimestamp()) {
                                    // if it is not a user set timestamp, it must have been set
                                    // by the system
                                    count[1] = Math.max(count[1], columnUpdate.getTimestamp());
                                }
                            }
                            tabletMemory.mutate(commitSession, Collections.singletonList(m));
                            count[0]++;
                        }
                    });
            if (count[1] != Long.MIN_VALUE) {
                tabletTime.useMaxTimeFromWALog(count[1]);
            }
            commitSession.updateMaxCommittedTime(tabletTime.getTime());
            if (count[0] == 0) {
                // Nothing was actually replayed - the WAL entries are stale; drop them.
                MetadataTableUtil.removeUnusedWALEntries(extent, logEntries, tabletServer.getLock());
                logEntries.clear();
            }
        } catch (Throwable t) {
            // Recovery failure is fatal unless the table is explicitly configured
            // to ignore failures.
            if (acuTableConf.getBoolean(Property.TABLE_FAILURES_IGNORE)) {
                log.warn("Error recovering from log files: ", t);
            } else {
                throw new RuntimeException(t);
            }
        }
        // make some closed references that represent the recovered logs
        currentLogs = new HashSet<DfsLogger>();
        for (LogEntry logEntry : logEntries) {
            for (String log : logEntry.logSet) {
                currentLogs.add(new DfsLogger(tabletServer.getServerConfig(), log));
            }
        }
        log.info("Write-Ahead Log recovery complete for " + this.extent + " (" + count[0]
                + " mutations applied, " + tabletMemory.getNumEntries() + " entries created)");
    }

    String contextName = acuTableConf.get(Property.TABLE_CLASSPATH);
    if (contextName != null && !contextName.equals("")) {
        // initialize context classloader, instead of possibly waiting for it to initialize for a scan
        // TODO this could hang, causing other tablets to fail to load - ACCUMULO-1292
        AccumuloVFSClassLoader.getContextManager().getClassLoader(contextName);
    }

    // do this last after tablet is completely setup because it
    // could cause major compaction to start
    datafileManager = new DatafileManager(datafiles);
    computeNumEntries();
    datafileManager.removeFilesAfterScan(scanFiles);

    // look for hints of a failure on the previous tablet server
    if (!logEntries.isEmpty() || needsMajorCompaction(MajorCompactionReason.NORMAL)) {
        // look for any temp files hanging around
        removeOldTemporaryFiles();
    }

    log.log(TLevel.TABLET_HIST, extent + " opened");
}
From source file:net.tourbook.tour.photo.TourPhotoManager.java
/**
 * Assigns tour GPS positions to every linked photo of one tour by walking the tour's
 * time series and the photo list in parallel: each photo whose (adjusted or EXIF) time
 * falls into the current time slice receives the lat/lon of that slice's data point.
 * Photos timed after the end of the tour all get the position of the last slice.
 *
 * Does nothing when the tour has no photos or no geo positions.
 *
 * @param tourPhotoLink link object carrying the tour id and its photos
 */
private void setTourGPSIntoPhotos_10(final TourPhotoLink tourPhotoLink) {

    final ArrayList<Photo> allPhotos = tourPhotoLink.linkPhotos;
    final int numberOfPhotos = allPhotos.size();
    if (numberOfPhotos == 0) {
        // no photos are available for this tour
        return;
    }

    final TourData tourData = TourManager.getInstance().getTourData(tourPhotoLink.tourId);

    final double[] latitudeSerie = tourData.latitudeSerie;
    final double[] longitudeSerie = tourData.longitudeSerie;
    if (latitudeSerie == null) {
        // no geo positions
        return;
    }

    final int[] timeSerie = tourData.timeSerie;
    final int numberOfTimeSlices = timeSerie.length;

    // timeSerie values are relative to the tour start (seconds) - converted to
    // absolute epoch seconds via tourStartSeconds below.
    final long tourStartSeconds = tourData.getTourStartTime().toInstant().getEpochSecond();

    // End of the current slice = midpoint between this data point and the next.
    long timeSliceEnd;
    if (numberOfTimeSlices > 1) {
        timeSliceEnd = tourStartSeconds + (long) (timeSerie[1] / 2.0);
    } else {
        // tour contains only 1 time slice
        timeSliceEnd = tourStartSeconds;
    }

    int timeIndex = 0;
    int photoIndex = 0;

    // get first photo
    Photo photo = allPhotos.get(photoIndex);

    // loop: time serie
    while (true) {

        // loop: photo serie, check if a photo is in the current time slice
        while (true) {

            // prefer the user-adjusted link time; Long.MIN_VALUE marks "not adjusted",
            // then fall back to the EXIF time (both in milliseconds)
            final long imageAdjustedTime = photo.adjustedTimeLink;
            long imageTime = 0;
            if (imageAdjustedTime != Long.MIN_VALUE) {
                imageTime = imageAdjustedTime;
            } else {
                imageTime = photo.imageExifTime;
            }
            final long photoTime = imageTime / 1000; // ms -> s, matching timeSliceEnd

            if (photoTime <= timeSliceEnd) {
                // photo is contained within the current time slice
                final double tourLatitude = latitudeSerie[timeIndex];
                final double tourLongitude = longitudeSerie[timeIndex];
                setTourGPSIntoPhotos_20(tourData, photo, tourLatitude, tourLongitude);
                photoIndex++;
            } else {
                // advance to the next time slice
                break;
            }

            if (photoIndex < numberOfPhotos) {
                photo = allPhotos.get(photoIndex);
            } else {
                break;
            }
        }

        if (photoIndex >= numberOfPhotos) {
            // no more photos
            break;
        }

        /*
         * photos are still available
         */

        // advance to the next time slice on the x-axis
        timeIndex++;

        if (timeIndex >= numberOfTimeSlices - 1) {
            /*
             * end of tour is reached but there are still photos available, set remaining photos
             * at the end of the tour
             */
            // NOTE(review): after this tail loop drains the photos, the outer loop runs one
            // more iteration with the last photo still referenced; if its time is within
            // the slice it gets assigned a second time before termination. Appears to be a
            // harmless re-assignment - confirm intended.
            while (true) {
                final double tourLatitude = latitudeSerie[timeIndex];
                final double tourLongitude = longitudeSerie[timeIndex];
                setTourGPSIntoPhotos_20(tourData, photo, tourLatitude, tourLongitude);
                photoIndex++;
                if (photoIndex < numberOfPhotos) {
                    photo = allPhotos.get(photoIndex);
                } else {
                    break;
                }
            }
        } else {
            final long valuePointTime = timeSerie[timeIndex];
            final long sliceDuration = timeSerie[timeIndex + 1] - valuePointTime;
            timeSliceEnd = tourStartSeconds + valuePointTime + (sliceDuration / 2);
        }
    }
}
From source file:com.nuvolect.securesuite.data.SqlCipher.java
/**<pre>
 * contact_id = testIdTestVersion( account, cloud_c_id, cloud_version)
 * Return 0: cloud_c_id is current version, no update required
 * Return &gt;0: cloud_c_id does not exist or is newer, update required
 * Return Long.MIN_VALUE: cloud_c_id is older than local version, no update required
 *
 * Assumes setupImport() is called prior to calling this method.
 *
 * 1. Search cloud_c_id in field cloud_c_id, get contact_id, loc_version, loc_account
 *
 * 2. count == 0
 *    a. The loc_contact_id does not exist
 *    a. loc_contact_id is clear
 *    a. No contact_id collision, no remapping required
 *    a. import this contact
 *    a. return cloud_c_id (== cloud_id)
 *
 * 3. count &gt; 0,
 *    a. &amp;&amp; no account match
 *    a. cloud_c_id for this account is not in local database
 *    a. there is a contact_id collision, remapping is required
 *    a. import this contact
 *    a. return next open new_contact_id
 *
 * 4. count &gt; 0
 *    b. &amp;&amp; account match
 *    b. cloud_c_id for this account is in local database
 *    b. We don't know if contact_id is remapped
 *
 *    b.1. cloud_version &gt; loc_version
 *    b.1. cloud version is newer
 *    b.1. import this contact
 *    b.1. if remapped, remapped value is in contact_id
 *    b.1. return contact_id
 *
 *    b.2. cloud_version = local_version
 *    b.2. cloud version is the same
 *    b.2. do not import this contact
 *    b.2. return 0 (it exists, same version)
 *
 *    b.3. cloud_version &lt; local_version
 *    b.3. The local version is newer than the cloud version
 *    b.3. do not import this contact
 *    b.3. return Long.MIN_VALUE
 *
 * Test data:
 * account   cloud_c_id  contact_id  loc_ver  _id
 * jack      1           1           1        1
 * grandma   1           1001        1        2
 * jack      2           2           1        3
 * jack      3           3           2        4
 * grandma   3           1002        2        4
 *
 * Test cases:
 * account   cloud_c_id  cloud_ver  result
 * jack      1           1          _id 1, b.2, return 0
 * jack      2           2          _id 3, b.1, return 2
 * grandma   1           1          _id 2, b.2, return 0
 * grandma   1           2          _id 2, b.1, return 1001
 * grandma   3           1          _id 4, b.3, return Long.MIN_VALUE
 * grandma   2           1          _id x, a., return 1003 (next open id)
 * </pre>
 *
 * @param cloud_account account name the cloud contact belongs to
 * @param cloud_c_id    contact id as known in the cloud
 * @param cloud_version version of the cloud contact
 * @return see the case table above: 0, &gt;0 (possibly remapped id) or Long.MIN_VALUE
 */
public static synchronized long testIdTestVersion(String cloud_account, long cloud_c_id, int cloud_version) {

    // All local rows that share this cloud contact id (possibly several accounts).
    String[] projection = { ATab.account_name.toString(), ATab.contact_id.toString(),
            ATab.cloud_version.toString() };
    String where = ATab.cloud_c_id + "=?";
    String[] args = new String[] { String.valueOf(cloud_c_id) };

    Cursor c = account_db.query(ACCOUNT_TABLE, projection, where, args, null, null, null);
    int count = c.getCount();
    if (DEBUG_IMPORT)
        LogUtil.log("testIdTestVersion count: " + count + ", cloud_account: " + cloud_account
                + ", cloud_c_id: " + cloud_c_id + ", cloud_version: " + cloud_version);

    if (count == 0) {
        // Cloud id is completely unknown locally - import under its own id.
        c.close();
        if (DEBUG_IMPORT)
            LogUtil.log("case 2, cloud_c_id: " + cloud_c_id);
        return cloud_c_id; //case 2
    }

    // Scan the matching rows for one that belongs to the same account.
    boolean accountMatch = false;
    int loc_version = 0;
    long contact_id = 0;
    String account = "";
    while (c.moveToNext()) {
        account = c.getString(c.getColumnIndex(ATab.account_name.toString()));
        if (account.contentEquals(cloud_account)) {
            accountMatch = true;
            loc_version = c.getInt(c.getColumnIndex(ATab.cloud_version.toString()));
            contact_id = c.getLong(c.getColumnIndex(ATab.contact_id.toString()));
            break;
        }
    }
    c.close();

    if (!accountMatch) { //case 3
        // Id collision with another account - allocate a fresh local id and
        // remember the remapping for the rest of the import.
        long remapId = getNextUnusedContactID();
        mCloudRemapContactId.put(cloud_c_id, remapId);
        if (DEBUG_IMPORT)
            LogUtil.log("case 3, return remapId: " + remapId);
        return remapId;
    } else {
        // Case 4: same account already has this cloud contact - compare versions.
        if (cloud_version > loc_version) { //case 4.b.1
            if (DEBUG_IMPORT)
                LogUtil.log("case 4, > loc_version: " + loc_version + ", return contact_id: " + contact_id);
            return contact_id;
        } else {
            if (cloud_version == loc_version) { //case 4.b.2
                if (DEBUG_IMPORT)
                    LogUtil.log("case 4, = loc_version: " + loc_version + ", return 0");
                return 0L;
            } else { // case 4.b.3
                if (DEBUG_IMPORT)
                    LogUtil.log("case 4, < loc_version: " + loc_version + ", return " + Long.MIN_VALUE);
                return Long.MIN_VALUE;
            }
        }
    }
}
From source file:ai.aitia.meme.paramsweep.intellisweepPlugin.JgapGAPlugin.java
private ParameterOrGene initializeUserObjectFromGene(final Element geneElement, final ParameterInfo info) throws WizardLoadingException { final String geneType = geneElement.getAttribute(WizardSettingsManager.TYPE); if (geneType == null || geneType.trim().isEmpty()) { throw new WizardLoadingException(true, "missing attribute '" + WizardSettingsManager.TYPE + "' at node: " + GENE); }//from w w w . j a v a 2 s . c om if (GeneInfo.INTERVAL.equals(geneType.trim())) { final String isIntegerStr = geneElement.getAttribute(IS_INTEGER); if (isIntegerStr == null || isIntegerStr.trim().isEmpty()) { throw new WizardLoadingException(true, "missing attribute '" + IS_INTEGER + "' at node: " + GENE); } final boolean isInteger = Boolean.parseBoolean(isIntegerStr.trim()); final String minStr = geneElement.getAttribute(MIN_VALUE); if (minStr == null || minStr.trim().isEmpty()) { throw new WizardLoadingException(true, "missing attribute '" + MIN_VALUE + "' at node: " + GENE); } final String maxStr = geneElement.getAttribute(MAX_VALUE); if (maxStr == null || maxStr.trim().isEmpty()) { throw new WizardLoadingException(true, "missing attribute '" + MAX_VALUE + "' at node: " + GENE); } if (isInteger) { long min = Long.MIN_VALUE; try { min = Long.parseLong(minStr); } catch (final NumberFormatException e) { throw new WizardLoadingException(true, "invalid attribute value (" + MIN_VALUE + "=" + minStr + ") at node: " + GENE + " (expected: integer number)"); } long max = Long.MAX_VALUE; try { max = Long.parseLong(maxStr); } catch (final NumberFormatException e) { throw new WizardLoadingException(true, "invalid attribute value (" + MAX_VALUE + "=" + maxStr + ") at node: " + GENE + " (expected: integer number)"); } return new ParameterOrGene(info, min, max); } else { double min = -Double.MAX_VALUE; try { min = Double.parseDouble(minStr); } catch (final NumberFormatException e) { throw new WizardLoadingException(true, "invalid attribute value (" + MIN_VALUE + "=" + minStr + ") at node: " + GENE + 
" (expected: real number)"); } double max = Double.MAX_VALUE; try { max = Double.parseDouble(maxStr); } catch (final NumberFormatException e) { throw new WizardLoadingException(true, "invalid attribute value (" + MAX_VALUE + "=" + maxStr + ") at node: " + GENE + " (expected: real number)"); } return new ParameterOrGene(info, min, max); } } else if (GeneInfo.LIST.equals(geneType.trim())) { final NodeList nl = geneElement.getElementsByTagName(LIST_VALUE); if (nl != null && nl.getLength() > 0) { final List<Object> valueList = new ArrayList<>(nl.getLength()); for (int i = 0; i < nl.getLength(); ++i) { final Element element = (Element) nl.item(i); final NodeList content = element.getChildNodes(); if (content == null || content.getLength() == 0) { throw new WizardLoadingException(true, "missing content at node: " + LIST_VALUE); } final String strValue = ((Text) content.item(0)).getNodeValue().trim(); if (info instanceof MasonChooserParameterInfo) { try { valueList.add(new Integer(strValue)); } catch (final NumberFormatException e) { throw new WizardLoadingException(true, "invalid content (" + strValue + ") at node: " + LIST_VALUE + " (expected: integer number)"); } } else { valueList.add(parseListElement(info.getJavaType(), strValue)); } } return new ParameterOrGene(info, valueList); } else { throw new WizardLoadingException(true, "missing node: " + LIST_VALUE); } } else if (GeneInfo.BOOLEAN.equals(geneType.trim())) { ParameterOrGene result = new ParameterOrGene(info); result.setBooleanGene(); return result; } else { throw new WizardLoadingException(true, "invalid attribute value (" + WizardSettingsManager.TYPE + "=" + geneType.trim() + ") at node: " + GENE); } }