List of usage examples for java.util.Vector.firstElement()
public synchronized E firstElement()
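firstElement() returns the component at index 0 without removing it, and throws NoSuchElementException if the vector is empty, so callers typically guard the call with an isEmpty() or size() check, as most of the examples below do. A minimal self-contained sketch of both behaviors (the class and variable names are illustrative, not taken from the examples below):

import java.util.NoSuchElementException;
import java.util.Vector;

public class FirstElementDemo {
    public static void main(String[] args) {
        Vector<String> names = new Vector<>();
        names.addElement("alpha");
        names.addElement("beta");
        // returns the component at index 0 without removing it
        System.out.println(names.firstElement()); // prints "alpha"

        // an empty vector has no first element
        Vector<String> empty = new Vector<>();
        try {
            empty.firstElement();
        } catch (NoSuchElementException e) {
            System.out.println("empty vector: " + e);
        }
    }
}

Unlike get(0), firstElement() is part of Vector's legacy Enumeration-era API; both methods are synchronized on the vector instance.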
From source file: org.red5.io.mp4.MP4Reader.java
/**
 * This handles the moov atom being at the beginning or end of the file, so the mdat may also
 * be before or after the moov atom.
 */
public void decodeHeader() {
    try {
        // the first atom will/should be the type
        MP4Atom type = MP4Atom.createAtom(fis);
        // expect ftyp
        log.debug("Type {}", MP4Atom.intToType(type.getType()));
        //log.debug("Atom int types - free={} wide={}", MP4Atom.typeToInt("free"), MP4Atom.typeToInt("wide"));
        // keep a running count of the number of atoms found at the "top" levels
        int topAtoms = 0;
        // we want a moov and an mdat, anything else throw the invalid file type error
        while (topAtoms < 2) {
            MP4Atom atom = MP4Atom.createAtom(fis);
            switch (atom.getType()) {
            case 1836019574: //moov
                topAtoms++;
                MP4Atom moov = atom;
                // expect moov
                log.debug("Type {}", MP4Atom.intToType(moov.getType()));
                log.debug("moov children: {}", moov.getChildren());
                moovOffset = fis.getOffset() - moov.getSize();
                MP4Atom mvhd = moov.lookup(MP4Atom.typeToInt("mvhd"), 0);
                if (mvhd != null) {
                    log.debug("Movie header atom found");
                    //get the initial timescale
                    timeScale = mvhd.getTimeScale();
                    duration = mvhd.getDuration();
                    log.debug("Time scale {} Duration {}", timeScale, duration);
                }
                /* nothing needed here yet
                MP4Atom meta = moov.lookup(MP4Atom.typeToInt("meta"), 0);
                if (meta != null) {
                    log.debug("Meta atom found");
                    log.debug("{}", ToStringBuilder.reflectionToString(meta));
                }
                */
                //we would like to have two tracks, but it shouldn't be a requirement
                int loops = 0;
                int tracks = 0;
                do {
                    MP4Atom trak = moov.lookup(MP4Atom.typeToInt("trak"), loops);
                    if (trak != null) {
                        log.debug("Track atom found");
                        log.debug("trak children: {}", trak.getChildren());
                        // trak: tkhd, edts, mdia
                        MP4Atom tkhd = trak.lookup(MP4Atom.typeToInt("tkhd"), 0);
                        if (tkhd != null) {
                            log.debug("Track header atom found");
                            log.debug("tkhd children: {}", tkhd.getChildren());
                            if (tkhd.getWidth() > 0) {
                                width = tkhd.getWidth();
                                height = tkhd.getHeight();
                                log.debug("Width {} x Height {}", width, height);
                            }
                        }
                        MP4Atom edts = trak.lookup(MP4Atom.typeToInt("edts"), 0);
                        if (edts != null) {
                            log.debug("Edit atom found");
                            log.debug("edts children: {}", edts.getChildren());
                            //log.debug("Width {} x Height {}", edts.getWidth(), edts.getHeight());
                        }
                        MP4Atom mdia = trak.lookup(MP4Atom.typeToInt("mdia"), 0);
                        if (mdia != null) {
                            log.debug("Media atom found");
                            // mdia: mdhd, hdlr, minf
                            int scale = 0;
                            //get the media header atom
                            MP4Atom mdhd = mdia.lookup(MP4Atom.typeToInt("mdhd"), 0);
                            if (mdhd != null) {
                                log.debug("Media data header atom found");
                                //this will be for either video or audio depending media info
                                scale = mdhd.getTimeScale();
                                log.debug("Time scale {}", scale);
                            }
                            MP4Atom hdlr = mdia.lookup(MP4Atom.typeToInt("hdlr"), 0);
                            if (hdlr != null) {
                                log.debug("Handler ref atom found");
                                // soun or vide
                                log.debug("Handler type: {}", MP4Atom.intToType(hdlr.getHandlerType()));
                                String hdlrType = MP4Atom.intToType(hdlr.getHandlerType());
                                if ("vide".equals(hdlrType)) {
                                    hasVideo = true;
                                    if (scale > 0) {
                                        videoTimeScale = scale * 1.0;
                                        log.debug("Video time scale: {}", videoTimeScale);
                                    }
                                } else if ("soun".equals(hdlrType)) {
                                    hasAudio = true;
                                    if (scale > 0) {
                                        audioTimeScale = scale * 1.0;
                                        log.debug("Audio time scale: {}", audioTimeScale);
                                    }
                                }
                                tracks++;
                            }
                            MP4Atom minf = mdia.lookup(MP4Atom.typeToInt("minf"), 0);
                            if (minf != null) {
                                log.debug("Media info atom found");
                                // minf: (audio) smhd, dinf, stbl / (video) vmhd, dinf, stbl
                                MP4Atom smhd = minf.lookup(MP4Atom.typeToInt("smhd"), 0);
                                if (smhd != null) {
                                    log.debug("Sound header atom found");
                                    MP4Atom dinf = minf.lookup(MP4Atom.typeToInt("dinf"), 0);
                                    if (dinf != null) {
                                        log.debug("Data info atom found");
                                        // dinf: dref
                                        log.debug("Sound dinf children: {}", dinf.getChildren());
                                        MP4Atom dref = dinf.lookup(MP4Atom.typeToInt("dref"), 0);
                                        if (dref != null) {
                                            log.debug("Data reference atom found");
                                        }
                                    }
                                    MP4Atom stbl = minf.lookup(MP4Atom.typeToInt("stbl"), 0);
                                    if (stbl != null) {
                                        log.debug("Sample table atom found");
                                        // stbl: stsd, stts, stss, stsc, stsz, stco, stsh
                                        log.debug("Sound stbl children: {}", stbl.getChildren());
                                        // stsd - sample description
                                        // stts - time to sample
                                        // stsc - sample to chunk
                                        // stsz - sample size
                                        // stco - chunk offset
                                        //stsd - has codec child
                                        MP4Atom stsd = stbl.lookup(MP4Atom.typeToInt("stsd"), 0);
                                        if (stsd != null) {
                                            //stsd: mp4a
                                            log.debug("Sample description atom found");
                                            MP4Atom mp4a = stsd.getChildren().get(0);
                                            //could set the audio codec here
                                            setAudioCodecId(MP4Atom.intToType(mp4a.getType()));
                                            //log.debug("{}", ToStringBuilder.reflectionToString(mp4a));
                                            log.debug("Sample size: {}", mp4a.getSampleSize());
                                            int ats = mp4a.getTimeScale();
                                            //skip invalid audio time scale
                                            if (ats > 0) {
                                                audioTimeScale = ats * 1.0;
                                            }
                                            audioChannels = mp4a.getChannelCount();
                                            log.debug("Sample rate (audio time scale): {}", audioTimeScale);
                                            log.debug("Channels: {}", audioChannels);
                                            //mp4a: esds
                                            if (mp4a.getChildren().size() > 0) {
                                                log.debug("Elementary stream descriptor atom found");
                                                MP4Atom esds = mp4a.getChildren().get(0);
                                                log.debug("{}", ToStringBuilder.reflectionToString(esds));
                                                MP4Descriptor descriptor = esds.getEsd_descriptor();
                                                log.debug("{}", ToStringBuilder.reflectionToString(descriptor));
                                                if (descriptor != null) {
                                                    Vector<MP4Descriptor> children = descriptor.getChildren();
                                                    for (int e = 0; e < children.size(); e++) {
                                                        MP4Descriptor descr = children.get(e);
                                                        log.debug("{}", ToStringBuilder.reflectionToString(descr));
                                                        if (descr.getChildren().size() > 0) {
                                                            Vector<MP4Descriptor> children2 = descr.getChildren();
                                                            for (int e2 = 0; e2 < children2.size(); e2++) {
                                                                MP4Descriptor descr2 = children2.get(e2);
                                                                log.debug("{}", ToStringBuilder.reflectionToString(descr2));
                                                                if (descr2.getType() == MP4Descriptor.MP4DecSpecificInfoDescriptorTag) {
                                                                    //we only want the MP4DecSpecificInfoDescriptorTag
                                                                    audioDecoderBytes = descr2.getDSID();
                                                                    //compare the bytes to get the aacaot/aottype
                                                                    //match first byte
                                                                    switch (audioDecoderBytes[0]) {
                                                                    case 0x12:
                                                                    default:
                                                                        //AAC LC - 12 10
                                                                        audioCodecType = 1;
                                                                        break;
                                                                    case 0x0a:
                                                                        //AAC Main - 0A 10
                                                                        audioCodecType = 0;
                                                                        break;
                                                                    case 0x11:
                                                                    case 0x13:
                                                                        //AAC LC SBR - 11 90 & 13 xx
                                                                        audioCodecType = 2;
                                                                        break;
                                                                    }
                                                                    //we want to break out of top level for loop
                                                                    e = 99;
                                                                    break;
                                                                }
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                        //stsc - has Records
                                        MP4Atom stsc = stbl.lookup(MP4Atom.typeToInt("stsc"), 0);
                                        if (stsc != null) {
                                            log.debug("Sample to chunk atom found");
                                            audioSamplesToChunks = stsc.getRecords();
                                            log.debug("Record count: {}", audioSamplesToChunks.size());
                                            MP4Atom.Record rec = audioSamplesToChunks.firstElement();
                                            log.debug("Record data: Description index={} Samples per chunk={}",
                                                    rec.getSampleDescriptionIndex(), rec.getSamplesPerChunk());
                                        }
                                        //stsz - has Samples
                                        MP4Atom stsz = stbl.lookup(MP4Atom.typeToInt("stsz"), 0);
                                        if (stsz != null) {
                                            log.debug("Sample size atom found");
                                            audioSamples = stsz.getSamples();
                                            //vector full of integers
                                            log.debug("Sample size: {}", stsz.getSampleSize());
                                            log.debug("Sample count: {}", audioSamples.size());
                                        }
                                        //stco - has Chunks
                                        MP4Atom stco = stbl.lookup(MP4Atom.typeToInt("stco"), 0);
                                        if (stco != null) {
                                            log.debug("Chunk offset atom found");
                                            //vector full of integers
                                            audioChunkOffsets = stco.getChunks();
                                            log.debug("Chunk count: {}", audioChunkOffsets.size());
                                        }
                                        //stts - has TimeSampleRecords
                                        MP4Atom stts = stbl.lookup(MP4Atom.typeToInt("stts"), 0);
                                        if (stts != null) {
                                            log.debug("Time to sample atom found");
                                            Vector<MP4Atom.TimeSampleRecord> records = stts.getTimeToSamplesRecords();
                                            log.debug("Record count: {}", records.size());
                                            MP4Atom.TimeSampleRecord rec = records.firstElement();
                                            log.debug("Record data: Consecutive samples={} Duration={}",
                                                    rec.getConsecutiveSamples(), rec.getSampleDuration());
                                            //if we have 1 record then all samples have the same duration
                                            if (records.size() > 1) {
                                                //TODO: handle audio samples with varying durations
                                                log.info("Audio samples have differing durations, audio playback may fail");
                                            }
                                            audioSampleDuration = rec.getSampleDuration();
                                        }
                                    }
                                }
                                MP4Atom vmhd = minf.lookup(MP4Atom.typeToInt("vmhd"), 0);
                                if (vmhd != null) {
                                    log.debug("Video header atom found");
                                    MP4Atom dinf = minf.lookup(MP4Atom.typeToInt("dinf"), 0);
                                    if (dinf != null) {
                                        log.debug("Data info atom found");
                                        // dinf: dref
                                        log.debug("Video dinf children: {}", dinf.getChildren());
                                        MP4Atom dref = dinf.lookup(MP4Atom.typeToInt("dref"), 0);
                                        if (dref != null) {
                                            log.debug("Data reference atom found");
                                        }
                                    }
                                    MP4Atom stbl = minf.lookup(MP4Atom.typeToInt("stbl"), 0);
                                    if (stbl != null) {
                                        log.debug("Sample table atom found");
                                        // stbl: stsd, stts, stss, stsc, stsz, stco, stsh
                                        log.debug("Video stbl children: {}", stbl.getChildren());
                                        // stsd - sample description
                                        // stts - (decoding) time to sample
                                        // stsc - sample to chunk
                                        // stsz - sample size
                                        // stco - chunk offset
                                        // ctts - (composition) time to sample
                                        // stss - sync sample
                                        // sdtp - independent and disposable samples
                                        //stsd - has codec child
                                        MP4Atom stsd = stbl.lookup(MP4Atom.typeToInt("stsd"), 0);
                                        if (stsd != null) {
                                            log.debug("Sample description atom found");
                                            log.debug("Sample description (video) stsd children: {}", stsd.getChildren());
                                            MP4Atom avc1 = stsd.lookup(MP4Atom.typeToInt("avc1"), 0);
                                            if (avc1 != null) {
                                                log.debug("AVC1 children: {}", avc1.getChildren());
                                                //set the video codec here - may be avc1 or mp4v
                                                setVideoCodecId(MP4Atom.intToType(avc1.getType()));
                                                //video decoder config
                                                //TODO may need to be generic later
                                                MP4Atom codecChild = avc1.lookup(MP4Atom.typeToInt("avcC"), 0);
                                                if (codecChild != null) {
                                                    avcLevel = codecChild.getAvcLevel();
                                                    log.debug("AVC level: {}", avcLevel);
                                                    avcProfile = codecChild.getAvcProfile();
                                                    log.debug("AVC Profile: {}", avcProfile);
                                                    log.debug("AVCC size: {}", codecChild.getSize());
                                                    videoDecoderBytes = codecChild.getVideoConfigBytes();
                                                    log.debug("Video config bytes: {}",
                                                            ToStringBuilder.reflectionToString(videoDecoderBytes));
                                                } else {
                                                    //quicktime and ipods use a pixel aspect atom
                                                    //since we have no avcC check for this and avcC may
                                                    //be a child
                                                    MP4Atom pasp = avc1.lookup(MP4Atom.typeToInt("pasp"), 0);
                                                    if (pasp != null) {
                                                        log.debug("PASP children: {}", pasp.getChildren());
                                                        codecChild = pasp.lookup(MP4Atom.typeToInt("avcC"), 0);
                                                        if (codecChild != null) {
                                                            avcLevel = codecChild.getAvcLevel();
                                                            log.debug("AVC level: {}", avcLevel);
                                                            avcProfile = codecChild.getAvcProfile();
                                                            log.debug("AVC Profile: {}", avcProfile);
                                                            log.debug("AVCC size: {}", codecChild.getSize());
                                                            videoDecoderBytes = codecChild.getVideoConfigBytes();
                                                            log.debug("Video config bytes: {}",
                                                                    ToStringBuilder.reflectionToString(videoDecoderBytes));
                                                        }
                                                    }
                                                }
                                            } else {
                                                //look for mp4v
                                                MP4Atom mp4v = stsd.lookup(MP4Atom.typeToInt("mp4v"), 0);
                                                if (mp4v != null) {
                                                    log.debug("MP4V children: {}", mp4v.getChildren());
                                                    //set the video codec here - may be avc1 or mp4v
                                                    setVideoCodecId(MP4Atom.intToType(mp4v.getType()));
                                                    //look for esds
                                                    MP4Atom codecChild = mp4v.lookup(MP4Atom.typeToInt("esds"), 0);
                                                    if (codecChild != null) {
                                                        //look for descriptors
                                                        MP4Descriptor descriptor = codecChild.getEsd_descriptor();
                                                        log.debug("{}", ToStringBuilder.reflectionToString(descriptor));
                                                        if (descriptor != null) {
                                                            Vector<MP4Descriptor> children = descriptor.getChildren();
                                                            for (int e = 0; e < children.size(); e++) {
                                                                MP4Descriptor descr = children.get(e);
                                                                log.debug("{}", ToStringBuilder.reflectionToString(descr));
                                                                if (descr.getChildren().size() > 0) {
                                                                    Vector<MP4Descriptor> children2 = descr.getChildren();
                                                                    for (int e2 = 0; e2 < children2.size(); e2++) {
                                                                        MP4Descriptor descr2 = children2.get(e2);
                                                                        log.debug("{}", ToStringBuilder.reflectionToString(descr2));
                                                                        if (descr2.getType() == MP4Descriptor.MP4DecSpecificInfoDescriptorTag) {
                                                                            //we only want the MP4DecSpecificInfoDescriptorTag
                                                                            videoDecoderBytes = new byte[descr2.getDSID().length - 8];
                                                                            System.arraycopy(descr2.getDSID(), 8, videoDecoderBytes, 0,
                                                                                    videoDecoderBytes.length);
                                                                            log.debug("Video config bytes: {}",
                                                                                    ToStringBuilder.reflectionToString(videoDecoderBytes));
                                                                            //we want to break out of top level for loop
                                                                            e = 99;
                                                                            break;
                                                                        }
                                                                    }
                                                                }
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                            log.debug("{}", ToStringBuilder.reflectionToString(avc1));
                                        }
                                        //stsc - has Records
                                        MP4Atom stsc = stbl.lookup(MP4Atom.typeToInt("stsc"), 0);
                                        if (stsc != null) {
                                            log.debug("Sample to chunk atom found");
                                            videoSamplesToChunks = stsc.getRecords();
                                            log.debug("Record count: {}", videoSamplesToChunks.size());
                                            MP4Atom.Record rec = videoSamplesToChunks.firstElement();
                                            log.debug("Record data: Description index={} Samples per chunk={}",
                                                    rec.getSampleDescriptionIndex(), rec.getSamplesPerChunk());
                                        }
                                        //stsz - has Samples
                                        MP4Atom stsz = stbl.lookup(MP4Atom.typeToInt("stsz"), 0);
                                        if (stsz != null) {
                                            log.debug("Sample size atom found");
                                            //vector full of integers
                                            videoSamples = stsz.getSamples();
                                            //if sample size is 0 then the table must be checked due
                                            //to variable sample sizes
                                            log.debug("Sample size: {}", stsz.getSampleSize());
                                            videoSampleCount = videoSamples.size();
                                            log.debug("Sample count: {}", videoSampleCount);
                                        }
                                        //stco - has Chunks
                                        MP4Atom stco = stbl.lookup(MP4Atom.typeToInt("stco"), 0);
                                        if (stco != null) {
                                            log.debug("Chunk offset atom found");
                                            //vector full of integers
                                            videoChunkOffsets = stco.getChunks();
                                            log.debug("Chunk count: {}", videoChunkOffsets.size());
                                        }
                                        //stss - has Sync - no sync means all samples are keyframes
                                        MP4Atom stss = stbl.lookup(MP4Atom.typeToInt("stss"), 0);
                                        if (stss != null) {
                                            log.debug("Sync sample atom found");
                                            //vector full of integers
                                            syncSamples = stss.getSyncSamples();
                                            log.debug("Keyframes: {}", syncSamples.size());
                                        }
                                        //stts - has TimeSampleRecords
                                        MP4Atom stts = stbl.lookup(MP4Atom.typeToInt("stts"), 0);
                                        if (stts != null) {
                                            log.debug("Time to sample atom found");
                                            Vector<MP4Atom.TimeSampleRecord> records = stts.getTimeToSamplesRecords();
                                            log.debug("Record count: {}", records.size());
                                            MP4Atom.TimeSampleRecord rec = records.firstElement();
                                            log.debug("Record data: Consecutive samples={} Duration={}",
                                                    rec.getConsecutiveSamples(), rec.getSampleDuration());
                                            //if we have 1 record then all samples have the same duration
                                            if (records.size() > 1) {
                                                //TODO: handle video samples with varying durations
                                                log.info("Video samples have differing durations, video playback may fail");
                                            }
                                            videoSampleDuration = rec.getSampleDuration();
                                        }
                                        //ctts - (composition) time to sample
                                        MP4Atom ctts = stbl.lookup(MP4Atom.typeToInt("ctts"), 0);
                                        if (ctts != null) {
                                            log.debug("Composition time to sample atom found");
                                            //vector full of integers
                                            compositionTimes = ctts.getCompositionTimeToSamplesRecords();
                                            log.debug("Record count: {}", compositionTimes.size());
                                            if (log.isTraceEnabled()) {
                                                for (CompositionTimeSampleRecord rec : compositionTimes) {
                                                    double offset = rec.getSampleOffset();
                                                    if (scale > 0d) {
                                                        offset = (offset / (double) scale) * 1000.0;
                                                        rec.setSampleOffset((int) offset);
                                                    }
                                                    log.trace("Record data: Consecutive samples={} Offset={}",
                                                            rec.getConsecutiveSamples(), rec.getSampleOffset());
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                    loops++;
                } while (loops < 3);
                log.trace("Busted out of track loop with {} tracks after {} loops", tracks, loops);
                //calculate FPS
                fps = (videoSampleCount * timeScale) / (double) duration;
                log.debug("FPS calc: ({} * {}) / {}", new Object[] { videoSampleCount, timeScale, duration });
                log.debug("FPS: {}", fps);
                //real duration
                StringBuilder sb = new StringBuilder();
                double videoTime = ((double) duration / (double) timeScale);
                log.debug("Video time: {}", videoTime);
                int minutes = (int) (videoTime / 60);
                if (minutes > 0) {
                    sb.append(minutes);
                    sb.append('.');
                }
                //formatter for seconds / millis
                NumberFormat df = DecimalFormat.getInstance();
                df.setMaximumFractionDigits(2);
                sb.append(df.format((videoTime % 60)));
                formattedDuration = sb.toString();
                log.debug("Time: {}", formattedDuration);
                break;
            case 1835295092: //mdat
                topAtoms++;
                long dataSize = 0L;
                MP4Atom mdat = atom;
                dataSize = mdat.getSize();
                log.debug("{}", ToStringBuilder.reflectionToString(mdat));
                mdatOffset = fis.getOffset() - dataSize;
                log.debug("File size: {} mdat size: {}", file.length(), dataSize);
                break;
            case 1718773093: //free
            case 2003395685: //wide
                break;
            default:
                log.warn("Unexpected atom: {}", MP4Atom.intToType(atom.getType()));
            }
        }
        //add the tag name (size) to the offsets
        moovOffset += 8;
        mdatOffset += 8;
        log.debug("Offsets moov: {} mdat: {}", moovOffset, mdatOffset);
    } catch (IOException e) {
        log.error("Exception decoding header / atoms", e);
    }
}
From source file: alice.tuprolog.lib.OOLibrary.java
private static Constructor<?> lookupConstructor(Class<?> target, Class<?>[] argClasses, Object[] argValues)
        throws NoSuchMethodException {
    // first try for exact match
    try {
        return target.getConstructor(argClasses);
    } catch (NoSuchMethodException e) {
        if (argClasses.length == 0) {
            // if no args & no exact match, out of luck
            return null;
        }
    }
    // go the more complicated route
    Constructor<?>[] constructors = target.getConstructors();
    Vector<Constructor<?>> goodConstructors = new Vector<>();
    for (int i = 0; i != constructors.length; i++) {
        if (matchClasses(constructors[i].getParameterTypes(), argClasses))
            goodConstructors.addElement(constructors[i]);
    }
    switch (goodConstructors.size()) {
    case 0:
        // no constructors have been found checking for assignability
        // and (int -> long) conversion. One last chance:
        // looking for compatible methods considering also
        // type conversions:
        // double --> float
        // (the first found is used - no most specific
        // method algorithm is applied )
        for (int i = 0; i != constructors.length; i++) {
            Class<?>[] types = constructors[i].getParameterTypes();
            Object[] val = matchClasses(types, argClasses, argValues);
            if (val != null) {
                // found a method compatible after type conversions
                for (int j = 0; j < types.length; j++) {
                    argClasses[j] = types[j];
                    argValues[j] = val[j];
                }
                return constructors[i];
            }
        }
        return null;
    case 1:
        return goodConstructors.firstElement();
    default:
        return mostSpecificConstructor(goodConstructors);
    }
}
From source file: alice.tuprolog.lib.OOLibrary.java
private static Method lookupMethod(Class<?> target, String name, Class<?>[] argClasses, Object[] argValues)
        throws NoSuchMethodException {
    // first try for exact match
    try {
        Method m = target.getMethod(name, argClasses);
        return m;
    } catch (NoSuchMethodException e) {
        if (argClasses.length == 0) {
            // if no args & no exact match, out of luck
            return null;
        }
    }
    // go the more complicated route
    Method[] methods = target.getMethods();
    Vector<Method> goodMethods = new Vector<>();
    for (int i = 0; i != methods.length; i++) {
        if (name.equals(methods[i].getName()) && matchClasses(methods[i].getParameterTypes(), argClasses))
            goodMethods.addElement(methods[i]);
    }
    switch (goodMethods.size()) {
    case 0:
        // no methods have been found checking for assignability
        // and (int -> long) conversion. One last chance:
        // looking for compatible methods considering also
        // type conversions:
        // double --> float
        // (the first found is used - no most specific
        // method algorithm is applied )
        for (int i = 0; i != methods.length; i++) {
            if (name.equals(methods[i].getName())) {
                Class<?>[] types = methods[i].getParameterTypes();
                Object[] val = matchClasses(types, argClasses, argValues);
                if (val != null) {
                    // found a method compatible after type conversions
                    for (int j = 0; j < types.length; j++) {
                        argClasses[j] = types[j];
                        argValues[j] = val[j];
                    }
                    return methods[i];
                }
            }
        }
        return null;
    case 1:
        return goodMethods.firstElement();
    default:
        return mostSpecificMethod(goodMethods);
    }
}
From source file: nzilbb.csv.CsvDeserializer.java
/**
 * Loads the serialized form of the graph, using the given set of named streams.
 * @param streams A list of named streams that contain all the
 * transcription/annotation data required, and possibly (a) stream(s) for the media annotated.
 * @param schema The layer schema, defining layers and the way they interrelate.
 * @return A list of parameters that require setting before {@link IDeserializer#deserialize()}
 * can be invoked. This may be an empty list, and may include parameters with the value already
 * set to a workable default. If there are parameters, and user interaction is possible, then
 * the user may be presented with an interface for setting/confirming these parameters, before
 * they are then passed to {@link IDeserializer#setParameters(ParameterSet)}.
 * @throws SerializationException If the graph could not be loaded.
 * @throws IOException On IO error.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public ParameterSet load(NamedStream[] streams, Schema schema) throws SerializationException, IOException {
    // take the first stream, ignore all others.
    NamedStream csv = Utility.FindSingleStream(streams, ".csv", "text/csv");
    if (csv == null)
        throw new SerializationException("No CSV stream found");
    setName(csv.getName());
    setSchema(schema);
    // create a list of layers we need and possible matching layer names
    LinkedHashMap<Parameter, List<String>> layerToPossibilities = new LinkedHashMap<Parameter, List<String>>();
    HashMap<String, LinkedHashMap<String, Layer>> layerToCandidates = new HashMap<String, LinkedHashMap<String, Layer>>();
    LinkedHashMap<String, Layer> metadataLayers = new LinkedHashMap<String, Layer>();
    for (Layer layer : schema.getRoot().getChildren().values()) {
        if (layer.getAlignment() == Constants.ALIGNMENT_NONE) {
            metadataLayers.put(layer.getId(), layer);
        }
    } // next turn child layer
    // look for person attributes
    for (Layer layer : schema.getParticipantLayer().getChildren().values()) {
        if (layer.getAlignment() == Constants.ALIGNMENT_NONE) {
            metadataLayers.put(layer.getId(), layer);
        }
    } // next turn child layer
    LinkedHashMap<String, Layer> utteranceAndMetadataLayers = new LinkedHashMap<String, Layer>(metadataLayers);
    utteranceAndMetadataLayers.put(getUtteranceLayer().getId(), getUtteranceLayer());
    LinkedHashMap<String, Layer> whoAndMetadataLayers = new LinkedHashMap<String, Layer>(metadataLayers);
    whoAndMetadataLayers.put(getParticipantLayer().getId(), getParticipantLayer());
    // read the header line
    setParser(CSVParser.parse(csv.getStream(), java.nio.charset.Charset.forName("UTF-8"),
            CSVFormat.EXCEL.withHeader()));
    setHeaderMap(parser.getHeaderMap());
    Vector<String> possibleIDHeaders = new Vector<String>();
    Vector<String> possibleUtteranceHeaders = new Vector<String>();
    Vector<String> possibleParticipantHeaders = new Vector<String>();
    for (String header : getHeaderMap().keySet()) {
        if (header.trim().length() == 0)
            continue;
        Vector<String> possibleMatches = new Vector<String>();
        possibleMatches.add("transcript" + header);
        possibleMatches.add("participant" + header);
        possibleMatches.add("speaker" + header);
        possibleMatches.add(header);
        // special cases
        if (header.equalsIgnoreCase("id") || header.equalsIgnoreCase("transcript")) {
            possibleIDHeaders.add(header);
        } else if (header.equalsIgnoreCase("text") || header.equalsIgnoreCase("document")) {
            possibleUtteranceHeaders.add(header);
        } else if (header.equalsIgnoreCase("name") || header.equalsIgnoreCase("participant")
                || header.equalsIgnoreCase("participantid")) {
            possibleParticipantHeaders.add(header);
        }
        layerToPossibilities.put(new Parameter("header_" + getHeaderMap().get(header), Layer.class, header),
                possibleMatches);
        layerToCandidates.put("header_" + getHeaderMap().get(header), metadataLayers);
    } // next header
    ParameterSet parameters = new ParameterSet();
    // add utterance/participant parameters
    int defaultUtterancePossibilityIndex = 0;
    // if there are no obvious participant column possibilities...
    Parameter idColumn = new Parameter("id", String.class, "ID Column", "Column containing the ID of the text.",
            false);
    if (possibleIDHeaders.size() == 0) {
        // ...include all columns
        possibleIDHeaders.addAll(getHeaderMap().keySet());
    } else {
        idColumn.setValue(possibleIDHeaders.firstElement());
    }
    idColumn.setPossibleValues(possibleIDHeaders);
    parameters.addParameter(idColumn);
    // if there are no obvious participant column possibilities...
    if (possibleParticipantHeaders.size() == 0) {
        // ...include all columns
        possibleParticipantHeaders.addAll(getHeaderMap().keySet());
        // default participant column will be the first column,
        // so default utterance should be the second (if we didn't find obvious possible text column)
        if (possibleParticipantHeaders.size() > 1) // but only if there's more than one column
        {
            defaultUtterancePossibilityIndex = 1;
        }
    }
    Parameter participantColumn = new Parameter("who", "Participant Column",
            "Column containing the ID of the author of the text.", true,
            possibleParticipantHeaders.firstElement());
    participantColumn.setPossibleValues(possibleParticipantHeaders);
    parameters.addParameter(participantColumn);
    // if there are no obvious text column possibilities...
    if (possibleUtteranceHeaders.size() == 0) {
        // ...include all columns
        possibleUtteranceHeaders.addAll(getHeaderMap().keySet());
    } else {
        // we found a possible text column, so run with it regardless of whether we also found
        // a possible participant column
        defaultUtterancePossibilityIndex = 0;
    }
    Parameter utteranceColumn = new Parameter("text", "Text Column", "Column containing the transcript text.",
            true, possibleUtteranceHeaders.elementAt(defaultUtterancePossibilityIndex));
    utteranceColumn.setPossibleValues(possibleUtteranceHeaders);
    parameters.addParameter(utteranceColumn);
    // add column-mapping parameters, and set possible/default values
    for (Parameter p : layerToPossibilities.keySet()) {
        List<String> possibleNames = layerToPossibilities.get(p);
        LinkedHashMap<String, Layer> candidateLayers = layerToCandidates.get(p.getName());
        parameters.addParameter(p);
        if (p.getValue() == null && candidateLayers != null && possibleNames != null) {
            p.setValue(Utility.FindLayerById(candidateLayers, possibleNames));
        }
        if (p.getPossibleValues() == null && candidateLayers != null) {
            p.setPossibleValues(candidateLayers.values());
        }
    }
    return parameters;
}
From source file: edu.umn.cs.sthadoop.operations.STRangeQuery.java
public static void rangeQueryOperation(OperationsParams parameters) throws Exception {
    final OperationsParams params = parameters;
    final Path[] paths = params.getPaths();
    if (paths.length <= 1 && !params.checkInput()) {
        printUsage();
        System.exit(1);
    }
    if (paths.length >= 2 && !params.checkInputOutput()) {
        printUsage();
        System.exit(1);
    }
    if (params.get("rect") == null) {
        String x1 = "-" + Double.toString(Double.MAX_VALUE);
        String y1 = "-" + Double.toString(Double.MAX_VALUE);
        String x2 = Double.toString(Double.MAX_VALUE);
        String y2 = Double.toString(Double.MAX_VALUE);
        System.out.println(x1 + "," + y1 + "," + x2 + "," + y2);
        params.set("rect", x1 + "," + y1 + "," + x2 + "," + y2);
        // System.err.println("You must provide a query range");
        // printUsage();
        // System.exit(1);
    }
    if (params.get("interval") == null) {
        System.err.println("Temporal range missing");
        printUsage();
        System.exit(1);
    }
    TextSerializable inObj = params.getShape("shape");
    if (!(inObj instanceof STPoint) && !(inObj instanceof STRectangle)) {
        LOG.error("Shape is not instance of STPoint or STRectangle");
        printUsage();
        System.exit(1);
    }
    // Get spatio-temporal slices.
    List<Path> STPaths = getIndexedSlices(params);
    final Path outPath = params.getOutputPath();
    final Rectangle[] queryRanges = params.getShapes("rect", new Rectangle());
    // All running jobs
    final Vector<Long> resultsCounts = new Vector<Long>();
    Vector<Job> jobs = new Vector<Job>();
    Vector<Thread> threads = new Vector<Thread>();
    long t1 = System.currentTimeMillis();
    for (Path stPath : STPaths) {
        final Path inPath = stPath;
        for (int i = 0; i < queryRanges.length; i++) {
            final OperationsParams queryParams = new OperationsParams(params);
            OperationsParams.setShape(queryParams, "rect", queryRanges[i]);
            if (OperationsParams.isLocal(new JobConf(queryParams), inPath)) {
                // Run in local mode
                final Rectangle queryRange = queryRanges[i];
                final Shape shape = queryParams.getShape("shape");
                final Path output = outPath == null ? null
                        : (queryRanges.length == 1 ? outPath : new Path(outPath, String.format("%05d", i)));
                Thread thread = new Thread() {
                    @Override
                    public void run() {
                        FSDataOutputStream outFile = null;
                        final byte[] newLine = System.getProperty("line.separator", "\n").getBytes();
                        try {
                            ResultCollector<Shape> collector = null;
                            if (output != null) {
                                FileSystem outFS = output.getFileSystem(queryParams);
                                final FSDataOutputStream foutFile = outFile = outFS.create(output);
                                collector = new ResultCollector<Shape>() {
                                    final Text tempText = new Text2();

                                    @Override
                                    public synchronized void collect(Shape r) {
                                        try {
                                            tempText.clear();
                                            r.toText(tempText);
                                            foutFile.write(tempText.getBytes(), 0, tempText.getLength());
                                            foutFile.write(newLine);
                                        } catch (IOException e) {
                                            e.printStackTrace();
                                        }
                                    }
                                };
                            } else {
                                outFile = null;
                            }
                            long resultCount = rangeQueryLocal(inPath, queryRange, shape, queryParams, collector);
                            resultsCounts.add(resultCount);
                        } catch (IOException e) {
                            e.printStackTrace();
                        } catch (InterruptedException e) {
                            e.printStackTrace();
                        } finally {
                            try {
                                if (outFile != null)
                                    outFile.close();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }
                    }
                };
                thread.start();
                threads.add(thread);
            } else {
                // Run in MapReduce mode
                Path outTempPath = outPath == null ? null
                        : new Path(outPath, String.format("%05d", i) + "-" + inPath.getName());
                queryParams.setBoolean("background", true);
                Job job = rangeQueryMapReduce(inPath, outTempPath, queryParams);
                jobs.add(job);
            }
        }
    }
    while (!jobs.isEmpty()) {
        Job firstJob = jobs.firstElement();
        firstJob.waitForCompletion(false);
        if (!firstJob.isSuccessful()) {
            System.err.println("Error running job " + firstJob);
            System.err.println("Killing all remaining jobs");
            for (int j = 1; j < jobs.size(); j++)
                jobs.get(j).killJob();
            System.exit(1);
        }
        Counters counters = firstJob.getCounters();
        Counter outputRecordCounter = counters.findCounter(Task.Counter.MAP_OUTPUT_RECORDS);
        resultsCounts.add(outputRecordCounter.getValue());
        jobs.remove(0);
    }
    while (!threads.isEmpty()) {
        try {
            Thread thread = threads.firstElement();
            thread.join();
            threads.remove(0);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
    long t2 = System.currentTimeMillis();
    System.out.println("QueryPlan:");
    for (Path stPath : STPaths) {
        System.out.println(stPath.getName());
    }
    System.out.println("Time for " + queryRanges.length + " jobs is " + (t2 - t1) + " millis");
    System.out.println("Results counts: " + resultsCounts);
}
From source file: com.owncloud.android.ui.activity.ContactsPreferenceActivity.java
public void openDate(View v) {
    String backupFolderString = getResources().getString(R.string.contacts_backup_folder)
            + OCFile.PATH_SEPARATOR;
    OCFile backupFolder = getStorageManager().getFileByPath(backupFolderString);
    Vector<OCFile> backupFiles = getStorageManager().getFolderContent(backupFolder, false);
    Collections.sort(backupFiles, new Comparator<OCFile>() {
        @Override
        public int compare(OCFile o1, OCFile o2) {
            if (o1.getModificationTimestamp() == o2.getModificationTimestamp()) {
                return 0;
            }
            if (o1.getModificationTimestamp() > o2.getModificationTimestamp()) {
                return 1;
            } else {
                return -1;
            }
        }
    });
    Calendar cal = Calendar.getInstance();
    int year = cal.get(Calendar.YEAR);
    int month = cal.get(Calendar.MONTH) + 1;
    int day = cal.get(Calendar.DAY_OF_MONTH);
    DatePickerDialog.OnDateSetListener dateSetListener = new DatePickerDialog.OnDateSetListener() {
        @Override
        public void onDateSet(DatePicker view, int year, int month, int dayOfMonth) {
            String backupFolderString = getResources().getString(R.string.contacts_backup_folder)
                    + OCFile.PATH_SEPARATOR;
            OCFile backupFolder = getStorageManager().getFileByPath(backupFolderString);
            Vector<OCFile> backupFiles = getStorageManager().getFolderContent(backupFolder, false);
            // find file with modification with date and time between 00:00 and 23:59
            // if more than one file exists, take oldest
            Calendar date = Calendar.getInstance();
            date.set(year, month, dayOfMonth);
            // start
            date.set(Calendar.HOUR, 0);
            date.set(Calendar.MINUTE, 0);
            date.set(Calendar.SECOND, 1);
            date.set(Calendar.MILLISECOND, 0);
            date.set(Calendar.AM_PM, Calendar.AM);
            Long start = date.getTimeInMillis();
            // end
            date.set(Calendar.HOUR, 23);
            date.set(Calendar.MINUTE, 59);
            date.set(Calendar.SECOND, 59);
            Long end = date.getTimeInMillis();
            OCFile backupToRestore = null;
            for (OCFile file : backupFiles) {
                if (start < file.getModificationTimestamp() && end > file.getModificationTimestamp()) {
                    if (backupToRestore == null) {
                        backupToRestore = file;
                    } else if (backupToRestore.getModificationTimestamp() < file.getModificationTimestamp()) {
                        backupToRestore = file;
                    }
                }
            }
            if (backupToRestore != null) {
                Fragment contactListFragment = ContactListFragment.newInstance(backupToRestore, getAccount());
                FragmentTransaction transaction = getSupportFragmentManager().beginTransaction();
                transaction.replace(R.id.contacts_linear_layout, contactListFragment);
                transaction.commit();
            } else {
                Toast.makeText(ContactsPreferenceActivity.this, R.string.contacts_preferences_no_file_found,
                        Toast.LENGTH_SHORT).show();
            }
        }
    };
    DatePickerDialog datePickerDialog = new DatePickerDialog(this, dateSetListener, year, month, day);
    datePickerDialog.getDatePicker().setMaxDate(backupFiles.lastElement().getModificationTimestamp());
    datePickerDialog.getDatePicker().setMinDate(backupFiles.firstElement().getModificationTimestamp());
    datePickerDialog.show();
}
From source file: net.sf.jabref.JabRef.java
public Optional<Vector<ParserResult>> processArguments(String[] args, boolean initialStartup) {
    cli = new JabRefCLI(args);
    if (!cli.isBlank() && cli.isDebugLogging()) {
        JabRefLogger.setDebug();
    }
    if (initialStartup && cli.isShowVersion()) {
        cli.displayVersion();
    }
    if (initialStartup && cli.isHelp()) {
        cli.printUsage();
        return Optional.empty();
    }
    // Check if we should reset all preferences to default values:
    if (cli.isPreferencesReset()) {
        String value = cli.getPreferencesReset();
        if ("all".equals(value.trim())) {
            try {
                System.out.println(Localization.lang("Setting all preferences to default values."));
                Globals.prefs.clear();
            } catch (BackingStoreException e) {
                System.err.println(Localization.lang("Unable to clear preferences."));
                e.printStackTrace();
            }
        } else {
            String[] keys = value.split(",");
            for (String key : keys) {
                if (Globals.prefs.hasKey(key.trim())) {
                    System.out.println(Localization.lang("Resetting preference key '%0'", key.trim()));
                    Globals.prefs.clear(key.trim());
                } else {
                    System.out.println(Localization.lang("Unknown preference key '%0'", key.trim()));
                }
            }
        }
    }
    // Check if we should import preferences from a file:
    if (cli.isPreferencesImport()) {
        try {
            Globals.prefs.importPreferences(cli.getPreferencesImport());
            CustomEntryTypesManager.loadCustomEntryTypes(Globals.prefs);
            ExportFormats.initAllExports();
        } catch (JabRefException ex) {
            LOGGER.error("Cannot import preferences", ex);
        }
    }
    // Vector to put imported/loaded database(s) in.
    Vector<ParserResult> loaded = new Vector<>();
    Vector<String> toImport = new Vector<>();
    if (!cli.isBlank() && (cli.getLeftOver().length > 0)) {
        for (String aLeftOver : cli.getLeftOver()) {
            // Leftover arguments that have a "bib" extension are interpreted as
            // bib files to open. Other files, and files that could not be opened
            // as bib, we try to import instead.
            boolean bibExtension = aLeftOver.toLowerCase().endsWith("bib");
            ParserResult pr = null;
            if (bibExtension) {
                pr = JabRef.openBibFile(aLeftOver, false);
            }
            if ((pr == null) || (pr == ParserResult.INVALID_FORMAT)) {
                // We will try to import this file. Normally we
                // will import it into a new tab, but if this import has
                // been initiated by another instance through the remote
                // listener, we will instead import it into the current database.
                // This will enable easy integration with web browsers that can
                // open a reference file in JabRef.
                if (initialStartup) {
                    toImport.add(aLeftOver);
                } else {
                    loaded.add(JabRef.importToOpenBase(aLeftOver).orElse(ParserResult.INVALID_FORMAT));
                }
            } else if (pr != ParserResult.FILE_LOCKED) {
                loaded.add(pr);
            }
        }
    }
    if (!cli.isBlank() && cli.isFileImport()) {
        toImport.add(cli.getFileImport());
    }
    for (String filenameString : toImport) {
        importFile(filenameString).ifPresent(loaded::add);
    }
    if (!cli.isBlank() && cli.isImportToOpenBase()) {
        importToOpenBase(cli.getImportToOpenBase()).ifPresent(loaded::add);
    }
    if (!cli.isBlank() && cli.isFetcherEngine()) {
        fetch(cli.getFetcherEngine()).ifPresent(loaded::add);
    }
    if (cli.isExportMatches()) {
        if (!loaded.isEmpty()) {
            String[] data = cli.getExportMatches().split(",");
            String searchTerm = data[0].replace("\\$", " "); //enables blanks within the search term:
            //? stands for a blank
            ParserResult pr = loaded.elementAt(loaded.size() - 1);
            BibDatabase dataBase = pr.getDatabase();
            SearchQuery query = new SearchQuery(searchTerm,
                    Globals.prefs.getBoolean(JabRefPreferences.SEARCH_CASE_SENSITIVE),
                    Globals.prefs.getBoolean(JabRefPreferences.SEARCH_REG_EXP));
            BibDatabase newBase = new DatabaseSearcher(query, dataBase).getDatabaseFromMatches(); //newBase contains only match entries
            //export database
            if ((newBase != null) && (newBase.getEntryCount() > 0)) {
                String formatName;
                //read in the export format, take default format if no format entered
                switch (data.length) {
                case 3:
                    formatName = data[2];
                    break;
                case 2:
                    //default ExportFormat: HTML table (with Abstract & BibTeX)
                    formatName = "tablerefsabsbib";
                    break;
                default:
                    System.err.println(Localization.lang("Output file missing").concat(". \n \t ")
                            .concat(Localization.lang("Usage")).concat(": ") + JabRefCLI.getExportMatchesSyntax());
                    return Optional.empty();
                } //end switch
                //export new database
                IExportFormat format = ExportFormats.getExportFormat(formatName);
                if (format == null) {
                    System.err.println(Localization.lang("Unknown export format") + ": " + formatName);
                } else {
                    // We have an ExportFormat instance:
                    try {
                        System.out.println(Localization.lang("Exporting") + ": " + data[1]);
                        format.performExport(newBase, pr.getMetaData(), data[1], pr.getEncoding(), null);
                    } catch (Exception ex) {
                        System.err.println(Localization.lang("Could not export file") + " '" + data[1] + "': "
                                + ex.getMessage());
                    }
                }
            } /*end if newBase != null*/ else {
                System.err.println(Localization.lang("No search matches."));
            }
        } else {
            System.err.println(Localization.lang("The output option depends on a valid input option."));
        } //end if(loaded.size > 0)
    }
    if (cli.isGenerateBibtexKeys()) {
        regenerateBibtexKeys(loaded);
    }
    if (cli.isAutomaticallySetFileLinks()) {
        automaticallySetFileLinks(loaded);
    }
    if (cli.isFileExport()) {
        if (!loaded.isEmpty()) {
            String[] data = cli.getFileExport().split(",");
            if (data.length == 1) {
                // This signals that the latest import should be stored in BibTeX
                // format to the given file.
                if (!loaded.isEmpty()) {
                    ParserResult pr = loaded.elementAt(loaded.size() - 1);
                    if (!pr.isInvalid()) {
                        try {
                            System.out.println(Localization.lang("Saving") + ": " + data[0]);
                            SavePreferences prefs = SavePreferences.loadForSaveFromPreferences(Globals.prefs);
                            Defaults defaults = new Defaults(BibDatabaseMode.fromPreference(
                                    Globals.prefs.getBoolean(JabRefPreferences.BIBLATEX_DEFAULT_MODE)));
                            BibDatabaseWriter databaseWriter = new BibDatabaseWriter();
                            SaveSession session = databaseWriter.saveDatabase(
                                    new BibDatabaseContext(pr.getDatabase(), pr.getMetaData(), defaults), prefs);
                            // Show just a warning message if encoding didn't work for all characters:
                            if (!session.getWriter().couldEncodeAll()) {
                                System.err.println(Localization.lang("Warning") + ": " + Localization.lang(
                                        "The chosen encoding '%0' could not encode the following characters:",
                                        session.getEncoding().displayName()) + " "
                                        + session.getWriter().getProblemCharacters());
                            }
                            session.commit(new File(data[0]));
                        } catch (SaveException ex) {
                            System.err.println(
                                    Localization.lang("Could not save file.") + "\n" + ex.getLocalizedMessage());
                        }
                    }
                } else {
                    System.err.println(Localization.lang("The output option depends on a valid import option."));
                }
            } else if (data.length == 2) {
                // This signals that the latest import should be stored in the given
                // format to the given file.
                ParserResult pr = loaded.elementAt(loaded.size() - 1);
                // Set the global variable for this database's file directory before exporting,
                // so formatters can resolve linked files correctly.
                // (This is an ugly hack!)
                File theFile = pr.getFile();
                if (!theFile.isAbsolute()) {
                    theFile = theFile.getAbsoluteFile();
                }
                MetaData metaData = pr.getMetaData();
                metaData.setFile(theFile);
                Globals.prefs.fileDirForDatabase = metaData.getFileDirectory(Globals.FILE_FIELD)
                        .toArray(new String[0]);
                Globals.prefs.databaseFile = metaData.getFile();
                System.out.println(Localization.lang("Exporting") + ": " + data[0]);
                IExportFormat format = ExportFormats.getExportFormat(data[1]);
                if (format == null) {
                    System.err.println(Localization.lang("Unknown export format") + ": " + data[1]);
                } else {
                    // We have an ExportFormat instance:
                    try {
                        format.performExport(pr.getDatabase(), pr.getMetaData(), data[0], pr.getEncoding(), null);
                    } catch (Exception ex) {
                        System.err.println(Localization.lang("Could not export file") + " '" + data[0] + "': "
                                + ex.getMessage());
                    }
                }
            }
        } else {
            System.err.println(Localization.lang("The output option depends on a valid import option."));
        }
    }
    LOGGER.debug("Finished export");
    if (cli.isPreferencesExport()) {
        try {
            Globals.prefs.exportPreferences(cli.getPreferencesExport());
        } catch (JabRefException ex) {
            LOGGER.error("Cannot export preferences", ex);
        }
    }
    if (!cli.isBlank() && cli.isAuxImport()) {
        boolean usageMsg = false;
        if (!loaded.isEmpty()) // bibtex file loaded
        {
            String[] data = cli.getAuxImport().split(",");
            if (data.length == 2) {
                ParserResult pr = loaded.firstElement();
                AuxCommandLine acl = new AuxCommandLine(data[0], pr.getDatabase());
                BibDatabase newBase = acl.perform();
                boolean notSavedMsg = false;
                // write an output, if something could be resolved
                if (newBase != null) {
                    if (newBase.getEntryCount() > 0) {
                        String subName = StringUtil.getCorrectFileName(data[1], "bib");
                        try {
                            System.out.println(Localization.lang("Saving") + ": " + subName);
                            SavePreferences prefs = SavePreferences.loadForSaveFromPreferences(Globals.prefs);
                            BibDatabaseWriter databaseWriter = new BibDatabaseWriter();
                            Defaults defaults = new Defaults(BibDatabaseMode.fromPreference(
                                    Globals.prefs.getBoolean(JabRefPreferences.BIBLATEX_DEFAULT_MODE)));
                            SaveSession session = databaseWriter
                                    .saveDatabase(new BibDatabaseContext(newBase, defaults), prefs);
                            // Show just a warning message if encoding didn't work for all characters:
                            if (!session.getWriter().couldEncodeAll()) {
                                System.err.println(Localization.lang("Warning") + ": " + Localization.lang(
                                        "The chosen encoding '%0' could not encode the following characters:",
                                        session.getEncoding().displayName()) + " "
                                        + session.getWriter().getProblemCharacters());
                            }
                            session.commit(new File(subName));
                        } catch (SaveException ex) {
                            System.err.println(
                                    Localization.lang("Could not save file.") + "\n" + ex.getLocalizedMessage());
                        }
                        notSavedMsg = true;
                    }
                }
                if (!notSavedMsg) {
                    System.out.println(Localization.lang("no database generated"));
                }
            } else {
                usageMsg = true;
            }
        } else {
            usageMsg = true;
        }
        if (usageMsg) {
            System.out.println(Localization.lang("no base-BibTeX-file specified") + "!");
            System.out.println(Localization.lang("usage") + " :");
            System.out.println("jabref --aux infile[.aux],outfile[.bib] base-BibTeX-file");
        }
    }
    return Optional.of(loaded);
}
From source file: alter.vitro.vgw.service.query.SimpleQueryHandler.java
private Vector<ReqResultOverData> findAggrAttrValue(String pQueryDefId,
        Vector<QueriedMoteAndSensors> pMotesAndTheirSensorAndFunctsVec, Vector<ReqFunctionOverData> reqFunctionVec,
        List<String> serviceDeployStatusStr, List<String[]> localReplacedResources) {
    //
    // ADDED CODE -- OPTIMIZATION PENDING +++++
    //
    // --------------- SERVICE CONTINUATION PREP
    // TODO: SERVICE CONTINUATION PREP
    //service Continuation Additions:
    //String serviceDeployStatusStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_UNKNOWN;
    serviceDeployStatusStr.add(ResponseAggrMsg.DEPLOY_STATUS_SERVICE_UNKNOWN);
    // deploy status flags
    boolean serviceDeployAllNodesAvailable = true;
    boolean serviceDeployContinuationEmployed = false;
    boolean serviceDeployPartiallyPossible = false;
    boolean serviceDeployImpossible = false;
    // [0] is the original nodeId, [1] the replacing node id and [2] the capability
    //List<String[]> localReplacedResources = new ArrayList<String[]>();
    //
    //
    // TODO: 1. Use the motesAndTheirSensorAndFunctVec to get the requested motes and the requested capabilities.
    // TODO: 2. Check with Continuation Service and Resource Availability Service.
    // TODO: 2a. If all nodes are available then Deploy_Status = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_POSSIBLE.
    //       2b. If a node in the requested motes is unavailable (or future: a requested resource is unavailable)
    //           Check the equivalent nodes for matches for this capability.
    //           If a match is found, replace the node in the motesAndTheirSensorAndFunctsVec with the replacement node
    //           and keep this replacing tracked/stored locally (as well as the cache of the continuationService)
    //           when the results are found, replace the original mote back, but also send the extra xml that says that the values from that node for that capability are from the replacement node
    // TODO: Careful! a node could be replaced by more than one nodes, based on the capabilities requested! TEST THIS CASE!
    // TODO: Careful! a node could be replaced for one capability, but not for another!
    //           Also set the flag serviceContinuationEmployed to true.
    //           if at the end only this flag is set then update the Deploy_Status to ResponseAggrMsg.DEPLOY_STATUS_SERVICE_CONTINUATION
    //           If a match is not found then remove this node from the results.
    //           Also set the flag servicePartiallyPossible to true.
    //           if at the end only this flag is set then update the Deploy_Status ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL
    //           If at the end both flags serviceContinuationEmployed and servicePartiallyPossible are true
    //           and not the serviceImpossible flag then update the Deploy_Status to ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_CONT_COMBO
    //
    // Finally if NO nodes are available for the service set the serviceImpossible flag to true and
    // update the deploy_status to ResponseAggrMsg.DEPLOY_STATUS_SERVICE_IMPOSSIBLE
    // END: SERVICE CONTINUATION PREP
    Vector<QueriedMoteAndSensors> originalMotesAndTheirSensorAndFunctsVec = pMotesAndTheirSensorAndFunctsVec;
    Vector<QueriedMoteAndSensors> newMotesAndTheirSensorAndFunctsVec = new Vector<QueriedMoteAndSensors>();
    List<String> allInvolvedMoteIdsList = new ArrayList<String>();
    for (QueriedMoteAndSensors aMoteAndSensors : originalMotesAndTheirSensorAndFunctsVec) {
        allInvolvedMoteIdsList.add(aMoteAndSensors.getMoteid());
    }
    logger.debug("Queried motes and sensors:");
    for (QueriedMoteAndSensors aMoteAndSensors : originalMotesAndTheirSensorAndFunctsVec) {
        logger.debug("Mote Id: " + aMoteAndSensors.getMoteid());
        if (aMoteAndSensors.getQueriedSensorIdsAndFuncVec() != null
                && !aMoteAndSensors.getQueriedSensorIdsAndFuncVec().isEmpty()) {
            HashMap<String, Vector<Integer>> functionsForCapabilityOfThisMoteHM = new HashMap<String, Vector<Integer>>();
            for (ReqSensorAndFunctions sensAndFuncts : aMoteAndSensors.getQueriedSensorIdsAndFuncVec()) {
                logger.debug(" Capabilities: " + sensAndFuncts.getSensorModelid());
                // TODO: we could probably acquire the friendly name too from some map
                //TODO: this isNodeResourceAvailable could be also done ideally within the ContinuationOfProvisionService within the findNextEquivalaneNode funciton (also could be synchronized)
                //logger.debug("DDDDD Size of functs:" + Integer.toString(sensAndFuncts.getFunctionsOverSensorModelVec().size()));
                //{
                //    int smid = sensAndFuncts.getSensorModelIdInt();
                //    //logger.debug("For mote "+fullMoteId +" and sensor "+Integer.toString(smid) + " function vector size is "+reqFunctionVec.size());
                //    for (Integer inFunctVec : sensAndFuncts.getFunctionsOverSensorModelVec()) {
                //        logger.debug("Fid: " + inFunctVec);
                //    }
                //}
                functionsForCapabilityOfThisMoteHM.put(sensAndFuncts.getSensorModelid(),
                        sensAndFuncts.getFunctionsOverSensorModelVec());
                if (!ResourceAvailabilityService.getInstance().isNodeResourceAvailable(pQueryDefId,
                        aMoteAndSensors.getMoteid(), sensAndFuncts.getSensorModelid())) {
                    logger.debug("Node id: " + aMoteAndSensors.getMoteid() + " unavailable for: "
                            + sensAndFuncts.getSensorModelid());
                    String[] replacementInfo = ContinuationOfProvisionService.getInstance().findNextEquivalentNode(
                            pQueryDefId, allInvolvedMoteIdsList, aMoteAndSensors.getMoteid(),
                            sensAndFuncts.getSensorModelid());
                    if (replacementInfo == null) {
                        //logger.debug("Could not find replacement node for " + sensAndFuncts.getSensorModelid() + " vsn id: " + pQueryDefId);
                        serviceDeployPartiallyPossible = true;
                    } else {
                        logger.debug("Found replacement node " + replacementInfo[1] + " for node "
                                + replacementInfo[0] + " for " + replacementInfo[2] + " vsn id: " + pQueryDefId);
                        serviceDeployContinuationEmployed = true;
                        // to prevent duplicates (though there really should not be such case)
                        addToLocalReplacementInfoList(localReplacedResources, replacementInfo);
                    }
                } //end if: node capability is not available
                else { //capability is available
                    // add self as a replacement (locally)
                    // a node could be available for some capabilities but not for others
                    String[] replacementInfo = { aMoteAndSensors.getMoteid(), aMoteAndSensors.getMoteid(),
                            sensAndFuncts.getSensorModelid() };
                    logger.debug("Adding self to local cache");
                    addToLocalReplacementInfoList(localReplacedResources, replacementInfo);
                }
            } //end for loop for this node's capability
            //loop through the localReplacedResources for this node and update the newMotesAndTheirSensorAndFunctsVec
            List<String> consideredReplacementNodes = new ArrayList<String>();
            for (String[] entryLocal : localReplacedResources) {
                //logger.debug("Checking localReplacedResources for: " + entryLocal[0]);
                if (entryLocal[0].compareToIgnoreCase(aMoteAndSensors.getMoteid()) == 0) {
                    String idOfOneReplacingNode = entryLocal[1];
                    if (!consideredReplacementNodes.contains(idOfOneReplacingNode)) {
                        //logger.debug("INNER Checking localReplacedResources for: " + idOfOneReplacingNode);
                        consideredReplacementNodes.add(idOfOneReplacingNode);
                        Vector<ReqSensorAndFunctions> replacementNodeSensorAndFuncts = new Vector<ReqSensorAndFunctions>();
                        QueriedMoteAndSensors replacementMoteAndSensors = new QueriedMoteAndSensors(
                                idOfOneReplacingNode, replacementNodeSensorAndFuncts);
                        // inner loop again to find all capabilities that this node (idOfOneReplacingNode) is a replacement for
                        for (String[] entryLocalInner : localReplacedResources) {
                            if (entryLocalInner[0].compareToIgnoreCase(aMoteAndSensors.getMoteid()) == 0
                                    && entryLocalInner[1].compareToIgnoreCase(idOfOneReplacingNode) == 0) {
                                //logger.debug("INNER MATCh FOUND for: " + entryLocalInner[1] + " capability: " + entryLocalInner[2]);
                                String capabilityToAdd = entryLocalInner[2];
                                int capabilityToAddInt = ReqSensorAndFunctions.invalidSensModelId;
                                try {
                                    capabilityToAddInt = Integer.valueOf(capabilityToAdd);
                                } catch (Exception ex33) {
                                    logger.error("Could not convert capability id to int for replacement capability: "
                                            + capabilityToAdd);
                                }
                                //logger.error("CAP TO ADD" + capabilityToAdd);
                                if (functionsForCapabilityOfThisMoteHM.containsKey(capabilityToAdd)
                                        && functionsForCapabilityOfThisMoteHM.get(capabilityToAdd) != null
                                        && !functionsForCapabilityOfThisMoteHM.get(capabilityToAdd).isEmpty()) {
                                    //logger.error("FOUND IN HASHMAP!!!");
                                    Vector<Integer> funcsOverThisCapability = functionsForCapabilityOfThisMoteHM
                                            .get(capabilityToAdd);
                                    //int smid = capabilityToAddInt;
                                    //logger.debug("DEB DEB For mote "+aMoteAndSensors.getMoteid() +" and sensor "+Integer.toString(smid) + " function vector size is "+reqFunctionVec.size());
                                    //for (Integer inFunctVec : funcsOverThisCapability) {
                                    //    logger.debug("DEB DEB Fid: " + inFunctVec);
                                    //}
                                    ReqSensorAndFunctions thisSensorAndFuncts = new ReqSensorAndFunctions(
                                            capabilityToAddInt, funcsOverThisCapability);
                                    //thisSensorAndFuncts.getSensorModelid();
                                    //thisSensorAndFuncts.getFunctionsOverSensorModelVec().size();
                                    //logger.debug("DEB DEB 333 For sensor "+ thisSensorAndFuncts.getSensorModelid()+ " function vector size is "+ thisSensorAndFuncts.getFunctionsOverSensorModelVec().size());
                                    //for (Integer inFunctVec : funcsOverThisCapability) {
                                    //    logger.debug("DEB DEB 333 Fid: " + inFunctVec);
                                    //}
                                    replacementNodeSensorAndFuncts.addElement(thisSensorAndFuncts);
                                }
                            }
                        }
                        if (!replacementNodeSensorAndFuncts.isEmpty()) {
                            //logger.error("ADDING ELEMENT TO NEW MOTES LIST!!!" + replacementMoteAndSensors.getMoteid() + ":: " + Integer.toString(replacementMoteAndSensors.getQueriedSensorIdsAndFuncVec().size()));
                            replacementMoteAndSensors.setQueriedSensorIdsAndFuncVec(replacementNodeSensorAndFuncts);
                            newMotesAndTheirSensorAndFunctsVec.addElement(replacementMoteAndSensors);
                        }
                    }
                }
            }
            //functionsForCapabilityOfThisMoteHM.clear();
        }
    } //end for loop for this node of queried motes
    if (newMotesAndTheirSensorAndFunctsVec == null || newMotesAndTheirSensorAndFunctsVec.isEmpty()) {
        serviceDeployImpossible = true;
        logger.debug("Service Deploy is impossible for vsn id: " + pQueryDefId);
    }
    // decide status
    String statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_UNKNOWN;
    if (serviceDeployImpossible) {
        statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_IMPOSSIBLE;
    } else if (serviceDeployContinuationEmployed && serviceDeployPartiallyPossible) {
        statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_CONT_COMBO;
    } else if (serviceDeployContinuationEmployed) {
        statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_CONTINUATION;
    } else if (serviceDeployPartiallyPossible) {
        statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL;
    } else if (serviceDeployAllNodesAvailable && !serviceDeployImpossible && !serviceDeployContinuationEmployed
            && !serviceDeployPartiallyPossible) {
        statusDecidedStr = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_POSSIBLE;
    }
    serviceDeployStatusStr.set(0, statusDecidedStr);
    logger.debug("Decided DEPLOY STATUS WAS: " + serviceDeployStatusStr.get(0));
    // We proceed here because even if service deploy is not possible, a reply will be sent with the status and empty lists (TODO consider)
    // However we also send (near the end of this method) alert messages for the deploy status if <> OK
    //
    //
    // TODO: To skip redundant queries in network
    // TODO: Count the reqFunction in reqFunction Vec (Debug print them) (also check that they are executed even if gateway level for each node - which should not happen)
    // TODO: Verify that if a function is gateway level and it's removed(?) from the reqFunctionVec then it's not executed by the wsi adapter!
    //
    //
    // TODO: handle conditions for aggregate (gateway level functions).
    //
    //clone the reqFunctionsVec. TODO: this is not cloning though, we pass references to the added elements
    Vector<ReqFunctionOverData> onlyNodeReqFunctVec = new Vector<ReqFunctionOverData>();
    Vector<ReqFunctionOverData> onlyGwLevelReqFunctVec = new Vector<ReqFunctionOverData>();
    for (int i = 0; i < reqFunctionVec.size(); i++) {
        if (ReqFunctionOverData.isValidGatewayReqFunct(reqFunctionVec.elementAt(i).getfuncName()))
            onlyGwLevelReqFunctVec.addElement(reqFunctionVec.elementAt(i));
        else {
            onlyNodeReqFunctVec.addElement(reqFunctionVec.elementAt(i));
        }
    }
    //
    // get the involved capabilities per gatewaylevel function, and then remove the function id from those sensorModels!
    //
    // Produce a hashmap of gwLevel function name to Vector of capabilities (sensorModelId from the query/request)
    HashMap<String, Vector<String>> gwLevelFunctToCapsList = new HashMap<String, Vector<String>>();
    // todo: IMPORTANT later we should group sensormodelIds per capability they belong to, but for now sensormodelid == capability!
    Iterator<ReqFunctionOverData> gwLevelFunctsIter = onlyGwLevelReqFunctVec.iterator();
    while (gwLevelFunctsIter.hasNext()) {
        Vector<String> myInvolvedCaps = new Vector<String>();
        ReqFunctionOverData tmpGwLevelFunct = gwLevelFunctsIter.next();
        // new change to new Vector of motes (19/04)
        Iterator<QueriedMoteAndSensors> onMotesSensFunctsVecIter = newMotesAndTheirSensorAndFunctsVec.iterator();
        while (onMotesSensFunctsVecIter.hasNext()) {
            QueriedMoteAndSensors tmpMoteAndSenAndFuncts = onMotesSensFunctsVecIter.next();
            Iterator<ReqSensorAndFunctions> sensAndFunctsIter = tmpMoteAndSenAndFuncts
                    .getQueriedSensorIdsAndFuncVec().iterator();
            while (sensAndFunctsIter.hasNext()) {
                ReqSensorAndFunctions sensAndFuncts = sensAndFunctsIter.next();
                //Vector<Integer> sensfunctsVector = sensAndFuncts.getFunctionsOverSensorModelVec();
                int initSize = sensAndFuncts.getFid().size();
                for (int k = initSize - 1; k >= 0; k--) {
                    int sensfid = sensAndFuncts.getFid().get(k).intValue();
                    if (sensfid == tmpGwLevelFunct.getfuncId()) {
                        if (!myInvolvedCaps.contains(sensAndFuncts.getSensorModelid())) {
                            myInvolvedCaps.addElement(sensAndFuncts.getSensorModelid());
                        }
                        // TODO: WHY??? (NOT NEEDED ANYMORE because we use the onlyNodeReqFunctVec to query the sensor and that filters out the functions in the adapter)
                        // ::here we should also delete the fid from the sensor model (but the simple way does not work for some reason, so it is left for future)
                        //List tmpList = removeElementAt(sensAndFuncts.getFid(), k);
                        //sensAndFuncts.getFid().clear();
                        //sensAndFuncts.getFid().addAll(tmpList);
                        //sensAndFuncts.getFunctionsOverSensorModelVec().clear();
                    }
                }
            }
        }
        gwLevelFunctToCapsList.put(tmpGwLevelFunct.getfuncName(), myInvolvedCaps);
    }
    //
    //
    //
    Vector<ReqResultOverData> allResultsRead = new Vector<ReqResultOverData>();
    //WsiAdapterCon myDCon = WsiAdapterConFactory.createMiddleWCon("uberdust", DbConInfoFactory.createConInfo("restHttp"));
    // DONE: The translateAggrQuery should not be executed for gateway level functions (skip them here or in the adapter con class.(?)
    // new changed to the new vector of motes: 19/04
    logger.debug("Submitting query to the network");
    // ASK ONLY FOR NODE LEVEL FUNCTIONS (TODO: Essentially for now, only last value is a node level function sent from the VSP, although other node level functions are supported)
    allResultsRead = myDCon.translateAggrQuery(newMotesAndTheirSensorAndFunctsVec, onlyNodeReqFunctVec);
    logger.debug("After Submitting query to the network");
    //
    //
    // TODO: All gateway level functions reference a node level function at some point (either directly eg max or two hops eg "IF MAX "
    //
    //
    // Handle gateway level functions
    // first order of business, delete everything within them (some connectors could put latest values of all nodes, but we want to do it the more proper way)
    // then get the values of the referenced function(s)
    // aggregate the values and produce a single result. TODO: here UOMs of different sensor models could come into play. Handle this in the future!
    //
    //
    // 1. we create a new derived structure with unique fid keyed entries for required Result over data.
    Vector<ReqResultOverData> allUniqueFunctionsWithResults = new Vector<ReqResultOverData>();
    Iterator<ReqResultOverData> messyResultsIter = allResultsRead.iterator();
    // Loop over all resultOverData. They are keyed by fid, but there can be multiple of the same fid!
    // So here we merge those of same fid.
    while (messyResultsIter.hasNext()) //OUTER loop
    {
        ReqResultOverData tmpResStructFromMessyVec = messyResultsIter.next();
        //ReqResultOverData tmpResStructMatched = null;
        boolean foundTheFid = false;
        Iterator<ReqResultOverData> uniqueFuncResultsIter = allUniqueFunctionsWithResults.iterator();
        while (uniqueFuncResultsIter.hasNext()) //for the first pass of the OUTER loop the allUniqueFunctionsWithResults is empty
        {
            ReqResultOverData uniqueFunctResult = uniqueFuncResultsIter.next();
            if (uniqueFunctResult.getFidInt() == tmpResStructFromMessyVec.getFidInt()) {
                foundTheFid = true;
                uniqueFunctResult.getOut().addAll(tmpResStructFromMessyVec.getAllResultsforFunct());
                break;
            }
        }
        if (!foundTheFid) {
            allUniqueFunctionsWithResults.addElement(new ReqResultOverData(tmpResStructFromMessyVec.getFidInt(),
                    tmpResStructFromMessyVec.getAllResultsforFunct()));
        }
    }
    //
    // Repeat this process slightly altered to add the unique Gw level functions
    //
    Iterator<ReqFunctionOverData> gwfunctIter = onlyGwLevelReqFunctVec.iterator();
    while (gwfunctIter.hasNext()) //OUTER loop
    {
        ReqFunctionOverData tmpReqGwFunct = gwfunctIter.next();
        //ReqResultOverData tmpResStructMatched = null;
        boolean foundTheFid = false;
        Iterator<ReqResultOverData> uniqueFuncResultsIter = allUniqueFunctionsWithResults.iterator();
        while (uniqueFuncResultsIter.hasNext()) //for the first pass of the OUTER loop the allUniqueFunctionsWithResults is empty
        {
            ReqResultOverData uniqueFunctResult = uniqueFuncResultsIter.next();
            if (uniqueFunctResult.getFidInt() == tmpReqGwFunct.getfuncId()) {
                foundTheFid = true;
                break;
            }
        }
        if (!foundTheFid) {
            allUniqueFunctionsWithResults
                    .addElement(new ReqResultOverData(tmpReqGwFunct.getfuncId(), new Vector<ResultAggrStruct>()));
        }
    }
    // end of 1.
    //
    // 2. Go through all the gateway level functions (all of which are missing values right now).
    // For each gateway level function, go through all the results for this function.
    //
    gwfunctIter = onlyGwLevelReqFunctVec.iterator();
    while (gwfunctIter.hasNext()) {
        ReqFunctionOverData tmpGwFunct = gwfunctIter.next();
        Iterator<ReqResultOverData> resultsIter = allUniqueFunctionsWithResults.iterator();
        // loop over all resultOverData for this specific function (matching is made in the next two lines)
        while (resultsIter.hasNext()) {
            ReqResultOverData tmpResForGWFunct = resultsIter.next();
            if (tmpResForGWFunct.getFidInt() == tmpGwFunct.getfuncId()) {
                // descriptionTokens[0] : GW LEVEL PREFIX
                // descriptionTokens[1] : FUNCTION NAME
                // descriptionTokens[2] : REFERENCED FUNCTION ID
                String[] descriptionTokens = tmpGwFunct.getfuncName()
                        .split(ReqFunctionOverData.GW_LEVEL_SEPARATOR);
                //
                // 3. Handle min, max and avg gateway level functions.
                // (IF THEN FUNCTIONS ARE HANDLED AS ANOTHER CASE - THEY ARE ONE HOP HIGHER)
                // MIN, MAX, and AVG are all one hop (reference) away from a node level function (last value)
                if (descriptionTokens != null && descriptionTokens.length > 2
                        && (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.maxFunc)
                                || descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.minFunc)
                                || descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.avgFunc))) {
                    logger.debug("Clearing up values for gw funct name: " + tmpGwFunct.getfuncName());
                    // cleanup of output list (it should however be already empty now that we rightfully only poll the WSI for node level functions)
                    tmpResForGWFunct.getOut().clear();
                    tmpResForGWFunct.getAllResultsforFunct().clear();
                    //after cleanup of output list
                    logger.debug("Filling up values for gw funct name: " + tmpGwFunct.getfuncName());
                    if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.maxFunc)) {
                        // MAX FUNCTION =======================================
                        int aggregatedValues = 0;
                        int refFunct = ReqFunctionOverData.unknownFuncId;
                        try {
                            refFunct = Integer.valueOf(descriptionTokens[2]);
                        } catch (Exception exfrtm) {
                            logger.error("Reference function id was set as unknown!");
                        }
                        HashMap<String, Long> capToTsFromMinLong = new HashMap<String, Long>();
                        HashMap<String, Long> capToTsToMaxLong = new HashMap<String, Long>();
                        HashMap<String, Long> capToMaxValueLong = new HashMap<String, Long>();
                        //
                        Iterator<ReqResultOverData> resultsIter002 = allUniqueFunctionsWithResults.iterator();
                        // INNER LOOP THROUGH FUNCTIONS with results, searching for the referenced NODE level function
                        while (resultsIter002.hasNext()) {
                            ReqResultOverData tmpRes = resultsIter002.next();
                            if (tmpRes.getFidInt() == refFunct) {
                                // for every GENERIC capability requested (the generic capability is coded as hashcode())
                                for (String currCapSidStr : gwLevelFunctToCapsList.get(tmpGwFunct.getfuncName())) {
                                    if (!capToMaxValueLong.containsKey(currCapSidStr)) {
                                        capToMaxValueLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE));
                                        capToTsFromMinLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE));
                                        capToTsToMaxLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE));
                                    }
                                    Iterator<OutType> tmpOutItemIter = tmpRes.getOut().iterator();
                                    while (tmpOutItemIter.hasNext()) {
                                        ResultAggrStruct tmpOutItem = new ResultAggrStruct(tmpOutItemIter.next());
                                        if (currCapSidStr.trim().equalsIgnoreCase(tmpOutItem.getSid().trim())) {
                                            try {
                                                long longValToCompare = Long.parseLong(tmpOutItem.getVal());
                                                if (longValToCompare > capToMaxValueLong.get(currCapSidStr)
                                                        .longValue()) {
                                                    capToMaxValueLong.put(currCapSidStr,
                                                            Long.valueOf(longValToCompare));
                                                }
                                                if (capToTsFromMinLong.get(currCapSidStr).longValue() > tmpOutItem
                                                        .getTis().getFromTimestamp().getTime()) {
                                                    capToTsFromMinLong.put(currCapSidStr, Long.valueOf(
                                                            tmpOutItem.getTis().getFromTimestamp().getTime()));
                                                }
                                                if (capToTsToMaxLong.get(currCapSidStr).longValue() < tmpOutItem
                                                        .getTis().getToTimestamp().getTime()) {
                                                    capToTsToMaxLong.put(currCapSidStr, Long.valueOf(
                                                            tmpOutItem.getTis().getToTimestamp().getTime()));
                                                }
                                                aggregatedValues += 1;
                                            } catch (Exception e) {
                                                logger.error("Invalid format to aggregate");
                                            }
                                        }
                                    }
                                    ResultAggrStruct thisAggrResult = new ResultAggrStruct(
                                            ResultAggrStruct.MidSpecialForAggregateMultipleValues,
                                            Integer.valueOf(currCapSidStr),
                                            Long.toString(capToMaxValueLong.get(currCapSidStr)), aggregatedValues,
                                            new TimeIntervalStructure(
                                                    new Timestamp(capToTsFromMinLong.get(currCapSidStr)),
                                                    new Timestamp(capToTsToMaxLong.get(currCapSidStr))));
                                    tmpResForGWFunct.getOut().add(thisAggrResult);
                                }
                            }
                        }
                    } else
if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.minFunc)) { // MIN FUNCTION ======================================= int aggregatedValues = 0; int refFunct = ReqFunctionOverData.unknownFuncId; try { refFunct = Integer.valueOf(descriptionTokens[2]); } catch (Exception exfrtm) { logger.error("Reference function id was set as unknown!"); } HashMap<String, Long> capToTsFromMinLong = new HashMap<String, Long>(); HashMap<String, Long> capToTsToMaxLong = new HashMap<String, Long>(); HashMap<String, Long> capToMinValueLong = new HashMap<String, Long>(); // Iterator<ReqResultOverData> resultsIter002 = allUniqueFunctionsWithResults.iterator(); while (resultsIter002.hasNext()) { ReqResultOverData tmpRes = resultsIter002.next(); if (tmpRes.getFidInt() == refFunct) { // for every GENERIC capability requested (the generic capability is coded as hashCode()) for (String currCapSidStr : gwLevelFunctToCapsList .get(tmpGwFunct.getfuncName())) { if (!capToMinValueLong.containsKey(currCapSidStr)) { capToMinValueLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsFromMinLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsToMaxLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE)); } Iterator<OutType> tmpOutItemIter = tmpRes.getOut().iterator(); while (tmpOutItemIter.hasNext()) { ResultAggrStruct tmpOutItem = new ResultAggrStruct( tmpOutItemIter.next()); if (currCapSidStr.trim().equalsIgnoreCase(tmpOutItem.getSid().trim())) { try { long longValToCompare = Long.parseLong(tmpOutItem.getVal()); if (longValToCompare < capToMinValueLong.get(currCapSidStr) .longValue()) { capToMinValueLong.put(currCapSidStr, Long.valueOf(longValToCompare)); } if (capToTsFromMinLong.get(currCapSidStr) .longValue() > tmpOutItem.getTis().getFromTimestamp() .getTime()) { capToTsFromMinLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getFromTimestamp().getTime())); } if (capToTsToMaxLong.get(currCapSidStr).longValue() < tmpOutItem .getTis().getToTimestamp().getTime()) { capToTsToMaxLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getToTimestamp().getTime())); } aggregatedValues += 1; } catch (Exception e) { logger.error("Invalid format to aggregate"); } } } ResultAggrStruct thisAggrResult = new ResultAggrStruct( ResultAggrStruct.MidSpecialForAggregateMultipleValues, Integer.valueOf(currCapSidStr), Long.toString(capToMinValueLong.get(currCapSidStr)), aggregatedValues, new TimeIntervalStructure( new Timestamp(capToTsFromMinLong.get(currCapSidStr)), new Timestamp(capToTsToMaxLong.get(currCapSidStr)))); logger.debug("Adding a result"); tmpResForGWFunct.getOut().add(thisAggrResult); logger.debug("Added a result"); } } } } else if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.avgFunc)) { // AVG FUNCTION ======================================= int aggregatedValues = 0; int refFunct = ReqFunctionOverData.unknownFuncId; try { refFunct = Integer.valueOf(descriptionTokens[2]); } catch (Exception exfrtm) { logger.error("Reference function id was set as unknown!"); } HashMap<String, Long> capToTsFromMinLong = new HashMap<String, Long>(); HashMap<String, Long> capToTsToMaxLong = new HashMap<String, Long>(); HashMap<String, Long> capToAvgValueLong = new HashMap<String, Long>(); // Iterator<ReqResultOverData> resultsIter002 = allUniqueFunctionsWithResults.iterator(); while (resultsIter002.hasNext()) { ReqResultOverData tmpRes = resultsIter002.next(); /*System.out.println("LLLLLLLL TEST 3"); StringBuilder tmpRsOD = new StringBuilder(); tmpRsOD.append("resf fid:");
tmpRsOD.append(tmpRes.getFidInt()); tmpRsOD.append(" AND ref funct:"); tmpRsOD.append(refFunct); System.out.println("OOOOOOOOOOOOOO TEST 3B" + tmpRsOD.toString());*/ if (tmpRes.getFidInt() == refFunct) { // for every GENERIC capability requested (the generic capability is coded as hashCode()) for (String currCapSidStr : gwLevelFunctToCapsList .get(tmpGwFunct.getfuncName())) { if (!capToAvgValueLong.containsKey(currCapSidStr)) { capToAvgValueLong.put(currCapSidStr, Long.valueOf(0)); capToTsFromMinLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsToMaxLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE)); } Iterator<OutType> tmpOutItemIter = tmpRes.getOut().iterator(); while (tmpOutItemIter.hasNext()) { ResultAggrStruct tmpOutItem = new ResultAggrStruct( tmpOutItemIter.next()); if (currCapSidStr.trim().equalsIgnoreCase(tmpOutItem.getSid().trim())) { try { long longValOfSensor = Long.parseLong(tmpOutItem.getVal()); long valPrevious = capToAvgValueLong.get(currCapSidStr) .longValue(); long newVal = valPrevious + longValOfSensor; capToAvgValueLong.put(currCapSidStr, Long.valueOf(newVal)); // if (capToTsFromMinLong.get(currCapSidStr) .longValue() > tmpOutItem.getTis().getFromTimestamp() .getTime()) { capToTsFromMinLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getFromTimestamp().getTime())); } if (capToTsToMaxLong.get(currCapSidStr).longValue() < tmpOutItem .getTis().getToTimestamp().getTime()) { capToTsToMaxLong.put(currCapSidStr, Long.valueOf( tmpOutItem.getTis().getToTimestamp().getTime())); } aggregatedValues += 1; } catch (Exception e) { logger.error("Invalid format to aggregate"); } } } Double avgVal = Double .valueOf(capToAvgValueLong.get(currCapSidStr).longValue()) / Double.valueOf(aggregatedValues); /*StringBuilder tmpRs = new StringBuilder(); tmpRs.append("Result:"); tmpRs.append(avgVal); tmpRs.append(" aggr vals:"); tmpRs.append(aggregatedValues); System.out.println("OOOOOOOOOOOOOO TEST 3C" + tmpRs.toString());*/ ResultAggrStruct thisAggrResult = new ResultAggrStruct( ResultAggrStruct.MidSpecialForAggregateMultipleValues, Integer.valueOf(currCapSidStr), Double.toString(avgVal), aggregatedValues, new TimeIntervalStructure( new Timestamp(capToTsFromMinLong.get(currCapSidStr)), new Timestamp(capToTsToMaxLong.get(currCapSidStr)))); tmpResForGWFunct.getOut().add(thisAggrResult); //System.out.println("OOOOOOOOOOOOOO TEST 3D" + tmpRs.toString()); } } } } } } } } // end of while loop on ONE HOP REFERENCE GW FUNCTIONS (MIN, MAX, AVG) // Start of while loop on 2nd HOP reference GW functions (these need the one-hop results already filled in) // TODO: we don't handle/anticipate the case where the IF_THEN function references another IF_THEN function (even repeatedly). More flexibility could be implemented!!
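// --- Recap sketch before the 2nd-hop loop below (not part of the original flow): the
// three aggregate branches above share one per-capability fold -- seed an identity
// value, combine each matching reading, and widen the [from, to] timestamp window.
// For max (readings is a hypothetical list of matching ResultAggrStruct items; values
// are assumed to parse as long, as above):
/*
HashMap<String, Long> maxByCap = new HashMap<String, Long>();
for (ResultAggrStruct r : readings) {
    String cap = r.getSid().trim();
    long v = Long.parseLong(r.getVal());
    Long prev = maxByCap.get(cap);
    maxByCap.put(cap, Long.valueOf(prev == null ? v : Math.max(prev.longValue(), v)));
}
// min uses Math.min with seed Long.MAX_VALUE; avg keeps a running sum plus a count
// and divides at the end, as the avg branch above does.
*/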
gwfunctIter = onlyGwLevelReqFunctVec.iterator(); // gets a NEW iterator while (gwfunctIter.hasNext()) { ReqFunctionOverData tmpGwFunct = gwfunctIter.next(); Iterator<ReqResultOverData> resultsIter = allUniqueFunctionsWithResults.iterator(); // loop over all resultOverData for this specific function (matching is made in the next two lines) while (resultsIter.hasNext()) { ReqResultOverData tmpResForGWFunct = resultsIter.next(); if (tmpResForGWFunct.getFidInt() == tmpGwFunct.getfuncId()) { // descriptionTokens[0] : GW LEVEL PREFIX // descriptionTokens[1] : FUNCTION NAME // descriptionTokens[2] : REFERENCED FUNCTION ID String[] descriptionTokens = tmpGwFunct.getfuncName() .split(ReqFunctionOverData.GW_LEVEL_SEPARATOR); if (descriptionTokens != null && descriptionTokens.length > 2 && (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.ruleRuleBinaryAndFunc) || descriptionTokens[1] .equalsIgnoreCase(ReqFunctionOverData.ruleRuleIfThenFunc))) { logger.debug("Clearing up values for gw funct name: " + tmpGwFunct.getfuncName()); // cleanup of the output list (it should however already be empty, now that we rightfully only poll the WSI for node level functions) tmpResForGWFunct.getOut().clear(); tmpResForGWFunct.getAllResultsforFunct().clear(); //after cleanup of output list logger.debug("Filling values for funct name: " + tmpGwFunct.getfuncName()); if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.ruleRuleBinaryAndFunc)) { //TODO: handle a binary rule (condition1 and condition2) } else if (descriptionTokens[1].equalsIgnoreCase(ReqFunctionOverData.ruleRuleIfThenFunc)) { logger.debug("Filling values for funct name: " + tmpGwFunct.getfuncName()); // handle an if-then rule (if condition1 holds, then act) // 1: check if the referenced function has results that meet the conditions in its threshold int consideredValues = 0; int refFunct = ReqFunctionOverData.unknownFuncId; try { refFunct = Integer.valueOf(descriptionTokens[2]); } catch (Exception exfrtm) { logger.error("Reference function id was set as unknown!"); } HashMap<String, Long> capToTsFromMinLong = new HashMap<String, Long>(); HashMap<String, Long> capToTsToMaxLong = new HashMap<String, Long>(); HashMap<String, Long> capToConditionValueLong = new HashMap<String, Long>(); // Iterator<ReqResultOverData> resultsIter002 = allUniqueFunctionsWithResults.iterator(); while (resultsIter002.hasNext()) { ReqResultOverData tmpRes = resultsIter002.next(); if (tmpRes.getFidInt() == refFunct) { // for every GENERIC capability requested (the generic capability is coded as hashCode()) for (String currCapSidStr : gwLevelFunctToCapsList .get(tmpGwFunct.getfuncName())) { if (!capToConditionValueLong.containsKey(currCapSidStr)) { capToTsFromMinLong.put(currCapSidStr, Long.valueOf(Long.MAX_VALUE)); capToTsToMaxLong.put(currCapSidStr, Long.valueOf(Long.MIN_VALUE)); capToConditionValueLong.put(currCapSidStr, Long.valueOf(0)); } Iterator<OutType> tmpOutItemIter = tmpRes.getOut().iterator(); while (tmpOutItemIter.hasNext()) { ResultAggrStruct tmpOutItem = new ResultAggrStruct( tmpOutItemIter.next()); if (currCapSidStr.trim().equalsIgnoreCase(tmpOutItem.getSid().trim())) { try { // TODO: Actually here we need to find, in the original reqFunctionVec (which contains the full function definitions, not just the function id), // the thresholds that were set.
Previously we searched for the thresholds in the referenced function, but now (better) we get them from this function (if_then) boolean foundTheCurrentFunctionInTheOriginalReqFunctionVec = false; long longValOfSensor = Long.parseLong(tmpOutItem.getVal()); ReqFunctionOverData currentFunctionInCondition = null; for (int kx1 = 0; kx1 < reqFunctionVec.size(); kx1++) { if (reqFunctionVec.elementAt(kx1) .getfuncId() == tmpResForGWFunct.getFidInt()) { currentFunctionInCondition = reqFunctionVec .elementAt(kx1); foundTheCurrentFunctionInTheOriginalReqFunctionVec = true; break; } } // but also find the referenced function of the condition, to include its details in the notification boolean foundTheReferencedFunctionInTheOriginalReqFunctionVec = false; ReqFunctionOverData referencedFunctionInCondition = currentFunctionInCondition; // fallback to the current function if the referenced one is not found for (int kx1 = 0; kx1 < reqFunctionVec.size(); kx1++) { if (reqFunctionVec.elementAt(kx1) .getfuncId() == refFunct) { referencedFunctionInCondition = reqFunctionVec .elementAt(kx1); foundTheReferencedFunctionInTheOriginalReqFunctionVec = true; break; } } if (foundTheCurrentFunctionInTheOriginalReqFunctionVec) // the current function here must have a threshold field because it is an evaluation of a condition { if (currentFunctionInCondition != null && currentFunctionInCondition .getThresholdField() != null && !currentFunctionInCondition.getThresholdField() .isEmpty()) { logger.debug( "-------- INTO EVALUATING CONDITION NOW! "); ThresholdStructure requiredThresholds = new ThresholdStructure( currentFunctionInCondition.getThresholdField()); if (requiredThresholds.getLowerBound() != null && !requiredThresholds.getLowerBound() .isEmpty()) { logger.debug("Condition low parameter: " + requiredThresholds.getLowerBound() .trim()); // TODO: handle other conditions for services (lower than, equals, between) long lowbound = Long.parseLong( requiredThresholds.getLowerBound()); if (longValOfSensor >= lowbound) { logger.debug("Sensor: " + tmpOutItem.getMid() + ". Condition is met: " + Long.toString(longValOfSensor) + " >= " + requiredThresholds .getLowerBound().trim()); consideredValues = 1; ResultAggrStruct thisAggrResult = new ResultAggrStruct( tmpOutItem.getMid(), Integer.valueOf(currCapSidStr), Long.toString(longValOfSensor), consideredValues, new TimeIntervalStructure(new Timestamp( Long.valueOf(tmpOutItem.getTis() .getFromTimestamp() .getTime())), new Timestamp(Long.valueOf( tmpOutItem.getTis() .getToTimestamp() .getTime())))); tmpResForGWFunct.getOut().add(thisAggrResult); // DONE: Send an alert notification NotificationsFromVSNs newNotify = new NotificationsFromVSNs(); newNotify.setQueryDefId(pQueryDefId); newNotify.setVgwID(myPeerId); // get continuation info.
Careful: we have not yet replaced the replacement IDs with the original nodes in the measurements here (that is done later), // but we have to set the MoteId to the original id and the replacementId to the replacement node String[] replaceItem = getLocalReplacemntInfoListItem( localReplacedResources, tmpOutItem.getMid(), tmpOutItem.getSid()); if (replaceItem != null && replaceItem[0] != null && !replaceItem[0].isEmpty() && replaceItem[0].compareToIgnoreCase( replaceItem[1]) != 0) { newNotify.setMoteID(replaceItem[0]); newNotify.setReplacmntID( tmpOutItem.getMid()); } else { newNotify.setMoteID(tmpOutItem.getMid()); newNotify.setReplacmntID(""); } newNotify.setValue(longValOfSensor); if (tmpOutItem.getTis() != null && tmpOutItem .getTis().isTimestampFromDefined()) newNotify.setValueTimestamp( Long.toString(tmpOutItem.getTis() .getFromTimestamp() .getTime())); newNotify.setBoundValue(lowbound); newNotify.setRefFunctName( referencedFunctionInCondition .getfuncName()); newNotify.setRefFunctTriggerSign("gt"); // default for lower bound conditions newNotify.setCapabilityCode( tmpOutItem.getSid().trim()); newNotify.setTimestamp(Long .toString(System.currentTimeMillis())); newNotify.setType( NotificationsFromVSNs.CRITICAL_TYPE); newNotify.setLevel( NotificationsFromVSNs.GATEWAY_LEVEL); newNotify.setRefFunctId( referencedFunctionInCondition .getfuncId()); newNotify.setMessage( "Condition was met for node id: " + newNotify.getMoteID() + " value: " + longValOfSensor + " capability code:__" + tmpOutItem.getSid().trim()); // Send the response to the requesting end user //System.out.println("Sending Notification!"); String notifMsgToSend = NotificationsFromVSNs .getAlertDelimitedString(newNotify); this.sendResponse(notifMsgToSend); } else { logger.debug("Sensor: " + tmpOutItem.getMid() + " with value: " + Long.toString(longValOfSensor) + " does not meet the condition!"); } } } } } catch (Exception e) { logger.error("Invalid format to aggregate"); } } } // // } } } } } } } } // Add trailing section for service deployability and the replacements list // Careful! For the replacements list, skip the entries where a node replaces itself // DONE: RECONSTRUCT the Vector<ReqResultOverData> allUniqueFunctionsWithResults for the original nodes!
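// --- Recap sketch of the if_then evaluation above (hypothetical helper, not part of
// the original flow): the check boils down to one predicate per reading; only ">="
// against the lower bound is implemented above, other comparison operators remain TODO:
/*
static boolean meetsLowerBound(ResultAggrStruct reading, ThresholdStructure thresholds) {
    if (thresholds.getLowerBound() == null || thresholds.getLowerBound().isEmpty()) {
        return false; // no bound configured, nothing to trigger
    }
    long value = Long.parseLong(reading.getVal()); // may throw on non-numeric values
    long bound = Long.parseLong(thresholds.getLowerBound().trim());
    return value >= bound; // corresponds to the "gt" trigger sign in the notification
}
*/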
// // logger.debug("BEFORE RECONSTRUCTION"); if (allUniqueFunctionsWithResults != null) { logger.debug("IN RECONSTRUCTION"); for (ReqResultOverData aResultOverData : allUniqueFunctionsWithResults) { String functionId = aResultOverData.getFid(); // replacing is needed only for node level functions and possibly for if then functions referring to last values of sensors (not for aggregate GW level or if_then over aggregates) // &&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&+++++++++++++++++++++++++++++++++++== /* boolean isGwLevel = false; Iterator<ReqFunctionOverData> gwfunctIterLocal = onlyGwLevelReqFunctVec.iterator(); while(gwfunctIterLocal.hasNext()) //OUTER loop { ReqFunctionOverData tmpReqGwFunct = gwfunctIterLocal.next(); if(Integer.toString(tmpReqGwFunct.getfuncId()).equalsIgnoreCase(functionId)){ isGwLevel = true; break; } } if(!isGwLevel) { */ logger.debug("FID:: " + functionId); if (aResultOverData.getAllResultsforFunct() != null) { if (aResultOverData.getAllResultsforFunct().isEmpty()) { logger.debug("has no results!!"); } else { logger.debug("found results!!"); } Vector<ResultAggrStruct> newReconstructedResultVec = null; boolean foundAtLeastOneResultForSpecificMoteId = false; for (ResultAggrStruct thisResult : aResultOverData.getAllResultsforFunct()) { if (thisResult.getMid() .compareToIgnoreCase(ResultAggrStruct.MidSpecialForAggregateMultipleValues) != 0) { if (!foundAtLeastOneResultForSpecificMoteId) { foundAtLeastOneResultForSpecificMoteId = true; newReconstructedResultVec = new Vector<ResultAggrStruct>(); } String[] replaceItem = getLocalReplacemntInfoListItem(localReplacedResources, thisResult.getMid(), thisResult.getSid()); if (replaceItem != null && replaceItem[0] != null && !replaceItem[0].isEmpty()) { logger.debug("Back to replacing node :" + thisResult.getMid() + " with original node: " + replaceItem[0]); thisResult.setMid(replaceItem[0]); newReconstructedResultVec.addElement(thisResult); } } } if (foundAtLeastOneResultForSpecificMoteId) { aResultOverData.setAllResultsforFunct(newReconstructedResultVec); } } /* } */ } } // // DEBUG: logger.debug("The gateway has collected results and is ready to send them!"); //return allResultsRead; // Support for various data types is added by the DataTypeAdapter class // ********************** COAP MESSAGES BACK TO GATEWAY ******************************* // ALSO SEND ANY SECURITY MESSAGES // TODO: we could clean the cache after sending these messages (?) 
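// --- Sketch (not part of the original flow): the reconstruction above relies on
// getLocalReplacemntInfoListItem() returning a pair where, by the usage above,
// [0] appears to be the original mote id and [1] the replacement id. Restoring
// the originals is essentially (resultsForOneFunction is a hypothetical local name):
/*
for (ResultAggrStruct res : resultsForOneFunction) {
    String[] pair = getLocalReplacemntInfoListItem(localReplacedResources,
            res.getMid(), res.getSid());
    if (pair != null && pair[0] != null && !pair[0].isEmpty()) {
        res.setMid(pair[0]); // map the replacement node back to the original node
    }
}
*/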
if (!VitroGatewayService.getVitroGatewayService().isWsiTrustCoapMessagingSupport()) { logger.debug("No SUPPORT FOR SENDING TRUST SECURITY INFO back to VSP!"); } if (!VitroGatewayService.getVitroGatewayService().isTrustRoutingCoapMessagingActive()) { logger.debug("No ACTIVATION FOR SENDING TRUST SECURITY INFO back to VSP!"); } if (VitroGatewayService.getVitroGatewayService().isWsiTrustCoapMessagingSupport() && VitroGatewayService.getVitroGatewayService().isTrustRoutingCoapMessagingActive()) { logger.debug("Attempting to send TRUST SECURITY INFO back to VSP!"); HashMap<String, InfoOnTrustRouting> cacheTrustCoapCopy = new HashMap<String, InfoOnTrustRouting>( TrustRoutingQueryService.getInstance().getCachedDirectoryOfTrustRoutingInfo()); String aRefCapCode = ""; int aRefFunctId = 1;// last value is always in the request if (originalMotesAndTheirSensorAndFunctsVec != null) { try { aRefCapCode = originalMotesAndTheirSensorAndFunctsVec.firstElement() .getQueriedSensorIdsAndFuncVec().get(0).getSensorModelid(); } catch (Exception e339) { logger.error("Could not acquire sample capability id for security TRUST alert "); } try { aRefFunctId = originalMotesAndTheirSensorAndFunctsVec.firstElement() .getQueriedSensorIdsAndFuncVec().get(0).getFunctionsOverSensorModelVec().firstElement(); } catch (Exception e339) { logger.error("Could not acquire sample function id for security TRUST alert "); } } if (cacheTrustCoapCopy != null) { for (String sourceNodeId : cacheTrustCoapCopy.keySet()) { InfoOnTrustRouting tmpInfoOnTrust = cacheTrustCoapCopy.get(sourceNodeId); HashMap<String, Integer> tmpParentIdToPFiHM = tmpInfoOnTrust.getParentIdsToPFI(); for (String parentNodeId : tmpParentIdToPFiHM.keySet()) { // TODO: Send a SECURITY notification NotificationsFromVSNs newNotify = new NotificationsFromVSNs(); newNotify.setQueryDefId(pQueryDefId); newNotify.setVgwID(myPeerId); newNotify.setMoteID(sourceNodeId); newNotify.setValue(tmpParentIdToPFiHM.get(parentNodeId)); // TODO: Demo: change to current timestamp which is more reliable newNotify.setValueTimestamp(Long.toString(System.currentTimeMillis())); // the time stamp for the PFI value newNotify.setTimestamp(Long.toString(System.currentTimeMillis())); //the time stamp of the notification //newNotify.setTimestamp(tmpInfoOnTrust.getTimestamp() ); //newNotify.setValueTimestamp(tmpInfoOnTrust.getTimestamp()); newNotify.setType(NotificationsFromVSNs.SECURITY_TYPE); newNotify.setLevel(NotificationsFromVSNs.GATEWAY_LEVEL); // we need sample valid funct ids and capability codes related to this VSN , to associate it at the VSP level with a partial service! newNotify.setRefFunctId(aRefFunctId); newNotify.setCapabilityCode(aRefCapCode); // the message field is here used to store the parent ID. 
newNotify.setMessage(parentNodeId); // Send the response to the requesting end user //System.out.println("Sending Notification!"); String notifMsgToSend = NotificationsFromVSNs.getAlertDelimitedString(newNotify); try { this.sendResponse(notifMsgToSend); logger.debug("Sent one TRUST SECURITY INFO back to VSP!"); } catch (Exception securSendExc) { logger.error("Could not send Security Type notification", securSendExc); } } } } // /* logger.debug("Sending a dummy message security for TRUST-DEBUG"); { //--------------------------------------------------------------------- // TODO: Send a SECURITY notification NotificationsFromVSNs newNotify = new NotificationsFromVSNs(); newNotify.setQueryDefId(pQueryDefId); newNotify.setVgwID(myPeerId); newNotify.setMoteID("urn:wisebed:ctitestbed:0xca2"); newNotify.setValue(400); newNotify.setValueTimestamp(Long.toString(new Date().getTime())); newNotify.setTimestamp(Long.toString(new Date().getTime())); newNotify.setType(NotificationsFromVSNs.SECURITY_TYPE); newNotify.setLevel(NotificationsFromVSNs.GATEWAY_LEVEL); newNotify.setRefFunctId(aRefFunctId); newNotify.setCapabilityCode(aRefCapCode); // the message field is here used to store the parent ID. newNotify.setMessage("urn:wisebed:ctitestbed:0xCC"); // Send the response to the requesting end user //System.out.println("Sending Notification!"); String notifMsgToSend = NotificationsFromVSNs.getAlertDelimitedString(newNotify); try{ this.sendResponse(notifMsgToSend); logger.debug("Sent one TRUST SECURITY INFO back to VSP!"); }catch(Exception securSendExc){ logger.error("Could not send Security Type notification" , securSendExc); } //--------------------------------------------------------------------- } */ } // end of: if we have to send the security CoAP routing trust messages // %%%%%%%%%% DIRECTLY INFORM THE GATEWAY OF PROBLEMATIC DEPLOY STATUS: if (serviceDeployImpossible || serviceDeployContinuationEmployed || serviceDeployPartiallyPossible) { String aRefMote = ""; String aRefCapCode = ""; int aRefFunctId = 1; // last value is always in the request if (originalMotesAndTheirSensorAndFunctsVec != null) { try { aRefMote = originalMotesAndTheirSensorAndFunctsVec.firstElement().getMoteid(); } catch (Exception e339) { logger.error("Could not acquire sample ref node id for DEPLOY ABILITY STATUS alert "); } try { aRefCapCode = originalMotesAndTheirSensorAndFunctsVec.firstElement() .getQueriedSensorIdsAndFuncVec().get(0).getSensorModelid(); } catch (Exception e339) { logger.error("Could not acquire sample capability for DEPLOY ABILITY STATUS alert "); } try { aRefFunctId = originalMotesAndTheirSensorAndFunctsVec.firstElement() .getQueriedSensorIdsAndFuncVec().get(0).getFunctionsOverSensorModelVec().firstElement(); } catch (Exception e339) { logger.error("Could not acquire sample function id for DEPLOY ABILITY STATUS alert "); } } String strMessage = ""; long deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_POSSIBLE_INT; if (serviceDeployImpossible) { strMessage = "The requested VSN cannot be supported by this island: " + myPeerId; // case ResponseAggrMsg.DEPLOY_STATUS_SERVICE_IMPOSSIBLE; deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_IMPOSSIBLE_INT; } else if (serviceDeployContinuationEmployed && serviceDeployPartiallyPossible) { // case ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_CONT_COMBO; strMessage = "The requested VSN is partially supported using service continuation on this island: " + myPeerId; deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_CONT_COMBO_INT; } else if
(serviceDeployContinuationEmployed) { // case ResponseAggrMsg.DEPLOY_STATUS_SERVICE_CONTINUATION; strMessage = "The requested VSN is supported using service continuation on this island: " + myPeerId; deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_CONTINUATION_INT; } else if (serviceDeployPartiallyPossible) { // case ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL; strMessage = "The requested VSN is partially supported on this island: " + myPeerId; deployValue = ResponseAggrMsg.DEPLOY_STATUS_SERVICE_PARTIAL_INT; } // SEND THE NOTIFICATION:: // TODO: Send a DEPLOY_STATUS_TYPE notification NotificationsFromVSNs newNotify = new NotificationsFromVSNs(); newNotify.setQueryDefId(pQueryDefId); newNotify.setVgwID(myPeerId); newNotify.setMoteID(aRefMote); newNotify.setValue(deployValue); // TODO: Demo: change to current timestamp which is more reliable newNotify.setValueTimestamp(Long.toString(System.currentTimeMillis())); // the timestamp for the deploy status value newNotify.setTimestamp(Long.toString(System.currentTimeMillis())); // the timestamp of the notification //newNotify.setTimestamp(tmpInfoOnTrust.getTimestamp() ); //newNotify.setValueTimestamp(tmpInfoOnTrust.getTimestamp()); newNotify.setType(NotificationsFromVSNs.DEPLOY_STATUS_TYPE); newNotify.setLevel(NotificationsFromVSNs.GATEWAY_LEVEL); // we need sample valid funct ids and capability codes related to this VSN, to associate it at the VSP level with a partial service! newNotify.setRefFunctId(aRefFunctId); newNotify.setCapabilityCode(aRefCapCode); // the message field here carries the human-readable deploy status text newNotify.setMessage(strMessage); // Send the response to the requesting end user //System.out.println("Sending Notification!"); String notifMsgToSend = NotificationsFromVSNs.getAlertDelimitedString(newNotify); try { this.sendResponse(notifMsgToSend); logger.debug("Sent one DEPLOY STATUS info back to VSP!"); } catch (Exception securSendExc) { logger.error("Could not send DEPLOY STATUS notification", securSendExc); } } return allUniqueFunctionsWithResults; }
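// --- Illustrative sketch (hypothetical helper, not part of the original class):
// Vector.firstElement() throws java.util.NoSuchElementException on an empty vector,
// which is why the firstElement() calls above are wrapped in try/catch. A
// null-returning alternative keeps such call sites flatter:
private static <T> T firstOrNull(java.util.Vector<T> vec) {
    // note: check-then-act is fine for single-threaded use; under concurrent
    // modification the vector could still empty out between the check and the call
    return (vec == null || vec.isEmpty()) ? null : vec.firstElement();
}
// e.g. QueriedMoteAndSensors first = firstOrNull(originalMotesAndTheirSensorAndFunctsVec);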