List of usage examples for java.util TreeMap size
Signature: int size()
To view the source code for java.util.TreeMap.size(), click the Source Link below.
From source file:snpviewer.SnpViewer.java
/**
 * Prompts the user for an .xlsx destination and exports all SNP calls within the
 * selected chromosomal region to a spreadsheet. The export runs on a background
 * JavaFX Task; progress bar, message label and cancel button are bound to it.
 *
 * @param chromosome chromosome name without the "chr" prefix (prefix is added in the sheet header)
 * @param start      region start coordinate (truncated to int for the SNP lookup)
 * @param end        region end coordinate (truncated to int for the SNP lookup)
 */
public void writeRegionToFile(final String chromosome, final double start, final double end) {
    // Ask the user where to write the region; append .xlsx if it is missing.
    FileChooser fileChooser = new FileChooser();
    FileChooser.ExtensionFilter extFilter = new FileChooser.ExtensionFilter("Excel (*.xlsx)", "*.xlsx");
    fileChooser.getExtensionFilters().add(extFilter);
    fileChooser.setTitle("Write region to Excel file (.xlsx)...");
    File rFile = fileChooser.showSaveDialog(mainWindow);
    if (rFile == null) {
        return; // user cancelled the save dialog
    } else if (!rFile.getName().endsWith(".xlsx")) {
        rFile = new File(rFile.getAbsolutePath() + ".xlsx");
    }
    final File regionFile = rFile;
    // NOTE(review): raw Task — should be Task<Boolean> to avoid unchecked warnings.
    final Task<Boolean> writeTask = new Task() {
        @Override
        protected Boolean call() throws Exception {
            try {
                updateProgress(-1, -1); // indeterminate until the file count is known
                ArrayList<SnpFile> bothFiles = new ArrayList<>();
                bothFiles.addAll(affFiles);
                bothFiles.addAll(unFiles);
                // NOTE(review): raw TreeMap constructor — should be new TreeMap<>().
                // coordMap: key is position; inner map maps input filename -> call.
                TreeMap<Integer, HashMap<String, String>> coordMap = new TreeMap();
                // coordToId: position -> rsID of the SNP at that position.
                HashMap<Integer, String> coordToId = new HashMap<>();
                double progress = 0;
                double total = bothFiles.size() * 5;
                try {
                    // NOTE(review): 'out' is only closed on the success path; an
                    // exception before out.close() leaks the stream. Consider
                    // try-with-resources.
                    BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(regionFile));
                    Workbook wb = new XSSFWorkbook();
                    Sheet sheet = wb.createSheet();
                    int rowNo = 0;
                    Row row = sheet.createRow(rowNo++);
                    // Pass 1: collect calls per position from every input file.
                    for (SnpFile f : bothFiles) {
                        if (isCancelled()) {
                            return false;
                        }
                        updateProgress(++progress, total);
                        updateMessage("Reading region in " + f.inputFile.getName());
                        List<SnpFile.SnpLine> lines = f.getSnpsInRegion(chromosome, (int) start, (int) end);
                        for (SnpFile.SnpLine snpLine : lines) {
                            if (isCancelled()) {
                                return false;
                            }
                            Integer coord = snpLine.getPosition();
                            if (!coordMap.containsKey(coord)) {
                                coordMap.put(coord, new HashMap<String, String>());
                            }
                            String filename = f.inputFile.getName();
                            String rsId = snpLine.getId();
                            String call = snpLine.getCall();
                            coordMap.get(coord).put(filename, call);
                            coordToId.put(coord, rsId);
                        }
                    }
                    // Header rows: region span, first/last rsIDs, then column titles.
                    // NOTE(review): coordMap.firstKey() throws NoSuchElementException
                    // if the region contained no SNPs — caught by the broad catch below.
                    Cell cell = row.createCell(0);
                    cell.setCellValue(
                            "chr" + chromosome + ":" + coordMap.firstKey() + "-" + coordMap.lastKey());
                    row = sheet.createRow(rowNo++);
                    cell = row.createCell(0);
                    cell.setCellValue(
                            coordToId.get(coordMap.firstKey()) + ";" + coordToId.get(coordMap.lastKey()));
                    row = sheet.createRow(rowNo++);
                    int colNo = 0;
                    cell = row.createCell(colNo++);
                    cell.setCellValue("Position");
                    cell = row.createCell(colNo++);
                    cell.setCellValue("rsID");
                    for (SnpFile f : bothFiles) {
                        cell = row.createCell(colNo++);
                        if (f.getSampleName() != null && f.getSampleName().length() > 0) {
                            cell.setCellValue(f.getSampleName());
                        } else {
                            cell.setCellValue(f.getInputFileName());
                        }
                    }
                    progress = coordMap.size();
                    total = 5 * coordMap.size();
                    updateMessage("Writing region to file...");
                    // Pass 2: one row per position (TreeMap iteration is sorted by position).
                    for (Entry current : coordMap.entrySet()) {
                        if (isCancelled()) {
                            return false;
                        }
                        progress += 4;
                        updateProgress(progress, total);
                        row = sheet.createRow(rowNo++);
                        colNo = 0;
                        Integer coord = (Integer) current.getKey();
                        cell = row.createCell(colNo++);
                        cell.setCellValue(coord);
                        String rsId = coordToId.get(coord);
                        cell = row.createCell(colNo++);
                        cell.setCellValue(rsId);
                        HashMap<String, String> fileToCall = (HashMap<String, String>) current.getValue();
                        for (SnpFile f : bothFiles) {
                            cell = row.createCell(colNo++);
                            if (fileToCall.containsKey(f.inputFile.getName())) {
                                cell.setCellValue(fileToCall.get(f.inputFile.getName()));
                            } else {
                                cell.setCellValue("-"); // no call for this file at this position
                            }
                        }
                    }
                    // Colour-code the genotype call cells: AA green, BB blue, AB rose.
                    CellRangeAddress[] regions = { new CellRangeAddress(0, rowNo, 2, 2 + bothFiles.size()) };
                    SheetConditionalFormatting sheetCF = sheet.getSheetConditionalFormatting();
                    ConditionalFormattingRule rule1 = sheetCF
                            .createConditionalFormattingRule(ComparisonOperator.EQUAL, "\"AA\"");
                    PatternFormatting fill1 = rule1.createPatternFormatting();
                    fill1.setFillBackgroundColor(IndexedColors.LIGHT_GREEN.index);
                    fill1.setFillPattern(PatternFormatting.SOLID_FOREGROUND);
                    ConditionalFormattingRule rule2 = sheetCF
                            .createConditionalFormattingRule(ComparisonOperator.EQUAL, "\"BB\"");
                    PatternFormatting fill2 = rule2.createPatternFormatting();
                    fill2.setFillBackgroundColor(IndexedColors.PALE_BLUE.index);
                    fill2.setFillPattern(PatternFormatting.SOLID_FOREGROUND);
                    ConditionalFormattingRule rule3 = sheetCF
                            .createConditionalFormattingRule(ComparisonOperator.EQUAL, "\"AB\"");
                    PatternFormatting fill3 = rule3.createPatternFormatting();
                    fill3.setFillBackgroundColor(IndexedColors.ROSE.index);
                    fill3.setFillPattern(PatternFormatting.SOLID_FOREGROUND);
                    sheetCF.addConditionalFormatting(regions, rule3, rule2);
                    sheetCF.addConditionalFormatting(regions, rule1);
                    wb.write(out);
                    out.close();
                    return true;
                } catch (IOException ex) {
                    // NOTE(review): failure cause is swallowed; only a generic
                    // error dialog is shown to the user.
                    return false;
                }
            } catch (Exception ex) {
                // NOTE(review): broad catch also swallows the cause.
                return false;
            }
        }
    }; // end of task
    setProgressMode(true);
    progressBar.progressProperty().bind(writeTask.progressProperty());
    progressMessage.textProperty().bind(writeTask.messageProperty());
    writeTask.setOnSucceeded(new EventHandler<WorkerStateEvent>() {
        @Override
        public void handle(WorkerStateEvent e) {
            // NOTE(review): '== true' on the boxed Boolean task value relies on
            // Boolean caching; safer to cast and use booleanValue()/equals().
            if (e.getSource().getValue() == true) {
                Dialogs.showInformationDialog(null,
                        "Region written to file " + "(" + regionFile.getName() + ") successfully",
                        "Region Written", "SNP Viewer");
            } else {
                Dialogs.showErrorDialog(null, "Region write failed.", "Write Failed", "SNP Viewer");
            }
            setProgressMode(false);
            progressBar.progressProperty().unbind();
            progressBar.progressProperty().set(0);
            progressMessage.textProperty().unbind();
            progressMessage.setText("");
            progressTitle.setText("");
        }
    });
    writeTask.setOnFailed(new EventHandler<WorkerStateEvent>() {
        @Override
        public void handle(WorkerStateEvent e) {
            setProgressMode(false);
            progressBar.progressProperty().unbind();
            progressBar.progressProperty().set(0);
            progressMessage.textProperty().unbind();
            progressMessage.setText("");
            progressTitle.setText("Region write failed!");
            Dialogs.showErrorDialog(null, "Error writing region to file\n", "Region write error", "SNP Viewer",
                    e.getSource().getException());
        }
    });
    writeTask.setOnCancelled(new EventHandler<WorkerStateEvent>() {
        @Override
        public void handle(WorkerStateEvent e) {
            progressMessage.setText("Region write cancelled");
            progressTitle.setText("Cancelled");
            setProgressMode(false);
            progressBar.progressProperty().unbind();
            progressBar.progressProperty().set(0);
            Dialogs.showErrorDialog(null, "Error writing region to file\n", "Region write error", "SNP Viewer");
        }
    });
    cancelButton.setOnAction(new EventHandler<ActionEvent>() {
        @Override
        public void handle(ActionEvent actionEvent) {
            writeTask.cancel();
        }
    });
    progressTitle.setText("Writing region to .xlsx file");
    new Thread(writeTask).start();
}
From source file:trendanalisis.main.tools.weka.CoreWekaTFIDF.java
/**
 * Converts a single instance into the sparse word-vector output format,
 * without applying document-length normalization.
 *
 * @param instance      the instance to convert
 * @param v             output list to which the converted sparse instance is appended
 * @param indexInstance index of this instance, used to look up its subtitle feature
 * @return the number of non-converted attributes that were copied directly
 */
private int convertInstancewoDocNorm(Instance instance, ArrayList<Instance> v, int indexInstance) {
    // Convert the instance into a sorted set of indexes.
    // contained: output attribute index -> value (sorted so the sparse arrays
    // built at the end are in index order).
    TreeMap<Integer, Double> contained = new TreeMap<Integer, Double>();
    // Copy all non-converted attributes from input to output.
    int firstCopy = 0;
    for (int i = 0; i < getInputFormat().numAttributes(); i++) {
        if (!m_SelectedRange.isInRange(i)) {
            if (getInputFormat().attribute(i).type() != Attribute.STRING
                    && getInputFormat().attribute(i).type() != Attribute.RELATIONAL) {
                // Add simple nominal and numeric attributes directly
                // (zero values are omitted — implicit in a sparse instance).
                if (instance.value(i) != 0.0) {
                    contained.put(new Integer(firstCopy), new Double(instance.value(i)));
                }
            } else {
                if (instance.isMissing(i)) {
                    contained.put(new Integer(firstCopy), new Double(Utils.missingValue()));
                } else if (getInputFormat().attribute(i).type() == Attribute.STRING) {
                    // If this is a string attribute, we have to first add
                    // this value to the range of possible values, then add
                    // its new internal index.
                    if (outputFormatPeek().attribute(firstCopy).numValues() == 0) {
                        // Note that the first string value in a
                        // SparseInstance doesn't get printed.
                        outputFormatPeek().attribute(firstCopy)
                                .addStringValue("Hack to defeat SparseInstance bug");
                    }
                    int newIndex = outputFormatPeek().attribute(firstCopy)
                            .addStringValue(instance.stringValue(i));
                    contained.put(new Integer(firstCopy), new Double(newIndex));
                } else {
                    // Relational attribute: same first-value workaround as above.
                    if (outputFormatPeek().attribute(firstCopy).numValues() == 0) {
                        Instances relationalHeader = outputFormatPeek().attribute(firstCopy).relation();
                        // hack to defeat sparse instances bug
                        outputFormatPeek().attribute(firstCopy).addRelation(relationalHeader);
                    }
                    int newIndex = outputFormatPeek().attribute(firstCopy)
                            .addRelation(instance.relationalValue(i));
                    contained.put(new Integer(firstCopy), new Double(newIndex));
                }
            }
            firstCopy++;
        }
    }
    // posMap: dictionary index -> 1-based position of the word's FIRST
    // occurrence in the tokenized text (only the first is recorded).
    Map<Integer, Integer> posMap = new HashMap<>();
    Bag<String> contents = new HashBag();
    // Tokenize the selected text attributes and accumulate term counts.
    for (int j = 0; j < instance.numAttributes(); j++) {
        // if ((getInputFormat().attribute(j).type() == Attribute.STRING)
        if (m_SelectedRange.isInRange(j) && (instance.isMissing(j) == false)) {
            m_Tokenizer.tokenize(instance.stringValue(j));
            int posWord = 1;
            while (m_Tokenizer.hasMoreElements()) {
                String word = m_Tokenizer.nextElement();
                if (this.m_lowerCaseTokens == true) {
                    word = word.toLowerCase();
                }
                word = m_Stemmer.stem(word);
                // System.out.println(posWord +":"+ word);
                Integer index = m_Dictionary.get(word);
                // NOTE(review): Words.put(index, word) is executed even when the
                // word is not in the dictionary (index == null) — confirm intended.
                Words.put(index, word);
                contents.add(word);
                if (index != null) {
                    if (m_OutputCounts) {
                        // Separate if here rather than two lines down
                        // to avoid hashtable lookup
                        Double count = contained.get(index);
                        if (count != null) {
                            contained.put(index, new Double(count.doubleValue() + 1.0));
                        } else {
                            contained.put(index, new Double(1));
                        }
                    } else {
                        contained.put(index, new Double(1)); // binary presence only
                    }
                    if (!posMap.containsKey(index)) {
                        posMap.put(index, posWord);
                        // posWord++;
                    }
                }
                posWord++;
            }
        }
    }
    // Doing TFTransform
    // ArrayList<Integer> posList= new ArrayList<>(posMap.values());
    // System.out.println(posList);
    /*
     * Subtitle feature (comment translated from Indonesian):
     * instance attribute 0 is the title, 1 is the location, 3 is the date.
     */
    String subtittle = "";
    ArrayList<String> subtittles = null;
    if (ins_fitur_subtitle != null) {
        subtittle = ins_fitur_subtitle.get(indexInstance).stringValue(0);
        subtittles = new ArrayList<>(Arrays.asList(subtittle.split(" ")));
        // subtittles.retainAll(contents);
    } else {
        fitur_subtitle = false; // no subtitle data available for this run
    }
    // TF transform: only applies to word attributes (index >= firstCopy).
    // NOTE(review): the log-scaling line is commented out, so unless
    // isBinary_transform() is set this loop rewrites each value unchanged.
    if (m_TFTransform == true) {
        Iterator<Integer> it = contained.keySet().iterator();
        int set = 0;
        for (; it.hasNext();) {
            Integer index = it.next();
            if (index.intValue() >= firstCopy) {
                double val = contained.get(index).doubleValue();
                if (isBinary_transform()) {
                    val = 1;
                }
                // val = (1 + Math.log(val));
                contained.put(index, new Double(val));
                set++;
            }
        }
    }
    // IDF transform; also fills the side statistics arrays
    // (global_tf, df_prob, IG) for the word attributes.
    double sum = 0;
    if (m_IDFTransform == true) {
        Iterator<Integer> it = contained.keySet().iterator();
        int i = 0;
        for (; it.hasNext();) {
            Integer index = it.next();
            if (index.intValue() >= firstCopy) {
                double val = contained.get(index).doubleValue();
                double valIG = (FeatureSelection.InformationGain(m_DocsCounts[index.intValue()],
                        m_NumInstances));
                // Math.abs(valIG);
                global_tf[index.intValue()] += val;
                df_prob[index.intValue()] = FeatureSelection.IdfProbability((double) m_NumInstances,
                        m_DocsCounts[index.intValue()]);
                IG[index.intValue()] = valIG;
                if (isBinary_transform()) {
                    // NOTE(review): Math.log(1) == 0, which zeroes the term before
                    // the IDF multiplication below — confirm this is intended.
                    val = 1;
                    val = (Math.log(val));
                }
                // Classic TF * (1 + log(N / df)) weighting.
                val = val * (1 + Math.log((double) m_NumInstances / (double) m_DocsCounts[index.intValue()]));
                if (isStat_pos_word()) {
                    // Boost words that appear earlier in the document.
                    val = val + (1 / Math.sqrt(posMap.get(index)));
                }
                contained.put(index, new Double(val));
                // global_tf[index.intValue()]= (1+ Math.log(m_NumInstances / (double) m_DocsCounts[index.intValue()]));
                i++;
            }
        }
    }
    // System.out.println("-------------------");
    // Convert the set to structures needed to create a sparse instance.
    double[] values = new double[contained.size()];
    int[] indices = new int[contained.size()];
    Iterator<Integer> it = contained.keySet().iterator();
    for (int i = 0; it.hasNext(); i++) {
        Integer index = it.next();
        Double value = contained.get(index);
        values[i] = value.doubleValue();
        indices[i] = index.intValue();
        //df_prob[index.intValue()] = df_prob[index.intValue()]/m_DocsCounts[index.intValue()];
    }
    Instance inst = new SparseInstance(instance.weight(), values, indices, outputFormatPeek().numAttributes());
    inst.setDataset(outputFormatPeek());
    v.add(inst);
    return firstCopy;
}
From source file:org.opensextant.extractors.geo.GazetteerMatcher.java
/**
 * Geotag a document, returning PlaceCandidates for the mentions in the
 * document. Optionally just return the PlaceCandidates with name only and
 * no Place objects attached. Names of continents are passed back as matches,
 * with geo matches. Continents are filtered out by default.
 *
 * @param buffer
 *            text
 * @param docid
 *            identity of the text
 * @param tagOnly
 *            True if you wish to get the matched phrases only. False if you
 *            want the full list of Place Candidates.
 * @param fld
 *            gazetteer field to use for tagging
 * @return place_candidates List of place candidates
 * @throws ExtractionException
 *             on err
 */
public List<PlaceCandidate> tagText(String buffer, String docid, boolean tagOnly, String fld)
        throws ExtractionException {
    // Example tagger response shape:
    // "tagsCount":10, "tags":[{ "ids":[35], "endOffset":40, "startOffset":38},
    // { "ids":[750308, 2769912, 2770041, 10413973, 10417546], "endOffset":49, "startOffset":41},
    // ...
    // "matchingDocs":{"numFound":75, "start":0, "docs":[ {
    // "place_id":"USGS1992921", "name":"Monterrey", "cc":"PR"}, {
    // "place_id":"USGS1991763", "name":"Monterrey", "cc":"PR"}, ]
    // Reset counts.
    this.defaultFilterCount = 0;
    this.userFilterCount = 0;
    // during post-processing tags we may have to distinguish between tagging/tokenizing
    // general vs. cjk vs. ar. But not yet though.
    // boolean useGeneralMode = DEFAULT_TAG_FIELD.equals(fld);
    long t0 = System.currentTimeMillis();
    log.debug("TEXT SIZE = {}", buffer.length());
    int[] textMetrics = TextUtils.measureCase(buffer);
    boolean isUpperCase = TextUtils.isUpperCaseDocument(textMetrics);
    params.set("field", fld);
    // beanMap: solr record id -> gazetteer bean, filled by the tagger call.
    Map<Integer, Object> beanMap = new HashMap<Integer, Object>(100);
    QueryResponse response = tagTextCallSolrTagger(buffer, docid, beanMap);
    @SuppressWarnings("unchecked")
    List<NamedList<?>> tags = (List<NamedList<?>>) response.getResponse().get("tags");
    this.tagNamesTime = response.getQTime();
    long t1 = t0 + tagNamesTime;
    long t2 = System.currentTimeMillis();
    boolean geocode = !tagOnly;
    /*
     * Retrieve all offsets into a long list. These offsets will report a
     * text span and all the gazetteer record IDs that are associated to
     * that span. The text could either be a name, a code or some other
     * abbreviation.
     *
     * For practical reasons the default behavior is to filter trivial spans
     * given the gazetteer data that is returned for them.
     *
     * WARNING: lots of optimizations occur here due to the potentially
     * large volume of tags and gazetteer data that is involved. And this is
     * relatively early in the pipeline.
     */
    log.debug("DOC={} TAGS SIZE={}", docid, tags.size());
    // Keyed by match start offset; TreeMap keeps candidates in document order.
    TreeMap<Integer, PlaceCandidate> candidates = new TreeMap<Integer, PlaceCandidate>();
    // names matched is used only for debugging, currently.
    Set<String> namesMatched = new HashSet<>();
    tagLoop: for (NamedList<?> tag : tags) {
        int x1 = (Integer) tag.get("startOffset");
        int x2 = (Integer) tag.get("endOffset");
        int len = x2 - x1;
        if (len == 1) {
            // Ignoring place names whose length is less than 2 chars
            ++this.defaultFilterCount;
            continue;
        }
        // +1 char after last matched
        // Could have enabled the "matchText" option from the tagger to get
        // this, but since we already have the content as a String then
        // we might as well not make the tagger do any more work.
        String matchText = (String) tag.get("matchText");
        // Get char immediately following match, for light NLP rules.
        char postChar = 0;
        if (x2 < buffer.length()) {
            postChar = buffer.charAt(x2);
        }
        // Then filter out trivial matches. E.g., Us is filtered out. vs. US would
        // be allowed. If lowercase abbreviations are allowed, then all matches are passed.
        if (len < 3) {
            if (TextUtils.isASCII(matchText) && !StringUtils.isAllUpperCase(matchText)
                    && !allowLowercaseAbbrev) {
                ++this.defaultFilterCount;
                continue;
            }
        }
        if (TextUtils.countFormattingSpace(matchText) > 1) {
            // Phrases with words broken across more than one line are not
            // valid matches.
            // Phrase with a single TAB is okay
            ++this.defaultFilterCount;
            continue;
        }
        // Eliminate any newlines and extra whitespace in match
        matchText = TextUtils.squeeze_whitespace(matchText);
        /*
         * Filter out trivial tags. Due to normalization, we tend to get
         * lots of false positives that can be eliminated early.
         */
        if (filter.filterOut(matchText)) {
            ++this.defaultFilterCount;
            continue;
        }
        PlaceCandidate pc = new PlaceCandidate();
        pc.start = x1;
        pc.end = x2;
        pc.setText(matchText);
        /*
         * Filter out tags that user determined ahead of time as not-places
         * for their context.
         */
        if (userfilter != null) {
            if (userfilter.filterOut(pc.getTextnorm())) {
                log.debug("User Filter:{}", matchText);
                ++this.userFilterCount;
                continue;
            }
        }
        /*
         * Continent filter is needed, as many mentions of continents confuse
         * real geotagging/geocoding. Continents are kept but marked filtered-out.
         */
        if (continents.filterOut(pc.getTextnorm())) {
            pc.isContinent = true;
            pc.setFilteredOut(true);
            candidates.put(pc.start, pc);
            continue;
        }
        /*
         * Found UPPER CASE text in a mixed-cased document.
         * Conservatively, this is likely an acronym or some heading.
         * But possibly still a valid place name.
         * HEURISTIC: acronyms are relatively short.
         * HEURISTIC: region codes can be acronyms and are valid places
         *
         * using such place candidates you may score short acronym matches lower than fully named ones
         * when inferring boundaries (states, provinces, etc)
         */
        if (!isUpperCase && pc.isUpper() && len < 5) {
            pc.isAcronym = true;
        }
        /*
         * Everything Else.
         */
        pc.setSurroundingTokens(buffer);
        @SuppressWarnings("unchecked")
        List<Integer> placeRecordIds = (List<Integer>) tag.get("ids");
        /*
         * This assertion is helpful in debugging: assert
         * placeRecordIds.size() == new
         * HashSet<Integer>(placeRecordIds).size() : "ids should be unique";
         */
        // assert!placeRecordIds.isEmpty();
        namesMatched.clear();
        //double maxNameBias = 0.0;
        for (Integer solrId : placeRecordIds) {
            // Yes, we must cast here.
            // As long as createTag generates the correct type stored in
            // beanMap we are fine.
            ScoredPlace pGeo = (ScoredPlace) beanMap.get(solrId);
            // assert pGeo != null;
            // Optimization: abbreviation filter.
            //
            // Do not add PlaceCandidates for lower case tokens that are
            // marked as Abbreviations, unless flagged to do so.
            //
            // DEFAULT behavior is to avoid lower case text that is tagged
            // as an abbreviation in gazetteer,
            //
            // Common terms: in, or, oh, me, us, we, etc. Are all not
            // typically place names or valid abbreviations in text.
            if (!allowLowercaseAbbrev && pGeo.isAbbreviation() && pc.isLower()) {
                log.debug("Ignore lower case term={}", pc.getText());
                // DWS: TODO what if there is another pGeo for this pc that
                // isn't an abbrev? Therefore shouldn't we continue this
                // loop and not tagLoop?
                continue tagLoop;
            }
            /*
             * If text match contains "." and it matches any abbreviation,
             * mark the candidate as an abbrev. TODO: Possibly best confirm
             * this by sentence detection, as well. However, this pertains
             * to text spans that contain "." within the bounds, and not
             * likely an ending. E.g., "U.S." or "U.S" are trivial examples;
             * "US" is more ambiguous, as we need to know if document is
             * upperCase.
             *
             * Any place abbreviation will trigger isAbbreviation = true
             *
             * "IF YOU FIND US HERE" the term 'US' is ambiguous here, so
             * it is not classified as an abbreviation. Otherwise if you have
             * "My organization YAK happens to coincide with a place named Yak.
             * But we first must determine if 'YAK' is a valid abbreviation for an actual place.
             * HEURISTIC: place abbreviations are relatively short, e.g. one word(len=7 or less)
             */
            if (len < 8 && !pc.isAbbreviation) {
                assessAbbreviation(pc, pGeo, postChar, isUpperCase);
            }
            if (log.isDebugEnabled()) {
                namesMatched.add(pGeo.getName());
            }
            /**
             * Country names are the only names you can reasonably set ahead
             * of time. All other names need to be assessed in context.
             * Negate country names, e.g., "Georgia", by exception.
             */
            if (pGeo.isCountry()) {
                pc.isCountry = true;
            }
            if (geocode) {
                pGeo.defaultHierarchicalPath();
                // Default score for geo will be calculated in PlaceCandidate
                pc.addPlace(pGeo);
            }
        }
        // If geocoding, skip this PlaceCandidate if has no places (e.g. due
        // to filtering)
        if (geocode && !pc.hasPlaces()) {
            log.debug("Place has no places={}", pc.getText());
            continue;
        } else {
            if (log.isDebugEnabled()) {
                log.debug("Text {} matched {}", pc.getText(), namesMatched);
            }
        }
        candidates.put(pc.start, pc);
    } // for tag
    long t3 = System.currentTimeMillis();
    // this.tagNamesTime = (int)(t1 - t0);
    this.getNamesTime = (int) (t2 - t1);
    this.totalTime = (int) (t3 - t0);
    if (log.isDebugEnabled()) {
        summarizeExtraction(candidates.values(), docid);
    }
    this.filteredTotal += this.defaultFilterCount + this.userFilterCount;
    this.matchedTotal += candidates.size();
    return new ArrayList<PlaceCandidate>(candidates.values());
}
From source file:ubic.gemma.persistence.service.association.coexpression.CoexpressionDaoImpl.java
/**
 * Persists gene-level coexpression links for one experiment: creates new links
 * (with flipped duplicates) and increments support on existing ones, in batches,
 * then saves the corresponding experiment-level links and evicts affected genes
 * from the cache. Assumes {@code links} is the complete set for this experiment
 * and that no links for it exist yet (throws IllegalStateException otherwise).
 */
@Override
public void createOrUpdate(BioAssaySet bioAssaySet, List<NonPersistentNonOrderedCoexpLink> links, LinkCreator c,
        Set<Gene> genesTested) {
    // assumption is that these are _all_ the links for this experiment
    assert !links.isEmpty();
    assert bioAssaySet != null;
    assert c != null;
    Collections.sort(links);
    Session sess = this.getSessionFactory().getCurrentSession();
    sess.setCacheMode(CacheMode.IGNORE); // bypass 2nd-level cache for this bulk work
    // to determine the species
    Gene gene = (Gene) sess.get(Gene.class, links.iterator().next().getFirstGene());
    String geneLinkClassName = CoexpressionQueryUtils.getGeneLinkClassName(gene);
    /*
     * Check that there are no links for this experiment.
     */
    if (this.countLinks(gene.getTaxon(), bioAssaySet) > 0) {
        throw new IllegalStateException(
                "There are already links for given bioAssaySet; they must be deleted before proceeding");
    }
    /*
     * Attempt to save database trips
     */
    Map<NonPersistentNonOrderedCoexpLink, Boolean> existingResults = this.preFetch(links);
    String s = "from " + geneLinkClassName
            + " where firstGene =:f and secondGene=:s and positiveCorrelation=:pc";
    Query q = sess.createQuery(s);
    // Native SQL used for the flipped (secondGene->firstGene) support update.
    SQLQuery updateFlippedLinkQuery = sess
            .createSQLQuery("UPDATE " + CoexpressionQueryUtils.getGeneLinkTableName(gene.getTaxon())
                    + " SET SUPPORT=:s WHERE FIRST_GENE_FK=:g2 AND SECOND_GENE_FK=:g1 AND POSITIVE=:po");
    // map of linkid to links, for establishing the EE-level links.
    TreeMap<Long, NonPersistentNonOrderedCoexpLink> linkIds = new TreeMap<>(); // keep order so for this experiment
    // they are in order.
    Set<Long> seenExistingLinks = new HashSet<>(); // for sanity checks.
    Set<NonPersistentNonOrderedCoexpLink> seenNewLinks = new HashSet<>(); // for sanity checks.
    Set<SupportDetails> seenNewSupportDetails = new HashSet<>(); // for sanity checks.
    int numNew = 0;
    int numUpdated = 0;
    int progress = 0;
    int BATCH_SIZE = 1024; // make a multiple of jdbc batch size...
    Map<SupportDetails, Gene2GeneCoexpression> batchToCreate = new LinkedHashMap<>();
    List<Gene2GeneCoexpression> newFlippedLinks = new ArrayList<>();
    Set<Long> genesWithUpdatedData = new HashSet<>();
    sess.flush();
    sess.clear();
    // for each link see if there is already an entry; make a new one if necessary or update the old one.
    CoexpressionDaoImpl.log.info("Starting link processing");
    for (NonPersistentNonOrderedCoexpLink proposedG2G : links) {
        Long firstGene = proposedG2G.getFirstGene();
        Long secondGene = proposedG2G.getSecondGene();
        // There is an index for f+s, but querying one-at-a-time is going to be slow. I attempted to speed it up by
        // fetching all links for a gene when we see it, but this causes problems with data being stale. Prefetching
        // with just the ability to tell if a link is new or not takes a lot of memory and doesn't speed things up
        // much. Trying keeping an index of which links a gene has, so we know whether we need to check the database
        // or not.
        //
        // Currently it takes about 1 minute to process 10k links on a relatively small database, much of this is
        // the findLink call.
        Gene2GeneCoexpression existingLink = this.findLink(q, proposedG2G, existingResults);
        /*
         * To speed this up?
         *
         * - Fetch all links for a gene in one batch, instead of looping over them one at a time. The problem is the
         * flipped links involve other genes that we fetch later in the same transaction, and this all has to be
         * done in one transaction. I experimented with this already
         */
        if (existingLink == null) {
            // initialize the supportdetails
            SupportDetails sd = c.createSupportDetails(firstGene, secondGene,
                    proposedG2G.isPositiveCorrelation());
            sd.addEntity(bioAssaySet.getId());
            assert sd.getNumIds() > 0;
            assert sd.isIncluded(bioAssaySet.getId());
            // Must be unique
            assert !seenNewSupportDetails.contains(sd) : "Already saw " + sd + " while processing " + proposedG2G;
            assert proposedG2G.getLink() != null;
            batchToCreate.put(sd, proposedG2G.getLink());
            if (seenNewLinks.contains(proposedG2G)) {
                CoexpressionDaoImpl.log.warn(
                        "The data passed had the same new link represented more than once: " + proposedG2G);
                continue;
            }
            seenNewSupportDetails.add(sd);
            seenNewLinks.add(proposedG2G);
            if (CoexpressionDaoImpl.log.isDebugEnabled())
                CoexpressionDaoImpl.log.debug("New: " + proposedG2G);
            numNew++;
        } else {
            // This code assumes that the flipped version is in the database, but we don't retrieve it
            // yet. also note that the support of the existing link could be zero, if DELETE_ORPHAN_LINKS = false
            // (or if initializeLinksFromExistingData was used)
            // Sanity check. If this happens, there must be two versions of the same link already in the input.
            if (seenExistingLinks.contains(existingLink.getId())) {
                throw new IllegalStateException(
                        "The data passed had the same existing link represented more than once: "
                                + existingLink);
            }
            /* sanity check that we aren't adding dataset twice; we might be able make this an assertion instead. */
            if (existingLink.isSupportedBy(bioAssaySet)) {
                throw new IllegalStateException("Support for this experiment already exists for " + existingLink
                        + ", must be deleted first");
            }
            // cache old support for sanity check
            int oldSupport = existingLink.getSupportDetails().getNumIds();
            // update the support
            existingLink.getSupportDetails().addEntity(bioAssaySet.getId());
            existingLink.updateNumDatasetsSupporting();
            // there is no cascade... on purpose.
            sess.update(existingLink.getSupportDetails());
            assert oldSupport + 1 == existingLink.getNumDatasetsSupporting();
            assert existingLink.getSupportDetails().getNumIds() == oldSupport + 1;
            // track so we add corresponding Experiment-level links later.
            linkIds.put(existingLink.getId(), new NonPersistentNonOrderedCoexpLink(existingLink));
            seenExistingLinks.add(existingLink.getId());
            /*
             * The flipped link is asserted to be in the database. The support details is already dealt with; we
             * just have to update the support value.
             */
            int numFlippedUpdated = updateFlippedLinkQuery
                    .setParameter("s", existingLink.getNumDatasetsSupporting())
                    .setParameter("g2", proposedG2G.getSecondGene())
                    .setParameter("g1", proposedG2G.getFirstGene())
                    .setParameter("po", proposedG2G.isPositiveCorrelation() ? 1 : 0).executeUpdate();
            assert numFlippedUpdated == 1 : "Flipped link missing for " + proposedG2G + " [" + numFlippedUpdated
                    + "]";
            numUpdated++;
            if (CoexpressionDaoImpl.log.isDebugEnabled())
                CoexpressionDaoImpl.log.debug("Updated: " + proposedG2G);
        }
        genesWithUpdatedData.add(firstGene);
        genesWithUpdatedData.add(secondGene);
        if (++progress % 5000 == 0) {
            CoexpressionDaoImpl.log.info("Processed " + progress + "/" + links.size() + " gene-level links..."
                    + numUpdated + " updated, " + numNew + " new");
        }
        // Flush in batches to keep the session small.
        if (batchToCreate.size() >= BATCH_SIZE) {
            newFlippedLinks.addAll(this.saveBatchAndMakeFlipped(sess, linkIds, batchToCreate, c));
        } else if (numUpdated > 0 && numUpdated % BATCH_SIZE == 0) {
            sess.flush();
            sess.clear();
        }
    } // loop over links
    // tail end batch
    if (!batchToCreate.isEmpty()) {
        // we make the flipped links later to optimize their ordering.
        newFlippedLinks.addAll(this.saveBatchAndMakeFlipped(sess, linkIds, batchToCreate, c));
    }
    // flush the updated ones one last time...
    if (numUpdated > 0) {
        sess.flush();
        sess.clear();
    }
    assert links.size() == linkIds.size();
    CoexpressionDaoImpl.log.info(numUpdated + " updated, " + numNew + " new links");
    /*
     * sort and save the accumulated new flipped versions of the new links, which reuse the supportDetails. In the
     * flipped links, the first gene is the second gene and vice versa. Continue to accumulate the flipped links.
     */
    CoexpressionDaoImpl.log.info("Saving " + newFlippedLinks.size() + " flipped versions of new links ...");
    Collections.sort(newFlippedLinks, new Comparator<Gene2GeneCoexpression>() {
        @Override
        public int compare(Gene2GeneCoexpression o1, Gene2GeneCoexpression o2) {
            return o1.getFirstGene().compareTo(o2.getFirstGene());
        }
    });
    progress = 0;
    for (Gene2GeneCoexpression gl : newFlippedLinks) {
        sess.save(gl);
        if (++progress % 5000 == 0) {
            CoexpressionDaoImpl.log.info("Processed " + progress + "/" + newFlippedLinks.size()
                    + " new flipped gene-level links...");
        }
        if (progress % BATCH_SIZE == 0) {
            sess.flush();
            sess.clear();
        }
    }
    /*
     * Save experiment-level links
     */
    CoexpressionDaoImpl.log
            .info("Saving " + linkIds.size() + " experiment-level links (plus flipped versions) ...");
    this.saveExperimentLevelLinks(sess, c, linkIds, bioAssaySet);
    if (genesTested != null)
        this.updatedTestedIn(bioAssaySet, genesTested);
    this.updateGeneCoexpressedWith(links);
    // kick anything we updated out of the cache.
    int numRemovedFromCache = this.gene2GeneCoexpressionCache.remove(genesWithUpdatedData);
    if (numRemovedFromCache > 0)
        CoexpressionDaoImpl.log.info(numRemovedFromCache + " results evicted from cache");
    // flush happens on commit...
    CoexpressionDaoImpl.log.info("Done, flushing changes ...");
}
From source file:org.xframium.spi.RunDetails.java
public synchronized void writeHTMLIndex(File rootFolder, boolean complete) { Collections.sort(detailsList, new RunComparator()); int runTime = (int) System.currentTimeMillis() - (int) startTime; TreeMap<String, int[]> caseMap = new TreeMap<String, int[]>(); TreeMap<String, int[]> deviceMap = new TreeMap<String, int[]>(); TreeMap<String, int[]> osMap = new TreeMap<String, int[]>(); TreeMap<String, int[]> envMap = new TreeMap<String, int[]>(); int[] stepBreakdown = new int[3]; int[] failureBreakdown = new int[5]; int successCount = 0; for (int i = 0; i < detailsList.size(); i++) { String runKey = (String) detailsList.get(i)[0]; Device device = (Device) detailsList.get(i)[1]; int success = (int) detailsList.get(i)[2]; stepBreakdown[0] += (int) detailsList.get(i)[3]; stepBreakdown[1] += (int) detailsList.get(i)[4]; stepBreakdown[2] += (int) detailsList.get(i)[5]; long startTime = (long) detailsList.get(i)[6]; long stopTime = (long) detailsList.get(i)[7]; failureBreakdown[0] += (int) detailsList.get(i)[8]; failureBreakdown[1] += (int) detailsList.get(i)[9]; failureBreakdown[2] += (int) detailsList.get(i)[10]; failureBreakdown[3] += (int) detailsList.get(i)[11]; failureBreakdown[4] += (int) detailsList.get(i)[12]; String deviceKey = device.getEnvironment(); int[] caseValue = caseMap.get(runKey); if (caseValue == null) { caseValue = new int[] { 0, 0, 0, 0, 0 }; caseMap.put(runKey, caseValue); }/*from w ww .j a v a2s. 
c o m*/ if (success == 1) caseValue[0]++; else if (success == 2) caseValue[1]++; else caseValue[4]++; caseValue[2]++; caseValue[3] += (stopTime - startTime); caseValue = envMap.get(device.getEnvironment()); if (caseValue == null) { caseValue = new int[] { 0, 0, 0 }; envMap.put(device.getEnvironment(), caseValue); } if (success == 1) caseValue[0]++; else if (success == 2) caseValue[1]++; else caseValue[2]++; caseValue = deviceMap.get(deviceKey); if (caseValue == null) { caseValue = new int[] { 0, 0, 0 }; deviceMap.put(deviceKey, caseValue); } if (success == 1) caseValue[0]++; else if (success == 2) caseValue[1]++; else caseValue[2]++; String osName = device.getOs(); if (osName == null) osName = "Unknown"; caseValue = osMap.get(osName); if (caseValue == null) { caseValue = new int[] { 0, 0, 0 }; osMap.put(osName, caseValue); } if (success == 1) caseValue[0]++; else if (success == 2) caseValue[1]++; else caseValue[2]++; if ((int) detailsList.get(i)[2] == 1) successCount++; } StringBuilder stringBuilder = new StringBuilder(); File useFile = getIndex(rootFolder); writePageHeader(stringBuilder, 1); String runLength = String.format("%dh %dm %ds", TimeUnit.MILLISECONDS.toHours(runTime), TimeUnit.MILLISECONDS.toMinutes(runTime) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(runTime)), TimeUnit.MILLISECONDS.toSeconds(runTime) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(runTime))); stringBuilder.append( "<div class=\"row\"><div class=\"pull-right text-muted\"><a hRef=\"../index.html\" style=\"margin-right: 18px;\">Return to Test Execution History</a></div></div>"); stringBuilder.append( "<div class=\"panel panel-primary\"><div class=panel-heading><div class=panel-title>Execution Detail (" + runLength + ")</div></div><div class=panel-body><table class=\"table table-hover table-condensed\">"); stringBuilder.append( "<tr><th width=\"40%\">Test</th><th width=\"40%\">Environment</th><th width=\"20%\">Duration</th><th>Status</th></tr><tbody>"); int[] 
localBreakdown = new int[5]; for (int i = 0; i < detailsList.size(); i++) { String runKey = (String) detailsList.get(i)[0]; Device device = (Device) detailsList.get(i)[1]; String location = runKey + "/" + device.getKey() + "/"; int success = (int) detailsList.get(i)[2]; long startTime = (long) detailsList.get(i)[6]; long stopTime = (long) detailsList.get(i)[7]; localBreakdown[0] = (int) detailsList.get(i)[8]; localBreakdown[1] = (int) detailsList.get(i)[9]; localBreakdown[2] = (int) detailsList.get(i)[10]; localBreakdown[3] = (int) detailsList.get(i)[11]; localBreakdown[4] = (int) detailsList.get(i)[12]; long testRunTime = stopTime - startTime; String testRunLength = String.format("%2dh %2dm %2ds", TimeUnit.MILLISECONDS.toHours(testRunTime), TimeUnit.MILLISECONDS.toMinutes(testRunTime) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(testRunTime)), TimeUnit.MILLISECONDS.toSeconds(testRunTime) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(testRunTime))); stringBuilder.append("<tr><td><a href='").append(location + runKey + ".html'>").append(runKey) .append("</a></td><td>"); stringBuilder.append(device.getEnvironment()).append("</td>"); stringBuilder.append("<td>").append(testRunLength) .append("</td><td style=\"padding-top: 10px; \" align=\"center\">"); if (success == 1) stringBuilder.append("<span class=\"label label-success\">Pass</span>"); else { if (localBreakdown[0] > 0) stringBuilder.append("<span class=\"label label-danger\">Script</span>"); else if (localBreakdown[1] > 0) stringBuilder.append("<span class=\"label label-danger\">Configuration</span>"); else if (localBreakdown[2] > 0) stringBuilder.append("<span class=\"label label-danger\">Application</span>"); else if (localBreakdown[3] > 0) stringBuilder.append("<span class=\"label label-danger\">Cloud</span>"); else if (localBreakdown[4] > 0) stringBuilder.append("<span class=\"label label-warning\">Skipped</span>"); else stringBuilder.append("<span class=\"label 
label-danger\">Fail</span>"); } stringBuilder.append("</td></tr>"); } stringBuilder.append("<tr><td colSpan='6' align='center'><h6>") .append(new File(rootFolder, getRootFolder() + System.getProperty("file.separator") + "executionMap.properties") .getAbsolutePath()) .append("</h6></td></tr></tbody></table></div></div>"); stringBuilder.append( "<div class=\"panel panel-primary\"><div class=panel-heading><div class=panel-title>Environment Summary</div></div><div class=panel-body><table class=\"table table-hover table-condensed\">"); stringBuilder .append("<thead><tr><th width=60%>Environment</th><th nowrap>Pass Rate</th></thead></tr><tbody>"); for (String deviceName : envMap.keySet()) { int[] currentRecord = deviceMap.get(deviceName); int totalValue = currentRecord[0] + currentRecord[1]; double successValue = 0; if (totalValue > 0) successValue = ((double) currentRecord[0] / (double) totalValue) * 100; stringBuilder.append("<tr><td width=60%>").append(deviceName).append("</td><td>") .append(percentFormat.format(successValue)).append("%</td></tr>"); } stringBuilder.append("</tbody></table></div></div>"); stringBuilder.append( "<div class=\"panel panel-primary\"><div class=panel-heading><div class=panel-title>Test Summary</div></div><div class=panel-body><table class=\"table table-hover table-condensed\">"); stringBuilder.append( "<thead><tr><th width=60%>Test</th><th nowrap>Pass Rate</th><th nowrap>Average Duration</th></thead></tr><tbody>"); for (String deviceName : caseMap.keySet()) { int[] currentRecord = caseMap.get(deviceName); int totalValue = currentRecord[0] + currentRecord[1]; double successValue = 0; if (totalValue > 0) successValue = ((double) currentRecord[0] / (double) totalValue) * 100; int runTimex = (int) ((double) currentRecord[3] / (double) currentRecord[2]); String runLengthx = String.format("%2dh %2dm %2ds", TimeUnit.MILLISECONDS.toHours(runTimex), TimeUnit.MILLISECONDS.toMinutes(runTimex) - 
TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(runTimex)), TimeUnit.MILLISECONDS.toSeconds(runTimex) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(runTimex))); stringBuilder.append("<tr><td width=60%>").append(deviceName).append("</td><td>") .append(percentFormat.format(successValue)).append("%</td><td>").append(runLengthx) .append("</td></tr>"); } stringBuilder.append("</tbody></table></div></div>"); stringBuilder.append( "<div class=\"panel panel-primary\"><div class=panel-heading><div class=panel-title>Failure Breakdown</div></div><div class=panel-body><table class=\"table table-hover table-condensed\">"); stringBuilder.append( "<thead><tr><th width=90%>Failure Type</th><th nowrap>Failure Count</th></tr></thead><tbody>"); stringBuilder.append( "<tbody><tr><td width=90%>Scripting Issues</td><td nowrap>" + failureBreakdown[0] + "</td></tr>"); stringBuilder.append( "<tr><td width=90%>Configuration Issues</td><td nowrap>" + failureBreakdown[1] + "</td></tr>"); stringBuilder.append( "<tr><td width=90%>Application Issues</td><td nowrap>" + failureBreakdown[2] + "</td></tr>"); stringBuilder.append("<tr><td width=90%>Cloud Issues</td><td nowrap>" + failureBreakdown[3] + "</td></tr>"); stringBuilder .append("<tr><td width=90%>Skipped Tests</td><td nowrap>" + failureBreakdown[4] + "</td></tr>"); stringBuilder.append("</tbody></table></div></div></div>"); stringBuilder.append("</div></div></div></div>"); writePageFooter(stringBuilder); try { useFile.getParentFile().mkdirs(); FileWriter fileWriter = new FileWriter(useFile); fileWriter.write(stringBuilder.toString()); fileWriter.close(); if (complete) { if (historyWriter == null) historyWriter = new HistoryWriter(DataManager.instance().getReportFolder()); historyWriter.writeData(getRootFolder() + System.getProperty("file.separator") + "index.html", startTime, System.currentTimeMillis(), envMap.size(), osMap.size(), successCount, detailsList.size() - successCount, envMap, failureBreakdown[0], 
failureBreakdown[1], failureBreakdown[2], failureBreakdown[3], failureBreakdown[4]); } } catch (Exception e) { e.printStackTrace(); } try { HttpClient httpclient = HttpClients.createDefault(); int CONNECTION_TIMEOUT_MS = 3000; // Timeout in millis. Builder requestBuilder = RequestConfig.custom().setConnectionRequestTimeout(CONNECTION_TIMEOUT_MS) .setConnectTimeout(CONNECTION_TIMEOUT_MS).setSocketTimeout(CONNECTION_TIMEOUT_MS); /*if ( CloudRegistry.instance().getCloud().getProxyHost() != null && !CloudRegistry.instance().getCloud().getProxyHost().isEmpty() ) { requestBuilder.setProxy( new HttpHost( CloudRegistry.instance().getCloud().getProxyHost(), Integer.parseInt( CloudRegistry.instance().getCloud().getProxyPort() ) ) ); }*/ if (ProxyRegistry.instance().getProxyHost() != null && !ProxyRegistry.instance().getProxyHost().isEmpty()) { requestBuilder.setProxy(new HttpHost(ProxyRegistry.instance().getProxyHost(), Integer.parseInt(ProxyRegistry.instance().getProxyPort()))); } RequestConfig requestConfig = requestBuilder.build(); HttpPost httppost = new HttpPost("http://www.google-analytics.com/collect"); httppost.setConfig(requestConfig); List<NameValuePair> params = new ArrayList<NameValuePair>(2); params.add(new BasicNameValuePair("v", "1")); params.add(new BasicNameValuePair("tid", "UA-80178289-1")); params.add(new BasicNameValuePair("cid", "555")); params.add(new BasicNameValuePair("t", "pageview")); params.add(new BasicNameValuePair("dt", "/testExecution")); params.add(new BasicNameValuePair("dp", ApplicationRegistry.instance().getAUT().getName())); params.add(new BasicNameValuePair("an", "xFramium")); params.add(new BasicNameValuePair("av", Initializable.VERSION)); params.add(new BasicNameValuePair("dh", CloudRegistry.instance().getCloud().getHostName())); params.add(new BasicNameValuePair("cm1", detailsList.size() + "")); params.add(new BasicNameValuePair("cm2", successCount + "")); params.add(new BasicNameValuePair("cm3", (detailsList.size() - successCount) + 
"")); params.add( new BasicNameValuePair("cm4", (stepBreakdown[0] + stepBreakdown[1] + stepBreakdown[2]) + "")); params.add(new BasicNameValuePair("cm5", stepBreakdown[0] + "")); params.add(new BasicNameValuePair("cm6", stepBreakdown[1] + "")); params.add(new BasicNameValuePair("cm7", stepBreakdown[2] + "")); params.add(new BasicNameValuePair("cm8", envMap.size() + "")); params.add(new BasicNameValuePair("cm9", (runTime / 1000) + "")); params.add(new BasicNameValuePair("cd2", System.getProperty("os.name"))); params.add(new BasicNameValuePair("cd3", System.getProperty("java.version"))); params.add(new BasicNameValuePair("cd4", "X" + Base64.encodeBase64String(CloudRegistry.instance().getCloud().getUserName().getBytes()) + "=")); httppost.setEntity(new UrlEncodedFormEntity(params, "UTF-8")); // Execute and get the response. HttpResponse response = httpclient.execute(httppost); } catch (Exception e) { } }
From source file: gov.vha.isaac.ochre.impl.sememe.DynamicSememeUsageDescription.java
/** * Read the RefexUsageDescription data from the database for a given nid. * /* w w w . j a v a 2 s . c o m*/ * Note that most users should call {@link #read(int)} instead, as that utilizes a cache. * This always reads directly from the DB. * * @param refexUsageDescriptorSequence * @throws IOException * @throws ContradictionException */ @SuppressWarnings("unchecked") public DynamicSememeUsageDescription(int refexUsageDescriptorSequence) { refexUsageDescriptorSequence_ = refexUsageDescriptorSequence; TreeMap<Integer, DynamicSememeColumnInfo> allowedColumnInfo = new TreeMap<>(); ConceptChronology<?> assemblageConcept = Get.conceptService().getConcept(refexUsageDescriptorSequence_); for (SememeChronology<? extends DescriptionSememe<?>> descriptionSememe : assemblageConcept .getConceptDescriptionList()) { @SuppressWarnings("rawtypes") Optional<LatestVersion<DescriptionSememe<?>>> descriptionVersion = ((SememeChronology) descriptionSememe) .getLatestVersion(DescriptionSememe.class, StampCoordinates.getDevelopmentLatestActiveOnly()); if (descriptionVersion.isPresent()) { @SuppressWarnings("rawtypes") DescriptionSememe ds = descriptionVersion.get().value(); if (ds.getDescriptionTypeConceptSequence() == IsaacMetadataAuxiliaryBinding.DEFINITION_DESCRIPTION_TYPE .getConceptSequence()) { Optional<SememeChronology<? 
extends SememeVersion<?>>> nestesdSememe = Get.sememeService() .getSememesForComponentFromAssemblage(ds.getNid(), IsaacMetadataConstants.DYNAMIC_SEMEME_DEFINITION_DESCRIPTION.getSequence()) .findAny(); if (nestesdSememe.isPresent()) { sememeUsageDescription_ = ds.getText(); } ; } if (ds.getDescriptionTypeConceptSequence() == IsaacMetadataAuxiliaryBinding.FULLY_SPECIFIED_NAME .getConceptSequence()) { name_ = ds.getText(); } if (sememeUsageDescription_ != null && name_ != null) { break; } } } if (StringUtils.isEmpty(sememeUsageDescription_)) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. It must contain a description of type Definition with an annotation of type " + "DynamicSememe.DYNAMIC_SEMEME_DEFINITION_DESCRIPTION"); } Get.sememeService().getSememesForComponent(assemblageConcept.getNid()).forEach(sememe -> { if (sememe.getSememeType() == SememeType.DYNAMIC) { @SuppressWarnings("rawtypes") Optional<LatestVersion<? extends DynamicSememe>> sememeVersion = ((SememeChronology) sememe) .getLatestVersion(DynamicSememe.class, StampCoordinates.getDevelopmentLatestActiveOnly()); if (sememeVersion.isPresent()) { @SuppressWarnings("rawtypes") DynamicSememe ds = sememeVersion.get().value(); DynamicSememeDataBI[] refexDefinitionData = ds.getData(); if (sememe.getAssemblageSequence() == IsaacMetadataConstants.DYNAMIC_SEMEME_EXTENSION_DEFINITION .getSequence()) { if (refexDefinitionData == null || refexDefinitionData.length < 3 || refexDefinitionData.length > 7) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. It must contain at least 3 columns in the DynamicSememeDataBI attachment, and no more than 7."); } //col 0 is the column number, //col 1 is the concept with col name //col 2 is the column data type, stored as a string. 
//col 3 (if present) is the default column data, stored as a subtype of DynamicSememeDataBI //col 4 (if present) is a boolean field noting whether the column is required (true) or optional (false or null) //col 5 (if present) is the validator {@link DynamicSememeValidatorType}, stored as a string array. //col 6 (if present) is the validatorData for the validator in column 5, stored as a subtype of DynamicSememeDataBI try { int column = (Integer) refexDefinitionData[0].getDataObject(); UUID descriptionUUID = (UUID) refexDefinitionData[1].getDataObject(); DynamicSememeDataType type = DynamicSememeDataType .valueOf((String) refexDefinitionData[2].getDataObject()); DynamicSememeDataBI defaultData = null; if (refexDefinitionData.length > 3) { defaultData = (refexDefinitionData[3] == null ? null : refexDefinitionData[3]); } if (defaultData != null && type.getDynamicSememeMemberClass() != refexDefinitionData[3] .getDynamicSememeDataType().getDynamicSememeMemberClass()) { throw new IOException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. The type of the column (column 3) must match the type of the defaultData (column 4)"); } Boolean columnRequired = null; if (refexDefinitionData.length > 4) { columnRequired = (refexDefinitionData[4] == null ? 
null : (Boolean) refexDefinitionData[4].getDataObject()); } DynamicSememeValidatorType[] validators = null; DynamicSememeDataBI[] validatorsData = null; if (refexDefinitionData.length > 5) { if (refexDefinitionData[5] != null && ((DynamicSememeArrayBI<DynamicSememeStringBI>) refexDefinitionData[5]) .getDataArray().length > 0) { DynamicSememeArrayBI<DynamicSememeStringBI> readValidators = (DynamicSememeArrayBI<DynamicSememeStringBI>) refexDefinitionData[5]; validators = new DynamicSememeValidatorType[readValidators .getDataArray().length]; for (int i = 0; i < validators.length; i++) { validators[i] = DynamicSememeValidatorType .valueOf((String) readValidators.getDataArray()[i].getDataObject()); } } if (refexDefinitionData.length > 6) { if (refexDefinitionData[6] != null && ((DynamicSememeArrayBI<? extends DynamicSememeDataBI>) refexDefinitionData[6]) .getDataArray().length > 0) { DynamicSememeArrayBI<? extends DynamicSememeDataBI> readValidatorsData = (DynamicSememeArrayBI<? extends DynamicSememeDataBI>) refexDefinitionData[6]; validatorsData = new DynamicSememeDataBI[readValidatorsData .getDataArray().length]; for (int i = 0; i < validators.length; i++) { if (readValidatorsData.getDataArray()[i] != null) { validatorsData[i] = readValidatorsData.getDataArray()[i]; } else { validatorsData[i] = null; } } } } } allowedColumnInfo.put(column, new DynamicSememeColumnInfo(assemblageConcept.getPrimordialUuid(), column, descriptionUUID, type, defaultData, columnRequired, validators, validatorsData)); } catch (Exception e) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. 
The first column must have a data type of integer, and the third column must be a string " + "that is parseable as a DynamicSememeDataType"); } } else if (sememe .getAssemblageSequence() == IsaacMetadataConstants.DYNAMIC_SEMEME_REFERENCED_COMPONENT_RESTRICTION .getSequence()) { if (refexDefinitionData == null || refexDefinitionData.length < 1) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. If it contains a " + IsaacMetadataConstants.DYNAMIC_SEMEME_REFERENCED_COMPONENT_RESTRICTION .getFSN() + " then it must contain a single column of data, of type string, parseable as a " + ObjectChronologyType.class.getName()); } //col 0 is Referenced component restriction information - as a string. try { ObjectChronologyType type = ObjectChronologyType .parse(refexDefinitionData[0].getDataObject().toString()); if (type == ObjectChronologyType.UNKNOWN_NID) { //just ignore - it shouldn't have been saved that way anyway. } else { referencedComponentTypeRestriction_ = type; } } catch (Exception e) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. The component type restriction annotation has an invalid value"); } //col 1 is an optional Referenced component sub-restriction information - as a string. if (refexDefinitionData.length > 1 && refexDefinitionData[1] != null) { try { SememeType type = SememeType .parse(refexDefinitionData[1].getDataObject().toString()); if (type == SememeType.UNKNOWN) { //just ignore - it shouldn't have been saved that way anyway. } else { referencedComponentTypeSubRestriction_ = type; } } catch (Exception e) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. 
The component type restriction annotation has an invalid value"); } } else { referencedComponentTypeSubRestriction_ = null; } } } } }); refexColumnInfo_ = new DynamicSememeColumnInfo[allowedColumnInfo.size()]; int i = 0; for (int key : allowedColumnInfo.keySet()) { if (key != i) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. It must contain sequential column numbers, with no gaps, which start at 0."); } refexColumnInfo_[i++] = allowedColumnInfo.get(key); } }
From source file: net.spfbl.http.ServerHTTP.java
private static String getControlPanel(Locale locale, User user, Long begin, String filter) { StringBuilder builder = new StringBuilder(); if (begin == null && filter == null) { // builder.append("<!DOCTYPE html>\n"); builder.append("<html lang=\""); builder.append(locale.getLanguage()); builder.append("\">\n"); builder.append(" <head>\n"); builder.append(" <meta charset=\"UTF-8\">\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append(" <title>Painel de controle do SPFBL</title>\n"); } else {//from w ww.ja v a 2s . c o m builder.append(" <title>SPFBL control panel</title>\n"); } // Styled page. builder.append(" <style type=\"text/css\">\n"); builder.append(" body {\n"); builder.append(" margin:180px 0px 30px 0px;\n"); builder.append(" background:lightgray;\n"); builder.append(" }\n"); builder.append(" iframe {\n"); builder.append(" border-width: 0px 0px 0px 0px;\n"); builder.append(" width:100%;\n"); builder.append(" height:150px;\n"); builder.append(" }\n"); builder.append(" .header {\n"); builder.append(" background-color:lightgray;\n"); builder.append(" border-width: 0px 0px 0px 0px;\n"); builder.append(" position:fixed;\n"); builder.append(" top:0px;\n"); builder.append(" margin:auto;\n"); builder.append(" z-index:1;\n"); builder.append(" width:100%;\n"); builder.append(" height:180px;\n"); builder.append(" }\n"); builder.append(" .bottom {\n"); builder.append(" background-color:lightgray;\n"); builder.append(" border-width: 0px 0px 0px 0px;\n"); builder.append(" position:fixed;\n"); builder.append(" bottom:0px;\n"); builder.append(" margin:auto;\n"); builder.append(" z-index:1;\n"); builder.append(" width:100%;\n"); builder.append(" height:30px;\n"); builder.append(" }\n"); builder.append(" .button {\n"); builder.append(" background-color: #4CAF50;\n"); builder.append(" border: none;\n"); builder.append(" color: white;\n"); builder.append(" padding: 16px 32px;\n"); builder.append(" text-align: center;\n"); builder.append(" text-decoration: 
none;\n"); builder.append(" display: inline-block;\n"); builder.append(" font-size: 16px;\n"); builder.append(" margin: 4px 2px;\n"); builder.append(" -webkit-transition-duration: 0.4s;\n"); builder.append(" transition-duration: 0.4s;\n"); builder.append(" cursor: pointer;\n"); builder.append(" }\n"); builder.append(" .sender {\n"); builder.append(" background-color: white; \n"); builder.append(" color: black; \n"); builder.append(" border: 2px solid #008CBA;\n"); builder.append(" width: 100%;\n"); builder.append(" word-wrap: break-word;\n"); builder.append(" }\n"); builder.append(" .sender:hover {\n"); builder.append(" background-color: #008CBA;\n"); builder.append(" color: white;\n"); builder.append(" }\n"); builder.append(" .highlight {\n"); builder.append(" background: #b4b9d2;\n"); builder.append(" color:black;\n"); builder.append(" border-top: 1px solid #22262e;\n"); builder.append(" border-bottom: 1px solid #22262e;\n"); builder.append(" }\n"); builder.append(" .highlight:nth-child(odd) td {\n"); builder.append(" background: #b4b9d2;\n"); builder.append(" }\n"); builder.append(" .click {\n"); builder.append(" cursor:pointer;\n"); builder.append(" cursor:hand;\n"); builder.append(" }\n"); builder.append(" table {\n"); builder.append(" background: white;\n"); builder.append(" table-layout:fixed;\n"); builder.append(" border-collapse: collapse;\n"); builder.append(" word-wrap:break-word;\n"); builder.append(" border-radius:3px;\n"); builder.append(" border-collapse: collapse;\n"); builder.append(" margin: auto;\n"); builder.append(" padding:2px;\n"); builder.append(" width: 100%;\n"); builder.append(" box-shadow: 0 5px 10px rgba(0, 0, 0, 0.1);\n"); builder.append(" animation: float 5s infinite;\n"); builder.append(" }\n"); builder.append(" th {\n"); builder.append(" color:#FFFFFF;;\n"); builder.append(" background:#1b1e24;\n"); builder.append(" border-bottom:4px solid #9ea7af;\n"); builder.append(" border-right: 0px;\n"); builder.append(" font-size:16px;\n"); 
builder.append(" font-weight: bold;\n"); builder.append(" padding:4px;\n"); builder.append(" text-align:left;\n"); builder.append(" text-shadow: 0 1px 1px rgba(0, 0, 0, 0.1);\n"); builder.append(" vertical-align:middle;\n"); builder.append(" height:30px;\n"); builder.append(" }\n"); builder.append(" tr {\n"); builder.append(" border-top: 1px solid #C1C3D1;\n"); builder.append(" border-bottom-: 1px solid #C1C3D1;\n"); builder.append(" font-size:16px;\n"); builder.append(" font-weight:normal;\n"); builder.append(" text-shadow: 0 1px 1px rgba(256, 256, 256, 0.1);\n"); builder.append(" }\n"); builder.append(" tr:nth-child(odd) td {\n"); builder.append(" background:#EBEBEB;\n"); builder.append(" }\n"); builder.append(" td {\n"); builder.append(" padding:2px;\n"); builder.append(" vertical-align:middle;\n"); builder.append(" font-size:16px;\n"); builder.append(" text-shadow: -1px -1px 1px rgba(0, 0, 0, 0.1);\n"); builder.append(" border-right: 1px solid #C1C3D1;\n"); builder.append(" }\n"); builder.append(" input[type=text], select {\n"); builder.append(" width: 400px;\n"); builder.append(" padding: 0px 4px;\n"); builder.append(" margin: 1px 0;\n"); builder.append(" display: inline-block;\n"); builder.append(" background: #b4b9d2;\n"); builder.append(" border: 1px solid #ccc;\n"); builder.append(" border-radius: 4px;\n"); builder.append(" box-sizing: border-box;\n"); builder.append(" }\n"); builder.append(" </style>\n"); // JavaScript functions. 
TreeMap<Long, Query> queryMap = user.getQueryMap(null, null); builder.append( " <script type=\"text/javascript\" src=\"https://ajax.googleapis.com/ajax/libs/jquery/2.1.3/jquery.min.js\"></script>\n"); builder.append(" <script type=\"text/javascript\">\n"); builder.append(" window.onbeforeunload = function () {\n"); builder.append(" document.getElementById('filterField').value = '';\n"); builder.append(" window.scrollTo(0, 0);\n"); builder.append(" }\n"); builder.append(" var last = "); if (queryMap.isEmpty()) { builder.append(0); } else { builder.append(queryMap.lastKey()); } builder.append(";\n"); builder.append(" var filterText = '';\n"); builder.append(" function view(query) {\n"); builder.append(" if (query == undefined || query == 0) {\n"); builder.append(" var viewer = document.getElementById('viewer');\n"); builder.append(" viewer.src = 'about:blank';\n"); builder.append(" last = 0;\n"); builder.append(" } else if (last != query) {\n"); builder.append(" var viewer = document.getElementById('viewer');\n"); builder.append(" viewer.addEventListener('load', function() {\n"); builder.append(" if (document.getElementById(last)) {\n"); builder.append(" document.getElementById(last).className = 'tr';\n"); builder.append(" document.getElementById(last).className = 'click';\n"); builder.append(" }\n"); builder.append(" document.getElementById(query).className = 'highlight';\n"); builder.append(" last = query;\n"); builder.append(" });\n"); builder.append(" viewer.src = '"); builder.append(Core.getURL()); builder.append("' + query;\n"); builder.append(" }\n"); builder.append(" }\n"); builder.append(" function more(query) {\n"); builder.append(" var rowMore = document.getElementById('rowMore');\n"); builder.append(" rowMore.onclick = '';\n"); builder.append(" rowMore.className = 'tr';\n"); builder.append(" var columnMore = document.getElementById('columnMore');\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append(" columnMore.innerHTML = 
'carregando mais registros';\n"); } else { builder.append(" columnMore.innerHTML = 'loading more records';\n"); } builder.append(" $.post(\n"); builder.append(" '"); builder.append(Core.getURL()); builder.append(user.getEmail()); builder.append("',\n"); builder.append(" {filter:filterText,begin:query},\n"); builder.append(" function(data, status) {\n"); builder.append(" if (status == 'success') {\n"); builder.append(" rowMore.parentNode.removeChild(rowMore);\n"); builder.append(" $('#tableBody').append(data);\n"); builder.append(" } else {\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " alert('Houve uma falha de sistema ao tentar realizar esta operao.');\n"); } else { builder.append( " alert('There was a system crash while trying to perform this operation.');\n"); } builder.append(" }\n"); builder.append(" }\n"); builder.append(" );\n"); builder.append(" }\n"); builder.append(" function refresh() {\n"); builder.append(" filterText = document.getElementById('filterField').value;\n"); builder.append(" $.post(\n"); builder.append(" '"); builder.append(Core.getURL()); builder.append(user.getEmail()); builder.append("',\n"); builder.append(" {filter:filterText},\n"); builder.append(" function(data, status) {\n"); builder.append(" if (status == 'success') {\n"); builder.append(" $('#tableBody').html(data);\n"); builder.append(" view($('#tableBody tr').attr('id'));\n"); builder.append(" } else {\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " alert('Houve uma falha de sistema ao tentar realizar esta operao.');\n"); } else { builder.append( " alert('There was a system crash while trying to perform this operation.');\n"); } builder.append(" }\n"); builder.append(" }\n"); builder.append(" );\n"); builder.append(" }\n"); builder.append(" </script>\n"); builder.append(" </head>\n"); // Body. 
builder.append(" <body>\n"); builder.append(" <div class=\"header\">\n"); if (queryMap.isEmpty()) { builder.append(" <iframe id=\"viewer\" src=\"about:blank\"></iframe>\n"); } else { builder.append(" <iframe id=\"viewer\" src=\""); builder.append(Core.getURL()); builder.append(queryMap.lastKey()); builder.append("\"></iframe>\n"); } // Construo da tabela de consultas. builder.append(" <table>\n"); builder.append(" <thead>\n"); builder.append(" <tr>\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append(" <th style=\"width:120px;\">Recepo</th>\n"); builder.append(" <th>Origem</th>\n"); builder.append(" <th>Remetente</th>\n"); builder.append(" <th>Contedo</th>\n"); builder.append(" <th>Entrega</th>\n"); } else { builder.append(" <th style=\"width:160px;\">Reception</th>\n"); builder.append(" <th style=\"width:auto;\">Source</th>\n"); builder.append(" <th style=\"width:auto;\">Sender</th>\n"); builder.append(" <th style=\"width:auto;\">Content</th>\n"); builder.append(" <th style=\"width:auto;\">Delivery</th>\n"); } builder.append(" </tr>\n"); builder.append(" </thead>\n"); builder.append(" </table>\n"); builder.append(" </div>\n"); if (queryMap.isEmpty()) { builder.append(" <table>\n"); builder.append(" <tbody>\n"); builder.append(" <tr>\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " <td colspan=\"5\" align=\"center\">nenhum registro encontrado</td>\n"); } else { builder.append(" <td colspan=\"5\" align=\"center\">no records found</td>\n"); } builder.append(" </tr>\n"); builder.append(" </tbody>\n"); builder.append(" </table>\n"); } else { DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.MEDIUM, locale); GregorianCalendar calendar = new GregorianCalendar(); Long nextQuery = null; while (queryMap.size() > User.QUERY_MAX_ROWS) { nextQuery = queryMap.pollFirstEntry().getKey(); } builder.append(" <table>\n"); builder.append(" <tbody id=\"tableBody\">\n"); for (Long time : 
queryMap.descendingKeySet()) { User.Query query = queryMap.get(time); boolean highlight = time.equals(queryMap.lastKey()); buildQueryRow(locale, builder, dateFormat, calendar, time, query, highlight); } if (nextQuery == null) { builder.append(" <tr>\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " <td colspan=\"5\" align=\"center\">no foram encontrados outros registros</td>\n"); } else { builder.append(" <td colspan=\"5\" align=\"center\">no more records found</td>\n"); } builder.append(" </tr>\n"); } else { builder.append(" <tr id=\"rowMore\" class=\"click\" onclick=\"more('"); builder.append(nextQuery); builder.append("')\">\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " <td id=\"columnMore\" colspan=\"5\" align=\"center\">clique para ver mais registros</td>\n"); } else { builder.append( " <td id=\"columnMore\" colspan=\"5\" align=\"center\">click to see more records</td>\n"); } builder.append(" </tr>\n"); } builder.append(" </tbody>\n"); builder.append(" </table>\n"); } builder.append(" <div class=\"bottom\">\n"); builder.append(" <table>\n"); builder.append(" <tr>\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " <th>Pesquisar <input type=\"text\" id=\"filterField\" name=\"filterField\" onkeydown=\"if (event.keyCode == 13) refresh();\" autofocus></th>\n"); } else { builder.append( " <th>Search <input type=\"text\" id=\"filterField\" name=\"filterField\" onkeydown=\"if (event.keyCode == 13) refresh();\" autofocus></th>\n"); } builder.append(" <th style=\"text-align:right;\"><small>"); builder.append( "Powered by <a target=\"_blank\" href=\"http://spfbl.net/\" style=\"color: #b4b9d2;\">SPFBL.net</a></small>"); builder.append("</th>\n"); builder.append(" </tr>\n"); builder.append(" <table>\n"); builder.append(" </div>\n"); builder.append(" </body>\n"); builder.append("</html>\n"); } else { TreeMap<Long, Query> queryMap = user.getQueryMap(begin, filter); if (queryMap.isEmpty()) 
{ builder.append(" <tr>\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " <td colspan=\"5\" align=\"center\">nenhum registro encontrado</td>\n"); } else { builder.append(" <td colspan=\"5\" align=\"center\">no records found</td>\n"); } builder.append(" </tr>\n"); } else { DateFormat dateFormat = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.MEDIUM, locale); GregorianCalendar calendar = new GregorianCalendar(); Long nextQuery = null; while (queryMap.size() > User.QUERY_MAX_ROWS) { nextQuery = queryMap.pollFirstEntry().getKey(); } for (Long time : queryMap.descendingKeySet()) { User.Query query = queryMap.get(time); buildQueryRow(locale, builder, dateFormat, calendar, time, query, false); } if (nextQuery == null) { builder.append(" <tr>\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " <td colspan=\"5\" align=\"center\">no foram encontrados outros registros</td>\n"); } else { builder.append(" <td colspan=\"5\" align=\"center\">no more records found</td>\n"); } builder.append(" </tr>\n"); } else { builder.append(" <tr id=\"rowMore\" class=\"click\" onclick=\"more('"); builder.append(nextQuery); builder.append("')\">\n"); if (locale.getLanguage().toLowerCase().equals("pt")) { builder.append( " <td id=\"columnMore\" colspan=\"5\" align=\"center\">clique para ver mais registros</td>\n"); } else { builder.append( " <td id=\"columnMore\" colspan=\"5\" align=\"center\">click to see more records</td>\n"); } builder.append(" </tr>\n"); } } } return builder.toString(); }
From source file:sh.isaac.model.semantic.DynamicUsageDescriptionImpl.java
/** * Read the RefexUsageDescription data from the database for a given sequence or nid. * * Note that most users should call {@link #read(int)} instead, as that * utilizes a cache. This always reads directly from the DB. * * @param refexUsageDescriptorId sequence or NID of refexUsageDescriptor *///from ww w.j a va 2s .co m @SuppressWarnings("unchecked") public DynamicUsageDescriptionImpl(int refexUsageDescriptorId) { final ConceptChronology assemblageConcept = Get.conceptService() .getConceptChronology(refexUsageDescriptorId); this.refexUsageDescriptorNid = assemblageConcept.getNid(); final TreeMap<Integer, DynamicColumnInfo> allowedColumnInfo = new TreeMap<>(); for (final SemanticChronology descriptionSememe : assemblageConcept.getConceptDescriptionList()) { @SuppressWarnings("rawtypes") final LatestVersion descriptionVersion = ((SemanticChronology) descriptionSememe) .getLatestVersion(StampCoordinates.getDevelopmentLatestActiveOnly()); if (descriptionVersion.isPresent()) { @SuppressWarnings("rawtypes") final DescriptionVersion ds = (DescriptionVersion) descriptionVersion.get(); if (ds.getDescriptionTypeConceptNid() == TermAux.DEFINITION_DESCRIPTION_TYPE.getNid()) { final Optional<SemanticChronology> nestesdSememe = Get.assemblageService() .getSemanticChronologyStreamForComponentFromAssemblage(ds.getNid(), DynamicConstants.get().DYNAMIC_DEFINITION_DESCRIPTION.getNid()) .findAny(); if (nestesdSememe.isPresent()) { this.sememeUsageDescription = ds.getText(); } ; } if (ds.getDescriptionTypeConceptNid() == TermAux.FULLY_QUALIFIED_NAME_DESCRIPTION_TYPE.getNid()) { this.name = ds.getText(); } if ((this.sememeUsageDescription != null) && (this.name != null)) { break; } } } if (StringUtils.isEmpty(this.sememeUsageDescription)) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. 
It must contain a description of type Definition with an annotation of type " + "DynamicSememe.DYNAMIC_SEMEME_DEFINITION_DESCRIPTION"); } Get.assemblageService().getSemanticChronologyStreamForComponent(assemblageConcept.getNid()) .forEach(sememe -> { if (sememe.getVersionType() == VersionType.DYNAMIC) { @SuppressWarnings("rawtypes") final LatestVersion<? extends DynamicVersion> sememeVersion = ((SemanticChronology) sememe) .getLatestVersion(StampCoordinates.getDevelopmentLatestActiveOnly()); if (sememeVersion.isPresent()) { @SuppressWarnings("rawtypes") final DynamicVersion ds = sememeVersion.get(); final DynamicData[] refexDefinitionData = ds.getData(); if (sememe.getAssemblageNid() == DynamicConstants.get().DYNAMIC_EXTENSION_DEFINITION .getNid()) { if ((refexDefinitionData == null) || (refexDefinitionData.length < 3) || (refexDefinitionData.length > 7)) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. It must contain at least 3 columns in the DynamicSememeDataBI attachment, and no more than 7."); } // col 0 is the column number, // col 1 is the concept with col name // col 2 is the column data type, stored as a string. // col 3 (if present) is the default column data, stored as a subtype of DynamicData // col 4 (if present) is a boolean field noting whether the column is required (true) or optional (false or null) // col 5 (if present) is the validator {@link DynamicValidatorType}, stored as a string array. 
// col 6 (if present) is the validatorData for the validator in column 5, stored as a subtype of DynamicData try { final int column = (Integer) refexDefinitionData[0].getDataObject(); final UUID descriptionUUID = (UUID) refexDefinitionData[1].getDataObject(); final DynamicDataType type = DynamicDataType .valueOf((String) refexDefinitionData[2].getDataObject()); DynamicData defaultData = null; if (refexDefinitionData.length > 3) { defaultData = ((refexDefinitionData[3] == null) ? null : refexDefinitionData[3]); } if ((defaultData != null) && (type.getDynamicMemberClass() != refexDefinitionData[3] .getDynamicDataType().getDynamicMemberClass())) { throw new IOException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. The type of the column (column 3) must match the type of the defaultData (column 4)"); } Boolean columnRequired = null; if (refexDefinitionData.length > 4) { columnRequired = ((refexDefinitionData[4] == null) ? null : (Boolean) refexDefinitionData[4].getDataObject()); } DynamicValidatorType[] validators = null; DynamicData[] validatorsData = null; if (refexDefinitionData.length > 5) { if ((refexDefinitionData[5] != null) && ((DynamicArray<DynamicString>) refexDefinitionData[5]) .getDataArray().length > 0) { final DynamicArray<DynamicString> readValidators = (DynamicArray<DynamicString>) refexDefinitionData[5]; validators = new DynamicValidatorType[readValidators .getDataArray().length]; for (int i = 0; i < validators.length; i++) { validators[i] = DynamicValidatorType.valueOf( (String) readValidators.getDataArray()[i].getDataObject()); } } if (refexDefinitionData.length > 6) { if ((refexDefinitionData[6] != null) && ((DynamicArray<? extends DynamicData>) refexDefinitionData[6]) .getDataArray().length > 0) { final DynamicArray<? extends DynamicData> readValidatorsData = (DynamicArray<? 
extends DynamicData>) refexDefinitionData[6]; validatorsData = new DynamicData[readValidatorsData .getDataArray().length]; if (validators != null) { for (int i = 0; i < validators.length; i++) { if (readValidatorsData.getDataArray()[i] != null) { validatorsData[i] = readValidatorsData .getDataArray()[i]; } else { validatorsData[i] = null; } } } } } } allowedColumnInfo.put(column, new DynamicColumnInfo(assemblageConcept.getPrimordialUuid(), column, descriptionUUID, type, defaultData, columnRequired, validators, validatorsData, null)); } catch (final Exception e) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. The first column must have a data type of integer, and the third column must be a string " + "that is parseable as a DynamicSememeDataType"); } } else if (sememe.getAssemblageNid() == DynamicConstants .get().DYNAMIC_SEMEME_REFERENCED_COMPONENT_RESTRICTION.getNid()) { if ((refexDefinitionData == null) || (refexDefinitionData.length < 1)) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. If it contains a " + DynamicConstants.get().DYNAMIC_SEMEME_REFERENCED_COMPONENT_RESTRICTION .getPrimaryName() + " then it must contain a single column of data, of type string, parseable as a " + ObjectChronologyType.class.getName()); } // col 0 is Referenced component restriction information - as a string. try { final ObjectChronologyType type = ObjectChronologyType .parse(refexDefinitionData[0].getDataObject().toString(), false); if (type == ObjectChronologyType.UNKNOWN_NID) { // just ignore - it shouldn't have been saved that way anyway. 
} else { this.referencedComponentTypeRestriction = type; } } catch (final Exception e) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. The component type restriction annotation has an invalid value"); } // col 1 is an optional Referenced component sub-restriction information - as a string. if ((refexDefinitionData.length > 1) && (refexDefinitionData[1] != null)) { try { final VersionType type = VersionType .parse(refexDefinitionData[1].getDataObject().toString(), false); if (type == VersionType.UNKNOWN) { // just ignore - it shouldn't have been saved that way anyway. } else { this.referencedComponentTypeSubRestriction = type; } } catch (final Exception e) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. The component type restriction annotation has an invalid value"); } } else { this.referencedComponentTypeSubRestriction = null; } } } } }); this.refexColumnInfo = new DynamicColumnInfo[allowedColumnInfo.size()]; int i = 0; for (final int key : allowedColumnInfo.keySet()) { if (key != i) { throw new RuntimeException("The Assemblage concept: " + assemblageConcept + " is not correctly assembled for use as an Assemblage for " + "a DynamicSememeData Refex Type. It must contain sequential column numbers, with no gaps, which start at 0."); } this.refexColumnInfo[i++] = allowedColumnInfo.get(key); } }
From source file:org.sakaiproject.tool.assessment.facade.AssessmentGradingFacadeQueries.java
public List getExportResponsesData(String publishedAssessmentId, boolean anonymous, String audioMessage, String fileUploadMessage, String noSubmissionMessage, boolean showPartAndTotalScoreSpreadsheetColumns, String poolString, String partString, String questionString, String textString, String rationaleString, String itemGradingCommentsString, Map useridMap, String responseCommentString) { ArrayList dataList = new ArrayList(); ArrayList headerList = new ArrayList(); ArrayList finalList = new ArrayList(2); PublishedAssessmentService pubService = new PublishedAssessmentService(); HashSet publishedAssessmentSections = pubService .getSectionSetForAssessment(Long.valueOf(publishedAssessmentId)); Double zeroDouble = new Double(0.0); HashMap publishedAnswerHash = pubService .preparePublishedAnswerHash(pubService.getPublishedAssessment(publishedAssessmentId)); HashMap publishedItemTextHash = pubService .preparePublishedItemTextHash(pubService.getPublishedAssessment(publishedAssessmentId)); HashMap publishedItemHash = pubService .preparePublishedItemHash(pubService.getPublishedAssessment(publishedAssessmentId)); //Get this sorted to add the blank gradings for the questions not answered later. 
Set publishItemSet = new TreeSet(new ItemComparator()); publishItemSet.addAll(publishedItemHash.values()); int numSubmission = 1; String numSubmissionText = noSubmissionMessage; String lastAgentId = ""; String agentEid = ""; String firstName = ""; String lastName = ""; Set useridSet = new HashSet(useridMap.keySet()); ArrayList responseList = null; boolean canBeExported = false; boolean fistItemGradingData = true; List list = getAllOrderedSubmissions(publishedAssessmentId); Iterator assessmentGradingIter = list.iterator(); while (assessmentGradingIter.hasNext()) { // create new section-item-scores structure for this assessmentGrading Iterator sectionsIter = publishedAssessmentSections.iterator(); HashMap sectionItems = new HashMap(); TreeMap sectionScores = new TreeMap(); while (sectionsIter.hasNext()) { PublishedSectionData publishedSection = (PublishedSectionData) sectionsIter.next(); ArrayList itemsArray = publishedSection.getItemArraySortedForGrading(); Iterator itemsIter = itemsArray.iterator(); // Iterate through the assessment questions (items) HashMap itemsForSection = new HashMap(); while (itemsIter.hasNext()) { ItemDataIfc item = (ItemDataIfc) itemsIter.next(); itemsForSection.put(item.getItemId(), item.getItemId()); }//from w w w.j a va 2 s .c o m sectionItems.put(publishedSection.getSequence(), itemsForSection); sectionScores.put(publishedSection.getSequence(), zeroDouble); } AssessmentGradingData assessmentGradingData = (AssessmentGradingData) assessmentGradingIter.next(); String agentId = assessmentGradingData.getAgentId(); responseList = new ArrayList(); canBeExported = false; if (anonymous) { canBeExported = true; responseList.add(assessmentGradingData.getAssessmentGradingId()); } else { if (useridMap.containsKey(assessmentGradingData.getAgentId())) { useridSet.remove(assessmentGradingData.getAgentId()); canBeExported = true; try { agentEid = userDirectoryService.getUser(assessmentGradingData.getAgentId()).getEid(); firstName = 
userDirectoryService.getUser(assessmentGradingData.getAgentId()).getFirstName(); lastName = userDirectoryService.getUser(assessmentGradingData.getAgentId()).getLastName(); } catch (Exception e) { log.error("Cannot get user"); } responseList.add(lastName); responseList.add(firstName); responseList.add(agentEid); if (assessmentGradingData.getForGrade()) { if (lastAgentId.equals(agentId)) { numSubmission++; } else { numSubmission = 1; lastAgentId = agentId; } } else { numSubmission = 0; lastAgentId = agentId; } if (numSubmission == 0) { numSubmissionText = noSubmissionMessage; } else { numSubmissionText = String.valueOf(numSubmission); } responseList.add(numSubmissionText); } } if (canBeExported) { int sectionScoreColumnStart = responseList.size(); if (showPartAndTotalScoreSpreadsheetColumns) { Double finalScore = assessmentGradingData.getFinalScore(); if (finalScore != null) { responseList.add((Double) finalScore.doubleValue()); // gopal - cast for spreadsheet numerics } else { log.debug("finalScore is NULL"); responseList.add(0d); } } String assessmentGradingComments = ""; if (assessmentGradingData.getComments() != null) { assessmentGradingComments = assessmentGradingData.getComments().replaceAll("<br\\s*/>", ""); } responseList.add(assessmentGradingComments); Long assessmentGradingId = assessmentGradingData.getAssessmentGradingId(); HashMap studentGradingMap = getStudentGradingData( assessmentGradingData.getAssessmentGradingId().toString(), false); ArrayList grades = new ArrayList(); grades.addAll(studentGradingMap.values()); Collections.sort(grades, new QuestionComparator(publishedItemHash)); //Add the blank gradings for the questions not answered in random pools. 
if (grades.size() < publishItemSet.size()) { int index = -1; for (Object pido : publishItemSet) { index++; PublishedItemData pid = (PublishedItemData) pido; if (index == grades.size() || ((ItemGradingData) ((List) grades.get(index)).get(0)) .getPublishedItemId().longValue() != pid.getItemId().longValue()) { //have to add the placeholder List newList = new ArrayList(); newList.add(new EmptyItemGrading(pid.getSection().getSequence(), pid.getItemId(), pid.getSequence())); grades.add(index, newList); } } } int questionNumber = 0; for (Object oo : grades) { // There can be more than one answer to a question, e.g. for // FIB with more than one blank or matching questions. So sort // by sequence number of answer. (don't bother to sort if just 1) List l = (List) oo; if (l.size() > 1) Collections.sort(l, new AnswerComparator(publishedAnswerHash)); String maintext = ""; String rationale = ""; String responseComment = ""; boolean addRationale = false; boolean addResponseComment = false; boolean matrixChoices = false; TreeMap responsesMap = new TreeMap(); // loop over answers per question int count = 0; ItemGradingData grade = null; //boolean isAudioFileUpload = false; boolean isFinFib = false; double itemScore = 0.0d; //Add the missing sequences! 
//To manage emi answers, could help with others too Map<Long, String> emiAnswerText = new TreeMap<Long, String>(); for (Object ooo : l) { grade = (ItemGradingData) ooo; if (grade == null || EmptyItemGrading.class.isInstance(grade)) { continue; } if (grade != null && grade.getAutoScore() != null) { itemScore += grade.getAutoScore().doubleValue(); } // now print answer data log.debug("<br> " + grade.getPublishedItemId() + " " + grade.getRationale() + " " + grade.getAnswerText() + " " + grade.getComments() + " " + grade.getReview()); Long publishedItemId = grade.getPublishedItemId(); ItemDataIfc publishedItemData = (ItemDataIfc) publishedItemHash.get(publishedItemId); Long typeId = publishedItemData.getTypeId(); questionNumber = publishedItemData.getSequence(); if (typeId.equals(TypeIfc.FILL_IN_BLANK) || typeId.equals(TypeIfc.FILL_IN_NUMERIC) || typeId.equals(TypeIfc.CALCULATED_QUESTION)) { log.debug("FILL_IN_BLANK, FILL_IN_NUMERIC"); isFinFib = true; String thistext = ""; Long answerid = grade.getPublishedAnswerId(); Long sequence = null; if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); if (answer != null) { sequence = answer.getSequence(); } } String temptext = grade.getAnswerText(); if (temptext == null) { temptext = "No Answer"; } thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.MATCHING)) { log.debug("MATCHING"); String thistext = ""; // for some question types we have another text field Long answerid = grade.getPublishedAnswerId(); String temptext = "No Answer"; Long sequence = null; if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); if (answer != null) { temptext = answer.getText(); if (temptext == null) { temptext = "No Answer"; } sequence = answer.getItemText().getSequence(); } else if (answerid == -1) { temptext = "None of the Above"; ItemTextIfc itemTextIfc = 
(ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); sequence = itemTextIfc.getSequence(); } } else { ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); sequence = itemTextIfc.getSequence(); } thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.IMAGEMAP_QUESTION)) { log.debug("MATCHING"); ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); Long sequence = itemTextIfc.getSequence(); String temptext = (grade.getIsCorrect()) ? "OK" : "No OK"; String thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.IMAGEMAP_QUESTION)) { log.debug("MATCHING"); ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); Long sequence = itemTextIfc.getSequence(); String temptext = (grade.getIsCorrect()) ? "OK" : "No OK"; String thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.IMAGEMAP_QUESTION)) { log.debug("MATCHING"); ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); Long sequence = itemTextIfc.getSequence(); String temptext = (grade.getIsCorrect()) ? 
"OK" : "No OK"; String thistext = sequence + ": " + temptext; if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } else if (typeId.equals(TypeIfc.EXTENDED_MATCHING_ITEMS)) { log.debug("EXTENDED_MATCHING_ITEMS"); String thistext = ""; // for some question types we have another text field Long answerid = grade.getPublishedAnswerId(); String temptext = "No Answer"; Long sequence = null; if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); if (answer != null) { temptext = answer.getLabel(); if (temptext == null) { temptext = "No Answer"; } sequence = answer.getItemText().getSequence(); } } if (sequence == null) { ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); if (itemTextIfc != null) { sequence = itemTextIfc.getSequence(); } } if (sequence != null) { thistext = emiAnswerText.get(sequence); if (thistext == null) { thistext = temptext; } else { thistext = thistext + temptext; } emiAnswerText.put(sequence, thistext); } else { // Orphaned answer: the answer item to which it refers was removed after the assessment was taken, // as a result of editing the published assessment. This behaviour should be fixed, i.e. it should // not be possible to get orphaned answer item references in the database. 
sequence = new Long(99); emiAnswerText.put(sequence, "Item Removed"); } } else if (typeId.equals(TypeIfc.MATRIX_CHOICES_SURVEY)) { log.debug("MATRIX_CHOICES_SURVEY"); // for this kind of question a responsesMap is generated matrixChoices = true; Long answerid = grade.getPublishedAnswerId(); String temptext = "No Answer"; Long sequence = null; if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); temptext = answer.getText(); if (temptext == null) { temptext = "No Answer"; } sequence = answer.getItemText().getSequence(); } else { ItemTextIfc itemTextIfc = (ItemTextIfc) publishedItemTextHash .get(grade.getPublishedItemTextId()); sequence = itemTextIfc.getSequence(); log.debug( "Answerid null for " + grade.getPublishedItemId() + ". Adding " + sequence); temptext = "No Answer"; } responsesMap.put(sequence, temptext); } else if (typeId.equals(TypeIfc.AUDIO_RECORDING)) { log.debug("AUDIO_RECORDING"); maintext = audioMessage; //isAudioFileUpload = true; } else if (typeId.equals(TypeIfc.FILE_UPLOAD)) { log.debug("FILE_UPLOAD"); maintext = fileUploadMessage; //isAudioFileUpload = true; } else if (typeId.equals(TypeIfc.ESSAY_QUESTION)) { log.debug("ESSAY_QUESTION"); if (grade.getAnswerText() != null) { maintext = grade.getAnswerText(); } } else { log.debug("other type"); String thistext = ""; // for some question types we have another text field Long answerid = grade.getPublishedAnswerId(); if (answerid != null) { AnswerIfc answer = (AnswerIfc) publishedAnswerHash.get(answerid); if (answer != null) { String temptext = answer.getText(); if (temptext != null) thistext = temptext; } else { log.warn("Published answer for " + answerid + " is null"); } } if (count == 0) maintext = thistext; else maintext = maintext + "|" + thistext; count++; } // taking care of rationale if (!addRationale && (typeId.equals(TypeIfc.MULTIPLE_CHOICE) || typeId.equals(TypeIfc.MULTIPLE_CORRECT) || typeId.equals(TypeIfc.MULTIPLE_CORRECT_SINGLE_SELECTION) || 
typeId.equals(TypeIfc.TRUE_FALSE))) { log.debug( "MULTIPLE_CHOICE or MULTIPLE_CORRECT or MULTIPLE_CORRECT_SINGLE_SELECTION or TRUE_FALSE"); if (publishedItemData.getHasRationale() != null && publishedItemData.getHasRationale()) { addRationale = true; rationale = grade.getRationale(); if (rationale == null) { rationale = ""; } } } //Survey - Matrix of Choices - Add Comment Field if (typeId.equals(TypeIfc.MATRIX_CHOICES_SURVEY)) { PublishedItemData pid = (PublishedItemData) publishedItemData; if (pid.getAddCommentFlag()) { addResponseComment = true; if (responseComment.equals("") && grade.getAnswerText() != null) { responseComment = grade.getAnswerText(); } } } } // inner for - answers if (!emiAnswerText.isEmpty()) { if (maintext == null) { maintext = ""; } for (Entry<Long, String> entry : emiAnswerText.entrySet()) { maintext = maintext + "|" + entry.getKey().toString() + ":" + entry.getValue(); } if (maintext.startsWith("|")) { maintext = maintext.substring(1); } } Integer sectionSequenceNumber = null; if (grade == null || EmptyItemGrading.class.isInstance(grade)) { sectionSequenceNumber = EmptyItemGrading.class.cast(grade).getSectionSequence(); questionNumber = EmptyItemGrading.class.cast(grade).getItemSequence(); // indicate that the student was not presented with this question maintext = "-"; } else { sectionSequenceNumber = updateSectionScore(sectionItems, sectionScores, grade.getPublishedItemId(), itemScore); } if (isFinFib && maintext.indexOf("No Answer") >= 0 && count == 1) { maintext = "No Answer"; } else if ("".equals(maintext)) { maintext = "No Answer"; } String itemGradingComments = ""; // if question type is not matrix choices apply the original code if (!matrixChoices) { responseList.add(maintext); if (grade.getComments() != null) { itemGradingComments = grade.getComments().replaceAll("<br\\s*/>", ""); } responseList.add(itemGradingComments); } else { // if there are questions not answered, a no answer response is added to the map ItemDataIfc 
correspondingPublishedItemData = (ItemDataIfc) publishedItemHash .get(grade.getPublishedItemId()); List correspondingItemTextArray = correspondingPublishedItemData.getItemTextArray(); log.debug("publishedItem is " + correspondingPublishedItemData.getText() + " and number of rows " + correspondingItemTextArray.size()); if (responsesMap.size() < correspondingItemTextArray.size()) { Iterator itItemTextHash = correspondingItemTextArray.iterator(); while (itItemTextHash.hasNext()) { ItemTextIfc itemTextIfc = (ItemTextIfc) itItemTextHash.next(); if (!responsesMap.containsKey(itemTextIfc.getSequence())) { log.debug("responsesMap does not contain answer to " + itemTextIfc.getText()); responsesMap.put(itemTextIfc.getSequence(), "No Answer"); } } } Iterator it = responsesMap.entrySet().iterator(); while (it.hasNext()) { Map.Entry e = (Map.Entry) it.next(); log.debug("Adding to response list " + e.getKey() + " and " + e.getValue()); responseList.add(e.getValue()); if (grade.getComments() != null) { itemGradingComments = grade.getComments().replaceAll("<br\\s*/>", ""); } responseList.add(itemGradingComments); itemGradingComments = ""; } } if (addRationale) { responseList.add(rationale); } if (addResponseComment) { responseList.add(responseComment); } // Only set header based on the first item grading data if (fistItemGradingData) { //get the pool name String poolName = null; for (Iterator i = publishedAssessmentSections.iterator(); i.hasNext();) { PublishedSectionData psd = (PublishedSectionData) i.next(); if (psd.getSequence().intValue() == sectionSequenceNumber) { poolName = psd.getSectionMetaDataByLabel(SectionDataIfc.POOLNAME_FOR_RANDOM_DRAW); } } if (!matrixChoices) { headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, textString, questionNumber, poolString, poolName)); if (addRationale) { headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, rationaleString, questionNumber, poolString, poolName)); } if (addResponseComment) 
{ headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, responseCommentString, questionNumber, poolString, poolName)); } headerList.add(makeHeader(partString, sectionSequenceNumber, questionString, itemGradingCommentsString, questionNumber, poolString, poolName)); } else { int numberRows = responsesMap.size(); for (int i = 0; i < numberRows; i = i + 1) { headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString, textString, questionNumber, i + 1, poolString, poolName)); if (addRationale) { headerList .add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString, rationaleString, questionNumber, i + 1, poolString, poolName)); } if (addResponseComment) { headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString, responseCommentString, questionNumber, i + 1, poolString, poolName)); } headerList.add(makeHeaderMatrix(partString, sectionSequenceNumber, questionString, itemGradingCommentsString, questionNumber, i + 1, poolString, poolName)); } } } } // outer for - questions if (showPartAndTotalScoreSpreadsheetColumns) { if (sectionScores.size() > 1) { Iterator keys = sectionScores.keySet().iterator(); while (keys.hasNext()) { Double partScore = (Double) ((Double) sectionScores.get(keys.next())).doubleValue(); responseList.add(sectionScoreColumnStart++, partScore); } } } dataList.add(responseList); if (fistItemGradingData) { fistItemGradingData = false; } } } // while if (!anonymous && useridSet.size() != 0) { Iterator iter = useridSet.iterator(); while (iter.hasNext()) { String id = (String) iter.next(); try { agentEid = userDirectoryService.getUser(id).getEid(); firstName = userDirectoryService.getUser(id).getFirstName(); lastName = userDirectoryService.getUser(id).getLastName(); } catch (Exception e) { log.error("Cannot get user"); } responseList = new ArrayList(); responseList.add(lastName); responseList.add(firstName); responseList.add(agentEid); responseList.add(noSubmissionMessage); 
dataList.add(responseList); } } Collections.sort(dataList, new ResponsesComparator(anonymous)); finalList.add(dataList); finalList.add(headerList); return finalList; }