Usage examples for java.util.List#indexOf(Object).
Method signature: int indexOf(Object o);
From source file:io.hops.erasure_coding.StripeReader.java
/** * Builds (codec.stripeLength + codec.parityLength) inputs given some erased * locations./*from w ww. j av a2 s . c om*/ * Outputs: * - the array of input streams @param inputs * - the list of erased locations @param erasedLocations. * - the list of locations that are not read @param locationsToNotRead. */ public InputStream[] buildInputs(FileSystem srcFs, Path srcFile, FileStatus srcStat, FileSystem parityFs, Path parityFile, FileStatus parityStat, int stripeIdx, long offsetInBlock, List<Integer> erasedLocations, List<Integer> locationsToRead, ErasureCode code) throws IOException { InputStream[] inputs = new InputStream[codec.stripeLength + codec.parityLength]; boolean redo = false; do { /* * In the first iteration locationsToRead is empty. * It is populated according to locationsToReadForDecode. * In consecutive iterations (if a stream failed to open) * the list is cleared and re-populated. */ locationsToRead.clear(); locationsToRead.addAll(code.locationsToReadForDecode(erasedLocations)); for (int i = 0; i < inputs.length; i++) { boolean isErased = (erasedLocations.indexOf(i) != -1); boolean shouldRead = (locationsToRead.indexOf(i) != -1); try { InputStream stm = null; if (isErased || !shouldRead) { if (isErased) { LOG.info("Location " + i + " is erased, using zeros"); } else { LOG.info("Location " + i + " need not be read, using zeros"); } stm = new RaidUtils.ZeroInputStream(srcStat.getBlockSize() * ((i < codec.parityLength) ? stripeIdx * codec.parityLength + i : stripeIdx * codec.stripeLength + i - codec.parityLength)); } else { stm = buildOneInput(i, offsetInBlock, srcFs, srcFile, srcStat, parityFs, parityFile, parityStat); } inputs[i] = stm; } catch (IOException e) { if (e instanceof BlockMissingException || e instanceof ChecksumException) { erasedLocations.add(i); redo = true; RaidUtils.closeStreams(inputs); break; } else { throw e; } } } } while (redo); return inputs; }
From source file:fr.mcc.ginco.services.ConceptHierarchicalRelationshipServiceUtil.java
/**
 * Saves the hierarchical (parent/child) relationships of a concept.
 *
 * Validates the new relationships (no parent/child cycles, no sibling used
 * as parent or child, mono-hierarchy constraint), then applies the parent
 * additions/removals, recomputes root concepts, attaches/detaches children,
 * and persists the concept.
 *
 * @param conceptToUpdate           the concept whose hierarchy is being edited
 * @param hierarchicalRelationships the new set of parent relationships
 * @param allRecursiveParents       all transitive parents of the concept
 * @param allRecursiveChilds        all transitive children of the concept
 * @param childrenConceptToDetach   children to remove from this concept
 * @param childrenConceptToAttach   children to add to this concept
 * @return the updated (persisted) concept
 * @throws BusinessException on any hierarchy-constraint violation
 */
@Override
public ThesaurusConcept saveHierarchicalRelationship(ThesaurusConcept conceptToUpdate,
        List<ConceptHierarchicalRelationship> hierarchicalRelationships,
        List<ThesaurusConcept> allRecursiveParents, List<ThesaurusConcept> allRecursiveChilds,
        List<ThesaurusConcept> childrenConceptToDetach, List<ThesaurusConcept> childrenConceptToAttach) {
    // We update the modified relations, and we delete the relations that
    // have been removed.
    List<String> oldParentConceptIds = new ArrayList<String>();
    if (!conceptToUpdate.getParentConcepts().isEmpty()) {
        oldParentConceptIds = ThesaurusConceptUtils
                .getIdsFromConceptList(new ArrayList<ThesaurusConcept>(conceptToUpdate.getParentConcepts()));
    }
    // Ids of the parents requested by the new relationships.
    List<String> newParentConceptIds = new ArrayList<String>();
    for (ConceptHierarchicalRelationship relation : hierarchicalRelationships) {
        newParentConceptIds.add(relation.getIdentifier().getParentconceptid());
    }
    // Ids of the children being attached.
    List<String> newChildConceptIds = new ArrayList<String>();
    for (ThesaurusConcept newChild : childrenConceptToAttach) {
        newChildConceptIds.add(newChild.getIdentifier());
    }
    // Check loops: a transitive child may not become a parent.
    for (ThesaurusConcept childConcept : allRecursiveChilds) {
        if (newParentConceptIds.contains(childConcept.getIdentifier())) {
            throw new BusinessException("A parent concept cannot be the child of the same concept",
                    "hierarchical-loop-violation");
        }
    }
    // Check loops: a transitive parent may not become a child.
    for (ThesaurusConcept parentConcept : allRecursiveParents) {
        if (newChildConceptIds.contains(parentConcept.getIdentifier())) {
            throw new BusinessException("A child concept cannot be the parent of the same concept",
                    "hierarchical-loop-violation");
        }
    }
    // Verify if the concept doesn't have one of its brothers as parent.
    for (String currentParentId : newParentConceptIds) {
        List<String> childrenOfCurrentParentIds = ThesaurusConceptUtils
                .getIdsFromConceptList(thesaurusConceptDAO.getChildrenConcepts(currentParentId, 0, null));
        List<String> commonIds = new ArrayList<String>(newParentConceptIds);
        // Compare both lists and see which elements are in common.
        // Those elements are both parents and brothers to the considered concept.
        commonIds.retainAll(childrenOfCurrentParentIds);
        if (!commonIds.isEmpty()) {
            // Build a comma-separated list of the offending preferred terms
            // for the error message.
            String commonPreferedTerms = "";
            for (String conceptId : commonIds) {
                if (commonIds.indexOf(conceptId) != 0) {
                    commonPreferedTerms += ", ";
                }
                commonPreferedTerms += thesaurusTermDAO.getConceptPreferredTerm(conceptId).getLexicalValue();
            }
            throw new BusinessException(
                    "A concept cannot have one of its brother (" + commonPreferedTerms + ") as a parent",
                    "hierarchical-brotherIsParent-violation", new Object[] { commonPreferedTerms });
        }
    }
    // Collect the ids of all current siblings (children of every existing parent).
    List<String> brotherIds = new ArrayList<String>();
    for (ThesaurusConcept parentConcept : conceptToUpdate.getParentConcepts()) {
        brotherIds.addAll(ThesaurusConceptUtils.getIdsFromConceptList(
                thesaurusConceptDAO.getChildrenConcepts(parentConcept.getIdentifier(), 0, null)));
    }
    // Verify if the concept doesn't have one of its brothers as child.
    for (String currentChildId : newChildConceptIds) {
        if (brotherIds.contains(currentChildId)) {
            // NOTE(review): unlike the parent branch above, commonIds is NOT
            // filtered with retainAll here, so the message lists ALL new
            // children, and the message/key still say "as a parent" — this
            // looks like a copy-paste from the parent check; confirm intent.
            List<String> commonIds = new ArrayList<String>(newChildConceptIds);
            String commonPreferedTerms = "";
            for (String conceptId : commonIds) {
                if (commonIds.indexOf(conceptId) != 0) {
                    commonPreferedTerms += ", ";
                }
                commonPreferedTerms += thesaurusTermDAO.getConceptPreferredTerm(conceptId).getLexicalValue();
            }
            throw new BusinessException(
                    "A concept cannot have one of its brother (" + commonPreferedTerms + ") as a parent",
                    "hierarchical-brotherIsParent-violation", new Object[] { commonPreferedTerms });
        }
    }
    // Diff the old and new parent sets.
    List<String> addedParentConceptIds = ListUtils.subtract(newParentConceptIds, oldParentConceptIds);
    List<String> removedParentConceptIds = ListUtils.subtract(oldParentConceptIds, newParentConceptIds);
    List<ThesaurusConcept> addedParentConcepts = new ArrayList<ThesaurusConcept>();
    for (String id : addedParentConceptIds) {
        addedParentConcepts.add(thesaurusConceptDAO.getById(id));
    }
    List<ThesaurusConcept> removedParentConcepts = new ArrayList<ThesaurusConcept>();
    for (String id : removedParentConceptIds) {
        removedParentConcepts.add(thesaurusConceptDAO.getById(id));
    }
    if (!addedParentConcepts.isEmpty() || !removedParentConcepts.isEmpty()) {
        // Treatment in case of modified hierarchy (both add or remove).
        // We remove this concept from all arrays it belongs to.
        List<ThesaurusArrayConcept> arrays = thesaurusArrayConceptDAO.getArraysOfConcept(conceptToUpdate);
        for (ThesaurusArrayConcept thesaurusArrayConcept : arrays) {
            thesaurusArrayConceptDAO.delete(thesaurusArrayConcept);
        }
        // We remove all removed parents.
        if (!removedParentConcepts.isEmpty()) {
            removeParents(conceptToUpdate, removedParentConcepts);
        }
        // We set all added parents.
        Set<ThesaurusConcept> addedParentsSet = new HashSet<ThesaurusConcept>();
        for (ThesaurusConcept addedParentId : addedParentConcepts) {
            addedParentsSet.add(addedParentId);
        }
        if (!addedParentConcepts.isEmpty()) {
            conceptToUpdate.getParentConcepts().addAll(addedParentsSet);
            // A concept that now has parents is no longer a top concept.
            conceptToUpdate.setTopConcept(false);
        }
        if (!conceptToUpdate.getThesaurus().isPolyHierarchical()
                && conceptToUpdate.getParentConcepts().size() > 1) {
            throw new BusinessException(
                    "Thesaurus is monohierarchical, but some concepts have multiple parents!",
                    "monohierarchical-violation");
        }
        // We calculate the root concepts for the concept to update.
        conceptToUpdate.setRootConcepts(new HashSet<ThesaurusConcept>(getRootConcepts(conceptToUpdate)));
        // We launch an async method to calculate new root concepts for the
        // children of the concept we update.
        calculateChildrenRoots(conceptToUpdate.getIdentifier(), conceptToUpdate.getIdentifier());
    }
    // We process children attach/detach, then persist.
    addChildren(conceptToUpdate, childrenConceptToAttach);
    removeChildren(conceptToUpdate, childrenConceptToDetach);
    thesaurusConceptDAO.update(conceptToUpdate);
    thesaurusConceptDAO.flush();
    saveRoleOfHierarchicalRelationship(hierarchicalRelationships);
    return conceptToUpdate;
}
From source file:com.twinsoft.convertigo.beans.core.StepWithExpressions.java
/** * Get representation of order for quick sort of a given database object. *///from ww w . ja va2 s . c om @Override public Object getOrder(Object object) throws EngineException { if (object instanceof Step) { List<Long> ordered = orderedSteps.get(0); long time = ((Step) object).priority; if (ordered.contains(time)) return (long) ordered.indexOf(time); else throw new EngineException("Corrupted step for StepWithExpressions \"" + getName() + "\". Step \"" + ((Step) object).getName() + "\" with priority \"" + time + "\" isn't referenced anymore."); } else return super.getOrder(object); }
From source file:hydrograph.ui.propertywindow.widgets.utility.SchemaSyncUtility.java
public Map<Integer, FilterProperties> retainIndexAndValueOfParameterFields( List<FilterProperties> filterProperties) { Map<Integer, FilterProperties> indexAndValueOfParameter = new HashMap<Integer, FilterProperties>(); for (FilterProperties filterProperty : filterProperties) { if (ParameterUtil.isParameter(filterProperty.getPropertyname())) { indexAndValueOfParameter.put(filterProperties.indexOf(filterProperty), filterProperty); }//from w ww . j ava 2s .co m } return indexAndValueOfParameter; }
From source file:com.alibaba.dubbo.util.KetamaNodeLocatorTest.java
@Test public void testDistribution() { final int nodeSize = 10; final int keySize = 10000; final List<String> nodes = generateRandomStrings(nodeSize); final long start1 = System.currentTimeMillis(); final KetamaNodeLocator locator = new KetamaNodeLocator(nodes); // Make sure the initialization doesn't take too long. assertTrue((System.currentTimeMillis() - start1) < 100); final int[] counts = new int[nodeSize]; for (int ix = 0; ix < nodeSize; ix++) { counts[ix] = 0;//w ww . ja v a 2 s .c o m } final List<String> keys = generateRandomStrings(keySize); for (final String key : keys) { final String primary = locator.getPrimary(key); counts[nodes.indexOf(primary)] += 1; } // Give about a 30% leeway each way... final int min = (keySize * 7) / (nodeSize * 10); final int max = (keySize * 13) / (nodeSize * 10); int total = 0; boolean error = false; final StringBuilder sb = new StringBuilder("Key distribution error - \n"); for (int ix = 0; ix < nodeSize; ix++) { if (counts[ix] < min || counts[ix] > max) { error = true; sb.append(" !! "); } else { sb.append(" "); } sb.append(StringUtils.rightPad(nodes.get(ix), 12)).append(": ").append(counts[ix]).append("\n"); total += counts[ix]; } // Make sure we didn't miss any keys returning values. assertEquals(keySize, total); // System.out.println(sb.toString()); if (error) { fail(sb.toString()); } }
From source file:gov.nih.nci.ncicb.cadsr.common.jsp.tag.handler.AvailableValidValue.java
public String generateHtml(List nonListedVVs, List availableValidVaues, String questionIdSeq) { StringBuffer selectHtml = new StringBuffer( "<select class=\"" + selectClassName + "\" name=\"" + selectName + "\"> \n"); StringBuffer optionHtml = (StringBuffer) pageContext.getAttribute(questionIdSeq + "validValueOptionBuffer"); ////from ww w . jav a2s. c o m //The options are cached since they dont change for the same question // if (optionHtml != null) { return selectHtml.toString() + optionHtml.toString(); } optionHtml = new StringBuffer(); ListIterator avalilableVVsListIterate = nonListedVVs.listIterator(); while (avalilableVVsListIterate.hasNext()) { FormValidValue fvv = (FormValidValue) avalilableVVsListIterate.next(); int index = availableValidVaues.indexOf(fvv); optionHtml.append("<option value=\"" + index + "\">" + fvv.getLongName() + "</option> \n"); } optionHtml.append("</select>"); pageContext.setAttribute(questionIdSeq + "validValueOptionBuffer", optionHtml); return selectHtml.toString() + optionHtml.toString(); }
From source file:burstcoin.observer.service.NetworkService.java
/**
 * Builds the data rows for a Sankey chart of network forks: for each pair of
 * consecutive block heights (the last ~11 blocks), it links each generation
 * signature at the source height to the signature the same domains report at
 * the target height. Each row is [sourceName, targetName, nodeCount, tooltip].
 *
 * Domains whose target signature is not yet known are parked in
 * remainingSourcesWithoutTarget and linked once a later height provides one.
 *
 * @return list of chart rows (Arrays.asList(source, target, weight, tooltip))
 */
private List<List> createSenkeyChartData() {
    List<List> multiSenkeyData = new ArrayList<>();
    // Only chart the most recent blocks.
    int numberOfBlocks = 11;
    List<Long> orderedHeight = new ArrayList<>(genSigLookup.keySet());
    Collections.sort(orderedHeight);
    // Tail of the sorted heights: at most numberOfBlocks entries.
    List<Long> heightSub = orderedHeight.subList(
            (orderedHeight.size() > numberOfBlocks ? orderedHeight.size() - numberOfBlocks : 0),
            orderedHeight.size());
    // genSig -> block height where it was first seen as a source.
    Map<String, Long> sourcePerGenSigLookup = new HashMap<>();
    // height -> (domain -> genSig) for domains still lacking a target link.
    Map<Long, Map<String, String>> remainingSourcesWithoutTarget = new HashMap<>();
    for (Long source : heightSub) {
        remainingSourcesWithoutTarget.put(source, new HashMap<>());
        // Skip the last height: it has no following height to link to.
        if (heightSub.indexOf(source) < heightSub.size() - 1) {
            Long target = heightSub.get(heightSub.indexOf(source) + 1);
            Map<String, Set<String>> sourceGenSigDomainLookup = genSigLookup.get(source);
            // Invert to: domain -> gensig (source side).
            Map<String, String> sourceMapping = new HashMap<>();
            for (Map.Entry<String, Set<String>> sourceEntry : sourceGenSigDomainLookup.entrySet()) {
                for (String domain : sourceEntry.getValue()) {
                    sourceMapping.put(domain, sourceEntry.getKey());
                }
            }
            Map<String, Set<String>> targetGenSigDomainLookup = genSigLookup.get(target);
            // Invert to: domain -> gensig (target side).
            Map<String, String> targetMapping = new HashMap<>();
            for (Map.Entry<String, Set<String>> targetEntry : targetGenSigDomainLookup.entrySet()) {
                for (String domain : targetEntry.getValue()) {
                    targetMapping.put(domain, targetEntry.getKey());
                }
            }
            // sourceGenSig -> targetGensig -> domains making that transition.
            Map<String, Map<String, List<String>>> fromToCounter = new HashMap<>();
            // domain -> genSig
            for (Map.Entry<String, String> entry : sourceMapping.entrySet()) {
                String domain = entry.getKey();
                String sourceGenSig = entry.getValue();
                String targetGenSig = targetMapping.get(domain);
                // Remember source block height by genSig.
                sourcePerGenSigLookup.put(sourceGenSig, source);
                // Skip if from/to is not available.
                if (targetGenSig != null) {
                    if (!fromToCounter.containsKey(sourceGenSig)) {
                        fromToCounter.put(sourceGenSig, new HashMap<>());
                    }
                    if (!fromToCounter.get(sourceGenSig).containsKey(targetGenSig)) {
                        fromToCounter.get(sourceGenSig).put(targetGenSig, new ArrayList<>());
                    }
                    fromToCounter.get(sourceGenSig).get(targetGenSig).add(domain);
                } else {
                    // No target found yet; park for a later height.
                    remainingSourcesWithoutTarget.get(source).put(entry.getKey(), entry.getValue());
                }
            }
            // Check if we have a target now for previously parked sources.
            List<Long> previousSources = new ArrayList<>(remainingSourcesWithoutTarget.keySet());
            // Ignore the current source height.
            if (previousSources.contains(source)) {
                previousSources.remove(source);
            }
            Collections.sort(previousSources);
            for (Long previousSource : previousSources) {
                Iterator<Map.Entry<String, String>> iter = remainingSourcesWithoutTarget.get(previousSource)
                        .entrySet().iterator();
                while (iter.hasNext()) {
                    Map.Entry<String, String> entry = iter.next();
                    String domain = entry.getKey();
                    String sourceGenSig = entry.getValue();
                    String targetGenSig = targetMapping.get(domain);
                    // Skip if from/to is not available.
                    if (targetGenSig != null) {
                        if (!fromToCounter.containsKey(sourceGenSig)) {
                            fromToCounter.put(sourceGenSig, new HashMap<>());
                        }
                        if (!fromToCounter.get(sourceGenSig).containsKey(targetGenSig)) {
                            fromToCounter.get(sourceGenSig).put(targetGenSig, new ArrayList<>());
                        }
                        fromToCounter.get(sourceGenSig).get(targetGenSig).add(domain);
                        // Linked now; remove from the parked set.
                        iter.remove();
                    }
                }
                if (remainingSourcesWithoutTarget.get(previousSource).isEmpty()) {
                    remainingSourcesWithoutTarget.remove(previousSource);
                }
            }
            // Emit one chart row per (sourceGenSig, targetGenSig) transition.
            for (String sourceGenSig : fromToCounter.keySet()) {
                for (Map.Entry<String, List<String>> entry : fromToCounter.get(sourceGenSig).entrySet()) {
                    Long currentSource = sourcePerGenSigLookup.get(sourceGenSig);
                    String sourceName = currentSource + " [" + sourceGenSig.substring(0, 4) + "..]";
                    String targetName = target + " [" + entry.getKey().substring(0, 4) + "..]";
                    String tooltip = entry.getValue().size() + " nodes," + " from " + currentSource + " ["
                            + sourceGenSig.substring(0, 6) + "..]," + " to " + target + " ["
                            + entry.getKey().substring(0, 6) + "..]";
                    multiSenkeyData
                            .add(Arrays.asList(sourceName, targetName, entry.getValue().size(), tooltip));
                }
            }
        }
    }
    return multiSenkeyData;
}
From source file:org.jahia.modules.external.test.db.WriteableMappedDatabaseProvider.java
/**
 * Deletes a single row identified by a path of the form "table/rowId".
 *
 * @param path external path: first segment is the table name, second encodes
 *             the primary-key values of the row to delete
 * @throws PathNotFoundException if the path does not have exactly two segments
 * @throws RepositoryException   if the SQL delete fails
 */
private void deleteRow(String path) throws RepositoryException {
    String[] pathTokens = StringUtils.split(path, '/');
    if (pathTokens.length != 2) {
        throw new PathNotFoundException(path);
    }
    String table = pathTokens[0];
    String rowId = pathTokens[1];
    Connection conn = null;
    java.sql.PreparedStatement stmt = null;
    ResultSet rs = null;
    try {
        conn = getConnection();
        List<String> primaryKeys = getTablePrimaryKeys(table, conn);
        String[] rowData = getValuesForPrimayKeys(rowId);
        // Bind key VALUES as '?' parameters instead of concatenating them
        // into the SQL string: the values come from the request path and the
        // old string-built query was an SQL-injection vector. Column names
        // come from database metadata; the table name is still concatenated —
        // presumably validated upstream by the path resolution, TODO confirm.
        StringBuilder where = new StringBuilder();
        for (String col : primaryKeys) {
            if (where.length() > 0) {
                where.append(" and ");
            }
            where.append(col).append("=?");
        }
        stmt = conn.prepareStatement("delete from " + table + " where " + where);
        // rowData values are positionally aligned with primaryKeys.
        for (int i = 0; i < primaryKeys.size(); i++) {
            stmt.setString(i + 1, rowData[i]);
        }
        stmt.executeUpdate();
    } catch (SQLException e) {
        logger.debug(e.getMessage(), e);
        throw new RepositoryException(path, e);
    } finally {
        DbUtility.close(conn, stmt, rs);
    }
}
From source file:com.meltmedia.cadmium.cli.LoggerCommand.java
@Override public void execute() throws Exception { String logger = null;//from w w w . j ava 2 s. c om String level = null; String site = null; if (params != null && params.size() >= 3) { logger = params.get(0); level = params.get(1); site = params.get(2); } else if (params != null && params.size() == 2) { String value = params.get(0); try { level = Level.toLevel(value, null).levelStr; } catch (Exception e) { logger = value; } site = params.get(1); } else if (params != null && params.size() == 1) { site = params.get(0); } else { System.err.println("A site is required!"); System.exit(1); } HttpClient client = httpClient(); HttpMessage method = null; site = this.getSecureBaseUrl(site); if (level != null) { String uri = site + "/system/logger/" + (StringUtils.isNotBlank(logger) ? logger + "/" : ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME + "/") + (StringUtils.isNotBlank(level) ? level : "DEBUG"); System.out.println("Updating logger [" + (StringUtils.isNotBlank(logger) ? logger : "ROOT") + "] to level [" + level + "] for site " + site); method = new HttpPost(uri); } else { String uri = site + "/system/logger/" + (StringUtils.isNotBlank(logger) ? logger + "/" : ""); System.out.println("Getting levels for " + (StringUtils.isNotBlank(logger) ? 
logger : "all") + " logger[s] on site " + site); method = new HttpGet(uri); } addAuthHeader(method); HttpResponse response = client.execute((HttpUriRequest) method); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) { LoggerServiceResponse configs = new Gson().fromJson(EntityUtils.toString(response.getEntity()), LoggerServiceResponse.class); List<String> nodes = new ArrayList<String>(); nodes.addAll(configs.getConfigs().keySet()); Collections.sort(nodes); List<String> loggers = new ArrayList<String>(); Map<String, String[]> loggerLevels = new HashMap<String, String[]>(); for (String node : nodes) { for (LoggerConfig config : configs.getConfigs().get(node)) { if (!loggers.contains(config.getName())) { loggers.add(config.getName()); loggerLevels.put(config.getName(), new String[nodes.size()]); Arrays.fill(loggerLevels.get(config.getName()), "-"); } loggerLevels.get(config.getName())[nodes.indexOf(node)] = config.getLevel(); } } Collections.sort(loggers); if (loggers.remove(ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME)) { loggers.add(0, ch.qos.logback.classic.Logger.ROOT_LOGGER_NAME); } System.out.println("Got " + loggers.size() + " logger[s] and " + nodes.size() + " node[s]"); for (String loggerName : loggers) { System.out.println("Logger: " + loggerName); String levels[] = loggerLevels.get(loggerName); for (String node : nodes) { System.out.println(" " + node + ": " + levels[nodes.indexOf(node)]); } } } else { System.err.println("Request failed: " + response.getStatusLine()); System.err.println("Raw response [" + EntityUtils.toString(response.getEntity()) + "]"); } }
From source file:com.b2international.snowowl.snomed.importer.net4j.SnomedSubsetImportUtil.java
private boolean processExcelFile(final SubsetEntry entry) throws InvalidFormatException, IOException { final FileInputStream inputStream = createFileInputStream(entry); final Workbook workbook = WorkbookFactory.create(inputStream); final List<Integer> list = getSheetAndFirstRowNumber(workbook, workbook.getNumberOfSheets()); if (null != list) { final int sheetNumber = list.get(0); final int firstRowNumber = list.get(1); final Sheet sheet = workbook.getSheetAt(sheetNumber); final List<String> row = collectRowValues(sheet.getRow(firstRowNumber)); entry.setHeadings(row);//w ww. j av a2 s . com entry.setSheetNumber(sheetNumber); if (entry.isHasHeader()) { Optional<String> match = FluentIterable.from(row).firstMatch(new Predicate<String>() { @Override public boolean apply(String input) { return input.contains("concept") && (input.contains("id") || input.contains("sctid")); } }); entry.setIdColumnNumber(match.isPresent() ? row.indexOf(match.get()) : 0); // default to first? } else { for (int i = 0; i < row.size(); i++) { if (isConceptId(row.get(i).trim())) { entry.setIdColumnNumber(i); } } } return true; } else { return false; } }