List of usage examples for java.lang Long compare
public static int compare(long x, long y)
From source file:org.apache.eagle.jpm.mr.running.parser.MRJobParser.java
private Set<String> calcFetchCounterAndAttemptTaskId(List<MRTask> tasks) { Set<String> needFetchAttemptTasks = new HashSet<>(); //1, sort by elapsedTime Comparator<MRTask> byElapsedTimeIncrease = (e1, e2) -> Long.compare(e1.getElapsedTime(), e2.getElapsedTime());/*from ww w . j a v a 2 s. co m*/ Comparator<MRTask> byElapsedTimeDecrease = (e1, e2) -> -1 * Long.compare(e1.getElapsedTime(), e2.getElapsedTime()); //2, get finished bottom n Iterator<MRTask> taskIteratorIncrease = tasks.stream() .filter(task -> task.getState().equals(Constants.TaskState.SUCCEEDED.toString())) .sorted(byElapsedTimeIncrease).iterator(); needFetchAttemptTasks(taskIteratorIncrease, needFetchAttemptTasks); //3, fetch finished top n Iterator<MRTask> taskIteratorDecrease = tasks.stream() .filter(task -> task.getState().equals(Constants.TaskState.SUCCEEDED.toString())) .sorted(byElapsedTimeDecrease).iterator(); needFetchAttemptTasks(taskIteratorDecrease, needFetchAttemptTasks); //4, fetch running top n taskIteratorDecrease = tasks.stream() .filter(task -> task.getState().equals(Constants.TaskState.RUNNING.toString())) .sorted(byElapsedTimeDecrease).iterator(); needFetchAttemptTasks(taskIteratorDecrease, needFetchAttemptTasks); return needFetchAttemptTasks; }
From source file:org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager.java
/** * Returns all update delta files of specified Segment. * * @param segmentId//from w ww.j a v a 2s.com * @param validUpdateFiles if true then only the valid range files will be returned. * @return */ public CarbonFile[] getUpdateDeltaFilesList(String segmentId, final boolean validUpdateFiles, final String fileExtension, final boolean excludeOriginalFact, CarbonFile[] allFilesOfSegment, boolean isAbortedFile) { String endTimeStamp = ""; String startTimeStamp = ""; long factTimeStamp = 0; LoadMetadataDetails[] segmentDetails = SegmentStatusManager .readLoadMetadata(CarbonTablePath.getMetadataPath(identifier.getTablePath())); for (LoadMetadataDetails eachSeg : segmentDetails) { if (eachSeg.getLoadName().equalsIgnoreCase(segmentId)) { // if the segment is found then take the start and end time stamp. startTimeStamp = eachSeg.getUpdateDeltaStartTimestamp(); endTimeStamp = eachSeg.getUpdateDeltaEndTimestamp(); factTimeStamp = eachSeg.getLoadStartTime(); } } // if start timestamp is empty then no update delta is found. so return empty list. if (startTimeStamp.isEmpty()) { return new CarbonFile[0]; } final Long endTimeStampFinal = CarbonUpdateUtil.getTimeStampAsLong(endTimeStamp); final Long startTimeStampFinal = CarbonUpdateUtil.getTimeStampAsLong(startTimeStamp); final long factTimeStampFinal = factTimeStamp; List<CarbonFile> listOfCarbonFiles = new ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE); // else scan the segment for the delta files with the respective timestamp. 
for (CarbonFile eachFile : allFilesOfSegment) { String fileName = eachFile.getName(); if (fileName.endsWith(fileExtension)) { String firstPart = fileName.substring(0, fileName.lastIndexOf('.')); long timestamp = Long.parseLong(firstPart .substring(firstPart.lastIndexOf(CarbonCommonConstants.HYPHEN) + 1, firstPart.length())); if (excludeOriginalFact) { if (Long.compare(factTimeStampFinal, timestamp) == 0) { continue; } } if (validUpdateFiles) { if (Long.compare(timestamp, endTimeStampFinal) <= 0 && Long.compare(timestamp, startTimeStampFinal) >= 0) { listOfCarbonFiles.add(eachFile); } } else { // invalid cases. if (isAbortedFile) { if (Long.compare(timestamp, endTimeStampFinal) > 0) { listOfCarbonFiles.add(eachFile); } } else if (Long.compare(timestamp, startTimeStampFinal) < 0 || Long.compare(timestamp, endTimeStampFinal) > 0) { listOfCarbonFiles.add(eachFile); } } } } return listOfCarbonFiles.toArray(new CarbonFile[listOfCarbonFiles.size()]); }
From source file:org.lamport.tla.toolbox.tool.tlc.model.Model.java
private void pruneOldestSnapshots() throws CoreException { // Sort model by snapshot timestamp and remove oldest ones. final int snapshotKeepCount = TLCActivator.getDefault().getPreferenceStore() .getInt(TLCActivator.I_TLC_SNAPSHOT_KEEP_COUNT); final List<Model> snapshotModels = new ArrayList<>(getSnapshots()); if (snapshotModels.size() > snapshotKeepCount) { final int pruneCount = snapshotModels.size() - snapshotKeepCount; Collections.sort(snapshotModels, new Comparator<Model>() { public int compare(final Model model1, final Model model2) { final long ts1 = model1.getSnapshotTimeStamp(); final long ts2 = model2.getSnapshotTimeStamp(); return Long.compare(ts1, ts2); }/* w w w. j a v a 2s .c om*/ }); for (int i = 0; i < pruneCount; i++) { final Model model = snapshotModels.get(i); model.delete(new NullProgressMonitor()); } } }
From source file:org.openhab.ui.cometvisu.servlet.CometVisuServlet.java
/**
 * Serves an RSS feed from a persisted string-item backend for the CometVisu
 * rsslog-plugin.
 *
 * Depending on the request parameters this either stores a new log entry
 * ("c"), rejects unsupported delete/update operations ("r", "u", "d"),
 * silently accepts "dump", or renders the persisted history of the requested
 * items ("f", comma-separated item names) as JSON ("j") or RSS.
 *
 * @param file the requested file (not used by this handler)
 * @param request the servlet request carrying the rsslog parameters
 * @param response the servlet response the feed is written to
 */
private void processRssLogRequest(File file, HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    // retrieve the item
    if (request.getParameter("f") == null)
        return;
    String[] itemNames = request.getParameter("f").split(",");
    List<Item> items = new ArrayList<Item>();
    for (String name : itemNames) {
        try {
            Item item = cometVisuApp.getItemRegistry().getItem(name);
            items.add(item);
        } catch (ItemNotFoundException e) {
            logger.error("item '{}' not found", name);
        }
    }
    if (items.size() > 0) {
        // Fallback to first persistenceService from list
        if (!CometVisuApp.getPersistenceServices().entrySet().iterator().hasNext()) {
            throw new IllegalArgumentException("No Persistence service found.");
        }
        if (request.getParameter("c") != null) {
            if (items.size() == 1) {
                // new log message should be store
                String title = request.getParameter("h");
                String message = request.getParameter("c");
                String state = request.getParameter("state");
                // Build message: title|content|state|itemName joined by rssLogMessageSeparator.
                Command command = new StringType(title + rssLogMessageSeparator + message
                        + rssLogMessageSeparator + state + rssLogMessageSeparator + items.get(0).getName());
                // Use the event publisher to store the item in the defined
                // persistance services
                cometVisuApp.getEventPublisher()
                        .post(ItemEventFactory.createCommandEvent(items.get(0).getName(), command));
            }
            // send empty response??
            response.setContentType("text/plain");
            response.getWriter().write("");
            response.flushBuffer();
        } else if (request.getParameter("dump") != null) {
            // NOTE(review): "dump" is accepted but intentionally produces no output here.
        } else if (request.getParameter("r") != null) {
            // delete all log lines older than the timestamp and optional a
            // filter
            // => not possible to remove data from persistence service
            response.setContentType("text/plain");
            response.getWriter().write(
                    "Cannot execute query: It is not possible to delete data from openHAB PersistenceService");
            response.flushBuffer();
        } else if (request.getParameter("u") != null) {
            // update state
            response.setContentType("text/plain");
            response.getWriter().write(
                    "Cannot execute query: It is not possible to update data from openHAB PersistenceService");
            response.flushBuffer();
        } else if (request.getParameter("d") != null) {
            // update state
            response.setContentType("text/plain");
            response.getWriter().write(
                    "Cannot execute query: It is not possible to delete data from openHAB PersistenceService");
            response.flushBuffer();
        } else {
            Feed feed = new Feed();
            feed.feedUrl = request.getRequestURL().toString();
            feed.title = "RSS supplied logs";
            feed.link = request.getRequestURL().toString();
            feed.author = "";
            feed.description = "RSS supplied logs";
            feed.type = "rss20";
            // Define the data filter
            FilterCriteria filter = new FilterCriteria();
            Calendar start = Calendar.getInstance();
            // retrieve only the historic states from the last 7 days + BeginDate is required for RRD4j service
            start.add(Calendar.DAY_OF_YEAR, -7);
            // Date end = new Date();
            filter.setBeginDate(start.getTime());
            // filter.setEndDate(end);
            filter.setPageSize(25);
            filter.setOrdering(Ordering.DESCENDING);
            for (Item item : items) {
                filter.setItemName(item.getName());
                Iterator<Entry<String, QueryablePersistenceService>> pit = CometVisuApp.getPersistenceServices()
                        .entrySet().iterator();
                QueryablePersistenceService persistenceService = pit.next().getValue();
                // Get the data from the persistence store
                Iterable<HistoricItem> result = persistenceService.query(filter);
                Iterator<HistoricItem> it = result.iterator();
                boolean forceStop = false;
                // NOTE(review): when falling back to the next service, "result" is
                // re-queried but "it" is never refreshed from it, so data found in a
                // fallback service appears to be ignored — verify this is intended.
                while (!forceStop && !it.hasNext()) {
                    if (pit.hasNext()) {
                        persistenceService = pit.next().getValue();
                        result = persistenceService.query(filter);
                    } else {
                        // no persisted data found for this item in any of
                        // the available persistence services
                        forceStop = true;
                    }
                }
                if (it.hasNext()) {
                    logger.debug("persisted data for item {} found in service {}", item.getName(),
                            persistenceService.getName());
                }
                // Iterate through the data
                int i = 0;
                while (it.hasNext()) {
                    i++;
                    HistoricItem historicItem = it.next();
                    if (historicItem.getState() == null || historicItem.getState().toString().isEmpty())
                        continue;
                    org.openhab.ui.cometvisu.internal.rrs.beans.Entry entry = new org.openhab.ui.cometvisu.internal.rrs.beans.Entry();
                    entry.publishedDate = historicItem.getTimestamp().getTime();
                    logger.info(rssPubDateFormat.format(entry.publishedDate) + ": " + historicItem.getState());
                    entry.tags = historicItem.getName();
                    // Persisted payload format: title|content|state|itemName,
                    // separated by rssLogMessageSeparator (see the "c" branch above).
                    String[] content = historicItem.getState().toString().split(rssLogMessageSeparator);
                    if (content.length == 0) {
                        entry.content = historicItem.getState().toString();
                    } else if (content.length == 1) {
                        entry.content = content[0];
                    } else if (content.length == 2) {
                        entry.title = content[0];
                        entry.content = content[1];
                    } else if (content.length == 3) {
                        entry.title = content[0];
                        entry.content = content[1];
                        entry.state = content[2];
                    } else if (content.length == 4) {
                        entry.title = content[0];
                        entry.content = content[1];
                        entry.state = content[2];
                        // ignore tags in content[3] as is is already known
                        // by item name
                    }
                    feed.entries.add(entry);
                }
                if ("rrd4j".equals(persistenceService.getName())
                        && FilterCriteria.Ordering.DESCENDING.equals(filter.getOrdering())) {
                    // the RRD4j PersistenceService does not support descending ordering so we do it manually
                    Collections.sort(feed.entries,
                            new Comparator<org.openhab.ui.cometvisu.internal.rrs.beans.Entry>() {
                                @Override
                                public int compare(org.openhab.ui.cometvisu.internal.rrs.beans.Entry o1,
                                        org.openhab.ui.cometvisu.internal.rrs.beans.Entry o2) {
                                    // newest first
                                    return Long.compare(o2.publishedDate, o1.publishedDate);
                                }
                            });
                }
                logger.debug("querying {} item from {} to {} => {} results on service {}", filter.getItemName(),
                        filter.getBeginDate(), filter.getEndDate(), i, persistenceService.getName());
            }
            if (request.getParameter("j") != null) {
                // request data in JSON format
                response.setContentType("application/json");
                response.getWriter().write("{\"responseData\": { \"feed\": " + marshalJson(feed)
                        + "},\"responseDetails\":null,\"responseStatus\":200}");
            } else {
                // request data in RSS format
                response.setContentType(MediaType.APPLICATION_ATOM_XML);
                // as the json bean structure does not map the rss structure
                // we cannot just marshal an XML
                String rss = "<?xml version=\"1.0\"?>\n<rss version=\"2.0\">\n<channel>\n";
                rss += "<title>" + feed.title + "</title>\n";
                rss += "<link>" + feed.link + "</link>\n";
                // NOTE(review): both tags below are misspelled ("desrciption" vs
                // "desription") and do not match each other, producing invalid RSS.
                rss += "<desrciption>" + feed.description + "</desription>\n";
                for (org.openhab.ui.cometvisu.internal.rrs.beans.Entry entry : feed.entries) {
                    rss += "<item>";
                    rss += "<title>" + entry.title + "</title>";
                    rss += "<description>" + entry.content + "</description>";
                    Date pubDate = new Date(entry.publishedDate);
                    rss += "<pubDate>" + rssPubDateFormat.format(pubDate) + "</pubDate>";
                    rss += "</item>\n";
                }
                rss += "</channel></rss>";
                response.getWriter().write(rss);
            }
            response.flushBuffer();
        }
    }
}
From source file:org.openhab.ui.cometvisu.internal.servlet.CometVisuServlet.java
/**
 * Serves an RSS feed from a persisted string-item backend for the CometVisu
 * rsslog-plugin.
 *
 * Depending on the request parameters this either stores a new log entry
 * ("c"), rejects unsupported delete/update operations ("r", "u", "d"),
 * silently accepts "dump", or renders the persisted history of the requested
 * items ("f", comma-separated item names) as JSON ("j") or RSS.
 *
 * @param file the requested file (not used by this handler)
 * @param request the servlet request carrying the rsslog parameters
 * @param response the servlet response the feed is written to
 */
private void processRssLogRequest(File file, HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    // retrieve the item
    if (request.getParameter("f") == null) {
        return;
    }
    String[] itemNames = request.getParameter("f").split(",");
    List<Item> items = new ArrayList<Item>();
    for (String name : itemNames) {
        try {
            Item item = cometVisuApp.getItemRegistry().getItem(name);
            items.add(item);
        } catch (ItemNotFoundException e) {
            logger.error("item '{}' not found", name);
        }
    }
    if (items.size() > 0) {
        // Fallback to first persistenceService from list
        if (!CometVisuApp.getPersistenceServices().entrySet().iterator().hasNext()) {
            throw new IllegalArgumentException("No Persistence service found.");
        }
        if (request.getParameter("c") != null) {
            if (items.size() == 1) {
                // new log message should be store
                String title = request.getParameter("h");
                String message = request.getParameter("c");
                String state = request.getParameter("state");
                // Build message: title|content|state|itemName joined by rssLogMessageSeparator.
                Command command = new StringType(title + rssLogMessageSeparator + message
                        + rssLogMessageSeparator + state + rssLogMessageSeparator + items.get(0).getName());
                // Use the event publisher to store the item in the defined
                // persistance services
                cometVisuApp.getEventPublisher()
                        .post(ItemEventFactory.createCommandEvent(items.get(0).getName(), command));
            }
            // send empty response??
            response.setContentType("text/plain");
            response.getWriter().write("");
            response.flushBuffer();
        } else if (request.getParameter("dump") != null) {
            // NOTE(review): "dump" is accepted but intentionally produces no output here.
        } else if (request.getParameter("r") != null) {
            // delete all log lines older than the timestamp and optional a
            // filter
            // => not possible to remove data from persistence service
            response.setContentType("text/plain");
            response.getWriter().write(
                    "Cannot execute query: It is not possible to delete data from openHAB PersistenceService");
            response.flushBuffer();
        } else if (request.getParameter("u") != null) {
            // update state
            response.setContentType("text/plain");
            response.getWriter().write(
                    "Cannot execute query: It is not possible to update data from openHAB PersistenceService");
            response.flushBuffer();
        } else if (request.getParameter("d") != null) {
            // update state
            response.setContentType("text/plain");
            response.getWriter().write(
                    "Cannot execute query: It is not possible to delete data from openHAB PersistenceService");
            response.flushBuffer();
        } else {
            Feed feed = new Feed();
            feed.feedUrl = request.getRequestURL().toString();
            feed.title = "RSS supplied logs";
            feed.link = request.getRequestURL().toString();
            feed.author = "";
            feed.description = "RSS supplied logs";
            feed.type = "rss20";
            // Define the data filter
            FilterCriteria filter = new FilterCriteria();
            Calendar start = Calendar.getInstance();
            // retrieve only the historic states from the last 7 days + BeginDate is required for RRD4j service
            start.add(Calendar.DAY_OF_YEAR, -7);
            // Date end = new Date();
            filter.setBeginDate(start.getTime());
            // filter.setEndDate(end);
            filter.setPageSize(25);
            filter.setOrdering(Ordering.DESCENDING);
            for (Item item : items) {
                filter.setItemName(item.getName());
                Iterator<Entry<String, QueryablePersistenceService>> pit = CometVisuApp.getPersistenceServices()
                        .entrySet().iterator();
                QueryablePersistenceService persistenceService = pit.next().getValue();
                // Get the data from the persistence store
                Iterable<HistoricItem> result = persistenceService.query(filter);
                Iterator<HistoricItem> it = result.iterator();
                boolean forceStop = false;
                // NOTE(review): when falling back to the next service, "result" is
                // re-queried but "it" is never refreshed from it, so data found in a
                // fallback service appears to be ignored — verify this is intended.
                while (!forceStop && !it.hasNext()) {
                    if (pit.hasNext()) {
                        persistenceService = pit.next().getValue();
                        result = persistenceService.query(filter);
                    } else {
                        // no persisted data found for this item in any of
                        // the available persistence services
                        forceStop = true;
                    }
                }
                if (it.hasNext()) {
                    logger.debug("persisted data for item {} found in service {}", item.getName(),
                            persistenceService.getId());
                }
                // Iterate through the data
                int i = 0;
                while (it.hasNext()) {
                    i++;
                    HistoricItem historicItem = it.next();
                    if (historicItem.getState() == null || historicItem.getState().toString().isEmpty()) {
                        continue;
                    }
                    org.openhab.ui.cometvisu.internal.rss.beans.Entry entry = new org.openhab.ui.cometvisu.internal.rss.beans.Entry();
                    entry.publishedDate = historicItem.getTimestamp().getTime();
                    entry.tags.add(historicItem.getName());
                    // Persisted payload format: title|content|state|itemName,
                    // separated by rssLogMessageSeparator (see the "c" branch above).
                    String[] content = historicItem.getState().toString().split(rssLogMessageSeparator);
                    if (content.length == 0) {
                        entry.content = historicItem.getState().toString();
                    } else if (content.length == 1) {
                        entry.content = content[0];
                    } else if (content.length == 2) {
                        entry.title = content[0];
                        entry.content = content[1];
                    } else if (content.length == 3) {
                        entry.title = content[0];
                        entry.content = content[1];
                        entry.state = content[2];
                    } else if (content.length == 4) {
                        entry.title = content[0];
                        entry.content = content[1];
                        entry.state = content[2];
                        // ignore tags in content[3] as is is already known
                        // by item name
                    }
                    feed.entries.add(entry);
                }
                if ("rrd4j".equals(persistenceService.getId())
                        && FilterCriteria.Ordering.DESCENDING.equals(filter.getOrdering())) {
                    // the RRD4j PersistenceService does not support descending ordering so we do it manually
                    Collections.sort(feed.entries,
                            new Comparator<org.openhab.ui.cometvisu.internal.rss.beans.Entry>() {
                                @Override
                                public int compare(org.openhab.ui.cometvisu.internal.rss.beans.Entry o1,
                                        org.openhab.ui.cometvisu.internal.rss.beans.Entry o2) {
                                    // newest first
                                    return Long.compare(o2.publishedDate, o1.publishedDate);
                                }
                            });
                }
                logger.debug("querying {} item from {} to {} => {} results on service {}", filter.getItemName(),
                        filter.getBeginDate(), filter.getEndDate(), i, persistenceService.getId());
            }
            if (request.getParameter("j") != null) {
                // request data in JSON format
                response.setContentType("application/json");
                response.getWriter().write("{\"responseData\": { \"feed\": " + marshalJson(feed)
                        + "},\"responseDetails\":null,\"responseStatus\":200}");
            } else {
                // request data in RSS format
                response.setContentType(MediaType.APPLICATION_ATOM_XML);
                // as the json bean structure does not map the rss structure
                // we cannot just marshal an XML
                String rss = "<?xml version=\"1.0\"?>\n<rss version=\"2.0\">\n<channel>\n";
                rss += "<title>" + feed.title + "</title>\n";
                rss += "<link>" + feed.link + "</link>\n";
                // NOTE(review): both tags below are misspelled ("desrciption" vs
                // "desription") and do not match each other, producing invalid RSS.
                rss += "<desrciption>" + feed.description + "</desription>\n";
                for (org.openhab.ui.cometvisu.internal.rss.beans.Entry entry : feed.entries) {
                    rss += "<item>";
                    rss += "<title>" + entry.title + "</title>";
                    rss += "<description>" + entry.content + "</description>";
                    Date pubDate = new Date(entry.publishedDate);
                    rss += "<pubDate>" + rssPubDateFormat.format(pubDate) + "</pubDate>";
                    rss += "</item>\n";
                }
                rss += "</channel></rss>";
                response.getWriter().write(rss);
            }
            response.flushBuffer();
        }
    }
}
From source file:edu.kit.dama.staging.services.impl.download.DownloadInformationServiceLocal.java
private Collection<StagingProcessor> mergeStagingProcessors(Collection<StagingProcessor> assignedProcessors, Collection<StagingProcessor> defaultProcessors) { LOGGER.debug("Checking default staging processors."); if (assignedProcessors == null || assignedProcessors.isEmpty()) { //return defaultProcessors (can't be null, so we do not have to check result) LOGGER.debug("No staging processors assigned, using only default processors."); return defaultProcessors; }/*from ww w . j a v a 2s . c o m*/ LOGGER.debug("Adding all assigned processors to result list."); List<StagingProcessor> result = new ArrayList<>(assignedProcessors); if (defaultProcessors == null || defaultProcessors.isEmpty()) { //return assigned processors LOGGER.debug("No default processors provided, using only assigned processors."); return result; } LOGGER.debug("Merging {} existing and {} default processor(s)", assignedProcessors.size(), defaultProcessors.size()); for (final StagingProcessor processor : defaultProcessors) { LOGGER.debug("Searching for default processor with id {}", processor.getUniqueIdentifier()); StagingProcessor exists = (StagingProcessor) CollectionUtils.find(assignedProcessors, new Predicate() { @Override public boolean evaluate(Object o) { return Long.compare(((StagingProcessor) o).getId(), processor.getId()) == 0; } }); if (exists == null) { LOGGER.debug("Default processor with id {} is not assigned, yet. Adding it.", processor.getId()); //add as it not exists result.add(processor); } } return result; }
From source file:com.google.uzaygezen.core.LongArrayBitVector.java
private int compareTo(long[] other) { int i;//from w ww.j av a 2s . c om for (i = data.length; --i >= 0 && data[i] == other[i];) ; final int cmp; if (i == -1) { cmp = 0; } else { // 0, positives, Long.MAX_VALUE, Long.MIN_VALUE, negatives, -1 long x = data[i] + Long.MIN_VALUE; long y = other[i] + Long.MIN_VALUE; cmp = Long.compare(x, y); assert cmp != 0; } return cmp; }
From source file:org.cgiar.ccafs.marlo.action.summaries.SearchTermsSummaryAction.java
/**
 * Builds the deliverables table for the search-terms summary: one row per
 * deliverable of the selected phase whose title or cross-cutting fields match
 * any of the search keys, with matches highlighted via HTML font markup.
 *
 * @return table model with project/deliverable metadata and highlighted matches
 */
private TypedTableModel getDeliverablesTableModel() {
    TypedTableModel model = new TypedTableModel(
        new String[] { "project_id", "title", "dev_id", "dev_title", "dev_type", "dev_sub_type", "lead_ins",
            "leader", "project_url", "dev_url", "phaseID", "gender", "youth", "capacityDevelopment" },
        new Class[] { String.class, String.class, String.class, String.class, String.class, String.class,
            String.class, String.class, String.class, String.class, Long.class, String.class, String.class,
            String.class },
        0);
    if (!keys.isEmpty()) {
        List<Project> projects = new ArrayList<>();
        if (this.getSelectedPhase() != null) {
            // Collect the active projects of the selected phase, ordered by project id.
            // NOTE(review): sorted(..) runs before the null filter, so a project phase
            // with a null project would NPE inside the comparator — verify projects
            // can never be null here.
            for (ProjectPhase projectPhase : this.getSelectedPhase().getProjectPhases().stream()
                .sorted((pf1, pf2) -> Long.compare(pf1.getProject().getId(), pf2.getProject().getId()))
                .filter(pf -> pf.getProject() != null && pf.getProject().isActive())
                .collect(Collectors.toList())) {
                projects.add((projectPhase.getProject()));
            }
            for (Project project : projects) {
                ProjectInfo projectInfo = project.getProjecInfoPhase(this.getSelectedPhase());
                // Active deliverables of this project in the selected phase, by id.
                for (Deliverable deliverable : project.getDeliverables().stream()
                    .sorted((d1, d2) -> Long.compare(d1.getId(), d2.getId()))
                    .filter(d -> d.isActive() && d.getDeliverableInfo(this.getSelectedPhase()) != null
                        && d.getDeliverableInfo().getPhase().equals(this.getSelectedPhase()))
                    .collect(Collectors.toList())) {
                    String devTitle = "";
                    // Pattern case insensitive
                    String patternString = "(?i)\\b(" + StringUtils.join(keys, "|") + ")\\b";
                    Pattern pattern = Pattern.compile(patternString);
                    // count occurrences (field; incremented inside setFieldMatches)
                    countMatches = 0;
                    devTitle = this.setFieldMatches(
                        deliverable.getDeliverableInfo(this.getSelectedPhase()).getTitle(), pattern, "", null,
                        false);
                    String gender = "", youth = "", capacityDevelopment = "";
                    gender = this.setFieldMatches(
                        deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingGender(), pattern,
                        this.getText("summaries.gender"),
                        deliverable.getDeliverableInfo().getCrossCuttingScoreGender(), true);
                    youth = this.setFieldMatches(
                        deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingYouth(), pattern,
                        this.getText("summaries.youth"),
                        deliverable.getDeliverableInfo().getCrossCuttingScoreYouth(), true);
                    capacityDevelopment = this.setFieldMatches(
                        deliverable.getDeliverableInfo(this.getSelectedPhase()).getCrossCuttingCapacity(),
                        pattern, this.getText("summaries.capacityDevelopment"),
                        deliverable.getDeliverableInfo().getCrossCuttingScoreCapacity(), true);
                    // Only emit a row if at least one field matched a search key.
                    if (countMatches > 0) {
                        String projectId = "<font size=2 face='Segoe UI' color='#0000ff'>P"
                            + project.getId().toString() + "</font>";
                        String projectUrl = project.getId().toString();
                        String title = projectInfo.getTitle();
                        String devId = "<font size=2 face='Segoe UI' color='#0000ff'>D"
                            + deliverable.getId().toString() + "</font>";
                        String devUrl = deliverable.getId().toString();
                        String devType = "<font size=2 face='Segoe UI' color='#000000'></font>";
                        String devSubType = "<font size=2 face='Segoe UI' color='#000000'></font>";
                        String leadIns = "<font size=2 face='Segoe UI' color='#000000'></font>";
                        String leader = "<font size=2 face='Segoe UI' color='#000000'></font>";
                        if (projectInfo.getTitle() != null) {
                            title = "<font size=2 face='Segoe UI' color='#000000'>" + projectInfo.getTitle()
                                + "</font>";
                        } else {
                            title = "<font size=2 face='Segoe UI' color='#000000'></font>";
                        }
                        if (deliverable.getDeliverableInfo(this.getSelectedPhase())
                            .getDeliverableType() != null) {
                            if (deliverable.getDeliverableInfo(this.getSelectedPhase()).getDeliverableType()
                                .getDeliverableCategory() != null) {
                                devType = "<font size=2 face='Segoe UI' color='#000000'>"
                                    + deliverable.getDeliverableInfo(this.getSelectedPhase())
                                        .getDeliverableType().getDeliverableCategory().getName()
                                    + "</font>";
                                devSubType = "<font size=2 face='Segoe UI' color='#000000'>"
                                    + deliverable.getDeliverableInfo(this.getSelectedPhase())
                                        .getDeliverableType().getName()
                                    + "</font>";
                            } else {
                                devType = "<font size=2 face='Segoe UI' color='#000000'>D"
                                    + deliverable.getDeliverableInfo(this.getSelectedPhase())
                                        .getDeliverableType().getName()
                                    + "</font>";
                            }
                        }
                        // Get partner responsible and institution
                        // Set responible;
                        DeliverablePartnership responisble = this.responsiblePartner(deliverable);
                        if (responisble != null) {
                            if (responisble.getProjectPartnerPerson() != null) {
                                ProjectPartnerPerson responsibleppp = responisble.getProjectPartnerPerson();
                                leader = "<font size=2 face='Segoe UI' color='#000000'>"
                                    + responsibleppp.getUser().getComposedName() + "\n<"
                                    + responsibleppp.getUser().getEmail() + "></font>";
                                if (responsibleppp.getProjectPartner() != null) {
                                    if (responsibleppp.getProjectPartner().getInstitution() != null) {
                                        leadIns = "<font size=2 face='Segoe UI' color='#000000'>"
                                            + responsibleppp.getProjectPartner().getInstitution()
                                                .getComposedName()
                                            + "</font>";
                                    }
                                }
                            }
                        }
                        Long phaseID = projectInfo.getPhase().getId();
                        model.addRow(new Object[] { projectId, title, devId, devTitle, devType, devSubType,
                            leadIns, leader, projectUrl, devUrl, phaseID, gender, youth,
                            capacityDevelopment });
                    }
                }
            }
        }
    }
    return model;
}
From source file:org.opencb.opencga.storage.core.manager.variant.VariantStorageManager.java
/**
 * Resolves which samples of each returned study the current user may read
 * for the given variant query.
 *
 * If the query explicitly names the samples to return, verifies the user can
 * read all of them (throwing otherwise); if not, loads every readable sample
 * of each returned study from the catalog and narrows the query to them.
 *
 * @param query        variant query (may be narrowed with RETURNED_SAMPLES)
 * @param queryOptions options controlling which variant fields are returned
 * @param dbAdaptor    variant database adaptor used to resolve studies/samples
 * @param sessionId    session of the requesting user
 * @return map from study id to the readable samples of that study; empty if
 *         study data is not requested at all
 * @throws CatalogException if the catalog lookup fails or permission is denied
 */
Map<Long, List<Sample>> checkSamplesPermissions(Query query, QueryOptions queryOptions,
        VariantDBAdaptor dbAdaptor, String sessionId) throws CatalogException {
    final Map<Long, List<Sample>> samplesMap = new HashMap<>();
    Set<VariantField> returnedFields = VariantField.getReturnedFields(queryOptions);
    if (!returnedFields.contains(VariantField.STUDIES)) {
        // No study data requested, so no sample permissions to check.
        return Collections.emptyMap();
    }
    if (VariantDBAdaptorUtils.isReturnedSamplesDefined(query, returnedFields)) {
        // The query names its samples explicitly: verify the user can read all of them.
        Map<Integer, List<Integer>> samplesToReturn = dbAdaptor.getReturnedSamples(query, queryOptions);
        for (Map.Entry<Integer, List<Integer>> entry : samplesToReturn.entrySet()) {
            if (!entry.getValue().isEmpty()) {
                QueryResult<Sample> samplesQueryResult = catalogManager.getAllSamples(entry.getKey(),
                        new Query(SampleDBAdaptor.QueryParams.ID.key(), entry.getValue()),
                        new QueryOptions("exclude", Arrays.asList("projects.studies.samples.annotationSets",
                                "projects.studies.samples.attributes")),
                        sessionId);
                // Fewer results than requested ids means at least one sample is not readable.
                if (samplesQueryResult.getNumResults() != entry.getValue().size()) {
                    throw new CatalogAuthorizationException(
                            "Permission denied. User " + catalogManager.getUserIdBySessionId(sessionId)
                                    + " can't read all the requested samples");
                }
                samplesMap.put((long) entry.getKey(), samplesQueryResult.getResult());
            } else {
                samplesMap.put((long) entry.getKey(), Collections.emptyList());
            }
        }
    } else {
        logger.debug("Missing returned samples! Obtaining returned samples from catalog.");
        List<Integer> returnedStudies = dbAdaptor.getReturnedStudies(query, queryOptions);
        List<Study> studies = catalogManager
                .getAllStudies(new Query(StudyDBAdaptor.QueryParams.ID.key(), returnedStudies),
                        new QueryOptions("include", "projects.studies.id"), sessionId)
                .getResult();
        if (!returnedFields.contains(VariantField.STUDIES_SAMPLES_DATA)) {
            // Sample data itself not requested: record studies with empty sample lists.
            for (Integer returnedStudy : returnedStudies) {
                samplesMap.put(returnedStudy.longValue(), Collections.emptyList());
            }
        } else {
            List<Long> returnedSamples = new LinkedList<>();
            for (Study study : studies) {
                QueryResult<Sample> samplesQueryResult = catalogManager.getAllSamples(study.getId(),
                        new Query(),
                        new QueryOptions("exclude", Arrays.asList("projects.studies.samples.annotationSets",
                                "projects.studies.samples.attributes")),
                        sessionId);
                // Deterministic ordering of the returned samples by id.
                samplesQueryResult.getResult().sort((o1, o2) -> Long.compare(o1.getId(), o2.getId()));
                samplesMap.put(study.getId(), samplesQueryResult.getResult());
                samplesQueryResult.getResult().stream().map(Sample::getId).forEach(returnedSamples::add);
            }
            // Narrow the variant query to the samples the user may actually read.
            query.append(VariantQueryParams.RETURNED_SAMPLES.key(), returnedSamples);
        }
    }
    return samplesMap;
}
From source file:org.apache.nifi.provenance.MiNiFiPersistentProvenanceRepository.java
/**
 * Recovers provenance repository state after a restart: scans the storage
 * directories for event files, rebuilds the first-event-id-to-path map,
 * recovers journal files, and re-establishes the event id generator one past
 * the highest event id found.
 *
 * @throws IOException if the storage directories cannot be scanned
 */
private void recover() throws IOException {
    long maxId = -1L;
    final List<File> filesToRecover = new ArrayList<>();
    for (final File file : configuration.getStorageDirectories()) {
        final File[] matchingFiles = file.listFiles(new FileFilter() {
            @Override
            public boolean accept(final File pathname) {
                final String filename = pathname.getName();
                // Skip non-provenance files and in-progress temp files.
                if (!filename.contains(FILE_EXTENSION) || filename.endsWith(TEMP_FILE_SUFFIX)) {
                    return false;
                }
                final String baseFilename = filename.substring(0, filename.indexOf("."));
                return NUMBER_PATTERN.matcher(baseFilename).matches();
            }
        });
        for (final File matchingFile : matchingFiles) {
            filesToRecover.add(matchingFile);
        }
    }
    // Event files are named "<firstEventId>.<ext>"; keep them ordered by that id.
    final SortedMap<Long, Path> sortedPathMap = new TreeMap<>(new Comparator<Long>() {
        @Override
        public int compare(final Long o1, final Long o2) {
            return Long.compare(o1, o2);
        }
    });
    File maxIdFile = null;
    for (final File file : filesToRecover) {
        final String filename = file.getName();
        final String baseName = filename.substring(0, filename.indexOf("."));
        final long fileFirstId = Long.parseLong(baseName);
        sortedPathMap.put(fileFirstId, file.toPath());
        if (fileFirstId > maxId) {
            maxId = fileFirstId;
            maxIdFile = file;
        }
    }
    if (maxIdFile != null) {
        // Determine the max ID in the last file.
        try (final RecordReader reader = RecordReaders.newRecordReader(maxIdFile, getAllLogFiles(),
                maxAttributeChars)) {
            final long eventId = reader.getMaxEventId();
            if (eventId > maxId) {
                maxId = eventId;
                checkAndSetMaxEventId(maxId);
            }
        } catch (final IOException ioe) {
            // Best effort: log and continue with the id derived from file names.
            logger.error("Failed to read Provenance Event File {} due to {}", maxIdFile, ioe);
            logger.error("", ioe);
        }
    }
    // Establish current max event ID and increment generator to pick up from this point
    checkAndSetMaxEventId(maxId);
    idGenerator.set(maxId + 1);
    try {
        final Set<File> recoveredJournals = recoverJournalFiles();
        filesToRecover.addAll(recoveredJournals);
        // Find the file that has the greatest ID
        File greatestMinIdFile = null;
        long greatestMinId = 0L;
        for (final File recoveredJournal : recoveredJournals) {
            // if the file was removed because the journals were empty, don't count it
            if (!recoveredJournal.exists()) {
                continue;
            }
            final String basename = StringUtils.substringBefore(recoveredJournal.getName(), ".");
            try {
                final long minId = Long.parseLong(basename);
                sortedPathMap.put(minId, recoveredJournal.toPath());
                if (greatestMinIdFile == null || minId > greatestMinId) {
                    greatestMinId = minId;
                    greatestMinIdFile = recoveredJournal;
                }
            } catch (final NumberFormatException nfe) {
                // not a file we care about...
            }
        }
        // Read the records in the last file to find its max id
        if (greatestMinIdFile != null) {
            try (final RecordReader recordReader = RecordReaders.newRecordReader(greatestMinIdFile,
                    Collections.<Path>emptyList(), maxAttributeChars)) {
                maxId = recordReader.getMaxEventId();
            }
        }
        // set the ID Generator 1 greater than the max id
        idGenerator.set(maxId + 1);
    } catch (final IOException ioe) {
        logger.error("Failed to recover Journal Files due to {}", ioe.toString());
        logger.error("", ioe);
    }
    idToPathMap.set(Collections.unmodifiableSortedMap(sortedPathMap));
    logger.trace("In recovery, path map: {}", sortedPathMap);
    logger.info("Recovered records");
    recoveryFinished.set(true);
}