List of usage examples for java.util.LinkedList.isEmpty()
boolean isEmpty();
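isEmpty() returns true if the list contains no elements. Before the source-file examples, here is a minimal, self-contained sketch of the typical usage; the class and variable names are purely illustrative.

import java.util.LinkedList;

public class IsEmptyDemo {
    public static void main(String[] args) {
        LinkedList<String> tasks = new LinkedList<String>();
        System.out.println(tasks.isEmpty()); // true: nothing added yet

        tasks.add("parse");
        tasks.add("index");
        System.out.println(tasks.isEmpty()); // false: two elements present

        // Drain the list; isEmpty() is the usual loop guard for worklists
        while (!tasks.isEmpty()) {
            String next = tasks.removeFirst();
            System.out.println("processing " + next);
        }
        System.out.println(tasks.isEmpty()); // true again
    }
}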
From source file:gate.creole.tokeniser.SimpleTokeniser.java
/** Converts the FSM from a non-deterministic to a deterministic one by
  * eliminating all the unrestricted transitions. */
void eliminateVoidTransitions() throws TokeniserException {
    //kalina:clear() faster than init() which is called with init()
    newStates.clear();
    Set<Set<FSMState>> sdStates = new HashSet<Set<FSMState>>();
    LinkedList<Set<FSMState>> unmarkedDStates = new LinkedList<Set<FSMState>>();
    DFSMState dCurrentState = new DFSMState(this);
    Set<FSMState> sdCurrentState = new HashSet<FSMState>();
    sdCurrentState.add(initialState);
    sdCurrentState = lambdaClosure(sdCurrentState);
    newStates.put(sdCurrentState, dCurrentState);
    sdStates.add(sdCurrentState);

    //find out if the new state is a final one
    Iterator<FSMState> innerStatesIter = sdCurrentState.iterator();
    String rhs;
    FSMState currentInnerState;
    Set<String> rhsClashSet = new HashSet<String>();
    boolean newRhs = false;
    while (innerStatesIter.hasNext()) {
        currentInnerState = innerStatesIter.next();
        if (currentInnerState.isFinal()) {
            rhs = currentInnerState.getRhs();
            rhsClashSet.add(rhs);
            dCurrentState.rhs = rhs;
            newRhs = true;
        }
    }
    if (rhsClashSet.size() > 1) {
        Err.println("Warning, rule clash: " + rhsClashSet
                + "\nSelected last definition: " + dCurrentState.rhs);
    }
    if (newRhs)
        dCurrentState.buildTokenDesc();
    rhsClashSet.clear();
    unmarkedDStates.addFirst(sdCurrentState);
    dInitialState = dCurrentState;
    Set<FSMState> nextSet;

    while (!unmarkedDStates.isEmpty()) {
        //Out.println("\n\n=====================" + unmarkedDStates.size());
        sdCurrentState = unmarkedDStates.removeFirst();
        for (int type = 0; type < maxTypeId; type++) {
            //Out.print(type);
            nextSet = new HashSet<FSMState>();
            innerStatesIter = sdCurrentState.iterator();
            while (innerStatesIter.hasNext()) {
                currentInnerState = innerStatesIter.next();
                Set<FSMState> tempSet = currentInnerState.nextSet(type);
                if (null != tempSet)
                    nextSet.addAll(tempSet);
            } //while(innerStatesIter.hasNext())
            if (!nextSet.isEmpty()) {
                nextSet = lambdaClosure(nextSet);
                dCurrentState = newStates.get(nextSet);
                if (dCurrentState == null) {
                    //we have a new DFSMState
                    dCurrentState = new DFSMState(this);
                    sdStates.add(nextSet);
                    unmarkedDStates.add(nextSet);
                    //check to see whether the new state is a final one
                    innerStatesIter = nextSet.iterator();
                    newRhs = false;
                    while (innerStatesIter.hasNext()) {
                        currentInnerState = innerStatesIter.next();
                        if (currentInnerState.isFinal()) {
                            rhs = currentInnerState.getRhs();
                            rhsClashSet.add(rhs);
                            dCurrentState.rhs = rhs;
                            newRhs = true;
                        }
                    }
                    if (rhsClashSet.size() > 1) {
                        Err.println("Warning, rule clash: " + rhsClashSet
                                + "\nSelected last definition: " + dCurrentState.rhs);
                    }
                    if (newRhs)
                        dCurrentState.buildTokenDesc();
                    rhsClashSet.clear();
                    newStates.put(nextSet, dCurrentState);
                }
                newStates.get(sdCurrentState).put(type, dCurrentState);
            } // if(!nextSet.isEmpty())
        } // for(byte type = 0; type < 256; type++)
    } // while(!unmarkedDStates.isEmpty())
}
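The example above is the classic subset-construction worklist: newly discovered composite states are pushed onto a LinkedList and processed until isEmpty() returns true. A stripped-down sketch of that worklist pattern follows; the successor function and integer "states" are hypothetical and are not taken from the GATE code.

import java.util.HashSet;
import java.util.LinkedList;
import java.util.Set;

public class WorklistSketch {
    // Hypothetical expansion step: returns the states reachable from 'state'.
    static Set<Integer> successors(int state) {
        Set<Integer> next = new HashSet<Integer>();
        if (state < 5) next.add(state + 1);
        return next;
    }

    public static void main(String[] args) {
        Set<Integer> seen = new HashSet<Integer>();
        LinkedList<Integer> worklist = new LinkedList<Integer>();
        worklist.addFirst(0);
        seen.add(0);

        // Same shape as the tokeniser's while (!unmarkedDStates.isEmpty()) loop:
        // take an unprocessed item, expand it, queue anything not seen before.
        while (!worklist.isEmpty()) {
            int current = worklist.removeFirst();
            for (int next : successors(current)) {
                if (seen.add(next)) {
                    worklist.add(next);
                }
            }
        }
        System.out.println("visited " + seen.size() + " states");
    }
}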
From source file:com.opengamma.util.db.management.AbstractDbManagement.java
@Override
public void clearTables(String catalog, String schema, Collection<String> ignoredTables) {
    LinkedList<String> script = new LinkedList<String>();
    Connection conn = null;
    try {
        if (!getCatalogCreationStrategy().catalogExists(catalog)) {
            return; // nothing to clear
        }
        conn = connect(catalog);
        setActiveSchema(conn, schema);
        Statement statement = conn.createStatement();

        // Clear tables SQL
        List<String> tablesToClear = new ArrayList<String>();
        for (String name : getAllTables(catalog, schema, statement)) {
            if (!ignoredTables.contains(name.toLowerCase())) {
                tablesToClear.add(name);
            }
        }
        List<String> clearTablesCommands = getClearTablesCommand(schema, tablesToClear);
        script.addAll(clearTablesCommands);
        for (String name : tablesToClear) {
            Table table = new Table(name);
            if (matches(table.getName().toLowerCase(), Pattern.compile(".*?hibernate_sequence"))) {
                // if it's a sequence table, reset it
                script.add("INSERT INTO " + table.getQualifiedName(getHibernateDialect(), null, schema)
                        + " values ( 1 )");
            }
        }

        // Now execute it all. Constraints are taken into account by retrying the failed statement after all
        // dependent tables have been cleared first.
        int i = 0;
        int maxAttempts = script.size() * 3; // make sure the loop eventually terminates. Important if there's a cycle in the table dependency graph
        SQLException latestException = null;
        while (i < maxAttempts && !script.isEmpty()) {
            String sql = script.remove();
            try {
                statement.executeUpdate(sql);
            } catch (SQLException e) {
                // assume it failed because of a constraint violation
                // try deleting other tables first - make this the new last statement
                latestException = e;
                script.add(sql);
            }
            i++;
        }
        statement.close();
        if (i == maxAttempts && !script.isEmpty()) {
            throw new OpenGammaRuntimeException(
                    "Failed to clear tables - is there a cycle in the table dependency graph?", latestException);
        }
    } catch (SQLException e) {
        throw new OpenGammaRuntimeException("Failed to clear tables", e);
    } finally {
        try {
            if (conn != null) {
                conn.close();
            }
        } catch (SQLException e) {
        }
    }
}
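clearTables drains the script list with remove() and re-appends any statement that fails, bounding the work at three passes over the original list; isEmpty() serves both as the loop guard and as the final success check. Below is a minimal sketch of that retry-queue idea, with a fake execute step standing in for JDBC; the statement names and the dependency rule are invented for illustration.

import java.util.LinkedList;

public class RetryQueueSketch {
    // Stand-in for statement.executeUpdate: pretend "B" only succeeds after "A" ran.
    static boolean ranA = false;
    static boolean execute(String sql) {
        if (sql.equals("A")) { ranA = true; return true; }
        return ranA; // "B" fails until "A" has been executed
    }

    public static void main(String[] args) {
        LinkedList<String> script = new LinkedList<String>();
        script.add("B"); // depends on A, so the first attempt fails
        script.add("A");

        int attempts = 0;
        int maxAttempts = script.size() * 3; // guarantees termination even with a dependency cycle
        while (attempts < maxAttempts && !script.isEmpty()) {
            String sql = script.remove();
            if (!execute(sql)) {
                script.add(sql); // retry later, after its dependencies have run
            }
            attempts++;
        }
        System.out.println(script.isEmpty() ? "all statements executed" : "gave up: possible cycle");
    }
}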
From source file:org.epics.archiverappliance.etl.DataReductionPostProcessorsTest.java
/**
 * 1) Set up the raw and reduced PV's
 * 2) Generate data in STS
 * 3) Run ETL
 * 4) Compare
 */
private void testPostProcessor(String reduceDataUsing) throws Exception {
    cleanDataFolders();
    ConfigServiceForTests configService = new ConfigServiceForTests(new File("./bin"), 1);

    // Set up the raw and reduced PV's
    PlainPBStoragePlugin etlSTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=STS&rootFolder=" + shortTermFolderName
                    + "/&partitionGranularity=PARTITION_HOUR", configService);
    PlainPBStoragePlugin etlMTS = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=MTS&rootFolder=" + mediumTermFolderName
                    + "/&partitionGranularity=PARTITION_DAY", configService);
    PlainPBStoragePlugin etlLTSRaw = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=LTS&rootFolder=" + longTermFolderName
                    + "/&partitionGranularity=PARTITION_YEAR", configService);
    PlainPBStoragePlugin etlLTSReduced = (PlainPBStoragePlugin) StoragePluginURLParser
            .parseStoragePlugin("pb://localhost?name=LTS&rootFolder=" + longTermFolderName
                    + "/&partitionGranularity=PARTITION_YEAR&reducedata=" + reduceDataUsing, configService);
    {
        PVTypeInfo typeInfo = new PVTypeInfo(rawPVName, ArchDBRTypes.DBR_SCALAR_DOUBLE, true, 1);
        String[] dataStores = new String[] { etlSTS.getURLRepresentation(), etlMTS.getURLRepresentation(),
                etlLTSRaw.getURLRepresentation() };
        typeInfo.setDataStores(dataStores);
        typeInfo.setPaused(true);
        configService.updateTypeInfoForPV(rawPVName, typeInfo);
        configService.registerPVToAppliance(rawPVName, configService.getMyApplianceInfo());
    }
    {
        PVTypeInfo typeInfo = new PVTypeInfo(reducedPVName, ArchDBRTypes.DBR_SCALAR_DOUBLE, true, 1);
        String[] dataStores = new String[] { etlSTS.getURLRepresentation(), etlMTS.getURLRepresentation(),
                etlLTSReduced.getURLRepresentation() };
        typeInfo.setDataStores(dataStores);
        typeInfo.setPaused(true);
        configService.updateTypeInfoForPV(reducedPVName, typeInfo);
        configService.registerPVToAppliance(reducedPVName, configService.getMyApplianceInfo());
    }
    // Control ETL manually
    configService.getETLLookup().manualControlForUnitTests();

    short currentYear = TimeUtils.getCurrentYear();
    logger.info("Testing data reduction for postprocessor " + reduceDataUsing);

    for (int day = 0; day < 40; day++) {
        // Generate data into the STS on a daily basis
        ArrayListEventStream genDataRaw = new ArrayListEventStream(86400,
                new RemotableEventStreamDesc(ArchDBRTypes.DBR_SCALAR_DOUBLE, rawPVName, currentYear));
        ArrayListEventStream genDataReduced = new ArrayListEventStream(86400,
                new RemotableEventStreamDesc(ArchDBRTypes.DBR_SCALAR_DOUBLE, reducedPVName, currentYear));
        for (int second = 0; second < 86400; second++) {
            YearSecondTimestamp ysts = new YearSecondTimestamp(currentYear, day * 86400 + second, 0);
            Timestamp ts = TimeUtils.convertFromYearSecondTimestamp(ysts);
            genDataRaw.add(new POJOEvent(ArchDBRTypes.DBR_SCALAR_DOUBLE, ts,
                    new ScalarValue<Double>(second * 1.0), 0, 0));
            genDataReduced.add(new POJOEvent(ArchDBRTypes.DBR_SCALAR_DOUBLE, ts,
                    new ScalarValue<Double>(second * 1.0), 0, 0));
        }
        try (BasicContext context = new BasicContext()) {
            etlSTS.appendData(context, rawPVName, genDataRaw);
            etlSTS.appendData(context, reducedPVName, genDataReduced);
        }
        logger.debug("For postprocessor " + reduceDataUsing + " done generating data into the STS for day " + day);

        // Run ETL at the end of the day
        Timestamp timeETLruns = TimeUtils
                .convertFromYearSecondTimestamp(new YearSecondTimestamp(currentYear, day * 86400 + 86399, 0));
        ETLExecutor.runETLs(configService, timeETLruns);
        logger.debug("For postprocessor " + reduceDataUsing + " done performing ETL as though today is "
                + TimeUtils.convertToHumanReadableString(timeETLruns));

        // Compare data for raw+postprocessor and reduced PV's.
        PostProcessor postProcessor = PostProcessors.findPostProcessor(reduceDataUsing);
        postProcessor.initialize(reduceDataUsing, rawPVName);
        int rawWithPPCount = 0;
        int reducedCount = 0;
        try (BasicContext context = new BasicContext()) {
            Timestamp startTime = TimeUtils.minusDays(TimeUtils.now(), 10 * 366);
            Timestamp endTime = TimeUtils.plusDays(TimeUtils.now(), 10 * 366);
            LinkedList<Timestamp> rawTimestamps = new LinkedList<Timestamp>();
            LinkedList<Timestamp> reducedTimestamps = new LinkedList<Timestamp>();
            if (postProcessor instanceof PostProcessorWithConsolidatedEventStream) {
                List<Callable<EventStream>> callables = etlLTSRaw.getDataForPV(context, rawPVName, startTime,
                        endTime, postProcessor);
                for (Callable<EventStream> callable : callables) {
                    callable.call();
                }
                for (Event e : ((PostProcessorWithConsolidatedEventStream) postProcessor)
                        .getConsolidatedEventStream()) {
                    rawTimestamps.add(e.getEventTimeStamp());
                    rawWithPPCount++;
                }
            } else {
                try (EventStream rawWithPP = new CurrentThreadWorkerEventStream(rawPVName,
                        etlLTSRaw.getDataForPV(context, rawPVName, startTime, endTime, postProcessor))) {
                    for (Event e : rawWithPP) {
                        rawTimestamps.add(e.getEventTimeStamp());
                        rawWithPPCount++;
                    }
                }
            }
            try (EventStream reduced = new CurrentThreadWorkerEventStream(reducedPVName,
                    etlLTSReduced.getDataForPV(context, reducedPVName, startTime, endTime))) {
                for (Event e : reduced) {
                    reducedTimestamps.add(e.getEventTimeStamp());
                    reducedCount++;
                }
            }
            logger.debug("For postprocessor " + reduceDataUsing + " for day " + day + " we have " + rawWithPPCount
                    + " raw with postprocessor events and " + reducedCount + " reduced events");
            if (rawTimestamps.size() != reducedTimestamps.size()) {
                while (!rawTimestamps.isEmpty() || !reducedTimestamps.isEmpty()) {
                    if (!rawTimestamps.isEmpty())
                        logger.info("Raw/PP " + TimeUtils.convertToHumanReadableString(rawTimestamps.pop()));
                    if (!reducedTimestamps.isEmpty())
                        logger.info("Reduced" + TimeUtils.convertToHumanReadableString(reducedTimestamps.pop()));
                }
            }
            assertTrue("For postprocessor " + reduceDataUsing + " for day " + day + " we have " + rawWithPPCount
                    + " rawWithPP events and " + reducedCount + " reduced events", rawWithPPCount == reducedCount);
        }
        if (day > 2) {
            assertTrue("For postprocessor " + reduceDataUsing + " for day " + day
                    + ", seems like no events were moved by ETL into LTS for " + rawPVName + " Count = "
                    + rawWithPPCount, (rawWithPPCount != 0));
            assertTrue("For postprocessor " + reduceDataUsing + " for day " + day
                    + ", seems like no events were moved by ETL into LTS for " + reducedPVName + " Count = "
                    + reducedCount, (reducedCount != 0));
        }
    }
    configService.shutdownNow();
}
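When the raw and reduced counts differ, the test drains both timestamp lists with pop() until both are empty, logging whichever side still has entries. A small sketch of that diff-style drain follows; the lists here hold sample strings rather than archiver timestamps.

import java.util.LinkedList;

public class DrainTwoListsSketch {
    public static void main(String[] args) {
        LinkedList<String> left = new LinkedList<String>();
        LinkedList<String> right = new LinkedList<String>();
        left.add("10:00:01"); left.add("10:00:02"); left.add("10:00:03");
        right.add("10:00:01"); right.add("10:00:02");

        // Keep going while either side still has entries, same as the test's
        // while (!rawTimestamps.isEmpty() || !reducedTimestamps.isEmpty()) loop.
        while (!left.isEmpty() || !right.isEmpty()) {
            if (!left.isEmpty()) {
                System.out.println("left  " + left.pop());
            }
            if (!right.isEmpty()) {
                System.out.println("right " + right.pop());
            }
        }
    }
}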
From source file:com.epam.dlab.mongo.DlabResourceTypeDAO.java
/**
 * Update exploratory cost in Mongo DB.
 *
 * @param user the name of user.
 * @param exploratoryName id of exploratory.
 */
private void updateExploratoryCost(String user, String exploratoryName) {
    LOGGER.debug("Update exploratory {} cost for user {}", exploratoryName, user);
    List<? extends Bson> pipeline = Arrays.asList(
            match(and(eq(FIELD_USER, user), eq(FIELD_EXPLORATORY_NAME, exploratoryName))),
            group(getGrouppingFields(FIELD_DLAB_RESOURCE_ID, ReportLine.FIELD_PRODUCT,
                    ReportLine.FIELD_RESOURCE_TYPE, ReportLine.FIELD_CURRENCY_CODE),
                    sum(ReportLine.FIELD_COST, "$" + ReportLine.FIELD_COST),
                    min(FIELD_USAGE_DATE_START, "$" + ReportLine.FIELD_USAGE_DATE),
                    max(FIELD_USAGE_DATE_END, "$" + ReportLine.FIELD_USAGE_DATE)),
            sort(new Document(FIELD_ID + "." + FIELD_DLAB_RESOURCE_ID, 1)
                    .append(FIELD_ID + "." + ReportLine.FIELD_PRODUCT, 1)));
    AggregateIterable<Document> docs = connection.getCollection(COLLECTION_BILLING).aggregate(pipeline);
    LinkedList<Document> billing = new LinkedList<>();
    ResourceItemList resources = getResourceList();
    Double costTotal = null;
    String currencyCode = null;
    for (Document d : docs) {
        Document id = (Document) d.get(FIELD_ID);
        double cost = BillingCalculationUtils.round(d.getDouble(ReportLine.FIELD_COST), 2);
        costTotal = (costTotal == null ? cost : costTotal + cost);
        if (currencyCode == null) {
            currencyCode = id.getString(ReportLine.FIELD_CURRENCY_CODE);
        }
        Document total = new Document()
                .append(FIELD_RESOURCE_NAME,
                        resources.getById(id.getString(FIELD_DLAB_RESOURCE_ID)).getResourceName())
                .append(ReportLine.FIELD_PRODUCT, id.getString(ReportLine.FIELD_PRODUCT))
                .append(ReportLine.FIELD_RESOURCE_TYPE, id.getString(ReportLine.FIELD_RESOURCE_TYPE))
                .append(ReportLine.FIELD_COST, BillingCalculationUtils.formatDouble(cost))
                .append(ReportLine.FIELD_CURRENCY_CODE, id.getString(ReportLine.FIELD_CURRENCY_CODE))
                .append(FIELD_USAGE_DATE_START, d.getString(FIELD_USAGE_DATE_START))
                .append(FIELD_USAGE_DATE_END, d.getString(FIELD_USAGE_DATE_END));
        billing.add(total);
    }
    LOGGER.debug("Total exploratory {} cost for user {} is {} {}, detail count is {}", exploratoryName, user,
            costTotal, currencyCode, billing.size());
    billing.sort(new BillingComparator());

    MongoCollection<Document> cExploratory = connection.getCollection(COLLECTION_USER_INSTANCES);
    Bson values = Updates.combine(
            Updates.set(ReportLine.FIELD_COST, BillingCalculationUtils.formatDouble(costTotal)),
            Updates.set(FIELD_CURRENCY_CODE, currencyCode),
            Updates.set(COLLECTION_BILLING, (!billing.isEmpty() ? billing : null)));
    cExploratory.updateOne(and(and(eq(FIELD_USER, user), eq(FIELD_EXPLORATORY_NAME, exploratoryName))), values);
}
From source file:org.artifactory.repo.service.RepositoryServiceImpl.java
private ItemInfo collectLastModified(RepoPath pathToSearch) {
    TreeBrowsingCriteria criteria = new TreeBrowsingCriteriaBuilder().applySecurity().build();
    ItemTree itemTree = new ItemTree(pathToSearch, criteria);
    LinkedList<ItemNode> fringe = Lists.newLinkedList();
    fringe.add(itemTree.getRootNode());
    ItemInfo lastModified = null;
    while (!fringe.isEmpty()) {
        ItemNode last = fringe.removeLast();
        if (last.hasChildren()) {
            fringe.addAll(last.getChildren());
        }
        if (!last.isFolder()) {
            if (lastModified == null || last.getItemInfo().getLastModified() > lastModified.getLastModified()) {
                lastModified = last.getItemInfo();
            }
        }
    }
    return lastModified;
}
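collectLastModified walks an item tree iteratively: the fringe LinkedList acts as a stack (add plus removeLast) and isEmpty() ends the walk once every node has been visited. Here is a compact sketch of the same traversal over a hypothetical Node type invented for this example.

import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

public class TreeWalkSketch {
    // Hypothetical node: a label, a modification time, and children.
    static class Node {
        final String name;
        final long lastModified;
        final List<Node> children;
        Node(String name, long lastModified, Node... children) {
            this.name = name;
            this.lastModified = lastModified;
            this.children = Arrays.asList(children);
        }
    }

    public static void main(String[] args) {
        Node root = new Node("root", 10,
                new Node("a", 30),
                new Node("b", 20, new Node("b1", 50)));

        LinkedList<Node> fringe = new LinkedList<Node>();
        fringe.add(root);
        Node newest = null;

        // Same shape as the Artifactory example: pull from one end until empty,
        // push children back on, and track the best leaf seen so far.
        while (!fringe.isEmpty()) {
            Node current = fringe.removeLast();
            fringe.addAll(current.children);
            if (current.children.isEmpty()
                    && (newest == null || current.lastModified > newest.lastModified)) {
                newest = current;
            }
        }
        System.out.println("most recently modified leaf: " + newest.name);
    }
}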
From source file:org.dbpedia.spotlight.mediawiki.ModularParser.java
private SectionContainer parseSections(SpanManager sm, ContentElementParsingParameters cepp,
        LinkedList<Span> lineSpans) {
    List<SectionContent> contentSections = new ArrayList<SectionContent>();
    SectionContent sc = new SectionContent(1);
    if (calculateSrcSpans) {
        try {
            sc.setSrcSpan(new SrcSpan(sm.getSrcPos(lineSpans.getFirst().getStart()), -1));
        } catch (Exception e) {
            System.out.println("Parse error :" + sm.toString());
        }
    }

    // Identify the Line Type and call the necessary Function for the
    // further handling...
    while (!lineSpans.isEmpty()) {
        Span s = lineSpans.getFirst();
        lineType t = getLineType(sm, s);
        switch (t) {
        case SECTION:
            contentSections.add(sc);
            int level = getSectionLevel(sm, s);
            sc = new SectionContent(
                    parseContentElement(sm, cepp, new Span(s.getStart() + level, s.getEnd() - level).trim(sm)),
                    level);
            lineSpans.removeFirst();
            if (calculateSrcSpans) {
                sc.setSrcSpan(new SrcSpan(sm.getSrcPos(s.getStart()), -1));
            }
            break;
        case HR:
            // remove the HR (----) and handle the rest as a paragraph line
            removeHr(sm, s);
            t = lineType.PARAGRAPH;
        case PARAGRAPH:
        case PARAGRAPH_BOXED:
        case PARAGRAPH_INDENTED:
            sc.addParagraph(buildParagraph(sm, cepp, lineSpans, t));
            break;
        case NESTEDLIST:
        case NESTEDLIST_NR:
            sc.addNestedList(buildNestedList(sm, cepp, lineSpans, t));
            break;
        case DEFINITIONLIST:
            sc.addDefinitionList(buildDefinitionList(sm, cepp, lineSpans));
            break;
        case TABLE:
            sc.addTable(buildTable(sm, cepp, lineSpans));
            break;
        case EMPTYLINE:
            lineSpans.removeFirst();
            break;
        default:
            logger.error("unknown lineStart!: \"" + sm.substring(s) + "\"");
            lineSpans.removeFirst();
        }
    }
    // add the remaining Section to the list.
    contentSections.add(sc);
    return buildSectionStructure(contentSections);
}
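parseSections consumes its lineSpans queue in place: each iteration peeks at the head with getFirst(), dispatches on the line type, and the handlers remove lines until isEmpty() stops the loop. A reduced sketch of that peek-and-consume dispatch over plain strings, with invented line-type rules:

import java.util.LinkedList;

public class PeekAndConsumeSketch {
    public static void main(String[] args) {
        LinkedList<String> lines = new LinkedList<String>();
        lines.add("== Heading ==");
        lines.add("Some paragraph text.");
        lines.add("");
        lines.add("* a list item");

        // Peek at the head, decide what it is, then remove it; the loop
        // ends when the queue has been consumed, just like the parser above.
        while (!lines.isEmpty()) {
            String line = lines.getFirst();
            if (line.startsWith("==")) {
                System.out.println("section: " + line);
            } else if (line.startsWith("*")) {
                System.out.println("list item: " + line);
            } else if (line.isEmpty()) {
                System.out.println("(blank line skipped)");
            } else {
                System.out.println("paragraph: " + line);
            }
            lines.removeFirst();
        }
    }
}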
From source file:canreg.client.gui.analysis.TableBuilderInternalFrame.java
private void generateTablesAction(FileTypes filetype) {
    boolean filterError = false;
    TableBuilderListElement tble = (TableBuilderListElement) tableTypeList.getSelectedValue();
    if (tble == null) {
        JOptionPane.showMessageDialog(this,
                java.util.ResourceBundle
                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                        .getString("NO_TABLE_TYPE_SELECTED"),
                java.util.ResourceBundle
                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                        .getString("NO_TABLE_TYPE_SELECTED"),
                JOptionPane.ERROR_MESSAGE);
        return;
    } else {
        try {
            tableBuilder = TableBuilderFactory.getTableBuilder(tble);
        } catch (FileNotFoundException ex) {
            Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    Set<DatabaseVariablesListElement> variables = new LinkedHashSet<>();
    DistributedTableDescription tableDatadescription;
    JChartTableBuilderInterface chartBuilder;

    if (tableBuilder == null) {
        JOptionPane.showMessageDialog(this,
                java.util.ResourceBundle
                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                        .getString("TABLE_TYPE_NOT_YET_IMPLEMENTED"),
                java.util.ResourceBundle
                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                        .getString("TABLE_TYPE_NOT_YET_IMPLEMENTED"),
                JOptionPane.ERROR_MESSAGE);
        return;
    } else {
        String heading = headerOfTableTextField.getText();
        int startYear = startYearChooser.getValue();
        int endYear = endYearChooser.getValue();
        PopulationDataset[] populations;
        if (dontUsePopulationDatasetCheckBox.isSelected()) {
            populations = generateDummyPopulationDatasets();
        } else {
            populations = getSelectedPopulations();
        }
        PopulationDataset[] standardPopulations = new PopulationDataset[populations.length];
        tableBuilder.setUnknownAgeCode(CanRegClientApp.getApplication().getGlobalToolBox().getUnknownAgeCode());

        if (tableBuilder.areThesePopulationDatasetsCompatible(populations)) {
            String fileName = null;
            // Choose file name;
            if (filetype != FileTypes.jchart) {
                if (chooser == null) {
                    path = localSettings.getProperty(LocalSettings.TABLES_PATH_KEY);
                    if (path == null) {
                        chooser = new JFileChooser();
                    } else {
                        chooser = new JFileChooser(path);
                    }
                }
                int returnVal = chooser.showSaveDialog(this);
                if (returnVal == JFileChooser.APPROVE_OPTION) {
                    try {
                        localSettings.setProperty(LocalSettings.TABLES_PATH_KEY,
                                chooser.getSelectedFile().getParentFile().getCanonicalPath());
                        fileName = chooser.getSelectedFile().getAbsolutePath();
                    } catch (IOException ex) {
                        Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
                    }
                } else {
                    // cancelled
                    return;
                }
            }
            setCursor(hourglassCursor);

            int i = 0;
            String populationFilterString = "";
            for (PopulationDataset pop : populations) {
                if (pop != null) {
                    int stdPopID = pop.getReferencePopulationID();
                    standardPopulations[i++] = populationDatasetsMap.get(stdPopID);
                    if (populationFilterString.trim().length() == 0) {
                        populationFilterString = pop.getFilter();
                    } else if (!populationFilterString.equalsIgnoreCase(pop.getFilter())) {
                        // population filters not matching on all the pds...
                        filterError = true;
                    }
                }
            }

            Globals.StandardVariableNames[] variablesNeeded = tableBuilder.getVariablesNeeded();
            if (variablesNeeded != null) {
                for (Globals.StandardVariableNames standardVariableName : variablesNeeded) {
                    variables.add(canreg.client.CanRegClientApp.getApplication().getGlobalToolBox()
                            .translateStandardVariableNameToDatabaseListElement(standardVariableName.toString()));
                }
            }
            DatabaseFilter filter = new DatabaseFilter();
            String tableName = Globals.TUMOUR_AND_PATIENT_JOIN_TABLE_NAME;
            String filterString = rangeFilterPanel.getFilter().trim();

            if (filterString.length() != 0) {
                filterString += " AND ";
            }
            if (populationFilterString.length() != 0) {
                filterString += "( " + populationFilterString + " ) AND ";
            }

            // add the years to the filter
            DatabaseVariablesListElement incidenceDate = canreg.client.CanRegClientApp.getApplication()
                    .getGlobalToolBox().translateStandardVariableNameToDatabaseListElement(
                            Globals.StandardVariableNames.IncidenceDate.toString());
            filterString += incidenceDate.getDatabaseVariableName() + " BETWEEN '" + startYear * 10000
                    + "' AND '" + ((endYear + 1) * 10000 - 1) + "'";

            // filter only the confirmed cases
            DatabaseVariablesListElement recordStatus = canreg.client.CanRegClientApp.getApplication()
                    .getGlobalToolBox().translateStandardVariableNameToDatabaseListElement(
                            Globals.StandardVariableNames.TumourRecordStatus.toString());
            filterString += " AND " + recordStatus.getDatabaseVariableName() + " = '1'";

            // filter away obsolete cases
            DatabaseVariablesListElement recordObsoleteStatus = canreg.client.CanRegClientApp.getApplication()
                    .getGlobalToolBox().translateStandardVariableNameToDatabaseListElement(
                            Globals.StandardVariableNames.ObsoleteFlagTumourTable.toString());
            filterString += " AND " + recordObsoleteStatus.getDatabaseVariableName() + " != '1'";

            filter.setFilterString(filterString);
            System.out.println(filterString);
            filter.setQueryType(DatabaseFilter.QueryType.FREQUENCIES_BY_YEAR);
            filter.setDatabaseVariables(variables);
            DistributedTableDataSourceClient tableDataSource;
            Object[][] incidenceData = null;
            try {
                tableDatadescription = canreg.client.CanRegClientApp.getApplication()
                        .getDistributedTableDescription(filter, tableName);
                tableDataSource = new DistributedTableDataSourceClient(tableDatadescription);
                if (tableDatadescription.getRowCount() > 0) {
                    incidenceData = tableDataSource.retrieveRows(0, tableDatadescription.getRowCount());
                } else {
                    // display error - no lines
                    JOptionPane.showMessageDialog(this,
                            "No incidence data available corresponding to the current filter, period and population.",
                            "No incidence data", JOptionPane.ERROR_MESSAGE);
                }

                // Build the table(s)
                LinkedList<String> filesGenerated = tableBuilder.buildTable(heading, fileName, startYear, endYear,
                        incidenceData, populations, standardPopulations, tble.getConfigFields(),
                        tble.getEngineParameters(), filetype);

                if (filetype != FileTypes.jchart) {
                    String filesGeneratedList = new String();
                    filesGeneratedList = filesGenerated.stream().map((fileN) -> "\n" + fileN)
                            .reduce(filesGeneratedList, String::concat);
                    setCursor(normalCursor);

                    // Opening the resulting files if the list is not empty...
                    if (filesGenerated.isEmpty()) {
                        JOptionPane.showMessageDialog(this,
                                "Please use \"View work files\" in the \"File\"-menu to open them",
                                java.util.ResourceBundle
                                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                                        .getString("TABLE(S)_BUILT."),
                                JOptionPane.INFORMATION_MESSAGE);
                    } else {
                        JOptionPane.showMessageDialog(this, filesGeneratedList,
                                java.util.ResourceBundle
                                        .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                                        .getString("TABLE(S)_BUILT."),
                                JOptionPane.INFORMATION_MESSAGE);
                        filesGenerated.stream().filter((resultFileName) -> (new File(resultFileName).exists()))
                                .forEachOrdered((resultFileName) -> {
                                    try {
                                        canreg.common.Tools.openFile(resultFileName);
                                    } catch (IOException ex) {
                                        JOptionPane.showMessageDialog(this, "Unable to open: " + resultFileName
                                                + "\n" + ex.getLocalizedMessage());
                                        Logger.getLogger(TableBuilderInternalFrame.class.getName())
                                                .log(Level.SEVERE, null, ex);
                                    }
                                });
                    }
                } else {
                    chartBuilder = (JChartTableBuilderInterface) tableBuilder;
                    JFreeChart[] charts = chartBuilder.getCharts();
                    for (JFreeChart chart : charts) {
                        JChartViewerInternalFrame chartViewerInternalFrame = new JChartViewerInternalFrame();
                        chartViewerInternalFrame.setChart(chart);
                        CanRegClientView.showAndPositionInternalFrame(
                                CanRegClientApp.getApplication().getDesktopPane(), chartViewerInternalFrame);
                    }
                    setCursor(normalCursor);
                }
            } catch (SQLException ex) {
                setCursor(normalCursor);
                JOptionPane.showMessageDialog(this,
                        "Something wrong with the SQL query: \n" + ex.getLocalizedMessage(), "Error",
                        JOptionPane.ERROR_MESSAGE);
                Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
            } catch (RemoteException | SecurityException | NotCompatibleDataException
                    | DistributedTableDescriptionException | UnknownTableException ex) {
                Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
            } catch (TableErrorException ex) {
                setCursor(normalCursor);
                Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
                JOptionPane.showMessageDialog(this,
                        "Something went wrong while building the table: \n" + ex.getMessage(), "Error",
                        JOptionPane.ERROR_MESSAGE);
            } finally {
                setCursor(normalCursor);
            }
        } else {
            JOptionPane.showMessageDialog(this,
                    java.util.ResourceBundle
                            .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                            .getString("POPULATION_SET_NOT_COMPATIBLE"),
                    java.util.ResourceBundle
                            .getBundle("canreg/client/gui/analysis/resources/TableBuilderInternalFrame")
                            .getString("NO_TABLES_BUILT"),
                    JOptionPane.ERROR_MESSAGE);
        }
    }
}
From source file:cnedu.ustcjd.widget.MultiSlider.java
@Override
public boolean onTouchEvent(MotionEvent event) {
    if (!mIsUserSeekable || !isEnabled()) {
        return false;
    }
    final int xx = Math.round(event.getX());
    final int yy = Math.round(event.getY());

    int pointerIdx = event.getActionIndex();
    Thumb currThumb = null;
    if (event.getActionMasked() == MotionEvent.ACTION_DOWN
            || event.getActionMasked() == MotionEvent.ACTION_POINTER_DOWN) {
        LinkedList<Thumb> closestOnes = getClosestThumb((int) event.getX(pointerIdx));
        if (isInScrollingContainer() && mDraggingThumbs.size() == 0 && exactTouched != null && pointerIdx > 0) {
            //we have been here before => we want to use the bar
            Thumb prevThumb = exactTouched.getFirst();
            onStartTrackingTouch(prevThumb);
            exactTouched = null;
        }
        if (closestOnes != null && !closestOnes.isEmpty()) {
            if (closestOnes.size() == 1) {
                currThumb = closestOnes.getFirst();
                if (isInScrollingContainer() && mDraggingThumbs.size() == 0) {
                    exactTouched = closestOnes;
                }
            } else {
                //we have more than one thumb at the same place and we touched there
                exactTouched = closestOnes;
            }
        }
    } else if (event.getActionMasked() == MotionEvent.ACTION_MOVE) {
        if (exactTouched != null && !exactTouched.isEmpty()) {
            currThumb = getMostMovableThumb(event);
            //check if move actually changed value
            // if (currThumb == null) return false;
        } else if (mDraggingThumbs.size() > pointerIdx) {
            currThumb = mDraggingThumbs.get(pointerIdx);
        }
    } else if (event.getActionMasked() == MotionEvent.ACTION_UP
            || event.getActionMasked() == MotionEvent.ACTION_POINTER_UP) {
        if (mDraggingThumbs.size() > pointerIdx) {
            currThumb = mDraggingThumbs.get(pointerIdx);
        } //else we had a candidate but was never tracked
        else if (exactTouched != null && exactTouched.size() > 0) {
            currThumb = getMostMovableThumb(event);
            exactTouched = null;
        }
    }
    // else {
    //     LinkedList<Thumb> closestOnes = getClosestThumb((int) event.getX());
    //     currThumb = closestOnes.getFirst();
    // }

    switch (event.getActionMasked()) {
    case MotionEvent.ACTION_DOWN:
    case MotionEvent.ACTION_POINTER_DOWN:
        if (isInScrollingContainer() && mDraggingThumbs.size() == 0) {
            mTouchDownX = event.getX(pointerIdx);
        } else {
            onStartTrackingTouch(currThumb);
            setThumbValue(currThumb, getValue(event, currThumb), true);
            setHotspot(xx, yy, currThumb);
        }
        break;

    //with move we dont have pointer action so set them all
    case MotionEvent.ACTION_MOVE:
        if (mDraggingThumbs.contains(currThumb)) {
            //need the index
            for (int i = 0; i < mDraggingThumbs.size(); i++) {
                if (mDraggingThumbs.get(i) != null && mDraggingThumbs.get(i).getThumb() != null) {
                    invalidate(mDraggingThumbs.get(i).getThumb().getBounds());
                }
                setThumbValue(mDraggingThumbs.get(i), getValue(event, i, mDraggingThumbs.get(i)), true);
            }
            setHotspot(xx, yy, currThumb);
        } else {
            final float x = event.getX(pointerIdx);
            if (Math.abs(x - mTouchDownX) > mScaledTouchSlop) {
                onStartTrackingTouch(currThumb);
                exactTouched = null;
                setThumbValue(currThumb, getValue(event, currThumb), true);
                setHotspot(xx, yy, currThumb);
            }
        }
        break;

    case MotionEvent.ACTION_UP:
        setPressed(false);
        //there are other pointers left
    case MotionEvent.ACTION_POINTER_UP:
        if (currThumb != null) {
            boolean toUnPress = false;
            if (!isPressed()) {
                setPressed(true);
                toUnPress = true;
            }
            setThumbValue(currThumb, getValue(event, currThumb), true);
            setHotspot(xx, yy, currThumb);
            onStopTrackingTouch(currThumb);
            if (toUnPress) {
                setPressed(false);
            }
        } else {
            // currThumb = getClosestThumb(newValue);
            // // Touch up when we never crossed the touch slop threshold should
            // // be interpreted as a tap-seek to that location.
            // onStartTrackingTouch(currThumb);
            // setThumbValue(currThumb, newValue, true);
            // onStopTrackingTouch(currThumb);
        }
        // ProgressBar doesn't know to repaint the thumb drawable
        // in its inactive state when the touch stops (because the
        // value has not apparently changed)
        invalidate();
        break;

    case MotionEvent.ACTION_CANCEL:
        if (mDraggingThumbs != null) {
            onStopTrackingTouch();
            setPressed(false);
        }
        invalidate(); // see above explanation
        break;
    }
    return true;
}
From source file:main.MainClass.java
public JSONObject sortJSONWithModuleCount(JSONObject moduleSummary) {
    //Set<String> ModuleNames = ModVsCountVsOwner.keySet();
    LinkedList ModvsCountOwnerList = new LinkedList();
    Set<String> moduleKeys = moduleSummary.keySet();
    for (Iterator indiModuleItr = moduleKeys.iterator(); indiModuleItr.hasNext();) {
        String moduleName = (String) indiModuleItr.next();
        JSONObject indiModuleJO = (JSONObject) moduleSummary.get(moduleName);
        int count = (Integer) indiModuleJO.get("Count");
        String moduleOwner = (String) indiModuleJO.get("Owner");

        JSONObject sortedelement = new JSONObject();
        sortedelement.put("moduleName", moduleName);
        sortedelement.put("moduleCount", count);
        sortedelement.put("moduleOwner", moduleOwner);

        if (ModvsCountOwnerList.isEmpty()) {
            ModvsCountOwnerList.add(sortedelement);
        } else {
            Iterator listIterator = ModvsCountOwnerList.iterator();
            JSONObject smallElement;
            int index = 0;
            boolean isLoopBreak = false;
            while (listIterator.hasNext()) {
                smallElement = (JSONObject) listIterator.next();
                int smallCount = (Integer) smallElement.get("moduleCount");
                if (smallCount <= count) {
                    isLoopBreak = true;
                    break;
                }
                index++;
            }
            if (listIterator.hasNext()) {
                ModvsCountOwnerList.add(index, sortedelement);
            } else if (isLoopBreak) {
                ModvsCountOwnerList.add(index, sortedelement);
            } else {
                ModvsCountOwnerList.add(sortedelement);
            }
        }
    }
    //System.out.println("LinkedList: " + ModvsCountOwnerList.toString());
    JSONObject sortedModuleSummary = new JSONObject();
    sortedModuleSummary.put("moduleSummary", ModvsCountOwnerList);
    return sortedModuleSummary;
}
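The method above keeps ModvsCountOwnerList sorted by descending count, using isEmpty() to handle the very first element separately before scanning for the insertion point. A simpler sketch of that descending insert, using plain integers and a ListIterator instead of JSON objects (the data and helper name are invented):

import java.util.LinkedList;
import java.util.ListIterator;

public class SortedInsertSketch {
    // Insert 'value' so the list stays sorted in descending order.
    static void insertDescending(LinkedList<Integer> list, int value) {
        if (list.isEmpty()) {          // first element: nothing to compare against
            list.add(value);
            return;
        }
        ListIterator<Integer> it = list.listIterator();
        while (it.hasNext()) {
            if (it.next() <= value) {  // found the first smaller-or-equal entry
                it.previous();         // step back so add() lands before it
                it.add(value);
                return;
            }
        }
        it.add(value);                 // smallest so far: append at the end
    }

    public static void main(String[] args) {
        LinkedList<Integer> counts = new LinkedList<Integer>();
        for (int c : new int[] { 12, 40, 7, 25 }) {
            insertDescending(counts, c);
        }
        System.out.println(counts); // [40, 25, 12, 7]
    }
}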
From source file:org.jembi.rhea.impl.ApelonServiceImpl.java
/**
 * Export a specified namespace as a CSV string. Traversal is done depth-first.
 */
public String exportNamespace(int namespaceId) throws TerminologyService.TSException {
    StringBuilder res = new StringBuilder();

    try {
        LinkedList<_TSTreeNode> cStack = new LinkedList<_TSTreeNode>();
        List<TSTerm> breadth = getRootTerms(namespaceId);
        List<TSProperty> props = getAllPropertyTypes(namespaceId);
        int i = 0;

        ThesaurusConceptQuery cQuery = ThesaurusConceptQuery.createInstance(getConn());

        res.append("\"Code\",\"Name\"");
        for (TSProperty prop : props)
            res.append(",\"" + prop.getName() + "\"");
        res.append("\n");

        while (i < breadth.size()) {
            ApelonTerm term = (ApelonTerm) breadth.get(i);
            //properties aren't being fetched for sub-concepts, so look up the term to fetch its properties
            DTSProperty[] termProps = cQuery.findConceptById(term.concept.getId(), namespaceId, asd)
                    .getFetchedProperties();

            res.append("\"" + term.getCode() + "\",\"" + term.getName() + "\"");
            for (TSProperty prop : props) {
                boolean addedProp = false;
                for (DTSProperty termProp : termProps) {
                    if (termProp.getName().equals(prop.getName())) {
                        res.append(",\"" + termProp.getValue() + "\"");
                        addedProp = true;
                    }
                }
                if (!addedProp)
                    res.append(",\"\"");
            }
            res.append("\n");

            if (term.getHasSubConcepts()) {
                cStack.push(new _TSTreeNode(breadth, i));
                breadth = term.getSubConcepts();
                i = 0;
                continue;
            }

            while (i + 1 == breadth.size() && !cStack.isEmpty()) {
                _TSTreeNode node = cStack.pop();
                breadth = node.breadth;
                i = node.i;
            }
            i++;
        }
    } catch (Exception ex) {
        throw new TerminologyService.TSException(ex);
    }

    return res.toString();
}
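exportNamespace implements depth-first traversal without recursion: cStack records the sibling list and index to resume at for each parent level, and popping continues while the stack is non-empty. Here is a small sketch of that push/pop bookkeeping over a hypothetical code hierarchy; the Frame class and the sample data are invented for illustration.

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class DepthFirstStackSketch {
    // Each frame remembers a sibling list and the index to resume at,
    // mirroring the _TSTreeNode(breadth, i) bookkeeping in the export code.
    static class Frame {
        final List<String> breadth;
        final int i;
        Frame(List<String> breadth, int i) { this.breadth = breadth; this.i = i; }
    }

    public static void main(String[] args) {
        // Hypothetical hierarchy: each code maps to its child codes.
        Map<String, List<String>> children = new HashMap<String, List<String>>();
        children.put("root", Arrays.asList("A", "B"));
        children.put("A", Arrays.asList("A1", "A2"));

        LinkedList<Frame> stack = new LinkedList<Frame>();
        List<String> breadth = children.get("root");
        int i = 0;

        while (i < breadth.size()) {
            String code = breadth.get(i);
            System.out.println(code); // visit in depth-first order: A, A1, A2, B

            List<String> sub = children.get(code);
            if (sub != null && !sub.isEmpty()) {
                stack.push(new Frame(breadth, i)); // descend; remember where to resume
                breadth = sub;
                i = 0;
                continue;
            }
            // Finished a sibling list: pop back up while there is somewhere to return to.
            while (i + 1 == breadth.size() && !stack.isEmpty()) {
                Frame frame = stack.pop();
                breadth = frame.breadth;
                i = frame.i;
            }
            i++;
        }
    }
}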