List of usage examples for java.util.ArrayList.set()
public E set(int index, E element)
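Before the project examples below, here is a minimal, self-contained sketch of the method's behavior (the class name and list contents are illustrative, not taken from any of the projects listed): set(index, element) replaces the element at the given position and returns the element that was previously stored there, throwing IndexOutOfBoundsException if the index is out of range.

    import java.util.ArrayList;
    import java.util.List;

    public class ArrayListSetDemo {
        public static void main(String[] args) {
            List<String> colors = new ArrayList<>(List.of("red", "green", "blue"));

            // Replace the element at index 1; set() returns the element it displaced.
            String previous = colors.set(1, "yellow");

            System.out.println(previous); // green
            System.out.println(colors);   // [red, yellow, blue]

            // Note: set() only replaces existing slots; an index >= size()
            // throws IndexOutOfBoundsException rather than growing the list.
        }
    }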
From source file:cnu.eslab.fileTest.NewJFrame.java
    public void StackedXYAreaRenderer(ArrayList<Integer> arCpuArrayList, ArrayList<Integer> arWifiArrayList,
            ArrayList<Integer> arLedArrayList, ArrayList<Integer> arGpsArrayList,
            ArrayList<Integer> arAudioArrayList, ArrayList<Integer> arThreeGArrayList) {
        boolean flagGPS, flagAUDIO;
        flagGPS = mGPSCheckBox.isSelected();
        flagAUDIO = mAudioCheckBox.isSelected();

        XYSeries xyseriesCPU = new XYSeries("CPU");
        XYSeries xyseriesWIFI = new XYSeries("WIFI");
        XYSeries xyseriesThreeG = new XYSeries("3G");
        XYSeries xyseriesLED = new XYSeries("LED");
        XYSeries xyseriesGPS = new XYSeries("GPS");
        XYSeries xyseriesAUDIO = new XYSeries("AUDIO");

        for (int i = 0; i < arCpuArrayList.size(); i++) {
            // PowerTutor bug: cap CPU samples at 450
            if (arCpuArrayList.get(i) > 450) {
                arCpuArrayList.set(i, 450);
            }
            /*
             * Stack order (top -> bottom):
             * AUDIO / GPS / LED / ThreeG / WIFI / CPU
             */
            xyseriesCPU.add(i, arCpuArrayList.get(i));
            xyseriesWIFI.add(i, arWifiArrayList.get(i) + arCpuArrayList.get(i));
            xyseriesThreeG.add(i, arThreeGArrayList.get(i) + arWifiArrayList.get(i) + arCpuArrayList.get(i));
            xyseriesLED.add(i, arLedArrayList.get(i) + arThreeGArrayList.get(i) + arWifiArrayList.get(i)
                    + arCpuArrayList.get(i));
            if (flagGPS && flagAUDIO) {
                xyseriesGPS.add(i, arGpsArrayList.get(i) + arLedArrayList.get(i) + arThreeGArrayList.get(i)
                        + arWifiArrayList.get(i) + arCpuArrayList.get(i));
                xyseriesAUDIO.add(i, arAudioArrayList.get(i) + arGpsArrayList.get(i) + arLedArrayList.get(i)
                        + arThreeGArrayList.get(i) + arWifiArrayList.get(i) + arCpuArrayList.get(i));
            } else if (flagGPS && !flagAUDIO) {
                xyseriesGPS.add(i, arGpsArrayList.get(i) + arLedArrayList.get(i) + arThreeGArrayList.get(i)
                        + arWifiArrayList.get(i) + arCpuArrayList.get(i));
                xyseriesAUDIO.add(i, 0); // AUDIO not selected
            } else if (!flagGPS && flagAUDIO) {
                xyseriesGPS.add(i, 0); // GPS not selected
                xyseriesAUDIO.add(i, arAudioArrayList.get(i) + arLedArrayList.get(i) + arThreeGArrayList.get(i)
                        + arWifiArrayList.get(i) + arCpuArrayList.get(i));
            } else {
                xyseriesGPS.add(i, 0); // GPS not selected
                xyseriesAUDIO.add(i, 0); // AUDIO not selected
            }
        }

        XYSeriesCollection xyseriescollection = new XYSeriesCollection();
        xyseriescollection.addSeries(xyseriesCPU);
        xyseriescollection.addSeries(xyseriesWIFI);
        xyseriescollection.addSeries(xyseriesThreeG);
        xyseriescollection.addSeries(xyseriesLED);
        xyseriescollection.addSeries(xyseriesGPS);
        xyseriescollection.addSeries(xyseriesAUDIO);

        JFreeChart jfreechart = ChartFactory.createXYLineChart("", "Time(sec)", "Power(mW)", xyseriescollection,
                PlotOrientation.VERTICAL, true, true, false);
        XYPlot xyplot = (XYPlot) jfreechart.getPlot();
        xyplot.setDomainPannable(true);
        xyplot.setBackgroundPaint(new Color(255, 255, 255)); // white background
        XYStepAreaRenderer xysteparearenderer = new XYStepAreaRenderer(2);
        /*
        xysteparearenderer.setSeriesPaint(0, new Color(0, 0, 0));
        xysteparearenderer.setSeriesPaint(1, new Color(30, 30, 30));
        xysteparearenderer.setSeriesPaint(2, new Color(60, 60, 60));
        xysteparearenderer.setSeriesPaint(3, new Color(90, 90, 90));
        xysteparearenderer.setSeriesPaint(4, new Color(120, 120, 120));
        xysteparearenderer.setSeriesPaint(5, new Color(150, 150, 150));
        */
        xysteparearenderer.setDataBoundsIncludesVisibleSeriesOnly(false);
        xysteparearenderer.setBaseToolTipGenerator(new StandardXYToolTipGenerator());
        xysteparearenderer.setDefaultEntityRadius(6);
        xyplot.setRenderer(xysteparearenderer);

        ChartPanel chartPanel = new ChartPanel(jfreechart);
        JFrame f = new JFrame("StackedXYAreaRenderer");
        f.setSize(600, 600);
        f.getContentPane().add(chartPanel);
        // f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        f.setVisible(true);
    }
From source file:de.tud.kom.p2psim.impl.network.gnp.topology.GnpSpace.java
    /**
     * Calculates a good position for the host.
     *
     * @param host
     *            host to position
     * @param monitorResheduling
     *            number of reschedulings of the downhill simplex
     * @return gnp position for peer
     */
    private GnpPosition insertCoordinateDownhillSimplex(Host host, int monitorResheduling) {
        double alpha = 1.0;
        double beta = 0.5;
        double gamma = 2;
        double maxDiversity = 0.5;

        // N + 1 initial random solutions
        ArrayList<GnpPosition> solutions = new ArrayList<GnpPosition>(noOfDimensions + 1);
        for (int c = -1; c < noOfDimensions; c++) {
            GnpPosition coord = new GnpPosition(noOfDimensions, host, this);
            solutions.add(coord);
        }

        // best and worst solution
        GnpPosition bestSolution = Collections.min(solutions);
        GnpPosition worstSolution = Collections.max(solutions);
        double bestError = bestSolution.getDownhillSimplexError();
        double worstError = worstSolution.getDownhillSimplexError();
        double newError = 0.0;

        for (int z = 0; z < monitorResheduling; z++) {
            // rescheduling
            for (GnpPosition coord : solutions) {
                if (coord != bestSolution) {
                    coord.diversify(this.getDimension(), maxDiversity);
                }
            }

            // best and worst solution
            bestSolution = Collections.min(solutions);
            worstSolution = Collections.max(solutions);
            bestError = bestSolution.getDownhillSimplexError();
            worstError = worstSolution.getDownhillSimplexError();

            // stop criterion
            while (worstError - bestError > 0.000001 && calculationInProgress) {
                // move to center ...
                GnpPosition center = GnpPosition.getCenterSolution(solutions);
                GnpPosition newSolution1 = GnpPosition.getMovedSolution(worstSolution, center, 1 + alpha);
                newError = newSolution1.getDownhillSimplexError();
                if (newError <= bestError) {
                    GnpPosition newSolution2 = GnpPosition.getMovedSolution(worstSolution, center,
                            1 + alpha + gamma);
                    int IndexOfWorstSolution = solutions.indexOf(worstSolution);
                    if (newSolution2.getDownhillSimplexError() <= newError) {
                        solutions.set(IndexOfWorstSolution, newSolution2);
                    } else {
                        solutions.set(IndexOfWorstSolution, newSolution1);
                    }
                    bestSolution = solutions.get(IndexOfWorstSolution);
                    bestError = bestSolution.getDownhillSimplexError();
                } else if (newError < worstError) {
                    int IndexOfWorstSolution = solutions.indexOf(worstSolution);
                    solutions.set(IndexOfWorstSolution, newSolution1);
                } else {
                    // ... or contract around best solution
                    for (int c = 0; c < solutions.size(); c++) {
                        if (solutions.get(c) != bestSolution) {
                            solutions.set(c, GnpPosition.getMovedSolution(solutions.get(c), bestSolution, beta));
                        }
                    }
                    bestSolution = Collections.min(solutions);
                    bestError = bestSolution.getDownhillSimplexError();
                }
                worstSolution = Collections.max(solutions);
                worstError = worstSolution.getDownhillSimplexError();
            }
        }

        // set the coordinate reference to the peer
        host.setPositionReference(bestSolution);
        return bestSolution;
    }
From source file:com.oneops.inductor.WorkOrderExecutor.java
    /**
     * Installs base software needed for chef / oneops
     *
     * @param pr      ProcessRunner
     * @param wo      CmsWorkOrderSimple
     * @param host    remote host
     * @param port    remote port
     * @param logKey
     * @param keyFile
     */
    public void runBaseInstall(ProcessRunner pr, CmsWorkOrderSimple wo, String host, String port, String logKey,
            String keyFile) {
        long t1 = System.currentTimeMillis();

        // amazon public images use ubuntu user for ubuntu os
        String cloudName = wo.getCloud().getCiName();
        String osType = "";
        if (wo.getPayLoad().containsKey("DependsOn")
                && wo.getPayLoad().get("DependsOn").get(0).getCiClassName().contains("Compute"))
            osType = wo.getPayLoad().get("DependsOn").get(0).getCiAttributes().get("ostype");
        else
            osType = wo.getRfcCi().getCiAttributes().get("ostype");

        if (osType.equals("default-cloud")) {
            if (!wo.getServices().containsKey("compute")) {
                wo.setComments("missing compute service");
                return;
            }
            osType = wo.getServices().get("compute").get(cloudName).getCiAttributes().get("ostype");
            logger.info("using default-cloud ostype: " + osType);
        }

        String user = getUserForOsAndCloud(osType, wo);

        String sudo = "";
        if (!user.equals("root"))
            sudo = "sudo ";

        String setup = "";
        // rackspace images don't have rsync installed
        if (wo.getCloud().getCiName().indexOf("rackspace") > -1) {
            setup = "yum -d0 -e0 -y install rsync; apt-get -y install rsync; true; ";
        // fedora in aws needs it too
        } else if (osType.indexOf("edora") > -1) {
            setup = "sudo yum -d0 -e0 -y install rsync; ";
        }

        // make prerequisite dirs for /opt/oneops and cookbooks
        String prepCmdline = setup + sudo + "mkdir -p /opt/oneops/workorder /home/" + user + "/components" + ";"
                + sudo + "chown -R " + user + ":" + user + " /opt/oneops;" + sudo + "chown -R " + user + ":"
                + user + " /home/" + user + "/components";

        // double -t args are needed
        String[] cmd = (String[]) ArrayUtils.addAll(sshInteractiveCmdLine,
                new String[] { keyFile, "-p " + port, user + "@" + host, prepCmdline });

        // retry initial ssh 10x for slow hypervisor hosts
        if (!host.equals(TEST_HOST)) {
            ProcessResult result = pr.executeProcessRetry(cmd, logKey, 10);
            if (result.getResultCode() > 0) {
                wo.setComments("failed : can't:" + prepCmdline);
                return;
            }
        }

        // install os package repos - repo_map keyed by os
        ArrayList<String> repoCmdList = new ArrayList<String>();
        if (wo.getServices().containsKey("compute")
                && wo.getServices().get("compute").get(cloudName).getCiAttributes().containsKey("repo_map")
                && wo.getServices().get("compute").get(cloudName).getCiAttributes().get("repo_map")
                        .indexOf(osType) > 0) {
            String repoMap = wo.getServices().get("compute").get(cloudName).getCiAttributes().get("repo_map");
            repoCmdList = getRepoListFromMap(repoMap, osType);
        } else {
            logger.warn("no key in repo_map for os: " + osType);
        }

        // add repo_list from compute
        if (wo.getRfcCi().getCiAttributes().containsKey("repo_list")) {
            repoCmdList.addAll(getRepoList(wo.getRfcCi().getCiAttributes().get("repo_list")));
        }

        if (repoCmdList.size() > 0) {
            String[] cmdTmp = (String[]) ArrayUtils.addAll(sshInteractiveCmdLine,
                    new String[] { keyFile, "-p " + port, user + "@" + host });

            // add ";" to each cmd
            for (int i = 0; i < repoCmdList.size(); i++) {
                repoCmdList.set(i, repoCmdList.get(i) + "; ");
            }

            // add in front so env can be set before repo cmds
            repoCmdList.add(0, getProxyEnvVars(wo));

            cmd = (String[]) ArrayUtils.addAll(cmdTmp, repoCmdList.toArray());
            if (!host.equals(TEST_HOST)) {
                ProcessResult result = pr.executeProcessRetry(cmd, logKey, retryCount);
                if (result.getResultCode() > 0) {
                    wo.setComments("failed : Replace the compute and retry the deployment");
                    wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
                    return;
                }
            }
        }

        // put ci cookbooks. "/" needed to get around symlinks
        String cookbookPath = getCookbookPath(wo.getRfcCi().getCiClassName());
        String cookbook = config.getCircuitDir().replace("packer", cookbookPath) + "/";
        String[] rsyncCmdLineWithKey = rsyncCmdLine.clone();
        rsyncCmdLineWithKey[4] += "-p " + port + " -qi " + keyFile;
        String[] deploy = (String[]) ArrayUtils.addAll(rsyncCmdLineWithKey,
                new String[] { cookbook, user + "@" + host + ":/home/" + user + "/" + cookbookPath });
        if (!host.equals(TEST_HOST)) {
            ProcessResult result = pr.executeProcessRetry(deploy, logKey, retryCount);
            if (result.getResultCode() > 0) {
                wo.setComments("FATAL: " + generateRsyncErrorMessage(result.getResultCode(), host + ":" + port));
                wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
                return;
            }
        }

        // put shared cookbooks
        cookbook = config.getCircuitDir().replace("packer", "shared") + "/";
        rsyncCmdLineWithKey = rsyncCmdLine.clone();
        rsyncCmdLineWithKey[4] += "-p " + port + " -qi " + keyFile;
        deploy = (String[]) ArrayUtils.addAll(rsyncCmdLineWithKey,
                new String[] { cookbook, user + "@" + host + ":/home/" + user + "/shared" });
        if (!host.equals(TEST_HOST)) {
            ProcessResult result = pr.executeProcessRetry(deploy, logKey, retryCount);
            if (result.getResultCode() > 0) {
                wo.setComments("FATAL: " + generateRsyncErrorMessage(result.getResultCode(), host + ":" + port));
                wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
                return;
            }
        }

        // install base: oneops user, ruby, chef
        // double -t args are needed
        String[] classParts = wo.getRfcCi().getCiClassName().split("\\.");
        String baseComponent = classParts[classParts.length - 1].toLowerCase();
        String[] cmdTmp = (String[]) ArrayUtils.addAll(sshInteractiveCmdLine,
                new String[] { keyFile, "-p " + port, user + "@" + host, sudo + "/home/" + user + "/"
                        + cookbookPath + "/components/cookbooks/" + baseComponent
                        + "/files/default/install_base.sh" });
        String[] proxyList = new String[] { getProxyBashVars(wo) };
        cmd = (String[]) ArrayUtils.addAll(cmdTmp, proxyList);

        if (!host.equals(TEST_HOST)) {
            ProcessResult result = pr.executeProcessRetry(cmd, logKey, retryCount);
            if (result.getResultCode() > 0) {
                wo.setComments("failed : can't run install_base.sh");
                wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
                return;
            }
        }

        wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
    }
From source file:org.peerfact.impl.network.gnp.topology.GnpSpace.java
    /**
     * Calculates a good position for the host.
     *
     * @param host
     *            host to position
     * @param monitorResheduling
     *            number of reschedulings of the downhill simplex
     * @return gnp position for peer
     */
    private GnpPosition insertCoordinateDownhillSimplex(Host host, int monitorResheduling) {
        double alpha = 1.0;
        double beta = 0.5;
        double gamma = 2;
        double maxDiversity = 0.5;

        // N + 1 initial random solutions
        ArrayList<GnpPosition> solutions = new ArrayList<GnpPosition>(noOfDimensions + 1);
        for (int c = -1; c < noOfDimensions; c++) {
            GnpPosition coord = new GnpPosition(noOfDimensions, host, this);
            solutions.add(coord);
        }

        // best and worst solution
        GnpPosition bestSolution = Collections.min(solutions);
        GnpPosition worstSolution = Collections.max(solutions);
        double bestError = bestSolution.getDownhillSimplexError();
        double worstError = worstSolution.getDownhillSimplexError();
        double newError = 0.0;

        for (int z = 0; z < monitorResheduling; z++) {
            // rescheduling
            for (GnpPosition coord : solutions) {
                if (coord != bestSolution) {
                    coord.diversify(this.getDimension(), maxDiversity);
                }
            }

            // best and worst solution
            bestSolution = Collections.min(solutions);
            worstSolution = Collections.max(solutions);
            bestError = bestSolution.getDownhillSimplexError();
            worstError = worstSolution.getDownhillSimplexError();

            // stop criterion
            while (worstError - bestError > 0.000001 && calculationInProgress) {
                // move to center ...
                GnpPosition center = GnpPosition.getCenterSolution(solutions);
                GnpPosition newSolution1 = GnpPosition.getMovedSolution(worstSolution, center, 1 + alpha);
                newError = newSolution1.getDownhillSimplexError();
                if (newError <= bestError) {
                    GnpPosition newSolution2 = GnpPosition.getMovedSolution(worstSolution, center,
                            1 + alpha + gamma);
                    int IndexOfWorstSolution = solutions.indexOf(worstSolution);
                    if (newSolution2.getDownhillSimplexError() <= newError) {
                        solutions.set(IndexOfWorstSolution, newSolution2);
                    } else {
                        solutions.set(IndexOfWorstSolution, newSolution1);
                    }
                    bestSolution = solutions.get(IndexOfWorstSolution);
                    bestError = bestSolution.getDownhillSimplexError();
                } else if (newError < worstError) {
                    int IndexOfWorstSolution = solutions.indexOf(worstSolution);
                    solutions.set(IndexOfWorstSolution, newSolution1);
                } else {
                    // ... or contract around best solution
                    for (int c = 0; c < solutions.size(); c++) {
                        if (solutions.get(c) != bestSolution) {
                            solutions.set(c, GnpPosition.getMovedSolution(solutions.get(c), bestSolution, beta));
                        }
                    }
                    bestSolution = Collections.min(solutions);
                    bestError = bestSolution.getDownhillSimplexError();
                }
                worstSolution = Collections.max(solutions);
                worstError = worstSolution.getDownhillSimplexError();
            }
        }

        // set the coordinate reference to the peer
        host.setPositionReference(bestSolution);
        return bestSolution;
    }
From source file:com.ibm.bi.dml.hops.rewrite.RewriteMatrixMultChainOptimization.java
    /**
     * optimizeMMChain(): It optimizes the matrix multiplication chain in which
     * the last Hop is "this".
     * Step-1) Identify the chain (mmChain).
     * Step-2) Clear all links among the Hops that are involved in mmChain.
     * Step-3) Find the optimal ordering (dynamic programming).
     * Step-4) Relink the hops in mmChain.
     */
    private void optimizeMMChain(Hop hop) throws HopsException {
        if (LOG.isTraceEnabled()) {
            LOG.trace("MM Chain Optimization for HOP: (" + " " + hop.getClass().getSimpleName() + ", "
                    + hop.getHopID() + ", " + hop.getName() + ")");
        }

        ArrayList<Hop> mmChain = new ArrayList<Hop>();
        ArrayList<Hop> mmOperators = new ArrayList<Hop>();
        ArrayList<Hop> tempList;

        // Step 1: Identify the chain (mmChain) & clear all links among the Hops
        // that are involved in mmChain.
        mmOperators.add(hop);
        // Initialize mmChain with my inputs
        for (Hop hi : hop.getInput()) {
            mmChain.add(hi);
        }

        // expand each Hop in mmChain to find the entire matrix multiplication chain
        int i = 0;
        while (i < mmChain.size()) {
            boolean expandable = false;
            Hop h = mmChain.get(i);
            /*
             * Check if mmChain[i] is expandable:
             * 1) It must be MATMULT
             * 2) It must not have been visited already
             *    (one MATMULT should get expanded only in one chain)
             * 3) Its output should not be used in multiple places
             *    (either within chain or outside the chain)
             */
            if (h instanceof AggBinaryOp && ((AggBinaryOp) h).isMatrixMultiply()
                    && !((AggBinaryOp) hop).hasLeftPMInput() && h.getVisited() != Hop.VisitStatus.DONE) {
                // check if the output of "h" is used at multiple places. If yes, it can not be expanded.
                if (h.getParent().size() > 1 || inputCount((Hop) ((h.getParent().toArray())[0]), h) > 1) {
                    expandable = false;
                    break;
                } else
                    expandable = true;
            }

            h.setVisited(Hop.VisitStatus.DONE);

            if (!expandable) {
                i = i + 1;
            } else {
                tempList = mmChain.get(i).getInput();
                if (tempList.size() != 2) {
                    throw new HopsException(hop.printErrorLocation()
                            + "Hops::rule_OptimizeMMChain(): AggBinary must have exactly two inputs.");
                }

                // add current operator to mmOperators, and its input nodes to mmChain
                mmOperators.add(mmChain.get(i));
                mmChain.set(i, tempList.get(0));
                mmChain.add(i + 1, tempList.get(1));
            }
        }

        // print the MMChain
        if (LOG.isTraceEnabled()) {
            LOG.trace("Identified MM Chain: ");
            for (Hop h : mmChain) {
                logTraceHop(h, 1);
            }
        }

        if (mmChain.size() == 2) {
            // If the chain size is 2, then there is nothing to optimize.
            return;
        } else {
            // Step 2: construct dims array
            double[] dimsArray = new double[mmChain.size() + 1];
            boolean dimsKnown = getDimsArray(hop, mmChain, dimsArray);

            if (dimsKnown) {
                // Step 3: clear the links among Hops within the identified chain
                clearLinksWithinChain(hop, mmOperators);

                // Step 4: Find the optimal ordering via dynamic programming.
                // Invoke Dynamic Programming
                int size = mmChain.size();
                int[][] split = mmChainDP(dimsArray, mmChain.size());

                // Step 5: Relink the hops using the optimal ordering (split[][]) found from DP.
                LOG.trace("Optimal MM Chain: ");
                mmChainRelinkHops(mmOperators.get(0), 0, size - 1, mmChain, mmOperators, 1, split, 1);
            }
        }
    }
From source file:com.ichi2.libanki.Stats.java
    /**
     * Hourly Breakdown
     */
    public boolean calculateBreakdown(int type) {
        mTitle = R.string.stats_breakdown;
        mAxisTitles = new int[] { R.string.stats_time_of_day, R.string.stats_percentage_correct,
                R.string.stats_reviews };
        mValueLabels = new int[] { R.string.stats_percentage_correct, R.string.stats_answers };
        mColors = new int[] { R.color.stats_counts, R.color.stats_hours };
        mType = type;
        String lim = _revlogLimit().replaceAll("[\\[\\]]", "");
        if (lim.length() > 0) {
            lim = " and " + lim;
        }

        Calendar sd = GregorianCalendar.getInstance();
        sd.setTimeInMillis(mCol.getCrt() * 1000);

        int pd = _periodDays();
        if (pd > 0) {
            lim += " and id > " + ((mCol.getSched().getDayCutoff() - (86400 * pd)) * 1000);
        }

        long cutoff = mCol.getSched().getDayCutoff();
        long cut = cutoff - sd.get(Calendar.HOUR_OF_DAY) * 3600;

        ArrayList<double[]> list = new ArrayList<double[]>();
        Cursor cur = null;
        String query = "select " + "23 - ((cast((" + cut + " - id/1000) / 3600.0 as int)) % 24) as hour, "
                + "sum(case when ease = 1 then 0 else 1 end) / " + "cast(count() as float) * 100, " + "count() "
                + "from revlog where type in (0,1,2) " + lim + " "
                + "group by hour having count() > 30 order by hour";
        Timber.d(sd.get(Calendar.HOUR_OF_DAY) + " : " + cutoff + " breakdown query: %s", query);
        try {
            cur = mCol.getDb().getDatabase().rawQuery(query, null);
            while (cur.moveToNext()) {
                list.add(new double[] { cur.getDouble(0), cur.getDouble(1), cur.getDouble(2) });
            }
        } finally {
            if (cur != null && !cur.isClosed()) {
                cur.close();
            }
        }

        // TODO adjust for breakdown, for now only copied from intervals
        // small adjustment for proper chart building with achartengine
        if (list.size() == 0) {
            list.add(0, new double[] { 0, 0, 0 });
        }

        for (int i = 0; i < list.size(); i++) {
            double[] data = list.get(i);
            int intHour = (int) data[0];
            int hour = (intHour - 4) % 24;
            if (hour < 0)
                hour += 24;
            data[0] = hour;
            list.set(i, data);
        }
        Collections.sort(list, new Comparator<double[]>() {
            @Override
            public int compare(double[] s1, double[] s2) {
                if (s1[0] < s2[0])
                    return -1;
                if (s1[0] > s2[0])
                    return 1;
                return 0;
            }
        });

        mSeriesList = new double[4][list.size()];
        mPeak = 0.0;
        mMcount = 0.0;
        double minHour = Double.MAX_VALUE;
        double maxHour = 0;
        for (int i = 0; i < list.size(); i++) {
            double[] data = list.get(i);
            int hour = (int) data[0];
            // double hour = data[0];
            if (hour < minHour)
                minHour = hour;
            if (hour > maxHour)
                maxHour = hour;
            double pct = data[1];
            if (pct > mPeak)
                mPeak = pct;

            mSeriesList[0][i] = hour;
            mSeriesList[1][i] = pct;
            mSeriesList[2][i] = data[2];
            if (i == 0) {
                mSeriesList[3][i] = pct;
            } else {
                double prev = mSeriesList[3][i - 1];
                double diff = pct - prev;
                diff /= 3.0;
                diff = Math.round(diff * 10.0) / 10.0;
                mSeriesList[3][i] = prev + diff;
            }
            if (data[2] > mMcount)
                mMcount = data[2];
            if (mSeriesList[1][i] > mMaxCards)
                mMaxCards = (int) mSeriesList[1][i];
        }
        mFirstElement = mSeriesList[0][0];
        mLastElement = mSeriesList[0][mSeriesList[0].length - 1];
        mMaxElements = (int) (maxHour - minHour);

        // some adjustments to not crash the chart building with empty data
        if (mMaxElements == 0) {
            mMaxElements = 10;
        }
        if (mMcount == 0) {
            mMcount = 10;
        }
        if (mFirstElement == mLastElement) {
            mFirstElement = 0;
            mLastElement = 23;
        }
        if (mMaxCards == 0)
            mMaxCards = 10;
        return list.size() > 0;
    }
From source file:org.buddycode.jaxrs.samplejaxrs.authentication.BasicAuthHandler.java
    @Override
    public boolean isAuthenticated(Map headers) {
        // get the value for the Authorization header
        ArrayList authzHeaders = (ArrayList) headers.get(AUTHORIZATION_HEADER);
        if (authzHeaders != null) {
            // get the authorization header value, if provided
            String authzHeader = (String) authzHeaders.get(0);
            String[] splittedHeaders = authzHeader.split(AUTH_HEADER_SPLITTER);
            if (splittedHeaders.length < 2 && !splittedHeaders[1].contains(BASIC_AUTH_HEADER)) {
                log.error("Invalid Auth header: " + splittedHeaders[1]);
                return false;
            }

            // decode it and extract username and password
            byte[] decodedAuthHeader = Base64.decode(splittedHeaders[1]);
            String authHeader = new String(decodedAuthHeader);
            String[] authHeaderSplittered = authHeader.split(DECODED_AUTH_HEADER_SPLITTER);
            if (authHeaderSplittered.length < 2) {
                log.error("Invalid basic header(base64 encoded string) value.");
                return false;
            }
            String userName = authHeaderSplittered[0];
            String password = authHeaderSplittered[1];
            if (userName != null && password != null) {
                String tenantDomain = MultitenantUtils.getTenantDomain(userName);
                String tenantLessUserName = MultitenantUtils.getTenantAwareUsername(userName);
                try {
                    // get super tenant context and get realm service which is an osgi service
                    RealmService realmService = (RealmService) PrivilegedCarbonContext
                            .getThreadLocalCarbonContext().getOSGiService(RealmService.class);
                    if (realmService != null) {
                        int tenantId = realmService.getTenantManager().getTenantId(tenantDomain);
                        if (tenantId == -1) {
                            log.error("Invalid tenant domain " + tenantDomain);
                            return false;
                        }
                        // get tenant's user realm
                        UserRealm userRealm = realmService.getTenantUserRealm(tenantId);
                        boolean authenticated = userRealm.getUserStoreManager().authenticate(tenantLessUserName,
                                password);
                        if (authenticated) {
                            // authentication success. set the username for the authorization header
                            // and proceed with the REST call
                            authzHeaders.set(0, userName);
                            return true;
                        } else {
                            log.error("Authentication failed for the user: "
                                    + getFullUserName(tenantDomain, tenantLessUserName));
                            return false;
                        }
                    } else {
                        log.error("Error in getting Realm Service for user: " + userName
                                + ".Authentication failed for the user: "
                                + getFullUserName(tenantDomain, tenantLessUserName));
                        return false;
                    }
                } catch (UserStoreException e) {
                    log.error("Internal server error while authenticating the user.");
                    return false;
                }
            } else {
                log.error("Authentication required for this resource. Username or password not provided.");
                return false;
            }
        } else {
            log.error("Authentication required for this resource. Authorization header not present in the request.");
            return false;
        }
    }
From source file:org.eclipse.dataset.AbstractDataset.java
    private static void updateShape(final ArrayList<Integer> ldims, final int depth, final int l) {
        if (depth >= ldims.size()) {
            ldims.add(l);
        } else if (l > ldims.get(depth)) {
            ldims.set(depth, l);
        }
    }
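For illustration only, here is a small hedged driver (the demo class and the calls below are hypothetical, not part of the Eclipse Dataset sources) showing how this helper either appends a new dimension length or enlarges an existing one in place via set():

    import java.util.ArrayList;

    public class UpdateShapeDemo {
        // same logic as AbstractDataset.updateShape, reproduced here for the demo
        private static void updateShape(final ArrayList<Integer> ldims, final int depth, final int l) {
            if (depth >= ldims.size()) {
                ldims.add(l);              // depth not seen yet: append new dimension length
            } else if (l > ldims.get(depth)) {
                ldims.set(depth, l);       // depth exists: keep the maximum length seen so far
            }
        }

        public static void main(String[] args) {
            ArrayList<Integer> shape = new ArrayList<>();
            updateShape(shape, 0, 3); // appends -> [3]
            updateShape(shape, 1, 2); // appends -> [3, 2]
            updateShape(shape, 1, 5); // enlarges index 1 -> [3, 5]
            updateShape(shape, 1, 4); // no change, 4 <= 5 -> [3, 5]
            System.out.println(shape); // [3, 5]
        }
    }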
From source file:grupob.TipoProceso.java
    private void btnGuardarRegionesActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnGuardarRegionesActionPerformed
        ArrayList<Region> listaRegionesPas = listaRegiones;
        DefaultTableModel modelo = (DefaultTableModel) jTableRegiones.getModel();
        for (int i = 0; i < listaRegionesPas.size(); i++) {
            String a = modelo.getValueAt(i, 1).toString();
            String n = modelo.getValueAt(i, 0).toString();
            int num = -1;
            try {
                num = Integer.parseInt(a);
            } catch (NumberFormatException e) {
                JOptionPane.showMessageDialog(null,
                        "Error: Ingreso un valor distinto de un numero en la fila: " + (i + 1) + " columna: 2");
                return;
            }
            if (num < 0) {
                JOptionPane.showMessageDialog(null,
                        "Error: Ingreso un numero negativo en la fila: " + (i + 1) + " columna: 2");
                return;
            }
            Region s = listaRegionesPas.get(i);
            Region r = new Region(s.getId(), n, num);
            listaRegionesPas.set(i, r);
        }
        for (int i = 0; i < listaRegionesPas.size(); i++) {
            Region rd = listaRegionesPas.get(i);
            if (rd.getId() != 0) {
                Manager.updateRegion(rd);
            }
        }
        JOptionPane.showMessageDialog(null, "Se completo de actualizar las regiones");
    }
From source file:com.ge.research.semtk.sparqlX.SparqlEndpointInterface.java
    /**
     * Convert response table json to SemTK Table json
     *
     * Sample input SPARQL table json:
     * {"Test":{"type":"literal","value":"http:\/\/research.ge.com\/dataset#Test_1"},"number":{"datatype":"http:\/\/www.w3.org\/2001\/XMLSchema#integer","type":"typed-literal","value":"1272"}},
     * {"Test":{"type":"literal","value":"http:\/\/research.ge.com\/dataset#Test_2"},"number":{"datatype":"http:\/\/www.w3.org\/2001\/XMLSchema#integer","type":"typed-literal","value":"1274"}},
     * {"Test":{"type":"literal","value":"http:\/\/research.ge.com\/dataset#Test_3"},"number":{"datatype":"http:\/\/www.w3.org\/2001\/XMLSchema#integer","type":"typed-literal","value":"1276"}},
     *
     * Sample output SemTK table json:
     * {
     *   "col_names":["Test","testnum"],
     *   "rows":[["http://research.ge.com/energy/dataset#Test_1272","1272"],["http://research.ge.com/dataset#Test_1274","1274"],["http://research.ge.com/dataset#Test_1276","1276"]],
     *   "col_type":["uri","http:\/\/www.w3.org\/2001\/XMLSchema#integer"],   // TODO is this correct?
     *   "col_count":2,
     *   "row_count":3
     * }
     *
     * @param colNamesJsonArray the JSONArray containing the column names
     * @param rowsJsonArray the JSONArray containing the data rows
     * @return
     * @throws Exception
     */
    private static JSONObject getTableJSONFromResponse(JSONArray colNamesJsonArray, JSONArray rowsJsonArray)
            throws Exception {

        String key, valueValue, valueType, valueDataType;
        JSONObject jsonCell;

        ArrayList<String> colsForNewTable = new ArrayList<String>();
        ArrayList<String> colTypesForNewTable = new ArrayList<String>();
        ArrayList<String> rowForNewTable;
        ArrayList<ArrayList<String>> rowsForNewTable = new ArrayList<ArrayList<String>>();
        HashMap<String, Integer> colNumHash = new HashMap<String, Integer>();
        HashMap<String, String> colTypeHash = new HashMap<String, String>();

        String curType = null;
        final String UNKNOWN = "unknown";
        final String MIXED = "http://www.w3.org/2001/XMLSchema#string";

        // **** Column Names and types. Parallel arrays. Types still unknown. ****
        for (Object colObj : colNamesJsonArray) {
            String colStr = (String) colObj;
            colsForNewTable.add(colStr);
            colTypesForNewTable.add(UNKNOWN);
            colNumHash.put(colStr, colsForNewTable.size() - 1); // hash the column indices
            colTypeHash.put(colStr, UNKNOWN); // hash the column types
        }

        // **** Rows ****
        for (int i = 0; i < rowsJsonArray.size(); i++) {
            // sample row: {"Test":{"type":"literal","value":"http:\/\/research.ge.com\/dataset#Test_1276"},"testnum":{"datatype":"http:\/\/www.w3.org\/2001\/XMLSchema#integer","type":"typed-literal","value":"1276"}}
            JSONObject row = (JSONObject) rowsJsonArray.get(i);
            rowForNewTable = new ArrayList<String>(); // start new row

            // **** loop through columns in the correct order ****
            for (Object colObj : colNamesJsonArray) {
                key = (String) colObj;
                jsonCell = (JSONObject) row.get(key);

                if (jsonCell == null) {
                    rowForNewTable.add("");
                } else {
                    valueValue = (String) jsonCell.get("value");
                    valueType = (String) jsonCell.get("type");
                    valueDataType = (valueType.equals("typed-literal")) ? (String) jsonCell.get("datatype")
                            : valueType; // e.g. "http:\/\/www.w3.org\/2001\/XMLSchema#integer", but only if type is "typed-literal"

                    // add the value to the row
                    rowForNewTable.add(valueValue);

                    // check the type
                    curType = colTypeHash.get(key);

                    // fix UNKNOWN's as they become known
                    if (curType.equals(MIXED)) {
                        // do nothing if cell is already MIXED
                    } else if (curType.equals(UNKNOWN)) {
                        colTypeHash.put(key, valueDataType);
                        colTypesForNewTable.set(colNumHash.get(key), valueDataType);

                    // insert MIXED if types are coming back funny
                    } else if (!curType.equals(valueDataType)) {
                        colTypeHash.put(key, MIXED);
                        colTypesForNewTable.set(colNumHash.get(key), MIXED);
                    }
                }
            }
            rowsForNewTable.add(rowForNewTable); // add the row to the set of rows
        }

        String[] colsForNewTableArray = colsForNewTable.toArray(new String[0]);
        String[] colTypesForNewTableArray = colTypesForNewTable.toArray(new String[0]);

        // create JSON to return
        Table table = new Table(colsForNewTableArray, colTypesForNewTableArray, rowsForNewTable);
        JSONObject tableJson = table.toJson();
        return tableJson;
    }