List of usage examples for java.lang.Math.floor
public static double floor(double a)
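Before the project examples, a minimal standalone demonstration of the method's contract: the result is the largest double value that is a mathematical integer and not greater than the argument.

public class MathFloorBasics {
    public static void main(String[] args) {
        System.out.println(Math.floor(3.7));   // 3.0
        System.out.println(Math.floor(-3.7));  // -4.0 (floor moves toward negative infinity)
        System.out.println(Math.floor(5.0));   // 5.0 (already an integer)
        System.out.println(Math.floor(-0.5));  // -0.0 (negative zero, per the Javadoc)
    }
}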
From source file: ucar.unidata.idv.control.McVHistogramWrapper.java
/**
 * Assumes that {@code data} has been validated and is okay to actually try
 * loading.
 *
 * @param data Data to use in histogram. Cannot be {@code null} or all NaNs.
 *
 * @throws VisADException
 * @throws RemoteException
 */
private void reallyLoadData(FlatField data) throws VisADException, RemoteException {
    createChart();
    List dataChoiceWrappers = getDataChoiceWrappers();
    try {
        clearHistogram();
        ErrorEstimate[] errOut = new ErrorEstimate[1];
        for (int paramIdx = 0; paramIdx < dataChoiceWrappers.size(); paramIdx++) {
            DataChoiceWrapper wrapper = (DataChoiceWrapper) dataChoiceWrappers.get(paramIdx);
            DataChoice dataChoice = wrapper.getDataChoice();
            Unit defaultUnit = ucar.visad.Util.getDefaultRangeUnits((FlatField) data)[0];
            Unit unit = ((DisplayControlImpl) imageControl).getDisplayUnit();
            double[][] samples = data.getValues(false);
            double[] actualValues = filterData(samples[0], getTimeValues(samples, data))[0];
            if ((defaultUnit != null) && !defaultUnit.equals(unit)) {
                actualValues = Unit.transformUnits(unit, errOut, defaultUnit, null, actualValues);
            }
            final NumberAxis domainAxis = new NumberAxis(wrapper.getLabel(unit));
            domainAxis.setAutoRangeIncludesZero(false);
            XYItemRenderer renderer;
            if (getStacked()) {
                renderer = new StackedXYBarRenderer();
            } else {
                renderer = new XYBarRenderer();
            }
            if ((plot == null) && (chartPanel != null)) {
                plot = chartPanel.getChart().getXYPlot();
            }
            plot.setRenderer(paramIdx, renderer);
            Color c = wrapper.getColor(paramIdx);
            domainAxis.setLabelPaint(c);
            renderer.setSeriesPaint(0, c);
            MyHistogramDataset dataset = new MyHistogramDataset();
            dataset.setType(HistogramType.FREQUENCY);
            dataset.addSeries(dataChoice.getName() + " [" + unit + ']', actualValues, getBins());
            samples = null;
            actualValues = null;
            plot.setDomainAxis(paramIdx, domainAxis, false);
            plot.mapDatasetToDomainAxis(paramIdx, paramIdx);
            plot.setDataset(paramIdx, dataset);
            domainAxis.addChangeListener(new AxisChangeListener() {
                public void axisChanged(AxisChangeEvent ae) {
                    if (!imageControl.isInitDone()) {
                        return;
                    }
                    Range range = domainAxis.getRange();
                    // Round each axis bound to the nearest integer via the
                    // floor(x + 0.5) round-half-up idiom.
                    double newLow = Math.floor(range.getLowerBound() + 0.5);
                    double newHigh = Math.floor(range.getUpperBound() + 0.5);
                    double prevLow = getLow();
                    double prevHigh = getHigh();
                    try {
                        ucar.unidata.util.Range newRange;
                        if (prevLow > prevHigh) {
                            newRange = new ucar.unidata.util.Range(newHigh, newLow);
                        } else {
                            newRange = new ucar.unidata.util.Range(newLow, newHigh);
                        }
                        ((DisplayControlImpl) imageControl).setRange(newRange);
                    } catch (Exception e) {
                        logger.error("Cannot change range", e);
                    }
                }
            });
            Range range = domainAxis.getRange();
            low = range.getLowerBound();
            high = range.getUpperBound();
        }
    } catch (Exception exc) {
        LogUtil.logException("Error creating data set", exc);
    }
}
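The listener above rounds axis bounds with Math.floor(value + 0.5), the classic round-half-up idiom. A standalone sketch of that idiom next to Math.round, which the Javadoc defines by the same rule (the difference is the return type, double versus long):

public class FloorRoundDemo {
    public static void main(String[] args) {
        double[] values = {2.4, 2.5, -2.5};
        for (double v : values) {
            double viaFloor = Math.floor(v + 0.5); // 2.0, 3.0, -2.0
            long viaRound = Math.round(v);         // 2, 3, -2 (same half-up rule)
            System.out.println(viaFloor + " vs " + viaRound);
        }
    }
}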
From source file: com.aoeng.degu.utils.net.asyncthhpclient.JsonStreamerEntity.java
@Override
public void writeTo(final OutputStream outstream) throws IOException {
    if (outstream == null) {
        throw new IllegalStateException("Output stream cannot be null.");
    }

    // Record the time when uploading started.
    long now = System.currentTimeMillis();

    // Keys used by the HashMaps.
    Set<String> keys;

    // Use GZIP compression when sending streams, otherwise just use
    // a buffered output stream to speed things up a bit.
    OutputStream upload;
    if (null != contentEncoding) {
        upload = new GZIPOutputStream(new BufferedOutputStream(outstream), BUFFER_SIZE);
    } else {
        upload = new BufferedOutputStream(outstream);
    }

    // Always send a JSON object.
    upload.write('{');

    // Send the K/V values.
    keys = kvParams.keySet();
    for (String key : keys) {
        // Write the JSON object's key.
        upload.write(escape(key));
        upload.write(':');

        // Evaluate the value (which cannot be null).
        Object value = kvParams.get(key);
        if (value instanceof Boolean) {
            upload.write((Boolean) value ? JSON_TRUE : JSON_FALSE);
        } else if (value instanceof Long) {
            upload.write((((Number) value).longValue() + "").getBytes());
        } else if (value instanceof Double) {
            upload.write((((Number) value).doubleValue() + "").getBytes());
        } else if (value instanceof Float) {
            upload.write((((Number) value).floatValue() + "").getBytes());
        } else if (value instanceof Integer) {
            upload.write((((Number) value).intValue() + "").getBytes());
        } else {
            upload.write(value.toString().getBytes());
        }
        upload.write(',');
    }

    // Buffer used for reading from input streams.
    byte[] buffer = new byte[BUFFER_SIZE];

    // Send the stream params.
    keys = streamParams.keySet();
    for (String key : keys) {
        RequestParams.StreamWrapper entry = streamParams.get(key);

        // Write the JSON object's key.
        upload.write(escape(key));

        // All uploads are sent as an object containing the file's details.
        upload.write(':');
        upload.write('{');

        // Send the stream's name.
        upload.write(STREAM_NAME);
        upload.write(':');
        upload.write(escape(entry.name));
        upload.write(',');

        // Send the stream's content type.
        upload.write(STREAM_TYPE);
        upload.write(':');
        upload.write(escape(entry.contentType));
        upload.write(',');

        // Prepare the file content's key.
        upload.write(STREAM_CONTENTS);
        upload.write(':');
        upload.write('"');

        // Upload the file's contents in Base64.
        Base64OutputStream outputStream = new Base64OutputStream(upload, Base64.NO_CLOSE | Base64.NO_WRAP);

        // Read from the input stream until no more data is left to read.
        int bytesRead;
        while ((bytesRead = entry.inputStream.read(buffer)) != -1) {
            outputStream.write(buffer, 0, bytesRead);
        }

        // Close the Base64 output stream.
        outputStream.close();

        // End the file's object and prepare for the next one.
        upload.write('"');
        upload.write('}');
        upload.write(',');
    }

    // Include the elapsed time taken to upload everything.
    // This might be useful for somebody, but it serves us well since
    // there will almost always be a ',' as the last sent character.
    upload.write(STREAM_ELAPSED);
    upload.write(':');
    long elapsedTime = System.currentTimeMillis() - now;
    upload.write((elapsedTime + "}").getBytes());

    // Divide by 1000.0, not 1000: with integer division the value is already
    // truncated before Math.floor ever sees it, making the floor a no-op.
    Log.i(LOG_TAG, "Uploaded JSON in " + Math.floor(elapsedTime / 1000.0) + " seconds");

    // Flush the contents up the stream.
    upload.flush();
    upload.close();
}
From source file: com.android.dialer.lookup.google.GoogleForwardLookup.java
/**
 * Generate a number in the range [0, max).
 *
 * @param max Upper limit (non-inclusive)
 * @return Random number inside [0, max)
 */
private int getRandomInteger(int max) {
    return (int) Math.floor(Math.random() * max);
}
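Because Math.random() returns a double in [0.0, 1.0), flooring the scaled value yields an int in [0, max). A minimal sketch contrasting this idiom with java.util.Random#nextInt, which produces the same range without the floating-point round trip:

import java.util.Random;

public class RandomIndexDemo {
    public static void main(String[] args) {
        Random rng = new Random();
        int max = 10;
        int viaFloor = (int) Math.floor(Math.random() * max); // the idiom above
        int viaNextInt = rng.nextInt(max);                    // same range [0, 10)
        System.out.println(viaFloor + " " + viaNextInt);
    }
}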
From source file: com.ibm.bi.dml.runtime.matrix.DataGenMR.java
/**
 * <p>Starts a Rand MapReduce job which will produce one or more random objects.</p>
 *
 * @param inst MR job instruction
 * @param dataGenInstructions data-generation instruction for each random object
 * @param instructionsInMapper instructions to run in the mapper
 * @param aggInstructionsInReducer aggregate instructions to run in the combiner and reducer
 * @param otherInstructionsInReducer instructions to run in the reducer after aggregation
 * @param numReducers number of reducers
 * @param replication file replication factor
 * @param resultIndexes result indexes for each random object
 * @param dimsUnknownFilePrefix file prefix for unknown dimensions
 * @param outputs output file for each random object
 * @param outputInfos output information for each random object
 * @return matrix characteristics for each random object
 * @throws Exception if an error occurred in the MapReduce phase
 */
public static JobReturn runJob(MRJobInstruction inst, String[] dataGenInstructions, String instructionsInMapper,
        String aggInstructionsInReducer, String otherInstructionsInReducer, int numReducers, int replication,
        byte[] resultIndexes, String dimsUnknownFilePrefix, String[] outputs, OutputInfo[] outputInfos)
        throws Exception {
    JobConf job = new JobConf(DataGenMR.class);
    job.setJobName("DataGen-MR");

    // Whether to use block representation or cell representation.
    MRJobConfiguration.setMatrixValueClass(job, true);

    byte[] realIndexes = new byte[dataGenInstructions.length];
    for (byte b = 0; b < realIndexes.length; b++)
        realIndexes[b] = b;

    String[] inputs = new String[dataGenInstructions.length];
    InputInfo[] inputInfos = new InputInfo[dataGenInstructions.length];
    long[] rlens = new long[dataGenInstructions.length];
    long[] clens = new long[dataGenInstructions.length];
    int[] brlens = new int[dataGenInstructions.length];
    int[] bclens = new int[dataGenInstructions.length];

    FileSystem fs = FileSystem.get(job);
    String dataGenInsStr = "";
    int numblocks = 0;
    int maxbrlen = -1, maxbclen = -1;
    double maxsparsity = -1;

    for (int i = 0; i < dataGenInstructions.length; i++) {
        dataGenInsStr = dataGenInsStr + Lop.INSTRUCTION_DELIMITOR + dataGenInstructions[i];

        MRInstruction mrins = MRInstructionParser.parseSingleInstruction(dataGenInstructions[i]);
        MRINSTRUCTION_TYPE mrtype = mrins.getMRInstructionType();
        DataGenMRInstruction genInst = (DataGenMRInstruction) mrins;

        rlens[i] = genInst.getRows();
        clens[i] = genInst.getCols();
        brlens[i] = genInst.getRowsInBlock();
        bclens[i] = genInst.getColsInBlock();

        maxbrlen = Math.max(maxbrlen, brlens[i]);
        maxbclen = Math.max(maxbclen, bclens[i]);

        if (mrtype == MRINSTRUCTION_TYPE.Rand) {
            RandInstruction randInst = (RandInstruction) mrins;
            inputs[i] = genInst.getBaseDir() + "tmp" + _seqRandInput.getNextID() + ".randinput";
            maxsparsity = Math.max(maxsparsity, randInst.getSparsity());

            FSDataOutputStream fsOut = fs.create(new Path(inputs[i]));
            PrintWriter pw = new PrintWriter(fsOut);

            // For object reuse and preventing repeated buffer re-allocations.
            StringBuilder sb = new StringBuilder();

            // Seed generation.
            Well1024a bigrand = LibMatrixDatagen.setupSeedsForRand(randInst.getSeed());
            long[] nnz = LibMatrixDatagen.computeNNZperBlock(rlens[i], clens[i], brlens[i], bclens[i],
                    randInst.getSparsity());
            int nnzIx = 0;
            for (long r = 0; r < rlens[i]; r += brlens[i]) {
                long curBlockRowSize = Math.min(brlens[i], (rlens[i] - r));
                for (long c = 0; c < clens[i]; c += bclens[i]) {
                    long curBlockColSize = Math.min(bclens[i], (clens[i] - c));
                    sb.append((r / brlens[i]) + 1);
                    sb.append(',');
                    sb.append((c / bclens[i]) + 1);
                    sb.append(',');
                    sb.append(curBlockRowSize);
                    sb.append(',');
                    sb.append(curBlockColSize);
                    sb.append(',');
                    sb.append(nnz[nnzIx++]);
                    sb.append(',');
                    sb.append(bigrand.nextLong());
                    pw.println(sb.toString());
                    sb.setLength(0);
                    numblocks++;
                }
            }
            pw.close();
            fsOut.close();
            inputInfos[i] = InputInfo.TextCellInputInfo;
        } else if (mrtype == MRINSTRUCTION_TYPE.Seq) {
            SeqInstruction seqInst = (SeqInstruction) mrins;
            inputs[i] = genInst.getBaseDir() + System.currentTimeMillis() + ".seqinput";
            maxsparsity = 1.0; // always dense

            double from = seqInst.fromValue;
            double to = seqInst.toValue;
            double incr = seqInst.incrValue;

            // Correctness checks on (from, to, incr).
            boolean neg = (from > to);
            if (incr == 0)
                throw new DMLRuntimeException("Invalid value for \"increment\" in seq().");
            if (neg != (incr < 0))
                throw new DMLRuntimeException("Wrong sign for the increment in a call to seq()");

            // Compute the number of rows in the sequence:
            // one row per increment that fits between from and to, plus the start.
            long numrows = 1 + (long) Math.floor((to - from) / incr);
            if (rlens[i] > 0) {
                if (numrows != rlens[i])
                    throw new DMLRuntimeException("Unexpected error while processing sequence instruction. "
                            + "Expected number of rows does not match given number: " + rlens[i] + " != "
                            + numrows);
            } else {
                rlens[i] = numrows;
            }
            if (clens[i] > 0 && clens[i] != 1)
                throw new DMLRuntimeException("Unexpected error while processing sequence instruction. "
                        + "Number of columns (" + clens[i] + ") must be equal to 1.");
            else
                clens[i] = 1;

            FSDataOutputStream fsOut = fs.create(new Path(inputs[i]));
            PrintWriter pw = new PrintWriter(fsOut);
            StringBuilder sb = new StringBuilder();

            double temp = from;
            double block_from, block_to;
            for (long r = 0; r < rlens[i]; r += brlens[i]) {
                long curBlockRowSize = Math.min(brlens[i], (rlens[i] - r));

                // Block (bid_i, bid_j) generates a sequence from the interval
                // [block_from, block_to] (inclusive of both end points).
                long bid_i = ((r / brlens[i]) + 1);
                long bid_j = 1;
                block_from = temp;
                block_to = temp + (curBlockRowSize - 1) * incr;
                temp = block_to + incr; // next block starts from here

                sb.append(bid_i);
                sb.append(',');
                sb.append(bid_j);
                sb.append(',');
                sb.append(block_from);
                sb.append(',');
                sb.append(block_to);
                sb.append(',');
                sb.append(incr);
                pw.println(sb.toString());
                sb.setLength(0);
                numblocks++;
            }
            pw.close();
            fsOut.close();
            inputInfos[i] = InputInfo.TextCellInputInfo;
        } else {
            throw new DMLRuntimeException("Unexpected Data Generation Instruction Type: " + mrtype);
        }
    }
    dataGenInsStr = dataGenInsStr.substring(1); // remove the leading delimiter

    RunningJob runjob;
    MatrixCharacteristics[] stats;
    try {
        // Set up the block sizes.
        MRJobConfiguration.setBlocksSizes(job, realIndexes, brlens, bclens);

        // Set up the input files and their format information.
        MRJobConfiguration.setUpMultipleInputs(job, realIndexes, inputs, inputInfos, brlens, bclens, false,
                ConvertTarget.BLOCK);

        // Set up the dimensions of the input matrices.
        MRJobConfiguration.setMatricesDimensions(job, realIndexes, rlens, clens);
        MRJobConfiguration.setDimsUnknownFilePrefix(job, dimsUnknownFilePrefix);

        // Set up the rand instructions.
        MRJobConfiguration.setRandInstructions(job, dataGenInsStr);

        // Set up unary instructions that will run in the mapper.
        MRJobConfiguration.setInstructionsInMapper(job, instructionsInMapper);

        // Set up the aggregate instructions that will run in the combiner and reducer.
        MRJobConfiguration.setAggregateInstructions(job, aggInstructionsInReducer);

        // Set up the instructions that will run in the reducer, after the aggregation instructions.
        MRJobConfiguration.setInstructionsInReducer(job, otherInstructionsInReducer);

        // Set up the replication factor for the results.
        job.setInt("dfs.replication", replication);

        // Set up map/reduce memory configurations (if in AM context).
        DMLConfig config = ConfigurationManager.getConfig();
        DMLAppMasterUtils.setupMRJobRemoteMaxMemory(job, config);

        // Determine the degree of parallelism (nmappers: 1 <= n <= capacity).
        // TODO use maxsparsity whenever we have a way of generating sparse rand data
        int capacity = InfrastructureAnalyzer.getRemoteParallelMapTasks();
        long dfsblocksize = InfrastructureAnalyzer.getHDFSBlockSize();

        // Correct the max number of mappers on YARN clusters.
        if (InfrastructureAnalyzer.isYarnEnabled())
            capacity = (int) Math.max(capacity, YarnClusterAnalyzer.getNumCores());
        int nmappers = Math
                .max(Math.min((int) (8 * maxbrlen * maxbclen * (long) numblocks / dfsblocksize), capacity), 1);
        job.setNumMapTasks(nmappers);

        // Set up which matrices need to pass from the mapper to the reducer.
        HashSet<Byte> mapoutputIndexes = MRJobConfiguration.setUpOutputIndexesForMapper(job, realIndexes,
                dataGenInsStr, instructionsInMapper, null, aggInstructionsInReducer, otherInstructionsInReducer,
                resultIndexes);

        MatrixChar_N_ReducerGroups ret = MRJobConfiguration.computeMatrixCharacteristics(job, realIndexes,
                dataGenInsStr, instructionsInMapper, null, aggInstructionsInReducer, null,
                otherInstructionsInReducer, resultIndexes, mapoutputIndexes, false);
        stats = ret.stats;

        // Set up the number of reducers.
        MRJobConfiguration.setNumReducers(job, ret.numReducerGroups, numReducers);

        // Print the complete MRJob instruction.
        if (LOG.isTraceEnabled())
            inst.printCompleteMRJobInstruction(stats);

        // Update resultDimsUnknown based on the computed stats.
        byte[] resultDimsUnknown = new byte[resultIndexes.length];
        for (int i = 0; i < resultIndexes.length; i++) {
            if (stats[i].getRows() == -1 || stats[i].getCols() == -1) {
                resultDimsUnknown[i] = (byte) 1;
            } else {
                resultDimsUnknown[i] = (byte) 0;
            }
        }

        boolean mayContainCtable = instructionsInMapper.contains("ctabletransform")
                || instructionsInMapper.contains("groupedagg");

        // Set up the multiple output files and their format information.
        MRJobConfiguration.setUpMultipleOutputs(job, resultIndexes, resultDimsUnknown, outputs, outputInfos,
                true, mayContainCtable);

        // Configure the mapper and the mapper output key/value pairs.
        job.setMapperClass(DataGenMapper.class);
        if (numReducers == 0) {
            job.setMapOutputKeyClass(Writable.class);
            job.setMapOutputValueClass(Writable.class);
        } else {
            job.setMapOutputKeyClass(MatrixIndexes.class);
            job.setMapOutputValueClass(TaggedMatrixBlock.class);
        }

        // Set up the combiner.
        if (numReducers != 0 && aggInstructionsInReducer != null && !aggInstructionsInReducer.isEmpty())
            job.setCombinerClass(GMRCombiner.class);

        // Configure the reducer.
        job.setReducerClass(GMRReducer.class);

        // By default, the job executes in "cluster" mode.
        // Determine if we can optimize and run it in "local" mode.
        MatrixCharacteristics[] inputStats = new MatrixCharacteristics[inputs.length];
        for (int i = 0; i < inputs.length; i++) {
            inputStats[i] = new MatrixCharacteristics(rlens[i], clens[i], brlens[i], bclens[i]);
        }

        // Set a unique working directory.
        MRJobConfiguration.setUniqueWorkingDir(job);

        runjob = JobClient.runJob(job);

        // Process the counters: number of non-zeros per result.
        Group group = runjob.getCounters().getGroup(MRJobConfiguration.NUM_NONZERO_CELLS);
        for (int i = 0; i < resultIndexes.length; i++) {
            stats[i].setNonZeros(group.getCounter(Integer.toString(i)));
        }

        String dir = dimsUnknownFilePrefix + "/" + runjob.getID().toString() + "_dimsFile";
        stats = MapReduceTool.processDimsFiles(dir, stats);
        MapReduceTool.deleteFileIfExistOnHDFS(dir);
    } finally {
        for (String input : inputs)
            MapReduceTool.deleteFileIfExistOnHDFS(new Path(input), job);
    }

    return new JobReturn(stats, outputInfos, runjob.isSuccessful());
}
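The seq() branch above uses floor to count how many increments fit between from and to: numrows = 1 + floor((to - from) / incr). A quick worked check with illustrative values:

public class SeqRowCountDemo {
    public static void main(String[] args) {
        double from = 1.0, to = 10.0, incr = 2.5;
        // (10 - 1) / 2.5 = 3.6 -> floor = 3 -> 4 rows: 1.0, 3.5, 6.0, 8.5
        long numrows = 1 + (long) Math.floor((to - from) / incr);
        System.out.println(numrows); // prints 4
    }
}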
From source file: iDynoOptimizer.MOEAFramework26.src.org.moeaframework.util.tree.NumberArithmetic.java
/**
 * Returns the largest integer value less than or equal to the given
 * number.
 *
 * @param a the number
 * @return the largest integer value less than or equal to the given
 *         number
 * @see Math#floor(double)
 */
public static Number floor(Number a) {
    if (isFloatingPoint(a)) {
        return Math.floor(a.doubleValue());
    } else {
        return a.longValue();
    }
}
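A minimal sketch of the dispatch pattern above. The isFloatingPoint helper here is a stand-in for the class's own check, reimplemented only so the example is self-contained:

public class NumberFloorDemo {
    // Stand-in for NumberArithmetic's own type check (assumption, not its code).
    static boolean isFloatingPoint(Number a) {
        return (a instanceof Double) || (a instanceof Float);
    }

    static Number floor(Number a) {
        // Floating-point values get floored; integral values are already
        // whole, so they pass through unchanged as longs.
        return isFloatingPoint(a) ? Math.floor(a.doubleValue()) : a.longValue();
    }

    public static void main(String[] args) {
        System.out.println(floor(2.7));  // 2.0
        System.out.println(floor(-2.7)); // -3.0
        System.out.println(floor(5));    // 5
    }
}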
From source file: eu.cloud4soa.frontend.commons.server.services.soa.MonitoringServiceImpl.java
private <T> List<T> resamplingStatistics(List<T> list, int monitoringMaxNumDisplayableValues) {
    if (list.size() <= monitoringMaxNumDisplayableValues)
        return list; // Resampling not required

    List<T> result = new ArrayList<T>();
    float delta = (float) list.size() / (float) monitoringMaxNumDisplayableValues;
    float i = 0;
    while (i < list.size()) {
        // Math.floor selects the element at (or just before) the sampling
        // position. The Math.round wrapped around it in the original was
        // redundant: floor already yields a whole number.
        result.add(list.get((int) Math.floor(i)));
        i += delta;
    }
    return result;
}
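An illustrative run of that stride: resampling 10 values down to 4 gives delta = 2.5, so the kept indices are floor(0), floor(2.5), floor(5.0), floor(7.5) = 0, 2, 5, 7.

import java.util.Arrays;
import java.util.List;

public class ResampleDemo {
    public static void main(String[] args) {
        List<Integer> data = Arrays.asList(10, 11, 12, 13, 14, 15, 16, 17, 18, 19);
        float delta = (float) data.size() / 4f; // 2.5
        for (float i = 0; i < data.size(); i += delta) {
            System.out.println(data.get((int) Math.floor(i))); // 10, 12, 15, 17
        }
    }
}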
From source file: ffx.numerics.fft.Real3DCuda.java
/**
 * {@inheritDoc}
 */
@Override
public void run() {
    JCudaDriver.setExceptionsEnabled(true);
    JCudaDriver.setLogLevel(LogLevel.LOG_ERROR);
    JCufft.setExceptionsEnabled(true);
    JCufft.setLogLevel(LogLevel.LOG_ERROR);

    // Initialize the driver and create a context for the first device.
    // The device must be obtained before its properties can be queried.
    cuInit(0);
    CUcontext pctx = new CUcontext();
    CUdevice dev = new CUdevice();
    CUdevprop prop = new CUdevprop();
    cuDeviceGet(dev, 0);
    cuDeviceGetProperties(prop, dev);
    logger.info(" CUDA " + prop.toFormattedString());
    cuCtxCreate(pctx, 0, dev);

    // Load the CUBIN file and obtain the "recipSummation" function.
    try {
        String bit = System.getProperty("sun.arch.data.model").trim();
        URL source = getClass().getClassLoader()
                .getResource("ffx/numerics/fft/recipSummation-" + bit + ".cubin");
        File cubinFile = File.createTempFile("recipSummation", "cubin");
        FileUtils.copyURLToFile(source, cubinFile);
        module = new CUmodule();
        cuModuleLoad(module, cubinFile.getCanonicalPath());
        function = new CUfunction();
        cuModuleGetFunction(function, module, "recipSummation");
    } catch (Exception e) {
        String message = "Error loading the reciprocal summation kernel";
        logger.log(Level.SEVERE, message, e);
    }

    // Copy the data array to the device.
    dataDevice = new CUdeviceptr();
    cuMemAlloc(dataDevice, len * Sizeof.FLOAT);
    dataPtr = Pointer.to(data);
    cuMemcpyHtoD(dataDevice, dataPtr, len * Sizeof.FLOAT);

    // Copy the recip array to the device.
    recipDevice = new CUdeviceptr();
    cuMemAlloc(recipDevice, len * Sizeof.FLOAT);
    recipPtr = Pointer.to(recip);
    cuMemcpyHtoD(recipDevice, recipPtr, len * Sizeof.FLOAT);

    // Create a Real-to-Complex CUFFT plan.
    planR2C = new cufftHandle();
    cufftPlan3d(planR2C, nX, nY, nZ, cufftType.CUFFT_R2C);
    cufftSetCompatibilityMode(planR2C, cufftCompatibility.CUFFT_COMPATIBILITY_FFTW_ALL);

    // Create a Complex-to-Real CUFFT plan.
    planC2R = new cufftHandle();
    cufftPlan3d(planC2R, nX, nY, nZ, cufftType.CUFFT_C2R);
    cufftSetCompatibilityMode(planC2R, cufftCompatibility.CUFFT_COMPATIBILITY_FFTW_ALL);

    dataDevicePtr = Pointer.to(dataDevice);
    recipDevicePtr = Pointer.to(recipDevice);

    int threads = 512;
    int nBlocks = len / threads + (len % threads == 0 ? 0 : 1);
    // Arrange the blocks in a square grid large enough to cover nBlocks.
    int gridSize = (int) Math.floor(Math.sqrt(nBlocks)) + 1;
    logger.info(format(" CUDA thread initialized with %d threads per block", threads));
    logger.info(format(" Grid Size: (%d x %d x 1).", gridSize, gridSize));
    assert (gridSize * gridSize * threads >= len);

    synchronized (this) {
        while (!free) {
            if (doConvolution) {
                cuMemcpyHtoD(dataDevice, dataPtr, len * Sizeof.FLOAT);
                int ret = cufftExecR2C(planR2C, dataDevice, dataDevice);
                if (ret != cufftResult.CUFFT_SUCCESS) {
                    logger.warning("R2C Result " + cufftResult.stringFor(ret));
                }

                // Set up the execution parameters for the kernel.
                cuFuncSetBlockShape(function, threads, 1, 1);
                int offset = 0;
                offset = align(offset, Sizeof.POINTER);
                cuParamSetv(function, offset, dataDevicePtr, Sizeof.POINTER);
                offset += Sizeof.POINTER;
                offset = align(offset, Sizeof.POINTER);
                cuParamSetv(function, offset, recipDevicePtr, Sizeof.POINTER);
                offset += Sizeof.POINTER;
                offset = align(offset, Sizeof.INT);
                cuParamSeti(function, offset, len / 2);
                offset += Sizeof.INT;
                cuParamSetSize(function, offset);

                // Call the kernel function.
                cuLaunchGrid(function, gridSize, gridSize);

                ret = cufftExecC2R(planC2R, dataDevice, dataDevice);
                if (ret != cufftResult.CUFFT_SUCCESS) {
                    logger.warning("C2R Result " + cufftResult.stringFor(ret));
                }
                ret = cuMemcpyDtoH(dataPtr, dataDevice, len * Sizeof.FLOAT);
                doConvolution = false;
                notify();
            }
            try {
                wait();
            } catch (InterruptedException e) {
                logger.severe(e.toString());
            }
        }
        cufftDestroy(planR2C);
        cufftDestroy(planC2R);
        cuMemFree(dataDevice);
        cuMemFree(recipDevice);
        dead = true;
        notify();
    }
    logger.info(" CUDA Thread Done!");
}
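The gridSize computation above uses floor(sqrt(n)) + 1 to pick a square launch grid with at least n cells. A quick sketch of the bound (it always covers n, at the cost of slight over-allocation, especially when n is a perfect square):

public class GridSizeDemo {
    public static void main(String[] args) {
        for (int nBlocks : new int[] {1, 10, 16, 100}) {
            int gridSize = (int) Math.floor(Math.sqrt(nBlocks)) + 1;
            System.out.println(nBlocks + " blocks -> " + gridSize + " x " + gridSize
                    + " = " + (gridSize * gridSize) + " cells");
        }
    }
}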
From source file: mavn.network.view.JUNGPanelAdapter.java
public void drawVerticies() {
    Iterator iterateVerticies = verticies.iterator();
    int count = 0;
    int x = 50;
    int y = 50;
    while (iterateVerticies.hasNext()) {
        Number v = (Number) iterateVerticies.next();
        y += 50;
        if (count == inputX) {
            y = (int) (Math.floor((50 * w2[0].length / 2)) - 50);
        }
        if (count == inputY) {
            y = (int) (50 + Math.floor((50 * w2[0].length / 2)));
        }
        if (count == andMin) {
            x += 150;
            y = 50;
        }
        if (count == orMin) {
            x += 150;
            y = (int) (Math.floor((50 * w2[0].length / 2)) - 75);
        }
        if (count == output) {
            x += 150;
            y = (int) (Math.floor((50 * w2[0].length / 2)));
        }
        layout.setLocation(v, x, y);
        layout.lock(v, true);
        count++;
    }
}
From source file: com.mobileman.projecth.web.model.chart.BarChartData.java
private List<Object[]> getAllAnswers(Long questionId, Map<Long, List<Object[]>> answersCache) {
    if (answersCache.containsKey(questionId)) {
        return answersCache.get(questionId);
    }

    List<Object[]> result = new ArrayList<Object[]>();
    Question question = Services.getQuestionService().findById(questionId);
    answersCache.put(questionId, result);

    List<Answer> answers = new ArrayList<Answer>(question.getQuestionType().getAnswers());
    Collections.sort(answers, new Comparator<Answer>() {
        @Override
        public int compare(Answer o1, Answer o2) {
            return o1.getSortOrder() - o2.getSortOrder();
        }
    });

    for (Answer a : answers) {
        if (!Answer.Kind.NO_ANSWER.equals(a.getKind())) {
            if (Answer.Kind.SCALE.equals(a.getKind())) {
                ScaleAnswer sa = (ScaleAnswer) a;
                double step = sa.getStep().doubleValue();
                double start = sa.getMinValue().doubleValue();
                double end = sa.getMaxValue().doubleValue();
                if ((end - start) / step > 10) {
                    step = 10;
                }
                for (double d = start; d <= end; d += step) {
                    Object[] rc = new Object[3]; // 0 - name; 1 - range
                    Object[] range = new Object[2];
                    range[0] = d;
                    range[1] = d + step;
                    rc[1] = range;
                    // Math.floor(d) == d tests whether d is a whole number,
                    // so integer bounds are labeled without a decimal point.
                    if (Math.floor(d) == d) {
                        rc[0] = "" + (int) (d);
                    } else {
                        rc[0] = "" + d;
                    }
                    result.add(rc);
                }
            } else {
                Object[] rc = new Object[3]; // 0 - name; 1 - range; 2 - explanation
                rc[0] = a.getAnswer();
                rc[2] = a.getExplanation();
                result.add(rc);
            }
        }
    }
    return result;
}
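The Math.floor(d) == d comparison above is a compact whole-number test for finite doubles. A standalone sketch of the label-formatting decision it drives:

public class WholeNumberLabelDemo {
    static String label(double d) {
        // Whole values are printed as ints, fractional values as doubles.
        return (Math.floor(d) == d) ? "" + (int) d : "" + d;
    }

    public static void main(String[] args) {
        System.out.println(label(4.0)); // "4"
        System.out.println(label(4.5)); // "4.5"
    }
}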