List of usage examples for org.apache.commons.beanutils.ConvertUtils.convert
public static Object convert(Object value, Class<?> targetType)
Converts the given value to an object of the specified target type (if possible). All of the examples below use this overload, typically to convert a primitive array passed in as a Serializable into an array of a different element type such as float[], double[], int[] or long[].
For more details see ConvertUtilsBean.
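Before the project-specific listings, here is a minimal standalone sketch of how this overload is typically called. The class name ConvertUtilsExample and the sample values are illustrative only, not taken from the sources below; the calls rely on the converters that Commons BeanUtils registers by default.

import org.apache.commons.beanutils.ConvertUtils;

public class ConvertUtilsExample {
    public static void main(String[] args) {
        // Single value: the default Integer converter parses the String
        Integer answer = (Integer) ConvertUtils.convert("42", Integer.class);

        // Array to array: the default array converter converts each element,
        // the same pattern used below to turn Serializable buffers into float[] or double[]
        double[] source = { 1.0, 2.5, 3.75 };
        float[] converted = (float[]) ConvertUtils.convert(source, float[].class);

        System.out.println(answer);           // 42
        System.out.println(converted.length); // 3
    }
}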
From source file:uk.ac.diamond.scisoft.ncd.core.DetectorResponse.java
public Object[] process(Serializable buffer, Serializable error, int frames, final int[] dimensions) {
    float[] parentdata = (float[]) ConvertUtils.convert(buffer, float[].class);
    double[] parenterror = (double[]) ConvertUtils.convert(error, double[].class);

    float[] mydata = new float[parentdata.length];
    double[] myerror = new double[parenterror.length];
    float[] responseBuffer = response.getData();

    int dataLength = 1;
    for (int i = 1; i < dimensions.length; i++) {
        if (dimensions[i] != response.getShape()[i - 1]) {
            logger.error("detector response dataset and image dimensions do not match");
        }
        dataLength *= dimensions[i];
    }

    for (int i = 0; i < frames; i++) {
        for (int j = 0; j < dataLength; j++) {
            mydata[i * dataLength + j] = responseBuffer[j] * parentdata[i * dataLength + j];
            myerror[i * dataLength + j] = responseBuffer[j] * responseBuffer[j]
                    * parenterror[i * dataLength + j];
        }
    }

    return new Object[] { mydata, myerror };
}
From source file:uk.ac.diamond.scisoft.ncd.core.Invariant.java
public Object[] process(Serializable buffer, Serializable errors, final int[] dimensions) {
    float[] parentdata = (float[]) ConvertUtils.convert(buffer, float[].class);
    double[] parenterrors = (double[]) ConvertUtils.convert(errors, double[].class);

    // first dim is timeframe
    float[] mydata = new float[dimensions[0]];
    double[] myerrors = new double[dimensions[0]];

    int[] imagedim = Arrays.copyOfRange(dimensions, 1, dimensions.length);
    int imagesize = 1;
    for (int n : imagedim) {
        imagesize *= n;
    }

    for (int i = 0; i < parentdata.length; i++) {
        mydata[i / imagesize] += parentdata[i];
        myerrors[i / imagesize] += parenterrors[i];
    }

    return new Object[] { mydata, myerrors };
}
From source file:uk.ac.diamond.scisoft.ncd.core.Normalisation.java
public Object[] process(Serializable buffer, Serializable errors, Serializable cbuffer, int frames,
        final int[] dimensions, final int[] cdimensions) {
    float[] parentdata = (float[]) ConvertUtils.convert(buffer, float[].class);
    double[] parenterrors = (double[]) ConvertUtils.convert(errors, double[].class);
    float[] calibdata = (float[]) cbuffer;

    float[] mydata = new float[parentdata.length];
    double[] myerrors = new double[parenterrors.length];

    int calibTotalChannels = cdimensions[1];
    int parentDataLength = 1;
    for (int i = 1; i < dimensions.length; i++) {
        parentDataLength *= dimensions[i];
    }

    for (int i = 0; i < frames; i++) {
        float calReading = calibdata[i * calibTotalChannels + calibChannel];
        if (calReading == 0) {
            calReading = 1; // TODO better idea?
        }
        for (int j = i * parentDataLength; j < (i + 1) * parentDataLength; j++) {
            mydata[j] = (float) ((normvalue / calReading) * parentdata[j]);
            myerrors[j] = (normvalue / calReading) * (normvalue / calReading) * parenterrors[j];
        }
    }

    return new Object[] { mydata, myerrors };
}
From source file:uk.ac.diamond.scisoft.ncd.core.SaxsInvariant.java
public Object[] process(Serializable buffer, Serializable errors, Serializable axis, final int[] dimensions) {
    double[] parentaxis = (double[]) ConvertUtils.convert(axis, double[].class);
    float[] parentdata = (float[]) ConvertUtils.convert(buffer, float[].class);
    double[] parenterrors = (double[]) ConvertUtils.convert(errors, double[].class);

    int shift = (parentaxis[0] > 0 ? 1 : 0);
    int size = dimensions[dimensions.length - 1] + shift;
    double[] myaxis = new double[size];
    double[] mydata = new double[size];
    double[] myerrors = new double[size];

    if (shift > 0) {
        myaxis[0] = 0.0;
        mydata[0] = 0.0;
        myerrors[0] = 0.0;
    }

    for (int i = 0; i < parentaxis.length; i++) {
        myaxis[i + shift] = parentaxis[i];
        mydata[i + shift] = parentdata[i] * parentaxis[i] * parentaxis[i];
        myerrors[i + shift] = parenterrors[i] * Math.pow(parentaxis[i], 4);
    }

    UnivariateInterpolator interpolator = new SplineInterpolator();
    UnivariateFunction function = interpolator.interpolate(myaxis, mydata);

    UnivariateIntegrator integrator = new IterativeLegendreGaussIntegrator(15,
            BaseAbstractUnivariateIntegrator.DEFAULT_RELATIVE_ACCURACY,
            BaseAbstractUnivariateIntegrator.DEFAULT_ABSOLUTE_ACCURACY);

    try {
        float result = (float) integrator.integrate(INTEGRATION_POINTS, function, 0.0,
                myaxis[myaxis.length - 1]);

        IDataset data = new FloatDataset(parentdata, dimensions);
        IDataset qaxis = new DoubleDataset(parentaxis, dimensions);
        PorodPlotData porodPlotData = (PorodPlotData) SaxsAnalysisPlotType.POROD_PLOT.getSaxsPlotDataObject();
        SimpleRegression regression = porodPlotData.getPorodPlotParameters(data.squeeze(), qaxis.squeeze());
        Amount<Dimensionless> c4 = porodPlotData.getC4(regression);
        result += (float) (c4.getEstimatedValue() / myaxis[myaxis.length - 1]);

        double error = 0.0;
        for (int i = 0; i < myaxis.length; i++) {
            int idx1 = Math.max(0, i - 1);
            int idx2 = Math.min(myaxis.length - 1, i + 1);
            error += Math.pow((myaxis[idx2] - myaxis[idx1]), 2) * myerrors[i] / 4.0;
        }
        error += Math.pow(c4.getAbsoluteError() / myaxis[myaxis.length - 1], 2);

        return new Object[] { new float[] { result }, new double[] { error } };
    } catch (TooManyEvaluationsException e) {
        return new Object[] { new float[] { Float.NaN }, new double[] { Double.NaN } };
    } catch (MaxCountExceededException e) {
        return new Object[] { new float[] { Float.NaN }, new double[] { Double.NaN } };
    }
}
From source file:uk.ac.diamond.scisoft.ncd.core.utils.NcdNexusUtils.java
public static Dataset sliceInputData(SliceSettings sliceData, DataSliceIdentifiers ids) throws HDF5Exception {
    long[] frames = sliceData.getFrames();
    long[] start_pos = (long[]) ConvertUtils.convert(sliceData.getStart(), long[].class);
    int sliceDim = sliceData.getSliceDim();
    int sliceSize = sliceData.getSliceSize();

    long[] start_data = Arrays.copyOf(start_pos, frames.length);

    long[] block_data = Arrays.copyOf(frames, frames.length);
    Arrays.fill(block_data, 0, sliceData.getSliceDim(), 1);
    block_data[sliceDim] = Math.min(frames[sliceDim] - start_pos[sliceDim], sliceSize);

    long[] count_data = new long[frames.length];
    Arrays.fill(count_data, 1);

    ids.setSlice(start_data, block_data, count_data, block_data);

    Dataset data;
    int memspace_id = -1;
    try {
        int select_id = H5.H5Sselect_hyperslab(ids.dataspace_id, HDF5Constants.H5S_SELECT_SET,
                ids.start, ids.stride, ids.count, ids.block);
        if (select_id < 0) {
            throw new HDF5Exception("H5 select hyperslab error: can't allocate memory to read data");
        }
        int rank = block_data.length;
        int dtype = HDF5Utils.getDtype(ids.dataclass_id, ids.datasize_id);
        int[] block_data_int = (int[]) ConvertUtils.convert(ids.block, int[].class);
        data = DatasetFactory.zeros(block_data_int, dtype);
        memspace_id = H5.H5Screate_simple(rank, ids.block, null);
        // Read the data using the previously defined hyperslab.
        if ((ids.dataset_id > 0) && (ids.dataspace_id > 0) && (memspace_id > 0)) {
            int read_id = H5.H5Dread(ids.dataset_id, ids.datatype_id, memspace_id, ids.dataspace_id,
                    HDF5Constants.H5P_DEFAULT, data.getBuffer());
            if (read_id < 0) {
                throw new HDF5Exception("H5 data read error: can't read input dataset");
            }
        }
    } finally {
        closeH5id(memspace_id);
    }
    return data;
}
From source file:uk.ac.diamond.scisoft.ncd.data.plots.SaxsPlotData.java
public void execute(int entry_group_id, DataSliceIdentifiers input_ids, DataSliceIdentifiers input_errors_ids)
        throws HDF5Exception {
    long[] frames = NcdNexusUtils.getIdsDatasetShape(input_ids.dataspace_id);
    int[] frames_int = (int[]) ConvertUtils.convert(frames, int[].class);
    int group_id = NcdNexusUtils.makegroup(entry_group_id, detector + "_" + groupName, Nexus.DATA);

    int type = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_FLOAT);
    int data_id = NcdNexusUtils.makedata(group_id, "data", type, frames, true, "a.u.");
    H5.H5Tclose(type);
    type = H5.H5Tcopy(HDF5Constants.H5T_NATIVE_DOUBLE);
    int errors_id = NcdNexusUtils.makedata(group_id, "errors", type, frames, true, "a.u.");
    H5.H5Tclose(type);

    SliceSettings sliceSettings = new SliceSettings(frames, frames.length - 2, 1);
    int[] step = new int[frames_int.length];
    Arrays.fill(step, 1);
    step[step.length - 1] = frames_int[frames_int.length - 1];
    SliceND slice = new SliceND(frames_int, null, frames_int, step);
    IndexIterator iter = new SliceIterator(frames_int, AbstractDataset.calcSize(frames_int), slice);
    int[] pos = iter.getPos();
    while (iter.hasNext()) {
        sliceSettings.setStart(pos);
        Dataset data_slice = NcdNexusUtils.sliceInputData(sliceSettings, input_ids).squeeze();
        Dataset errors_slice = NcdNexusUtils.sliceInputData(sliceSettings, input_errors_ids).squeeze();
        data_slice.setError(errors_slice);
        Dataset tmpFrame = getSaxsPlotDataset(data_slice, qaxis);

        int filespace_id = H5.H5Dget_space(data_id);
        int type_id = H5.H5Dget_type(data_id);
        long[] ave_start = (long[]) ConvertUtils.convert(pos, long[].class);
        long[] ave_step = (long[]) ConvertUtils.convert(step, long[].class);
        long[] ave_count_data = new long[frames.length];
        Arrays.fill(ave_count_data, 1);
        int memspace_id = H5.H5Screate_simple(ave_step.length, ave_step, null);
        H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, ave_start, ave_step,
                ave_count_data, ave_step);
        H5.H5Dwrite(data_id, type_id, memspace_id, filespace_id, HDF5Constants.H5P_DEFAULT,
                tmpFrame.getBuffer());
        H5.H5Sclose(filespace_id);
        H5.H5Sclose(memspace_id);
        H5.H5Tclose(type_id);

        if (tmpFrame.hasErrors()) {
            filespace_id = H5.H5Dget_space(errors_id);
            type_id = H5.H5Dget_type(errors_id);
            memspace_id = H5.H5Screate_simple(ave_step.length, ave_step, null);
            H5.H5Sselect_hyperslab(filespace_id, HDF5Constants.H5S_SELECT_SET, ave_start, ave_step,
                    ave_count_data, ave_step);
            H5.H5Dwrite(errors_id, type_id, memspace_id, filespace_id, HDF5Constants.H5P_DEFAULT,
                    tmpFrame.getError().getBuffer());
            H5.H5Sclose(filespace_id);
            H5.H5Sclose(memspace_id);
            H5.H5Tclose(type_id);
        }
    }

    // add long_name attribute
    {
        int attrspace_id = H5.H5Screate_simple(1, new long[] { 1 }, null);
        int attrtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
        H5.H5Tset_size(attrtype_id, dataName.getBytes().length);
        int attr_id = H5.H5Acreate(data_id, "long_name", attrtype_id, attrspace_id, HDF5Constants.H5P_DEFAULT,
                HDF5Constants.H5P_DEFAULT);
        if (attr_id < 0) {
            throw new HDF5Exception("H5 putattr write error: can't create attribute");
        }
        int write_id = H5.H5Awrite(attr_id, attrtype_id, dataName.getBytes());
        if (write_id < 0) {
            throw new HDF5Exception("H5 makegroup attribute write error: can't create signal attribute");
        }
        H5.H5Aclose(attr_id);
        H5.H5Sclose(attrspace_id);
        H5.H5Tclose(attrtype_id);
    }

    writeAxisData(group_id);

    H5.H5Dclose(data_id);
    H5.H5Dclose(errors_id);
    H5.H5Gclose(group_id);
}
From source file:uk.ac.diamond.scisoft.ncd.data.plots.SaxsPlotData.java
private void writeAxisData(int group_id) throws HDF5LibraryException, NullPointerException, HDF5Exception {
    long[] axisShape = (long[]) ConvertUtils.convert(qaxis.getShape(), long[].class);

    Dataset qaxisNew = getSaxsPlotAxis(qaxis);

    UnitFormat unitFormat = UnitFormat.getUCUMInstance();
    String units = unitFormat.format(qaxisUnit);
    int qaxis_id = NcdNexusUtils.makeaxis(group_id, "variable", HDF5Constants.H5T_NATIVE_FLOAT, axisShape,
            new int[] { qaxisNew.getRank() }, 1, units);

    int filespace_id = H5.H5Dget_space(qaxis_id);
    int type_id = H5.H5Dget_type(qaxis_id);
    int memspace_id = H5.H5Screate_simple(qaxisNew.getRank(), axisShape, null);
    H5.H5Sselect_all(filespace_id);
    H5.H5Dwrite(qaxis_id, type_id, memspace_id, filespace_id, HDF5Constants.H5P_DEFAULT, qaxisNew.getBuffer());

    // add long_name attribute
    {
        int attrspace_id = H5.H5Screate_simple(1, new long[] { 1 }, null);
        int attrtype_id = H5.H5Tcopy(HDF5Constants.H5T_C_S1);
        H5.H5Tset_size(attrtype_id, variableName.getBytes().length);
        int attr_id = H5.H5Acreate(qaxis_id, "long_name", attrtype_id, attrspace_id, HDF5Constants.H5P_DEFAULT,
                HDF5Constants.H5P_DEFAULT);
        if (attr_id < 0) {
            throw new HDF5Exception("H5 putattr write error: can't create attribute");
        }
        int write_id = H5.H5Awrite(attr_id, attrtype_id, variableName.getBytes());
        if (write_id < 0) {
            throw new HDF5Exception("H5 makegroup attribute write error: can't create signal attribute");
        }
        H5.H5Aclose(attr_id);
        H5.H5Sclose(attrspace_id);
        H5.H5Tclose(attrtype_id);
    }

    H5.H5Sclose(filespace_id);
    H5.H5Sclose(memspace_id);
    H5.H5Tclose(type_id);
    H5.H5Dclose(qaxis_id);

    if (qaxisNew.hasErrors()) {
        int qaxis_error_id = NcdNexusUtils.makedata(group_id, "variable_errors",
                HDF5Constants.H5T_NATIVE_DOUBLE, axisShape, false, units);

        filespace_id = H5.H5Dget_space(qaxis_error_id);
        type_id = H5.H5Dget_type(qaxis_error_id);
        memspace_id = H5.H5Screate_simple(qaxisNew.getRank(), axisShape, null);
        H5.H5Sselect_all(filespace_id);
        H5.H5Dwrite(qaxis_error_id, type_id, memspace_id, filespace_id, HDF5Constants.H5P_DEFAULT,
                qaxisNew.getError().getBuffer());

        H5.H5Sclose(filespace_id);
        H5.H5Sclose(memspace_id);
        H5.H5Tclose(type_id);
        H5.H5Dclose(qaxis_error_id);
    }
}
From source file:uk.ac.diamond.scisoft.ncd.passerelle.actors.core.NcdProcessingObjectTransformer.java
private int[] getResultDataShape() {
    selectedShape = (int[]) ConvertUtils.convert(Arrays.copyOf(frames, frames.length - dimension), int[].class);
    indexList = NcdDataUtils.createSliceList(format, (int[]) ConvertUtils.convert(selectedShape, int[].class));
    for (int i = 0; i < selectedShape.length; i++) {
        selectedShape[i] = indexList.get(i).length;
    }
    int[] imageSize = (int[]) ConvertUtils
            .convert(Arrays.copyOfRange(frames, frames.length - dimension, frames.length), int[].class);
    int[] resultDataShape = ArrayUtils.addAll(selectedShape, imageSize);
    return resultDataShape;
}
From source file:uk.ac.diamond.scisoft.ncd.passerelle.actors.forkjoin.NcdAverageForkJoinTransformer.java
@Override
protected void configureActorParameters() throws HDF5Exception {
    super.configureActorParameters();

    sliceDim = 0;
    sliceSize = (int) frames[0];
    int frameBatch = 1;

    // We will slice only 2D data. 1D data is loaded into memory completely
    if (averageIndices.length > 0 || dimension == 2) {
        // Find dimension that needs to be sliced
        int dimCounter = 1;
        for (int idx = (frames.length - 1 - dimension); idx >= 0; idx--) {
            if (ArrayUtils.contains(averageIndices, idx + 1)) {
                sliceDim = idx;
                sliceSize = (int) frames[idx];
                dimCounter *= frames[idx];
                if (dimCounter >= frameBatch) {
                    sliceSize = (int) (frameBatch * frames[idx] / dimCounter);
                    break;
                }
            }
        }
    }

    int[] gridShape = (int[]) ConvertUtils.convert(Arrays.copyOf(frames, frames.length - dimension),
            int[].class);
    if (!Arrays.equals(selectionData.getShape(), gridShape)) {
        selectionData = DatasetFactory.ones(gridShape, Dataset.INT);
    }
}
From source file:uk.ac.diamond.scisoft.ncd.passerelle.actors.forkjoin.NcdImageStatsForkJoinTransformer.java
private void generatePointROIList() {
    int[] imageShape = (int[]) ConvertUtils
            .convert(Arrays.copyOfRange(frames, frames.length - dimension, frames.length), int[].class);

    UniformIntegerDistribution randX = new UniformIntegerDistribution(0, imageShape[1] - 1);
    UniformIntegerDistribution randY = new UniformIntegerDistribution(0, imageShape[0] - 1);

    while (points.size() < numSamples) {
        int[] point = new int[] { randY.sample(), randX.sample() };
        PointROI pointROI = new PointROI(point);
        if (intSector == null || intSector.containsPoint(point[1], point[0])) {
            if (mask == null || mask.getBoolean(point)) {
                points.append(pointROI);
                double radius = distance.compute(intSector.getPoint(), new double[] { point[0], point[1] });
                radiiMap.put(new Pair<Integer, Integer>(point[1], point[0]), radius);
            }
        }
    }

    // Calculate resolution bins
    double[] sortedRadii = ArrayUtils.toPrimitive(radiiMap.values().toArray(new Double[] {}));
    Arrays.sort(sortedRadii);
    percentile.setData(sortedRadii);
    percentiles[0] = 0;
    percentiles[numBins] = Double.MAX_VALUE;
    for (int i = 1; i < numBins; i++) {
        double p = i * 100.0 / numBins;
        percentiles[i] = percentile.evaluate(p);
    }

    // Subdivide points into resolution bins
    for (int bin = 0; bin < numBins; bin++) {
        HashSet<Pair<Integer, Integer>> pointSet = new HashSet<Pair<Integer, Integer>>();
        for (Entry<Pair<Integer, Integer>, Double> element : radiiMap.entrySet()) {
            double radius = element.getValue();
            if (radius > percentiles[bin] && radius < percentiles[bin + 1]) {
                pointSet.add(element.getKey());
                radiiMap.remove(element);
            }
        }
        resBins.add(pointSet);
    }
}