List of usage examples for java.util.Arrays.copyOfRange
public static boolean[] copyOfRange(boolean[] original, int from, int to)
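Before the source-file examples, a minimal, self-contained sketch of this overload's semantics (not taken from any of the projects below): the range is half-open, so from is inclusive and to is exclusive, and if to runs past the end of the original array the copy is padded with false.

import java.util.Arrays;

public class CopyOfRangeBooleanDemo {
    public static void main(String[] args) {
        boolean[] original = { true, false, true };

        // Half-open range [1, 3): copies the elements at indices 1 and 2.
        boolean[] middle = Arrays.copyOfRange(original, 1, 3);
        System.out.println(Arrays.toString(middle)); // [false, true]

        // 'to' may exceed original.length; the tail is padded with false.
        boolean[] padded = Arrays.copyOfRange(original, 1, 5);
        System.out.println(Arrays.toString(padded)); // [false, true, false, false]
    }
}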
From source file:bb.mcmc.analysis.ConvergeStatUtils.java
protected static double spectrum0(double[] data) {
    final int maxLength = 200; // 200 is the default, TODO, change later
    int batchSize;
    double[] newData;
    if (data.length > maxLength) {
        final double index = 1.0 * data.length / maxLength;
        batchSize = (int) Math.ceil(index);
        int from = 0;
        int to = batchSize;
        ArrayList<Double> tempData = new ArrayList<Double>();
        while (to <= data.length) {
            // double[] temp = Arrays.copyOfRange(data, from, to); System.out.println(Arrays.toString(temp));
            double mean = DiscreteStatistics.mean(Arrays.copyOfRange(data, from, to));
            tempData.add(mean);
            from = to;
            to += batchSize;
        }
        newData = new double[tempData.size()];
        for (int i = 0; i < newData.length; i++) {
            newData[i] = tempData.get(i);
        }
    } else {
        newData = data;
        batchSize = 1;
    }
    double spectrum0 = calSpectrum0(newData);
    double var = spectrum0 * batchSize;
    return var;
}
From source file:com.haulmont.cuba.gui.dynamicattributes.FilteringLookupAction.java
protected String getFilterComponentName(Filter filterComponent) {
    String filterComponentPath = ComponentsHelper.getFilterComponentPath(filterComponent);
    String[] strings = ValuePathHelper.parse(filterComponentPath);
    return ValuePathHelper.format(Arrays.copyOfRange(strings, 1, strings.length));
}
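The pattern above, dropping the leading element of a parsed path before re-formatting it, can be reduced to a short hypothetical sketch; the CUBA helpers are replaced with a plain String[] purely for illustration.

import java.util.Arrays;

public class DropFirstSegmentDemo {
    public static void main(String[] args) {
        String[] segments = { "window", "filter", "lookupField" };

        // copyOfRange(segments, 1, segments.length) drops the first segment
        // and keeps the remaining ones in order.
        String[] withoutRoot = Arrays.copyOfRange(segments, 1, segments.length);
        System.out.println(String.join(".", withoutRoot)); // filter.lookupField
    }
}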
From source file:org.sakaiproject.delegatedaccess.dao.impl.DelegatedAccessDaoImpl.java
public Map<String, List<String>> getNodesBySiteRef(String[] siteRefs, String hierarchyId) {
    try {
        Map<String, List<String>> returnMap = new HashMap<String, List<String>>();
        if (siteRefs == null || siteRefs.length == 0) {
            return returnMap;
        }
        int subArrayIndex = 0;
        do {
            // Process the site refs in slices so each generated IN clause stays under the Oracle limit.
            int subArraySize = ORACLE_IN_CLAUSE_SIZE_LIMIT;
            if (subArrayIndex + subArraySize > siteRefs.length) {
                subArraySize = (siteRefs.length - subArrayIndex);
            }
            String[] subSiteRefs = Arrays.copyOfRange(siteRefs, subArrayIndex, subArrayIndex + subArraySize);

            String query = getStatement("select.hierarchyNode");
            String inParams = "(";
            for (int i = 0; i < subSiteRefs.length; i++) {
                inParams += "?";
                if (i < subSiteRefs.length - 1) {
                    inParams += ",";
                }
            }
            inParams += ")";
            query = query.replace("(?)", inParams);

            List<String> parameters = new ArrayList<String>();
            parameters.add(hierarchyId);
            parameters.addAll(Arrays.asList(subSiteRefs));

            List<String[]> results = (List<String[]>) getJdbcTemplate().query(query, parameters.toArray(),
                    new RowMapper() {
                        public Object mapRow(ResultSet resultSet, int i) throws SQLException {
                            return new String[] { resultSet.getString("title"), resultSet.getString("ID") };
                        }
                    });
            if (results != null) {
                for (String[] result : results) {
                    if (result != null && result.length == 2) {
                        if (!returnMap.containsKey(result[0])) {
                            returnMap.put(result[0], new ArrayList<String>());
                        }
                        returnMap.get(result[0]).add(result[1]);
                    }
                }
            }
            subArrayIndex = subArrayIndex + subArraySize;
        } while (subArrayIndex < siteRefs.length);
        return returnMap;
    } catch (DataAccessException ex) {
        log.error("Error executing query: " + ex.getClass() + ":" + ex.getMessage(), ex);
        return null;
    }
}
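The chunking idiom used above (splitting a large array into fixed-size slices so each query's IN clause stays below a database limit) can be sketched on its own. The chunk size of 2 in main is only an illustrative stand-in for a real limit such as ORACLE_IN_CLAUSE_SIZE_LIMIT.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ChunkingDemo {
    /** Splits the input into consecutive slices of at most chunkSize elements. */
    static List<String[]> chunk(String[] items, int chunkSize) {
        List<String[]> chunks = new ArrayList<>();
        for (int from = 0; from < items.length; from += chunkSize) {
            int to = Math.min(from + chunkSize, items.length);
            chunks.add(Arrays.copyOfRange(items, from, to));
        }
        return chunks;
    }

    public static void main(String[] args) {
        String[] siteRefs = { "a", "b", "c", "d", "e" };
        for (String[] slice : chunk(siteRefs, 2)) {
            System.out.println(Arrays.toString(slice)); // [a, b], then [c, d], then [e]
        }
    }
}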
From source file:com.trickl.math.lanczos.TridiagonalMatrix.java
protected void compute() {
    err.clear();
    eigval_distinct.clear();
    multiplicty.clear();

    err_noghost.clear();
    eigval_distinct_noghost.clear();
    multiplicty_noghost.clear();

    computed = true;
    int n = alpha.length;

    EigenDecomposition eigenDecomposition = new EigenDecomposition(alpha, beta);
    double[] eval = eigenDecomposition.getRealEigenvalues();
    Arrays.sort(eval); // Consistent with IETL

    // tolerance values:
    multipleTolerance = Math.max(alpha_max, beta_max) * 2 * epsilon * (1000 + n);
    threshold = Math.max(eval[0], eval[n - 1]);
    threshold = Math.max(errorTolerance * threshold, 5 * multipleTolerance);

    // error estimates of eigen values starts:
    // the unique eigen values selection, their multiplicities and corresponding errors calculation follows:
    double temp = eval[0];
    eigval_distinct.add(eval[0]);
    int multiple = 1;

    for (int i = 1; i < n; i++) {
        double[] eigenvector = eigenDecomposition.getEigenvector(eval.length - i).toArray();
        if (Math.abs(eval[i] - temp) > threshold) {
            eigval_distinct.add(eval[i]);
            temp = eval[i];
            multiplicty.add(multiple);
            if (multiple > 1) {
                err.add(0.);
            } else {
                err.add(Math.abs(beta[beta.length - 1] * eigenvector[n - 1])); // *beta.rbegin() = betaMplusOne.
            }
            multiple = 1;
        } else {
            multiple++;
        }
    }

    // for last eigen value.
    multiplicty.add(multiple);
    if (multiple > 1) {
        err.add(.0);
    } else {
        double[] eigenvector = eigenDecomposition.getEigenvector(eval.length - n).toArray();
        err.add(Math.abs(beta[beta.length - 1] * eigenvector[n - 1])); // *beta.rbegin() = betaMplusOne.
    }
    // the unique eigen values selection, their multiplicities and corresponding errors calculation ends.

    // ghosts calculations starts:
    double[] beta_g = Arrays.copyOfRange(beta, 1, beta.length);
    double[] alpha_g = Arrays.copyOfRange(alpha, 1, alpha.length);

    eigenDecomposition = new EigenDecomposition(alpha_g, beta_g);
    double[] eval_g = eigenDecomposition.getRealEigenvalues();
    Arrays.sort(eval_g); // Consistent with IETL

    int i = 0, t2 = 0;
    for (double eigval : eigval_distinct) {
        if (multiplicty.get(i) == 1) { // test of spuriousness for the eigenvalues whose multiplicity is one.
            for (int j = t2; j < n - 1; j++, t2++) { // since size of reduced matrix is n-1
                if (eval_g[j] - eigval >= multipleTolerance) {
                    break;
                }
                if (Math.abs(eigval - eval_g[j]) < multipleTolerance) {
                    multiplicty.set(i, 0);
                    err.set(i, .0); // if eigen value is a ghost => error calculation not required, 0 => ignore error.
                    t2++;
                    break;
                }
            }
        }
        i++;
    }

    i = 0;
    for (double eigval : eigval_distinct) {
        if (multiplicty.get(i) != 0) {
            eigval_distinct_noghost.add(eigval);
            multiplicty_noghost.add(multiplicty.get(i));
            err_noghost.add(err.get(i));
        }
        i++;
    }
}
From source file:com.asakusafw.runtime.io.text.directio.AbstractTextStreamFormatTest.java
/**
 * input - w/ header.
 * @throws Exception if failed
 */
@Test
public void input_hedaer() throws Exception {
    MockFormat format = format(3, HeaderType.FORCE);
    String[][] data = {
        { "A", "B", "C", },
        { "D", "E", "F", },
        { "G", "H", "I", },
    };
    try (ModelInput<String[]> in = format.createInput(String[].class, "dummy", input(data))) {
        String[][] result = collect(3, in);
        assertThat(result, is(Arrays.copyOfRange(data, 1, 3)));
    }
}
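Note that copyOfRange on a String[][] copies only the row references (a shallow copy), which is exactly what the assertion above needs: the expected value is the same data minus the header row. A minimal sketch, independent of the Asakusa test harness:

import java.util.Arrays;

public class SkipHeaderRowDemo {
    public static void main(String[] args) {
        String[][] data = {
            { "A", "B", "C" },  // header row
            { "D", "E", "F" },
            { "G", "H", "I" },
        };

        // Rows 1..2 only; the inner arrays are shared with 'data', not cloned.
        String[][] body = Arrays.copyOfRange(data, 1, 3);
        System.out.println(Arrays.deepToString(body)); // [[D, E, F], [G, H, I]]
        System.out.println(body[0] == data[1]);        // true (shallow copy of rows)
    }
}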
From source file:com.aliyun.odps.mapred.cli.OptionParser.java
public void parse(String[] args)
        throws ParseException, FileNotFoundException, ClassNotFoundException, OdpsException {
    CommandLineParser parser = new BasicParser();
    if (args == null || args.length <= 0) {
        throw new UnsupportedOperationException("Main class not specified.");
    }
    CommandLine cmd = parser.parse(options, args, true);

    // Config load order: default file -> specified by file names -> specified
    // by kv pairs.
    OdpsConf odpsConf = new OdpsConf();
    if (cmd.hasOption("odps")) {
        String odpsconf = cmd.getOptionValue("odps");
        File conf = new File(odpsconf);
        InputStream in = new FileInputStream(conf);
        odpsConf.addResource(in);
    }
    if (cmd.hasOption("odpsconf")) {
        String[] odpsconf = cmd.getOptionValues("odpsconf");
        for (String conf : odpsconf) {
            String[] kv = conf.split("=", 2);
            odpsConf.set(kv[0], kv[1]);
        }
    }

    Account account = getAccount(odpsConf);
    Odps odps = new Odps(account);
    odps.setDefaultProject(odpsConf.getProjName());
    if (odpsConf.getEndpoint() != null) {
        odps.setEndpoint(odpsConf.getEndpoint());
    }
    ss.setOdps(odps);

    if (cmd.hasOption("job")) {
        String jobconf = cmd.getOptionValue("job");
        File conf = new File(jobconf);
        InputStream in = new FileInputStream(conf);
        ss.getDefaultJob().addResource(in);
    }
    if (cmd.hasOption("jobconf")) {
        String[] jobconfs = cmd.getOptionValues("jobconf");
        for (String conf : jobconfs) {
            String[] kv = conf.split("=", 2);
            ss.getDefaultJob().set(kv[0], kv[1]);
        }
    }
    if (cmd.hasOption("aliases")) {
        String aliases = cmd.getOptionValue("aliases");
        Map<String, String> map;
        try {
            map = JSON.parseObject(aliases, Map.class);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        ss.setAliases(map);
    }
    if (cmd.hasOption("resources")) {
        String resources = cmd.getOptionValue("resources");
        ss.getDefaultJob().setResources(resources);
    }
    if (cmd.hasOption("local")) {
        ss.setLocalRun(true);
    }

    String[] remain = cmd.getArgs();
    if (remain == null || remain.length <= 0) {
        throw new UnsupportedOperationException("Main class not specified.");
    }
    mainClass = Class.forName(remain[0]);
    if (remain.length > 1) {
        arguments = Arrays.copyOfRange(remain, 1, remain.length);
    } else {
        arguments = new String[0];
    }
}
From source file:com.codelanx.codelanxlib.command.CommandNode.java
/**
 * Called from Bukkit to indicate an executed command
 * <br><br> {@inheritDoc}
 *
 * @since 0.1.0
 * @version 0.1.0
 *
 * @param sender {@inheritDoc}
 * @param command {@inheritDoc}
 * @param label {@inheritDoc}
 * @param args {@inheritDoc}
 * @return {@code true} if the returned {@link CommandStatus} was not a
 *         {@link CommandStatus#FAILED}
 */
@Override
public final boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
    Exceptions.illegalPluginAccess(Reflections.accessedFromBukkit(), "Only bukkit may call this method");
    CommandNode<? extends Plugin> child = this.getClosestChild(args);
    int start = 0;
    if (args.length > 0) {
        for (int i = 0; i < args.length; i++) {
            if (args[i].equalsIgnoreCase(child.getName())) {
                start = i + 1;
                break;
            }
        }
        args = Arrays.copyOfRange(args, start, args.length);
    }
    CommandStatus stat;
    try {
        stat = this.verifyState(child, sender, args);
        if (stat == null) {
            stat = child.execute(sender, args);
        }
    } catch (Throwable ex) {
        stat = CommandStatus.FAILED;
        child.plugin.getLogger().log(Level.SEVERE, String.format(
                "Unhandled exception executing command '%s %s'", label, StringUtils.join(args, " ")), ex);
    }
    // Purposely catch NPE and rethrow
    try {
        stat.handle(sender, child.format, child);
    } catch (NullPointerException ex) {
        throw new IllegalReturnException("Cannot return null from CommandNode#execute", ex);
    }
    return stat != CommandStatus.FAILED;
}
From source file:ECToken3.java
public static final String decryptv3(String key, String input)
        throws java.io.UnsupportedEncodingException, java.security.NoSuchAlgorithmException,
        javax.crypto.NoSuchPaddingException, java.security.InvalidKeyException,
        javax.crypto.IllegalBlockSizeException, javax.crypto.BadPaddingException,
        java.security.InvalidAlgorithmParameterException {
    //----------------------------------------------------
    // Base64 decode
    //----------------------------------------------------
    String result = null;
    Base64 encoder = new Base64(true);
    byte[] inputBytes = encoder.decode(input.getBytes("ASCII"));

    //----------------------------------------------------
    // Get SHA-256 of key
    //----------------------------------------------------
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    md.update(key.getBytes("ASCII"));
    byte[] keyDigest = md.digest();

    //System.out.format("+-------------------------------------------------------------\n");
    //System.out.format("| Decrypt\n");
    //System.out.format("+-------------------------------------------------------------\n");
    //System.out.format("| key: %s\n", key);
    //System.out.format("| token: %s\n", input);

    //----------------------------------------------------
    // Rip up the ciphertext
    //----------------------------------------------------
    byte[] ivBytes = new byte[12];
    ivBytes = Arrays.copyOfRange(inputBytes, 0, ivBytes.length);
    byte[] cipherBytes = new byte[inputBytes.length - ivBytes.length];
    cipherBytes = Arrays.copyOfRange(inputBytes, ivBytes.length, inputBytes.length);

    //----------------------------------------------------
    // Decrypt
    //----------------------------------------------------
    AEADBlockCipher cipher = new GCMBlockCipher(new AESEngine());
    cipher.init(false, new AEADParameters(new KeyParameter(keyDigest), MAC_SIZE_BITS, ivBytes));

    //System.out.format("+-------------------------------------------------------------\n");
    //System.out.format("| iv: %s\n", bytesToHex(ivBytes));
    //System.out.format("| ciphertext: %s\n", bytesToHex(Arrays.copyOfRange(cipherBytes, 0, cipherBytes.length - 16)));
    //System.out.format("| tag: %s\n", bytesToHex(Arrays.copyOfRange(cipherBytes, cipherBytes.length - 16, cipherBytes.length)));
    //System.out.format("+-------------------------------------------------------------\n");

    byte[] dec = new byte[cipher.getOutputSize(cipherBytes.length)];
    try {
        int res = cipher.processBytes(cipherBytes, 0, cipherBytes.length, dec, 0);
        cipher.doFinal(dec, res);
    } catch (Exception e) {
        throw new RuntimeException(e);
    }

    //System.out.format("token: %s\n", new String(dec, "ASCII"));
    return new String(dec, "ASCII");
}
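The fixed-prefix split above (first 12 bytes are the IV, the remainder is ciphertext plus authentication tag) is a common copyOfRange use with byte arrays. A minimal sketch without the Bouncy Castle dependency, assuming the same 12-byte IV layout:

import java.util.Arrays;

public class SplitIvDemo {
    private static final int IV_LENGTH = 12; // GCM nonce length used in the example above

    public static void main(String[] args) {
        byte[] token = new byte[20]; // stand-in for the Base64-decoded token bytes
        for (int i = 0; i < token.length; i++) {
            token[i] = (byte) i;
        }

        // First IV_LENGTH bytes are the IV, the rest is ciphertext + tag.
        byte[] iv = Arrays.copyOfRange(token, 0, IV_LENGTH);
        byte[] cipherAndTag = Arrays.copyOfRange(token, IV_LENGTH, token.length);

        System.out.println(iv.length + " + " + cipherAndTag.length); // 12 + 8
    }
}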
From source file:eu.stratosphere.api.java.io.CsvReader.java
/**
 * Configures which fields of the CSV file should be included and which should be skipped. The
 * parser will look at the first {@code n} fields, where {@code n} is the length of the boolean
 * array. The parser will skip over all fields where the boolean value at the corresponding position
 * in the array is {@code false}. The result contains the fields where the corresponding position in
 * the boolean array is {@code true}.
 * The number of fields in the result is consequently equal to the number of times that {@code true}
 * occurs in the fields array.
 *
 * @param fields The array of flags that describes which fields are to be included and which not.
 * @return The CSV reader instance itself, to allow for fluent function chaining.
 */
public CsvReader includeFields(boolean... fields) {
    if (fields == null || fields.length == 0) {
        throw new IllegalArgumentException("The set of included fields must not be null or empty.");
    }

    int lastTruePos = -1;
    for (int i = 0; i < fields.length; i++) {
        if (fields[i]) {
            lastTruePos = i;
        }
    }

    if (lastTruePos == -1) {
        throw new IllegalArgumentException(
                "The description of fields to parse excluded all fields. At least one fields must be included.");
    }
    if (lastTruePos == fields.length - 1) {
        this.includedMask = fields;
    } else {
        this.includedMask = Arrays.copyOfRange(fields, 0, lastTruePos + 1);
    }
    return this;
}
From source file:com.opengamma.analytics.financial.model.volatility.smile.function.HestonVolatilityFunction.java
@Override
public Function1D<HestonModelData, double[]> getVolatilityFunction(final double forward, final double[] strikes,
        final double timeToExpiry) {
    final int n = strikes.length;
    final double lowestStrike = strikes[0];
    final double highestStrike = strikes[n - 1];

    return new Function1D<HestonModelData, double[]>() {
        @SuppressWarnings("synthetic-access")
        @Override
        public double[] evaluate(final HestonModelData x) {
            final MartingaleCharacteristicExponent ce = new HestonCharacteristicExponent(x);
            //TODO calculations relating to the FFT setup are made each call, even though they will be very similar (depends on Characteristic
            // Exponent). Maybe worth calculating a typical setup, outside of this function
            final double[][] strikeNPrice = FFT_PRICER.price(forward, 1.0, timeToExpiry, true, ce, lowestStrike,
                    highestStrike, n, _limitSigma, _alpha, _limitTolerance);
            final int m = strikeNPrice.length;
            final double[] k = new double[m];
            final double[] vol = new double[m];
            int count = 0;
            for (int i = 0; i < m; i++) {
                final double strike = strikeNPrice[i][0];
                final double price = strikeNPrice[i][1];
                if (price > 0.0) {
                    double impVol;
                    try {
                        impVol = BlackFormulaRepository.impliedVolatility(price, forward, strike, timeToExpiry, true);
                        k[count] = strike;
                        vol[count] = impVol;
                        count++;
                    } catch (final IllegalArgumentException e) {
                        //impVol = BlackFormulaRepository.impliedVolatility(price, forward, strike, timeToExpiry, true);
                    }
                }
            }
            final double[] res = new double[n];
            if (count == 0) {
                //i.e. every single price is invalid, which could happen with extreme parameters. All we can do without stopping the
                // fitter, is return zero vols.
                for (int i = 0; i < n; i++) {
                    res[i] = 0.0;
                }
            } else {
                double[] validStrikes = new double[count];
                double[] validVols = new double[count];
                if (count == m) {
                    validStrikes = k;
                    validVols = vol;
                } else {
                    validStrikes = Arrays.copyOfRange(k, 0, count);
                    validVols = Arrays.copyOfRange(vol, 0, count);
                }
                final Interpolator1DDataBundle dataBundle = _interpolator
                        .getDataBundleFromSortedArrays(validStrikes, validVols);
                for (int i = 0; i < n; i++) {
                    res[i] = _interpolator.interpolate(dataBundle, strikes[i]);
                }
            }
            return res;
        }
    };
}
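The last branch above shows another recurring use: collect results into oversized work arrays, then trim both to the number of valid entries with copyOfRange(array, 0, count). A small stand-alone sketch of just that trimming step, with made-up values:

import java.util.Arrays;

public class TrimToCountDemo {
    public static void main(String[] args) {
        double[] strikes = new double[5];
        double[] vols = new double[5];
        int count = 0;

        // Pretend only three of five candidate points produced a valid result.
        for (double s : new double[] { 90.0, 100.0, 110.0 }) {
            strikes[count] = s;
            vols[count] = 0.2;
            count++;
        }

        // Keep the two arrays parallel by trimming both to the same length.
        double[] validStrikes = Arrays.copyOfRange(strikes, 0, count);
        double[] validVols = Arrays.copyOfRange(vols, 0, count);
        System.out.println(Arrays.toString(validStrikes)); // [90.0, 100.0, 110.0]
        System.out.println(Arrays.toString(validVols));    // [0.2, 0.2, 0.2]
    }
}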