List of usage examples for java.util.Arrays#copyOfRange.
Representative signature (one of several overloads — the examples below also use the int[], byte[], Object[], and generic T[] variants): public static boolean[] copyOfRange(boolean[] original, int from, int to)
From source file:edu.byu.nlp.crowdsourcing.models.gibbs.BlockCollapsedMultiAnnModelMath.java
/**
 * Repairs label switching in a Gibbs sample by permuting class labels so that
 * accuracy/confusion-style matrices (mu, alpha) are as diagonal as possible.
 *
 * Re-ordering does not change the meaning of labels globally (in the data
 * elicited from annotators); it merely ensures that a Y=0 assignment is most
 * likely to be labeled 0, Y=1 labeled 1, and so forth, rather than learning a
 * systematic permutation (e.g. Y=0 mapped to 2) that has identical probability.
 * It is important NOT to use raw counts (e.g., logCountOfYAndM); actual
 * normalized accuracies (e.g., mu) must be consulted.
 *
 * @param sample the sampled model state to fix (not mutated; a new state is returned)
 * @param diagonalizationMethod strategy used to choose the label permutation
 * @param goldInstancesForDiagonalization number of gold instances to use for the
 *        GOLD method, or -1 to use all available gold labels
 * @param diagonalizationWithFullConfusionMatrix for GOLD: if true, build the
 *        confusion matrix from all instances rather than only labeled ones
 * @param rnd randomness source (used only by the RAND method)
 * @return a new BasicMultiAnnState with y, m, theta, mu, alpha (and their means)
 *         consistently permuted
 */
public static MultiAnnState fixLabelSwitching(MultiAnnState sample, DiagonalizationMethod diagonalizationMethod,
        int goldInstancesForDiagonalization, boolean diagonalizationWithFullConfusionMatrix,
        RandomGenerator rnd) {
    // Work on clones so the incoming sample is left untouched.
    int[] y = sample.getY().clone();
    int[] m = sample.getM().clone();
    double[][] mu = Matrices.clone(sample.getMu());
    double[][] meanMu = Matrices.clone(sample.getMeanMu());
    double[][][] alpha = Matrices.clone(sample.getAlpha());
    double[][][] meanAlpha = Matrices.clone(sample.getMeanAlpha());
    double[] theta = sample.getTheta().clone();
    double[] meanTheta = sample.getMeanTheta().clone();
    double[][] logPhi = Matrices.clone(sample.getLogPhi());
    double[][] meanLogPhi = Matrices.clone(sample.getMeanLogPhi());
    // -------------- Fix Y ----------------------- //
    int[] yMap;
    int[] gold;
    switch (diagonalizationMethod) {
    case NONE:
        logger.info("Not Diagonalizing");
        // identity mapping (no change)
        yMap = IntArrays.sequence(0, mu.length);
        break;
    case RAND:
        logger.info("Diagonalizing randomly");
        // randomly shuffled mapping
        yMap = IntArrays.shuffled(IntArrays.sequence(0, mu.length), rnd);
        break;
    case GOLD:
        logger.info("Diagonalizing based on gold 'heldout data'");
        // Build a confusion matrix by comparing gold labels with model predictions
        // (gold labels are constructed to match the model ordering).
        Boolean useLabeledConfusionMatrix = diagonalizationWithFullConfusionMatrix ? null : true;
        gold = Datasets.concealedLabels(sample.getData(), sample.getInstanceIndices());
        // -1 means "use every available gold instance"
        int numGoldInstances = goldInstancesForDiagonalization == -1 ? gold.length
                : goldInstancesForDiagonalization;
        gold = Arrays.copyOfRange(gold, 0, numGoldInstances);
        int[] guesses = Arrays.copyOfRange(sample.getY(), 0, numGoldInstances);
        double[][] confusions = confusionMatrix(useLabeledConfusionMatrix, gold, guesses,
                sample.getNumLabels(), sample.getData());
        // In a CONFUSION matrix, columns correspond to the latent variable y, so
        // permute columns to find a good diagonalization.
        yMap = Matrices.getColReorderingForStrongDiagonal(confusions);
        break;
    case AVG_GAMMA:
        // In a gamma matrix, rows correspond to the latent variable y, so permute
        // rows to find a good diagonalization.
        logger.info("Diagonalizing based on average alpha");
        double[][] cumulativeAlphaMean = new double[sample.getNumLabels()][sample.getNumLabels()];
        for (int j = 0; j < sample.getNumAnnotators(); j++) {
            double[][] alphaMean = meanAlpha[j];
            Matrices.addToSelf(cumulativeAlphaMean, alphaMean);
        }
        yMap = Matrices.getRowReorderingForStrongDiagonal(cumulativeAlphaMean);
        break;
    case MAX_GAMMA:
        logger.info("Diagonalizing based on most confident alpha");
        // (pfelt) Find the most definitive alpha matrix (the one whose entries
        // diverge least from 0 and 1). Map that one to be diagonal, then apply
        // its mapping to all other alphas, since alpha matrices are constrained
        // by the data to be coherent.
        double[][] bestAlphaMean = null;
        double min = Double.POSITIVE_INFINITY;
        for (int j = 0; j < sample.getNumAnnotators(); j++) {
            double[][] alphaMean = meanAlpha[j];
            double error = getMeanSquaredDistanceFrom01(alphaMean);
            if (error < min) {
                min = error;
                bestAlphaMean = alphaMean;
            }
        }
        yMap = Matrices.getNormalizedRowReorderingForStrongDiagonal(bestAlphaMean);
        break;
    default:
        throw new IllegalArgumentException(
                "unknown diagonalization method: " + diagonalizationMethod.toString());
    }
    logger.info("Y-mapping=" + IntArrays.toString(yMap));
    // fix alpha: permute rows of each annotator's matrix
    for (int j = 0; j < sample.getNumAnnotators(); j++) {
        Matrices.reorderRowsToSelf(yMap, alpha[j]);
        Matrices.reorderRowsToSelf(yMap, meanAlpha[j]);
    }
    // fix y: relabel every assignment through the mapping
    for (int i = 0; i < y.length; i++) {
        y[i] = yMap[y[i]];
    }
    // fix theta
    Matrices.reorderElementsToSelf(yMap, theta);
    Matrices.reorderElementsToSelf(yMap, meanTheta);
    // fix mu (rows indexed by y)
    Matrices.reorderRowsToSelf(yMap, mu);
    Matrices.reorderRowsToSelf(yMap, meanMu);
    // (pfelt) Cached values no longer need updating since we operate on a sample,
    // not in the context of a model being sampled:
    //   // fix logSumCountOfYAndM
    //   Matrices.reorderElementsToSelf(yMap, logSumCountOfYAndM);
    //   // fix numAnnsPerJAndY
    //   Matrices.reorderColsToSelf(yMap, numAnnsPerJAndY);
    // -------------- Fix M ----------------------- //
    // (pfelt) We used to sample from mu (by calling mu()) to get a mu setting.
    // Changed to use the params of mu for two reasons:
    //   1) a small performance savings
    //   2) it's easier to test
    int[] mMap;
    try {
        mMap = Matrices.getColReorderingForStrongDiagonal(meanMu);
    } catch (IllegalArgumentException e) {
        // Fall back to the identity mapping when diagonalization fails.
        mMap = new int[meanMu.length];
        for (int i = 0; i < mMap.length; i++) {
            mMap[i] = i;
        }
        logger.warn("unable to diagonalize m, returning the identity mapping. "
                + "If this is itemresp or momresp, then this is fine. "
                + "If this is multiann, then there is a serious problem.");
    }
    // fix mu (columns indexed by m)
    Matrices.reorderColsToSelf(mMap, mu);
    Matrices.reorderColsToSelf(mMap, meanMu);
    // fix m
    for (int i = 0; i < m.length; i++) {
        m[i] = mMap[m[i]];
    }
    // fix phi
    Matrices.reorderRowsToSelf(mMap, logPhi);
    Matrices.reorderRowsToSelf(mMap, meanLogPhi);
    // (pfelt) Cached values no longer need updating (see note above):
    //   // fix numFeaturesPerM
    //   Matrices.reorderElementsToSelf(mMap, numFeaturesPerM);
    return new BasicMultiAnnState(y, m, theta, meanTheta, logPhi, meanLogPhi, mu, meanMu, alpha,
            meanAlpha, sample.getData(), sample.getInstanceIndices());
}
From source file:com.stratio.ingestion.sink.mongodb.EventParser.java
/**
 * Splits a delimited string into a Mongo sub-document according to the field
 * definition's document mapping.
 *
 * Nested document fields consume as many tokens as their own mapping declares;
 * the remainder of the token array is re-joined with '#' and parsed recursively.
 *
 * @param fd       definition carrying the delimiter and the ordered field mapping
 * @param document the raw delimited value to split
 * @return the populated DBObject
 * @throws MongoSinkException if no delimiter is configured
 */
private DBObject populateDocument(DocumentFieldDefinition fd, String document) {
    final String delimiter = fd.getDelimiter();
    if (StringUtils.isEmpty(delimiter)) {
        throw new MongoSinkException("Delimiter char must be set");
    }
    final String[] parts = document.split(delimiter);
    final DBObject dbObject = new BasicDBObject();
    // LinkedHashMap preserves the declared field order, which must line up with
    // the positional tokens in `parts`.
    final Map<String, FieldDefinition> mapping = new LinkedHashMap<String, FieldDefinition>(
            fd.getDocumentMapping());
    int cursor = 0;
    for (Map.Entry<String, FieldDefinition> entry : mapping.entrySet()) {
        final FieldDefinition definition = entry.getValue();
        if (DOCUMENT_TYPE.equalsIgnoreCase(definition.getType().name())) {
            // Nested document: hand the remaining tokens down, re-joined with '#'.
            final String remainder = StringUtils.join(
                    Arrays.copyOfRange(parts, cursor, parts.length), "#");
            dbObject.put(entry.getKey(), parseValue(definition, remainder));
            // Skip past the tokens the nested mapping consumed.
            cursor += ((DocumentFieldDefinition) definition).getDocumentMapping().size();
        } else {
            dbObject.put(entry.getKey(), parseValue(definition, parts[cursor++]));
        }
    }
    return dbObject;
}
From source file:edu.umd.cs.psl.model.kernel.externalinducer.ExternalInducerKernel.java
@SuppressWarnings("unchecked") private void getDataAndExecute(DatabaseAtomStoreQuery db, Execution exe, ModelApplication app) { Map<GroundTerm, GroundTerm[]>[] datas = (Map<GroundTerm, GroundTerm[]>[]) new Object[retrievals.size()]; Map<Variable, int[]> argMap = new HashMap<Variable, int[]>(); Map<Variable, Integer> varkeymap = new HashMap<Variable, Integer>(); List<Variable> keyvarmap = new ArrayList<Variable>(); for (Variable v : retrievals.keySet()) { keyvarmap.add(v);/*from w ww.j a va 2 s .c o m*/ varkeymap.put(v, varkeymap.size()); } for (Map.Entry<Variable, Integer> varpos : varkeymap.entrySet()) { int position = varpos.getValue(); Variable var = varpos.getKey(); List<Atom> atoms = retrievals.get(var); datas[position] = new HashMap<GroundTerm, GroundTerm[]>(); assert atoms != null; if (atoms.isEmpty()) { assert externalAtomVariables.contains(var); Set<Entity> entities = db.getEntities(vartypes.get(var)); for (Entity e : entities) { datas[position].put(e, new GroundTerm[] { e }); } argMap.put(var, new int[] { position, 0 }); } else { Conjunction c = new Conjunction(atoms.toArray(new Atom[atoms.size()])); List<Variable> projectTo = new ArrayList<Variable>(); projectTo.add(var); for (Atom a : atoms) for (Term t : a.getArguments()) if (externalAtomVariables.contains(t) && !projectTo.contains(t)) projectTo.add((Variable) t); assert !projectTo.isEmpty(); ResultList results = db.query(c, projectTo); for (int k = 0; k < results.size(); k++) { GroundTerm[] result = results.get(k); if (externalAtomVariables.contains(var)) datas[position].put(result[0], result); else datas[position].put(result[0], Arrays.copyOfRange(result, 1, result.length)); } int start = externalAtomVariables.contains(var) ? 
0 : 1; for (int i = start; i < projectTo.size(); i++) { argMap.put(projectTo.get(i), new int[] { position, i - start }); } } } GroundTerm[] args = new GroundTerm[head.getArity()]; for (int i = 0; i < head.getArity(); i++) { if (!(head.getArguments()[i] instanceof Variable)) args[i] = (GroundTerm) head.getArguments()[i]; } Predicate p = head.getPredicate(); if (exe == Execution.Bulk) { AtomManager amanager = app.getAtomManager(); Map<GroundTerm[], double[]> results = ((BulkExternalFunction) extFun).bulkCompute(argMap, datas); for (Map.Entry<GroundTerm[], double[]> entry : results.entrySet()) { GroundTerm[] result = entry.getKey(); GroundTerm[] arguments = args.clone(); //Fill in remaining args for (int i = 0; i < head.getArity(); i++) { if ((head.getArguments()[i] instanceof Variable)) arguments[i] = result[varkeymap.get((Variable) head.getArguments()[i])]; } GroundExternalInducer ei = new GroundExternalInducer(this, amanager.getAtom(p, arguments), entry.getValue()); app.addGroundKernel(ei); } } else if (exe == Execution.Learn) { GroundTerm[] query = new GroundTerm[head.getArity()]; for (int i = 0; i < head.getArity(); i++) { if (head.getArguments()[i] instanceof GroundTerm) query[i] = (GroundTerm) head.getArguments()[i]; } ResultListValues results = db.getFacts(head.getPredicate(), query); Map<GroundTerm[], double[]> truth = new HashMap<GroundTerm[], double[]>(); for (int k = 0; k < results.size(); k++) { truth.put(results.get(k), results.getValues(k)); } ((LearnableExternalFunction) extFun).learn(truth, argMap, datas); } else throw new UnsupportedOperationException(exe.toString()); }
From source file:edu.rit.flick.util.FlickTest.java
/**
 * Round-trips {@code originalFile} through flick/unflick and asserts the result
 * is byte-identical to the input.
 *
 * If the input file does not exist, the flick/unflick calls cannot have run, so
 * the test instead asserts that the two expected file-not-found messages were
 * printed to stderr, then returns.
 *
 * Fixes over the original:
 *  - streams are now closed on every path via try-with-resources (previously
 *    they leaked whenever the files matched), and
 *  - the "actual" usage statement now uses %n like the "expected" one, so the
 *    comparison is consistent on platforms where the line separator is not \n.
 */
private final void testForLosslessness() throws IOException, InterruptedException {
    Flick.main(VERBOSE_FLAG, originalFile.getPath());
    Unflick.main(VERBOSE_FLAG, flickedFile.getPath(), unflickedFile.getPath());
    if (!originalFile.exists()) {
        // Input missing: the flicked file will also be missing, so verify the
        // last two stderr lines are the two file-not-found messages.
        final String expectedUsageStatement = String.format("%s%n%s%n",
                new NoSuchFileException(originalFile.getPath(), null,
                        AbstractFlickFile.FILE_NOT_FOUND_EXCEPTION_MESSAGE).getMessage().trim(),
                new NoSuchFileException(flickedFile.getPath(), null,
                        AbstractFlickFile.FILE_NOT_FOUND_EXCEPTION_MESSAGE).getMessage().trim());
        final Object[] errorStack = errContent.toString().split("\n");
        final int eSl = errorStack.length;
        // %n (not \n) so expected and actual use the same line separator.
        final String actualUsageStatement = String.format("%s%n%s%n",
                Arrays.copyOfRange(errorStack, eSl - 2, eSl));
        assertEquals(expectedUsageStatement, actualUsageStatement);
        return;
    }
    // try-with-resources guarantees every stream is closed, including on the
    // fast path where the files already match.
    try (FileInputStream origFIS = new FileInputStream(originalFile);
            ByteBufferInputStream orig = ByteBufferInputStream.map(origFIS.getChannel());
            FileInputStream comAndDecomFIS = new FileInputStream(unflickedFile);
            ByteBufferInputStream comAndDecom = ByteBufferInputStream.map(comAndDecomFIS.getChannel())) {
        if (!FileUtils.contentEquals(originalFile, unflickedFile)) {
            // Walk both streams to report the first differing byte position.
            long position = 0;
            while (orig.available() > 0) {
                position++;
                final int o = orig.read();
                final int c = comAndDecom.read();
                assertEquals(
                        format(FILES_DO_NOT_MATCH_ERROR_FORMAT, originalFile, unflickedFile, position),
                        (char) o + "", (char) c + "");
            }
            // Both streams must be exhausted together.
            assertEquals(orig.available(), comAndDecom.available());
        }
    }
}
From source file:com.acmemotors.batch.LoaderJobConfiguration.java
/**
 * Returns the tokenizer used to parse a line of the input file (each record is a
 * JSON string).
 *
 * The 27 fields (indices 0-26 of {@code Field}) are extracted positionally from
 * the parsed JSON map. Index 0 is cast directly to String, index 4 uses
 * {@code toString()} (and therefore fails fast if the value is absent), and all
 * other indices are converted with {@code String.valueOf}, which renders missing
 * values as "null".
 *
 * @return {@link org.springframework.batch.item.file.transform.LineTokenizer}
 *         capable of parsing the input JSON.
 */
@Bean
@SuppressWarnings("unchecked")
public LineTokenizer jsonLineTokenzier() {
    return line -> {
        List<String> tokens = new ArrayList<>();
        try {
            HashMap<String, Object> result = new ObjectMapper().readValue(line, HashMap.class);
            // Replaces 27 copy-pasted tokens.add(...) lines with one loop while
            // preserving the original per-index conversion behavior exactly.
            for (int i = 0; i < 27; i++) {
                Object value = result.get(Field.get(i).getHerbieField());
                if (i == 0) {
                    // first field was read as a String directly (may be null)
                    tokens.add((String) value);
                } else if (i == 4) {
                    // original used toString(): NPE on a missing value is preserved
                    tokens.add(value.toString());
                } else {
                    tokens.add(String.valueOf(value));
                }
            }
        } catch (IOException e) {
            throw new RuntimeException("Unable to parse json: " + line);
        }
        String[] fields = Field.herbieFields();
        // new String[0]: toArray allocates a correctly-sized array itself; the
        // original's `new String[tokens.size() - 1]` was misleading (a too-small
        // array is discarded and re-allocated anyway).
        return new DefaultFieldSet(tokens.toArray(new String[0]),
                Arrays.copyOfRange(fields, 0, fields.length - 1));
    };
}
From source file:OrdenacaoGUI.java
private static String arrayToString(int[] array) { String string = ""; // para vetores com mais que 23 elementos, retorna apenas os 11 primeiros e os 11 ultimos, com reticencias no meio if (array.length > 23) { int[] inicio = Arrays.copyOf(array, 11); int[] fim = Arrays.copyOfRange(array, array.length - 11, array.length); string += arrayToString(inicio); string += ", ..., "; string += arrayToString(fim);//from w w w . j a v a 2 s . c o m } else { // para vetores menores, exibe tudo for (int i = 0; i < array.length; ++i) { string += array[i]; if (i != array.length - 1) { string += ", "; } } } return string; }
From source file:com.dell.asm.asmcore.asmmanager.util.deployment.HostnameUtilTest.java
@Test
public void testReplaceNumPattern() {
    // Each row: { expected result, template, existing hostnames... }.
    // The ${num} placeholder should resolve to the lowest number not already
    // taken by an existing hostname.
    String[][] cases = new String[][] {
            new String[] { "server1", "server${num}" },
            new String[] { "server1", "server${num}", "server-b" },
            new String[] { "server2", "server${num}", "server1", "server3" },
            new String[] { "server4", "server${num}", "server3", "server2", "server1" }, };
    for (String[] testCase : cases) {
        String want = testCase[0];
        String template = testCase[1];
        String[] existing = Arrays.copyOfRange(testCase, 2, testCase.length);
        HashSet<String> taken = new HashSet<String>(Arrays.asList(existing));
        assertEquals(want, hostnameUtil.replaceNumPattern(template, taken));
    }
}
From source file:com.basistech.lucene.tools.LuceneQueryTool.java
void run(String[] queryOpts, PrintStream out) throws IOException, org.apache.lucene.queryparser.classic.ParseException { if (tabular && fieldNames == null) { // Unlike a SQL result set, Lucene docs from a single query (or %all) may // have different fields, so a tabular format won't make sense unless we // know the exact fields beforehand. Also note that multivalued fields // may have a different number of values in each doc, which also won't // make sense with tabular output. We detect that at runtime. throw new RuntimeException("--tabular requires --fields to be passed"); }//from w w w . j av a 2s. c o m if (sortFields) { Collections.sort(fieldNames); } String opt = queryOpts[0]; if ("%ids".equals(opt)) { List<String> ids = Lists.newArrayList(Arrays.copyOfRange(queryOpts, 1, queryOpts.length)); dumpIds(ids.iterator()); } else if ("%id-file".equals(opt)) { Iterator<String> iterator = new LineIterator(new BufferedReader(new FileReader(queryOpts[1]))); dumpIds(iterator); } else if ("%all".equals(opt)) { runQuery(null, out); } else if ("%enumerate-fields".equals(opt)) { for (String fieldName : allFieldNames) { out.println(fieldName); } } else if ("%count-fields".equals(opt)) { countFields(); } else if ("%enumerate-terms".equals(opt)) { if (queryOpts.length != 2) { throw new RuntimeException("%enumerate-terms requires exactly one field."); } enumerateTerms(queryOpts[1]); } else if ("%script".equals(opt)) { if (queryOpts.length != 2) { throw new RuntimeException("%script requires exactly one arg."); } runScript(queryOpts[1]); } else { runQuery(queryOpts[0], out); } }
From source file:com.opengamma.maths.lowlevelapi.datatypes.primitive.CompressedSparseColumnFormatMatrix.java
@Override public double[] getRowElements(int index) { // getting rows in CSC form is generally bad double[] tmp = new double[_cols]; double val; int ptr = 0;/* ww w. j a v a 2 s .c o m*/ for (int i = 0; i < _cols; i++) { val = this.getEntry(index, i); if (Double.doubleToLongBits(val) != 0) { tmp[ptr] = val; ptr++; } } return Arrays.copyOfRange(tmp, 0, ptr); }
From source file:net.pflaeging.PortableSigner.SignCommandLine.java
/**
 * Creates a new instance of CommandLine: parses the PortableSigner CLI options,
 * resolves passwords (from option, file, console, or stdin), and validates the
 * embed/signature-block parameters. Exits the JVM with a distinct status code
 * on each class of usage error (1=help, 2=missing args, 3=bad args,
 * 4=bad language code, 5=bad embed parameters).
 */
public SignCommandLine(String args[]) {
    // Build the "|"-separated list of supported signature-block language codes
    // from the resource bundle, trimming the trailing "|".
    langcodes = "";
    java.util.Enumeration<String> langCodes = ResourceBundle
            .getBundle("net/pflaeging/PortableSigner/SignatureblockLanguages").getKeys();
    while (langCodes.hasMoreElements()) {
        langcodes = langcodes + langCodes.nextElement() + "|";
    }
    langcodes = langcodes.substring(0, langcodes.length() - 1);
    CommandLine cmd;
    // Declare all supported options; descriptions come from the i18n bundle.
    Options options = new Options();
    options.addOption("t", true, rbi18n.getString("CLI-InputFile"));
    options.addOption("o", true, rbi18n.getString("CLI-OutputFile"));
    options.addOption("s", true, rbi18n.getString("CLI-SignatureFile"));
    options.addOption("p", true, rbi18n.getString("CLI-Password"));
    options.addOption("n", false, rbi18n.getString("CLI-WithoutGUI"));
    options.addOption("f", false, rbi18n.getString("CLI-Finalize"));
    options.addOption("h", false, rbi18n.getString("CLI-Help"));
    options.addOption("b", true, rbi18n.getString("CLI-SigBlock") + langcodes);
    options.addOption("i", true, rbi18n.getString("CLI-SigImage"));
    options.addOption("c", true, rbi18n.getString("CLI-SigComment"));
    options.addOption("r", true, rbi18n.getString("CLI-SigReason"));
    options.addOption("l", true, rbi18n.getString("CLI-SigLocation"));
    options.addOption("e", true, rbi18n.getString("CLI-EmbedSignature"));
    options.addOption("pwdfile", true, rbi18n.getString("CLI-PasswdFile"));
    options.addOption("ownerpwd", true, rbi18n.getString("CLI-OwnerPasswd"));
    options.addOption("ownerpwdfile", true, rbi18n.getString("CLI-OwnerPasswdFile"));
    options.addOption("z", false, rbi18n.getString("CLI-LastPage"));
    CommandLineParser parser = new PosixParser();
    HelpFormatter usage = new HelpFormatter();
    try {
        cmd = parser.parse(options, args);
        input = cmd.getOptionValue("t", "");
        output = cmd.getOptionValue("o", "");
        signature = cmd.getOptionValue("s", "");
        password = cmd.getOptionValue("p", "");
        nogui = cmd.hasOption("n");
        help = cmd.hasOption("h");
        // note the inversions: -f disables finalize, -z disables lastPage
        finalize = !cmd.hasOption("f");
        sigblock = cmd.getOptionValue("b", "");
        sigimage = cmd.getOptionValue("i", "");
        comment = cmd.getOptionValue("c", "");
        reason = cmd.getOptionValue("r", "");
        location = cmd.getOptionValue("l", "");
        embedParams = cmd.getOptionValue("e", "");
        pwdFile = cmd.getOptionValue("pwdfile", "");
        ownerPwdString = cmd.getOptionValue("ownerpwd", "");
        ownerPwdFile = cmd.getOptionValue("ownerpwdfile", "");
        lastPage = !cmd.hasOption("z");
        // Any leftover positional arguments are treated as a usage error.
        if (cmd.getArgs().length != 0) {
            throw new ParseException(rbi18n.getString("CLI-UnknownArguments"));
        }
    } catch (ParseException e) {
        System.err.println(rbi18n.getString("CLI-WrongArguments"));
        usage.printHelp("PortableSigner", options);
        System.exit(3);
    }
    if (nogui) {
        // Headless mode requires input, output, and signature file.
        if (input.equals("") || output.equals("") || signature.equals("")) {
            System.err.println(rbi18n.getString("CLI-MissingArguments"));
            usage.printHelp("PortableSigner", options);
            System.exit(2);
        }
        if (!help) {
            if (password.equals("")) {
                // password missing
                if (!pwdFile.equals("")) {
                    // read the password from the given file
                    try {
                        FileInputStream pwdfis = new FileInputStream(pwdFile);
                        byte[] pwd = new byte[1024];
                        password = "";
                        try {
                            do {
                                int r = pwdfis.read(pwd);
                                if (r < 0) {
                                    break;
                                }
                                // NOTE(review): this appends the whole 1024-byte
                                // buffer regardless of how many bytes were read;
                                // the owner-password loop below correctly uses
                                // new String(pwd, 0, r). Stale buffer content may
                                // leak into the password — confirm and fix.
                                password += new String(pwd);
                                password = password.trim();
                            } while (pwdfis.available() > 0);
                            pwdfis.close();
                        } catch (IOException ex) {
                        }
                    } catch (FileNotFoundException fnfex) {
                    }
                } else {
                    // no password file given, read from standard input
                    System.out.print(rbi18n.getString("CLI-MissingPassword"));
                    Console con = System.console();
                    if (con == null) {
                        byte[] pwd = new byte[1024];
                        password = "";
                        try {
                            do {
                                int r = System.in.read(pwd);
                                if (r < 0) {
                                    break;
                                }
                                // NOTE(review): same full-buffer append issue as
                                // above — read length r is ignored.
                                password += new String(pwd);
                                password = password.trim();
                            } while (System.in.available() > 0);
                        } catch (IOException ex) {
                        }
                    } else {
                        // Console not null. Use it to read the password without echo.
                        char[] pwd = con.readPassword();
                        if (pwd != null) {
                            password = new String(pwd);
                        }
                    }
                }
            }
            if (ownerPwdString.equals("") && ownerPwdFile.equals("")) {
                // no owner password or owner password file given, read from standard input
                System.out.print(rbi18n.getString("CLI-MissingOwnerPassword") + " ");
                Console con = System.console();
                if (con == null) {
                    byte[] pwd = new byte[1024];
                    String tmppassword = "";
                    try {
                        do {
                            int r = System.in.read(pwd);
                            if (r < 0) {
                                break;
                            }
                            // correctly bounded by the read length r
                            tmppassword += new String(pwd, 0, r);
                            tmppassword = tmppassword.trim();
                        } while (System.in.available() > 0);
                    } catch (java.io.IOException ex) {
                        // TODO: perhaps notify the user
                    }
                    ownerPwd = tmppassword.getBytes();
                } else {
                    // Console not null. Use it to read the password without echo.
                    char[] pwd = con.readPassword();
                    if (pwd != null) {
                        // narrow each char to a byte (lossy above 0xFF)
                        ownerPwd = new byte[pwd.length];
                        for (int i = 0; i < pwd.length; i++) {
                            ownerPwd[i] = (byte) pwd[i];
                        }
                    }
                }
            } else if (!ownerPwdString.equals("")) {
                ownerPwd = ownerPwdString.getBytes();
            } else if (!ownerPwdFile.equals("")) {
                // read the owner password bytes from the given file, chunk by chunk
                try {
                    FileInputStream pwdfis = new FileInputStream(ownerPwdFile);
                    ownerPwd = new byte[0];
                    byte[] tmp = new byte[1024];
                    byte[] full;
                    try {
                        do {
                            int r = pwdfis.read(tmp);
                            if (r < 0) {
                                break;
                            }
                            // trim the chunk to the bytes actually read, then
                            // append it to the accumulated owner password
                            tmp = Arrays.copyOfRange(tmp, 0, r);
                            full = new byte[ownerPwd.length + tmp.length];
                            System.arraycopy(ownerPwd, 0, full, 0, ownerPwd.length);
                            System.arraycopy(tmp, 0, full, ownerPwd.length, tmp.length);
                            ownerPwd = full;
                        } while (pwdfis.available() > 0);
                        pwdfis.close();
                    } catch (IOException ex) {
                    }
                } catch (FileNotFoundException fnfex) {
                }
            }
        }
    }
    if (!embedParams.equals("")) {
        // -e expects "verticalPos,leftMargin,rightMargin" as floats.
        String[] parameter = null;
        parameter = embedParams.split(",");
        try {
            Float vPosF = new Float(parameter[0]), lMarginF = new Float(parameter[1]),
                    rMarginF = new Float(parameter[2]);
            vPos = vPosF.floatValue();
            lMargin = lMarginF.floatValue();
            rMargin = rMarginF.floatValue();
            noSigPage = true;
        } catch (NumberFormatException nfe) {
            System.err.println(rbi18n.getString("CLI-embedParameter-Error"));
            usage.printHelp("PortableSigner", options);
            System.exit(5);
        }
    }
    // An unrecognized signature-block language code is a usage error.
    if (!(langcodes.contains(sigblock) || sigblock.equals(""))) {
        System.err.println(rbi18n.getString("CLI-Only-german-english") + langcodes);
        usage.printHelp("PortableSigner", options);
        System.exit(4);
    }
    if (help) {
        usage.printHelp("PortableSigner", options);
        System.exit(1);
    }
}