List of usage examples for java.util.Arrays.copyOfRange
public static boolean[] copyOfRange(boolean[] original, int from, int to)
From source file:com.palantir.atlasdb.ptobject.EncodingUtils.java
/**
 * Decodes a bit-flipped, length-prefixed byte region from {@code bytes}.
 * <p>
 * The region at {@code offset} starts with a flipped var-long holding the payload
 * length, immediately followed by the payload itself with all bits flipped.
 * Note that the payload bits are un-flipped <em>in place</em> inside {@code bytes}.
 *
 * @param bytes  buffer containing the encoded region
 * @param offset position where the flipped var-long length prefix starts
 * @return a copy of the decoded (un-flipped) payload bytes
 */
public static byte[] decodeFlippedSizedBytes(byte[] bytes, int offset) {
    // Read the length prefix, then advance past it to the payload.
    final int length = (int) decodeFlippedVarLong(bytes, offset);
    final int payloadStart = offset + sizeOfVarLong(length);
    // Un-flip the payload bits in place, then copy that region out.
    byte[] unflipped = flipAllBitsInPlace(bytes, payloadStart, length);
    return Arrays.copyOfRange(unflipped, payloadStart, payloadStart + length);
}
From source file:com.zotoh.maedr.etc.AppRunner.java
/**
 * Parses the command line and dispatches to the matching {@link Cmdline} handler.
 * <p>
 * Expects {@code args[0]} to be the application home directory and {@code args[1]}
 * the command name; everything from {@code args[1]} onwards is forwarded to the
 * handler that recognizes the command.
 *
 * @param args raw command-line arguments
 * @return {@code true} if a handler recognized and evaluated the command,
 *         {@code false} if too few arguments were given or no handler matched
 * @throws Exception if bundle initialization or command evaluation fails
 */
private static boolean parseArgs(String[] args) throws Exception {
    // Need at least <home-dir> and <command>.
    if (args.length < 2) {
        return false;
    }
    inizBundle();
    String home = StringUtils.stripEnd(niceFPath(new File(args[0])), "/");
    final File h = new File(home);
    final File cwd = getCWD();
    // Plain fixed list instead of the double-brace (anonymous ArrayList subclass)
    // idiom, which leaked an enclosing reference and needed @SuppressWarnings("serial").
    List<Cmdline> cmds = Arrays.asList(
            new CmdSamples(h, cwd),
            new CmdCrypto(h, cwd),
            new CmdCloud(h, cwd),
            new CmdDevice(h, cwd),
            new CmdAppOps(h, cwd),
            new CmdMiscOps(h, cwd));
    for (Cmdline c : cmds) {
        if (c.getCmds().contains(args[1])) {
            // Hand the handler everything from the command name onwards.
            c.eval(Arrays.copyOfRange(args, 1, args.length));
            return true;
        }
    }
    return false;
}
From source file:eus.ixa.ixa.pipe.convert.AbsaSemEval.java
/**
 * Converts an ABSA 2015 XML annotation file into NAF NER annotations on {@code kaf}.
 * <p>
 * For each {@code //sentence} element: tokenizes the sentence text, creates one WF
 * and one Term per token, then for every opinion with a non-"NULL" target maps the
 * opinion's character offsets onto token indices and creates a named entity whose
 * type is the opinion category.
 *
 * @param kaf      NAF document to receive WFs, Terms and Entities
 * @param fileName path of the ABSA 2015 XML file to read
 * @param language language code passed to the tokenizer
 */
private static void absa2015ToNAFNER(KAFDocument kaf, String fileName, String language) {
    // reading the ABSA xml file
    SAXBuilder sax = new SAXBuilder();
    XPathFactory xFactory = XPathFactory.instance();
    try {
        Document doc = sax.build(fileName);
        XPathExpression<Element> expr = xFactory.compile("//sentence", Filters.element());
        List<Element> sentences = expr.evaluate(doc);
        // naf sentence counter (1-based sentence index handed to each new WF)
        int counter = 1;
        for (Element sent : sentences) {
            // Parallel per-token offset lists used later to map opinion character
            // offsets to token indices.
            List<Integer> wfFromOffsets = new ArrayList<>();
            List<Integer> wfToOffsets = new ArrayList<>();
            List<WF> sentWFs = new ArrayList<>();
            List<Term> sentTerms = new ArrayList<>();
            // sentence id and original text
            String sentId = sent.getAttributeValue("id");
            String sentString = sent.getChildText("text");
            // the list contains just one list of tokens
            List<List<Token>> segmentedSentence = StringUtils.tokenizeSentence(sentString, language);
            for (List<Token> sentence : segmentedSentence) {
                for (Token token : sentence) {
                    WF wf = kaf.newWF(token.startOffset(), token.getTokenValue(), counter);
                    // The original ABSA sentence id is stored on the WF's xpath field.
                    wf.setXpath(sentId);
                    final List<WF> wfTarget = new ArrayList<WF>();
                    wfTarget.add(wf);
                    wfFromOffsets.add(wf.getOffset());
                    wfToOffsets.add(wf.getOffset() + wf.getLength());
                    sentWFs.add(wf);
                    // One Term per WF; "O" POS and the surface form as lemma
                    // (placeholder linguistic info).
                    Term term = kaf.newTerm(KAFDocument.newWFSpan(wfTarget));
                    term.setPos("O");
                    term.setLemma(token.getTokenValue());
                    sentTerms.add(term);
                }
            }
            counter++;
            String[] tokenIds = new String[sentWFs.size()];
            for (int i = 0; i < sentWFs.size(); i++) {
                tokenIds[i] = sentWFs.get(i).getId();
            }
            // going through every opinion element for each sentence
            // each opinion element can contain one or more opinions
            Element opinionsElement = sent.getChild("Opinions");
            if (opinionsElement != null) {
                // iterating over every opinion in the opinions element
                List<Element> opinionList = opinionsElement.getChildren();
                for (Element opinion : opinionList) {
                    String category = opinion.getAttributeValue("category");
                    String targetString = opinion.getAttributeValue("target");
                    System.err.println("-> " + category + ", " + targetString);
                    // adding OTE: only opinions with an explicit target span become entities
                    if (!targetString.equalsIgnoreCase("NULL")) {
                        int fromOffset = Integer.parseInt(opinion.getAttributeValue("from"));
                        int toOffset = Integer.parseInt(opinion.getAttributeValue("to"));
                        int startIndex = -1;
                        int endIndex = -1;
                        // Map character offsets to token indices by exact match against
                        // the recorded per-token start/end offsets.
                        for (int i = 0; i < wfFromOffsets.size(); i++) {
                            if (wfFromOffsets.get(i) == fromOffset) {
                                startIndex = i;
                            }
                        }
                        for (int i = 0; i < wfToOffsets.size(); i++) {
                            if (wfToOffsets.get(i) == toOffset) {
                                // span is +1 with respect to the last token of the span
                                endIndex = i + 1;
                            }
                        }
                        // TODO remove this condition to correct manually offsets
                        // (spans whose offsets do not align with token boundaries are
                        // silently skipped here)
                        if (startIndex != -1 && endIndex != -1) {
                            List<String> wfIds = Arrays
                                    .asList(Arrays.copyOfRange(tokenIds, startIndex, endIndex));
                            List<String> wfTermIds = NAFUtils.getWFIdsFromTerms(sentTerms);
                            if (NAFUtils.checkTermsRefsIntegrity(wfIds, wfTermIds)) {
                                List<Term> nameTerms = kaf.getTermsFromWFs(wfIds);
                                ixa.kaflib.Span<Term> neSpan = KAFDocument.newTermSpan(nameTerms);
                                List<ixa.kaflib.Span<Term>> references = new ArrayList<ixa.kaflib.Span<Term>>();
                                references.add(neSpan);
                                Entity neEntity = kaf.newEntity(references);
                                neEntity.setType(category);
                            }
                        }
                    }
                }
            }
        } // end of sentence
    } catch (JDOMException | IOException e) {
        // NOTE(review): errors are only printed, not propagated — callers cannot
        // tell that conversion failed; confirm this best-effort behavior is intended.
        e.printStackTrace();
    }
}
From source file:eu.dety.burp.joseph.utilities.Converter.java
/** * Build {@link RSAPublicKey} from PublicKey PEM string * //from w ww. j av a2s .c o m * @param pemInput * PublicKey PEM string * @return {@link RSAPublicKey} or null */ public static RSAPublicKey getRsaPublicKeyByPemString(String pemInput) { RSAPublicKey publicKey = null; String pubKey = pemInput.replaceAll("(-+BEGIN PUBLIC KEY-+\\r?\\n|-+END PUBLIC KEY-+\\r?\\n?)", ""); // PKCS8 try { byte[] keyBytes = Base64.decodeBase64(pubKey); X509EncodedKeySpec spec = new X509EncodedKeySpec(keyBytes); KeyFactory keyFactory = KeyFactory.getInstance("RSA"); publicKey = (RSAPublicKey) keyFactory.generatePublic(spec); } catch (Exception e) { } // PKCS1 try { byte[] keyBytes = Base64.decodeBase64(pubKey); keyBytes = Arrays.copyOfRange(keyBytes, 24, keyBytes.length); X509EncodedKeySpec spec = new X509EncodedKeySpec(keyBytes); KeyFactory keyFactory = KeyFactory.getInstance("RSA"); publicKey = (RSAPublicKey) keyFactory.generatePublic(spec); } catch (Exception e) { } return publicKey; }
From source file:de.petendi.commons.crypto.HybridCrypto.java
private ArrayList<byte[]> splitSecretAndIV(byte[] concatenated) { byte[] iv = Arrays.copyOfRange(concatenated, 0, 16); byte[] symmetricKey = Arrays.copyOfRange(concatenated, 16, concatenated.length); ArrayList<byte[]> splitted = new ArrayList<>(2); splitted.add(0, iv);//from w w w . j ava 2 s .c o m splitted.add(1, symmetricKey); return splitted; }
From source file:com.amalto.core.objects.DroppedItemPOJO.java
/**
 * Finds the primary keys of all dropped (trashed) items, optionally filtered by a regex.
 *
 * @param regex pattern matched against each raw unique id; {@code null}, {@code ""},
 *              {@code "*"} and {@code ".*"} all mean "no filtering"
 * @return the list of {@link DroppedItemPOJOPK} for every matching trash entry,
 *         or an empty list when the trash cluster holds no documents
 * @throws XtentisException if the underlying server lookup fails
 */
public static List<DroppedItemPOJOPK> findAllPKs(String regex) throws XtentisException {
    // get XmlServerSLWrapperLocal
    XmlServer server = Util.getXmlServerCtrlLocal();
    // Normalize the "match everything" spellings to null so the loop can skip matching.
    if ("".equals(regex) || "*".equals(regex) || ".*".equals(regex)) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        regex = null;
    }
    try {
        //retrieve the item
        String[] ids = server.getAllDocumentsUniqueID(MDM_ITEMS_TRASH);
        if (ids == null) {
            return Collections.emptyList();
        }
        //build PKs collection
        List<DroppedItemPOJOPK> list = new ArrayList<DroppedItemPOJOPK>();
        // Cache of "<cluster>.<concept>." prefix -> type metadata, to avoid
        // re-resolving the repository for every id sharing the same prefix.
        Map<String, ComplexTypeMetadata> conceptMap = new HashMap<String, ComplexTypeMetadata>();
        for (String uid : ids) {
            // Expected id layout: <cluster>.<concept>.<key-part>[.<key-part>...]
            String[] uidValues = uid.split("\\."); //$NON-NLS-1$
            ItemPOJOPK refItemPOJOPK;
            if (uidValues.length < 3) {
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("Could not read id '" + uid + "'. Skipping it.");
                }
                continue;
            }
            // check xsd key's length
            String uidPrefix = uidValues[0] + "." + uidValues[1] + "."; //$NON-NLS-1$//$NON-NLS-2$
            String[] idArray = Arrays.copyOfRange(uidValues, 2, uidValues.length);
            if (!conceptMap.containsKey(uidPrefix)) {
                MetadataRepository repository = ServerContext.INSTANCE.get().getMetadataRepositoryAdmin()
                        .get(uidValues[0]);
                conceptMap.put(uidPrefix, repository.getComplexType(uidValues[1]));
            }
            // A single-field key may itself contain '.' characters, so rebuild it from
            // the raw uid instead of trusting the split() parts.
            if (conceptMap.get(uidPrefix) != null && conceptMap.get(uidPrefix).getKeyFields().size() == 1) {
                idArray = new String[] { StringUtils.removeStart(uid, uidPrefix) };
            }
            refItemPOJOPK = new ItemPOJOPK(new DataClusterPOJOPK(uidValues[0]), uidValues[1], idArray);
            // set revision id as ""
            DroppedItemPOJOPK droppedItemPOJOPK = new DroppedItemPOJOPK(refItemPOJOPK, "/"); //$NON-NLS-1$ //$NON-NLS-2$
            if (regex != null) {
                if (uid.matches(regex)) {
                    list.add(droppedItemPOJOPK);
                }
            } else {
                list.add(droppedItemPOJOPK);
            }
        }
        return list;
    } catch (Exception e) {
        String err = "Unable to find all the identifiers for dropped items " + ": " + e.getClass().getName() + ": " +
                e.getLocalizedMessage();
        LOGGER.error(err, e);
        throw new XtentisException(err, e);
    }
}
From source file:playground.christoph.evacuation.analysis.AgentsInEvacuationAreaWriter.java
/** * @return a graphic showing the number of agents in the evacuated area *///ww w . j ava2 s.c o m private JFreeChart getGraphic(String title, String legend, String modeName, int inputData[]) { /* * Write only the number of defined picture bins to the plot. */ int data[]; if (inputData.length > this.nofPictureBins) { data = Arrays.copyOfRange(inputData, 0, this.nofPictureBins); } else data = inputData; final XYSeriesCollection xyData = new XYSeriesCollection(); final XYSeries dataSerie = new XYSeries(legend, false, true); for (int i = 0; i < data.length; i++) { double hour = i * this.binSize / 60.0 / 60.0; dataSerie.add(hour, data[i]); } xyData.addSeries(dataSerie); final JFreeChart chart = ChartFactory.createXYStepChart(title + ", " + modeName + ", it." + this.iteration, "time", "# agents", xyData, PlotOrientation.VERTICAL, true, // legend false, // tooltips false // urls ); XYPlot plot = chart.getXYPlot(); final CategoryAxis axis1 = new CategoryAxis("hour"); axis1.setTickLabelFont(new Font("SansSerif", Font.PLAIN, 7)); plot.setDomainAxis(new NumberAxis("time")); return chart; }
From source file:org.opensafety.hishare.util.implementation.EncryptionImpl.java
/**
 * Derives a 16-byte initialization vector from the leading bytes of the salt.
 *
 * @param salt source material; must be at least 16 bytes long
 * @return an {@link IvParameterSpec} wrapping the first 16 bytes of {@code salt}
 */
private IvParameterSpec generateIv(byte[] salt) {
    final int ivSize = 16;
    return new IvParameterSpec(Arrays.copyOfRange(salt, 0, ivSize));
}
From source file:com.github.horrorho.inflatabledonkey.chunk.engine.ChunkListDecrypter.java
/**
 * Extracts the raw key material from a chunk's encryption-key field.
 *
 * @param chunkInfo chunk whose encryption key is read
 * @return the key bytes with the leading type tag removed
 * @throws IllegalArgumentException if the field is not a 17-byte key tagged 0x01
 */
byte[] key(ChunkInfo chunkInfo) {
    byte[] encoded = chunkInfo.getChunkEncryptionKey().toByteArray();
    // Only 17-byte (0x11) keys whose leading type tag is 0x01 are supported.
    boolean supported = encoded.length == 0x11 && encoded[0] == 0x01;
    if (!supported) {
        throw new IllegalArgumentException("unsupported key type: 0x" + Hex.toHexString(encoded));
    }
    // Drop the 1-byte type tag; the remainder is the key material.
    return Arrays.copyOfRange(encoded, 1, encoded.length);
}
From source file:bb.mcmc.analysis.ConvergeStatUtils.java
/**
 * Estimates the spectral density of {@code newData} at frequency zero.
 * <p>
 * Computes a periodogram via FFT, then fits a gamma GLM of the spectrum against a
 * transformed frequency covariate and extrapolates the fit to frequency zero.
 * NOTE(review): this mirrors the spectrum0 estimator used by the coda R package's
 * convergence diagnostics — confirm against the project's reference implementation.
 *
 * @param newData input series (e.g. an MCMC trace)
 * @return the estimated spectral density at frequency zero
 */
private static double calSpectrum0(double[] newData) {
    final int N = newData.length;
    // Number of usable (positive) frequencies; int division already truncates,
    // the floor() is redundant but harmless.
    final int Nfreq = (int) Math.floor(N / 2);
    final double oneOverN = 1.0 / N;
    double[] freq = new double[Nfreq];
    double[] f1 = new double[Nfreq];
    for (int i = 0; i < Nfreq; i++) {
        // Fourier frequencies (i+1)/N and the GLM covariate SQRT3*(4f - 1).
        freq[i] = oneOverN * (i + 1);
        f1[i] = SQRT3 * (4 * freq[i] - 1);
    }
    // Interleaved [re, im, re, im, ...] representation for the complex FFT.
    double[] complexArray = ConvergeStatUtils.realToComplexArray(newData);
    double[] spec = new double[N];
    DoubleFFT_1D fft = new DoubleFFT_1D(N);
    fft.complexForward(complexArray);
    for (int i = 0; i < N; i++) {
        Complex complexData = new Complex(complexArray[i * 2], complexArray[i * 2 + 1]);
        // |X(f)|^2 / N: the periodogram ordinate at frequency i/N.
        complexData = complexData.multiply(complexData.conjugate());
        spec[i] = complexData.getReal() / N;
    }
    // Keep only the positive frequencies matching f1 (drop the DC term at index 0).
    spec = Arrays.copyOfRange(spec, 1, f1.length + 1);
    double[] coefficients = gammaGLM.coefficients(spec, f1);
    // Evaluate the fitted log-linear model at the covariate value for f = 0,
    // i.e. f1 = -SQRT3.
    double v = Math.exp(coefficients[0] + coefficients[1] * -SQRT3);
    return v;
}