List of usage examples for java.util.ArrayList.clear()
public void clear()
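clear() removes all elements from the list, leaving it empty but still usable. Before the real-world excerpts below, here is a minimal self-contained sketch (not taken from any of the listed sources) of the reuse pattern most of them share:

import java.util.ArrayList;
import java.util.List;

public class ClearDemo {
    public static void main(String[] args) {
        List<String> buffer = new ArrayList<>();
        for (int batch = 0; batch < 3; batch++) {
            buffer.add("item-" + batch);
            System.out.println("batch " + batch + " size: " + buffer.size()); // always 1
            // clear() empties the list in place; the same instance (and its
            // backing array capacity) is reused for the next batch instead of
            // allocating a new ArrayList each iteration.
            buffer.clear();
        }
        System.out.println(buffer.isEmpty()); // true
    }
}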
From source file:eu.freme.broker.eservices.FremeNER.java
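In this example, clear() collapses the accumulated mode list down to a single "all" entry once the caller requests every processing step, so later checks only have to look for one value.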
@RequestMapping(value = "/e-entity/freme-ner/documents", method = { RequestMethod.POST, RequestMethod.GET }) public ResponseEntity<String> execute( /*@RequestParam(value = "input", required = false) String input, @RequestParam(value = "i", required = false) String i, @RequestParam(value = "informat", required = false) String informat, @RequestParam(value = "f", required = false) String f, @RequestParam(value = "outformat", required = false) String outformat, @RequestParam(value = "o", required = false) String o, @RequestParam(value = "prefix", required = false) String prefix, @RequestParam(value = "p", required = false) String p, *///w w w. j a v a2 s .c o m @RequestHeader(value = "Accept", required = false) String acceptHeader, @RequestHeader(value = "Content-Type", required = false) String contentTypeHeader, @RequestParam(value = "language", required = true) String language, @RequestParam(value = "dataset", required = true) String dataset, @RequestParam(value = "numLinks", required = false) String numLinksParam, @RequestParam(value = "enrichement", required = false) String enrichementType, @RequestParam(value = "mode", required = false) String mode, @RequestParam(value = "domain", required = false) String domain, @RequestParam(value = "types", required = false) String types, @RequestParam(value = "datasetKey", required = false) String datasetKey, @RequestParam Map<String, String> allParams, @RequestBody(required = false) String postBody) { try { // System.out.println(domain); // System.out.println(types); // Check the language parameter. if (!SUPPORTED_LANGUAGES.contains(language)) { // The language specified with the langauge parameter is not supported. throw new eu.freme.broker.exception.BadRequestException("Unsupported language."); } if (dataset.equals("wand")) { if (datasetKey != null) { if (datasetKey.equals(wandKey)) { // The user has access right to the dataset. } else { throw new eu.freme.broker.exception.AccessDeniedException( "You dont have access right for this dataset" + wandKey); } } else { throw new eu.freme.broker.exception.AccessDeniedException( "You dont have access right for this dataset"); } } ArrayList<String> rMode = new ArrayList<>(); // Check the MODE parameter. if (mode != null) { String[] modes = mode.split(","); for (String m : modes) { if (m.equals("spot") || m.equals("classify") || m.equals("link") || m.equals("all")) { // OK, the mode is supported. rMode.add(m); } else { // The mode specified is not supported. throw new eu.freme.broker.exception.BadRequestException("Unsupported mode: " + m); } } if (rMode.contains("classify") && !rMode.contains("spot")) { throw new eu.freme.broker.exception.BadRequestException( "Unsupported mode combination: classification must be performed in combination with spotting."); } if (rMode.contains("all")) { rMode.clear(); rMode.add("all"); } } else { // OK, perform all. rMode.add("all"); } int numLinks = 1; // Check the dataset parameter. 
if (numLinksParam != null) { numLinks = Integer.parseInt(numLinksParam); if (numLinks > 5) { numLinks = 1; } } //NIFParameterSet parameters = this.normalizeNif(input, informat, outformat, postBody, acceptHeader, contentTypeHeader, prefix); NIFParameterSet nifParameters = this.normalizeNif(postBody, acceptHeader, contentTypeHeader, allParams, false); Model inModel = ModelFactory.createDefaultModel(); Model outModel = ModelFactory.createDefaultModel(); outModel.setNsPrefix("dbpedia", "http://dbpedia.org/resource/"); outModel.setNsPrefix("dbpedia-de", "http://de.dbpedia.org/resource/"); outModel.setNsPrefix("dbpedia-nl", "http://nl.dbpedia.org/resource/"); outModel.setNsPrefix("dbpedia-es", "http://es.dbpedia.org/resource/"); outModel.setNsPrefix("dbpedia-it", "http://it.dbpedia.org/resource/"); outModel.setNsPrefix("dbpedia-fr", "http://fr.dbpedia.org/resource/"); outModel.setNsPrefix("dbpedia-ru", "http://ru.dbpedia.org/resource/"); outModel.setNsPrefix("dbc", "http://dbpedia.org/resource/Category:"); outModel.setNsPrefix("rdfs", "http://www.w3.org/2000/01/rdf-schema#"); outModel.setNsPrefix("dcterms", "http://purl.org/dc/terms/"); outModel.setNsPrefix("freme-onto", "http://freme-project.eu/ns#"); String docForProcessing = null; if (nifParameters.getInformat().equals(RDFConstants.RDFSerialization.PLAINTEXT)) { // input is sent as value of the input parameter docForProcessing = nifParameters.getInput(); // if(rMode.size() == 1 && rMode.contains("link")) { // throw new eu.freme.broker.exception.BadRequestException("Unsupported mode combination: you must provide NIF in order to perform only linking."); // } } else { // input is sent as body of the request if (rMode.size() == 1 && rMode.contains("link")) { docForProcessing = postBody; } else { inModel = rdfConversionService.unserializeRDF(nifParameters.getInput(), nifParameters.getInformat()); StmtIterator iter = inModel.listStatements(null, RDF.type, inModel .getResource("http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#Context")); boolean textFound = false; String tmpPrefix = "http://freme-project.eu/#"; // The first nif:Context with assigned nif:isString will be processed. 
while (!textFound) { Resource contextRes = iter.nextStatement().getSubject(); tmpPrefix = contextRes.getURI().split("#")[0]; nifParameters.setPrefix(tmpPrefix); Statement isStringStm = contextRes.getProperty(inModel.getProperty( "http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString")); if (isStringStm != null) { docForProcessing = isStringStm.getObject().asLiteral().getString(); textFound = true; } } } if (docForProcessing == null) { throw new eu.freme.broker.exception.BadRequestException("No content to process."); } } String fremeNERRes = entityAPI.callFremeNER(docForProcessing, language, nifParameters.getPrefix(), dataset, numLinks, rMode, nifParameters.getInformat().contentType(), domain, types); outModel.read(new ByteArrayInputStream(fremeNERRes.getBytes()), null, "TTL"); outModel.add(inModel); HashMap<String, String> templateParams = new HashMap<>(); if (enrichementType != null) { if (enrichementType.equals("dbpedia-categories")) { Template template = templateDAO.findOneById(300); outModel = dataEnricher.enrichWithTemplate(outModel, template, templateParams); } } return createSuccessResponse(outModel, nifParameters.getOutformat()); } catch (BadRequestException e) { logger.error(e.getMessage(), e); throw new eu.freme.broker.exception.BadRequestException(e.getMessage()); } catch (eu.freme.eservices.eentity.exceptions.ExternalServiceFailedException e) { logger.error(e.getMessage(), e); throw new ExternalServiceFailedException(); } catch (Exception e) { logger.error(e.getMessage(), e); throw new eu.freme.broker.exception.BadRequestException(e.getMessage()); } }
From source file:org.apache.hadoop.hbase.replication.regionserver.ReplicationSource.java
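In this example, clear() empties the entry's whole KeyValue list in one call when the log key's table is not in the peer's replicable-table list, preventing any of its edits from being replicated.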
/**
 * We only want KVs that are scoped other than local.
 * @param entry The entry to check for replication
 */
protected void removeNonReplicableEdits(HLog.Entry entry) {
    String tabName = entry.getKey().getTablename().getNameAsString();
    ArrayList<KeyValue> kvs = entry.getEdit().getKeyValues();
    Map<String, List<String>> tableCFs = null;
    try {
        tableCFs = this.replicationPeers.getTableCFs(peerId);
    } catch (IllegalArgumentException e) {
        LOG.error("should not happen: can't get tableCFs for peer " + peerId
                + ", degenerate as if it's not configured by keeping tableCFs==null");
    }
    int size = kvs.size();
    // Clear kvs (prevent replicating) if the logKey's table isn't in this peer's
    // replicable table list (an empty tableCFs means all tables are replicable).
    if (tableCFs != null && !tableCFs.containsKey(tabName)) {
        kvs.clear();
    } else {
        NavigableMap<byte[], Integer> scopes = entry.getKey().getScopes();
        List<String> cfs = (tableCFs == null) ? null : tableCFs.get(tabName);
        for (int i = size - 1; i >= 0; i--) {
            KeyValue kv = kvs.get(i);
            // The scope will be null or empty if there's nothing to replicate in
            // that WALEdit. Ignore (remove) the kv if its column family isn't in
            // the replicable cf list (an empty cfs means all cfs of this table
            // are replicable).
            if (scopes == null || !scopes.containsKey(kv.getFamily())
                    || (cfs != null && !cfs.contains(Bytes.toString(kv.getFamily())))) {
                kvs.remove(i);
            }
        }
    }
    if (kvs.size() < size / 2) {
        kvs.trimToSize();
    }
}
From source file:com.hichinaschool.flashcards.libanki.Models.java
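In this example, clear() resets the scratch list tmp at the top of each loop iteration, so the same ArrayList can be refilled and re-rendered once per field instead of being reallocated.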
private Object[] _reqForTemplate(JSONObject m, ArrayList<String> flds, JSONObject t) {
    try {
        ArrayList<String> a = new ArrayList<String>();
        ArrayList<String> b = new ArrayList<String>();
        for (String f : flds) {
            a.add("ankiflag");
            b.add("");
        }
        Object[] data;
        data = new Object[] { 1L, 1L, m.getLong("id"), 1L, t.getInt("ord"), "",
                Utils.joinFields(a.toArray(new String[a.size()])) };
        String full = mCol._renderQA(data).get("q");
        data = new Object[] { 1L, 1L, m.getLong("id"), 1L, t.getInt("ord"), "",
                Utils.joinFields(b.toArray(new String[b.size()])) };
        String empty = mCol._renderQA(data).get("q");
        // If full and empty are the same, the template is invalid and there is no way to satisfy it.
        if (full.equals(empty)) {
            return new Object[] { "none", new JSONArray(), new JSONArray() };
        }
        String type = "all";
        JSONArray req = new JSONArray();
        ArrayList<String> tmp = new ArrayList<String>();
        for (int i = 0; i < flds.size(); i++) {
            tmp.clear();
            tmp.addAll(a);
            tmp.set(i, "");
            data[6] = Utils.joinFields(tmp.toArray(new String[tmp.size()]));
            // If no field content appeared, the field is required.
            if (!mCol._renderQA(data, new ArrayList<String>()).get("q").contains("ankiflag")) {
                req.put(i);
            }
        }
        if (req.length() > 0) {
            return new Object[] { type, req };
        }
        // If there are no required fields, switch to "any" mode.
        type = "any";
        req = new JSONArray();
        for (int i = 0; i < flds.size(); i++) {
            tmp.clear();
            tmp.addAll(b);
            tmp.set(i, "1");
            data[6] = Utils.joinFields(tmp.toArray(new String[tmp.size()]));
            // If not the same as empty, this field can make the card non-blank.
            if (!mCol._renderQA(data).get("q").equals(empty)) {
                req.put(i);
            }
        }
        return new Object[] { type, req };
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }
}
From source file:org.apache.hadoop.hive.cli.CliDriver.java
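In this example, clear() empties the result buffer after each fetched batch has been printed, letting the same list be passed back into qp.getResults() for the next batch.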
int processLocalCmd(String cmd, CommandProcessor proc, CliSessionState ss) {
    int tryCount = 0;
    boolean needRetry;
    int ret = 0;
    do {
        try {
            needRetry = false;
            if (proc != null) {
                if (proc instanceof Driver) {
                    Driver qp = (Driver) proc;
                    PrintStream out = ss.out;
                    long start = System.currentTimeMillis();
                    if (ss.getIsVerbose()) {
                        out.println(cmd);
                    }
                    qp.setTryCount(tryCount);
                    ret = qp.run(cmd).getResponseCode();
                    if (ret != 0) {
                        qp.close();
                        return ret;
                    }
                    // The query has run; capture the time taken.
                    long end = System.currentTimeMillis();
                    double timeTaken = (end - start) / 1000.0;
                    ArrayList<String> res = new ArrayList<String>();
                    printHeader(qp, out);
                    // Print the results.
                    int counter = 0;
                    try {
                        if (out instanceof FetchConverter) {
                            ((FetchConverter) out).fetchStarted();
                        }
                        while (qp.getResults(res)) {
                            for (String r : res) {
                                out.println(r);
                            }
                            counter += res.size();
                            res.clear();
                            if (out.checkError()) {
                                break;
                            }
                        }
                    } catch (IOException e) {
                        console.printError(
                                "Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
                        ret = 1;
                    }
                    int cret = qp.close();
                    if (ret == 0) {
                        ret = cret;
                    }
                    if (out instanceof FetchConverter) {
                        ((FetchConverter) out).fetchFinished();
                    }
                    console.printInfo("Time taken: " + timeTaken + " seconds"
                            + (counter == 0 ? "" : ", Fetched: " + counter + " row(s)"));
                } else {
                    String firstToken = tokenizeCmd(cmd.trim())[0];
                    String cmd_1 = getFirstCmd(cmd.trim(), firstToken.length());
                    if (ss.getIsVerbose()) {
                        ss.out.println(firstToken + " " + cmd_1);
                    }
                    CommandProcessorResponse res = proc.run(cmd_1);
                    if (res.getResponseCode() != 0) {
                        ss.out.println("Query returned non-zero code: " + res.getResponseCode()
                                + ", cause: " + res.getErrorMessage());
                    }
                    if (res.getConsoleMessages() != null) {
                        for (String consoleMsg : res.getConsoleMessages()) {
                            console.printInfo(consoleMsg);
                        }
                    }
                    ret = res.getResponseCode();
                }
            }
        } catch (CommandNeedRetryException e) {
            console.printInfo("Retry query with a different approach...");
            tryCount++;
            needRetry = true;
        }
    } while (needRetry);
    return ret;
}
From source file:com.att.nsa.cambria.service.impl.EventsServiceImpl.java
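In this example, clear() is called on both the message batch and the KeyedMessage list right after each batch is sent, so the same collections accumulate the next batch.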
/**
 * Pushes events to the backend, batching messages before sending them.
 *
 * @param ctx
 * @param topic
 * @param msg
 * @param defaultPartition
 * @param chunked
 * @param mediaType
 * @throws ConfigDbException
 * @throws AccessDeniedException
 * @throws TopicExistsException
 * @throws CambriaApiException
 * @throws IOException
 */
private void pushEvents(DMaaPContext ctx, String topic, InputStream msg, String defaultPartition,
        boolean chunked, String mediaType)
        throws ConfigDbException, AccessDeniedException, TopicExistsException, CambriaApiException, IOException {
    final MetricsSet metricsSet = ctx.getConfigReader().getfMetrics();

    // Set up the event set.
    final CambriaEventSet events = new CambriaEventSet(mediaType, msg, chunked, defaultPartition);

    // Start processing, building a batch to push to the backend.
    final long startMs = System.currentTimeMillis();
    long count = 0;
    long maxEventBatch = 1024 * 16;
    String batchlen = AJSCPropertiesMap.getProperty(CambriaConstants.msgRtr_prop, BATCH_LENGTH);
    if (null != batchlen)
        maxEventBatch = Long.parseLong(batchlen);

    final LinkedList<Publisher.message> batch = new LinkedList<Publisher.message>();
    final ArrayList<KeyedMessage<String, String>> kms = new ArrayList<KeyedMessage<String, String>>();
    try {
        // For each message...
        Publisher.message m = null;
        while ((m = events.next()) != null) {
            // Add the message to the batch.
            batch.add(m);
            final KeyedMessage<String, String> data = new KeyedMessage<String, String>(topic, m.getKey(),
                    m.getMessage());
            kms.add(data);
            // Check if the batch is full.
            final int sizeNow = batch.size();
            if (sizeNow > maxEventBatch) {
                ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
                kms.clear();
                batch.clear();
                metricsSet.publishTick(sizeNow);
                count += sizeNow;
            }
        }

        // Send the pending batch.
        final int sizeNow = batch.size();
        if (sizeNow > 0) {
            ctx.getConfigReader().getfPublisher().sendBatchMessage(topic, kms);
            kms.clear();
            batch.clear();
            metricsSet.publishTick(sizeNow);
            count += sizeNow;
        }

        final long endMs = System.currentTimeMillis();
        final long totalMs = endMs - startMs;
        LOG.info("Published " + count + " msgs in " + totalMs + "ms for topic " + topic);

        // Build a response.
        final JSONObject response = new JSONObject();
        response.put("count", count);
        response.put("serverTimeMs", totalMs);
        DMaaPResponseBuilder.respondOk(ctx, response);
    } catch (Exception excp) {
        int status = HttpStatus.SC_NOT_FOUND;
        String errorMsg = null;
        if (excp instanceof CambriaApiException) {
            status = ((CambriaApiException) excp).getStatus();
            JSONTokener jsonTokener = new JSONTokener(((CambriaApiException) excp).getBody());
            JSONObject errObject = new JSONObject(jsonTokener);
            errorMsg = (String) errObject.get("message");
        }
        ErrorResponse errRes = new ErrorResponse(status,
                DMaaPResponseCode.PARTIAL_PUBLISH_MSGS.getResponseCode(),
                errorMessages.getPublishMsgError() + ":" + topic + "." + errorMessages.getPublishMsgCount()
                        + count + "." + errorMsg,
                null, Utils.getFormattedDate(new Date()), topic, null, ctx.getRequest().getRemoteHost(),
                null, null);
        LOG.info(errRes.toString());
        throw new CambriaApiException(errRes);
    }
}
From source file:com.github.michalbednarski.intentslab.browser.PermissionsFetcher.java
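In this example, clear() resets the per-package permission accumulator after its contents have been copied into a Category, so the list can be reused for the next package.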
@Override
Object getEntries(Context context) {
    PackageManager pm = context.getPackageManager();
    List<PackageInfo> installedPackages = pm.getInstalledPackages(PackageManager.GET_PERMISSIONS);
    ArrayList<Component> foundPermissions = new ArrayList<Component>();
    final boolean grouped = mGrouped; // Avoid race conditions
    ArrayList<Category> apps = grouped ? new ArrayList<Category>() : null;
    for (PackageInfo aPackage : installedPackages) {
        if (aPackage.permissions == null || aPackage.permissions.length == 0) {
            continue;
        }
        for (PermissionInfo permission : aPackage.permissions) {
            if (ComponentFetcher.checkProtectionLevel(permission, mProtectionFilter)
                    && (mNameSubstring == null
                            || permission.name.toLowerCase().contains(mNameSubstring.toLowerCase()))) {
                Component component = new Component();
                component.title = permission.name;
                component.subtitle = String.valueOf(permission.loadLabel(pm));
                component.componentInfo = permission;
                foundPermissions.add(component);
            }
        }
        if (grouped && !foundPermissions.isEmpty()) {
            Category category = new Category();
            category.title = String.valueOf(aPackage.applicationInfo.loadLabel(pm));
            category.subtitle = aPackage.packageName;
            category.components = foundPermissions.toArray(new Component[foundPermissions.size()]);
            apps.add(category);
            // Reset the accumulator for the next package.
            foundPermissions.clear();
        }
    }
    return grouped ? apps.toArray(new Category[apps.size()])
            : foundPermissions.toArray(new Component[foundPermissions.size()]);
}
From source file:org.apache.hadoop.mapred.Yunti3Scheduler.java
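In this example, clear() throws away the candidate job list whenever a job with a higher level is encountered, restarting the accumulation at the new top level.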
private void updateMinSlots() {
    // For each pool, distribute its task allocation among jobs in it that need
    // slots. This is a little tricky since some jobs in the pool might not be
    // able to use all the slots, e.g. they might have only a few tasks left.
    // To deal with this, we repeatedly split up the available task slots
    // between the jobs left, give each job min(its alloc, # of slots it needs),
    // and redistribute any slots that are left over between jobs that still
    // need slots on the next pass. If, in total, the jobs in our pool don't
    // need all its allocation, we leave the leftover slots for general use.
    for (YTPool pool : poolMgr.getPools()) {
        // Clear old minSlots.
        for (JobInfo info : pool.getJobInfos().values()) {
            info.mapFairShare = 0;
            info.reduceFairShare = 0;
        }
        if (poolMgr.getPoolUseFIFO(pool.getName())) {
            updateMinSlotsByFIFO(pool);
            continue;
        }
        for (final YTTaskType type : YTTaskType.values()) {
            List<JobInProgress> jobs = new LinkedList<JobInProgress>(pool.getRunnableJobs());
            int slotsLeft = poolMgr.getAllocation(pool.getName(), type);
            // Keep assigning slots until none are left.
            while (slotsLeft > 0) {
                // Figure out the total weight of the highest-job-level jobs
                // that still need slots.
                double totalWeight = 0;
                int topJobLevel = -1;
                ArrayList<JobInProgress> jobsOftopJobLevel = new ArrayList<JobInProgress>();
                for (Iterator<JobInProgress> it = jobs.iterator(); it.hasNext();) {
                    JobInProgress job = it.next();
                    JobInfo info = pool.getJobInfo(job);
                    if (pool.isRunnable(job) && pool.runnableTasks(job, type) > pool.minTasks(job, type)) {
                        if (info.jobLevel > topJobLevel) {
                            // A higher job level was found: restart the candidate list.
                            topJobLevel = info.jobLevel;
                            totalWeight = pool.weight(job, type);
                            jobsOftopJobLevel.clear();
                            jobsOftopJobLevel.add(job);
                        } else if (info.jobLevel == topJobLevel) {
                            totalWeight += pool.weight(job, type);
                            jobsOftopJobLevel.add(job);
                        }
                    } else {
                        it.remove();
                    }
                }
                if (totalWeight == 0) // No jobs that can use more slots are left.
                    break;
                // Assign slots to jobs, using the floor of their weight divided by
                // total weight. This ensures that all jobs get some chance to take
                // a slot. Then, if no slots were assigned this way, we do another
                // pass where we use ceil, in case some slots were still left over.
                int oldSlots = slotsLeft; // Copy slotsLeft so we can modify it.
                for (JobInProgress job : jobsOftopJobLevel) {
                    double weight = pool.weight(job, type);
                    int share = (int) Math.floor(oldSlots * weight / totalWeight);
                    slotsLeft = giveMinSlots(job, type, slotsLeft, share, pool);
                }
                if (slotsLeft == oldSlots) {
                    for (JobInProgress job : jobsOftopJobLevel) {
                        double weight = pool.weight(job, type);
                        int share = (int) Math.ceil(oldSlots * weight / totalWeight);
                        slotsLeft = giveMinSlots(job, type, slotsLeft, share, pool);
                    }
                }
            }
        }
    }
}
From source file:br.org.gdt.beans.GchFormularioBean.java
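In this example, clear() empties the e-mail parameter list before it is filled and again once the messages have been sent.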
public String salvarPessoasFormulario() {
    if (gchFormulariopessoa != null) {
        RequestContext contextReq = RequestContext.getCurrentInstance();
        Iterator<RecPessoa> keyIterrator = checked.keySet().iterator();
        ArrayList<ParametrosEmail> parametros = new ArrayList<>();
        ParametrosEmail ItemParametro;
        FacesContext context = FacesContext.getCurrentInstance();
        EncryptDecryptString cripto = new EncryptDecryptString();

        // Settings for the system's default e-mail account.
        String emailResponsavel = "murphyrhnotifica@gmail.com";
        String senha = "murphy2017";
        String assunto = "Preenchimento de Formulário";
        String url = context.getExternalContext().getRequestServerName() + ":"
                + context.getExternalContext().getRequestServerPort()
                + context.getExternalContext().getApplicationContextPath()
                + "/ModuloCapitalHumano/ResponderFormulario.xhtml?%1s";
        String mensagem = "";
        boolean vinculou = false;
        parametros.clear();
        while (keyIterrator.hasNext()) {
            RecPessoa pessoa = keyIterrator.next();
            Boolean value = checked.get(pessoa);
            if (value) {
                // Reset the code so Hibernate does not complain.
                gchFormulariopessoa.setFormPesCodigo(0);
                gchFormulariopessoa.setRecIdpessoa(pessoa);
                gchFormulariopessoa.setFormulario(gchFormulario);
                gchFormulariopessoa.setFormRespondido(false);
                gchFormularioPessoaService.save(gchFormulariopessoa);
                String parametroUrl = gchFormulario.getFormCodigo() + "&" + pessoa.getRecIdpessoa();
                String parametroBase64 = DatatypeConverter.printBase64Binary(parametroUrl.getBytes());
                String urlFormatada = String.format(url, "id=" + parametroBase64);
                String msgFormatada = "<html></br></br><div style='border:2px solid #0094ff;'>"
                        + "<h2 style='background:#87CEEB;color:white;padding:10px;color: #222;'>Formulário "
                        + gchFormulario.getFormNome()
                        + "</h2><div style='color:#333;padding:10px;'>"
                        + "<p style='font-size:120%;text-shadow: 0px 2px 3px #555;'>Você acaba de receber um "
                        + "formulário com algumas perguntas para que possamos lhe conhecer melhor. O prazo de "
                        + "respostas é até \"" + gchFormulario.getFormPrazoResposta().toString() + "\"</p>"
                        + "</br>Para acessá-lo clique <a href='http://" + urlFormatada + "'>aqui</a></br></br>"
                        + "<h3>Instruções de Preenchimento</h3></br>"
                        + "<p>- Responda com sinceridade!</p>"
                        + "<p>- Somente é possível marcar uma alternativa por pergunta!</p>"
                        + "<p>- Só é possível responder o formulário uma única vez!</p></div>"
                        + "<h4 style='background:#ADD8E6;padding:8px;'>Murphy RH - Todos os direitos Reservados</h4>"
                        + "</div></html>";

                // Create an e-mail parameter item.
                ItemParametro = new ParametrosEmail();
                ItemParametro.setRemetente(emailResponsavel);
                ItemParametro.setSenha(senha);
                ItemParametro.setAssunto(assunto);
                ItemParametro.setMensagem(msgFormatada);
                ItemParametro.setDestinatario(pessoa.getRecEmail());
                ItemParametro.setFormularioPessoa(gchFormulariopessoa);
                parametros.add(ItemParametro);
                vinculou = true;
            }
        }
        this.formAtivo = false;
        gchFormulariopessoa = new GchFormularioPessoa();
        checked = new HashMap<RecPessoa, Boolean>();
        keyIterrator.remove();

        // If at least one person was linked, send the e-mails.
        if (vinculou) {
            GerenciadorEmail novoEnvio = new GerenciadorEmail();
            try {
                novoEnvio.EnviarEmail(parametros);
                parametros.clear();
            } catch (Exception ex) {
                Logger.getLogger(GchFormularioBean.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        String MsgNotificacao = "Formulário disponibilizado para as pessoas selecionadas!";
        Helper.mostrarNotificacao("Sucesso", MsgNotificacao, "success");
    }
    gchTodosFormularios = null; // This forces the listing to refresh.
    return "Formularios";
}
From source file:com.krawler.spring.crm.common.crmManagerDAOImpl.java
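In this example, clear() resets the HQL parameter list between the two UPDATE statements so it can be refilled with the second statement's bind values.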
@Override
public void setLastUsedFlagForLeadRouting(String userid, String companyid) throws ServiceException {
    String hql = "UPDATE LeadRoutingUsers set lastused = ? where user.userID in "
            + "(select userID from User where company = ?)";
    ArrayList filter_params = new ArrayList();
    filter_params.add(false);
    Company company = (Company) get(Company.class, (companyid));
    filter_params.add(company);
    executeUpdate(hql, filter_params.toArray());

    hql = "UPDATE LeadRoutingUsers set lastused = ? where user.userID = ?";
    // Reuse the same parameter list for the second update.
    filter_params.clear();
    filter_params.add(true);
    filter_params.add(userid);
    executeUpdate(hql, filter_params.toArray());
}
From source file:com.datamelt.nifi.processors.ExecuteRuleEngine.java
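In this example, clear() releases the buffered detail rows once every row has been turned into a flow file.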
/**
 * Generates the flow files for each row of data for the ruleengine details in the flow file - in the form of a list.
 *
 * @param context process context
 * @param session process session
 * @param detailsRows array of rows containing the data row and a property map
 * @param header the header from the flow file content or the given field names
 * @param headerPresent indicator from the configuration if a header is present
 * @return a list of flow files
 */
private List<FlowFile> generateFlowFilesRuleEngineDetails(ProcessContext context, ProcessSession session,
        ArrayList<RuleEngineRow> detailsRows, HeaderRow header, boolean headerPresent) {
    List<FlowFile> detailsFlowFiles = new ArrayList<>();
    for (int i = 0; i < detailsRows.size(); i++) {
        FlowFile detailsFlowFile = session.create();
        detailsFlowFile = updateFlowFileRuleEngineDetailsContent(header, headerPresent, context, session,
                detailsFlowFile, detailsRows.get(i));
        // Use the attributes of the original flow file.
        detailsFlowFile = session.putAllAttributes(detailsFlowFile, detailsRows.get(i).getMap());
        detailsFlowFiles.add(detailsFlowFile);
    }
    // The rows have been converted to flow files and are no longer needed.
    detailsRows.clear();
    getLogger().debug("created list of " + detailsFlowFiles.size() + " ruleengine details flowfiles");
    return detailsFlowFiles;
}