List of usage examples for java.util LinkedList toString
public String toString()
From source file:org.trnltk.experiment.morphology.ambiguity.DataDiffUtilTest.java
/**
 * Diffing [a, b, c, d] against [a, b]: the shared prefix is one EQUAL chunk and
 * the trailing elements present only in the first list collapse into one DELETE.
 */
@Test
public void shouldComputeDiff_whenLastItemsAreExtra() {
    final ArrayList<Character> original = Lists.newArrayList('a', 'b', 'c', 'd');
    final ArrayList<Character> revised = Lists.newArrayList('a', 'b');

    final DataDiffUtil<Character> diffUtil = new DataDiffUtil<Character>();
    final LinkedList<DataDiffUtil.Diff<Character>> result = diffUtil.diff_main(original, revised);

    assertThat(result.toString(), equalTo("[Diff(EQUAL,\"[a, b]\"), Diff(DELETE,\"[c, d]\")]"));
}
From source file:org.trnltk.experiment.morphology.ambiguity.DataDiffUtilTest.java
/**
 * Diffing [a, b] against [a, b, x, y]: the shared prefix is one EQUAL chunk and
 * the elements present only in the second list collapse into one INSERT.
 */
@Test
public void shouldComputeDiff_whenLastItemsAreMissing() {
    final ArrayList<Character> original = Lists.newArrayList('a', 'b');
    final ArrayList<Character> revised = Lists.newArrayList('a', 'b', 'x', 'y');

    final DataDiffUtil<Character> diffUtil = new DataDiffUtil<Character>();
    final LinkedList<DataDiffUtil.Diff<Character>> result = diffUtil.diff_main(original, revised);

    assertThat(result.toString(), equalTo("[Diff(EQUAL,\"[a, b]\"), Diff(INSERT,\"[x, y]\")]"));
}
From source file:org.trnltk.experiment.morphology.ambiguity.DataDiffUtilTest.java
/**
 * Diffing "abcd" against "abce" (element-wise): the common prefix [a, b, c] is
 * EQUAL, then the differing tail yields a DELETE of [d] followed by an INSERT of [e].
 */
@Test
public void shouldComputeDiff_whenLastItemIsDifferent() {
    final ArrayList<Character> original = Lists.newArrayList(ArrayUtils.toObject("abcd".toCharArray()));
    final ArrayList<Character> revised = Lists.newArrayList(ArrayUtils.toObject("abce".toCharArray()));

    final DataDiffUtil<Character> diffUtil = new DataDiffUtil<Character>();
    final LinkedList<DataDiffUtil.Diff<Character>> result = diffUtil.diff_main(original, revised);

    assertThat(result.toString(),
            equalTo("[Diff(EQUAL,\"[a, b, c]\"), Diff(DELETE,\"[d]\"), Diff(INSERT,\"[e]\")]"));
}
From source file:org.trnltk.experiment.morphology.ambiguity.DataDiffUtilTest.java
/**
 * Diffing [a, b, c] against [a, x, c]: a differing middle element produces a
 * DELETE/INSERT pair framed by EQUAL chunks for the unchanged head and tail.
 */
@Test
public void shouldComputeDiff_whenMiddleItemIsDifferent() {
    final ArrayList<Character> original = Lists.newArrayList('a', 'b', 'c');
    final ArrayList<Character> revised = Lists.newArrayList('a', 'x', 'c');

    final DataDiffUtil<Character> diffUtil = new DataDiffUtil<Character>();
    final LinkedList<DataDiffUtil.Diff<Character>> result = diffUtil.diff_main(original, revised);

    assertThat(result.toString(),
            equalTo("[Diff(EQUAL,\"[a]\"), Diff(DELETE,\"[b]\"), Diff(INSERT,\"[x]\"), Diff(EQUAL,\"[c]\")]"));
}
From source file:org.trnltk.experiment.morphology.ambiguity.DataDiffUtilTest.java
/**
 * Diffing [a, b, c, d] against [a, b, x, y]: the common prefix is EQUAL, then the
 * divergent tails become one DELETE chunk followed by one INSERT chunk.
 */
@Test
public void shouldComputeDiff_whenLastItemsAreDifferent() {
    final ArrayList<Character> original = Lists.newArrayList('a', 'b', 'c', 'd');
    final ArrayList<Character> revised = Lists.newArrayList('a', 'b', 'x', 'y');

    final DataDiffUtil<Character> diffUtil = new DataDiffUtil<Character>();
    final LinkedList<DataDiffUtil.Diff<Character>> result = diffUtil.diff_main(original, revised);

    assertThat(result.toString(),
            equalTo("[Diff(EQUAL,\"[a, b]\"), Diff(DELETE,\"[c, d]\"), Diff(INSERT,\"[x, y]\")]"));
}
From source file:org.eurekastreams.server.service.opensocial.spi.PersonServiceImpl.java
/**
 * Retrieves the people generally associated with a group, or identified by a set of user ids.
 *
 * <p>For the {@code friends} group type, the requestor's following list is fetched; otherwise the
 * supplied OpenSocial ids are resolved directly.</p>
 *
 * @param userIds
 *            set of user ids to retrieve.
 * @param groupId
 *            group id to retrieve.
 * @param collectionOptions
 *            collection options (not used by this implementation).
 * @param fields
 *            fields to retrieve with these users (not used by this implementation).
 * @param token
 *            security token for this request.
 * @return future wrapping the restful collection of people.
 */
@SuppressWarnings("unchecked")
public Future<RestfulCollection<Person>> getPeople(final Set<UserId> userIds, final GroupId groupId,
        final CollectionOptions collectionOptions, final Set<String> fields, final SecurityToken token) {
    log.trace("Entering getPeople");
    List<Person> osPeople = new ArrayList<Person>();
    // Declared as List rather than LinkedList: the previous LinkedList declaration forced a
    // cast of the action result to a concrete class, which would throw ClassCastException if
    // the action ever returned any other List implementation.
    List<PersonModelView> people = null;
    try {
        if (groupId.getType().equals(Type.friends)) {
            Principal currentPrincipal = getPrincipal(token);
            if (currentPrincipal == null) {
                throw new IllegalArgumentException("Invalid requestor");
            }
            GetFollowersFollowingRequest currentRequest = new GetFollowersFollowingRequest(EntityType.PERSON,
                    currentPrincipal.getAccountId(), 0, Integer.MAX_VALUE);
            ServiceActionContext currentContext = new ServiceActionContext(currentRequest, currentPrincipal);
            PagedSet<PersonModelView> peopleResults = (PagedSet<PersonModelView>) serviceActionController
                    .execute(currentContext, getFollowingAction);
            people = new LinkedList<PersonModelView>(peopleResults.getPagedSet());
        } else {
            LinkedList<String> userIdList = new LinkedList<String>();
            for (UserId currentUserId : userIds) {
                // Resolve once per iteration (was resolved twice). The container passes the
                // literal string "null" for unresolvable ids, so filter those out.
                String openSocialId = currentUserId.getUserId(token);
                if (!openSocialId.equals("null")) {
                    userIdList.add(openSocialId);
                }
            }
            log.debug("Sending getPeople userIdList to action: " + userIdList.toString());
            GetPeopleByOpenSocialIdsRequest currentRequest = new GetPeopleByOpenSocialIdsRequest(userIdList,
                    groupId.getType().toString().toLowerCase());
            ServiceActionContext currentContext = new ServiceActionContext(currentRequest, getPrincipal(token));
            people = (List<PersonModelView>) serviceActionController.execute(currentContext, getPeopleAction);
        }
        if (log.isDebugEnabled()) {
            log.debug("Retrieved " + people.size() + " people from action");
        }
        for (PersonModelView currentPerson : people) {
            osPeople.add(convertToOSPerson(currentPerson));
        }
    } catch (Exception ex) {
        log.error("Error occurred retrieving people ", ex);
        // Attach the cause so the original stack trace survives the protocol boundary.
        throw new ProtocolException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, ex.getMessage(), ex);
    }
    return ImmediateFuture.newInstance(new RestfulCollection<Person>(osPeople));
}
From source file:org.trnltk.morphology.contextless.parser.ContextlessMorphologicParser.java
@Override public LinkedList<MorphemeContainer> parse(final TurkishSequence input) { // * find initial containers --> find possible roots and create containers around them // * apply mandatory transitions // * traverse until there are no candidates --> find all results if (logger.isDebugEnabled()) logger.debug("Parsing input " + input); final List<MorphemeContainer> candidateMorphemeContainers = this.findInitialMorphemeContainers(input); if (logger.isDebugEnabled()) { logger.debug(//from w w w. j a va2s. c o m String.format("Found %d candidate morpheme containers", candidateMorphemeContainers.size())); for (MorphemeContainer morphemeContainer : candidateMorphemeContainers) { logger.debug("\t " + morphemeContainer.toString()); } } logger.debug("Applying mandatory transitions to candidates"); final List<MorphemeContainer> candidateMorphemeContainersWithMandatoryTransitions = mandatoryTransitionApplier .applyMandatoryTransitionsToMorphemeContainers(candidateMorphemeContainers, input); final LinkedList<MorphemeContainer> results = new LinkedList<MorphemeContainer>(); final LinkedList<MorphemeContainer> newCandidates = this .traverseCandidates(candidateMorphemeContainersWithMandatoryTransitions, results, input); if (CollectionUtils.isNotEmpty(newCandidates)) throw new IllegalStateException( "There are still parse morpheme containers to traverse, but traversing is finished : " + newCandidates.toString()); return results; }
From source file:org.elasticwarehouse.core.ElasticWarehouseAPIProcessorTask.java
public boolean processRequest(Client esClient, OutputStream os, ElasticWarehouseAPIProcessorTaskParams params) throws IOException { if (params.action == null && params.status == null && params.list == null && params.cancel == null) { os.write(responser.errorMessage("'action', 'status', 'cancel' or 'list' are expected.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else if (params.action != null) { params.action = params.action.toLowerCase(); if (params.action.equals("scan")) { /*String path = request.getParameter("path"); String targetfolder = request.getParameter("targetfolder"); String recurrence = request.getParameter("recurrence"); boolean brecurrence = false; //if( targetfolder == null ) // targetfolder = ResourceTools.preprocessFolderName(path); if( recurrence != null )/* ww w . j a v a2 s.c o m*/ brecurrence = Boolean.parseBoolean(recurrence);*/ if (params.path == null) { os.write(responser.errorMessage("path is needed for scan action.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else { ElasticWarehouseTask taskUUID = tasksManager_.launchScan(params.path, params.targetfolder, params.recurrence, params.keepalive, params.newerthan); os.write(responser.taskAcceptedMessage("Scanning " + params.path, 0, taskUUID)); } } else if (params.action.equals("rethumb")) { ElasticWarehouseTask taskUUID = tasksManager_.launchRethumb(); os.write(responser.taskAcceptedMessage("Started thumbnails regeneration", 0, taskUUID)); } else if (params.action.equals("mkdir")) { //String folder = request.getParameter("folder"); if (params.folder == null) { os.write(responser.errorMessage("folder is needed for mkdir action.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else { ElasticWarehouseTask taskUUID = tasksManager_.createFolder(params.folder); os.write(responser.taskAcceptedMessage("mkdir " + params.folder, 0, taskUUID)); } } else if (params.action.equals("rmdir")) { //String folder = request.getParameter("folder"); if (params.folder == null) { os.write(responser.errorMessage("folder is 
needed for rmdir action.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else { ElasticWarehouseTask taskUUID = tasksManager_.removeFolder(params.folder); os.write(responser.taskAcceptedMessage("rmdir " + params.folder, 0, taskUUID)); } } else if (params.action.equals("move")) { //String folder = request.getParameter("folder"); //String id = request.getParameter("id"); if (params.id == null) { os.write(responser.errorMessage("Please provide id of file to be moved.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else if (params.folder == null) { os.write(responser.errorMessage("folder is needed for move action.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else { ElasticWarehouseTask taskUUID = tasksManager_.moveTo(params.id, params.folder); os.write(responser.taskAcceptedMessage("move id=" + params.id + " to " + params.folder, 0, taskUUID)); } } else if (params.action.equals("rename")) { if (params.id == null) { os.write(responser.errorMessage("Please provide id of file or folder to be renamed.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else if (params.targetname == null || params.targetname.length() == 0) { os.write(responser.errorMessage("targetname cannot be empty.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else { //check if any other task is running on current item LinkedList<String> ctasks = tasksManager_.getTasks(false, conf_.getNodeName()/* NetworkTools.getHostName()*/, 999, 0, false, true, params.id); if (ctasks.size() > 0) { os.write(responser.errorMessage( "Another task is running on current item. 
Try again later or cancel tasks:" + ctasks.toString(), ElasticWarehouseConf.URL_GUIDE_TASK)); } else { ElasticWarehouseTask taskUUID = tasksManager_.rename(params.id, params.targetname); os.write(responser.taskAcceptedMessage("rename id=" + params.id, 0, taskUUID)); } } } else if (params.action.equals("delete")) { //String id = request.getParameter("id"); if (params.id == null) { os.write(responser.errorMessage("Please provide id of file to be deleted.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else { ElasticWarehouseTask taskUUID = tasksManager_.delete(params.id); os.write(responser.taskAcceptedMessage("delete id=" + params.id, 0, taskUUID)); } } else { os.write(responser.errorMessage("Unknown task action.", ElasticWarehouseConf.URL_GUIDE_TASK)); } } else if (params.status != null) { ElasticWarehouseTask task = tasksManager_.getTask(params.status, true); if (task != null) { os.write(task.getJsonSourceBuilder().string().getBytes()); } else { os.write(responser.errorMessage("Unknown task Id " + params.status, ElasticWarehouseConf.URL_GUIDE_TASK)); } } else if (params.cancel != null) { ElasticWarehouseTask task = tasksManager_.getTask(params.cancel, true); if (task != null) { if (task.finished()) { os.write(responser.errorMessage( "Task Id " + params.cancel + " is finished and cannot be cancelled.", ElasticWarehouseConf.URL_GUIDE_TASK)); } else { task = tasksManager_.cancelTask(params.cancel); if (task == null) os.write(responser.errorMessage( "Task Id: " + params.cancel + " is no longer running and cannot be cancelled.", ElasticWarehouseConf.URL_GUIDE_TASK)); else os.write(task.getJsonSourceBuilder().string().getBytes()); } } else { os.write(responser.errorMessage("Unknown task Id " + params.status, ElasticWarehouseConf.URL_GUIDE_TASK)); } } else if (params.list != null) { params.list = params.list.toLowerCase(); //int size = ParseTools.parseIntDirect(request.getParameter("size"), ElasticWarehouseConf.TASKLISTSIZE); //int from = 
ParseTools.parseIntDirect(request.getParameter("from"), 0); LinkedList<String> tasks = null; if (params.list.equals("active")) tasks = tasksManager_.getTasks(false, conf_.getNodeName()/* NetworkTools.getHostName()*/, params.size, params.from, params.showrequest, params.allhosts); else tasks = tasksManager_.getTasks(null, conf_.getNodeName() /*NetworkTools.getHostName()*/, params.size, params.from, params.showrequest, params.allhosts); XContentBuilder builder = jsonBuilder().startArray(); for (String taskid : tasks) { ElasticWarehouseTask task = tasksManager_.getTask(taskid, true); if (task == null) { throw new IOException("Cannot fetch taskId: " + taskid); } else { builder = task.getJsonSourceBuilder(builder, true); } } builder.endArray(); os.write(builder.string().getBytes()); } return true; }
From source file:org.alfresco.repo.management.SafeApplicationEventMulticaster.java
/**
 * Return a Collection of ApplicationListeners matching the given event
 * type. Non-matching listeners get excluded early.
 *
 * @param event
 *            the event to be propagated. Allows for excluding non-matching
 *            listeners early, based on cached matching information.
 * @return a Collection of ApplicationListeners
 * @see org.springframework.context.ApplicationListener
 */
protected Collection<ApplicationListener> getApplicationListeners(ApplicationEvent event) {
    Class<? extends ApplicationEvent> eventType = event.getClass();
    Class sourceType = event.getSource().getClass();
    // Matching listeners are cached per (event type, source type) pair.
    ListenerCacheKey cacheKey = new ListenerCacheKey(eventType, sourceType);
    ListenerRetriever retriever = this.retrieverCache.get(cacheKey);
    if (retriever != null) {
        // Cache hit: reuse the previously computed listener set.
        return retriever.getApplicationListeners();
    } else {
        // Cache miss: build the filtered, ordered listener list and populate a
        // retriever for future lookups. The defaultRetriever holds the full,
        // unfiltered registrations (both bean names and direct instances).
        retriever = new ListenerRetriever(true);
        LinkedList<ApplicationListener> allListeners = new LinkedList<ApplicationListener>();
        synchronized (this.defaultRetriever) {
            // First pass: listeners registered by bean name; resolve each through
            // the BeanFactory and keep only those supporting this event/source.
            if (!this.defaultRetriever.applicationListenerBeans.isEmpty()) {
                BeanFactory beanFactory = getBeanFactory();
                for (String listenerBeanName : this.defaultRetriever.applicationListenerBeans) {
                    ApplicationListener listener = beanFactory.getBean(listenerBeanName,
                            ApplicationListener.class);
                    if (supportsEvent(listener, eventType, sourceType)) {
                        retriever.applicationListenerBeans.add(listenerBeanName);
                        allListeners.add(listener);
                    }
                }
            }
            // Second pass: directly registered listener instances. The contains()
            // check avoids duplicates when a bean-name listener resolved to the
            // same instance.
            for (ApplicationListener listener : this.defaultRetriever.applicationListeners) {
                if (!allListeners.contains(listener) && supportsEvent(listener, eventType, sourceType)) {
                    retriever.applicationListeners.add(listener);
                    allListeners.add(listener);
                }
            }
            // Order by Spring's Ordered/@Order semantics, then publish the filled
            // retriever; the put happens inside the lock so a cached retriever is
            // always fully populated when another thread reads it.
            OrderComparator.sort(allListeners);
            this.retrieverCache.put(cacheKey, retriever);
        }
        if (log.isDebugEnabled()) {
            log.debug(allListeners.toString());
        }
        return allListeners;
    }
}
From source file:at.ac.tuwien.dsg.cloud.salsa.engine.smartdeployment.main.SmartDeploymentService.java
/**
 * Enriches a CAMF-produced CSAR archive: extracts it, runs QUELLE enrichment on the
 * TOSCA definition, collects per-node software requirements (CARL policies), and
 * prepends generated dependency-installation scripts to each node's deployment script,
 * then repackages the CSAR.
 *
 * @param csarTmp     path to the uploaded temporary CSAR file.
 * @param serviceName service name used for the rebuilt CSAR and the returned URL.
 * @return the download URL of the enriched CSAR, or the literal string "Error" on failure.
 */
private String enrich_CAMF_CSAR_Process(String csarTmp, String serviceName) {
    // Layout of the extracted archive (CSAR convention used by this service).
    String extractedFolder = csarTmp + ".extracted";
    String toscaFile = extractedFolder + "/Definitions/Application.tosca";
    String scriptDir = extractedFolder + "/Scripts/";
    try {
        // -- Extract the CSAR and enrich the TOSCA definition with QUELLE. --
        CSARParser.extractCsar(new File(csarTmp), extractedFolder);
        String toscaXML = FileUtils.readFileToString(new File(toscaFile));
        EngineLogger.logger.debug("Read tosca string done. 100 first characters: {}", toscaXML);
        EngineLogger.logger.debug("Now trying to enrich with QUELLE....");
        SmartDeploymentService sds = new SmartDeploymentService();
        String result = sds.enrichCAMFToscaWithQuelle(toscaXML, serviceName,
                new String[] { EnrichFunctions.QuelleCloudServiceRecommendation.toString() });
        EngineLogger.logger.debug("After enrich with QUELLE, the result is: {}", result);
        // Write the enriched definition back in place.
        FileUtils.writeStringToFile(new File(toscaFile), result);

        // -- Collect the software requirements of each node template. --
        // Map of node ID -> concatenated ";"-separated requirement string.
        Map<String, String> allRequirements = new HashMap<>();
        TDefinitions def = ToscaXmlProcess.readToscaFile(toscaFile);
        for (TNodeTemplate node : ToscaStructureQuery.getNodeTemplateList(def)) {
            EngineLogger.logger.debug("Checking node: {}", node.getId());
            String policiesStr = new String();
            if (node.getPolicies() != null) {
                EngineLogger.logger.debug("Found policies of node: " + node.getId() + "/" + node.getName());
                List<TPolicy> policies = node.getPolicies().getPolicy();
                for (TPolicy p : policies) {
                    // Only "SmartDeployment:Requirement" policies are considered here.
                    if (p.getPolicyType().getLocalPart().equals("Requirement")
                            && p.getPolicyType().getPrefix().equals("SmartDeployment")) {
                        if (p.getName().startsWith("CONSTRAINT")) {
                            // TODO: parse SYBL policies
                        } else {
                            policiesStr += p.getName().trim();
                            // Normalize: every requirement ends with ";".
                            // NOTE(review): "polociesStr" typo lives in the log text.
                            if (!p.getName().trim().endsWith(";")) {
                                policiesStr += ";";
                                EngineLogger.logger.debug("polociesStr = {}", policiesStr);
                            }
                        }
                    }
                }
            }
            EngineLogger.logger.debug("Collected policies for node {} is : {}", node.getId(), policiesStr);
            allRequirements.put(node.getId(), policiesStr);
        }
        EngineLogger.logger.debug("In total, we got following requirements: " + allRequirements.toString());

        // -- Load the dependency-graph knowledge base bundled with the service. --
        String dependencyDataFile = SmartDeploymentService.class.getResource("/data/salsa.dependencygraph.xml")
                .getFile();
        SalsaStackDependenciesGraph depGraph = SalsaStackDependenciesGraph
                .fromXML(FileUtils.readFileToString(new File(dependencyDataFile)));

        // -- For each node: parse its CARL requirements and enrich its script. --
        for (Map.Entry<String, String> entry : allRequirements.entrySet()) {
            EngineLogger.logger.debug("Analyzing node: {}. Full policies string is: *** {} ***", entry.getKey(),
                    entry.getValue());
            // Parse the requirement string with the ANTLR-generated CARL parser.
            CharStream stream = new ANTLRInputStream(entry.getValue());
            CARLLexer lexer = new CARLLexer(stream);
            CommonTokenStream tokens = new CommonTokenStream(lexer);
            CARLParser parser = new CARLParser(tokens);
            RequirementsContext requirementsContext = parser.requirements();
            ParseTreeWalker walker = new ParseTreeWalker(); // create standard walker
            CARLProgramListener extractor = new CARLProgramListener(parser);
            walker.walk(extractor, requirementsContext); // initiate walk of tree with listener
            org.eclipse.camf.carl.model.Requirements requirements = extractor.getRequirements();
            // Flatten the node's requirements, e.g. os=Ubuntu; os:ver=12.04; sw=jre:1.7.
            HashMap<String, String> allReqsOfNode = new HashMap<>();
            ArrayList<String> checkList = new ArrayList<>(); // software names to resolve scripts for
            for (IRequirement req : requirements.getRequirements()) {
                EngineLogger.logger.debug("Irequirement: " + req.toString());
                if (req.getCategory().equals(RequirementCategory.SOFTWARE)) {
                    SoftwareRequirement swr = (SoftwareRequirement) req;
                    allReqsOfNode.put("sw", removeQuote(swr.getName()));
                    allReqsOfNode.put(removeQuote(swr.getName()) + ":ver", swr.getVersion().getVersion());
                    checkList.add(swr.getName());
                } else {
                    if (req.getCategory().equals(RequirementCategory.OPERATING_SYSTEM)) {
                        // The OS part is generated by QUELLE.
                        OSRequirement osReq = (OSRequirement) req;
                        if (osReq.getName() != null) {
                            allReqsOfNode.put("os", removeQuote(osReq.getName()));
                        }
                        if (osReq.getVersion() != null) {
                            allReqsOfNode.put("os:ver", osReq.getVersion().getVersion());
                        }
                    }
                }
            }
            // Find the deployment script templates for every "sw" requirement.
            LinkedList<String> listOfScripts = new LinkedList<>();
            EngineLogger.logger.debug("The node {} will be enriched based-on the requirements: {}",
                    entry.getKey(), checkList.toString());
            for (String swReq : checkList) {
                EngineLogger.logger.debug("Searching deployment script for software req: {}", swReq);
                SalsaStackDependenciesGraph theNode = depGraph.findNodeByName(swReq);
                EngineLogger.logger.debug("Node found: {}", theNode.getName());
                EngineLogger.logger.debug("All requirements: {}", allReqsOfNode.toString());
                LinkedList<String> tmp = theNode.searchDeploymentScriptTemplate(allReqsOfNode);
                if (tmp != null) {
                    listOfScripts.addAll(tmp);
                }
            }
            EngineLogger.logger.debug(listOfScripts.toString());
            // Generate one bash script that installs all dependencies first.
            String nodeID = entry.getKey();
            String theDependencyScript = "#!/bin/bash \n\n######## Generated by the Decision Module to solve the software dependencies ######## \n\n";
            for (String appendScript : listOfScripts) {
                String theAppend = SmartDeploymentService.class.getResource("/scriptRepo/" + appendScript)
                        .getFile();
                String stringToAppend = FileUtils.readFileToString(new File(theAppend));
                theDependencyScript += stringToAppend + "\n";
            }
            theDependencyScript += "######## End of generated script ########";
            String tmpScriptFile = scriptDir + "/" + nodeID + ".salsatmp";
            // Locate the node's original deployment script via its script artifact.
            String originalScriptFile = null;
            TNodeTemplate node = ToscaStructureQuery.getNodetemplateById(nodeID, def);
            EngineLogger.logger.debug("Getting artifact template of node: {}", node.getId());
            for (TDeploymentArtifact art : node.getDeploymentArtifacts().getDeploymentArtifact()) {
                EngineLogger.logger.debug("Checking art.Name: {}, type: {}", art.getName(),
                        art.getArtifactType().getLocalPart());
                if (art.getArtifactType().getLocalPart().equals("ScriptArtifactPropertiesType")) {
                    String artTemplateID = art.getArtifactRef().getLocalPart();
                    TArtifactTemplate artTemplate = ToscaStructureQuery.getArtifactTemplateById(artTemplateID,
                            def);
                    if (artTemplate != null) {
                        originalScriptFile = artTemplate.getArtifactReferences().getArtifactReference().get(0)
                                .getReference();
                        originalScriptFile = extractedFolder + "/" + originalScriptFile;
                    }
                }
            }
            if (originalScriptFile != null) {
                // Prepend the dependency script to the original (shebang stripped),
                // keep a ".original" backup, and swap the files into place.
                String originalScript = FileUtils.readFileToString(new File(originalScriptFile));
                originalScript = originalScript.replace("#!/bin/bash", "");
                originalScript = originalScript.replace("#!/bin/sh", "");
                theDependencyScript += originalScript;
                FileUtils.writeStringToFile(new File(tmpScriptFile), theDependencyScript);
                EngineLogger.logger.debug("originalScript: {}, moveto: {}", originalScriptFile,
                        originalScriptFile + ".original");
                FileUtils.moveFile(FileUtils.getFile(originalScriptFile),
                        FileUtils.getFile(originalScriptFile + ".original"));
                FileUtils.moveFile(FileUtils.getFile(tmpScriptFile), FileUtils.getFile(originalScriptFile));
            } else {
                // TODO: there is no original script, just add new template, add tmpScript into that
            }
        } // end for each node in allRequirements analysis

        // -- Repack the CSAR into the template storage. --
        FileUtils.deleteQuietly(FileUtils.getFile(csarTmp));
        File directory = new File(extractedFolder);
        File[] fList = directory.listFiles(); // NOTE(review): unused since buildCSAR takes the folder
        String builtCSAR = SalsaConfiguration.getToscaTemplateStorage() + "/" + serviceName + ".csar";
        CSARParser.buildCSAR(extractedFolder, builtCSAR);
    } catch (IOException ex) {
        EngineLogger.logger.error("Error when enriching CSAR: " + csarTmp, ex);
        return "Error";
    } catch (JAXBException ex) {
        EngineLogger.logger.error("Cannot parse the Tosca definition in CSAR file: " + toscaFile, ex);
        return "Error";
    }
    // Return the REST URL from which the enriched CSAR can be downloaded.
    String csarURLReturn = SalsaConfiguration.getSalsaCenterEndpoint() + "/rest/smart/CAMFTosca/enrich/CSAR/"
            + serviceName;
    EngineLogger.logger.info("Enrich CSAR done. URL to download is: {}", csarURLReturn);
    return csarURLReturn;
}