List of usage examples for the java.util.EnumMap constructor
public EnumMap(Class<K> keyType)
public EnumMap(Map<K, ? extends V> m)
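Before the project examples, here is a minimal sketch of both constructor forms. The Day enum and class name are hypothetical, used only for illustration:

import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;

public class EnumMapConstructionExample {

    // Hypothetical enum used only for illustration.
    enum Day { MONDAY, TUESDAY, WEDNESDAY }

    public static void main(String[] args) {
        // Most of the examples below use this form: pass the key type's class literal.
        EnumMap<Day, String> byClass = new EnumMap<>(Day.class);
        byClass.put(Day.MONDAY, "start of week");

        // The copy constructor infers the key type from a non-empty source map.
        Map<Day, String> source = new HashMap<>();
        source.put(Day.TUESDAY, "second day");
        EnumMap<Day, String> byCopy = new EnumMap<>(source);

        System.out.println(byClass);
        System.out.println(byCopy);
    }
}

Note that the copy constructor has to infer the key type from its argument, so it throws IllegalArgumentException when given an empty map that is not itself an EnumMap.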
From source file:org.talend.camel.designer.ui.wizards.actions.JavaCamelJobScriptsExportWSAction.java
private Map<ExportChoice, Object> getExportChoice() {
    Map<ExportChoice, Object> exportChoiceMap = new EnumMap<>(ExportChoice.class);
    exportChoiceMap.put(ExportChoice.needJobItem, false);
    exportChoiceMap.put(ExportChoice.needSourceCode, false);
    exportChoiceMap.put(ExportChoice.needMetaInfo, true);
    exportChoiceMap.put(ExportChoice.needContext, true);
    exportChoiceMap.put(ExportChoice.addStatistics, addStatisticsCode);
    return exportChoiceMap;
}
From source file:org.apache.metron.pcapservice.PcapReceiverImplRestEasy.java
/**
 * Enable filtering PCAP results by fixed properties and start/end packet TS
 *
 * @param srcIp filter value
 * @param dstIp filter value
 * @param protocol filter value
 * @param srcPort filter value
 * @param dstPort filter value
 * @param startTime filter value
 * @param endTime filter value
 * @param numReducers Specify the number of reducers to use when executing the mapreduce job
 * @param includeReverseTraffic Indicates if filter should check swapped src/dest addresses and IPs
 * @param servlet_response
 * @return REST response
 * @throws IOException
 */
@GET
@Path("/pcapGetter/getPcapsByIdentifiers")
public Response getPcapsByIdentifiers(@QueryParam("srcIp") String srcIp, @QueryParam("dstIp") String dstIp,
        @QueryParam("protocol") String protocol, @QueryParam("srcPort") String srcPort,
        @QueryParam("dstPort") String dstPort, @DefaultValue("-1") @QueryParam("startTime") long startTime,
        @DefaultValue("-1") @QueryParam("endTime") long endTime,
        @DefaultValue("10") @QueryParam("numReducers") int numReducers,
        @DefaultValue("false") @QueryParam("includeReverseTraffic") boolean includeReverseTraffic,
        @Context HttpServletResponse servlet_response) throws IOException {
    if (!isValidPort(srcPort)) {
        return Response.serverError().status(Response.Status.NO_CONTENT)
                .entity("'srcPort' must not be null, empty or a non-integer").build();
    }
    if (!isValidPort(dstPort)) {
        return Response.serverError().status(Response.Status.NO_CONTENT)
                .entity("'dstPort' must not be null, empty or a non-integer").build();
    }
    final boolean includeReverseTrafficF = includeReverseTraffic;
    PcapsResponse response = new PcapsResponse();
    SequenceFileIterable results = null;
    try {
        if (startTime < 0) {
            startTime = 0L;
        }
        if (endTime < 0) {
            endTime = System.currentTimeMillis();
        }
        // convert to nanoseconds since the epoch
        startTime = TimestampConverters.MILLISECONDS.toNanoseconds(startTime);
        endTime = TimestampConverters.MILLISECONDS.toNanoseconds(endTime);
        EnumMap<Constants.Fields, String> query = new EnumMap<Constants.Fields, String>(Constants.Fields.class) {
            {
                if (srcIp != null) {
                    put(Constants.Fields.SRC_ADDR, srcIp);
                }
                if (dstIp != null) {
                    put(Constants.Fields.DST_ADDR, dstIp);
                }
                if (srcPort != null) {
                    put(Constants.Fields.SRC_PORT, srcPort);
                }
                if (dstPort != null) {
                    put(Constants.Fields.DST_PORT, dstPort);
                }
                if (protocol != null) {
                    put(Constants.Fields.PROTOCOL, protocol);
                }
                put(Constants.Fields.INCLUDES_REVERSE_TRAFFIC, "" + includeReverseTrafficF);
            }
        };
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Query received: " + Joiner.on(",").join(query.entrySet()));
        }
        results = getQueryUtil().query(new org.apache.hadoop.fs.Path(ConfigurationUtil.getPcapOutputPath()),
                new org.apache.hadoop.fs.Path(ConfigurationUtil.getTempQueryOutputPath()), startTime, endTime,
                numReducers, query, CONFIGURATION.get(), FileSystem.get(CONFIGURATION.get()),
                new FixedPcapFilter.Configurator());
        response.setPcaps(results != null ? Lists.newArrayList(results) : null);
    } catch (Exception e) {
        LOGGER.error("Exception occurred while fetching Pcaps by identifiers :", e);
        throw new WebApplicationException("Unable to fetch Pcaps via MR job", e);
    } finally {
        if (null != results) {
            results.cleanup();
        }
    }
    // return http status '200 OK' along with the complete pcaps response file, and headers
    return Response.ok(response.getPcaps(), MediaType.APPLICATION_OCTET_STREAM).status(200).build();
}
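The example above fills the query map through an instance-initializer block on an anonymous EnumMap subclass (the "double-brace" idiom). A stripped-down sketch of that idiom, using a hypothetical Field enum in place of Constants.Fields:

import java.util.EnumMap;

public class DoubleBraceEnumMapSketch {

    // Hypothetical enum standing in for Constants.Fields.
    enum Field { SRC_ADDR, DST_ADDR, PROTOCOL }

    public static void main(String[] args) {
        String srcIp = "10.0.0.1";
        String dstIp = null; // optional filter left unset

        // The braces after the constructor call define an anonymous EnumMap subclass
        // whose initializer block runs the conditional put() calls.
        EnumMap<Field, String> query = new EnumMap<Field, String>(Field.class) {
            {
                if (srcIp != null) {
                    put(Field.SRC_ADDR, srcIp);
                }
                if (dstIp != null) {
                    put(Field.DST_ADDR, dstIp);
                }
            }
        };

        System.out.println(query); // {SRC_ADDR=10.0.0.1}
    }
}

The idiom is concise but creates an extra anonymous class per use site, and when used inside an instance method it also captures the enclosing instance; plain put() calls on a local EnumMap give the same result without those costs.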
From source file:gr.abiss.calipso.domain.Metadata.java
private void init() {
    fields = new EnumMap<Field.Name, Field>(Field.Name.class);
    fieldsByLabel = new HashMap<String, Field>();
    if (logger.isDebugEnabled()) {
        for (Field f : fields.values()) {
            logger.debug("Field " + " name " + f.getName().getText() + " , description"
                    + f.getName().getDescription() + " ");
        }
    }
    roles = new HashMap<String, Role>();
    states = new TreeMap<Integer, String>();
    statesByName = new TreeMap<String, Integer>();
    statesPlugins = new TreeMap<Integer, String>();
    maxDurations = new TreeMap<Integer, Long>();
    fieldOrder = new LinkedList<Field.Name>();
    assetTypeIdMap = new TreeMap<Integer, Long>();
    existingAssetTypeIdsMap = new TreeMap<Integer, Long>();
    existingAssetTypeMultipleMap = new TreeMap<Integer, Boolean>();
    // date formats
    this.dateFormats.put(DATETIME_FORMAT_LONG, YYYY_MM_DD_HH_MM_SS);
    this.dateFormats.put(DATETIME_FORMAT_SHORT, YYYY_MM_DD_HH_MM);
    this.dateFormats.put(DATE_FORMAT_LONG, YYYY_MM_DD);
    this.dateFormats.put(DATE_FORMAT_SHORT, YYYY_MM_DD);
}
From source file:io.treefarm.plugins.haxe.TestCompileMojo.java
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    super.execute();
    if (munitCompiler.getHasRequirements()) {
        if (openflIsActive() && testTargets != null && testClasspath != null) {
            String logInfo = "Compiling tests for MassiveUnit using OpenFL ";
            logInfo += (testCoverage ? "WITH code coverage" : "WITHOUT code coverage") + ".";
            if (testDebug) {
                logInfo += "\n *** with debug, so only tests with @TestDebug will be built ***";
            }
            getLog().info(logInfo);
            Set<String> classPaths = new HashSet<String>();
            String cleanClassPathList = "";
            try {
                List<String> displayHxml = openflCompiler.displayHxml(project, testTargets.iterator().next(),
                        nmml, null, null, null);
                for (String line : displayHxml) {
                    String classPath = StringUtils.substringAfter(line, "-cp ");
                    if (classPath.length() > 0) {
                        classPaths.add(classPath);
                    }
                }
            } catch (Exception e) {
                throw new MojoFailureException("Tests compilation failed", e);
            }
            compilerFlags = new ArrayList<String>();
            compilerFlags.add("-lib munit");
            compilerFlags.add("-lib hamcrest");
            if (testCoverage && classPaths.size() > 0) {
                compilerFlags.add("-lib mcover");
                compilerFlags.add("-D MCOVER");
                /*String mCoverDirective = "--macro mcover.MCover.coverage\\([\\'\\'],[\\'";
                //String mCoverDirective = "--macro mcover.MCover.coverage([''],['";
                Iterator<String> it = classPaths.iterator();
                String classPath;
                while(it.hasNext()) {
                    classPath = it.next();
                    if (!StringUtils.contains(classPath, ",") && StringUtils.indexOf(classPath, "/") != 0) {
                        if (cleanClassPathList.length() > 0) {
                            cleanClassPathList += ",";
                        }
                        cleanClassPathList += classPath;
                    }
                }
                mCoverDirective += cleanClassPathList + "\\']\\)";
                //mCoverDirective += cleanClassPathList + "'],[''])";
                compilerFlags.add(mCoverDirective);
                getLog().info("mcover call: " + mCoverDirective);*/
            }
            compilerFlags.add("-cp " + testClasspath);
            try {
                if (testRunner == null) {
                    testRunner = TEST_RUNNER;
                }
                if (testHxml == null) {
                    testHxml = TEST_HXML;
                }
                List<String> displayHxml = openflCompiler.displayHxml(project, testTargets, nmml, compilerFlags,
                        testMain, testRunner);
                String hxmlDump = "";
                for (String hxmlLine : displayHxml) {
                    hxmlDump += hxmlLine + "\n";
                }
                File hxmlFile = new File(outputDirectory, testHxml);
                if (hxmlFile.exists()) {
                    FileUtils.deleteQuietly(hxmlFile);
                }
                hxmlFile.createNewFile();
                FileWriter fw = new FileWriter(hxmlFile.getAbsoluteFile());
                BufferedWriter bw = new BufferedWriter(fw);
                bw.write(hxmlDump);
                bw.close();
                if (testResources != null) {
                    File resourcesFile = new File(outputDirectory.getParentFile(), testResources);
                    File tmpResourcesFile = new File(outputDirectory, "tmp_resources");
                    tmpResourcesFile.mkdirs();
                    FileUtils.copyDirectory(resourcesFile, new File(tmpResourcesFile, resourcesFile.getName()));
                    testResources = tmpResourcesFile.getAbsolutePath();
                }
                if (testBinPath == null) {
                    testBinPath = TEST_BIN_PATH;
                }
                File testBinFile = new File(outputDirectory, testBinPath);
                testBinPath = testBinFile.getAbsolutePath();
                munitCompiler.config(testClasspath, testBinPath, testBinPath, cleanClassPathList,
                        hxmlFile.getAbsolutePath(), testResources, testTemplates);
                openflCompiler.initialize(debug, verbose, false, false, true, testDebug);
                openflCompiler.compile(project, testTargets, nmml, compilerFlags, testMain, testRunner, true,
                        false, false);
            } catch (Exception e) {
                throw new MojoFailureException("Tests compilation failed", e);
            }
        } else {
            getLog().info("Compiling tests using MassiveUnit.");
            try {
                munitCompiler.initialize(testDebug, false);
                munitCompiler.setOutputDirectory(outputDirectory);
                munitCompiler.compile(project, null);
            } catch (Exception e) {
                throw new MojoFailureException("Tests compilation failed", e);
            }
        }
    } else {
        getLog().info("Compiling tests using standard Haxe unit testing.");
        if (testRunner == null || project.getTestCompileSourceRoots().size() == 0) {
            getLog().info("No test sources to compile");
            return;
        }
        String output = OutputNamesHelper.getTestOutput(project);
        EnumMap<CompileTarget, String> targets = new EnumMap<CompileTarget, String>(CompileTarget.class);
        targets.put(CompileTarget.neko, output);
        try {
            haxeCompiler.compile(project, targets, testRunner, true, true, verbose);
        } catch (Exception e) {
            throw new MojoFailureException("Tests compilation failed", e);
        }
    }
}
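In the final branch the mojo maps each compile target to an output path with an EnumMap keyed by CompileTarget. A small sketch of that pattern, using a hypothetical CompileTarget enum (the real plugin's enum may declare different constants):

import java.util.EnumMap;
import java.util.Map;

public class TargetOutputMapSketch {

    // Hypothetical enum standing in for the plugin's CompileTarget.
    enum CompileTarget { neko, js, cpp }

    public static void main(String[] args) {
        EnumMap<CompileTarget, String> targets = new EnumMap<>(CompileTarget.class);
        targets.put(CompileTarget.cpp, "out/cpp");
        targets.put(CompileTarget.neko, "out/test.n");

        // EnumMap iterates in the enum's declaration order (neko before cpp here),
        // regardless of insertion order.
        for (Map.Entry<CompileTarget, String> entry : targets.entrySet()) {
            System.out.println(entry.getKey() + " -> " + entry.getValue());
        }
    }
}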
From source file:fr.free.movierenamer.scrapper.impl.movie.TracktScrapper.java
private void addCast(List<CastingInfo> casting, String path, JSONObject json, String job) {
    for (JSONObject cast : JSONUtils.selectList(path, json)) {
        Map<CastingInfo.PersonProperty, String> personFields = new EnumMap<CastingInfo.PersonProperty, String>(
                CastingInfo.PersonProperty.class);
        personFields.put(CastingInfo.PersonProperty.name, JSONUtils.selectString("name", cast));
        if (job.equals(CastingInfo.ACTOR)) {
            personFields.put(CastingInfo.PersonProperty.character, JSONUtils.selectString("character", cast));
        }
        JSONObject image = JSONUtils.selectObject("images", cast);
        ImageInfo imgInfo = null;
        if (image != null) {
            String img = JSONUtils.selectString("headshot", image);
            if (img != null && !img.equals(NOIMAGE)) {
                Map<ImageInfo.ImageProperty, String> fields = new HashMap<ImageInfo.ImageProperty, String>();
                fields.put(ImageInfo.ImageProperty.url, JSONUtils.selectString("headshot", image));
                int id = fields.get(ImageInfo.ImageProperty.url).hashCode();
                imgInfo = new ImageInfo(id, fields, ImageCategoryProperty.actor);
            }
        }
        personFields.put(CastingInfo.PersonProperty.job, job);
        casting.add(new CastingInfo(personFields, imgInfo));
    }
}
From source file:org.apache.metron.dataloads.nonbulk.flatfile.SimpleFlatFileSummarizerTest.java
public void testWholeFile(final int numThreads) throws IOException, InvalidWriterOutput {
    ExtractorHandler handler = ExtractorHandler.load(stellarExtractorConfigWholeFile);
    LocalSummarizer summarizer = new MockSummarizer(new HashMap<String, String>() {
        {
            for (String domain : domains) {
                put(domain, "1," + domain);
            }
        }
    });
    final AtomicReference<Object> finalObj = new AtomicReference<>(null);
    EnumMap<SummarizeOptions, Optional<Object>> options = new EnumMap<SummarizeOptions, Optional<Object>>(
            SummarizeOptions.class) {
        {
            put(SummarizeOptions.INPUT, Optional.of("."));
            put(SummarizeOptions.BATCH_SIZE, Optional.of(5));
            put(SummarizeOptions.QUIET, Optional.of(true));
            put(SummarizeOptions.OUTPUT_MODE, Optional.of(new PeekingWriter(finalObj)));
            put(SummarizeOptions.OUTPUT, Optional.of("out"));
            put(SummarizeOptions.NUM_THREADS, Optional.of(numThreads));
        }
    };
    summarizer.importData(options, handler, new Configuration());
    String expr = "MAP_GET(DOMAIN_REMOVE_TLD(domain), s) > 0";
    for (String domain : domains) {
        Boolean b = (Boolean) StellarProcessorUtils.run(expr,
                ImmutableMap.of("s", finalObj.get(), "domain", domain));
        Assert.assertTrue("Can't find " + domain, b);
    }
}
From source file:org.whitesource.agent.hash.HashCalculator.java
/**
 * Removes all JavaScript header comments from the file and calculates SHA-1 checksum.
 *
 * @param byteArray to calculate
 * @return Calculated SHA-1 for the given file.
 */
public Map<ChecksumType, String> calculateJavaScriptHashes(byte[] byteArray) throws WssHashException {
    Map<ChecksumType, String> checksums = new EnumMap<>(ChecksumType.class);
    try {
        String fileContent = IOUtils.toString(byteArray, UTF_8);
        ParseResult parseResult = new JavaScriptParser().parse(fileContent);
        if (parseResult != null) {
            // no comments
            String contentWithoutComments = parseResult.getContentWithoutComments();
            if (StringUtils.isNotBlank(contentWithoutComments)) {
                HashCalculationResult noCommentsSha1 = calculateSuperHash(contentWithoutComments.getBytes());
                if (noCommentsSha1 != null) {
                    checksums.put(ChecksumType.SHA1_NO_COMMENTS_SUPER_HASH, noCommentsSha1.getFullHash());
                }
            }
            // no headers
            String headerlessContent = parseResult.getContentWithoutHeaderComments();
            if (StringUtils.isNotBlank(headerlessContent)) {
                String headerlessChecksum = calculateByteArrayHash(headerlessContent.getBytes(),
                        HashAlgorithm.SHA1);
                checksums.put(ChecksumType.SHA1_NO_HEADER, headerlessChecksum);
            }
        }
    } catch (Exception e) {
        throw new WssHashException("Error calculating JavaScript hash: " + e.getMessage());
    }
    return checksums;
}
From source file:eu.ggnet.dwoss.report.ReportAgentBean.java
@Override
public ViewReportResult prepareReport(ReportParameter p, boolean loadUnreported) {
    attachDanglingComplaints(p.getContractor(), p.getEnd());
    List<ReportLine> findUnreportedUnits = reportLineEao.findUnreportedUnits(p.getContractor(),
            (loadUnreported) ? null : p.getStart(), p.getEnd());
    EnumMap<ViewReportResult.Type, NavigableSet<ReportLine>> lines = new EnumMap<>(ViewReportResult.Type.class);
    PrepareReportPartition unitPartition = partition(findUnreportedUnits, p.getContractor());
    lines.put(ACTIVE_INFO, unitPartition.getActiveInfo());
    lines.put(REPORT_INFO, filterReportInfo(unitPartition.getReportAble()));
    lines.put(REPAYMENTS, filterRepayed(unitPartition.getReportAble()));
    switch (p.getViewMode()) {
    case DEFAULT:
        lines.put(INVOICED, filterInvoiced(unitPartition.getReportAble()));
        break;
    case YEARSPLITT_AND_WARRANTIES:
        YearSplit filterInvoicedSplit = filterInvoicedSplit(unitPartition.getReportAble(), p.getStart());
        lines.put(PAST_ONE_YEAR, filterInvoicedSplit.getAfter());
        lines.put(UNDER_ONE_YEAR, filterInvoicedSplit.getBefore());
        PrepareReportPartition warrantyPartition = partition(
                filterWarrenty(reportLineEao.findUnreportedWarrentys(), unitPartition.getReportAble()),
                p.getContractor());
        lines.put(WARRENTY, filterInvoiced(warrantyPartition.getReportAble()));
        lines.get(ACTIVE_INFO).addAll(warrantyPartition.getActiveInfo());
        lines.get(REPAYMENTS).addAll(filterRepayed(warrantyPartition.getReportAble()));
        lines.get(REPORT_INFO).addAll(filterReportInfo(warrantyPartition.getReportAble()));
        break;
    }
    ViewReportResult viewReportResult = new ViewReportResult(lines, p);
    viewReportResult.getAllLines().stream().forEach((allLine) -> reportEm.detach(allLine));
    if (!marginCalculator.isUnsatisfied())
        marginCalculator.get().recalc(viewReportResult);
    return viewReportResult;
}
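prepareReport collects report lines into per-type buckets held in an EnumMap of NavigableSets. A compact sketch of that grouping pattern, assuming a hypothetical Bucket enum and plain strings in place of ReportLine:

import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;

public class EnumBucketingSketch {

    // Hypothetical bucket enum standing in for ViewReportResult.Type.
    enum Bucket { ACTIVE_INFO, REPORT_INFO, REPAYMENTS }

    public static void main(String[] args) {
        Map<Bucket, TreeSet<String>> lines = new EnumMap<>(Bucket.class);

        // computeIfAbsent lazily creates the per-bucket set before adding to it,
        // instead of pre-populating every key as the report example does.
        for (String line : List.of("a", "b", "c")) {
            Bucket bucket = line.equals("a") ? Bucket.ACTIVE_INFO : Bucket.REPORT_INFO;
            lines.computeIfAbsent(bucket, k -> new TreeSet<>()).add(line);
        }

        System.out.println(lines);
    }
}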
From source file:org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.java
long loadEditRecords(EditLogInputStream in, boolean closeOnExit, long expectedStartingTxId,
        StartupOption startOpt, MetaRecoveryContext recovery) throws IOException {
    FSDirectory fsDir = fsNamesys.dir;
    EnumMap<FSEditLogOpCodes, Holder<Integer>> opCounts = new EnumMap<FSEditLogOpCodes, Holder<Integer>>(
            FSEditLogOpCodes.class);
    if (LOG.isTraceEnabled()) {
        LOG.trace("Acquiring write lock to replay edit log");
    }
    fsNamesys.writeLock();
    fsDir.writeLock();
    long recentOpcodeOffsets[] = new long[4];
    Arrays.fill(recentOpcodeOffsets, -1);
    long expectedTxId = expectedStartingTxId;
    long numEdits = 0;
    long lastTxId = in.getLastTxId();
    long numTxns = (lastTxId - expectedStartingTxId) + 1;
    StartupProgress prog = NameNode.getStartupProgress();
    Step step = createStartupProgressStep(in);
    prog.setTotal(Phase.LOADING_EDITS, step, numTxns);
    Counter counter = prog.getCounter(Phase.LOADING_EDITS, step);
    long lastLogTime = monotonicNow();
    long lastInodeId = fsNamesys.dir.getLastInodeId();
    try {
        while (true) {
            try {
                FSEditLogOp op;
                try {
                    op = in.readOp();
                    if (op == null) {
                        break;
                    }
                } catch (Throwable e) {
                    // Handle a problem with our input
                    check203UpgradeFailure(in.getVersion(true), e);
                    String errorMessage = formatEditLogReplayError(in, recentOpcodeOffsets, expectedTxId);
                    FSImage.LOG.error(errorMessage, e);
                    if (recovery == null) {
                        // We will only try to skip over problematic opcodes when in
                        // recovery mode.
                        throw new EditLogInputException(errorMessage, e, numEdits);
                    }
                    MetaRecoveryContext.editLogLoaderPrompt("We failed to read txId " + expectedTxId, recovery,
                            "skipping the bad section in the log");
                    in.resync();
                    continue;
                }
                recentOpcodeOffsets[(int) (numEdits % recentOpcodeOffsets.length)] = in.getPosition();
                if (op.hasTransactionId()) {
                    if (op.getTransactionId() > expectedTxId) {
                        MetaRecoveryContext.editLogLoaderPrompt(
                                "There appears " + "to be a gap in the edit log. We expected txid "
                                        + expectedTxId + ", but got txid " + op.getTransactionId() + ".",
                                recovery, "ignoring missing " + " transaction IDs");
                    } else if (op.getTransactionId() < expectedTxId) {
                        MetaRecoveryContext.editLogLoaderPrompt(
                                "There appears " + "to be an out-of-order edit in the edit log. We "
                                        + "expected txid " + expectedTxId + ", but got txid "
                                        + op.getTransactionId() + ".",
                                recovery, "skipping the out-of-order edit");
                        continue;
                    }
                }
                try {
                    if (LOG.isTraceEnabled()) {
                        LOG.trace("op=" + op + ", startOpt=" + startOpt + ", numEdits=" + numEdits
                                + ", totalEdits=" + totalEdits);
                    }
                    long inodeId = applyEditLogOp(op, fsDir, startOpt, in.getVersion(true), lastInodeId);
                    if (lastInodeId < inodeId) {
                        lastInodeId = inodeId;
                    }
                } catch (RollingUpgradeOp.RollbackException e) {
                    throw e;
                } catch (Throwable e) {
                    LOG.error("Encountered exception on operation " + op, e);
                    if (recovery == null) {
                        throw e instanceof IOException ? (IOException) e : new IOException(e);
                    }
                    MetaRecoveryContext.editLogLoaderPrompt(
                            "Failed to " + "apply edit log operation " + op + ": error " + e.getMessage(),
                            recovery, "applying edits");
                }
                // Now that the operation has been successfully decoded and
                // applied, update our bookkeeping.
                incrOpCount(op.opCode, opCounts, step, counter);
                if (op.hasTransactionId()) {
                    lastAppliedTxId = op.getTransactionId();
                    expectedTxId = lastAppliedTxId + 1;
                } else {
                    expectedTxId = lastAppliedTxId = expectedStartingTxId;
                }
                // log progress
                if (op.hasTransactionId()) {
                    long now = monotonicNow();
                    if (now - lastLogTime > REPLAY_TRANSACTION_LOG_INTERVAL) {
                        long deltaTxId = lastAppliedTxId - expectedStartingTxId + 1;
                        int percent = Math.round((float) deltaTxId / numTxns * 100);
                        LOG.info("replaying edit log: " + deltaTxId + "/" + numTxns
                                + " transactions completed. (" + percent + "%)");
                        lastLogTime = now;
                    }
                }
                numEdits++;
                totalEdits++;
            } catch (RollingUpgradeOp.RollbackException e) {
                LOG.info("Stopped at OP_START_ROLLING_UPGRADE for rollback.");
                break;
            } catch (MetaRecoveryContext.RequestStopException e) {
                MetaRecoveryContext.LOG
                        .warn("Stopped reading edit log at " + in.getPosition() + "/" + in.length());
                break;
            }
        }
    } finally {
        fsNamesys.dir.resetLastInodeId(lastInodeId);
        if (closeOnExit) {
            in.close();
        }
        fsDir.writeUnlock();
        fsNamesys.writeUnlock();
        if (LOG.isTraceEnabled()) {
            LOG.trace("replaying edit log finished");
        }
        if (FSImage.LOG.isDebugEnabled()) {
            dumpOpCounts(opCounts);
        }
    }
    return numEdits;
}
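loadEditRecords tallies how often each opcode appears by calling the incrOpCount helper against the opCounts EnumMap. A minimal, self-contained sketch of the same counting idea, with a hypothetical OpCode enum and plain Integer counts instead of Hadoop's Holder<Integer>:

import java.util.EnumMap;
import java.util.List;
import java.util.Map;

public class OpCountSketch {

    // Hypothetical opcode enum standing in for FSEditLogOpCodes.
    enum OpCode { OP_ADD, OP_DELETE, OP_RENAME }

    public static void main(String[] args) {
        Map<OpCode, Integer> opCounts = new EnumMap<>(OpCode.class);

        // merge() initializes a missing entry to 1 and otherwise adds 1,
        // playing the same role as the incrOpCount helper in the example above.
        for (OpCode op : List.of(OpCode.OP_ADD, OpCode.OP_ADD, OpCode.OP_DELETE)) {
            opCounts.merge(op, 1, Integer::sum);
        }

        System.out.println(opCounts); // {OP_ADD=2, OP_DELETE=1}
    }
}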
From source file:org.openecomp.sdc.be.externalapi.servlet.AssetsDataServlet.java
@GET
@Path("/{assetType}/{uuid}/metadata")
@Produces(MediaType.APPLICATION_JSON)
@ApiOperation(value = "Fetch metadata of asset by uuid", httpMethod = "GET", notes = "Returns metadata of asset", response = Response.class)
@ApiResponses(value = { @ApiResponse(code = 200, message = "Assets Fetched"),
        @ApiResponse(code = 401, message = "Authorization required"),
        @ApiResponse(code = 403, message = "Restricted operation"),
        @ApiResponse(code = 404, message = "Asset not found") })
public Response getAssetListByUuid(@PathParam("assetType") final String assetType,
        @PathParam("uuid") final String uuid, @Context final HttpServletRequest request) {
    Response response = null;
    ResponseFormat responseFormat = null;
    String instanceIdHeader = request.getHeader(Constants.X_ECOMP_INSTANCE_ID_HEADER);
    AuditingActionEnum auditingActionEnum = AuditingActionEnum.GET_ASSET_METADATA;
    String requestURI = request.getRequestURI();
    String url = request.getMethod() + " " + requestURI;
    log.debug("Start handle request of {}", url);
    String serverBaseURL = request.getRequestURL().toString();
    EnumMap<AuditingFieldsKeysEnum, Object> additionalParam = new EnumMap<AuditingFieldsKeysEnum, Object>(
            AuditingFieldsKeysEnum.class);
    ComponentTypeEnum componentType = ComponentTypeEnum.findByParamName(assetType);
    additionalParam.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_CONSUMER_ID, instanceIdHeader);
    additionalParam.put(AuditingFieldsKeysEnum.AUDIT_DISTRIBUTION_RESOURCE_URL, requestURI);
    additionalParam.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_TYPE, componentType.getValue());
    additionalParam.put(AuditingFieldsKeysEnum.AUDIT_SERVICE_INSTANCE_ID, uuid);
    // Mandatory
    if (instanceIdHeader == null || instanceIdHeader.isEmpty()) {
        log.debug("getAssetList: Missing X-ECOMP-InstanceID header");
        responseFormat = getComponentsUtils().getResponseFormat(ActionStatus.MISSING_X_ECOMP_INSTANCE_ID);
        getComponentsUtils().auditExternalGetAsset(responseFormat, auditingActionEnum, additionalParam);
        return buildErrorResponse(responseFormat);
    }
    try {
        ServletContext context = request.getSession().getServletContext();
        ElementBusinessLogic elementLogic = getElementBL(context);
        getAssetUtils(context);
        Either<List<? extends Component>, ResponseFormat> assetTypeData = elementLogic
                .getCatalogComponentsByUuidAndAssetType(assetType, uuid);
        if (assetTypeData.isRight()) {
            log.debug("getAssetList: Asset Fetching Failed");
            responseFormat = assetTypeData.right().value();
            getComponentsUtils().auditExternalGetAsset(responseFormat, auditingActionEnum, additionalParam);
            return buildErrorResponse(responseFormat);
        } else {
            log.debug("getAssetList: Asset Fetching Success");
            additionalParam.put(AuditingFieldsKeysEnum.AUDIT_RESOURCE_NAME,
                    assetTypeData.left().value().iterator().next().getName());
            Either<List<? extends AssetMetadata>, ResponseFormat> resMetadata = assetMetadataUtils
                    .convertToAssetMetadata(assetTypeData.left().value(), serverBaseURL, true);
            if (resMetadata.isRight()) {
                log.debug("getAssetList: Asset conversion Failed");
                responseFormat = resMetadata.right().value();
                getComponentsUtils().auditExternalGetAsset(responseFormat, auditingActionEnum, additionalParam);
                return buildErrorResponse(responseFormat);
            }
            Object result = RepresentationUtils.toRepresentation(resMetadata.left().value().iterator().next());
            responseFormat = getComponentsUtils().getResponseFormat(ActionStatus.OK);
            getComponentsUtils().auditExternalGetAsset(responseFormat, auditingActionEnum, additionalParam);
            response = buildOkResponse(responseFormat, result);
            return response;
        }
    } catch (Exception e) {
        BeEcompErrorManager.getInstance().logBeRestApiGeneralError("Fetch filtered list of assets");
        log.debug("getAssetList: Fetch list of assets failed with exception", e);
        return buildErrorResponse(getComponentsUtils().getResponseFormat(ActionStatus.GENERAL_ERROR));
    }
}