Example usage for java.util Map getClass

List of usage examples for java.util Map getClass

Introduction

On this page you can find example usage of the java.util.Map getClass method.

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Source Link

Document

Returns the runtime class of this Object.

Usage

From source file:org.apache.openjpa.util.ProxyManagerImpl.java

/**
 * Returns a copy of the given map. Null maps copy to null; an already-proxied
 * map is copied through its own proxy, otherwise a factory proxy for the
 * map's concrete class performs the copy.
 */
public Map copyMap(Map orig) {
    if (orig == null) {
        return null;
    }
    if (orig instanceof Proxy) {
        Proxy existing = (Proxy) orig;
        return (Map) existing.copy(orig);
    }
    ProxyMap factoryProxy = getFactoryProxyMap(orig.getClass());
    return (Map) factoryProxy.copy(orig);
}

From source file:net.firejack.platform.core.cache.CacheProcessor.java

/**
 * Looks up the per-user id-filter map in the cache. When no map is cached,
 * or the cached value is the shared immutable Collections.emptyMap()
 * singleton, a fresh mutable map is created and registered for the user.
 */
private Map<String, IdFilter> getUserIdFilters(Long userId) {
    CacheManager manager = CacheManager.getInstance();
    Map<String, IdFilter> filters = manager.getIdFiltersForUser(userId);
    boolean cachedEmptySingleton = filters != null
            && filters.getClass().equals(Collections.emptyMap().getClass());
    if (filters == null || cachedEmptySingleton) {
        filters = new HashMap<String, IdFilter>();
        manager.setIdFiltersForUser(userId, filters);
    }
    return filters;
}

From source file:org.cloudgraph.cassandra.service.GraphDispatcher.java

/**
 * Attempts to lock the given datastore entity. If the given entity has no locking data or
 * the entity is already locked by the given user, the lock is refreshed. Otherwise the lock is
 * overwritten (slammed) if another user has an expired lock. If another user has a current
 * lock on the given datastore entity, a LockedEntityException is thrown.
 *
 * @param dataObject - the value object
 * @param entity - the datastore entity
 * @param lockedDateProperty - the last locked date property definition metadata
 * @param lockedByNameProperty - the last locked by name property definition metadata
 * @param snapshotDate - the query snapshot date
 * @throws LockedEntityException if another user holds an unexpired lock
 */
private void lock(PlasmaDataObject dataObject, Map<String, PropertyPair> entity,
        PlasmaProperty lockedDateProperty, PlasmaProperty lockedByNameProperty, Timestamp snapshotDate)
        throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    if (lockedDateProperty == null || lockedByNameProperty == null)
        return; // entity type carries no locking metadata - nothing to do

    PropertyPair lockedDatePair = entity.get(lockedDateProperty.getName());
    PropertyPair lockedByNamePair = entity.get(lockedByNameProperty.getName());
    String lockedByName = (String) lockedByNamePair.getValue();
    Date lockedDate = (Date) lockedDatePair.getValue();
    CoreHelper.unflagLocked(dataObject);

    if (lockedByName == null || username.equals(lockedByName)) {
        // no existing lock, or the current user already holds it - (re)acquire
        if (log.isDebugEnabled()) {
            log.debug("locking " + entity.getClass().getSimpleName() + " (" + dataObject.getUUIDAsString()
                    + ")");
        }
        applyLock(entity, lockedDateProperty, lockedByNameProperty);
    } else {
        // another user holds the lock; slam it only if it has expired
        long timeout = 300000L; // default pessimistic lock timeout: 5 minutes
        DataAccessProvider providerConf = PlasmaConfig.getInstance()
                .getDataAccessProvider(DataAccessProviderName.JDBC);
        if (providerConf.getConcurrency() != null
                && providerConf.getConcurrency().getPessimisticLockTimeoutMillis() > 0)
            timeout = providerConf.getConcurrency().getPessimisticLockTimeoutMillis();
        if (snapshotDate.getTime() - lockedDate.getTime() > timeout) {
            // existing lock expired - overwrite it
            if (log.isDebugEnabled()) {
                log.debug(
                        "locking " + entity.getClass().getSimpleName() + " (" + dataObject.getUUIDAsString()
                                + ") - existing lock by '" + lockedByName + "' expired");
            }
            applyLock(entity, lockedDateProperty, lockedByNameProperty);
        } else {
            if (log.isWarnEnabled()) {
                log.warn("could not issue lock for user '" + String.valueOf(username)
                        + "' for snapshot date " + String.valueOf(snapshotDate));
            }
            throw new LockedEntityException(entity.getClass().getSimpleName(), lockedByName, lockedDate);
        }
    }
}

/** Writes the current user and snapshot date into the entity's lock properties. */
private void applyLock(Map<String, PropertyPair> entity, PlasmaProperty lockedDateProperty,
        PlasmaProperty lockedByNameProperty) {
    entity.put(lockedByNameProperty.getName(), new PropertyPair(lockedByNameProperty, username));
    entity.put(lockedDateProperty.getName(),
            new PropertyPair(lockedDateProperty, this.snapshotMap.getSnapshotDate()));
}

From source file:org.cloudgraph.rdb.service.GraphDispatcher.java

/**
 * Attempts to lock the given datastore entity. If the given entity has no
 * locking data or the entity is already locked by the given user, the lock is
 * refreshed. Otherwise the lock is overwritten (slammed) if another user has
 * an expired lock. If another user has a current lock on the given datastore
 * entity, a LockedEntityException is thrown.
 *
 * @param entity
 *          - the datastore entity
 * @param dataObject
 *          - the value object
 * @param lockedDateProperty
 *          - the last locked date property definition metadata
 * @param lockedByNameProperty
 *          - the last locked by name property definition metadata
 * @param snapshotDate
 *          - the query snapshot date
 */
private void lock(PlasmaDataObject dataObject, Map<String, PropertyPair> entity,
        PlasmaProperty lockedDateProperty, PlasmaProperty lockedByNameProperty, Timestamp snapshotDate)
        throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    if (lockedDateProperty != null && lockedByNameProperty != null) {
        PropertyPair lockedDatePair = entity.get(lockedDateProperty.getName());
        PropertyPair lockedByNamePair = entity.get(lockedByNameProperty.getName());
        String lockedByName = (String) lockedByNamePair.getValue();
        Date lockedDate = (Date) lockedDatePair.getValue();
        CoreHelper.unflagLocked(dataObject);
        if (lockedByName == null || username.equals(lockedByName)) // no lock, or same user holds it
        {
            if (log.isDebugEnabled()) {
                log.debug("locking " + entity.getClass().getSimpleName() + " (" + dataObject.getUUIDAsString()
                        + ")");
            }
            entity.put(lockedByNameProperty.getName(), new PropertyPair(lockedByNameProperty, username));
            entity.put(lockedDateProperty.getName(),
                    new PropertyPair(lockedDateProperty, this.snapshotMap.getSnapshotDate()));
        } else // another user has an existing or expired lock
        {
            // default pessimistic lock timeout (5 minutes) unless configured
            long timeout = 300000L;
            DataAccessProvider providerConf = PlasmaRuntime.getInstance()
                    .getDataAccessProvider(DataAccessProviderName.JDBC);
            if (providerConf.getConcurrency() != null)
                if (providerConf.getConcurrency().getPessimisticLockTimeoutMillis() > 0)
                    timeout = providerConf.getConcurrency().getPessimisticLockTimeoutMillis();
            if (snapshotDate.getTime() - lockedDate.getTime() > timeout) // existing lock expired
            {
                if (log.isDebugEnabled()) {
                    log.debug(
                            "locking " + entity.getClass().getSimpleName() + " (" + dataObject.getUUIDAsString()
                                    + ") - existing lock by '" + lockedByName + "' expired");
                }
                entity.put(lockedByNameProperty.getName(), new PropertyPair(lockedByNameProperty, username));
                entity.put(lockedDateProperty.getName(),
                        new PropertyPair(lockedDateProperty, this.snapshotMap.getSnapshotDate()));
            } else {
                // current unexpired lock held by another user - refuse
                if (log.isWarnEnabled()) {
                    log.warn("could not issue lock for user '" + String.valueOf(username)
                            + "' for snapshot date " + String.valueOf(snapshotDate));
                }
                throw new LockedEntityException(entity.getClass().getSimpleName(), lockedByName, lockedDate);
            }
        }
    }
}

From source file:com.datafibers.kafka.connect.SchemaedFileSourceTask.java

/**
 * Polls the configured input file (or stdin) and converts all currently
 * available JSON or CSV records into Kafka Connect SourceRecords.
 *
 * @return the list of new SourceRecords, or null when no data is available yet
 * @throws InterruptedException if the wait between retries is interrupted
 * @throws ConnectException on EOF when getExceptionOnEof() is set, or on any
 *         unexpected failure while reading records
 */
private List<SourceRecord> pollFromFile() throws InterruptedException {
    log.trace("pollFromFile");
    CsvSchema bootstrapCsv;
    CsvMapper csvMapper = new CsvMapper();
    ObjectMapper jsonMapper = new ObjectMapper();
    MappingIterator<Map<?, ?>> mappingIterator;
    ArrayList<SourceRecord> records = null;
    long currentTime = System.currentTimeMillis();
    long recordsPerPoll;

    // TODO: Improve ExceptionOnEof logic.
    // The code below only works when each pass through
    // poll() reads all available records (not a given).
    if (config.getExceptionOnEof() && streamOffset != null) {
        throw new ConnectException("No more data available on FileInputStream");
    }

    // Initialize the bootstrapCsv schema if necessary
    if (recordSchema == null || inputType.equalsIgnoreCase("json")) {
        log.trace("Constructing csvSchema from emptySchema");
        bootstrapCsv = config.getCsvHeaders() ? CsvSchema.emptySchema().withHeader()
                : CsvSchema.emptySchema().withoutHeader();
    } else {
        // We've seen a schema, so we'll assume headers from the recordSchema
        log.trace("Constructing csvSchema from recordSchema");
        CsvSchema.Builder builder = new CsvSchema.Builder();
        builder.setUseHeader(false);
        builder.setColumnSeparator(',');
        for (Field f : recordSchema.fields()) {
            log.trace("adding column {}", f.name());
            builder.addColumn(f.name());
        }
        bootstrapCsv = builder.build();
    }
    try {
        if (stream == null)
            openFileStream();
        if (reader == null)
            reader = new BufferedReader(new InputStreamReader(stream));

        if (inputType.equalsIgnoreCase("json")) {
            mappingIterator = jsonMapper.readerFor(Map.class).readValues(reader);
        } else if (inputType.equalsIgnoreCase("csv")) {
            mappingIterator = csvMapper.readerWithSchemaFor(Map.class).with(bootstrapCsv).readValues(reader);
        } else {
            log.error("Unsupported file input type specified ({})", inputType);
            return null;
        }
    } catch (FileNotFoundException fnf) {
        log.warn("Couldn't find file {} for SchemaedFileSourceTask, sleeping to wait for it to be created",
                logFilename());
        synchronized (this) {
            this.wait(1000);
        }
        return null;
    } catch (IOException e) {
        // IOException thrown when no more records in stream
        log.warn("Processed all available data from {}; sleeping to wait additional records", logFilename());
        // Close reader and stream; swallowing close failures - we retry on the next poll anyway
        try {
            reader.close();
        } catch (Exception ignored) {
            // best-effort close
        } finally {
            reader = null;
        }

        if (stream != System.in) {
            try {
                stream.close();
            } catch (Exception ignored) {
                // best-effort close
            } finally {
                stream = null;
            }
        }

        synchronized (this) {
            this.wait(1000);
        }
        return null;
    }
    log.debug("mappingIterator of type {} created; begin reading data file",
            mappingIterator.getClass().toString());

    // The csvMapper class is really screwy; can't figure out why it
    // won't return a rational Schema ... so we'll extract it from
    // the first object later.
    if (recordSchema == null && inputType.equalsIgnoreCase("csv") && csvMapper.schema().size() > 0) {
        recordSchema = ConvertMappingSchema(csvMapper.schemaWithHeader());
        log.trace("recordSchema created from csvMapper; type {}", recordSchema.type().toString());
    }
    try {
        FileInputStream fstream = (FileInputStream) stream;
        Long lastElementOffset = streamOffset;
        // NOTE(review): recordsPerPoll is decremented but never used to limit the
        // loop below - confirm whether a per-poll record cap was intended
        recordsPerPoll = 3;

        while (mappingIterator.hasNext()) {
            Map<?, ?> element = mappingIterator.next();
            Long elementOffset, iteratorOffset;
            recordCount++;
            recordsPerPoll--;

            iteratorOffset = mappingIterator.getCurrentLocation().getByteOffset(); // never works !!!
            if (iteratorOffset < 0) {
                // The stream channel will CLOSE on the last clean record
                // seen by the mapping iterator, so we have to be careful here.
                // Additionally, when parsing CSV files, there seem to be a
                // lot of Bad File Descriptor errors; ignore them.
                try {
                    elementOffset = fstream.getChannel().position();
                } catch (java.nio.channels.ClosedChannelException e) {
                    log.trace("getChannel.position threw {}", e.toString());
                    elementOffset = lastElementOffset;
                } catch (IOException e) {
                    log.trace("getChannel.position threw {}", e.toString());
                    elementOffset = lastElementOffset;
                }
            } else {
                log.trace("mappingIterator.getCurrentLocation() returns {}", iteratorOffset.toString());
                elementOffset = iteratorOffset;
            }
            log.trace("Next input record: {} (class {}) from file position {}", element.toString(),
                    element.getClass().toString(), elementOffset.toString());

            if (recordSchema == null) {
                recordSchema = ConvertMappingSchema(element.keySet());
                log.trace("recordSchema created from element; type {}", recordSchema.type().toString());
            }

            if (records == null)
                records = new ArrayList<>();
            records.add(new SourceRecord(offsetKey(filename), offsetValue(elementOffset), topic, recordSchema,
                    ConvertMappingElement(recordSchema, (HashMap<?, ?>) element)));
            streamOffset = lastElementOffset = elementOffset;
        }
    } catch (Exception e) {
        throw new ConnectException(e);
    }

    lastPollTime = currentTime;
    return records;
}

From source file:com.clican.pluto.dataprocess.engine.processes.DplExecProcessor.java

/**
 * Executes the configured DPL statement and publishes its result into the
 * processor context under {@code resultName}. When {@code clazz} is null the
 * statement returns generic row maps, otherwise typed beans; in either case a
 * null result is stored as null, and in {@code singleRow} mode only the first
 * row (or null when empty) is stored. Requested trace columns are written to
 * the trace log when enabled.
 *
 * @param context the processor execution context
 * @throws DataProcessException wrapping any failure, with the DPL text attached
 */
public void process(ProcessorContext context) throws DataProcessException {
    log.trace("start processing dpl expression(" + this.getId() + ") ...");

    try {
        if (this.clazz == null) {
            List<Map<String, Object>> result = dplStatement.execute(dpl, context);

            if (result == null) {
                // fixed guard: this is a warn-level message, so gate on isWarnEnabled
                if (log.isWarnEnabled()) {
                    log.warn("dpl set attribute[" + resultName + "] to null(returned by statement)");
                }
                context.setAttribute(resultName, null);
                return;
            }

            if (log.isDebugEnabled()) {
                log.debug("dpl set attribute[" + resultName + "],size=" + result.size());
            }
            logTraces(result);
            storeResult(context, result);
        } else {
            List<T> result = dplStatement.execute(dpl, context, clazz);

            if (result == null) {
                // fixed guard: this is a warn-level message, so gate on isWarnEnabled
                if (log.isWarnEnabled()) {
                    log.warn("set attribute[" + resultName + "] to null(returned by statement)");
                }
                context.setAttribute(resultName, null);
                return;
            }

            if (log.isDebugEnabled()) {
                log.debug("set attribute[" + resultName + "],size=" + result.size());
            }
            logTraces(result);
            storeResult(context, result);
        }
    } catch (Exception e) {
        throw new DataProcessException("dpl=[" + dpl + "]", e);
    } finally {
        log.trace("finish processing dpl expression(" + this.getId() + ")");
    }

}

/**
 * Writes the configured trace columns of each result row to the trace log,
 * one bracketed value per column and one line per row; Date values are
 * rendered as yyyy-MM-dd. No-op unless trace logging is enabled and traces
 * are configured.
 */
private void logTraces(List<?> result) throws Exception {
    if (!tracesLog.isDebugEnabled() || traces == null || traces.size() == 0) {
        return;
    }
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
    StringBuilder debug = new StringBuilder("\n");
    for (Object row : result) {
        for (String trace : traces) {
            Object obj;
            // simple values are traced directly; beans/maps via nested property lookup
            if (row.getClass().isPrimitive() || Number.class.isAssignableFrom(row.getClass())
                    || row.getClass().equals(String.class) || row.getClass().equals(Date.class)) {
                obj = row;
            } else {
                obj = PropertyUtils.getNestedProperty(row, trace);
            }
            if (obj instanceof Date) {
                obj = sdf.format((Date) obj);
            }
            debug.append("[").append(obj).append("],");
        }
        debug.append("\n");
    }
    tracesLog.debug(debug.toString());
}

/** Stores either the first row (singleRow mode, null when empty) or the whole list. */
private void storeResult(ProcessorContext context, List<?> result) {
    if (singleRow) {
        if (result.size() > 0) {
            context.setAttribute(resultName, result.get(0));
        } else {
            context.setAttribute(resultName, null);
        }
    } else {
        context.setAttribute(resultName, result);
    }
}

From source file:org.soybeanMilk.core.bean.DefaultGenericConverter.java

/**
 * Converts the given map into an instance of the given target type.
 * <p>
 * A custom target type embedded in the map (via getMapCustomTargetType)
 * overrides the requested type. Depending on the resolved type the map is
 * either returned as-is, converted to a class instance, to a parameterized
 * type, to a generic array type, or re-dispatched after reifying a type
 * variable or wildcard.
 *
 * @param map the source map
 * @param type the target type; may be null, in which case the map itself is returned
 * @return the conversion result
 * @throws ConvertException if the map cannot be converted to the target type
 * @date 2012-5-14
 */
@SuppressWarnings("unchecked")
protected Object convertMapToType(Map<?, ?> map, Type type) throws ConvertException {
    Object result = null;

    // an embedded custom target type takes precedence over the requested one
    Type customType = getMapCustomTargetType(map, null);
    if (customType != null)
        type = customType;

    if (type == null) {
        result = map;
    } else if (SbmUtils.isClassType(type)) {
        Class<?> clazz = SbmUtils.narrowToClass(type);

        // a plain Map target (with no custom type) needs no conversion
        if (customType == null && isAncestorType(Map.class, clazz)) {
            result = map;
        } else {
            result = convertPropertyValueMapToClass(toPropertyValueMap((Map<String, ?>) map), clazz);
        }
    } else if (type instanceof ParameterizedType) {
        boolean convert = true;

        ParameterizedType pt = (ParameterizedType) type;
        Type rt = pt.getRawType();

        // targets like Map<?, ?> or Map<Object, Object> accept the map unchanged
        if (isAncestorType(rt, map.getClass())) {
            Type[] at = pt.getActualTypeArguments();
            if (at != null && at.length == 2 && ((Object.class.equals(at[0]) && Object.class.equals(at[1]))
                    || (isSimpleWildcardType(at[0]) && isSimpleWildcardType(at[1])))) {
                convert = false;
            }
        }

        if (convert)
            result = convertPropertyValueMapToParameterrizedType(toPropertyValueMap((Map<String, ?>) map), pt);
        else
            result = map;
    } else if (type instanceof GenericArrayType) {
        result = convertPropertyValueMapToGenericArrayType(toPropertyValueMap((Map<String, ?>) map),
                (GenericArrayType) type);
    } else if (type instanceof TypeVariable<?>) {
        // type variables and wildcards are reified to concrete types, then re-dispatched
        result = convertObjectToType(map, reify(type));
    } else if (type instanceof WildcardType) {
        result = convertObjectToType(map, reify(type));
    } else
        result = converterNotFoundThrow(map.getClass(), type);

    return result;
}

From source file:org.cloudgraph.cassandra.service.GraphDispatcher.java

/**
 * Compares the application-level concurrency state of the given datastore entity against the 
 * query snapshot date. If the snapshot date is current, the concurrency data for the entity is 
 * refreshed. Otherwise a shapshot concurrency exception is thrown. 
 * @param type - the type definition/*from   w ww .j  a  v  a2  s  .c  o m*/
 * @param entity - the map representing datastore entity
 * @param lastUpdatedDateProperty - the last updated date property definition metadata 
 * @param lastUpdatedByNameProperty - the last updated by name property definition metadata
 * @param snapshotDate - the query snapshot date
 */
private void checkAndRefreshConcurrencyFields(Type type, Map<String, PropertyPair> entity,
        Property lastUpdatedDateProperty, Property lastUpdatedByNameProperty, Timestamp snapshotDate)
        throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    PropertyPair lastUpdatedDatePair = entity.get(lastUpdatedDateProperty.getName());
    PropertyPair lastUpdatedByPair = entity.get(lastUpdatedByNameProperty.getName());

    String entityName = type.getName();

    if (lastUpdatedDatePair != null) {
        Date lastUpdatedDate = (Date) lastUpdatedDatePair.getValue();
        if (log.isDebugEnabled())
            log.debug("comparing " + lastUpdatedDate + "greater than snapshot: " + snapshotDate);
        if (lastUpdatedDate.getTime() > snapshotDate.getTime()) {
            if (lastUpdatedByPair != null) {
                String lastUpdatedBy = (String) lastUpdatedByPair.getValue();
                throw new InvalidSnapshotException(entityName, username, snapshotDate, lastUpdatedBy,
                        lastUpdatedDate);
            } else
                throw new InvalidSnapshotException(entityName, username, snapshotDate, "unknown",
                        lastUpdatedDate);
        }
        PropertyPair updatedDatePair = new PropertyPair(lastUpdatedDatePair.getProp(),
                this.snapshotMap.getSnapshotDate());
        entity.put(lastUpdatedDatePair.getProp().getName(), updatedDatePair);

        if (lastUpdatedByPair != null) {
            PropertyPair updatedByPair = new PropertyPair(lastUpdatedByPair.getProp(), username);
            entity.put(updatedByPair.getProp().getName(), updatedByPair);
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("reset updated-date " + entity.getClass().getSimpleName() + " (" + entityName + " - "
                + this.snapshotMap.getSnapshotDate() + "("
                + String.valueOf(this.snapshotMap.getSnapshotDate().getTime()) + ")");
    }
}

From source file:jp.terasoluna.fw.web.struts.form.DynaValidatorActionFormExTest.java

/**
 * ????s?B/*from   w w  w. j av  a2 s  . c om*/
 *
 * @throws Exception ?\bh?O
 * @see junit.framework.TestCase#setUp()
 */
@SuppressWarnings("unchecked")
@Override
protected void setUp() throws Exception {
    super.setUp();
    // create formEx
    // ?set?\bh?o?Av?peB?L?qt@Cw
    this.formEx = (DynaValidatorActionFormEx) creator.create(CONFIG_FILE_PATH);

    // i[l??
    int hogeInt = 123;
    String hogeString = "data1";
    int[] hogeIntArray = { -100, 0, 10, 111 };
    String[] hogeStringArray = new String[4];
    Object[] hogeObjectArray = new Object[4];
    List<Object> hogeList = new ArrayList<Object>();
    Map<String, Object> hogeMap = new HashMap<String, Object>();
    Runnable hogeRunnable = new Runnable() {
        public void run() {
        }
    };
    boolean hogeBoolean = true;
    byte hogeByte = 1;
    char hogeChar = 'A';
    double hogeDouble = 999.9;
    float hogeFloat = 999;
    short hogeShort = 9;
    long hogeLong = 9;

    for (int i = 0; i < 4; i++) {
        hogeStringArray[i] = "data" + (i + 1);
        hogeObjectArray[i] = new Integer(i + 1);
        hogeList.add(i, "data" + (i + 1));
        hogeMap.put("field" + (i + 1), "data" + (i + 1));
    }

    String[] fields = { "hogeInt", "hogeString", "hogeIntArray", "hogeStringArray", "hogeObjectArray",
            "hogeList", "hogeMap", "hogeRunnable", "hogeBoolean", "hogeByte", "hogeChar", "hogeDouble",
            "hogeFloat", "hogeShort", "hogeLong" };
    Class[] fieldClasses = { int.class, hogeString.getClass(), hogeIntArray.getClass(),
            hogeStringArray.getClass(), hogeObjectArray.getClass(), hogeList.getClass(), hogeMap.getClass(),
            hogeRunnable.getClass(), boolean.class, byte.class, char.class, double.class, float.class,
            short.class, long.class };
    DynaProperty[] props = new DynaProperty[fields.length];
    HashMap<String, DynaProperty> propsMap = new HashMap<String, DynaProperty>();
    for (int i = 0; i < fields.length; i++) {
        props[i] = new DynaProperty(fields[i], fieldClasses[i]);
        propsMap.put(props[i].getName(), props[i]);
    }
    DynaActionFormClass dynaActionFormClass = (DynaActionFormClass) UTUtil.getPrivateField(this.formEx,
            "dynaClass");
    UTUtil.setPrivateField(dynaActionFormClass, "properties", props);
    UTUtil.setPrivateField(dynaActionFormClass, "propertiesMap", propsMap);

    Map<String, Object> map = (Map) UTUtil.getPrivateField(this.formEx, "dynaValues");
    map.put("hogeInt", hogeInt);
    map.put("hogeString", hogeString);
    map.put("hogeIntArray", hogeIntArray);
    map.put("hogeStringArray", hogeStringArray);
    map.put("hogeObjectArray", hogeObjectArray);
    map.put("hogeList", hogeList);
    map.put("hogeMap", hogeMap);
    map.put("hogeRunnable", hogeRunnable);
    map.put("hogeBoolean", hogeBoolean);
    map.put("hogeByte", hogeByte);
    map.put("hogeChar", hogeChar);
    map.put("hogeDouble", hogeDouble);
    map.put("hogeFloat", hogeFloat);
    map.put("hogeShort", hogeShort);
    map.put("hogeLong", hogeLong);
}

From source file:org.openecomp.sdnc.sli.aai.AAIDeclarations.java

/**
 * Queries AAI for a single resource instance and flattens the result into the
 * service-logic context under {@code prefix}.
 *
 * <p>Most resources are delegated to {@link #newModelQuery}; queries by name for
 * {@code vserver}/{@code vserver2} and {@code generic-vnf} are handled by the
 * legacy path below. String attributes of the retrieved object are written as
 * {@code <prefix>.<attr>} (underscores normalized to dashes); a Map-valued
 * sub-element selected via a {@code resource:modifier} suffix (or the
 * relationship-list) is expanded through {@code writeMap}.
 *
 * @param resource  AAI resource name, optionally suffixed with ":modifier" to select a sub-element
 * @param localOnly passed through to {@code newModelQuery}
 * @param select    passed through to {@code newModelQuery}
 * @param key       key string parsed into name/value pairs via {@code keyToHashMap}
 * @param prefix    context-attribute prefix for results and error reporting
 * @param orderBy   passed through to {@code newModelQuery}
 * @param ctx       service-logic context receiving result/error attributes
 * @return SUCCESS, NOT_FOUND, or FAILURE (error details in {@code <prefix>.error.*})
 * @throws SvcLogicException declared by the interface; this implementation reports
 *         failures via the return value instead
 */
@Override
public QueryStatus query(String resource, boolean localOnly, String select, String key, String prefix,
        String orderBy, SvcLogicContext ctx) throws SvcLogicException {

    getLogger().debug("AAIService.query \tresource = " + resource);

    String vnfId = null;
    String vnfName = null;
    HashMap<String, String> nameValues = keyToHashMap(key, ctx);
    getLogger().debug("key = " + nameValues.toString());

    if (!checkOldFormat(resource, nameValues)) {
        ctx.setAttribute(String.format("%s.error.message", prefix), String
                .format("Resource %s is not supported. Key string contains invalid identifiers", resource));
        return QueryStatus.FAILURE;
    }

    if (resource == null || resource.isEmpty() || AAIRequest.createRequest(resource, nameValues) == null) {
        ctx.setAttribute(String.format("%s.error.message", prefix),
                String.format("Resource %s is not supported", resource));
        return QueryStatus.FAILURE;
    }

    // Process data using the new model, except for server/VNF queries by name,
    // which still go through the legacy handling below.
    boolean useNewModelProcessing = true;
    if ("vserver".equals(resource) || "vserver2".equals(resource)) {
        if (nameValues.containsKey("vserver_name") || nameValues.containsKey("vserver-name")
                || nameValues.containsKey("vserver.vserver_name")
                || nameValues.containsKey("vserver.vserver-name")) {
            useNewModelProcessing = false;
        }
    }
    if ("generic-vnf".equals(resource)) {
        if (nameValues.containsKey("vnf_name") || nameValues.containsKey("vnf-name")
                || nameValues.containsKey("generic_vnf.vnf_name")
                || nameValues.containsKey("generic-vnf.vnf-name")) {
            useNewModelProcessing = false;
        }
    }

    if (useNewModelProcessing && AAIRequest.createRequest(resource, nameValues) != null) {
        try {
            return newModelQuery(resource, localOnly, select, key, prefix, orderBy, ctx);
        } catch (Exception exc) {
            getLogger().warn("Failed query - returning FAILURE", exc);
            return QueryStatus.FAILURE;
        }
    }

    ObjectMapper mapper = AAIService.getObjectMapper();
    Map<String, Object> attributes = new HashMap<String, Object>();

    // "resource:modifier" selects a sub-element (e.g. relationship-list) of the result.
    String modifier = null;
    if (resource.contains(":")) {
        String[] tokens = resource.split(":");
        resource = tokens[0];
        if (tokens.length > 1) {
            modifier = tokens[1];
        }
    }

    resource = resource.toLowerCase().replace("-", "_");

    try {

        switch (resource) {
        case "generic_vnf":
            // Resolve the VNF id: prefer the plain key, fall back to the prefixed form.
            // BUG FIX: the original read "generic_vnf.vserver_name" under a
            // "generic_vnf.vnf_name" guard, so the fallback could never yield an id.
            if (nameValues.containsKey("vnf_id"))
                vnfId = nameValues.get("vnf_id");
            else if (nameValues.containsKey("generic_vnf.vnf_id"))
                vnfId = nameValues.get("generic_vnf.vnf_id");

            if (nameValues.containsKey("vnf_name"))
                vnfName = nameValues.get("vnf_name");
            else if (nameValues.containsKey("generic_vnf.vnf_name"))
                vnfName = nameValues.get("generic_vnf.vnf_name");

            if (vnfId != null && !vnfId.isEmpty()) {
                // At this point of the project this part should not be executed.
                vnfId = stripQueryDecorations(vnfId);
                GenericVnf vnf = this.requestGenericVnfData(vnfId);
                if (vnf == null) {
                    return QueryStatus.NOT_FOUND;
                }

                attributes = mapper.convertValue(vnf, attributes.getClass());
            } else if (vnfName != null && !vnfName.isEmpty()) {
                try {
                    vnfName = stripQueryDecorations(vnfName);
                    GenericVnf vnf = this.requestGenericVnfeNodeQuery(vnfName);
                    if (vnf == null) {
                        return QueryStatus.NOT_FOUND;
                    }
                    vnfId = vnf.getVnfId();
                    nameValues.put("vnf_id", vnfId);
                    attributes = mapper.convertValue(vnf, attributes.getClass());
                } catch (AAIServiceException exc) {
                    int errorCode = exc.getReturnCode();
                    // 400/404/412 are expected outcomes; anything else is worth a warning.
                    switch (errorCode) {
                    case 400:
                    case 404:
                    case 412:
                        break;
                    default:
                        getLogger().warn("Caught exception trying to refresh generic VNF", exc);
                    }
                    ctx.setAttribute(prefix + ".error.message", exc.getMessage());
                    if (errorCode >= 300) {
                        ctx.setAttribute(prefix + ".error.http.response-code", "" + exc.getReturnCode());
                    }
                    return QueryStatus.FAILURE;
                }
            } else {
                getLogger().warn("No arguments are available to process generic VNF");
                return QueryStatus.FAILURE;
            }
            break;

        case "vserver":
        case "vserver2":
            String vserverName = null;
            if (nameValues.containsKey("vserver_name"))
                vserverName = nameValues.get("vserver_name");
            else if (nameValues.containsKey("vserver.vserver_name"))
                vserverName = nameValues.get("vserver.vserver_name");

            String vserverId = null;
            if (nameValues.containsKey("vserver_id"))
                vserverId = nameValues.get("vserver_id");
            if (nameValues.containsKey("vserver.vserver_id"))
                vserverId = nameValues.get("vserver.vserver_id");
            // BUG FIX: key was misspelled "teannt_id", so tenantId was always null here.
            String tenantId = nameValues.get("tenant_id");

            vserverName = stripQueryDecorations(vserverName);
            vserverId = stripQueryDecorations(vserverId);
            tenantId = stripQueryDecorations(tenantId);

            if (vserverName != null) {
                // Name-based lookup: resolve the vserver URL first, then fetch by URL.
                URL vserverUrl = null;
                try {
                    vserverUrl = this.requestVserverURLNodeQuery(vserverName);
                } catch (AAIServiceException aaiexc) {
                    ctx.setAttribute(prefix + ".error.message", aaiexc.getMessage());
                    if (aaiexc.getReturnCode() >= 300) {
                        ctx.setAttribute(prefix + ".error.http.response-code", "" + aaiexc.getReturnCode());
                    }

                    if (aaiexc.getReturnCode() == 404)
                        return QueryStatus.NOT_FOUND;
                    else
                        return QueryStatus.FAILURE;
                }
                if (vserverUrl == null) {
                    return QueryStatus.NOT_FOUND;
                }

                // Tenant/cloud identifiers are recovered from the resolved URL.
                tenantId = getTenantIdFromVserverUrl(vserverUrl);
                String cloudOwner = getCloudOwnerFromVserverUrl(vserverUrl);
                String cloudRegionId = getCloudRegionFromVserverUrl(vserverUrl);

                Vserver vserver = null;
                try {
                    vserver = this.requestVServerDataByURL(vserverUrl);
                } catch (AAIServiceException aaiexc) {
                    ctx.setAttribute(prefix + ".error.message", aaiexc.getMessage());
                    if (aaiexc.getReturnCode() >= 300) {
                        ctx.setAttribute(prefix + ".error.http.response-code", "" + aaiexc.getReturnCode());
                    }

                    if (aaiexc.getReturnCode() == 404)
                        return QueryStatus.NOT_FOUND;
                    else
                        return QueryStatus.FAILURE;
                }
                if (vserver == null) {
                    return QueryStatus.NOT_FOUND;
                }
                attributes = mapper.convertValue(vserver, attributes.getClass());
                // Backfill identifiers the serialized vserver does not carry itself.
                if (!attributes.containsKey("tenant-id") && tenantId != null) {
                    attributes.put("tenant-id", tenantId);
                }
                if (!attributes.containsKey("cloud-owner") && cloudOwner != null) {
                    attributes.put("cloud-owner", cloudOwner);
                }
                if (!attributes.containsKey("cloud-region-id") && cloudRegionId != null) {
                    attributes.put("cloud-region-id", cloudRegionId);
                }
            } else if (vserverId != null && tenantId != null) {
                // NOTE(review): cloud owner/region are hard-coded here — confirm intended.
                Vserver vserver = this.requestVServerData(tenantId, vserverId, "att-aic", "AAIAIC25");
                if (vserver == null) {
                    return QueryStatus.NOT_FOUND;
                }
                attributes = mapper.convertValue(vserver, attributes.getClass());
                if (!attributes.containsKey("tenant-id")) {
                    attributes.put("tenant-id", tenantId);
                }
            } else {
                return QueryStatus.FAILURE;
            }
            break;

        default:
            return QueryStatus.FAILURE;
        }

        QueryStatus retval = QueryStatus.SUCCESS;

        if (attributes == null || attributes.isEmpty()) {
            retval = QueryStatus.NOT_FOUND;
            getLogger().debug("No data found");
        } else if (ctx != null && prefix != null) {
            writeQueryAttributes(attributes, prefix, modifier, ctx);
        }
        getLogger().debug("Query - returning " + retval);
        return (retval);

    } catch (Exception exc) {
        getLogger().warn("Failed query - returning FAILURE", exc);
        return QueryStatus.FAILURE;
    }
}

/**
 * Strips quoting/decoration characters ({@code '} and {@code $}) and surrounding
 * whitespace from a key value taken from the query key string. Null-safe.
 */
private static String stripQueryDecorations(String value) {
    return (value == null) ? null : value.trim().replace("'", "").replace("$", "");
}

/**
 * Writes the queried object's String attributes into the context as
 * {@code <prefix>.<name>} (underscores normalized to dashes) and expands the
 * selected Map-valued sub-element (the modifier, or the relationship-list)
 * via {@code writeMap}.
 */
private void writeQueryAttributes(Map<String, Object> attributes, String prefix, String modifier,
        SvcLogicContext ctx) {
    for (String colName : new ArrayList<String>(attributes.keySet())) {
        Object object = attributes.get(colName);

        if (object instanceof String) {
            String colValue = (String) object;
            String attrName = prefix + "." + colName.replaceAll("_", "-");
            getLogger().debug("Setting " + attrName + " = " + colValue);
            ctx.setAttribute(attrName, colValue);
        } else if (object instanceof Map) {
            if (colName.equals(modifier) || colName.equals("relationship-list")) {
                String localModifier = modifier;
                if (localModifier == null)
                    localModifier = "relationship-list";
                @SuppressWarnings("unchecked")
                Map<String, Object> properties = (Map<String, Object>) object;
                writeMap(properties, prefix + "." + localModifier, ctx);
            }
        }
    }
}