Example usage for java.util Map toString

Introduction

On this page you can find example usages of java.util Map toString().

Prototype

public String toString() 

Document

Returns a string representation of this map. For the implementation inherited from AbstractMap (used by HashMap, LinkedHashMap, TreeMap and the other standard maps), the entries are listed in the order of the map's entrySet() iterator as key=value pairs, separated by ", " and enclosed in braces ("{}").
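
As a quick illustration, here is a minimal, self-contained sketch (plain JDK only; the keys and values are made up) showing the form that toString() produces:

import java.util.LinkedHashMap;
import java.util.Map;

public class MapToStringDemo {
    public static void main(String[] args) {
        // LinkedHashMap keeps insertion order, so the output below is predictable
        Map<String, String> partSpec = new LinkedHashMap<>();
        partSpec.put("ds", "2024-01-01");
        partSpec.put("hr", "12");

        // toString() renders the entries as {key=value, key=value}
        System.out.println(partSpec.toString()); // prints: {ds=2024-01-01, hr=12}

        // An empty map renders as a pair of braces
        System.out.println(new LinkedHashMap<String, String>()); // prints: {}
    }
}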

Usage

From source file:org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.java

private void analyzeAlterTableUpdateStats(ASTNode ast, String tblName, Map<String, String> partSpec)
        throws SemanticException {
    String colName = getUnescapedName((ASTNode) ast.getChild(0));
    Map<String, String> mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0));

    Table tbl = getTable(tblName);
    String partName = null;
    if (partSpec != null) {
        try {
            partName = Warehouse.makePartName(partSpec, false);
        } catch (MetaException e) {
            // Surface the offending partition spec in the error message
            throw new SemanticException("partition " + partSpec.toString() + " not found");
        }
    }

    String colType = null;
    List<FieldSchema> cols = tbl.getCols();
    for (FieldSchema col : cols) {
        if (colName.equalsIgnoreCase(col.getName())) {
            colType = col.getType();
            break;
        }
    }

    if (colType == null)
        throw new SemanticException("column type not found");

    ColumnStatsDesc cStatsDesc = new ColumnStatsDesc(tbl.getDbName() + "." + tbl.getTableName(),
            Arrays.asList(colName), Arrays.asList(colType), partSpec == null);
    ColumnStatsUpdateTask cStatsUpdateTask = (ColumnStatsUpdateTask) TaskFactory
            .get(new ColumnStatsUpdateWork(cStatsDesc, partName, mapProp), conf);
    rootTasks.add(cStatsUpdateTask);
}
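
The example above relies on Map.toString() to surface the offending partition spec in the exception text. A simplified, JDK-only sketch of the same idea (the "ds"/"hr" keys and the exception type are illustrative stand-ins, not the Hive API):

import java.util.LinkedHashMap;
import java.util.Map;

public class PartSpecErrorDemo {
    public static void main(String[] args) {
        Map<String, String> partSpec = new LinkedHashMap<>();
        partSpec.put("ds", "2024-01-01");
        partSpec.put("hr", "12");

        // String concatenation calls partSpec.toString() implicitly,
        // so the exception message carries the full spec
        throw new IllegalStateException("partition " + partSpec + " not found");
        // message: partition {ds=2024-01-01, hr=12} not found
    }
}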

From source file:org.apache.hadoop.hbase.coprocessor.TimeseriesAggregateImplementation.java

@Override
public void getMin(RpcController controller, TimeseriesAggregateRequest request,
        RpcCallback<TimeseriesAggregateResponse> done) {
    InternalScanner scanner = null;
    TimeseriesAggregateResponse response = null;
    // TimeRange intervalRange = null;
    T min = null;
    boolean hasScannerRange = false;
    Map<Long, T> minimums = new HashMap<Long, T>();
    if (!request.hasRange()) {
        // When no time range is passed in via the request, the scanner is
        // assumed to be timestamp-range bound.
        hasScannerRange = true;
    }

    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        T temp;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        List<TimeRange> timeRanges = getAllTimeRanges(scan, request);
        scanner = env.getRegion().getScanner(scan);
        List<Cell> results = new ArrayList<Cell>();
        byte[] colFamily = scan.getFamilies()[0];

        boolean hasMoreRows = false;
        do {
            results.clear();
            hasMoreRows = scanner.next(results);
            for (Cell kv : results) {
                long timestamp = 0;
                if (hasScannerRange)
                    timestamp = kv.getTimestamp();
                else
                    timestamp = getMillisTimestampFromOffset(getTimestampFromRowKeyAsMillis(kv, request),
                            Bytes.toInt(kv.getQualifier()));
                for (TimeRange t : timeRanges) {
                    if (t.withinTimeRange(timestamp)) {
                        long minTimestamp = t.getMin();
                        if (minimums.containsKey(minTimestamp)) {
                            min = minimums.get(minTimestamp);
                        } else
                            min = null;
                        temp = ci.getValue(colFamily, kv.getQualifier(), kv);
                        min = (min == null || (temp != null && ci.compare(temp, min) < 0)) ? temp : min;
                        minimums.put(minTimestamp, min);
                    }
                }
            }
        } while (hasMoreRows);
        if (!minimums.isEmpty()) {
            TimeseriesAggregateResponse.Builder responseBuilder = TimeseriesAggregateResponse.newBuilder();

            for (Map.Entry<Long, T> entry : minimums.entrySet()) {
                TimeseriesAggregateResponseEntry.Builder valueBuilder = TimeseriesAggregateResponseEntry
                        .newBuilder();
                TimeseriesAggregateResponseMapEntry.Builder mapElementBuilder = TimeseriesAggregateResponseMapEntry
                        .newBuilder();

                valueBuilder.addFirstPart(ci.getProtoForCellType(entry.getValue()).toByteString());

                mapElementBuilder.setKey(entry.getKey());
                mapElementBuilder.setValue(valueBuilder.build());

                responseBuilder.addEntry(mapElementBuilder.build());
            }
            response = responseBuilder.build();
        }
    } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    log.info("Minimums from this region are " + env.getRegion().getRegionNameAsString() + ": "
            + minimums.toString());
    done.run(response);
}
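
The coprocessor keeps one running minimum per time-range start and finally logs the whole map via toString(). A stripped-down, JDK-only sketch of that accumulate-then-log pattern (the timestamps, values, and region name are invented; the HBase types are omitted):

import java.util.HashMap;
import java.util.Map;

public class IntervalMinimumsDemo {
    public static void main(String[] args) {
        // interval start (ms) -> running minimum observed in that interval
        Map<Long, Integer> minimums = new HashMap<>();

        long[][] samples = { { 1000L, 7 }, { 1000L, 3 }, { 2000L, 5 }, { 2000L, 9 } };
        for (long[] sample : samples) {
            long intervalStart = sample[0];
            int value = (int) sample[1];
            Integer current = minimums.get(intervalStart);
            if (current == null || value < current) {
                minimums.put(intervalStart, value);
            }
        }

        // Map.toString() gives a one-line summary suitable for a log statement
        System.out.println("Minimums from this region are test-region: " + minimums);
        // typically: {1000=3, 2000=5} (HashMap iteration order is not guaranteed)
    }
}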

From source file:com.nuvolect.securesuite.webserver.CrypServer.java

@Override
public Response serve(IHTTPSession session) {

    if (!m_serverEnabled) {

        return null;
    }

    Map<String, List<String>> paramsMultiple = session.getParameters();

    m_session = session;

    CookieHandler cookies = session.getCookies();
    Map<String, String> headers = session.getHeaders();
    String uniqueId = cookies.read(CConst.UNIQUE_ID);

    if (uniqueId == null) {

        if (embedded_header_value.isEmpty())
            embedded_header_value = WebUtil.getServerUrl(m_ctx);

        for (Map.Entry<String, String> entry : headers.entrySet()) {

            if (entry.getKey().startsWith(EMBEDDED_HEADER_KEY)
                    && entry.getValue().contains(embedded_header_value)) {
                uniqueId = CConst.EMBEDDED_USER;
                break;
            }
        }
        if (DEBUG && uniqueId == null) {

            LogUtil.log(LogUtil.LogType.CRYP_SERVER, "header value mismatch: " + embedded_header_value);
            for (Map.Entry<String, String> entry : headers.entrySet()) {

                LogUtil.log(LogUtil.LogType.CRYP_SERVER,
                        "header: " + entry.getKey() + ":::" + entry.getValue());
            }
        }
    }

    if (uniqueId == null) {

        uniqueId = String.valueOf(System.currentTimeMillis());
        cookies.set(CConst.UNIQUE_ID, uniqueId, 30);
    }
    /**
     * Session is authenticated when authentication is wide open or
     * session has been previously authenticated.
     */
    mAuthenticated = Cryp.getLockCode(m_ctx).isEmpty() || uniqueId.contentEquals(CConst.EMBEDDED_USER)
            || get(uniqueId, CConst.AUTHENTICATED, "0").contentEquals("1");

    Method method = session.getMethod();
    Map<String, String> params = new HashMap<String, String>();

    /**
     * Get files associated with a POST method
     */
    Map<String, String> files = new HashMap<String, String>();
    try {
        session.parseBody(files);
    } catch (ResponseException e) {
        LogUtil.logException(CrypServer.class, e);
    } catch (IOException e) {
        LogUtil.logException(CrypServer.class, e);
    }
    /**
     * {
     *    "data": {
     *        "EventID": 0,
     *        "StartAt": "2017/04/13 12:00 AM",
     *        "EndAt": "2017/04/14 12:00 AM",
     *        "IsFullDay": false,
     *        "Title ": "Sample title",
     *        "Description": "Something about the event"
     *    }
     * }
     */
    if (method.equals(Method.POST) && files.size() > 0) {

        if (files.containsKey("postData")) {

            try {
                JSONObject postData = new JSONObject(files.get("postData"));
                JSONObject data = postData.getJSONObject("data");
                params.put("data", data.toString());

                Iterator<String> keys = data.keys();

                while (keys.hasNext()) {

                    String key = keys.next();
                    String value = data.getString(key);
                    params.put(key, value);
                }
            } catch (JSONException e) {
                LogUtil.logException(CrypServer.class, e);
            }
        }
    }

    /**
     * Parameters can now have multiple values for a single key.
     * Iterate over params and copy them to a HashMap<String, String>.
     * This "old way" is simple and compatible with the existing code base.
     * Duplicate keys are made unique: { key, key_2, key_3, ..., key_n }
     */
    Set<String> keySet = paramsMultiple.keySet();
    for (String key : keySet) {
        List<String> values = paramsMultiple.get(key);
        int n = 0;
        for (String value : values) {
            if (++n == 1) {
                params.put(key, value);
            } else {
                params.put(key + "_" + n, value);
            }
        }
    }

    String uri = session.getUri();
    params.put(CConst.URI, uri);
    params.put(CConst.URL, m_serverUrl);
    params.put("queryParameterStrings", session.getQueryParameterString());

    params.put(CConst.UNIQUE_ID, uniqueId);

    log(LogUtil.LogType.CRYP_SERVER, method + " '" + uri + "' " + params.toString());

    InputStream is = null;
    EXT ext = null;

    String fileExtension = FilenameUtils.getExtension(uri).toLowerCase(US);
    if (fileExtension.isEmpty()) {
        if (uri.contentEquals("/")) {
            ext = EXT.htm;
            if (mAuthenticated)
                uri = "/list.htm";
            else
                uri = "/login.htm";
        } else {
            ext = determineServiceEnum(uri);
        }
    } else {
        try {
            ext = EXT.valueOf(fileExtension);
        } catch (IllegalArgumentException e) {
            log(LogUtil.LogType.CRYP_SERVER, "ERROR invalid extension " + uri + fileExtension);
            ext = EXT.invalid;
        }
    }

    try {

        if (uri == null)
            return null;

        switch (ext) {

        case js:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MimeUtil.MIME_JS, is, -1);
        case css:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MimeUtil.MIME_CSS, is, -1);
        case map:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MIME_JSON, is, -1);
        case png:
            if (uri.startsWith("/img") || uri.startsWith("/css") || uri.startsWith("/elFinder")) {

                is = m_ctx.getAssets().open(uri.substring(1));
                return new Response(Status.OK, MIME_PNG, is, -1);
            } else if (uri.startsWith("/files/")) {
                String fileName = FilenameUtils.getName(uri);
                File file = new File(m_ctx.getFilesDir() + "/" + fileName);
                is = new FileInputStream(file);
                return new Response(Status.OK, MIME_PNG, is, -1);
            }
            log(LogUtil.LogType.CRYP_SERVER, "ERROR not found: " + uri);
            return new Response(Status.NOT_FOUND, MIME_PLAINTEXT, "Not found: " + uri);
        case jpg:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MIME_JPG, is, -1);
        case gif:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MIME_GIF, is, -1);
        case ico:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MIME_ICO, is, -1);
        case ttf:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MIME_TTF, is, -1);
        case wav:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MIME_WAV, is, -1);
        case woff:
        case woff2:
            is = m_ctx.getAssets().open(uri.substring(1));
            return new Response(Status.OK, MIME_WOFF, is, -1);
        case htm:
        case html: {
            if (uri.contentEquals("/login.htm")) {
                log(LogUtil.LogType.CRYP_SERVER, "Serving login.htm");
                is = m_ctx.getAssets().open("login.htm");
                return new Response(Status.OK, MimeUtil.MIME_HTML, is, -1);
            }
            if (uri.contentEquals("/footer.htm")) {
                log(LogUtil.LogType.CRYP_SERVER, "Serving footer.htm");
                is = m_ctx.getAssets().open("footer.htm");
                return new Response(Status.OK, MimeUtil.MIME_HTML, is, -1);
            }
            if (mAuthenticated) {

                return serveAuthenticatedHtml(uri, uniqueId, params);
            } else {
                return new Response(Status.UNAUTHORIZED, MIME_PLAINTEXT, "Invalid authentication: " + uri);
            }
        }
        case omni: {
            String mime = "";
            OmniFile omniFile = new OmniFile(uri);
            if (omniFile.getPath().startsWith(CConst.TMB_FOLDER)) {
                /**
                 * Request for a thumbnail file.
                 * The file name is hashed and mime type is png.
                 */
                mime = MIME_PNG;
            } else
                mime = omniFile.getMime();

            is = omniFile.getFileInputStream();
            return new Response(Status.OK, mime, is, -1);
        }
        case admin: {
            /**
             * GET/POST /admin?cmd=login works with or without validation.
             * All other REST services require authentication.
             */
            if (params.containsKey("cmd") && params.get("cmd").contentEquals("login")) {

                is = AdminCmd.process(m_ctx, params);
                return new Response(Status.OK, MIME_JSON, is, -1);
            }
        }
        case calendar:
        case connector:
        case sync: {
            if (passSecurityCheck(uri, headers)) {

                switch (ext) {
                case admin:
                    is = AdminCmd.process(m_ctx, params);
                    return new Response(Status.OK, MIME_JSON, is, -1);
                case calendar: {
                    String json = CalendarRest.process(m_ctx, params);
                    return new Response(Status.OK, MIME_JSON, json);
                }
                case connector: {

                    String mime = MIME_JSON;
                    if (params.get("cmd").contentEquals("upload")) {
                        loadUploadParams(files, params);
                    } else if (params.get("cmd").contentEquals("file")) {
                        OmniFile omniFile = new OmniFile(params.get("target"));
                        mime = omniFile.getMime();
                    }

                    ServeCmd serveCmd = new ServeCmd(m_ctx, params);

                    boolean zipDl = params.get("cmd").equals("zipdl") && params.containsKey("download")
                            && params.get("download").equals("1");
                    if (zipDl) {
                        zipDownloadFileHash = params.get("targets[]_2");
                        mime = MIME_ZIP;
                    }
                    Response response = new Response(Status.OK, mime, serveCmd.process(), -1);
                    if (zipDl) {
                        response.addHeader("Content-disposition", "attachment;filename=\"Archive.zip\"");
                    }
                    return response;
                }
                case sync:
                    String json = SyncRest.process(m_ctx, params);
                    return new Response(Status.OK, MIME_PLAINTEXT, json);
                }
            } else {
                /**
                 * The security token can be temporarily disabled during companion pairing.
                 */
                boolean hostVerifierDisabled = !WebUtil.NullHostNameVerifier
                        .getInstance().m_hostVerifierEnabled;
                if (ext == EXT.sync && hostVerifierDisabled && params.containsKey(CConst.CMD) && (params
                        .get(CConst.CMD).contentEquals(SyncRest.CMD.register_companion_device.toString())
                        || params.get(CConst.CMD).contentEquals(SyncRest.CMD.companion_ip_test.toString()))) {

                    log(LogUtil.LogType.CRYP_SERVER, "sec_tok test skipped");
                    String json = SyncRest.process(m_ctx, params);
                    return new Response(Status.OK, MIME_PLAINTEXT, json);
                } else {

                    log(LogUtil.LogType.CRYP_SERVER, "Authentication ERROR: " + params);
                    return new Response(Status.UNAUTHORIZED, MIME_PLAINTEXT, "Authentication error: " + uri);
                }
            }
        }
        case invalid:
            log(LogUtil.LogType.CRYP_SERVER, "ERROR invalid extension " + uri);
            return new Response(Status.NOT_ACCEPTABLE, MIME_PLAINTEXT, "Invalid request " + uri);
        default:
            log(LogUtil.LogType.CRYP_SERVER, "ERROR unmanaged extension " + ext);
            return new Response(Status.NOT_FOUND, MIME_PLAINTEXT, "Not found: " + uri);
        }

    } catch (Exception e) {
        log(LogUtil.LogType.CRYP_SERVER, "ERROR exception " + uri);
        LogUtil.logException(CrypServer.class, e);
    }
    return new Response(Status.NOT_FOUND, MIME_PLAINTEXT, "Unmanaged request: " + uri);
}
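
This handler flattens the multi-valued request parameters into a plain Map<String, String> (duplicate keys become key, key_2, key_3, ...) and logs the whole map with toString(). A self-contained sketch of just that flattening step and the resulting log line (the parameter names and URI are invented for illustration):

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ParamFlattenDemo {
    public static void main(String[] args) {
        // Multi-valued request parameters, in the shape the server receives them
        Map<String, List<String>> paramsMultiple = new LinkedHashMap<>();
        paramsMultiple.put("cmd", Arrays.asList("zipdl"));
        paramsMultiple.put("targets[]", Arrays.asList("hashA", "hashB"));

        // Flatten to single values; duplicate keys become key, key_2, key_3, ...
        Map<String, String> params = new LinkedHashMap<>();
        for (Map.Entry<String, List<String>> entry : paramsMultiple.entrySet()) {
            int n = 0;
            for (String value : entry.getValue()) {
                n++;
                params.put(n == 1 ? entry.getKey() : entry.getKey() + "_" + n, value);
            }
        }

        // toString() makes the flattened request easy to log on one line
        System.out.println("GET '/connector' " + params);
        // GET '/connector' {cmd=zipdl, targets[]=hashA, targets[]_2=hashB}
    }
}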

From source file:org.apache.hadoop.hbase.coprocessor.TimeseriesAggregateImplementation.java

@Override
public void getSum(RpcController controller, TimeseriesAggregateRequest request,
        RpcCallback<TimeseriesAggregateResponse> done) {
    TimeseriesAggregateResponse response = null;
    InternalScanner scanner = null;
    Map<Long, S> sums = new HashMap<Long, S>();
    boolean hasScannerRange = false;

    if (!request.hasRange()) {
        // When no time range is passed in via the request, the scanner is
        // assumed to be timestamp-range bound.
        hasScannerRange = true;
    }

    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        S sumVal = null;
        T temp;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        List<TimeRange> timeRanges = getAllTimeRanges(scan, request);
        scanner = env.getRegion().getScanner(scan);
        byte[] colFamily = scan.getFamilies()[0];
        List<Cell> results = new ArrayList<Cell>();
        boolean hasMoreRows = false;
        do {
            results.clear();
            hasMoreRows = scanner.next(results);
            for (Cell kv : results) {
                long timestamp = 0;
                if (hasScannerRange)
                    timestamp = kv.getTimestamp();
                else
                    timestamp = getMillisTimestampFromOffset(getTimestampFromRowKeyAsMillis(kv, request),
                            Bytes.toInt(kv.getQualifier()));
                for (TimeRange t : timeRanges) {
                    if (t.withinTimeRange(timestamp)) {
                        long minTimestamp = t.getMin();
                        if (sums.containsKey(minTimestamp)) {
                            sumVal = sums.get(minTimestamp);
                        } else
                            sumVal = null;
                        temp = ci.getValue(colFamily, kv.getQualifier(), kv);
                        if (temp != null)
                            sumVal = ci.add(sumVal, ci.castToReturnType(temp));
                        sums.put(minTimestamp, sumVal);
                    }
                }
            }
        } while (hasMoreRows);
        if (!sums.isEmpty()) {
            TimeseriesAggregateResponse.Builder responseBuilder = TimeseriesAggregateResponse.newBuilder();

            for (Map.Entry<Long, S> entry : sums.entrySet()) {
                TimeseriesAggregateResponseEntry.Builder valueBuilder = TimeseriesAggregateResponseEntry
                        .newBuilder();
                TimeseriesAggregateResponseMapEntry.Builder mapElementBuilder = TimeseriesAggregateResponseMapEntry
                        .newBuilder();
                valueBuilder.addFirstPart(ci.getProtoForPromotedType(entry.getValue()).toByteString());
                mapElementBuilder.setKey(entry.getKey());
                mapElementBuilder.setValue(valueBuilder.build());
                responseBuilder.addEntry(mapElementBuilder.build());
            }
            response = responseBuilder.build();
        }
    } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    log.info("Sums from this region are " + env.getRegion().getRegionNameAsString() + ": " + sums.toString());
    done.run(response);
}

From source file:org.apache.hadoop.hbase.coprocessor.TimeseriesAggregateImplementation.java

@Override
public void getMax(RpcController controller, TimeseriesAggregateRequest request,
        RpcCallback<TimeseriesAggregateResponse> done) {
    InternalScanner scanner = null;
    TimeseriesAggregateResponse response = null;
    T max = null;
    boolean hasScannerRange = false;
    Map<Long, T> maximums = new HashMap<Long, T>();

    if (!request.hasRange()) {
        // When no time range is passed in via the request, the scanner is
        // assumed to be timestamp-range bound.
        hasScannerRange = true;
    }

    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        T temp;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        scanner = env.getRegion().getScanner(scan);
        List<TimeRange> timeRanges = getAllTimeRanges(scan, request);
        List<Cell> results = new ArrayList<Cell>();
        byte[] colFamily = scan.getFamilies()[0];
        boolean hasMoreRows = false;
        do {
            results.clear();
            hasMoreRows = scanner.next(results);
            for (Cell kv : results) {
                // if (intervalRange.getMin() < maxTimeStamp) {
                long timestamp = 0;
                if (hasScannerRange)
                    timestamp = kv.getTimestamp();
                else
                    timestamp = getMillisTimestampFromOffset(getTimestampFromRowKeyAsMillis(kv, request),
                            Bytes.toInt(kv.getQualifier()));
                for (TimeRange t : timeRanges) {
                    if (t.withinTimeRange(timestamp)) {
                        long minTimestamp = t.getMin();
                        if (maximums.containsKey(minTimestamp)) {
                            max = maximums.get(minTimestamp);
                        } else
                            max = null;
                        temp = ci.getValue(colFamily, kv.getQualifier(), kv);
                        max = (max == null || (temp != null && ci.compare(temp, max) > 0)) ? temp : max;
                        maximums.put(minTimestamp, max);
                    }
                }
            }
        } while (hasMoreRows);
        if (!maximums.isEmpty()) {
            TimeseriesAggregateResponse.Builder responseBuilder = TimeseriesAggregateResponse.newBuilder();

            for (Map.Entry<Long, T> entry : maximums.entrySet()) {
                TimeseriesAggregateResponseEntry.Builder valueBuilder = TimeseriesAggregateResponseEntry
                        .newBuilder();
                TimeseriesAggregateResponseMapEntry.Builder mapElementBuilder = TimeseriesAggregateResponseMapEntry
                        .newBuilder();

                valueBuilder.addFirstPart(ci.getProtoForCellType(entry.getValue()).toByteString());

                mapElementBuilder.setKey(entry.getKey());
                mapElementBuilder.setValue(valueBuilder.build());

                responseBuilder.addEntry(mapElementBuilder.build());
            }
            response = responseBuilder.build();
        }
    } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    log.info("Maximums from this region are " + env.getRegion().getRegionNameAsString() + ": "
            + maximums.toString());
    done.run(response);
}

From source file:com.gtwm.pb.model.manageData.DataManagement.java

/**
 * Used by both the public saveRecord and globalEdit methods
 */
private void saveRecord(HttpServletRequest request, TableInfo table,
        LinkedHashMap<BaseField, BaseValue> dataToSave, boolean newRecord, Set<Integer> rowIds,
        SessionDataInfo sessionData, List<FileItem> multipartItems)
        throws InputRecordException, ObjectNotFoundException, SQLException, CantDoThatException,
        CodingErrorException, DisallowedException, MissingParametersException {
    if ((dataToSave.size() == 0) && (!newRecord)) {
        // Note: this does actually happen quite a lot, from two particular
        // users, therefore I've commented out the log warning.
        // Haven't tracked down the cause but it doesn't seem to be creating
        // a problem.
        // logger.warn("Call to saveRecord with no data to save. User = "
        // + request.getRemoteUser() + ", table = " + table + ", rowIds = "
        // + rowIds);
        return;
    }
    this.setHiddenFieldValues(request, table, dataToSave, newRecord);
    boolean globalEdit = false;
    int rowId = -1;
    if (rowIds.size() > 1) {
        globalEdit = true;
    } else if (rowIds.size() == 1) {
        rowId = (new LinkedList<Integer>(rowIds)).getFirst();
    } else {
        throw new ObjectNotFoundException("Row ID list " + rowIds + " is invalid");
    }
    StringBuilder SQLCodeBuilder = new StringBuilder();
    // Generate CSV of fields and placeholders to use in update/insert SQL
    // string
    StringBuilder fieldsCsvBuilder = new StringBuilder();
    StringBuilder fieldsAndPlaceholdersCsvBuilder = new StringBuilder();
    StringBuilder valuePlaceholdersCsvBuilder = new StringBuilder();
    for (BaseField field : dataToSave.keySet()) {
        fieldsCsvBuilder.append(field.getInternalFieldName());
        fieldsCsvBuilder.append(", ");
        valuePlaceholdersCsvBuilder.append("?, ");
        fieldsAndPlaceholdersCsvBuilder.append(field.getInternalFieldName());
        fieldsAndPlaceholdersCsvBuilder.append("=?, ");
    }
    // Used if doing an INSERT
    String fieldsCsv = fieldsCsvBuilder.toString();
    String valuePlaceholdersCsv = valuePlaceholdersCsvBuilder.toString();
    // Used if doing an UPDATE
    String fieldsAndPlaceholdersCsv = fieldsAndPlaceholdersCsvBuilder.toString();
    if (!fieldsCsv.equals("")) {
        fieldsCsv = fieldsCsv.substring(0, fieldsCsv.length() - 2);
        valuePlaceholdersCsv = valuePlaceholdersCsv.substring(0, valuePlaceholdersCsv.length() - 2);
        fieldsAndPlaceholdersCsv = fieldsAndPlaceholdersCsv.substring(0, fieldsAndPlaceholdersCsv.length() - 2);
    }
    if (newRecord) {
        SQLCodeBuilder.append("INSERT INTO " + table.getInternalTableName());
        if (fieldsCsv.equals("")) {
            SQLCodeBuilder.append(" VALUES(default)");
        } else {
            SQLCodeBuilder.append("(" + fieldsCsv + ") VALUES (" + valuePlaceholdersCsv + ")");
        }
    } else {
        SQLCodeBuilder.append("UPDATE " + table.getInternalTableName() + " SET " + fieldsAndPlaceholdersCsv);
        if (globalEdit) {
            // add filter for various row ids
            SQLCodeBuilder.append(" WHERE " + table.getPrimaryKey().getInternalFieldName() + " in (?");
            for (int i = 1; i < rowIds.size(); i++) {
                SQLCodeBuilder.append(",?");
            }
            SQLCodeBuilder.append(")");
        } else {
            // add filter for single row id
            SQLCodeBuilder.append(" WHERE " + table.getPrimaryKey().getInternalFieldName() + "=?");
        }
    }
    Connection conn = null;
    int fieldNumber = 0;
    // Will be set if we're inserting a record
    int newRowId = -1;
    TableDataInfo tableData = new TableData(table);
    try {
        conn = this.dataSource.getConnection();
        conn.setAutoCommit(false);
        PreparedStatement statement = conn.prepareStatement(SQLCodeBuilder.toString());
        for (BaseField field : dataToSave.keySet()) {
            // If an exception is raised, currentField will be the field
            // which caused it
            // currentField = field;
            fieldNumber++;
            BaseValue fieldValue = dataToSave.get(field);
            if (field instanceof FileField) {
                if (fieldValue.isNull() || fieldValue.toString().equals("")) {
                    throw new InputRecordException("No file specified for the upload", field);
                }
            }
            if (fieldValue.isNull()) {
                statement.setNull(fieldNumber, Types.NULL);
            } else {
                if (fieldValue instanceof TextValue) {
                    String textValue = ((TextValue) fieldValue).toXmlString();
                    statement.setString(fieldNumber, textValue);
                } else if (fieldValue instanceof IntegerValue) {
                    // if no related value, set relation field to null
                    if (field instanceof RelationField && (((IntegerValue) fieldValue).getValueInteger() == -1)
                            || (fieldValue.isNull())) {
                        statement.setNull(fieldNumber, Types.NULL);
                    } else {
                        statement.setInt(fieldNumber, ((IntegerValue) fieldValue).getValueInteger());
                    }
                } else if (fieldValue instanceof DurationValue) {
                    statement.setString(fieldNumber, ((DurationValue) fieldValue).getSqlFormatInterval());
                } else if (fieldValue instanceof DecimalValue) {
                    statement.setDouble(fieldNumber, ((DecimalValue) fieldValue).getValueFloat());
                } else if (fieldValue instanceof DateValue) {
                    if (((DateValue) fieldValue).getValueDate() != null) {
                        java.util.Date javaDateValue = ((DateValue) fieldValue).getValueDate().getTime();
                        java.sql.Timestamp sqlTimestampValue = new java.sql.Timestamp(javaDateValue.getTime());
                        statement.setTimestamp(fieldNumber, sqlTimestampValue);
                    } else {
                        statement.setTimestamp(fieldNumber, null);
                    }
                } else if (fieldValue instanceof CheckboxValue) {
                    statement.setBoolean(fieldNumber, ((CheckboxValue) fieldValue).getValueBoolean());
                } else if (fieldValue instanceof FileValue) {
                    statement.setString(fieldNumber, ((FileValue) fieldValue).toString());
                } else {
                    throw new CodingErrorException("Field value " + fieldValue + " is of unknown type "
                            + fieldValue.getClass().getSimpleName());
                }
            }
        }
        // We've finished setting individual fields. If an SQL error occurs
        // after this point we won't know which field caused it without
        // searching for it by other means
        // currentField = null;
        if (!newRecord) {
            if (globalEdit) {
                // Fill in the 'WHERE [row id field] in (?,..,?)' for use in
                // the UPDATE statement
                for (Integer aRowId : rowIds) {
                    if (tableData.isRecordLocked(conn, sessionData, aRowId)) {
                        throw new CantDoThatException(
                                "Record " + aRowId + " from table " + table + " is locked to prevent editing");
                    }
                    statement.setInt(++fieldNumber, aRowId);
                }
            } else {
                // Fill in the 'WHERE [row id field]=?' for use in the
                // UPDATE statement
                if (tableData.isRecordLocked(conn, sessionData, rowId)) {
                    throw new CantDoThatException(
                            "Record " + rowId + " from table " + table + " is locked to prevent editing");
                }
                statement.setInt(fieldNumber + 1, rowId);
            }
        }
        int numRowsAffected = statement.executeUpdate();
        statement.close();
        if ((numRowsAffected != 1) && (!globalEdit)) {
            conn.rollback();
            if (numRowsAffected > 0) {
                throw new ObjectNotFoundException(String.valueOf(numRowsAffected)
                        + " records would be altered during a single record save");
            } else {
                throw new ObjectNotFoundException(
                        "The current record can't be found to edit - perhaps someone else has deleted it");
            }
        }
        if (newRecord) {
            // Find the newly inserted Row ID
            // postgres-specific code, not database independent
            String SQLCode = "SELECT currval('" + table.getInternalTableName() + "_"
                    + table.getPrimaryKey().getInternalFieldName() + "_seq')";
            statement = conn.prepareStatement(SQLCode);
            ResultSet results = statement.executeQuery();
            if (results.next()) {
                newRowId = results.getInt(1);
            } else {
                results.close();
                statement.close();
                throw new SQLException(
                        "Row ID not found for the newly inserted record. '" + SQLCodeBuilder + "' didn't work");
            }
            results.close();
            statement.close();
        }
        conn.commit();
    } catch (SQLException sqlex) {
        // Find out which field caused the error by looking for internal
        // field names in the error message
        String errorMessage = sqlex.getMessage();
        for (BaseField possibleCauseField : dataToSave.keySet()) {
            if (errorMessage.contains(possibleCauseField.getInternalFieldName())) {
                if (errorMessage.contains("check constraint")) {
                    errorMessage = "The value " + dataToSave.get(possibleCauseField)
                            + " falls outside the allowed range";
                } else if (errorMessage.contains("not-null constraint")) {
                    errorMessage = "No value entered";
                } else if (errorMessage.contains("unique constraint")) {
                    errorMessage = "Value " + dataToSave.get(possibleCauseField)
                            + " is already in the database and cannot be entered again";
                } else if (errorMessage.contains("foreign key constraint")
                        && possibleCauseField instanceof RelationField) {
                    errorMessage = "Please select a valid "
                            + ((RelationField) possibleCauseField).getRelatedTable() + " record first";
                } else {
                    errorMessage = "Value " + dataToSave.get(possibleCauseField) + " not allowed ("
                            + Helpers.replaceInternalNames(errorMessage, table.getDefaultReport()) + ")";
                }
                throw new InputRecordException(errorMessage, possibleCauseField, sqlex);
            }
        }
        // Not able to find field
        errorMessage = Helpers.replaceInternalNames(errorMessage, table.getDefaultReport());
        throw new InputRecordException(errorMessage, null, sqlex);
    } finally {
        if (conn != null) {
            conn.close();
        }
    }
    // If any fields were files to upload, do the actual uploads.
    // Do this after the commit in case the uploads take a long time and
    // time out the SQL connection.
    for (BaseField field : dataToSave.keySet()) {
        if (field instanceof FileField) {
            try {
                if (newRecord) {
                    this.uploadFile(request, (FileField) field, (FileValue) dataToSave.get(field), newRowId,
                            multipartItems);
                } else {
                    this.uploadFile(request, (FileField) field, (FileValue) dataToSave.get(field), rowId,
                            multipartItems);
                }
            } catch (CantDoThatException cdtex) {
                throw new InputRecordException("Error uploading file: " + cdtex.getMessage(), field, cdtex);
            } catch (FileUploadException fuex) {
                throw new InputRecordException("Error uploading file: " + fuex.getMessage(), field, fuex);
            }
        }
    }
    if (newRecord) {
        sessionData.setRowId(table, newRowId);
    }
    this.logLastDataChangeTime(request);
    logLastTableDataChangeTime(table);
    UsageLogger usageLogger = new UsageLogger(this.dataSource);
    AppUserInfo user = null;
    if (request.getRemoteUser() == null) {
        user = ServletUtilMethods.getPublicUserForRequest(request, this.authManager.getAuthenticator());
    } else {
        user = this.authManager.getUserByUserName(request, request.getRemoteUser());
    }
    // Send websocket notification
    // UsageLogger.sendNotification(user, table, sessionData.getReport(),
    // rowId, "edit", "Record saved: " + dataToSave);
    // Log everything apart from hidden (auto set) fields
    Map<BaseField, BaseValue> dataToLog = new LinkedHashMap<BaseField, BaseValue>();
    for (Map.Entry<BaseField, BaseValue> entrySet : dataToSave.entrySet()) {
        BaseField field = entrySet.getKey();
        if (!field.getHidden()) {
            BaseValue value = entrySet.getValue();
            dataToLog.put(field, value);
        }
    }
    if (newRecord) {
        usageLogger.logDataChange(user, table, null, AppAction.SAVE_NEW_RECORD, newRowId, dataToLog.toString());
    } else if (globalEdit) {
        // TODO: need better logging of global edits
        usageLogger.logDataChange(user, table, null, AppAction.GLOBAL_EDIT, rowId, dataToLog.toString());
    } else {
        BaseField fieldUpdated = null;
        Set<BaseField> fieldSet = new TreeSet<BaseField>();
        for (BaseField field : dataToSave.keySet()) {
            if (!field.getHidden()) {
                fieldSet.add(field);
            }
        }
        if (fieldSet.size() == 1) {
            fieldUpdated = new LinkedList<BaseField>(fieldSet).getFirst();
        }
        usageLogger.logDataChange(user, table, fieldUpdated, AppAction.UPDATE_RECORD, rowId,
                dataToLog.toString());
    }
    UsageLogger.startLoggingThread(usageLogger);
}
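
At the end of saveRecord the visible field/value pairs are copied into dataToLog and the audit entry stores dataToLog.toString(). A simplified sketch of that filter-and-log step, with the field model reduced to a tiny stand-in class (the Field class and the sample values are hypothetical, not the application's API):

import java.util.LinkedHashMap;
import java.util.Map;

public class AuditLogDemo {

    // Minimal stand-in for a field definition: a name plus a hidden flag
    static final class Field {
        final String name;
        final boolean hidden;

        Field(String name, boolean hidden) {
            this.name = name;
            this.hidden = hidden;
        }

        @Override
        public String toString() {
            return name;
        }
    }

    public static void main(String[] args) {
        Map<Field, Object> dataToSave = new LinkedHashMap<>();
        dataToSave.put(new Field("title", false), "Quarterly report");
        dataToSave.put(new Field("last_modified", true), "2024-01-01 12:00");

        // Log everything apart from hidden (auto-set) fields
        Map<Field, Object> dataToLog = new LinkedHashMap<>();
        for (Map.Entry<Field, Object> entry : dataToSave.entrySet()) {
            if (!entry.getKey().hidden) {
                dataToLog.put(entry.getKey(), entry.getValue());
            }
        }

        // The audit record stores the map's toString() form
        System.out.println("Record saved: " + dataToLog);
        // Record saved: {title=Quarterly report}
    }
}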

From source file:org.apache.hadoop.hive.ql.parse.DDLSemanticAnalyzer.java

private void analyzeExchangePartition(String[] qualified, ASTNode ast) throws SemanticException {
    Table destTable = getTable(qualified);
    Table sourceTable = getTable(getUnescapedName((ASTNode) ast.getChild(1)));

    // Get the partition specs
    Map<String, String> partSpecs = getValidatedPartSpec(sourceTable, (ASTNode) ast.getChild(0), conf, false);
    validatePartitionValues(partSpecs);
    boolean sameColumns = MetaStoreUtils.compareFieldColumns(destTable.getAllCols(), sourceTable.getAllCols());
    boolean samePartitions = MetaStoreUtils.compareFieldColumns(destTable.getPartitionKeys(),
            sourceTable.getPartitionKeys());
    if (!sameColumns || !samePartitions) {
        throw new SemanticException(ErrorMsg.TABLES_INCOMPATIBLE_SCHEMAS.getMsg());
    }
    // check if source partition exists
    getPartitions(sourceTable, partSpecs, true);

    // Verify that the partitions specified are continuous
    // If a subpartition value is specified without specifying a partition's value
    // then we throw an exception
    int counter = isPartitionValueContinuous(sourceTable.getPartitionKeys(), partSpecs);
    if (counter < 0) {
        throw new SemanticException(ErrorMsg.PARTITION_VALUE_NOT_CONTINUOUS.getMsg(partSpecs.toString()));
    }
    List<Partition> destPartitions = null;
    try {
        destPartitions = getPartitions(destTable, partSpecs, true);
    } catch (SemanticException ex) {
        // A SemanticException is expected to be thrown here, as this partition
        // should not be present.
    }
    if (destPartitions != null) {
        // If any destination partition is present then throw a Semantic Exception.
        throw new SemanticException(ErrorMsg.PARTITION_EXISTS.getMsg(destPartitions.toString()));
    }
    AlterTableExchangePartition alterTableExchangePartition = new AlterTableExchangePartition(sourceTable,
            destTable, partSpecs);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTableExchangePartition), conf));
}

From source file:com.MainFiles.Functions.java

public HashMap getESBResponse(String strAgentID, String cardNumber, String processingCode, String amount,
        String referenceNumber, String narration, String transactionIdentifier, String strAccountNumber,
        String creditAccount, String strField65, String Phonenumber, String strField68)
        throws InterruptedException {
    String response = "";
    String strField_02 = "";
    String strField_00 = "";
    String strFieldPRD = "";
    String strProcessingCode = "";
    String strMessageToPOS = "";
    String STAN = "" + GetSequenceNumber();

    try {

        if (Phonenumber == null || Phonenumber.trim().isEmpty()) {
            strField_02 = cardNumber;
        } else {
            strField_02 = "255" + Phonenumber.substring(Phonenumber.length() - 9);
        }

        switch (processingCode) {
        case "120000": // MERCHANT PAYMENTS
            strProcessingCode = "400000";
            strField_00 = "0200";
            strFieldPRD = "AGMP";
            narration = "PAYMENT OF GOODS AND SERVICES FOR " + strAccountNumber;
            break;
        case "310000":// BALANCE ENQUIRY
            strProcessingCode = processingCode;
            strField_00 = "0200";
            narration = "BALANCE ENQUIRY FOR ACCOUNT " + strAccountNumber;
            break;
        case "300000": // AGENT FLOAT (we do BI for Agent float)
            strProcessingCode = "310000";
            strField_00 = "0200";
            strFieldPRD = "FLBI";
            narration = "AGENT FLOAT FOR ACCOUNT " + strAccountNumber;
            break;
        case "380000": //MINI STATEMENT
            strProcessingCode = processingCode;
            strField_00 = "0200";
            narration = "MINI STATEMENT FOR ACCOUNT " + strAccountNumber;
            break;
        case "340000": // CARD ACTIVATION
            strProcessingCode = processingCode;
            strField_00 = "0100";
            break;
        case "010000": // CASH WITHDRAWAL
            strProcessingCode = processingCode;
            strFieldPRD = "CHWL";
            strField_00 = "0200";
            narration = "CASH WITHDRAWAL FOR ACCOUNT " + strAccountNumber;
            break;
        case "500000": // BILL PAYMENTS
            strProcessingCode = processingCode;
            strFieldPRD = "";
            strField_00 = "0200";
            break;
        case "400000": // FUNDS TRANSFER
            strProcessingCode = processingCode;
            strFieldPRD = "AGFT";
            strField_00 = "0200";
            narration = "FUNDS TRANSFER FOR ACCOUNT " + strAccountNumber;
            break;
        case "210000": // CASH DEPOSIT
            strProcessingCode = processingCode;
            strFieldPRD = "CHDP";
            strField_00 = "0200";
            break;
        case "420000": // TOPUPS
            strField_00 = "0200";
            strField65 = strField_02;
            strProcessingCode = processingCode;
            break;
        default:
            strField_00 = "0200";
            strProcessingCode = processingCode;
            break;
        }

        Map<String, String> ISOdetails = new HashMap<>();

        ISOdetails.put("0", strField_00);
        ISOdetails.put("2", strField_02);
        ISOdetails.put("3", strProcessingCode);
        ISOdetails.put("4", amount);
        ISOdetails.put("7", this.anyDate("MMddHHmmss"));
        ISOdetails.put("11", STAN);
        ISOdetails.put("32", SOURCE_ID);
        ISOdetails.put("37", referenceNumber);
        ISOdetails.put("65", strField65);
        ISOdetails.put("66", getTerminalID(strField68));
        ISOdetails.put("68", strField68);
        ISOdetails.put("88", narration);
        ISOdetails.put("100", transactionIdentifier);
        ISOdetails.put("102", strAccountNumber);
        ISOdetails.put("103", creditAccount);
        ISOdetails.put("104", strAgentID);
        ISOdetails.put("CorrelationID", referenceNumber);
        ISOdetails.put("PRD", strFieldPRD);
        ISOdetails.put("HASH", sendTransactionHash(strAgentID, strField68).toLowerCase());
        this.log("REQUEST :: " + referenceNumber + "\n" + ISOdetails.toString() + "\n\n", "ESB_Request");

        boolean sentToWebLogic = false;
        HashMap ParamsFromAdapter = new HashMap();
        QueueWriter queueWriter = new QueueWriter(QUEUE_REQUEST, PROVIDER_URL);

        int trials = 0;
        do {
            sentToWebLogic = queueWriter.sendObject((HashMap) ISOdetails, referenceNumber);
            trials++;
        } while (!sentToWebLogic && trials < 3);

        if (sentToWebLogic) {
            long Start = System.currentTimeMillis();
            long Stop = Start + (Integer.parseInt(ISO8583Adaptor.ESB_TIMEOUT) * 1000);
            do {
                Thread.sleep(100);
                ParamsFromAdapter = this.getWeblogicMessageFromQueue(referenceNumber);
            } while (ParamsFromAdapter.isEmpty() && System.currentTimeMillis() < Stop);

            if (ParamsFromAdapter.isEmpty()) {
                // Exempt processing code 340000
                if (!ISOdetails.get("3").equals("340000")) {
                    // printMsg: No response from ESB
                    System.out.println("===================== ");
                    System.out.println("===================== ");
                    System.out.println("ESB Timeout Response at "
                            + (new SimpleDateFormat("MMMM dd,yyyy hh:mm:ss.SSS a zzzz"))
                                    .format(new java.util.Date()));
                    System.out.println("LoggedError:CorrelationID:" + referenceNumber + "");
                    System.out.println("LoggedError:StatusCode:999");
                    System.out.println("LoggedError:Status Description:Response timeout from ESB Gateway");

                    //Send Failed Response to POS
                    String TransactionType = getTransactionType(ISOdetails.get("3").toString());

                    strMessageToPOS += this.strResponseHeader(strField68) + "#";
                    strMessageToPOS += "AGENT ID:  " + ISOdetails.get("104").toString() + "#";
                    strMessageToPOS += "TRAN NUM:  " + ISOdetails.get("37").toString() + "#";
                    strMessageToPOS += "--------------------------------" + "#";
                    strMessageToPOS += "                                " + "#";
                    strMessageToPOS += padEqual(TransactionType.toUpperCase()) + "#";
                    strMessageToPOS += "                                " + "#";
                    strMessageToPOS += "   NO RESPONSE FROM ESB GATEWAY " + "#";
                    strMessageToPOS += " " + "#";
                    strMessageToPOS += this.strResponseFooter(ISOdetails.get("104").toString()) + "#";
                    SendPOSResponse(strMessageToPOS, ISOdetails.get("37").toString());
                }
            } else {

                switch (processingCode) {
                case "340000":// For Card Activation Return Array
                    return ParamsFromAdapter;
                case "300000"://AgentFloat
                    ParamsFromAdapter.remove("3");
                    ParamsFromAdapter.put("3", "300000");
                    break;
                case "120000":
                    ParamsFromAdapter.remove("3");
                    ParamsFromAdapter.put("3", "120000");
                    break;
                default:
                    break;
                }
                printScreenMessage(ParamsFromAdapter);
                response = this.genHashDelimiterString(ParamsFromAdapter, referenceNumber);
            }
        }
    } catch (Exception ex) {
        this.log("Error on getESBResponse " + ex.getMessage() + "\n" + this.StackTraceWriter(ex),
                "getESBResponse");
    }
    return null;
}

From source file:org.marketcetera.strategy.LanguageTestBase.java

/**
 * Executes one iteration of the <code>getAllCurrencyPositionsAsOf</code> test. 
 *
 * @param inDate a <code>Date</code> value
 * @param inExpectedPositions a <code>Map&lt;PositionKey&lt;Currency&gt;,BigDecimal&gt;</code> value
 * @throws Exception if an unexpected error occurs
 */
private void doAllCurrencyPositionsAsOfTest(Date inDate,
        Map<PositionKey<Currency>, BigDecimal> inExpectedPositions) throws Exception {
    StrategyCoordinates strategy = getPositionsStrategy();
    setPropertiesToNull();
    AbstractRunningStrategy.setProperty("allCurrencyPositionsAsOfDuringStop", "not-empty");
    if (inDate != null) {
        AbstractRunningStrategy.setProperty("date", Long.toString(inDate.getTime()));
    }
    verifyStrategyStartsAndStops(strategy.getName(), getLanguage(), strategy.getFile(), null, null, null);
    // verify expected results
    assertEquals((inExpectedPositions == null ? null : inExpectedPositions.toString()),
            AbstractRunningStrategy.getProperty("allCurrencyPositionsAsOf"));
    assertNull(AbstractRunningStrategy.getProperty("allCurrencyPositionsAsOfDuringStop"));
}