Example usage for org.apache.commons.lang StringUtils left

List of usage examples for org.apache.commons.lang StringUtils left

Introduction

On this page you can find example usages of org.apache.commons.lang StringUtils.left.

Prototype

public static String left(String str, int len) 

Source Link

Document

Gets the leftmost len characters of a String.

Usage

From source file:org.kuali.maven.plugins.externals.MojoHelper.java

/**
 * Removes a trailing Maven snapshot qualifier ({@code QUALIFIER_DELIMETER + MAVEN_SNAPSHOT_TOKEN},
 * e.g. "-SNAPSHOT") from a version string. The suffix match is case-insensitive.
 *
 * @param version the version string to trim; must not be null
 * @return the version with its snapshot suffix removed, or the original version unchanged
 *         when it does not end with the snapshot qualifier
 */
public String trimSnapshot(String version) {
    String snapshotSuffix = QUALIFIER_DELIMETER + MAVEN_SNAPSHOT_TOKEN;
    if (version.toUpperCase().endsWith(snapshotSuffix)) {
        // Was hard-coded as MAVEN_SNAPSHOT_TOKEN.length() + 1, which silently assumed a
        // one-character delimiter; derive the length from the actual suffix instead.
        return StringUtils.left(version, version.length() - snapshotSuffix.length());
    }
    return version;
}

From source file:org.kuali.ole.vnd.batch.VendorExcludeInputFileType.java

/**
 * Parses the byte content of a debarred-vendor CSV file into a list of
 * {@link DebarredVendorDetail} objects, one per non-empty data line.
 *
 * Column layout (0-based): 0 = full name, 1-5 = name parts used when column 0 is empty,
 * 6-7 = address lines, 8 = city, 9 = province, 10 = state, 11 = zip, 13 = aliases,
 * 18 = description. All values are truncated to the widths in {@code FIELD_SIZES}.
 *
 * @param fileByteContent the raw bytes of the CSV file
 * @return a {@code List<DebarredVendorDetail>} of the parsed vendors
 * @throws ParseException if a line contains no usable field (usually indicating a
 *         malformed quote earlier in the file) or if reading the content fails
 */
@Override
public Object parse(byte[] fileByteContent) throws ParseException {
    LOG.info("Parsing Vendor Exclude Input File ...");

    // Create a CSVReader with the conventional separator and quote characters and a null
    // escape character.
    // NOTE(review): Character.MIN_VALUE binds to the (char escapeChar) overload, so the
    // reader itself does not skip any lines; lineNumber merely starts at skipLine so the
    // reported numbers account for the header -- confirm this matches the input format.
    int skipLine = 1; // the first line is the header line
    Reader inReader = new InputStreamReader(new ByteArrayInputStream(fileByteContent));
    CSVReader reader = new CSVReader(inReader, ',', '"', Character.MIN_VALUE);

    List<DebarredVendorDetail> debarredVendors = new ArrayList<DebarredVendorDetail>();
    String[] nextLine;
    DebarredVendorDetail vendor;
    int lineNumber = skipLine;

    try {
        while ((nextLine = reader.readNext()) != null) {
            lineNumber++;
            LOG.debug("Line " + lineNumber + ": " + nextLine[0]);

            vendor = new DebarredVendorDetail();
            boolean emptyLine = true;

            // This should never happen, as for an empty line, CSVReader.readNext returns a
            // string array with an empty string as the only element; but just in case a
            // zero-sized array is returned, skip it.
            if (nextLine.length == 0) {
                continue;
            }

            StringBuffer name = new StringBuffer();
            if (StringUtils.isNotEmpty(nextLine[0])) {
                // The name field is not empty: use it as the vendor name.
                name.append(nextLine[0]);
            } else {
                // Otherwise concatenate the name-part columns (1-5) into the vendor name.
                // BUG FIX: the original used !isNotEmpty for columns 1-4, appending a part
                // only when it was EMPTY -- inverted logic; all five columns now append
                // only when the part is present, matching the (correct) column-5 check.
                for (int i = 1; i <= 5; i++) {
                    if (nextLine.length > i && StringUtils.isNotEmpty(nextLine[i])) {
                        name.append(" " + nextLine[i]);
                    }
                }
            }
            if (StringUtils.isNotEmpty(name.toString())) {
                vendor.setName(StringUtils.left(name.toString(), FIELD_SIZES[0]));
                emptyLine = false;
            }

            if (nextLine.length > 6 && StringUtils.isNotEmpty(nextLine[6])) {
                vendor.setAddress1(StringUtils.left(nextLine[6], FIELD_SIZES[1]));
                emptyLine = false;
            }
            if (nextLine.length > 7 && StringUtils.isNotEmpty(nextLine[7])) {
                vendor.setAddress2(StringUtils.left(nextLine[7], FIELD_SIZES[2]));
                emptyLine = false;
            }
            if (nextLine.length > 8 && StringUtils.isNotEmpty(nextLine[8])) {
                vendor.setCity(StringUtils.left(nextLine[8], FIELD_SIZES[3]));
                emptyLine = false;
            }
            if (nextLine.length > 9 && StringUtils.isNotEmpty(nextLine[9])) {
                vendor.setProvince(StringUtils.left(nextLine[9], FIELD_SIZES[4]));
                emptyLine = false;
            }
            if (nextLine.length > 10 && StringUtils.isNotEmpty(nextLine[10])) {
                vendor.setState(StringUtils.left(nextLine[10], FIELD_SIZES[5]));
                emptyLine = false;
            }
            if (nextLine.length > 11 && StringUtils.isNotEmpty(nextLine[11])) {
                vendor.setZip(StringUtils.left(nextLine[11], FIELD_SIZES[6]));
                emptyLine = false;
            }
            if (nextLine.length > 13 && StringUtils.isNotEmpty(nextLine[13])) {
                // Aliases may contain embedded quotes; strip them before truncating.
                vendor.setAliases(StringUtils.left(StringUtils.remove(nextLine[13], "\""), FIELD_SIZES[7]));
                emptyLine = false;
            }
            if (nextLine.length > 18 && StringUtils.isNotEmpty(nextLine[18])) {
                vendor.setDescription(StringUtils.left(nextLine[18], FIELD_SIZES[8]));
                emptyLine = false;
            }

            if (emptyLine) {
                // Throw a parser exception on a line that has no useful vendor info.
                // Since the file usually doesn't contain empty lines or lines with empty
                // fields, this is a good indicator that some line ahead has a format error,
                // e.g. a missing quote, which could mess up the following fields and lines.
                throw new ParseException("Line " + lineNumber
                        + " in the Vendor Exclude Input File contains no valid field or only empty fields within quote pairs. Please check the lines ahead to see if any field is missing quotes.");
            } else {
                vendor.setLoadDate(new Date(new java.util.Date().getTime()));
                debarredVendors.add(vendor);
            }
        }
    } catch (IOException ex) {
        throw new ParseException(
                "Error reading Vendor Exclude Input File at line " + lineNumber + ": " + ex.getMessage());
    } finally {
        // Close the reader (and the underlying stream) even when parsing fails;
        // the original leaked it.
        try {
            reader.close();
        } catch (IOException ex) {
            LOG.warn("Could not close Vendor Exclude Input File reader: " + ex.getMessage());
        }
    }

    LOG.info("Total number of lines read from Vendor Exclude Input File: " + lineNumber);
    LOG.info("Total number of vendors parsed from Vendor Exclude Input File: " + debarredVendors.size());
    return debarredVendors;
}

From source file:org.openbel.framework.core.kam.JdbcKAMLoaderImpl.java

/**
 * {@inheritDoc}/*w  ww . j  a  va2s.  co  m*/
 */
@Override
public void loadDocuments(List<DocumentHeader> documents) throws SQLException {
    PreparedStatement ps = getPreparedStatement(DOCUMENT_HEADER_INFORMATION_SQL);

    for (int i = 0; i < documents.size(); i++) {
        DocumentHeader dh = documents.get(i);

        ps.setInt(1, (i + 1));
        ps.setString(2, StringUtils.left(dh.getName(), 255));
        ps.setString(3, StringUtils.left(dh.getDescription(), 255));
        ps.setString(4, StringUtils.left(dh.getVersion(), 64));
        ps.setString(5, StringUtils.left(dh.getCopyright(), 4000));
        ps.setString(6, StringUtils.left(dh.getDisclaimer(), 4000));
        ps.setString(7, StringUtils.left(dh.getContactInfo(), 4000));
        ps.setString(8, StringUtils.left(dh.getAuthors(), 4000));
        ps.setString(9, StringUtils.left(dh.getLicenses(), 4000));

        ps.execute();
    }
}

From source file:org.openbel.framework.internal.KAMStoreDaoImpl.java

/**
 * Builds an SQL snippet to help select KAM edges that match <code>kamFilter</code>.
 *
 * The returned SQL snippet is a SELECT statement that queries the {@code kam_edge_id}'s of
 * the KAM edges that satisfy the provided KAM filter.  Callers may INNER JOIN with the snippet
 * to select more fields of the {@code kam_edge} table.
 *
 * @param kamFilter A selection of KAM filter criteria to apply in including or excluding edges.
 * <code>kamFilter</code> can not be null.
 * @return A pair of a SQL snippet and a list of all parameters to bind when using the
 * generated SQL in a PreparedStatement.
 */
private Pair<String, List<String>> getFilteredSelectProtoEdgesSql(KamFilter kamFilter) {

    List<FilterCriteria> criteria = kamFilter.getFilterCriteria();

    // Create a StringBuilder for each part of the SQL query.
    StringBuilder baseQuery = new StringBuilder("SELECT ke.kam_edge_id FROM @.kam_edge ke");
    StringBuilder citationJoins = new StringBuilder();
    StringBuilder whereClause = new StringBuilder(" WHERE TRUE");
    StringBuilder havingClause = new StringBuilder();

    boolean joinedStatements = false, joinedAnnotations = false, groupedByEdge = false;

    // Create lists to contain the Strings that will need to
    // be bound in the PreparedStatement
    ArrayList<String> annotationParameters = new ArrayList<String>(),
            citationParameters = new ArrayList<String>();

    int uniqueSubselectId = 0;

    // Matching some of the types of FilterCriteria require an SQL aggregate function
    // that computes boolean AND or OR over a group.
    // Use MAX(CASE WHEN some_boolean THEN 1 ELSE 0 END)=1 for a boolean OR aggregate function.
    // Use MIN(CASE WHEN some_boolean THEN 1 ELSE 0 END)=1 for a boolean AND aggregate function.

    for (FilterCriteria criterion : criteria) {
        boolean include = criterion.isInclude();

        // The filter criteria are ANDed together.

        if (criterion instanceof RelationshipTypeFilterCriteria) {
            Set<RelationshipType> relationships = ((RelationshipTypeFilterCriteria) criterion).getValues();
            int size = relationships.size();
            if (size == 0) {
                // There is nothing on which to match.
                continue;
            }

            whereClause.append(" AND ");

            // An include filter matches the edges that have at least one of the provided
            // relationship types (i.e. the tests of equality of the edge relationship type to each
            // of the provided relationship types are ORed).
            // An exclude filter matches the edges that have none of the provided relationship types
            // (i.e. the tests of equality of the edge relationship type to each of the provided
            // relationship types are ORed, then finally complemented (NOT)).
            if (!include) {
                whereClause.append("NOT ");
            }

            whereClause.append("(");
            int count = 0;
            for (RelationshipType relationship : relationships) {
                whereClause.append("ke.relationship_type_id=");
                whereClause.append(relationship.getValue());
                if (++count < size) {
                    whereClause.append(" OR ");
                }
            }
            whereClause.append(")");

        } else if (criterion instanceof BelDocumentFilterCriteria) {
            Set<BelDocumentInfo> documents = ((BelDocumentFilterCriteria) criterion).getValues();
            int size = documents.size();
            if (size == 0) {
                // There is nothing on which to match.
                continue;
            }

            // The provided documents are matched against the documents associated with the
            // supporting evidence for the KAM edges, that is the statements that are associated
            // with the edge.   This filter requires joining with the <code>statement</code> table.
            if (!joinedStatements) {
                baseQuery.append(
                        " LEFT OUTER JOIN @.kam_edge_statement_map kesm ON ke.kam_edge_id=kesm.kam_edge_id");
                baseQuery.append(" LEFT OUTER JOIN @.statement s ON kesm.statement_id=s.statement_id");
                joinedStatements = true;
            }

            // An include filter matches only the edges that have supporting evidence statements that
            // have a document that is one of the provided documents.  A predicate expression
            // for "being one of the provided documents" must be evaluated for all documents
            // of supporting statements.  If that predicate is true for any document then the edge
            // passes the filter (i.e. a boolean OR).
            // An exclude filter matches only the edges all of whose supporting evidence statements do
            // not have documents that are one of the provided documents.  The predicate expression
            // of "being one of the provided documents" must be false for all documents of all
            // supporting statements for an edge to pass an "exclude" filter (i.e. NOT OR).

            // The query will need to group by edge and use a HAVING clause to match the predicate.
            if (!groupedByEdge) {
                havingClause = new StringBuilder(" HAVING TRUE");
                groupedByEdge = true;
            }

            havingClause.append(" AND");
            if (!include) {
                havingClause.append(" NOT");
            }
            havingClause.append(" MAX(CASE WHEN (");
            int count = 0;
            for (BelDocumentInfo doc : documents) {
                havingClause.append("s.document_id=");
                havingClause.append(doc.getId());
                if (++count < size) {
                    havingClause.append(" OR ");
                }
            }
            havingClause.append(") THEN 1 ELSE 0 END)=1");

        } else if (criterion instanceof AnnotationFilterCriteria) {
            AnnotationFilterCriteria ac = (AnnotationFilterCriteria) criterion;
            Integer type = ac.getAnnotationType().getId();
            Set<String> annotations = ac.getValues();
            int size = annotations.size();
            if (size == 0) {
                continue;
            }

            // This case is very similar to the BelDocumentFilterCriteria case, except
            // that the match is performed on non-citation annotations.

            // The query will need to join on the <code>statement</code> table.
            if (!joinedStatements) {
                baseQuery.append(
                        " LEFT OUTER JOIN @.kam_edge_statement_map kesm ON ke.kam_edge_id=kesm.kam_edge_id");
                baseQuery.append(" LEFT OUTER JOIN @.statement s ON kesm.statement_id=s.statement_id");
                joinedStatements = true;
            }

            // The query will need to group by edge.
            if (!groupedByEdge) {
                havingClause = new StringBuilder(" HAVING TRUE");
                groupedByEdge = true;
            }

            // The query will also need to join the <code>annotations</code> table.
            if (!joinedAnnotations) {
                baseQuery.append(
                        " LEFT OUTER JOIN @.statement_annotation_map sam ON s.statement_id=sam.statement_id");
                baseQuery.append(" LEFT OUTER JOIN @.annotation a ON sam.annotation_id=a.annotation_id");
                baseQuery.append(
                        " LEFT OUTER JOIN @.annotation_definition ad ON a.annotation_definition_id=ad.annotation_definition_id");
                baseQuery.append(" LEFT OUTER JOIN @.objects o ON a.value_oid=o.objects_id");
                baseQuery.append(" LEFT OUTER JOIN @.objects_text ot ON o.objects_text_id=ot.objects_text_id");
                joinedAnnotations = true;
            }

            havingClause.append(" AND");
            if (!include) {
                havingClause.append(" NOT");
            }
            havingClause.append(" MAX(CASE WHEN (");
            int count = 0;
            for (String annotation : annotations) {
                String encryptedValue = null;
                try {
                    // The annotation values are stored encrypted in the database, so the match
                    // must be done on the encrypted values.
                    encryptedValue = encryptionService.encrypt(annotation);
                } catch (EncryptionServiceException ex) {
                    continue; // TODO: log the encryption failure instead of silently skipping
                }

                annotationParameters.add(encryptedValue);
                havingClause.append("(a.annotation_definition_id=");
                havingClause.append(type);
                havingClause.append(" AND ((o.varchar_value IS NOT NULL AND o.varchar_value=?)");
                havingClause.append(" OR (o.varchar_value IS NULL AND ");
                if (dbConnection.isDerby()) {
                    // Apache Derby does not support comparing CLOBs so cast
                    // to the largest VARCHAR type for the comparison.
                    havingClause.append("CAST(ot.text_value AS VARCHAR(32672))");
                } else {
                    havingClause.append("ot.text_value");
                }
                havingClause.append("=?)))");

                if (++count < size) {
                    havingClause.append(" OR ");
                }
            }
            havingClause.append(") THEN 1 ELSE 0 END)=1");

        } else if (criterion instanceof CitationFilterCriteria) {
            Set<Citation> citations = ((CitationFilterCriteria) criterion).getValues();
            int size = citations.size();
            if (size == 0) {
                continue;
            }

            // This case is similar to the AnnotationFilterCriteria, except that the annotations
            // used in the match are the predefined citation annotations
            // (authors, comment, date, name, reference, and type) and the predicate
            // to determine whether the citation of a supporting evidence statement
            // matches a provided citation is computed in a subselect.

            // The query will need to join on the <code>statement</code> table.
            if (!joinedStatements) {
                baseQuery.append(
                        " LEFT OUTER JOIN @.kam_edge_statement_map kesm ON ke.kam_edge_id=kesm.kam_edge_id");
                baseQuery.append(" LEFT OUTER JOIN @.statement s ON kesm.statement_id=s.statement_id");
                joinedStatements = true;
            }

            whereClause.append(" AND");

            // An include filter matches an edge only if one of the supporting statements has one
            // of the provided citations (i.e. boolean OR).
            // An exclude filter matches an edge only if none of the supporting statements has one
            // of the provided citations (i.e. NOT OR).
            if (!include) {
                whereClause.append(" NOT");
            }
            whereClause.append(" (FALSE");

            for (Citation citation : citations) {
                String encryptedReference = null, encryptedDate = null, encryptedName = null,
                        encryptedComment = null, encryptedType = null, encryptedAuthors = null;
                try {
                    // The citation annotation values are stored encrypted in the database, so the
                    // match must be done on the encrypted values.
                    final String id = citation.getId();
                    encryptedReference = (id != null ? encryptionService.encrypt(id) : null);
                    final String name = citation.getName();
                    encryptedName = (name != null ? encryptionService.encrypt(name) : null);
                    final String comment = citation.getComment();
                    encryptedComment = (comment != null ? encryptionService.encrypt(comment) : null);
                    final CitationType citationType = citation.getCitationType();
                    encryptedType = (citationType != null
                            ? encryptionService.encrypt(citationType.getDisplayValue())
                            : null);

                    // Pack the authors string exactly as done in
                    // CitationDataConverter.convert(Citation, Map<String, BELAnnotationDefinition>).
                    final List<String> authors = citation.getAuthors();
                    if (BELUtilities.hasItems(authors)) {
                        encryptedAuthors = encryptionService
                                .encrypt(StringUtils.left(PackUtils.packValues(authors), 4000));
                    }

                    final Date date = citation.getPublicationDate();
                    encryptedDate = (date != null ? encryptionService.encrypt(dateFormat.format(date)) : null);
                } catch (EncryptionServiceException ex) {
                    continue; // TODO: log the encryption failure instead of silently skipping
                }

                citationJoins.append(" LEFT OUTER JOIN (");
                citationJoins.append("SELECT s.statement_id statement_id, SUM(CASE WHEN (FALSE");

                int countNulls = 0;
                for (String type : KAMStoreConstants.CITATION_ANNOTATION_DEFINITION_IDS) {
                    // BUG FIX: the original compared the Strings with ==, which is reference
                    // equality and only works when both values happen to be interned; use
                    // equals() for value equality.
                    String value = null;
                    if (CitationAuthorsAnnotationDefinition.ANNOTATION_DEFINITION_ID.equals(type)) {
                        value = encryptedAuthors;
                    } else if (CitationDateAnnotationDefinition.ANNOTATION_DEFINITION_ID.equals(type)) {
                        value = encryptedDate;
                    } else if (CitationNameAnnotationDefinition.ANNOTATION_DEFINITION_ID.equals(type)) {
                        value = encryptedName;
                    } else if (CitationTypeAnnotationDefinition.ANNOTATION_DEFINITION_ID.equals(type)) {
                        value = encryptedType;
                    } else if (CitationCommentAnnotationDefinition.ANNOTATION_DEFINITION_ID.equals(type)) {
                        value = encryptedComment;
                    } else if (CitationReferenceAnnotationDefinition.ANNOTATION_DEFINITION_ID.equals(type)) {
                        value = encryptedReference;
                    }

                    if (noLength(value)) {
                        // For example the citation date can be null, in which case the
                        // supporting statements are not checked for the citation date
                        // annotation.
                        ++countNulls;
                        continue;
                    }

                    citationParameters.add(value);

                    citationJoins.append(" OR (ad.name='");
                    citationJoins.append(type);
                    citationJoins.append("' AND ((o.varchar_value IS NOT NULL AND o.varchar_value=?)");
                    citationJoins.append(" OR (o.varchar_value IS NULL AND ");
                    if (dbConnection.isDerby()) {
                        // Apache Derby does not support comparing CLOBs so cast
                        // to the largest VARCHAR type for the comparison.
                        // https://db.apache.org/derby/docs/10.7/ref/rrefjdbc96386.html
                        citationJoins.append("CAST(ot.text_value AS VARCHAR(32672))");
                    } else {
                        citationJoins.append("ot.text_value");
                    }
                    citationJoins.append("=?)))");
                }

                citationJoins.append(") THEN 1 ELSE 0 END) citations");
                citationJoins.append(" FROM @.statement s");
                citationJoins.append(
                        " LEFT OUTER JOIN @.statement_annotation_map sam ON s.statement_id=sam.statement_id");
                citationJoins.append(" LEFT OUTER JOIN @.annotation a ON sam.annotation_id=a.annotation_id");
                citationJoins.append(
                        " LEFT OUTER JOIN @.annotation_definition ad ON a.annotation_definition_id=ad.annotation_definition_id");
                citationJoins.append(" LEFT OUTER JOIN @.objects o ON a.value_oid=o.objects_id");
                citationJoins
                        .append(" LEFT OUTER JOIN @.objects_text ot ON o.objects_text_id=ot.objects_text_id");
                citationJoins.append(" GROUP BY s.statement_id) t");
                citationJoins.append(uniqueSubselectId);
                citationJoins.append(" ON s.statement_id=t");
                citationJoins.append(uniqueSubselectId);
                citationJoins.append(".statement_id");

                // The subselect above counts, per statement, how many of the provided
                // citation's annotations match (the "citations" column of the subselect).
                whereClause.append(" OR (t");
                whereClause.append(uniqueSubselectId);
                whereClause.append(".citations<>0 AND MOD(t");
                whereClause.append(uniqueSubselectId);
                whereClause.append(".citations,");
                whereClause.append(KAMStoreConstants.CITATION_ANNOTATION_DEFINITION_IDS.length - countNulls);
                whereClause.append(")=0)");

                ++uniqueSubselectId;
            }

            whereClause.append(")");
        }
    }

    // Prepare the final SQL query.
    baseQuery.append(citationJoins);
    baseQuery.append(whereClause);
    if (groupedByEdge) {
        baseQuery.append(" GROUP BY ke.kam_edge_id");
        baseQuery.append(havingClause);
    }

    // Create a pair of the SQL query string and a list of the string parameters that
    // need to be bound, in order, in any PreparedStatement that uses the query string.
    final String sql = baseQuery.toString();
    final int size = 2 * (citationParameters.size() + annotationParameters.size());
    final List<String> bindings = new ArrayList<String>(size);
    for (String param : citationParameters) {
        // Each parameter appears twice in the SQL: once for the varchar comparison
        // and once for the CLOB/text comparison.
        bindings.add(param);
        bindings.add(param);
    }
    for (String param : annotationParameters) {
        bindings.add(param);
        bindings.add(param);
    }

    return new Pair<String, List<String>>(sql, bindings);
}

From source file:org.opencastproject.publication.youtube.YouTubeV3PublicationServiceImpl.java

/**
 * Trims a title to the configured maximum field length, when one is set.
 *
 * @param title the title to truncate; may be null only when {@code tolerateNull} is true
 * @param tolerateNull whether a blank/null title is acceptable
 * @return the (possibly truncated) title
 * @throws IllegalArgumentException when the title is blank and nulls are not tolerated
 */
private String truncateTitleToMaxFieldLength(final String title, final boolean tolerateNull) {
    if (!tolerateNull && StringUtils.isBlank(title)) {
        throw new IllegalArgumentException("Title fields cannot be null, empty, or whitespace");
    }
    if (title == null || !isMaxFieldLengthSet()) {
        return title;
    }
    return StringUtils.left(title, maxFieldLength);
}

From source file:org.openhab.binding.lifx.internal.fields.MACAddress.java

/**
 * Groups the characters of {@code original} into chunks of {@code length}, joined by
 * {@code separator}, and stores the result in the {@code hex} field
 * (e.g. "0123456789AB" with length 2 and ":" becomes "01:23:45:67:89:AB").
 *
 * Fixes over the original implementation: appends only the bytes actually read (the old
 * code re-appended stale zero bytes as NUL characters when the final chunk was short),
 * uses a StringBuilder instead of String concatenation in a loop, and places separators
 * between groups rather than stripping a single trailing character (which was wrong for
 * multi-character separators).
 *
 * @param original the string to group; assumed to be ASCII hex digits -- TODO confirm
 * @param length the number of characters per group
 * @param separator the string inserted between groups
 * @throws IOException declared for interface compatibility; ByteArrayInputStream does not throw
 */
private void formatHex(String original, int length, String separator) throws IOException {
    ByteArrayInputStream bis = new ByteArrayInputStream(original.getBytes());
    byte[] buffer = new byte[length];
    StringBuilder result = new StringBuilder();

    int read;
    while ((read = bis.read(buffer)) > 0) {
        if (result.length() > 0) {
            result.append(separator);
        }
        for (int i = 0; i < read; i++) {
            result.append((char) buffer[i]);
        }
    }

    hex = result.toString();
}

From source file:org.openhab.binding.plugwise.internal.Energy.java

/**
 * Constructs an Energy sample from a Plugwise log timestamp.
 *
 * @param logdate 8-hex-digit timestamp: 2 digits year-since-2000, 2 digits month,
 *        4 digits minutes into that month; "FFFFFFFF" (or any non-8-char value)
 *        means "no timestamp" and the current time is used instead
 * @param l the pulse count for this sample
 * @param interval the measurement interval
 */
public Energy(String logdate, long l, int interval) {

    if (logdate.length() == 8 && !logdate.equals("FFFFFFFF")) {
        int year = Integer.parseInt(StringUtils.left(logdate, 2), 16) + 2000;
        int month = Integer.parseInt(StringUtils.mid(logdate, 2, 2), 16);
        long minutes = Long.parseLong(StringUtils.right(logdate, 4), 16);

        // Interpret the timestamp as UTC, convert to the local zone.
        // NOTE(review): the minusHours(1) offset is inherited from the original and its
        // rationale is not evident from this code -- confirm against the Plugwise protocol.
        time = new DateTime(year, month, 1, 0, 0, DateTimeZone.UTC).plusMinutes((int) minutes)
                .toDateTime(DateTimeZone.getDefault()).minusHours(1);
    } else {
        time = DateTime.now();
    }

    // The original also assigned this.interval and this.pulses = 0 inside the
    // "FFFFFFFF" branch; both were unconditionally overwritten here, so those dead
    // assignments have been removed. Final values are identical.
    this.interval = interval;
    this.pulses = l;
}

From source file:org.openhab.binding.plugwise.protocol.InformationResponseMessage.java

/**
 * Parses the information-response payload into this message's fields.
 *
 * Payload layout (hex fields): MAC(16) year(2) month(2) minutes(4) logAddress(8)
 * powerState(2) hertz(2) hardwareVersion(12) firmwareVersion(8) unknown(2).
 * On a mismatch the payload is logged at debug level and the fields are left unset.
 */
@Override
protected void parsePayLoad() {
    // Renamed from RESPONSE_PATTERN: UPPER_SNAKE_CASE is reserved for constants, and this
    // is a local. NOTE(review): consider hoisting the compiled Pattern to a
    // private static final field so it is not recompiled on every call.
    Pattern responsePattern = Pattern
            .compile("(\\w{16})(\\w{2})(\\w{2})(\\w{4})(\\w{8})(\\w{2})(\\w{2})(\\w{12})(\\w{8})(\\w{2})");

    Matcher matcher = responsePattern.matcher(payLoad);
    if (matcher.matches()) {
        MAC = matcher.group(1);
        year = Integer.parseInt(matcher.group(2), 16) + 2000;
        month = Integer.parseInt(matcher.group(3), 16);
        minutes = Integer.parseInt(matcher.group(4), 16);
        // presumably 278528 is the base offset of the log buffer, 8 bytes per entry
        // -- TODO confirm against the Plugwise protocol documentation
        logAddress = (Integer.parseInt(matcher.group(5), 16) - 278528) / 8;
        powerState = (matcher.group(6).equals("01"));
        hertz = Integer.parseInt(matcher.group(7), 16);
        // Format the 12-hex-digit hardware version as three dash-separated groups of 4.
        hardwareVersion = StringUtils.left(matcher.group(8), 4) + "-" + StringUtils.mid(matcher.group(8), 4, 4)
                + "-" + StringUtils.right(matcher.group(8), 4);
        firmwareVersion = Integer.parseInt(matcher.group(9), 16);
        unknown = Integer.parseInt(matcher.group(10), 16);
    } else {
        // BUG FIX: the original message named the wrong class (RoleCallResponseMessage).
        logger.debug("Plugwise protocol InformationResponseMessage error: {} does not match", payLoad);
    }
}

From source file:org.polymap.core.data.imex.shape.ShapeExportFeaturesOperation.java

/**
 * Exports the features of the operation context into an ESRI Shapefile, zips
 * all shapefile parts into memory and opens a browser download for the result.
 * <p>
 * Shapefile format restrictions are applied on the fly: attribute names are
 * truncated to 10 characters (made unique with a numeric suffix on collision)
 * and String values are abbreviated to 254 characters.
 *
 * @param monitor Progress monitor; progress is reported in chunks of 100
 *        features. Canceling aborts the export via a RuntimeException thrown
 *        from the retyping collection.
 * @return {@link Status#OK} on success.
 * @throws UnsupportedOperationException if the source schema is not a
 *         {@link SimpleFeatureType} (complex features are not supported).
 * @throws Exception on any other export failure.
 */
public Status execute(final IProgressMonitor monitor) throws Exception {
    monitor.beginTask(context.adapt(FeatureOperationExtension.class).getLabel(), context.features().size());

    // complex type? Shapefiles can only store simple (flat) feature types.
    FeatureCollection features = context.features();
    if (!(features.getSchema() instanceof SimpleFeatureType)) {
        throw new UnsupportedOperationException("Complex features are not supported yet.");
    }

    SimpleFeatureType srcSchema = (SimpleFeatureType) features.getSchema();

    // shapeSchema: prefer the layer label as base name, fall back to the type name
    ILayer layer = context.adapt(ILayer.class);
    final String basename = layer != null ? FilenameUtils.normalize(layer.getLabel())
            : FilenameUtils.normalize(srcSchema.getTypeName());

    SimpleFeatureTypeBuilder ftb = new SimpleFeatureTypeBuilder();
    ftb.setName(basename);
    ftb.setCRS(srcSchema.getCoordinateReferenceSystem());

    // attributes: map each source attribute name to a unique, shapefile-safe name
    final Map<String, String> nameMap = new HashMap();
    for (AttributeDescriptor attr : srcSchema.getAttributeDescriptors()) {
        // attribute name (shapefile: 10 max)
        String targetName = StringUtils.left(attr.getLocalName(), 10);
        // on collision append a numeric suffix, shortening the stem to stay within 10 chars
        for (int i = 1; nameMap.containsValue(targetName); i++) {
            targetName = StringUtils.left(attr.getLocalName(), 10 - (i / 10 + 1)) + i;
            log.info("    Shapefile: " + attr.getLocalName() + " -> " + targetName);
        }
        nameMap.put(attr.getLocalName(), targetName);

        ftb.add(targetName, attr.getType().getBinding());
    }
    final SimpleFeatureType shapeSchema = ftb.buildFeatureType();

    // retyped collection: converts each source feature to the shape schema lazily,
    // so the whole data set never has to be materialized in memory
    final SimpleFeatureBuilder fb = new SimpleFeatureBuilder(shapeSchema);
    FeatureCollection<SimpleFeatureType, SimpleFeature> retyped = new RetypingFeatureCollection<SimpleFeatureType, SimpleFeature>(
            features, shapeSchema) {
        private int count = 0;

        protected SimpleFeature retype(SimpleFeature feature) {
            if (monitor.isCanceled()) {
                throw new RuntimeException("Operation canceled.");
            }
            for (Property prop : feature.getProperties()) {
                Object value = prop.getValue();
                // Shapefile has length limit 254
                if (value instanceof String) {
                    value = StringUtils.abbreviate((String) value, 254);
                }
                fb.set(nameMap.get(prop.getName().getLocalPart()), value);
            }
            // report progress in chunks of 100 to keep UI updates cheap
            if (++count % 100 == 0) {
                monitor.worked(100);
                monitor.subTask("Objekte: " + count);
            }
            return fb.buildFeature(feature.getID());
        }
    };

    ShapefileDataStoreFactory shapeFactory = new ShapefileDataStoreFactory();

    // write into a temp *.shp file; GeoTools creates the sidecar files (.dbf, .shx, ...) beside it
    Map<String, Serializable> params = new HashMap<String, Serializable>();
    final File shapefile = File.createTempFile(basename + "-", ".shp");
    shapefile.deleteOnExit();
    params.put(ShapefileDataStoreFactory.URLP.key, shapefile.toURI().toURL());
    params.put(ShapefileDataStoreFactory.CREATE_SPATIAL_INDEX.key, Boolean.FALSE);

    ShapefileDataStore shapeDs = (ShapefileDataStore) shapeFactory.createNewDataStore(params);
    shapeDs.createSchema(shapeSchema);

    //shapeDs.forceSchemaCRS(DefaultGeographicCRS.WGS84);
    //shapeDs.setStringCharset( )

    String typeName = shapeDs.getTypeNames()[0];
    FeatureStore<SimpleFeatureType, SimpleFeature> shapeFs = (FeatureStore<SimpleFeatureType, SimpleFeature>) shapeDs
            .getFeatureSource(typeName);

    // no tx needed; without tx saves a lot of memory

    shapeFs.addFeatures(retyped);

    // open download: zip all shapefile parts and register them with the download service
    Polymap.getSessionDisplay().asyncExec(new Runnable() {
        public void run() {
            String url = DownloadServiceHandler.registerContent(new ContentProvider() {

                public String getContentType() {
                    return "application/zip";
                }

                public String getFilename() {
                    return basename + ".shp.zip";
                }

                // Zips all shapefile parts into memory and deletes the temp files afterwards.
                public InputStream getInputStream() throws Exception {
                    ByteArrayOutputStream bout = new ByteArrayOutputStream(1024 * 1024);
                    ZipOutputStream zipOut = new ZipOutputStream(bout);

                    for (String fileSuffix : FILE_SUFFIXES) {
                        zipOut.putNextEntry(new ZipEntry(basename + "." + fileSuffix));
                        File f = new File(shapefile.getParent(),
                                StringUtils.substringBefore(shapefile.getName(), ".") + "." + fileSuffix);
                        InputStream in = new BufferedInputStream(new FileInputStream(f));
                        IOUtils.copy(in, zipOut);
                        in.close();
                        f.delete();
                    }
                    zipOut.close();
                    return new ByteArrayInputStream(bout.toByteArray());
                }

                public boolean done(boolean success) {
                    // all files deleted in #getInputStream()
                    return true;
                }
            });

            log.info("Shapefile: download URL: " + url);
            ExternalBrowser.open("download_window", url,
                    ExternalBrowser.NAVIGATION_BAR | ExternalBrowser.STATUS);
        }
    });
    monitor.done();
    return Status.OK;
}

From source file:org.projectforge.continuousdb.DatabaseUpdateDao.java

/**
 * Max length is 30 (may-be for Oracle compatibility).
 * @param table/*from   w  w w.ja  va2s . co m*/
 * @param columnNames
 * @param existingConstraintNames
 * @return The generated constraint name different to the given names.
 */
public String createUniqueConstraintName(final String table, final String[] columnNames,
        final String[] existingConstraintNames) {
    final StringBuilder sb = new StringBuilder();
    sb.append(StringUtils.left(table, 15)).append("_uq_").append(StringUtils.left(columnNames[0], 8));
    final String prefix = sb.toString().toLowerCase();
    for (int i = 1; i < 1000; i++) {
        final String name = prefix + i;
        if (existingConstraintNames == null || existingConstraintNames.length == 0) {
            return name;
        }
        boolean exists = false;
        for (final String existingName : existingConstraintNames) {
            if (existingName != null && existingName.equals(name) == true) {
                exists = true;
                break;
            }
        }
        if (exists == false) {
            return name;
        }
    }
    final String message = "Oups, can't find any free constraint name! This must be a bug or a database out of control! Tryiing to find a name '"
            + prefix + "[0-999]' for table '" + table + "'.";
    log.error(message);
    throw new UnsupportedOperationException(message);
}