Example usage for java.util LinkedList get

List of usage examples for java.util LinkedList get

Introduction

On this page you can find example usage for java.util.LinkedList.get.

Prototype

public E get(int index) 

Document

Returns the element at the specified position in this list.
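
Before the project examples, here is a minimal, self-contained sketch of the method itself. Note that get(int index) on a LinkedList walks the list from the nearest end on every call, so positional access costs O(n); indices outside [0, size()-1] throw IndexOutOfBoundsException.

import java.util.LinkedList;

public class LinkedListGetExample {
    public static void main(String[] args) {
        LinkedList<String> list = new LinkedList<>();
        list.add("alpha");
        list.add("beta");
        list.add("gamma");

        // positional access: returns the element at the given index
        System.out.println(list.get(0)); // alpha
        System.out.println(list.get(2)); // gamma

        // out-of-range indices fail fast
        try {
            list.get(3);
        } catch (IndexOutOfBoundsException e) {
            System.out.println("invalid index: " + e.getMessage());
        }
    }
}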

Usage

From source file:au.edu.ausstage.networks.LookupManager.java

/**
 * A method to lookup the key collaborators for a contributor
 *
 * @param id         the unique id of the contributor
 * @param formatType the required format of the data
 * @param sortType   the required way in which the data is to be sorted
 *
 * @return           the results of the lookup
 */
public String getKeyCollaborators(String id, String formatType, String sortType) {

    // check on the parameters
    if (!InputUtils.isValidInt(id) || !InputUtils.isValid(formatType)
            || !InputUtils.isValid(sortType)) {
        throw new IllegalArgumentException("All parameters to this method are required");
    }

    // define a LinkedList to store the results
    java.util.LinkedList<Collaborator> collaborators = new java.util.LinkedList<Collaborator>();

    // define other helper variables
    QuerySolution row = null;
    Collaborator collaborator = null;

    // define the base sparql query
    String sparqlQuery = "PREFIX foaf:       <" + FOAF.NS + ">" + "PREFIX ausestage:  <" + AuseStage.NS + "> "
            + "SELECT ?collaborator ?collabGivenName ?collabFamilyName ?function ?firstDate ?lastDate ?collabCount "
            + "WHERE {  " + "       @ a foaf:Person ; "
            + "                      ausestage:hasCollaboration ?collaboration. "
            + "       ?collaboration ausestage:collaborator ?collaborator; "
            + "                      ausestage:collaborationFirstDate ?firstDate; "
            + "                      ausestage:collaborationLastDate ?lastDate; "
            + "                      ausestage:collaborationCount ?collabCount. "
            + "       ?collaborator  foaf:givenName ?collabGivenName; "
            + "                      foaf:familyName ?collabFamilyName; "
            + "                      ausestage:function ?function. " + "       FILTER (?collaborator != @) "
            + "}";

    // apply the requested sort order
    if (sortType.equals("count")) {
        sparqlQuery += " ORDER BY DESC(?collabCount)";
    } else if (sortType.equals("name")) {
        sparqlQuery += " ORDER BY ?collabFamilyName ?collabGivenName";
    }

    // build a URI from the id
    id = AusStageURI.getContributorURI(id);

    // add the contributor URI to the query
    sparqlQuery = sparqlQuery.replaceAll("@", "<" + id + ">");

    // execute the query
    ResultSet results = rdf.executeSparqlQuery(sparqlQuery);

    // build the dataset
    // use a numeric sort order
    while (results.hasNext()) {
        // loop through the result set
        // get a new row of data
        row = results.nextSolution();

        // instantiate a collaborator object
        collaborator = new Collaborator(AusStageURI.getId(row.get("collaborator").toString()));

        // check to see if the list contains this collaborator
        int index = collaborators.indexOf(collaborator);
        if (index != -1) {
            // collaborator is already in the list
            collaborator = collaborators.get(index);

            // update the function
            collaborator.setFunction(row.get("function").toString());

        } else {
            // collaborator is not on the list

            // get the name
            collaborator.setGivenName(row.get("collabGivenName").toString());
            collaborator.setFamilyName(row.get("collabFamilyName").toString(), true);

            // get the dates
            collaborator.setFirstDate(row.get("firstDate").toString());
            collaborator.setLastDate(row.get("lastDate").toString());

            // get the collaboration count
            collaborator.setCollaborations(Integer.toString(row.get("collabCount").asLiteral().getInt()));

            // add the url
            collaborator.setUrl(AusStageURI.getURL(row.get("collaborator").toString()));

            // add the function
            collaborator.setFunction(row.get("function").toString());

            collaborators.add(collaborator);
        }
    }

    // play nice and tidy up
    rdf.tidyUp();

    // sort by the id
    if (sortType.equals("id") == true) {
        TreeMap<Integer, Collaborator> collaboratorsToSort = new TreeMap<Integer, Collaborator>();

        for (int i = 0; i < collaborators.size(); i++) {
            collaborator = collaborators.get(i);

            collaboratorsToSort.put(Integer.parseInt(collaborator.getId()), collaborator);
        }

        // empty the list
        collaborators.clear();

        // add the collaborators back to the list
        Collection<Collaborator> values = collaboratorsToSort.values();
        Iterator<Collaborator> iterator = values.iterator();

        while (iterator.hasNext()) {
            // get the collaborator
            collaborator = iterator.next();

            collaborators.add(collaborator);
        }

        collaboratorsToSort = null;
    }

    // define a variable to store the data
    String dataString = null;

    if (formatType.equals("html")) {
        dataString = createHTMLOutput(collaborators);
    } else if (formatType.equals("xml")) {
        dataString = createXMLOutput(collaborators);
    } else if (formatType.equals("json")) {
        dataString = createJSONOutput(collaborators);
    }

    // return the data
    return dataString;
}
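
A note on the lookup pattern above: indexOf and get each scan the LinkedList from the head, so the de-duplication loop is quadratic in the number of collaborators. A hedged alternative sketch (with a minimal stand-in for the Collaborator class, not the actual AusStage type) keeps the list for ordering but backs it with a HashMap for constant-time lookups:

import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;

public class CollaboratorIndex {
    // Hypothetical stand-in for the Collaborator class used above.
    static class Collaborator {
        final String id;
        Collaborator(String id) { this.id = id; }
    }

    private final LinkedList<Collaborator> ordered = new LinkedList<>();
    private final Map<String, Collaborator> byId = new HashMap<>();

    // Returns the collaborator already registered under this id, or registers
    // the candidate; replaces the indexOf(...) followed by get(...) pattern.
    Collaborator getOrAdd(Collaborator candidate) {
        Collaborator existing = byId.get(candidate.id);
        if (existing != null) {
            return existing;
        }
        ordered.add(candidate);
        byId.put(candidate.id, candidate);
        return candidate;
    }
}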

From source file:eu.dime.ps.semantic.service.impl.PimoService.java

private void assertPersonalIdentifier(String identifier) throws NameNotUniqueException {
    LinkedList<URI> existing = null;
    PlainLiteral nameLiteral = new PlainLiteralImpl(identifier);
    ClosableIterator<Statement> it = tripleStore.findStatements(Variable.ANY, Variable.ANY,
            NAO.personalIdentifier, nameLiteral);
    while (it.hasNext()) {
        Statement s = it.next();
        // check if it is in the user's pimo
        if (tripleStore.containsStatements(Variable.ANY, s.getSubject(), PIMO.isDefinedBy, this.userPimoUri)) {
            if (existing == null) {
                existing = new LinkedList<URI>();
            }
            existing.add(s.getSubject().asURI());
        } else {
            logger.warn("Found resource <" + s.getSubject() + "> with personal identifier '" + identifier
                    + "' which is not " + "defined by the user's PIMO. Maybe this is a programming bug");
        }
    }
    it.close();
    if (existing != null) {
        throw new NameNotUniqueException(
                "Identifier '" + identifier + "' was already used for Thing with URI <" + existing.get(0) + ">",
                existing.get(0).toString());
    }
}

From source file:ddf.catalog.transformer.input.pdf.GeoPdfParserImpl.java

/**
 * Generates a WKT compliant String from a PDF Document if it contains GeoPDF information.
 * Currently, only WGS84 Projections are supported (GEOGRAPHIC GeoPDF ProjectionType).
 *
 * @param pdfDocument - The PDF document
 * @return the WKT String
 * @throws IOException
 */
@Override
public String apply(PDDocument pdfDocument) throws IOException {
    ToDoubleVisitor toDoubleVisitor = new ToDoubleVisitor();
    LinkedList<String> polygons = new LinkedList<>();

    for (PDPage pdPage : pdfDocument.getPages()) {
        COSDictionary cosObject = pdPage.getCOSObject();

        COSBase lgiDictObject = cosObject.getObjectFromPath(LGIDICT);

        // Handle Multiple Map Frames
        if (lgiDictObject instanceof COSArray) {
            for (int i = 0; i < ((COSArray) lgiDictObject).size(); i++) {
                COSDictionary lgidict = (COSDictionary) cosObject.getObjectFromPath(LGIDICT + "/[" + i + "]");

                COSDictionary projectionArray = (COSDictionary) lgidict.getDictionaryObject(PROJECTION);
                if (projectionArray != null) {
                    String projectionType = ((COSString) projectionArray.getItem(PROJECTION_TYPE)).getString();
                    if (GEOGRAPHIC.equals(projectionType)) {
                        COSArray neatlineArray = (COSArray) cosObject
                                .getObjectFromPath(LGIDICT + "/[" + i + "]/" + NEATLINE);
                        getWktFromNeatLine(lgidict, neatlineArray, toDoubleVisitor).ifPresent(polygons::add);
                    } else {
                        LOGGER.debug("Unsupported projection type {}.  Map Frame will be skipped.",
                                projectionType);
                    }
                } else {
                    LOGGER.debug("No projection array found on the map frame.  Map Frame will be skipped.");
                }
            }
            // Handle One Map Frame
        } else if (lgiDictObject instanceof COSDictionary) {
            COSDictionary lgidict = (COSDictionary) lgiDictObject;
            COSDictionary projectionArray = (COSDictionary) lgidict.getDictionaryObject(PROJECTION);
            if (projectionArray != null) {
                String projectionType = ((COSString) projectionArray.getItem(PROJECTION_TYPE)).getString();
                if (GEOGRAPHIC.equals(projectionType)) {
                    COSArray neatlineArray = (COSArray) cosObject.getObjectFromPath(LGIDICT + "/" + NEATLINE);
                    if (neatlineArray == null) {
                        neatlineArray = generateNeatLineFromPDFDimensions(pdPage);
                    }

                    getWktFromNeatLine(lgidict, neatlineArray, toDoubleVisitor).ifPresent(polygons::add);
                } else {
                    LOGGER.debug("Unsupported projection type {}.  Map Frame will be skipped.", projectionType);
                }
            } else {
                LOGGER.debug("No projection array found on the map frame.  Map Frame will be skipped.");
            }
        }
    }

    if (polygons.size() == 0) {
        LOGGER.debug(
                "No GeoPDF information found on PDF during transformation.  Metacard location will not be set.");
        return null;
    }

    if (polygons.size() == 1) {
        return POLYGON + polygons.get(0) + "))";
    } else {
        return polygons.stream().map(polygon -> "((" + polygon + "))")
                .collect(Collectors.joining(",", MULTIPOLYGON, ")"));
    }
}
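
The closing branch is the interesting LinkedList.get call here: a single polygon is wrapped directly via polygons.get(0), while multiple polygons are assembled with Collectors.joining. A reduced sketch of just that assembly step, assuming POLYGON and MULTIPOLYGON hold the usual WKT prefixes (their actual values live elsewhere in the class):

import java.util.LinkedList;
import java.util.stream.Collectors;

public class WktJoinSketch {
    // Assumed values for the constants referenced above.
    private static final String POLYGON = "POLYGON ((";
    private static final String MULTIPOLYGON = "MULTIPOLYGON (";

    static String toWkt(LinkedList<String> polygons) {
        if (polygons.isEmpty()) {
            return null; // mirrors the "no GeoPDF information" branch
        }
        if (polygons.size() == 1) {
            return POLYGON + polygons.get(0) + "))";
        }
        return polygons.stream().map(polygon -> "((" + polygon + "))")
                .collect(Collectors.joining(",", MULTIPOLYGON, ")"));
    }

    public static void main(String[] args) {
        LinkedList<String> polygons = new LinkedList<>();
        polygons.add("0 0, 0 1, 1 1, 0 0");
        polygons.add("2 2, 2 3, 3 3, 2 2");
        System.out.println(toWkt(polygons));
        // MULTIPOLYGON (((0 0, 0 1, 1 1, 0 0)),((2 2, 2 3, 3 3, 2 2)))
    }
}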

From source file:ddf.catalog.transformer.input.pdf.GeoPdfParser.java

/**
 * Generates a WKT compliant String from a PDF Document if it contains GeoPDF information.
 * Currently, only WGS84 Projections are supported (GEOGRAPHIC GeoPDF ProjectionType).
 *
 * @param pdfDocument - The PDF document
 * @return the WKT String
 * @throws IOException
 */
public String getWktFromPDF(PDDocument pdfDocument) throws IOException {
    ToDoubleVisitor toDoubleVisitor = new ToDoubleVisitor();
    LinkedList<String> polygons = new LinkedList<>();

    for (PDPage pdPage : pdfDocument.getPages()) {
        COSDictionary cosObject = pdPage.getCOSObject();

        COSBase lgiDictObject = cosObject.getObjectFromPath(LGIDICT);

        // Handle Multiple Map Frames
        if (lgiDictObject instanceof COSArray) {
            for (int i = 0; i < ((COSArray) lgiDictObject).size(); i++) {
                COSDictionary lgidict = (COSDictionary) cosObject.getObjectFromPath(LGIDICT + "/[" + i + "]");

                COSDictionary projectionArray = (COSDictionary) lgidict.getDictionaryObject(PROJECTION);
                if (projectionArray != null) {
                    String projectionType = ((COSString) projectionArray.getItem(PROJECTION_TYPE)).getString();
                    if (GEOGRAPHIC.equals(projectionType)) {
                        COSArray neatlineArray = (COSArray) cosObject
                                .getObjectFromPath(LGIDICT + "/[" + i + "]/" + NEATLINE);
                        String wktString = getWktFromNeatLine(lgidict, neatlineArray, toDoubleVisitor);
                        polygons.add(wktString);
                    } else {
                        LOGGER.debug("Unsupported projection type {}.  Map Frame will be skipped.",
                                projectionType);
                    }
                } else {
                    LOGGER.debug("No projection array found on the map frame.  Map Frame will be skipped.");
                }
            }
            // Handle One Map Frame
        } else if (lgiDictObject instanceof COSDictionary) {
            COSDictionary lgidict = (COSDictionary) lgiDictObject;
            COSDictionary projectionArray = (COSDictionary) lgidict.getDictionaryObject(PROJECTION);
            if (projectionArray != null) {
                String projectionType = ((COSString) projectionArray.getItem(PROJECTION_TYPE)).getString();
                if (GEOGRAPHIC.equals(projectionType)) {
                    COSArray neatlineArray = (COSArray) cosObject.getObjectFromPath(LGIDICT + "/" + NEATLINE);
                    if (neatlineArray == null) {
                        neatlineArray = generateNeatLineFromPDFDimensions(pdPage);
                    }
                    polygons.add(getWktFromNeatLine(lgidict, neatlineArray, toDoubleVisitor));
                } else {
                    LOGGER.debug("Unsupported projection type {}.  Map Frame will be skipped.", projectionType);
                }
            } else {
                LOGGER.debug("No projection array found on the map frame.  Map Frame will be skipped.");
            }
        }
    }

    if (polygons.size() == 0) {
        LOGGER.debug(
                "No GeoPDF information found on PDF during transformation.  Metacard location will not be set.");
        return null;
    }

    if (polygons.size() == 1) {
        return POLYGON + polygons.get(0) + "))";
    } else {
        return polygons.stream().map(polygon -> "((" + polygon + "))")
                .collect(Collectors.joining(",", MULTIPOLYGON, ")"));
    }
}

From source file:com.datatorrent.stram.StreamingContainerManager.java

/**
 * Compute checkpoints required for a given operator instance to be recovered.
 * This is done by looking at checkpoints available for downstream dependencies first,
 * and then selecting the most recent available checkpoint that is smaller than downstream.
 *
 * @param operator Operator instance for which to find recovery checkpoint
 * @param ctx      Context into which to collect traversal info
 */
public void updateRecoveryCheckpoints(PTOperator operator, UpdateCheckpointsContext ctx) {
    if (operator.getRecoveryCheckpoint().windowId < ctx.committedWindowId.longValue()) {
        ctx.committedWindowId.setValue(operator.getRecoveryCheckpoint().windowId);
    }

    if (operator.getState() == PTOperator.State.ACTIVE && (ctx.currentTms
            - operator.stats.lastWindowIdChangeTms) > operator.stats.windowProcessingTimeoutMillis) {
        // if the checkpoint is ahead, then it is not blocked but waiting for activation (state-less recovery, at-most-once)
        if (ctx.committedWindowId.longValue() >= operator.getRecoveryCheckpoint().windowId) {
            LOG.debug("Marking operator {} blocked committed window {}, recovery window {}", operator,
                    Codec.getStringWindowId(ctx.committedWindowId.longValue()),
                    Codec.getStringWindowId(operator.getRecoveryCheckpoint().windowId));
            ctx.blocked.add(operator);
        }
    }

    // the most recent checkpoint eligible for recovery based on downstream state
    Checkpoint maxCheckpoint = Checkpoint.INITIAL_CHECKPOINT;

    Set<OperatorMeta> checkpointGroup = ctx.checkpointGroups.get(operator.getOperatorMeta());
    if (checkpointGroup == null) {
        checkpointGroup = Collections.singleton(operator.getOperatorMeta());
    }
    // find intersection of checkpoints that group can collectively move to
    TreeSet<Checkpoint> commonCheckpoints = new TreeSet<>(new Checkpoint.CheckpointComparator());
    synchronized (operator.checkpoints) {
        commonCheckpoints.addAll(operator.checkpoints);
    }
    Set<PTOperator> groupOpers = new HashSet<>(checkpointGroup.size());
    boolean pendingDeploy = operator.getState() == PTOperator.State.PENDING_DEPLOY;
    if (checkpointGroup.size() > 1) {
        for (OperatorMeta om : checkpointGroup) {
            Collection<PTOperator> operators = plan.getAllOperators(om);
            for (PTOperator groupOper : operators) {
                synchronized (groupOper.checkpoints) {
                    commonCheckpoints.retainAll(groupOper.checkpoints);
                }
                // visit all downstream operators of the group
                ctx.visited.add(groupOper);
                groupOpers.add(groupOper);
                pendingDeploy |= groupOper.getState() == PTOperator.State.PENDING_DEPLOY;
            }
        }
        // highest common checkpoint
        if (!commonCheckpoints.isEmpty()) {
            maxCheckpoint = commonCheckpoints.last();
        }
    } else {
        // without logical grouping, treat partitions as independent
        // this is especially important for parallel partitioning
        ctx.visited.add(operator);
        groupOpers.add(operator);
        maxCheckpoint = operator.getRecentCheckpoint();
        if (ctx.recovery && maxCheckpoint.windowId == Stateless.WINDOW_ID && operator.isOperatorStateLess()) {
            long currentWindowId = WindowGenerator.getWindowId(ctx.currentTms, this.vars.windowStartMillis,
                    this.getLogicalPlan().getValue(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS));
            maxCheckpoint = new Checkpoint(currentWindowId, 0, 0);
        }
    }

    // DFS downstream operators
    for (PTOperator groupOper : groupOpers) {
        for (PTOperator.PTOutput out : groupOper.getOutputs()) {
            for (PTOperator.PTInput sink : out.sinks) {
                PTOperator sinkOperator = sink.target;
                if (groupOpers.contains(sinkOperator)) {
                    continue; // downstream operator within group
                }
                if (!ctx.visited.contains(sinkOperator)) {
                    // downstream traversal
                    updateRecoveryCheckpoints(sinkOperator, ctx);
                }
                // recovery window id cannot move backwards
                // when dynamically adding new operators
                if (sinkOperator.getRecoveryCheckpoint().windowId >= operator
                        .getRecoveryCheckpoint().windowId) {
                    maxCheckpoint = Checkpoint.min(maxCheckpoint, sinkOperator.getRecoveryCheckpoint());
                }

                if (ctx.blocked.contains(sinkOperator)) {
                    if (sinkOperator.stats.getCurrentWindowId() == operator.stats.getCurrentWindowId()) {
                        // downstream operator is blocked by this operator
                        ctx.blocked.remove(sinkOperator);
                    }
                }
            }
        }
    }

    // find the common checkpoint that is <= downstream recovery checkpoint
    if (!commonCheckpoints.contains(maxCheckpoint)) {
        if (!commonCheckpoints.isEmpty()) {
            maxCheckpoint = Objects.firstNonNull(commonCheckpoints.floor(maxCheckpoint), maxCheckpoint);
        }
    }

    for (PTOperator groupOper : groupOpers) {
        // checkpoint frozen during deployment
        if (!pendingDeploy || ctx.recovery) {
            // remove previous checkpoints
            Checkpoint c1 = Checkpoint.INITIAL_CHECKPOINT;
            LinkedList<Checkpoint> checkpoints = groupOper.checkpoints;
            synchronized (checkpoints) {
                if (!checkpoints.isEmpty() && (checkpoints.getFirst()).windowId <= maxCheckpoint.windowId) {
                    c1 = checkpoints.getFirst();
                    Checkpoint c2;
                    while (checkpoints.size() > 1
                            && ((c2 = checkpoints.get(1)).windowId) <= maxCheckpoint.windowId) {
                        checkpoints.removeFirst();
                        //LOG.debug("Checkpoint to delete: operator={} windowId={}", operator.getName(), c1);
                        this.purgeCheckpoints.add(new Pair<PTOperator, Long>(groupOper, c1.windowId));
                        c1 = c2;
                    }
                } else {
                    if (ctx.recovery && checkpoints.isEmpty() && groupOper.isOperatorStateLess()) {
                        LOG.debug("Adding checkpoint for stateless operator {} {}", groupOper,
                                Codec.getStringWindowId(maxCheckpoint.windowId));
                        c1 = groupOper.addCheckpoint(maxCheckpoint.windowId, this.vars.windowStartMillis);
                    }
                }
            }
            //LOG.debug("Operator {} checkpoints: commit {} recent {}", new Object[] {operator.getName(), c1, operator.checkpoints});
            groupOper.setRecoveryCheckpoint(c1);
        } else {
            LOG.debug("Skipping checkpoint update {} during {}", groupOper, groupOper.getState());
        }
    }

}
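
The LinkedList.get(1) call above peeks at the second checkpoint before removing the head, which guarantees the list always retains at least one checkpoint at or below maxCheckpoint. A stripped-down sketch of that pruning invariant, using plain window ids instead of Checkpoint objects (and assuming, as in the guarded branch above, that the list is non-empty):

import java.util.LinkedList;

public class CheckpointPruneSketch {
    // Drops leading checkpoints while the *next* one is still <= the bound,
    // so the newest eligible checkpoint survives as the head and is returned.
    static long prune(LinkedList<Long> checkpoints, long maxWindowId) {
        long kept = checkpoints.getFirst();
        while (checkpoints.size() > 1 && checkpoints.get(1) <= maxWindowId) {
            checkpoints.removeFirst(); // superseded by the next entry
            kept = checkpoints.getFirst();
        }
        return kept;
    }

    public static void main(String[] args) {
        LinkedList<Long> checkpoints = new LinkedList<>();
        checkpoints.add(10L); checkpoints.add(20L); checkpoints.add(30L); checkpoints.add(40L);
        System.out.println(prune(checkpoints, 30L)); // 30
        System.out.println(checkpoints);             // [30, 40]
    }
}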

From source file:org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler.java

/**
 * The method that creates the Job corresponding to a MapReduceOper.
 * The assumption is that
 * every MapReduceOper will have a load and a store. The JobConf removes
 * the load operator and serializes the input filespec so that PigInputFormat can
 * take over the creation of splits. It also removes the store operator
 * and serializes the output filespec so that PigOutputFormat can take over
 * record writing. The remaining portion of the map plan and reduce plans are
 * serialized and stored for the PigMapReduce or PigMapOnly objects to take over
 * the actual running of the plans.
 * The Mapper &amp; Reducer classes and the required key value formats are set.
 * Checks if this is a map only job and uses PigMapOnly class as the mapper
 * and uses PigMapReduce otherwise.
 * If it is a Map Reduce job, it is bound to have a package operator. Remove it from
 * the reduce plan and serializes it so that the PigMapReduce class can use it to package
 * the indexed tuples received by the reducer.
 * @param mro - The MapReduceOper for which the JobConf is required
 * @param config - the Configuration object from which JobConf is built
 * @param pigContext - The PigContext passed on from execution engine
 * @return Job corresponding to mro
 * @throws JobCreationException
 */
@SuppressWarnings({ "unchecked" })
private Job getJob(MROperPlan plan, MapReduceOper mro, Configuration config, PigContext pigContext)
        throws JobCreationException {
    org.apache.hadoop.mapreduce.Job nwJob = null;

    try {
        nwJob = new org.apache.hadoop.mapreduce.Job(config);
    } catch (Exception e) {
        throw new JobCreationException(e);
    }

    Configuration conf = nwJob.getConfiguration();

    ArrayList<FileSpec> inp = new ArrayList<FileSpec>();
    ArrayList<List<OperatorKey>> inpTargets = new ArrayList<List<OperatorKey>>();
    ArrayList<String> inpSignatureLists = new ArrayList<String>();
    ArrayList<Long> inpLimits = new ArrayList<Long>();
    ArrayList<POStore> storeLocations = new ArrayList<POStore>();
    Path tmpLocation = null;

    // add settings for pig statistics
    String setScriptProp = conf.get(PigConfiguration.PIG_SCRIPT_INFO_ENABLED, "true");
    if (setScriptProp.equalsIgnoreCase("true")) {
        MRScriptState ss = MRScriptState.get();
        ss.addSettingsToConf(mro, conf);
    }

    conf.set(MRConfiguration.MAPPER_NEW_API, "true");
    conf.set(MRConfiguration.REDUCER_NEW_API, "true");

    String buffPercent = conf.get(MRConfiguration.JOB_REDUCE_MARKRESET_BUFFER_PERCENT);
    if (buffPercent == null || Double.parseDouble(buffPercent) <= 0) {
        log.info(MRConfiguration.JOB_REDUCE_MARKRESET_BUFFER_PERCENT + " is not set, set to default 0.3");
        conf.set(MRConfiguration.JOB_REDUCE_MARKRESET_BUFFER_PERCENT, "0.3");
    } else {
        log.info(MRConfiguration.JOB_REDUCE_MARKRESET_BUFFER_PERCENT + " is set to "
                + conf.get(MRConfiguration.JOB_REDUCE_MARKRESET_BUFFER_PERCENT));
    }

    configureCompression(conf);

    try {
        //Process the POLoads
        List<POLoad> lds = PlanHelper.getPhysicalOperators(mro.mapPlan, POLoad.class);

        if (lds != null && lds.size() > 0) {
            for (POLoad ld : lds) {
                LoadFunc lf = ld.getLoadFunc();
                lf.setLocation(ld.getLFile().getFileName(), nwJob);

                //Store the inp filespecs
                inp.add(ld.getLFile());
            }
        }

        if (!mro.reducePlan.isEmpty()) {
            log.info("Reduce phase detected, estimating # of required reducers.");
            adjustNumReducers(plan, mro, nwJob);
        } else {
            nwJob.setNumReduceTasks(0);
        }

        if (!pigContext.inIllustrator && !pigContext.getExecType().isLocal()) {
            if (okToRunLocal(nwJob, mro, lds)) {
                log.info(SMALL_JOB_LOG_MSG);
                // override with the default conf to run in local mode
                for (Entry<String, String> entry : defaultConf) {
                    String key = entry.getKey();
                    if (key.equals(MRConfiguration.REDUCE_TASKS) || key.equals(MRConfiguration.JOB_REDUCES)) {
                        // this must not be set back to the default in case it has been set to 0 for example.
                        continue;
                    }
                    if (key.startsWith("fs.")) {
                        // we don't want to change fs settings back
                        continue;
                    }
                    if (key.startsWith("io.")) {
                        // we don't want to change io settings back
                        continue;
                    }
                    String value = entry.getValue();
                    if (conf.get(key) == null || !conf.get(key).equals(value)) {
                        conf.set(key, value);
                    }
                }

                conf.setBoolean(PigImplConstants.CONVERTED_TO_LOCAL, true);
            } else {
                log.info(BIG_JOB_LOG_MSG);
                // Search to see if we have any UDF/LoadFunc/StoreFunc that need to pack things into the
                // distributed cache.
                List<String> cacheFiles = new ArrayList<String>();
                List<String> shipFiles = new ArrayList<String>();
                UdfCacheShipFilesVisitor mapUdfCacheFileVisitor = new UdfCacheShipFilesVisitor(mro.mapPlan);
                mapUdfCacheFileVisitor.visit();
                cacheFiles.addAll(mapUdfCacheFileVisitor.getCacheFiles());
                shipFiles.addAll(mapUdfCacheFileVisitor.getShipFiles());

                UdfCacheShipFilesVisitor reduceUdfCacheFileVisitor = new UdfCacheShipFilesVisitor(
                        mro.reducePlan);
                reduceUdfCacheFileVisitor.visit();
                cacheFiles.addAll(reduceUdfCacheFileVisitor.getCacheFiles());
                shipFiles.addAll(reduceUdfCacheFileVisitor.getShipFiles());

                setupDistributedCache(pigContext, conf, cacheFiles.toArray(new String[] {}), false);

                // Setup the DistributedCache for this job
                List<URL> allJars = new ArrayList<URL>();

                for (URL extraJar : pigContext.extraJars) {
                    if (!allJars.contains(extraJar)) {
                        allJars.add(extraJar);
                    }
                }

                for (String udf : mro.UDFs) {
                    Class clazz = pigContext.getClassForAlias(udf);
                    if (clazz != null) {
                        String jar = JarManager.findContainingJar(clazz);
                        if (jar != null) {
                            URL jarURL = new File(jar).toURI().toURL();
                            if (!allJars.contains(jarURL)) {
                                allJars.add(jarURL);
                            }
                        }
                    }
                }

                for (String scriptJar : pigContext.scriptJars) {
                    URL jar = new File(scriptJar).toURI().toURL();
                    if (!allJars.contains(jar)) {
                        allJars.add(jar);
                    }
                }

                for (String shipFile : shipFiles) {
                    URL jar = new File(shipFile).toURI().toURL();
                    if (!allJars.contains(jar)) {
                        allJars.add(jar);
                    }
                }

                for (String defaultJar : JarManager.getDefaultJars()) {
                    URL jar = new File(defaultJar).toURI().toURL();
                    if (!allJars.contains(jar)) {
                        allJars.add(jar);
                    }
                }

                for (URL jar : allJars) {
                    boolean predeployed = false;
                    for (String predeployedJar : pigContext.predeployedJars) {
                        if (predeployedJar.contains(new File(jar.toURI()).getName())) {
                            predeployed = true;
                        }
                    }
                    if (!predeployed) {
                        if (jar.getFile().toLowerCase().endsWith(".jar")) {
                            putJarOnClassPathThroughDistributedCache(pigContext, conf, jar);
                        } else {
                            setupDistributedCache(pigContext, conf, new String[] { jar.getPath() }, true);
                        }
                    }
                }

                File scriptUDFJarFile = JarManager.createPigScriptUDFJar(pigContext);
                if (scriptUDFJarFile != null) {
                    putJarOnClassPathThroughDistributedCache(pigContext, conf,
                            scriptUDFJarFile.toURI().toURL());
                }
            }
        }

        for (String udf : mro.UDFs) {
            if (udf.contains("GFCross")) {
                Object func = PigContext.instantiateFuncFromSpec(new FuncSpec(udf));
                if (func instanceof GFCross) {
                    String crossKey = ((GFCross) func).getCrossKey();
                    conf.set(PigImplConstants.PIG_CROSS_PARALLELISM + "." + crossKey,
                            Integer.toString(mro.getRequestedParallelism()));
                }
            }
        }

        if (lds != null && lds.size() > 0) {
            for (POLoad ld : lds) {
                //Store the target operators for tuples read
                //from this input
                List<PhysicalOperator> ldSucs = mro.mapPlan.getSuccessors(ld);
                List<OperatorKey> ldSucKeys = new ArrayList<OperatorKey>();
                if (ldSucs != null) {
                    for (PhysicalOperator operator2 : ldSucs) {
                        ldSucKeys.add(operator2.getOperatorKey());
                    }
                }
                inpTargets.add(ldSucKeys);
                inpSignatureLists.add(ld.getSignature());
                inpLimits.add(ld.getLimit());
                //Remove the POLoad from the plan
                if (!pigContext.inIllustrator)
                    mro.mapPlan.remove(ld);
            }
        }

        if (Utils.isLocal(pigContext, conf)) {
            ConfigurationUtil.replaceConfigForLocalMode(conf);
        }
        conf.set(PigInputFormat.PIG_INPUTS, ObjectSerializer.serialize(inp));
        conf.set(PigInputFormat.PIG_INPUT_TARGETS, ObjectSerializer.serialize(inpTargets));
        conf.set(PigInputFormat.PIG_INPUT_SIGNATURES, ObjectSerializer.serialize(inpSignatureLists));
        conf.set(PigInputFormat.PIG_INPUT_LIMITS, ObjectSerializer.serialize(inpLimits));

        // Removing job credential entry before serializing pigcontext into jobconf
        // since this path would be invalid for the new job being created
        pigContext.getProperties().remove("mapreduce.job.credentials.binary");

        conf.setBoolean(PigImplConstants.PIG_EXECTYPE_MODE_LOCAL, pigContext.getExecType().isLocal());
        conf.set(PigImplConstants.PIG_LOG4J_PROPERTIES,
                ObjectSerializer.serialize(pigContext.getLog4jProperties()));
        conf.set("udf.import.list", ObjectSerializer.serialize(PigContext.getPackageImportList()));
        // this is for unit tests since some don't create PigServer

        // if user specified the job name using -D switch, Pig won't reset the name then.
        if (System.getProperty(MRConfiguration.JOB_NAME) == null
                && pigContext.getProperties().getProperty(PigContext.JOB_NAME) != null) {
            nwJob.setJobName(pigContext.getProperties().getProperty(PigContext.JOB_NAME));
        }

        if (pigContext.getProperties().getProperty(PigContext.JOB_PRIORITY) != null) {
            // If the job priority was set, attempt to get the corresponding enum value
            // and set the hadoop job priority.
            String jobPriority = pigContext.getProperties().getProperty(PigContext.JOB_PRIORITY).toUpperCase();
            try {
                // Allow arbitrary case; the Hadoop job priorities are all upper case.
                conf.set(MRConfiguration.JOB_PRIORITY, JobPriority.valueOf(jobPriority).toString());

            } catch (IllegalArgumentException e) {
                StringBuffer sb = new StringBuffer("The job priority must be one of [");
                JobPriority[] priorities = JobPriority.values();
                for (int i = 0; i < priorities.length; ++i) {
                    if (i > 0)
                        sb.append(", ");
                    sb.append(priorities[i]);
                }
                sb.append("].  You specified [" + jobPriority + "]");
                throw new JobCreationException(sb.toString());
            }
        }

        setupDistributedCache(pigContext, conf, pigContext.getProperties(), "pig.streaming.ship.files", true);
        setupDistributedCache(pigContext, conf, pigContext.getProperties(), "pig.streaming.cache.files", false);

        nwJob.setInputFormatClass(PigInputFormat.class);

        // tmp file compression setups
        // PIG-3741 This must be done before setStoreLocation on POStores
        Utils.setTmpFileCompressionOnConf(pigContext, conf);

        //Process POStore and remove it from the plan
        LinkedList<POStore> mapStores = PlanHelper.getPhysicalOperators(mro.mapPlan, POStore.class);
        LinkedList<POStore> reduceStores = PlanHelper.getPhysicalOperators(mro.reducePlan, POStore.class);

        for (POStore st : mapStores) {
            storeLocations.add(st);
            StoreFuncInterface sFunc = st.getStoreFunc();
            sFunc.setStoreLocation(st.getSFile().getFileName(), nwJob);
            if (sFunc instanceof OverwritableStoreFunc) {
                OverwritableStoreFunc osf = (OverwritableStoreFunc) sFunc;
                if (osf.shouldOverwrite()) {
                    osf.cleanupOutput(st, nwJob);
                }
            }
        }

        for (POStore st : reduceStores) {
            storeLocations.add(st);
            StoreFuncInterface sFunc = st.getStoreFunc();
            sFunc.setStoreLocation(st.getSFile().getFileName(), nwJob);
            if (sFunc instanceof OverwritableStoreFunc) {
                OverwritableStoreFunc osf = (OverwritableStoreFunc) sFunc;
                if (osf.shouldOverwrite()) {
                    osf.cleanupOutput(st, nwJob);
                }
            }
        }

        setOutputFormat(nwJob);

        if (mapStores.size() + reduceStores.size() == 1) { // single store case
            log.info("Setting up single store job");

            POStore st;
            if (reduceStores.isEmpty()) {
                st = mapStores.get(0);
                if (!pigContext.inIllustrator)
                    mro.mapPlan.remove(st);
            } else {
                st = reduceStores.get(0);
                if (!pigContext.inIllustrator)
                    mro.reducePlan.remove(st);
            }

            MapRedUtil.setupStreamingDirsConfSingle(st, pigContext, conf);
        } else if (mapStores.size() + reduceStores.size() > 0) { // multi store case
            log.info("Setting up multi store job");
            MapRedUtil.setupStreamingDirsConfMulti(pigContext, conf);

            boolean disableCounter = conf.getBoolean("pig.disable.counter", false);
            if (disableCounter) {
                log.info("Disable Pig custom output counters");
            }
            int idx = 0;
            for (POStore sto : storeLocations) {
                sto.setDisableCounter(disableCounter);
                sto.setMultiStore(true);
                sto.setIndex(idx++);
            }
        }

        // store map key type
        // this is needed when the key is null to create
        // an appropriate NullableXXXWritable object
        conf.set("pig.map.keytype", ObjectSerializer.serialize(new byte[] { mro.mapKeyType }));

        // set parent plan in all operators in map and reduce plans
        // currently the parent plan is really used only when POStream is present in the plan
        new PhyPlanSetter(mro.mapPlan).visit();
        new PhyPlanSetter(mro.combinePlan).visit();
        new PhyPlanSetter(mro.reducePlan).visit();

        // this call modifies the ReplFiles names of POFRJoin operators
        // within the MR plans, must be called before the plans are
        // serialized
        setupDistributedCacheForJoin(mro, pigContext, conf);

        SchemaTupleFrontend.copyAllGeneratedToDistributedCache(pigContext, conf);

        POPackage pack = null;
        if (mro.reducePlan.isEmpty()) {
            //MapOnly Job
            nwJob.setMapperClass(PigMapOnly.Map.class);
            if (!pigContext.inIllustrator)
                conf.set("pig.mapPlan", ObjectSerializer.serialize(mro.mapPlan));
            if (mro.isEndOfAllInputSetInMap()) {
                // this is used in Map.close() to decide whether the
                // pipeline needs to be rerun one more time in the close()
                // The pipeline is rerun if there either was a stream or POMergeJoin
                conf.set(END_OF_INP_IN_MAP, "true");
            }
        } else {
            //Map Reduce Job
            //Process the POPackage operator and remove it from the reduce plan
            if (!mro.combinePlan.isEmpty()) {
                POPackage combPack = (POPackage) mro.combinePlan.getRoots().get(0);
                mro.combinePlan.remove(combPack);
                nwJob.setCombinerClass(PigCombiner.Combine.class);
                conf.set("pig.combinePlan", ObjectSerializer.serialize(mro.combinePlan));
                conf.set("pig.combine.package", ObjectSerializer.serialize(combPack));
            } else if (mro.needsDistinctCombiner()) {
                nwJob.setCombinerClass(DistinctCombiner.Combine.class);
                log.info("Setting identity combiner class.");
            }
            pack = (POPackage) mro.reducePlan.getRoots().get(0);

            if (!pigContext.inIllustrator) {
                mro.reducePlan.remove(pack);
            }
            nwJob.setMapperClass(PigMapReduce.Map.class);
            nwJob.setReducerClass(PigMapReduce.Reduce.class);

            if (mro.customPartitioner != null)
                nwJob.setPartitionerClass(PigContext.resolveClassName(mro.customPartitioner));

            if (!pigContext.inIllustrator)
                conf.set("pig.mapPlan", ObjectSerializer.serialize(mro.mapPlan));
            if (mro.isEndOfAllInputSetInMap()) {
                // this is used in Map.close() to decide whether the
                // pipeline needs to be rerun one more time in the close()
                // The pipeline is rerun only if there was a stream or merge-join.
                conf.set(END_OF_INP_IN_MAP, "true");
            }
            if (!pigContext.inIllustrator)
                conf.set("pig.reducePlan", ObjectSerializer.serialize(mro.reducePlan));
            if (mro.isEndOfAllInputSetInReduce()) {
                // this is used in Map.close() to decide whether the
                // pipeline needs to be rerun one more time in the close()
                // The pipeline is rerun only if there was a stream
                conf.set("pig.stream.in.reduce", "true");
            }
            if (!pigContext.inIllustrator)
                conf.set("pig.reduce.package", ObjectSerializer.serialize(pack));
            conf.set("pig.reduce.key.type", Byte.toString(pack.getPkgr().getKeyType()));

            if (mro.getUseSecondaryKey()) {
                nwJob.setGroupingComparatorClass(PigSecondaryKeyGroupComparator.class);
                nwJob.setPartitionerClass(SecondaryKeyPartitioner.class);
                nwJob.setSortComparatorClass(PigSecondaryKeyComparator.class);
                nwJob.setOutputKeyClass(NullableTuple.class);
                conf.set("pig.secondarySortOrder", ObjectSerializer.serialize(mro.getSecondarySortOrder()));

            } else {
                Class<? extends WritableComparable> keyClass = HDataType
                        .getWritableComparableTypes(pack.getPkgr().getKeyType()).getClass();
                nwJob.setOutputKeyClass(keyClass);
                selectComparator(mro, pack.getPkgr().getKeyType(), nwJob);
            }
            nwJob.setOutputValueClass(NullableTuple.class);
        }

        if (mro.isGlobalSort() || mro.isLimitAfterSort()) {
            if (mro.isGlobalSort()) {
                String symlink = addSingleFileToDistributedCache(pigContext, conf, mro.getQuantFile(),
                        "pigsample");
                conf.set("pig.quantilesFile", symlink);
                nwJob.setPartitionerClass(WeightedRangePartitioner.class);
            }

            if (mro.isUDFComparatorUsed) {
                boolean usercomparator = false;
                for (String compFuncSpec : mro.UDFs) {
                    Class comparator = PigContext.resolveClassName(compFuncSpec);
                    if (ComparisonFunc.class.isAssignableFrom(comparator)) {
                        nwJob.setMapperClass(PigMapReduce.MapWithComparator.class);
                        nwJob.setReducerClass(PigMapReduce.ReduceWithComparator.class);
                        conf.set("pig.reduce.package", ObjectSerializer.serialize(pack));
                        conf.set("pig.usercomparator", "true");
                        nwJob.setOutputKeyClass(NullableTuple.class);
                        nwJob.setSortComparatorClass(comparator);
                        usercomparator = true;
                        break;
                    }
                }
                if (!usercomparator) {
                    String msg = "Internal error. Can't find the UDF comparator";
                    throw new IOException(msg);
                }

            } else {
                conf.set("pig.sortOrder", ObjectSerializer.serialize(mro.getSortOrder()));
            }
        }

        if (mro.isSkewedJoin()) {
            String symlink = addSingleFileToDistributedCache(pigContext, conf, mro.getSkewedJoinPartitionFile(),
                    "pigdistkey");
            conf.set("pig.keyDistFile", symlink);
            nwJob.setPartitionerClass(SkewedPartitioner.class);
            nwJob.setMapperClass(PigMapReduce.MapWithPartitionIndex.class);
            nwJob.setMapOutputKeyClass(NullablePartitionWritable.class);
            nwJob.setGroupingComparatorClass(PigGroupingPartitionWritableComparator.class);
        }

        if (mro.isCounterOperation()) {
            if (mro.isRowNumber()) {
                nwJob.setMapperClass(PigMapReduceCounter.PigMapCounter.class);
            } else {
                nwJob.setReducerClass(PigMapReduceCounter.PigReduceCounter.class);
            }
        }

        if (mro.isRankOperation()) {
            Iterator<String> operationIDs = mro.getRankOperationId().iterator();

            while (operationIDs.hasNext()) {
                String operationID = operationIDs.next();
                Iterator<Pair<String, Long>> itPairs = globalCounters.get(operationID).iterator();
                Pair<String, Long> pair = null;
                while (itPairs.hasNext()) {
                    pair = itPairs.next();
                    conf.setLong(pair.first, pair.second);
                }
            }
        }

        if (!pigContext.inIllustrator) {
            // unset inputs for POStore, otherwise, map/reduce plan will be unnecessarily deserialized
            for (POStore st : mapStores) {
                st.setInputs(null);
                st.setParentPlan(null);
            }
            for (POStore st : reduceStores) {
                st.setInputs(null);
                st.setParentPlan(null);
            }
            conf.set(PIG_MAP_STORES, ObjectSerializer.serialize(mapStores));
            conf.set(PIG_REDUCE_STORES, ObjectSerializer.serialize(reduceStores));
        }

        String tmp;
        long maxCombinedSplitSize = 0;
        if (!mro.combineSmallSplits()
                || pigContext.getProperties().getProperty("pig.splitCombination", "true").equals("false"))
            conf.setBoolean("pig.noSplitCombination", true);
        else if ((tmp = pigContext.getProperties().getProperty("pig.maxCombinedSplitSize", null)) != null) {
            try {
                maxCombinedSplitSize = Long.parseLong(tmp);
            } catch (NumberFormatException e) {
                log.warn(
                        "Invalid numeric format for pig.maxCombinedSplitSize; use the default maximum combined split size");
            }
        }
        if (maxCombinedSplitSize > 0)
            conf.setLong("pig.maxCombinedSplitSize", maxCombinedSplitSize);

        // It's a hack to set distributed cache file for hadoop 23. Once MiniMRCluster do not require local
        // jar on fixed location, this can be removed
        if (pigContext.getExecType() == ExecType.MAPREDUCE) {
            String newfiles = conf.get("alternative.mapreduce.job.cache.files");
            if (newfiles != null) {
                String files = conf.get(MRConfiguration.JOB_CACHE_FILES);
                conf.set(MRConfiguration.JOB_CACHE_FILES,
                        files == null ? newfiles : files + "," + newfiles);
            }
        }
        // Serialize the UDF specific context info.
        UDFContext.getUDFContext().serialize(conf);
        Job cjob = new Job(new JobConf(conf), new ArrayList<Job>());
        jobStoreMap.put(cjob, new Pair<List<POStore>, Path>(storeLocations, tmpLocation));
        return cjob;

    } catch (JobCreationException jce) {
        throw jce;
    } catch (Exception e) {
        int errCode = 2017;
        String msg = "Internal error creating job configuration.";
        throw new JobCreationException(msg, errCode, PigException.BUG, e);
    }
}
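
Within this long method, the LinkedList.get calls sit in the single-store branch: when the map and reduce plans contain exactly one POStore between them, it is taken with get(0) from whichever list is non-empty. A reduced sketch of that selection logic, with plain strings standing in for POStore:

import java.util.LinkedList;

public class SingleStoreSketch {
    // Picks the sole store: from the reduce plan if it has one, else from the map plan.
    static String pickSingleStore(LinkedList<String> mapStores, LinkedList<String> reduceStores) {
        if (mapStores.size() + reduceStores.size() != 1) {
            throw new IllegalStateException("expected exactly one store");
        }
        return reduceStores.isEmpty() ? mapStores.get(0) : reduceStores.get(0);
    }

    public static void main(String[] args) {
        LinkedList<String> mapStores = new LinkedList<>();
        mapStores.add("hdfs://out/part-m-00000"); // hypothetical store location
        System.out.println(pickSingleStore(mapStores, new LinkedList<>()));
    }
}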

From source file:org.alfresco.repo.search.impl.lucene.AbstractLuceneQueryParser.java

/**
 * @param fixedTokenSequences LinkedList<LinkedList<Token>>
 * @return boolean
 */
private boolean canUseMultiPhraseQuery(LinkedList<LinkedList<Token>> fixedTokenSequences) {
    if (fixedTokenSequences.size() <= 1) {
        return true;
    }
    LinkedList<Token> first = fixedTokenSequences.get(0);
    for (int i = 1; i < fixedTokenSequences.size(); i++) {
        LinkedList<Token> current = fixedTokenSequences.get(i);
        if (first.size() != current.size()) {
            return false;
        }
        for (int j = 0; j < first.size(); j++) {
            Token fromFirst = first.get(j);
            Token fromCurrent = current.get(j);
            if (fromFirst.startOffset() != fromCurrent.startOffset()) {
                return false;
            }
        }
    }
    return true;
}
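
Because get(j) restarts its traversal from an end of the LinkedList on every call, the positional comparison above costs O(n^2) per sequence pair. A hedged iterator-based variant (with token start offsets reduced to plain integers for the sketch) does the same comparison in one pass:

import java.util.Iterator;
import java.util.LinkedList;

public class SequenceCompareSketch {
    // Same positional check as above, but each list is walked once.
    static boolean sameStartOffsets(LinkedList<Integer> first, LinkedList<Integer> current) {
        if (first.size() != current.size()) {
            return false;
        }
        Iterator<Integer> a = first.iterator();
        Iterator<Integer> b = current.iterator();
        while (a.hasNext()) {
            if (!a.next().equals(b.next())) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        LinkedList<Integer> first = new LinkedList<>();
        LinkedList<Integer> current = new LinkedList<>();
        first.add(0); first.add(5);
        current.add(0); current.add(5);
        System.out.println(sameStartOffsets(first, current)); // true
    }
}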

From source file:org.alfresco.solr.query.Solr4QueryParser.java

/**
 * @param fixedTokenSequences LinkedList<LinkedList<Token>>
 * @return boolean
 */
private boolean canUseMultiPhraseQuery(LinkedList<LinkedList<Token>> fixedTokenSequences) {
    LinkedList<Token> first = fixedTokenSequences.get(0);
    for (int i = 0; i < fixedTokenSequences.size(); i++) {
        LinkedList<Token> current = fixedTokenSequences.get(i);
        if (first.size() != current.size()) {
            return false;
        }
        for (int j = 0; j < first.size(); j++) {
            Token fromFirst = first.get(j);
            Token fromCurrent = current.get(j);
            if (fromFirst.startOffset() != fromCurrent.startOffset()) {
                return false;
            }
            String termText = fromCurrent.toString();
            if ((termText != null) && (termText.contains("*") || termText.contains("?"))) {
                return false;
            }
        }
    }
    return true;
}

From source file:report.mainReport.java

public static String createinfoTab(Connection c, Statement s, String acId, String dvId, int irang,
        LinkedList pList) {
    // Sample of the generated row:
    // <tr>
    //   <td>3</td>                     rank
    //   <td>113</td>                   identifier
    //   <td>9114 TAN -JAC 3.5T</td>    vehicle info
    //   <td>2015-12-08 06:59:07</td>   start date
    //   <td>2015-12-08 13:37:40</td>   end date
    //   <td>00-00-00 02:15:34</td>     driving time
    //   <td>00-00-00 04:22:59</td>     stopped time
    //   <td>51</td>                    total distance
    //   <td>22</td>                    average speed
    //   <td>79</td>                    max speed
    //   <td><input type="checkbox" ...></td>  checkbox whose inline onclick handler
    //                                         shows/hides this vehicle's route on the
    //                                         map (generated below from the row index)
    //   <td>-18.93935 47.56441</td>    event coordinates
    // </tr>
    boolean flag;
    ResultSet rs = null;
    int nb = 0;
    String infoTab = "";
    double odometreDeb = 0;
    double odometreFin = 0;
    double latitudemax = 0;
    double longitudemax = 0;
    double odometerKMlast = 0;
    double odometerOffsetKMlast = 0;
    double distance = 0;
    Timestamp dateEVlast = null, dateEVdeb = null;
    int speedmax = 0;
    int dureeroulage = 0;
    int dureearret = 0;

    formatSymbols.setDecimalSeparator('.');
    DecimalFormat dec = new DecimalFormat("#00.000000", formatSymbols);
    DecimalFormat deckms = new DecimalFormat("#0.0");
    infoTab = "<tr><td>" + irang + "</td><td>" + dvId + "</td>";
    String sql = "select accountID, deviceID, vehicleMake, vehicleModel, licensePlate FROM Device where accountID='"
            + acId + "' and deviceID='" + dvId + "';";
    try {
        rs = s.executeQuery(sql);
        if (rs.next()) {
            String vehicleMake = rs.getObject("vehicleMake") != null ? rs.getString("vehicleMake") : null;
            String vehicleModel = rs.getObject("vehicleModel") != null ? rs.getString("vehicleModel") : null;
            String licensePlate = rs.getObject("licensePlate") != null ? rs.getString("licensePlate") : null;
            infoTab = infoTab + "<td>" + licensePlate + " " + vehicleModel + "</td>";
        }
    } catch (SQLException ex) {
        LOGGER.severe(ex.getMessage());
    }
    distance = 0.0D;
    dureearret = 0;
    dureeroulage = 0;
    double duree = 0.;
    double vitessemoy = 0.0;
    int nbvitesse = 0;
    int nbreq = 0;
    int indpoint = 0;
    int ipoint = 0;
    int j = 0;
    String request = "";
    Events evtlast = null;

    for (int i = 0; i < pList.size(); i++) {
        Events evt = (Events) pList.get(i);
        if (evt.getDeviceID().equals(dvId)) {
            if (nb == 0) {
                dateEVdeb = evt.getDateEvt();
                request = request + "loc=" + dec.format(evt.getLatitude()) + ","
                        + dec.format(evt.getLongitude()); // starting point
                nbreq++;
            }
            if (evt.getSpeed() > 0) {
                if (i + 1 < pList.size()) {
                    duree = 0;
                    Events evtp = (Events) pList.get(i + 1);
                    if (evtp.getDeviceID().equals(dvId))
                        duree = (evtp.getDateEvt().getTime() - evt.getDateEvt().getTime()) / 1000.0;
                }
                dureeroulage = (int) (dureeroulage + duree); // accumulate driving time
                vitessemoy = vitessemoy + (int) evt.getSpeed();
                nbvitesse++;
            } else {
                duree = 0;
                if (i + 1 < pList.size()) {
                    Events evtp = (Events) pList.get(i + 1);
                    if (evtp.getDeviceID().equals(dvId))
                        duree = (evtp.getDateEvt().getTime() - evt.getDateEvt().getTime()) / 1000.0;
                }
                dureearret = (int) (dureearret + duree); // accumulate stopped time
                //request = request + "loc=" + dec.format(evt.getLatitude()) + "," + dec.format(evt.getLongitude()); // for the distance computation, only stops would be counted
                nbreq++;
            }
            if (evt.getSpeed() > speedmax) {
                speedmax = (int) evt.getSpeed();
                latitudemax = evt.getLatitude();
                longitudemax = evt.getLongitude();
            }
            if (evt.getSpeed() > evt.getSpeedMax()) { // check against the maximum allowed speed
                request = request + "loc=" + dec.format(evt.getLatitude()) + ","
                        + dec.format(evt.getLongitude());
                nbreq++;
            }
            dateEVlast = evt.getDateEvt();
            evtlast = evt;
            nb++;
        }
    }
    if (nb > 1) {
        request = request + "loc=" + dec.format(evtlast.getLatitude()) + ","
                + dec.format(evtlast.getLongitude());
        nbreq++;
    }
    if (nbreq > 1) {
        request = osrm + request + "&instructions=false&alt=false";
        String rep = sendGet(request, "viaroute");
        double distcalc = Double.parseDouble(rep) / 1000.0;
        distance = distcalc;
        System.out.println("Distance: " + distcalc + " total: " + (int) distance + "kms" + " req:" + request);
    }

    if (nb > 1) {
        //System.out.println("Distance OSRM: " + deckms.format(distance));
        // distance = (odometerKMlast + odometerOffsetKMlast) - odometreDeb;
        //System.out.println("Distance: " + (int) distance + "kms");
        //System.out.println("Vitesse max: " + speedmax + "kms/h");
        //System.out.println("Temps roulage: " + (int) dureeroulage + "s");
        //double vitessemoy = distance / (dureeroulage / 3600.0);
        vitessemoy = vitessemoy / nbvitesse;
        //System.out.println("Vitesse moy: " + (int) vitessemoy + " kms/h");
        int numberOfDays;
        int numberOfHours;
        int numberOfMinutes;
        int numberOfSeconds;

        if (distance < 1) {
            vitessemoy = 0;
            speedmax = 0;
            dureeroulage = 0;
            dureearret = 0;
        }
        // numberOfDays = dureeroulage / 86400;
        numberOfHours = ((int) dureeroulage % 86400) / 3600;
        numberOfMinutes = (((int) dureeroulage % 86400) % 3600) / 60;
        numberOfSeconds = (((int) dureeroulage % 86400) % 3600) % 60;
        // zero-pad each field so the cell always reads HH:mm:ss
        String roulage = String.format("%02d:%02d:%02d", numberOfHours, numberOfMinutes, numberOfSeconds);
        numberOfHours = ((int) dureearret % 86400) / 3600;
        numberOfMinutes = (((int) dureearret % 86400) % 3600) / 60;
        numberOfSeconds = (((int) dureearret % 86400) % 3600) % 60;
        String arret = String.format("%02d:%02d:%02d", numberOfHours, numberOfMinutes, numberOfSeconds);
        //System.out.println("Temps arrt: " + (int) dureearret + "s" + " " + arret);
        // long diff = dateEVlast.getTime() - dateEVdeb.getTime() ;
        // int dureeTH = (int) diff / 1000;
        // if (vitessemoy > 150) vitessemoy = 0;
        // System.out.println("Temps Total: " + dureeTH + "s");
        infoTab = infoTab + "<td>" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(dateEVdeb.getTime())
                + "</td>";
        infoTab = infoTab + "<td>" + new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(dateEVlast.getTime())
                + "</td>";
        infoTab = infoTab + "<td>00-00-00 " + roulage + "</td>";
        infoTab = infoTab + "<td>00-00-00 " + arret + "</td>";
        infoTab = infoTab + "<td>" + (int) distance + "</td>";
        infoTab = infoTab + "<td>" + (int) vitessemoy + "</td>";
        infoTab = infoTab + "<td>" + speedmax + "</td>";
        infoTab = infoTab + "<td><input type=\"checkbox\" style=\"cursor:pointer;\" id=\"cb" + irang
                + "\" title=\"Cliquez ici pour afficher le trajet de ce v&eacute;hicule\" onclick=\"if (this.checked) {if (isReplayRunning) pausePathReplay(true, true);if (curCheckedRowId > -1) {if (sb = document.getElementById('sliderbg' + curCheckedRowId)) sb.style.display = 'none';if (cb = document.getElementById('cb' + curCheckedRowId)) cb.title='Cliquez ici pour afficher le trajet de ce v&eacutehicule';};gMapDisplayData("
                + irang + ", true, true, false);secuDisplayData(" + irang + ",true);curCheckedRowId=" + irang
                + ";this.title='Cliquez ici pour masquer le trajet de ce v&eacute;hicule';}else {pausePathReplay(true, true);if (sb = document.getElementById('sliderbg' + "
                + irang + ")) sb.style.display = 'none';pathFilterDiv.style.display = 'none';gMapHideData("
                + irang + ", false);secuDisplayData(" + irang
                + ",false);curCheckedRowId = -1;this.title='Cliquez ici pour afficher le trajet de ce v&eacute;;hicule';};\"></td>";
        infoTab = infoTab + "<td>" + dec.format(latitudemax) + " " + dec.format(longitudemax) + "</td></tr>";
    } else {
        infoTab = infoTab + "<td></td><td></td><td></td><td></td><td>0</td><td>0</td><td>0</td>";
        infoTab = infoTab + "<td><input type=\"checkbox\" style=\"cursor:pointer;\" id=\"cb" + irang
                + "\" title=\"Cliquez ici pour afficher le trajet de ce v&eacute;hicule\" onclick=\"if (this.checked) {if (isReplayRunning) pausePathReplay(true, true);if (curCheckedRowId > -1) {if (sb = document.getElementById('sliderbg' + curCheckedRowId)) sb.style.display = 'none';if (cb = document.getElementById('cb' + curCheckedRowId)) cb.title='Cliquez ici pour afficher le trajet de ce v&eacutehicule';};gMapDisplayData("
                + irang + ", true, true, false);secuDisplayData(" + irang + ",true);curCheckedRowId=" + irang
                + ";this.title='Cliquez ici pour masquer le trajet de ce v&eacute;hicule';}else {gMapHideData("
                + irang + ", false);secuDisplayData(" + irang
                + ",false);curCheckedRowId = -1;this.title='Cliquez ici pour afficher le trajet de ce v&eacute;hicule';};\"></td>";
        infoTab = infoTab + "<td></td></tr>";
    }
    //System.out.println("infoTab: " + infoTab);
    return infoTab;
}
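
A note on the pattern above: the loop calls pList.get(i) and pList.get(i + 1) on every iteration. Indexed get on a java.util.LinkedList walks the list from one end each time, so the traversal degrades to O(n^2) for long event lists. The sketch below shows the same consecutive-pairs walk done in a single O(n) pass; the Event class and the sample data are hypothetical stand-ins for the Events bean above, not part of the original source.

import java.util.LinkedList;

public class ConsecutivePairs {

    // Hypothetical stand-in for the Events bean used in the code above.
    static class Event {
        final String deviceId;
        final long timeMillis;

        Event(String deviceId, long timeMillis) {
            this.deviceId = deviceId;
            this.timeMillis = timeMillis;
        }
    }

    public static void main(String[] args) {
        LinkedList<Event> events = new LinkedList<Event>();
        events.add(new Event("dev-1", 0L));
        events.add(new Event("dev-1", 30000L));
        events.add(new Event("dev-1", 95000L));

        // Indexed variant, as above: get(i) and get(i + 1) are each O(n)
        // on a LinkedList, so the whole loop is O(n^2).
        for (int i = 0; i + 1 < events.size(); i++) {
            double seconds = (events.get(i + 1).timeMillis - events.get(i).timeMillis) / 1000.0;
            System.out.println("indexed: " + seconds + "s between fixes");
        }

        // Single-pass variant: remember the previous element instead of
        // re-indexing; equivalent output, O(n) overall.
        Event prev = null;
        for (Event cur : events) {
            if (prev != null) {
                double seconds = (cur.timeMillis - prev.timeMillis) / 1000.0;
                System.out.println("iterator: " + seconds + "s between fixes");
            }
            prev = cur;
        }
    }
}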

From source file:ddf.catalog.source.solr.SolrProviderTest.java

/**
 * Tests the offset, a.k.a. start index (startIndex), functionality.
 *
 * @throws Exception
 */
@Test
public void testStartIndex() throws Exception {

    deleteAllIn(provider);

    List<Metacard> list = Arrays.asList((Metacard) new MockMetacard(Library.getFlagstaffRecord()),
            (Metacard) new MockMetacard(Library.getFlagstaffRecord()),
            (Metacard) new MockMetacard(Library.getFlagstaffRecord()),
            (Metacard) new MockMetacard(Library.getFlagstaffRecord()),
            (Metacard) new MockMetacard(Library.getFlagstaffRecord()),
            (Metacard) new MockMetacard(Library.getFlagstaffRecord()),
            (Metacard) new MockMetacard(Library.getFlagstaffRecord()),
            (Metacard) new MockMetacard(Library.getFlagstaffRecord()),
            (Metacard) new MockMetacard(Library.getFlagstaffRecord()));

    /** CREATE **/
    create(list);

    /** CONTEXTUAL QUERY **/

    CommonQueryBuilder queryBuilder = new CommonQueryBuilder();

    QueryImpl query = queryBuilder.queryByProperty(Metacard.TITLE, FLAGSTAFF_QUERY_PHRASE);

    int index = 0;
    int maxSize = 9;
    int startIndex = 1;

    query.setPageSize(maxSize);
    query.setStartIndex(startIndex);
    query.setRequestsTotalResultsCount(true);

    SourceResponse sourceResponse = provider.query(new QueryRequestImpl(query));

    assertEquals(9, sourceResponse.getResults().size());
    assertEquals(9L, sourceResponse.getHits());

    LinkedList<Result> allItems = new LinkedList<Result>();

    for (Result r : sourceResponse.getResults()) {
        allItems.add(r);
    }
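    // allItems now holds the full 9-element result set; each paged query below
    // is verified against it with allItems.get(index), where index is the
    // 0-based equivalent of the 1-based startIndex (index = startIndex - 1).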

    // page size 1, start index 2
    maxSize = 1;
    startIndex = 2;
    index = startIndex - 1;

    query.setPageSize(maxSize);
    query.setStartIndex(startIndex);
    query.setRequestsTotalResultsCount(true);

    sourceResponse = provider.query(new QueryRequestImpl(query));

    assertEquals(ONE_HIT, sourceResponse.getResults().size());
    assertEquals(9L, sourceResponse.getHits());

    for (Result r : sourceResponse.getResults()) {

        assertEquals("Testing when startIndex = " + startIndex, allItems.get(index).getMetacard().getMetadata(),
                r.getMetacard().getMetadata());
        index++;
    }

    // page size 1, start index 4
    maxSize = 1;
    startIndex = 4;
    index = startIndex - 1;
    query.setPageSize(maxSize);
    query.setStartIndex(startIndex);
    query.setRequestsTotalResultsCount(false);

    sourceResponse = provider.query(new QueryRequestImpl(query));

    assertEquals(ONE_HIT, sourceResponse.getResults().size());
    assertThat(sourceResponse.getHits(), anyOf(equalTo(-1L), equalTo(9L)));

    for (Result r : sourceResponse.getResults()) {

        assertEquals("Testing when startIndex = " + startIndex, allItems.get(index).getMetacard().getMetadata(),
                r.getMetacard().getMetadata());
        index++;
    }

    // page size 5, start index 5
    maxSize = 5;
    startIndex = 5;
    index = startIndex - 1;
    query.setPageSize(maxSize);
    query.setStartIndex(startIndex);

    sourceResponse = provider.query(new QueryRequestImpl(query));

    assertEquals(5, sourceResponse.getResults().size());

    for (Result r : sourceResponse.getResults()) {

        assertEquals("Testing when startIndex = " + startIndex, allItems.get(index).getMetacard().getMetadata(),
                r.getMetacard().getMetadata());
        index++;
    }

    // page size 9, start index 9
    maxSize = 9;
    startIndex = 9;
    index = startIndex - 1;
    query.setPageSize(maxSize);
    query.setStartIndex(startIndex);

    sourceResponse = provider.query(new QueryRequestImpl(query));

    assertEquals(ONE_HIT, sourceResponse.getResults().size());

    for (Result r : sourceResponse.getResults()) {

        assertEquals("Testing when startIndex = " + startIndex, allItems.get(index).getMetacard().getMetadata(),
                r.getMetacard().getMetadata());
        index++;
    }

    // Max size is very large
    maxSize = 100;
    startIndex = 9;
    index = startIndex - 1;
    query.setPageSize(maxSize);
    query.setStartIndex(startIndex);

    sourceResponse = provider.query(new QueryRequestImpl(query));

    assertEquals(ONE_HIT, sourceResponse.getResults().size());

    for (Result r : sourceResponse.getResults()) {

        assertEquals(allItems.get(index).getMetacard().getMetadata(), r.getMetacard().getMetadata());
        index++;
    }

    // bad start index
    maxSize = 2;
    startIndex = ALL_RESULTS;
    index = startIndex - 1;
    query.setPageSize(maxSize);
    query.setStartIndex(startIndex);

    try {
        sourceResponse = provider.query(new QueryRequestImpl(query));
        Assert.fail("Expected an exception stating that the start index should be greater than 0. ");
    } catch (UnsupportedQueryException e) {
        assertTrue(e.getMessage().indexOf("greater than 0") != ALL_RESULTS);
    }

}
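
The recurring check in this test maps the provider's 1-based startIndex onto the 0-based index that LinkedList.get expects (index = startIndex - 1). A minimal sketch of that check in isolation, with hypothetical page data standing in for the Solr results:

import java.util.Arrays;
import java.util.LinkedList;

public class StartIndexCheck {
    public static void main(String[] args) {
        // Baseline: the full result set in ranked order, as allItems above.
        LinkedList<String> allItems = new LinkedList<String>(
                Arrays.asList("r1", "r2", "r3", "r4", "r5", "r6", "r7", "r8", "r9"));

        // A page fetched with 1-based startIndex = 4 and page size 2
        // (hypothetical data standing in for sourceResponse.getResults()).
        int startIndex = 4;
        LinkedList<String> page = new LinkedList<String>(Arrays.asList("r4", "r5"));

        // LinkedList.get is 0-based, so the first page entry sits at startIndex - 1.
        int index = startIndex - 1;
        for (String result : page) {
            if (!allItems.get(index).equals(result)) {
                throw new AssertionError("mismatch at startIndex " + startIndex + ", offset " + index);
            }
            index++;
        }
        System.out.println("page verified against baseline");
    }
}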