Example usage for java.util.logging Level FINER

List of usage examples for java.util.logging Level FINER

Introduction

On this page you can find example usage of java.util.logging Level FINER.

Prototype

Level FINER

Document

FINER indicates a fairly detailed tracing message.
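
The examples below share a common pattern: check Logger.isLoggable(Level.FINER) before building trace messages, and use entering/exiting for method tracing. The following is a minimal, self-contained sketch of that pattern; the class, logger, and method names are illustrative and not taken from the examples.

import java.util.logging.Level;
import java.util.logging.Logger;

public class FinerTracingExample {
    // Illustrative logger; by convention the fully qualified class name is used.
    private static final Logger LOG = Logger.getLogger(FinerTracingExample.class.getName());

    public int lookup(String key) {
        final String sourceMethod = "lookup";
        // Guard FINER-level work so argument formatting is skipped when tracing is disabled.
        boolean isTraceLogging = LOG.isLoggable(Level.FINER);
        if (isTraceLogging) {
            LOG.entering(FinerTracingExample.class.getName(), sourceMethod, new Object[] { key });
        }

        int result = (key == null) ? -1 : key.length();
        if (isTraceLogging) {
            LOG.finer("Computed result " + result + " for key " + key);
        }

        if (isTraceLogging) {
            LOG.exiting(FinerTracingExample.class.getName(), sourceMethod, result);
        }
        return result;
    }

    public static void main(String[] args) {
        // Enable FINER on this logger; attached handlers may still filter below FINER.
        LOG.setLevel(Level.FINER);
        new FinerTracingExample().lookup("example");
    }
}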

Usage

From source file:com.ibm.jaggr.core.impl.AbstractAggregatorImpl.java

@Override
public IResource newResource(URI uri) {
    final String sourceMethod = "newResource"; //$NON-NLS-1$
    boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(AbstractAggregatorImpl.class.getName(), sourceMethod, new Object[] { uri });
    }
    if (!uri.isAbsolute()) {
        // URI is not absolute, so make it absolute.
        try {
            uri = getPlatformServices().getAppContextURI().resolve(uri.getPath());
        } catch (URISyntaxException e) {
            throw new IllegalArgumentException(e);
        }
    }

    IResourceFactory factory = null;
    String scheme = uri.getScheme();

    for (IAggregatorExtension extension : getExtensions(IResourceFactoryExtensionPoint.ID)) {
        if (scheme.equals(extension.getAttribute(IResourceFactoryExtensionPoint.SCHEME_ATTRIBUTE))) {
            IResourceFactory test = (IResourceFactory) extension.getInstance();
            if (test.handles(uri)) {
                factory = test;
                break;
            }
        }
    }
    if (factory == null) {
        throw new UnsupportedOperationException("No resource factory for " + uri.toString() //$NON-NLS-1$
        );
    }

    IResource result = factory.newResource(uri);
    if (isTraceLogging) {
        log.exiting(AbstractAggregatorImpl.class.getName(), sourceMethod, result);
    }
    return result;
}

From source file:org.crank.javax.faces.component.MenuRenderer.java

void renderSelect(FacesContext context, UIComponent component) throws IOException {

    ResponseWriter writer = context.getResponseWriter();
    assert (writer != null);

    if (logger.isLoggable(Level.FINER)) {
        logger.log(Level.FINER, "Rendering 'select'");
    }
    writer.startElement("select", component);
    writeIdAttributeIfNecessary(context, writer, component);
    writer.writeAttribute("name", component.getClientId(context), "clientId");
    // render styleClass attribute if present.
    String styleClass = null;
    if (null != (styleClass = (String) component.getAttributes().get("styleClass"))) {
        writer.writeAttribute("class", styleClass, "styleClass");
    }
    if (!getMultipleText(component).equals("")) {
        writer.writeAttribute("multiple", true, "multiple");
    }

    // Determine how many option(s) we need to render, and update
    // the component's "size" attribute accordingly;  The "size"
    // attribute will be rendered as one of the "pass thru" attributes
    int itemCount = getOptionNumber(context, component);
    if (logger.isLoggable(Level.FINE)) {
        logger.fine("Rendering " + itemCount + " options");
    }
    // If "size" is *not* set explicitly, we have to default it correctly
    Integer size = Integer.getInteger((String) component.getAttributes().get("size"));
    if (size == null || size == Integer.MIN_VALUE) {
        //TODO: HACK... need to 'cifer why the size isn't getting processed correctly from the tag - Paul T.
        if (itemCount > 20) {
            size = 20;
        } else {
            size = itemCount;
        }
    }
    writeDefaultSize(writer, size);

    RenderKitUtils.renderPassThruAttributes(context, writer, component, new String[] { "size" });
    RenderKitUtils.renderXHTMLStyleBooleanAttributes(writer, component);
    // Now, render the "options" portion...
    renderOptions(context, component);

    writer.endElement("select");

}

From source file:org.geotools.gce.imagemosaic.GranuleDescriptor.java

/**
* Load a specified raster as a portion of the granule described by this {@link GranuleDescriptor}.
* 
* @param imageReadParameters the {@link ImageReadParam} to use for reading.
* @param index the index to use for the {@link ImageReader}.
* @param cropBBox the bbox to use for cropping. 
* @param mosaicWorldToGrid the cropping grid to world transform.
* @param request the incoming request to satisfy.
* @param hints {@link Hints} to be used for creating this raster.
* @return the specified raster as a portion of the granule described by this {@link GranuleDescriptor}.
* @throws IOException in case an error occurs.
*/
public GranuleLoadingResult loadRaster(final ImageReadParam imageReadParameters, final int index,
        final ReferencedEnvelope cropBBox, final MathTransform2D mosaicWorldToGrid,
        final RasterLayerRequest request, final Hints hints) throws IOException {

    if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
        final String name = Thread.currentThread().getName();
        LOGGER.finer("Thread:" + name + " Loading raster data for granuleDescriptor " + this.toString());
    }
    ImageReadParam readParameters = null;
    int imageIndex;
    final boolean useFootprint = roiProvider != null
            && request.getFootprintBehavior() != FootprintBehavior.None;
    Geometry inclusionGeometry = useFootprint ? roiProvider.getFootprint() : null;
    final ReferencedEnvelope bbox = useFootprint
            ? new ReferencedEnvelope(granuleBBOX.intersection(inclusionGeometry.getEnvelopeInternal()),
                    granuleBBOX.getCoordinateReferenceSystem())
            : granuleBBOX;
    boolean doFiltering = false;
    if (filterMe && useFootprint) {
        doFiltering = Utils.areaIsDifferent(inclusionGeometry, baseGridToWorld, granuleBBOX);
    }

    // intersection of this tile bound with the current crop bbox
    final ReferencedEnvelope intersection = new ReferencedEnvelope(bbox.intersection(cropBBox),
            cropBBox.getCoordinateReferenceSystem());
    if (intersection.isEmpty()) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
            LOGGER.fine(new StringBuilder("Got empty intersection for granule ").append(this.toString())
                    .append(" with request ").append(request.toString())
                    .append(" Resulting in no granule loaded: Empty result").toString());
        }
        return null;
    }

    // check if the requested bbox intersects or overlaps the requested area 
    if (useFootprint && inclusionGeometry != null && !JTS.toGeometry(cropBBox).intersects(inclusionGeometry)) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
            LOGGER.fine(new StringBuilder("Got empty intersection for granule ").append(this.toString())
                    .append(" with request ").append(request.toString())
                    .append(" Resulting in no granule loaded: Empty result").toString());
        }
        return null;
    }

    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        //
        //get info about the raster we have to read
        //

        // get a stream
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream = cachedStreamSPI.createInputStreamInstance(granuleUrl, ImageIO.getUseCache(),
                ImageIO.getCacheDirectory());
        if (inStream == null)
            return null;

        // get a reader and try to cache the relevant SPI
        if (cachedReaderSPI == null) {
            reader = ImageIOExt.getImageioReader(inStream);
            if (reader != null)
                cachedReaderSPI = reader.getOriginatingProvider();
        } else
            reader = cachedReaderSPI.createReaderInstance();
        if (reader == null) {
            if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
                LOGGER.warning(new StringBuilder("Unable to get a reader for granuleDescriptor ")
                        .append(this.toString()).append(" with request ").append(request.toString())
                        .append(" Resulting in no granule loaded: Empty result").toString());
            }
            return null;
        }
        // set input
        customizeReaderInitialization(reader, hints);
        reader.setInput(inStream);

        // Checking for heterogeneous granules
        if (request.isHeterogeneousGranules()) {
            // create read parameters
            readParameters = new ImageReadParam();

            //override the overviews controller for the base layer
            imageIndex = ReadParamsController.setReadParams(
                    request.spatialRequestHelper.getRequestedResolution(), request.getOverviewPolicy(),
                    request.getDecimationPolicy(), readParameters, request.rasterManager, overviewsController);
        } else {
            imageIndex = index;
            readParameters = imageReadParameters;
        }

        //get selected level and base level dimensions
        final GranuleOverviewLevelDescriptor selectedlevel = getLevel(imageIndex, reader);

        // now create the crop grid to world which can be used to decide
        // which source area we need to crop in the selected level taking
        // into account the scale factors imposed by the selection of this
        // level together with the base level grid to world transformation
        AffineTransform2D cropWorldToGrid = new AffineTransform2D(selectedlevel.gridToWorldTransformCorner);
        cropWorldToGrid = (AffineTransform2D) cropWorldToGrid.inverse();
        // computing the crop source area which lives into the
        // selected level raster space, NOTICE that at the end we need to
        // take into account the fact that we might also decimate therefore
        // we cannot just use the crop grid to world but we need to correct
        // it.
        final Rectangle sourceArea = CRS.transform(cropWorldToGrid, intersection).toRectangle2D().getBounds();
        //gutter
        if (selectedlevel.baseToLevelTransform.isIdentity()) {
            sourceArea.grow(2, 2);
        }
        XRectangle2D.intersect(sourceArea, selectedlevel.rasterDimensions, sourceArea);//make sure roundings don't bother us
        // is it empty??
        if (sourceArea.isEmpty()) {
            if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
                LOGGER.fine("Got empty area for granuleDescriptor " + this.toString() + " with request "
                        + request.toString() + " Resulting in no granule loaded: Empty result");

            }
            return null;

        } else if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
            LOGGER.finer("Loading level " + imageIndex + " with source region: " + sourceArea + " subsampling: "
                    + readParameters.getSourceXSubsampling() + "," + readParameters.getSourceYSubsampling()
                    + " for granule:" + granuleUrl);
        }

        // Setting subsampling 
        int newSubSamplingFactor = 0;
        final String pluginName = cachedReaderSPI.getPluginClassName();
        if (pluginName != null && pluginName.equals(ImageUtilities.DIRECT_KAKADU_PLUGIN)) {
            final int ssx = readParameters.getSourceXSubsampling();
            final int ssy = readParameters.getSourceYSubsampling();
            newSubSamplingFactor = ImageIOUtilities.getSubSamplingFactor2(ssx, ssy);
            if (newSubSamplingFactor != 0) {
                if (newSubSamplingFactor > maxDecimationFactor && maxDecimationFactor != -1) {
                    newSubSamplingFactor = maxDecimationFactor;
                }
                readParameters.setSourceSubsampling(newSubSamplingFactor, newSubSamplingFactor, 0, 0);
            }
        }

        // set the source region
        readParameters.setSourceRegion(sourceArea);
        RenderedImage raster;
        try {
            // read
            raster = request.getReadType().read(readParameters, imageIndex, granuleUrl,
                    selectedlevel.rasterDimensions, reader, hints, false);

        } catch (Throwable e) {
            if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
                LOGGER.log(java.util.logging.Level.FINE,
                        "Unable to load raster for granuleDescriptor " + this.toString() + " with request "
                                + request.toString() + " Resulting in no granule loaded: Empty result",
                        e);
            }
            return null;
        }

        // use fixed source area
        sourceArea.setRect(readParameters.getSourceRegion());

        //
        // setting new coefficients to define a new affineTransformation
        // to be applied to the grid to world transformation
        // -----------------------------------------------------------------------------------
        //
        // With respect to the original envelope, the obtained planarImage
        // needs to be rescaled. The scaling factors are computed as the
        // ratio between the cropped source region sizes and the read
        // image sizes.
        //
        // place it in the mosaic using the coords created above;
        double decimationScaleX = ((1.0 * sourceArea.width) / raster.getWidth());
        double decimationScaleY = ((1.0 * sourceArea.height) / raster.getHeight());
        final AffineTransform decimationScaleTranform = XAffineTransform.getScaleInstance(decimationScaleX,
                decimationScaleY);

        // keep into account translation  to work into the selected level raster space
        final AffineTransform afterDecimationTranslateTranform = XAffineTransform
                .getTranslateInstance(sourceArea.x, sourceArea.y);

        // now we need to go back to the base level raster space
        final AffineTransform backToBaseLevelScaleTransform = selectedlevel.baseToLevelTransform;

        // now create the overall transform
        final AffineTransform finalRaster2Model = new AffineTransform(baseGridToWorld);
        finalRaster2Model.concatenate(CoverageUtilities.CENTER_TO_CORNER);

        if (!XAffineTransform.isIdentity(backToBaseLevelScaleTransform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(backToBaseLevelScaleTransform);
        if (!XAffineTransform.isIdentity(afterDecimationTranslateTranform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(afterDecimationTranslateTranform);
        if (!XAffineTransform.isIdentity(decimationScaleTranform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(decimationScaleTranform);

        // adjust roi
        if (useFootprint) {

            ROIGeometry transformed;
            try {
                transformed = roiProvider.getTransformedROI(finalRaster2Model.createInverse());
                if (transformed.getAsGeometry().isEmpty()) {
                    // inset might have killed the geometry fully
                    return null;
                }

                PlanarImage pi = PlanarImage.wrapRenderedImage(raster);
                if (!transformed.intersects(pi.getBounds())) {
                    return null;
                }
                pi.setProperty("ROI", transformed);
                raster = pi;

            } catch (NoninvertibleTransformException e) {
                if (LOGGER.isLoggable(java.util.logging.Level.INFO))
                    LOGGER.info("Unable to create a granuleDescriptor " + this.toString()
                            + " due to a problem when managing the ROI");
                return null;
            }

        }
        // keep into account translation factors to place this tile
        finalRaster2Model.preConcatenate((AffineTransform) mosaicWorldToGrid);
        final Interpolation interpolation = request.getInterpolation();

        // paranoid check to avoid JAI freaking out when computing its internal layout on images that are too small
        Rectangle2D finalLayout = ImageUtilities.layoutHelper(raster, (float) finalRaster2Model.getScaleX(),
                (float) finalRaster2Model.getScaleY(), (float) finalRaster2Model.getTranslateX(),
                (float) finalRaster2Model.getTranslateY(), interpolation);
        if (finalLayout.isEmpty()) {
            if (LOGGER.isLoggable(java.util.logging.Level.INFO))
                LOGGER.info("Unable to create a granuleDescriptor " + this.toString()
                        + " due to jai scale bug creating a null source area");
            return null;
        }

        // apply the affine transform  conserving indexed color model
        final RenderingHints localHints = new RenderingHints(JAI.KEY_REPLACE_INDEX_COLOR_MODEL,
                interpolation instanceof InterpolationNearest ? Boolean.FALSE : Boolean.TRUE);
        if (XAffineTransform.isIdentity(finalRaster2Model, Utils.AFFINE_IDENTITY_EPS)) {
            return new GranuleLoadingResult(raster, null, granuleUrl, doFiltering, pamDataset);
        } else {
            //
            // In case we are asked to use certain tile dimensions we tile
            // also at this stage in case the read type is Direct since
            // buffered images comes up untiled and this can affect the
            // performances of the subsequent affine operation.
            //
            final Dimension tileDimensions = request.getTileDimensions();
            if (tileDimensions != null && request.getReadType().equals(ReadType.DIRECT_READ)) {
                final ImageLayout layout = new ImageLayout();
                layout.setTileHeight(tileDimensions.width).setTileWidth(tileDimensions.height);
                localHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout));
            } else {
                if (hints != null && hints.containsKey(JAI.KEY_IMAGE_LAYOUT)) {
                    final Object layout = hints.get(JAI.KEY_IMAGE_LAYOUT);
                    if (layout != null && layout instanceof ImageLayout) {
                        localHints
                                .add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, ((ImageLayout) layout).clone()));
                    }
                }
            }
            if (hints != null && hints.containsKey(JAI.KEY_TILE_CACHE)) {
                final Object cache = hints.get(JAI.KEY_TILE_CACHE);
                if (cache != null && cache instanceof TileCache)
                    localHints.add(new RenderingHints(JAI.KEY_TILE_CACHE, (TileCache) cache));
            }
            if (hints != null && hints.containsKey(JAI.KEY_TILE_SCHEDULER)) {
                final Object scheduler = hints.get(JAI.KEY_TILE_SCHEDULER);
                if (scheduler != null && scheduler instanceof TileScheduler)
                    localHints.add(new RenderingHints(JAI.KEY_TILE_SCHEDULER, (TileScheduler) scheduler));
            }
            boolean addBorderExtender = true;
            if (hints != null && hints.containsKey(JAI.KEY_BORDER_EXTENDER)) {
                final Object extender = hints.get(JAI.KEY_BORDER_EXTENDER);
                if (extender != null && extender instanceof BorderExtender) {
                    localHints.add(new RenderingHints(JAI.KEY_BORDER_EXTENDER, (BorderExtender) extender));
                    addBorderExtender = false;
                }
            }
            // BORDER extender
            if (addBorderExtender) {
                localHints.add(ImageUtilities.BORDER_EXTENDER_HINTS);
            }

            ImageWorker iw = new ImageWorker(raster);
            iw.setRenderingHints(localHints);
            iw.affine(finalRaster2Model, interpolation, request.getBackgroundValues());
            return new GranuleLoadingResult(iw.getRenderedImage(), null, granuleUrl, doFiltering, pamDataset);
        }

    } catch (IllegalStateException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(),
                    e);
        }
        return null;
    } catch (org.opengis.referencing.operation.NoninvertibleTransformException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(),
                    e);
        }
        return null;
    } catch (TransformException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ").append(this.toString())
                            .append(" with request ").append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result").toString(),
                    e);
        }
        return null;

    } finally {
        try {
            if (request.getReadType() != ReadType.JAI_IMAGEREAD && inStream != null) {
                inStream.close();
            }
        } finally {
            if (request.getReadType() != ReadType.JAI_IMAGEREAD && reader != null) {
                reader.dispose();
            }
        }
    }
}

From source file:org.cloudifysource.rest.controllers.DeploymentsController.java

/**
 * Delete an instance level attribute.
 * 
 * @param appName
 *            The application name.
 * @param serviceName
 *            The service name.
 * @param instanceId
 *            The instance id.
 * @param attributeName
 *            The attribute name.
 * @return The previous value for this attribute in the response.
 * @throws ResourceNotFoundException
 *             Thrown in case the requested service or service instance does not exist.
 * @throws RestErrorException
 *             Thrown in case the requested attribute name is empty.
 */
@RequestMapping(value = "/{appName}/service/{serviceName}/instances/{instanceId}/attributes/{attributeName}", method = RequestMethod.DELETE)
public DeleteServiceInstanceAttributeResponse deleteServiceInstanceAttribute(@PathVariable final String appName,
        @PathVariable final String serviceName, @PathVariable final Integer instanceId,
        @PathVariable final String attributeName) throws ResourceNotFoundException, RestErrorException {
    // valid service
    controllerHelper.getService(appName, serviceName);

    // logger - request to delete attributes
    if (logger.isLoggable(Level.FINER)) {
        logger.finer("received request to delete attribute " + attributeName + " of instance Id " + instanceId
                + " of service " + ServiceUtils.getAbsolutePUName(appName, serviceName) + " of application "
                + appName);
    }

    // get delete attribute returned previous value
    final Object previous = controllerHelper.deleteAttribute(appName, serviceName, instanceId, attributeName);

    // create response object
    final DeleteServiceInstanceAttributeResponse siar = new DeleteServiceInstanceAttributeResponse();
    // set previous value
    siar.setPreviousValue(previous);
    // return response object
    return siar;

}

From source file:com.ibm.team.build.internal.hjplugin.RTCScm.java

@Override
public boolean checkout(AbstractBuild<?, ?> build, Launcher arg1, FilePath workspacePath,
        BuildListener listener, File changeLogFile) throws IOException, InterruptedException {

    listener.getLogger().println(Messages.RTCScm_checkout_started());

    File passwordFileFile = getPasswordFileFile();
    String baselineSetName = getBaselineSetName(build);
    String localBuildToolKit;
    String nodeBuildToolKit;
    String passwordToUse = null;

    try {
        localBuildToolKit = getDescriptor().getMasterBuildToolkit(getBuildTool(), listener);
        nodeBuildToolKit = getDescriptor().getBuildToolkit(getBuildTool(), build.getBuiltOn(), listener);
        if (LOGGER.isLoggable(Level.FINER)) {
            LOGGER.finer("checkout : " + build.getProject().getName() + " " + build.getDisplayName() + " " //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
                    + build.getBuiltOnStr() + " Load directory=\"" + workspacePath.getRemote() + "\"" + //$NON-NLS-2$
                    " Build tool=\"" + getBuildTool() + "\"" + //$NON-NLS-1$ //$NON-NLS-2$
                    " Local Build toolkit=\"" + localBuildToolKit + "\"" + //$NON-NLS-1$ //$NON-NLS-2$
                    " Node Build toolkit=\"" + nodeBuildToolKit + "\"" + //$NON-NLS-1$ //$NON-NLS-2$
                    " Server URI=\"" + getServerURI() + "\"" + //$NON-NLS-1$ //$NON-NLS-2$
                    " Userid=\"" + getUserId() + "\"" + //$NON-NLS-1$ //$NON-NLS-2$
                    " Authenticating with " //$NON-NLS-1$
                    + (passwordFileFile == null ? " configured password " : passwordFileFile.getAbsolutePath()) //$NON-NLS-1$
                    + " Build workspace=\"" + getBuildWorkspace() + "\"" + //$NON-NLS-2$
                    " Baseline Set name=\"" + baselineSetName + "\""); //$NON-NLS-1$ //$NON-NLS-2$
        }

        RTCFacadeWrapper facade = RTCFacadeFactory.getFacade(localBuildToolKit, null);
        passwordToUse = (String) facade.invoke("determinePassword", new Class[] { //$NON-NLS-1$
                String.class, // password,
                File.class, // passwordFile,
        }, getPassword(), getPasswordFileFile());

    } catch (InvocationTargetException e) {
        Throwable eToReport = e.getCause();
        if (eToReport == null) {
            eToReport = e;
        }
        PrintWriter writer = listener.fatalError(Messages.RTCScm_checkout_failure(eToReport.getMessage()));
        eToReport.printStackTrace(writer);
        LOGGER.log(Level.FINER, "determinePassword had invocation failure " + eToReport.getMessage(), //$NON-NLS-1$
                eToReport);

        // if we can't check out then we can't build it
        throw new AbortException(Messages.RTCScm_checkout_failure2(eToReport.getMessage()));
    } catch (Exception e) {
        PrintWriter writer = listener.fatalError(Messages.RTCScm_checkout_failure3(e.getMessage()));
        e.printStackTrace(writer);
        LOGGER.log(Level.FINER, "determinePassword failure " + e.getMessage(), e); //$NON-NLS-1$

        // if we can't check out then we can't build it
        throw new AbortException(Messages.RTCScm_checkout_failure4(e.getMessage()));
    }

    OutputStream changeLogStream = new FileOutputStream(changeLogFile);
    RemoteOutputStream changeLog = new RemoteOutputStream(changeLogStream);

    if (workspacePath.isRemote()) {
        sendJarsToSlave(workspacePath);
    }

    boolean debug = Boolean.parseBoolean(build.getEnvironment(listener).get(DEBUG_PROPERTY));
    RTCCheckoutTask checkout = new RTCCheckoutTask(
            build.getProject().getName() + " " + build.getDisplayName() + " " + build.getBuiltOnStr(), //$NON-NLS-1$ //$NON-NLS-2$
            nodeBuildToolKit, getServerURI(), getUserId(), passwordToUse, getTimeout(), getBuildWorkspace(),
            baselineSetName, listener, changeLog, workspacePath.isRemote(), debug);

    workspacePath.act(checkout);

    return true;
}

From source file:com.cloudbees.api.BeesClient.java

public ApplicationDeployArchiveResponse applicationDeployArchive(ApplicationDeployArgs args) throws Exception {
    Map<String, String> params = new HashMap<String, String>();
    Map<String, File> fileParams = new HashMap<String, File>();
    params.put("app_id", args.appId);

    File archiveFile = args.archiveFile;

    // Currently do not support ear file for delta upload
    boolean deployDelta = false;
    boolean deployJarDelta = false;
    // Create delta deploy File
    if (args.deltaDeploy && !args.archiveType.equals("ear")) {
        trace("Get existing checksums");
        ApplicationCheckSumsResponse applicationCheckSumsResponse = applicationCheckSums(args.appId, false);
        if (logger.isLoggable(Level.FINER)) {
            for (Map.Entry<String, Long> entry : applicationCheckSumsResponse.getCheckSums().entrySet()) {
                logger.finer("Entry: " + entry.getKey() + " CRC: " + entry.getValue());
            }
        }
        if (applicationCheckSumsResponse.getCheckSums().size() == 0) {
            trace("No existing checksums, upload full archive");
        } else {
            trace("Creating Delta archive for: " + archiveFile);
            archiveFile = ArchiveUtils.createDeltaWarFile(applicationCheckSumsResponse.getCheckSums(),
                    archiveFile, archiveFile.getParent());
            deployDelta = true;
            if (applicationCheckSumsResponse.getSnapshotID() != null)
                params.put("delta_snapshot_id", applicationCheckSumsResponse.getSnapshotID());
        }
    }

    if (args.deltaDeploy && !args.archiveType.equals("ear")) {
        trace("Get existing jar hashes");
        ApplicationJarHashesResponse applicationJarHashesResponse = applicationJarHashes(args.appId,
                JarUtils.getJarHashes(archiveFile));
        if (applicationJarHashesResponse.getJarHash().size() == 0) {
            trace("No existing jars");
        } else {
            trace("Creating Delta2 archive for: " + archiveFile);
            File archiveFile2 = JarUtils.createDeltaWarFile(applicationJarHashesResponse.getJarHash(),
                    archiveFile, archiveFile.getParent());
            // Delete the old delta archive
            if (deployDelta) {
                archiveFile.delete();
            }
            archiveFile = archiveFile2;
            deployJarDelta = true;
        }
    }

    if (deployDelta || deployJarDelta) {
        trace("Uploading delta archive: " + archiveFile);
    }

    File archiveFileSrc = args.srcFile;
    long uploadSize = archiveFile.length();
    if (archiveFileSrc != null) {
        uploadSize += archiveFileSrc.length();
    }

    fileParams.put("archive", archiveFile);
    params.put("archive_type", args.archiveType);

    params.put("create", Boolean.valueOf(args.create).toString());

    if (args.environment != null) {
        params.put("environment", args.environment);
    }

    if (args.description != null) {
        params.put("description", args.description);
    }

    if (archiveFileSrc != null) {
        fileParams.put("src", archiveFileSrc);
    }

    params.put("parameters", createParameter(args.parameters));
    params.put("variables", createParameter(args.variables));

    // extend the deploy invocation timeout to 4 hours
    long expireTime = System.currentTimeMillis() + 4 * 60 * 60 * 1000;
    params.put("expires", Long.toString(expireTime / 1000));

    String url = getApiUrl("application.deployArchive").toString();
    params.put("action", "application.deployArchive");
    String response = executeUpload(url, params, fileParams, args.progress);
    try {
        return (ApplicationDeployArchiveResponse) readResponse(response);
    } finally {
        // Delete the delta archive file
        if (deployDelta || deployJarDelta) {
            archiveFile.delete();
        }
    }
}

From source file:diet.gridr.g5k.gui.ClusterInfoPanel.java

/**
 * This method initializes jPanel2.
 *
 * @return javax.swing.JPanel
 */
private JPanel getJPanel2() {
    if (jPanel2 == null) {
        jPanel2 = new JPanel();
        jPanel2.setLayout(new BoxLayout(jPanel2, BoxLayout.Y_AXIS));
        jPanel2.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
        jobsTable = new JTable();
        jobsModel = new ClusterJobsSummaryModel();
        jobsTable.setModel(jobsModel);
        ClusterJobsSummaryCellRenderer renderer = new ClusterJobsSummaryCellRenderer();
        jobsTable.setDefaultRenderer(String.class, renderer);
        //jobsTable.createDefaultColumnsFromModel();
        JLabel jobsTableTitle = new JLabel("Jobs status");
        jobsTableTitle.setAlignmentX(JLabel.CENTER_ALIGNMENT);
        jobsTableTitle.setFont(new Font("Dialog", Font.BOLD, 14));
        jPanel2.add(Box.createVerticalStrut(5));
        jPanel2.add(jobsTableTitle);
        jPanel2.add(Box.createVerticalStrut(10));
        jPanel2.add(jobsTable.getTableHeader());
        jPanel2.add(jobsTable);
        LoggingManager.log(Level.FINER, LoggingManager.RESOURCESTOOL, this.getClass().getName(), "getJPanel2",
                "Cluster jobs summary table added");
    }
    return jPanel2;
}

From source file:com.ibm.jaggr.core.impl.transport.AbstractHttpTransport.java

@Override
public String getLayerContribution(HttpServletRequest request, LayerContributionType type, Object arg) {
    final String sourceMethod = "getLayerContribution"; //$NON-NLS-1$
    boolean isTraceLogging = log.isLoggable(Level.FINER);
    if (isTraceLogging) {
        log.entering(AbstractHttpTransport.class.getName(), sourceMethod, new Object[] { request, type, arg });
    }
    String result = ""; //$NON-NLS-1$
    if (type == LayerContributionType.END_RESPONSE) {
        if (TypeUtil.asBoolean(request.getAttribute(WARN_DEPRECATED_USE_OF_MODULES_QUERYARG))) {
            result += MessageFormat.format(CONSOLE_WARNING_MSG_FMT,
                    new Object[] { Messages.AbstractHttpTransport_2 });
        }
        if (TypeUtil.asBoolean(request.getAttribute(WARN_DEPRECATED_USE_OF_REQUIRED_QUERYARG))) {
            result += MessageFormat.format(CONSOLE_WARNING_MSG_FMT,
                    new Object[] { Messages.AbstractHttpTransport_3 });
        }

    }
    if (isTraceLogging) {
        log.exiting(AbstractHttpTransport.class.getName(), sourceMethod, result);
    }
    return result;
}

From source file:org.b3log.latke.repository.jdbc.JdbcRepository.java

@Override
public long count() throws RepositoryException {
    final String cacheKey = CACHE_KEY_PREFIX + getName() + REPOSITORY_CACHE_COUNT;

    if (cacheEnabled) {
        final Object o = CACHE.get(cacheKey);

        if (null != o) {
            LOGGER.log(Level.FINER, "Got an object[cacheKey={0}] from repository cache[name={1}]",
                    new Object[] { cacheKey, getName() });
            try {
                return (Long) o;
            } catch (final Exception e) {
                LOGGER.log(Level.SEVERE, e.getMessage(), e);

                return -1;
            }
        }
    }

    final StringBuilder sql = new StringBuilder("select count(" + JdbcRepositories.OID + ") from ")
            .append(getName());
    final long ret = count(sql, new ArrayList<Object>());

    if (cacheEnabled) {
        CACHE.putAsync(cacheKey, ret);
        LOGGER.log(Level.FINER, "Added an object[cacheKey={0}] in repository cache[{1}]",
                new Object[] { cacheKey, getName() });
    }

    return ret;
}