Example usage for com.badlogic.gdx.graphics Pixmap Pixmap

List of usage examples for com.badlogic.gdx.graphics Pixmap Pixmap

Introduction

On this page you can find example usage for com.badlogic.gdx.graphics Pixmap Pixmap.

Prototype

public Pixmap(byte[] encodedData, int offset, int len) 

Source Link

Document

Creates a new Pixmap instance from the given encoded image data.

Usage

From source file:ta.firegreen.creation.creator.java

License:Apache License

@Override
public void render() {
    // A texture file selected elsewhere (UI/file chooser) is bound lazily here
    // on the render thread, then cleared so it is applied exactly once.
    if (fileTexture != null) {
        bindTexture(mesh, fileTexture.getAbsolutePath());
        fileTexture = null;
    }
    // Same deferred-load pattern for a mesh file.
    if (fileMTA != null) {
        nouveau = MeshTA.loadMeshTA(fileMTA);
        fileMTA = null;
    }
    Gdx.gl.glClearColor(0, 0, 0, 1);
    Gdx.gl20.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
    cam.position.set(posX, posY, posZ);
    cam.lookAt(cibleX, cibleY, cibleZ);
    cam.update();
    if (afficheTexture)
        mesh.render(renduTexture, cam.combined);
    else
        mesh.render(rendu, cam.combined);

    // Screenshot request: read back the framebuffer (minus a 100px border on
    // every side) and save it as a PNG.
    if (saveIMGFile != null) {

        final Pixmap p = new Pixmap(Gdx.graphics.getWidth() - 200, Gdx.graphics.getHeight() - 200,
                Pixmap.Format.RGBA8888);
        ByteBuffer bytes = ByteBuffer.allocateDirect(p.getPixels().capacity());
        Gdx.gl20.glReadPixels(100, 100, Gdx.graphics.getWidth() - 200, Gdx.graphics.getHeight() - 200,
                GL20.GL_RGBA, GL20.GL_UNSIGNED_BYTE, bytes);
        // Copy the read-back bytes into the pixmap back-to-front, four bytes at
        // a time. Reversing the entire buffer rotates the image 180 degrees and
        // reverses the channel order within each pixel; presumably this
        // compensates for glReadPixels' bottom-up row order -- TODO confirm the
        // saved PNG is oriented and coloured correctly.
        int j = p.getPixels().capacity() - 1;
        for (int i = 0; i < p.getPixels().capacity(); i += 4) {
            p.getPixels().put(j, bytes.get(i + 3));
            j--;
            p.getPixels().put(j, bytes.get(i + 2));
            j--;
            p.getPixels().put(j, bytes.get(i + 1));
            j--;
            p.getPixels().put(j, bytes.get(i));
            j--;
        }

        // NOTE(review): File.renameTo() does not mutate this File object, so
        // even when the rename succeeds the PNG below is still written to the
        // original, extension-less path. Also the rename target is built from
        // getName() alone, so it resolves against the working directory rather
        // than the file's parent directory -- confirm intended behaviour.
        if (!saveIMGFile.getName().endsWith(".png"))
            if (!saveIMGFile.renameTo(new File(saveIMGFile.getName() + ".png"))) {
                JOptionPane.showMessageDialog(null, "Le fichier n'a pas pu tre sauvegarder", "Erreur",
                        JOptionPane.ERROR_MESSAGE);
                return;
            }
        PixmapIO.writePNG(new FileHandle(saveIMGFile), p);
        saveIMGFile = null;
    }

    // Draw the world axes as lines: X in green, Y in red, Z in blue.
    shape.setProjectionMatrix(cam.projection);
    shape.setTransformMatrix(cam.view);
    shape.begin(ShapeType.Line);
    shape.setColor(Color.GREEN);
    shape.line(new Vector3(-10, 0, 0), new Vector3(10, 0, 0));
    shape.setColor(Color.RED);
    shape.line(new Vector3(0, -10, 0), new Vector3(0, 10, 0));
    shape.setColor(Color.BLUE);
    shape.line(new Vector3(0, 0, -10), new Vector3(0, 0, 10));
    shape.end();
    // Render the selected triangles twice from slightly offset camera
    // positions (+0.05 then -0.05 on every axis), presumably to make the
    // selection visible as a double outline -- TODO confirm intent.
    cam.translate(0.05f, 0.05f, 0.05f);
    cam.update();
    trianglesSelected.render(rendu, cam.combined);
    cam.translate(-0.1f, -0.1f, -0.1f);
    cam.update();
    trianglesSelected.render(rendu, cam.combined);
    // Crude frame limiter (~28 FPS); blocks the render thread.
    try {
        Thread.sleep(35);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}

From source file:test.ImageUtils.java

License:Apache License

/**
 * Captures the current screen and installs it as the backing texture of the
 * given region, disposing whatever texture the region previously held.
 *
 * @param textureRegion the region to update; must not be {@code null}.
 * @throws IllegalArgumentException if {@code textureRegion} is {@code null}.
 */
public static void setScreenTextureRegion(final TextureRegion textureRegion) {
    if (textureRegion == null) {
        throw new IllegalArgumentException("Input the TextureRegion is null.");
    }

    // Release the GPU resources of the texture currently backing the region.
    final Texture previousTexture = textureRegion.getTexture();
    if (previousTexture != null) {
        previousTexture.dispose();
    }

    // Grab the screen, encode it, and decode the bytes into a Pixmap.
    final BufferedImage screenImage = ImageUtils.getScreenBufferedImage();
    final byte[] encoded = ImageUtils.getBufferedImageBytes(screenImage);
    final Pixmap screenPixmap = new Pixmap(encoded, 0, encoded.length);
    final int regionWidth = screenPixmap.getWidth();
    final int regionHeight = screenPixmap.getHeight();

    // Upload the pixels to a fresh texture and point the region at it.
    final Texture screenTexture = new Texture(regionWidth, regionHeight, screenPixmap.getFormat());
    screenTexture.draw(screenPixmap, 0, 0);
    textureRegion.setTexture(screenTexture);
    textureRegion.setRegion(0, 0, regionWidth, regionHeight);

    // The CPU-side pixel data is no longer needed once uploaded.
    screenPixmap.dispose();
}

From source file:test.ImageUtils.java

License:Apache License

/**
 * Same as {@code setScreenTextureRegion}, but defers the capture and texture
 * upload to the libGDX render thread via {@code Gdx.app.postRunnable}, since
 * OpenGL resources may only be created on that thread.
 *
 * @param textureRegion the region to update; must not be {@code null}.
 * @throws IllegalArgumentException if {@code textureRegion} is {@code null}.
 */
public static void setScreenTextureRegionInThread(final TextureRegion textureRegion) {
    if (textureRegion == null) {
        throw new IllegalArgumentException("Input the TextureRegion is null.");
    }

    Gdx.app.postRunnable(new Runnable() {

        @Override
        public void run() {
            // Dispose the texture currently backing the region, if any.
            final Texture previousTexture = textureRegion.getTexture();
            if (previousTexture != null) {
                previousTexture.dispose();
            }

            // Capture the screen, encode it, and decode it into a Pixmap.
            final BufferedImage screenImage = ImageUtils.getScreenBufferedImage();
            final byte[] encoded = ImageUtils.getBufferedImageBytes(screenImage);
            final Pixmap screenPixmap = new Pixmap(encoded, 0, encoded.length);
            final int regionWidth = screenPixmap.getWidth();
            final int regionHeight = screenPixmap.getHeight();

            // Upload to a new texture and rebind the region to it.
            final Texture screenTexture = new Texture(regionWidth, regionHeight, screenPixmap.getFormat());
            screenTexture.draw(screenPixmap, 0, 0);
            textureRegion.setTexture(screenTexture);
            textureRegion.setRegion(0, 0, regionWidth, regionHeight);

            // Free the CPU-side pixels once they are on the GPU.
            screenPixmap.dispose();
        }
    });
}

From source file:util.Utils.java

/**
 * Creates a {@code width x height} RGBA8888 pixmap filled with opaque black
 * and draws the given image into it with its top-left corner at
 * {@code (sx, sy)}. Pixels falling outside the pixmap are clipped by
 * {@code Pixmap.drawPixel}.
 *
 * @param width  width of the resulting pixmap, in pixels.
 * @param height height of the resulting pixmap, in pixels.
 * @param image  source image to copy; read in full.
 * @param sx     x offset of the image inside the pixmap.
 * @param sy     y offset of the image inside the pixmap.
 * @return a new pixmap; the caller is responsible for disposing it.
 */
public static Pixmap createPixmap(int width, int height, BufferedImage image, int sx, int sy) {

    int imgWidth = image.getWidth();
    int imgHeight = image.getHeight();

    Pixmap pix = new Pixmap(width, height, Pixmap.Format.RGBA8888);
    pix.setColor(0f, 0f, 0f, 1f);
    pix.fillRectangle(0, 0, width, height);

    // BUG FIX: the scan-line stride (scansize) passed to getRGB() must be the
    // SOURCE image's width, not the destination pixmap's width. The original
    // passed `width`, which reads the wrong pixels whenever width != imgWidth
    // and can throw ArrayIndexOutOfBoundsException when width < imgWidth.
    // The indexing below must use the same stride.
    int[] pixels = image.getRGB(0, 0, imgWidth, imgHeight, null, 0, imgWidth);

    for (int x = 0; x < imgWidth; x++) {
        for (int y = 0; y < imgHeight; y++) {
            int pixel = pixels[y * imgWidth + x];
            // getRGBA converts the ARGB int from BufferedImage to libGDX RGBA.
            pix.drawPixel(sx + x, sy + y, getRGBA(pixel));
        }
    }

    return pix;
}

From source file:util.Utils.java

/**
 * Builds a solid-colour texture of the given size.
 *
 * @param width  texture width in pixels.
 * @param height texture height in pixels.
 * @param color  RGB components to fill with (the colour's own alpha is ignored).
 * @param alpha  alpha component of the fill colour.
 * @return a new texture; the caller is responsible for disposing it.
 */
public static Texture fillRectangle(int width, int height, Color color, float alpha) {
    // Paint a single-colour pixmap, upload it, then free the CPU-side pixels.
    final Pixmap canvas = new Pixmap(width, height, Pixmap.Format.RGBA8888);
    canvas.setColor(color.r, color.g, color.b, alpha);
    canvas.fillRectangle(0, 0, width, height);
    final Texture result = new Texture(canvas);
    canvas.dispose();
    return result;
}

From source file:ve.ucv.ciens.ccg.nxtar.NxtARCore.java

License:Apache License

/**
 * <p>Initialize the member fields and launch the networking threads. Also creates and
 * sets the application states.</p>
 */
public void create() {
    // Global scenario state must exist before anything else; any failure here
    // is fatal, so log and terminate the process.
    try {
        ScenarioGlobals.init(this);
    } catch (IllegalArgumentException e) {
        Gdx.app.error(TAG, CLASS_NAME + ".create(): Illegal argument initializing globals: ", e);
        System.exit(1);
        return;
    } catch (InstantiationException e) {
        Gdx.app.error(TAG, CLASS_NAME + ".create(): Instantiation exception initializing globals: ", e);
        System.exit(1);
        return;
    } catch (IllegalAccessException e) {
        Gdx.app.error(TAG, CLASS_NAME + ".create(): Illegal access exception initializing globals: ", e);
        System.exit(1);
        return;
    }

    // Set up rendering fields and settings.
    batch = new SpriteBatch();
    batch.enableBlending();
    batch.setBlendFunction(GL20.GL_SRC_ALPHA, GL20.GL_ONE_MINUS_SRC_ALPHA);
    pixelPerfectCamera = new OrthographicCamera(Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
    ShaderProgram.pedantic = false;

    // Create the state objects. The main menu implementation differs per
    // device (Ouya console vs. tablet); the remaining states are shared.
    states = new BaseState[game_states_t.getNumStates()];

    try {
        if (Ouya.runningOnOuya)
            states[game_states_t.MAIN_MENU.getValue()] = new OuyaMainMenuState(this);
        else
            states[game_states_t.MAIN_MENU.getValue()] = new TabletMainMenuState(this);

        try {
            states[game_states_t.IN_GAME.getValue()] = new InGameState(this);
        } catch (IllegalStateException e) {
            Gdx.app.error(TAG, CLASS_NAME + ".create(): Illegal state in IN_GAME_STATE: ", e);
            System.exit(1);
            return;
        }

        states[game_states_t.CALIBRATION.getValue()] = new CameraCalibrationState(this);

        try {
            states[game_states_t.AUTOMATIC_ACTION.getValue()] = new AutomaticActionState(this);
        } catch (IllegalStateException e) {
            Gdx.app.error(TAG, CLASS_NAME + ".create(): Illegal state in AUTOMATIC_ACTION_STATE: ", e);
            System.exit(1);
            return;
        }

        states[game_states_t.AUTOMATIC_ACTION_SUMMARY.getValue()] = new AutomaticActionSummaryState(this);
        states[game_states_t.SCENARIO_END_SUMMARY.getValue()] = new ScenarioEndSummaryState(this);
        states[game_states_t.HINTS.getValue()] = new InstructionsState(this);

    } catch (IllegalArgumentException e) {
        Gdx.app.error(TAG, CLASS_NAME + ".create(): Illegal argument caught creating states: ", e);
        System.exit(1);
        return;
    }

    // Register controller listeners.
    for (BaseState state : states) {
        Controllers.addListener(state);
    }

    // Set up the overlay font, anchored near the top-left corner of the
    // overscan-adjusted screen.
    overlayX = -(Utils.getScreenWidthWithOverscan() / 2) + 10;
    overlayY = (Utils.getScreenHeightWithOverscan() / 2) - 10;

    font = new BitmapFont();
    font.setColor(1.0f, 1.0f, 0.0f, 1.0f);
    if (!Ouya.runningOnOuya) {
        font.setScale(1.0f);
    } else {
        // Larger font on Ouya, presumably for TV viewing distance.
        font.setScale(2.5f);
    }

    // Start networking.
    actionResolver.enableMulticast();

    Gdx.app.debug(TAG, CLASS_NAME + ".create() :: Creating network threads");
    serviceDiscoveryThread = ServiceDiscoveryThread.getInstance();
    videoThread = VideoStreamingThread.getInstance();
    robotThread = RobotControlThread.getInstance();
    sensorThread = SensorReportThread.getInstance();

    // Launch networking threads.
    serviceDiscoveryThread.start();

    videoThread.start();
    videoThread.startStreaming();
    videoThread.addNetworkConnectionListener(this);

    robotThread.addNetworkConnectionListener(this);
    robotThread.start();

    sensorThread.addNetworkConnectionListener(this);
    sensorThread.start();

    // Set the current and next states; the main menu is the entry screen.
    currState = game_states_t.MAIN_MENU;
    nextState = null;
    this.setScreen(states[currState.getValue()]);
    states[currState.getValue()].onStateSet();

    // Prepare the fading effect: a full-screen black texture whose alpha is
    // tweened between 0 and 1 over half a second.
    Pixmap pixmap = new Pixmap(Gdx.graphics.getWidth(), Gdx.graphics.getHeight(), Format.RGBA4444);
    pixmap.setColor(0, 0, 0, 1);
    pixmap.fill();
    fadeTexture = new Texture(pixmap);
    pixmap.dispose();

    alpha = new MutableFloat(0.0f);
    fadeOut = Tween.to(alpha, 0, 0.5f).target(1.0f).ease(TweenEquations.easeInQuint);
    fadeIn = Tween.to(alpha, 0, 0.5f).target(0.0f).ease(TweenEquations.easeInQuint);
    fading = false;

    // Set initial input handlers.
    Gdx.input.setInputProcessor(states[currState.getValue()]);
    Controllers.addListener(states[currState.getValue()]);

    // Set log level.
    if (ProjectConstants.DEBUG) {
        Gdx.app.setLogLevel(Application.LOG_DEBUG);
    } else {
        Gdx.app.setLogLevel(Application.LOG_NONE);
    }
}

From source file:ve.ucv.ciens.ccg.nxtar.states.AutomaticActionState.java

License:Apache License

@Override
public void render(float delta) {
    int w, h;
    byte[] frame;
    MarkerData data;
    TextureRegion region;
    float focalPointX, focalPointY, cameraCenterX, cameraCenterY;

    // Clear the screen.
    Gdx.gl.glClearColor(1, 1, 1, 1);
    Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);

    // Render the background, optionally through a custom shader that receives
    // the screen scaling factors.
    core.batch.setProjectionMatrix(pixelPerfectOrthographicCamera.combined);
    core.batch.begin();
    {
        if (backgroundShader != null) {
            core.batch.setShader(backgroundShader);
            backgroundShader.setUniform2fv("u_scaling", uScaling, 0, 2);
        }
        background.draw(core.batch);
        if (backgroundShader != null)
            core.batch.setShader(null);
    }
    core.batch.end();

    // Fetch the current video frame.
    frame = frameMonitor.getCurrentFrame();
    w = frameMonitor.getFrameDimensions().getWidth();
    h = frameMonitor.getFrameDimensions().getHeight();

    // Create the 3D perspective camera and the frame buffer object if they don't exist.
    // Both are lazily sized to the incoming video frame.
    if (perspectiveCamera == null && frameBuffer == null) {
        frameBuffer = new FrameBuffer(Format.RGBA8888, w, h, true);
        frameBuffer.getColorBufferTexture().setFilter(TextureFilter.Linear, TextureFilter.Linear);

        perspectiveCamera = new CustomPerspectiveCamera(67, w, h);
        perspectiveCamera.translate(0.0f, 0.0f, 0.0f);
        perspectiveCamera.near = NEAR;
        perspectiveCamera.far = FAR;
        perspectiveCamera.lookAt(0.0f, 0.0f, -1.0f);
        perspectiveCamera.update();
    }

    // Attempt to find the markers in the current video frame.
    data = core.cvProc.findMarkersInFrame(frame);

    // If a valid frame was fetched.
    if (data != null && data.outFrame != null) {
        if (automaticActionEnabled)
            performAutomaticAction(data);

        // Set the camera to the correct projection using the calibrated
        // intrinsic parameters from the CV processor.
        focalPointX = core.cvProc.getFocalPointX();
        focalPointY = core.cvProc.getFocalPointY();
        cameraCenterX = core.cvProc.getCameraCenterX();
        cameraCenterY = core.cvProc.getCameraCenterY();
        perspectiveCamera.setCustomARProjectionMatrix(focalPointX, focalPointY, cameraCenterX, cameraCenterY,
                NEAR, FAR, w, h);
        perspectiveCamera.update(perspectiveCamera.projection);

        // Update the game state (delta is converted from seconds to ms).
        gameWorld.setDelta(Gdx.graphics.getDeltaTime() * 1000);
        gameWorld.getSystem(MarkerPositioningSystem.class).setMarkerData(data);
        gameWorld.process();

        // Decode the video frame. The pixmap is disposed as soon as its pixels
        // are uploaded to the texture.
        videoFrame = new Pixmap(data.outFrame, 0, w * h);
        videoFrameTexture = new Texture(videoFrame);
        videoFrameTexture.setFilter(TextureFilter.Linear, TextureFilter.Linear);
        videoFrame.dispose();

        // Convert the decoded frame into a renderable texture.
        region = new TextureRegion(videoFrameTexture, 0, 0, w, h);
        if (renderableVideoFrame == null)
            renderableVideoFrame = new Sprite(region);
        else
            renderableVideoFrame.setRegion(region);
        renderableVideoFrame.setOrigin(renderableVideoFrame.getWidth() / 2,
                renderableVideoFrame.getHeight() / 2);
        renderableVideoFrame.setPosition(0, 0);

        // Set the 3D frame buffer for rendering.
        frameBuffer.begin();
        {
            // Set OpenGL state.
            Gdx.gl.glClearColor(0, 0, 0, 0);
            Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
            Gdx.gl.glDisable(GL20.GL_TEXTURE_2D);

            // Call rendering systems.
            markerRenderingSystem.begin(perspectiveCamera);
            markerRenderingSystem.process();
            markerRenderingSystem.end();
        }
        frameBuffer.end();

        // Set the frame buffer object texture to a renderable sprite.
        region = new TextureRegion(frameBuffer.getColorBufferTexture(), 0, 0, frameBuffer.getWidth(),
                frameBuffer.getHeight());
        // FBO textures are rendered upside down; flip vertically.
        region.flip(false, true);
        if (frameBufferSprite == null)
            frameBufferSprite = new Sprite(region);
        else
            frameBufferSprite.setRegion(region);
        frameBufferSprite.setOrigin(frameBufferSprite.getWidth() / 2, frameBufferSprite.getHeight() / 2);
        frameBufferSprite.setPosition(0, 0);

        // Set the position and orientation of the renderable video frame and the frame buffer.
        if (!Ouya.runningOnOuya) {
            renderableVideoFrame.setSize(1.0f,
                    renderableVideoFrame.getHeight() / renderableVideoFrame.getWidth());
            renderableVideoFrame.rotate90(true);
            renderableVideoFrame.translate(-renderableVideoFrame.getWidth() / 2,
                    0.5f - renderableVideoFrame.getHeight());

            frameBufferSprite.setSize(1.0f, frameBufferSprite.getHeight() / frameBufferSprite.getWidth());
            frameBufferSprite.rotate90(true);
            frameBufferSprite.translate(-frameBufferSprite.getWidth() / 2,
                    0.5f - frameBufferSprite.getHeight());
        } else {
            // NOTE(review): (w / h) is INTEGER division -- the aspect ratio is
            // truncated (e.g. 640/480 -> 1). Confirm whether ((float) w / h)
            // was intended.
            float xSize = Gdx.graphics.getHeight() * (w / h);
            renderableVideoFrame.setSize(xSize * ProjectConstants.OVERSCAN,
                    Utils.getScreenHeightWithOverscan());
            renderableVideoFrame.rotate90(true);
            renderableVideoFrame.translate(-renderableVideoFrame.getWidth() / 2,
                    -renderableVideoFrame.getHeight() / 2);

            frameBufferSprite.setSize(xSize * ProjectConstants.OVERSCAN, Utils.getScreenHeightWithOverscan());
            frameBufferSprite.rotate90(true);
            frameBufferSprite.translate(-frameBufferSprite.getWidth() / 2, -frameBufferSprite.getHeight() / 2);
        }

        // Set the correct camera for the device.
        if (!Ouya.runningOnOuya) {
            core.batch.setProjectionMatrix(unitaryOrthographicCamera.combined);
        } else {
            core.batch.setProjectionMatrix(pixelPerfectOrthographicCamera.combined);
        }

        // Render the video frame and the frame buffer.
        core.batch.begin();
        {
            renderableVideoFrame.draw(core.batch);
            frameBufferSprite.draw(core.batch);
        }
        core.batch.end();

        // Clear the video frame from memory.
        videoFrameTexture.dispose();
    }

    // UI overlay: the start button always, plus the O button hint on Ouya.
    core.batch.setProjectionMatrix(pixelPerfectOrthographicCamera.combined);
    core.batch.begin();
    {
        startButton.draw(core.batch, 1.0f);
        if (Ouya.runningOnOuya)
            ouyaOButton.draw(core.batch);
    }
    core.batch.end();

    data = null;
}

From source file:ve.ucv.ciens.ccg.nxtar.states.CameraCalibrationState.java

License:Apache License

@Override
public void render(float delta) {
    byte[] frame;
    // NOTE(review): prevFrame is a LOCAL that is always null on entry, so the
    // Arrays.equals(frame, prevFrame) check below can never be true and the
    // assignment at the end of the method has no effect. For the
    // duplicate-frame skip to work, prevFrame must be a class field.
    byte[] prevFrame = null;
    Size dimensions = null;

    // Clear the screen.
    Gdx.gl.glClearColor(1, 1, 1, 1);
    Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);

    // Render the background, optionally through a custom shader.
    core.batch.setProjectionMatrix(pixelPerfectCamera.combined);
    core.batch.begin();
    {
        if (backgroundShader != null) {
            core.batch.setShader(backgroundShader);
            backgroundShader.setUniform2fv("u_scaling", u_scaling, 0, 2);
        }
        background.draw(core.batch);
        if (backgroundShader != null)
            core.batch.setShader(null);
    }
    core.batch.end();

    // Fetch the current video frame.
    frame = frameMonitor.getCurrentFrame();

    // Find the calibration points in the video frame.
    CalibrationData data = core.cvProc.findCalibrationPattern(frame);

    // If the user requested a sample be taken.
    if (!cameraCalibrated && data.calibrationPoints != null) {
        Gdx.app.log(TAG, CLASS_NAME + ".render(): Sample taken.");

        // Save the calibration points to the samples array (pairs of x, y).
        for (int i = 0; i < data.calibrationPoints.length; i += 2) {
            Gdx.app.log(TAG,
                    CLASS_NAME + ".render(): Value " + Integer.toString(i) + " = ("
                            + Float.toString(data.calibrationPoints[i]) + ", "
                            + Float.toString(data.calibrationPoints[i + 1]) + ")");
            calibrationSamples[lastSampleTaken][i] = data.calibrationPoints[i];
            calibrationSamples[lastSampleTaken][i + 1] = data.calibrationPoints[i + 1];
        }

        // Move to the next sample.
        lastSampleTaken++;

        // If enough samples have been taken then calibrate the camera and
        // return to the main menu.
        if (lastSampleTaken == ProjectConstants.CALIBRATION_SAMPLES) {
            Gdx.app.log(TAG, CLASS_NAME + "render(): Last sample taken.");

            core.cvProc.calibrateCamera(calibrationSamples, frame);
            cameraCalibrated = core.cvProc.isCameraCalibrated();
            core.onCameraCalibrated();
            core.nextState = game_states_t.MAIN_MENU;
        }
    }

    if (frame != null && data != null && data.outFrame != null && !Arrays.equals(frame, prevFrame)) {
        // If the received frame is valid and is different from the previous frame.
        // Make a texture from the frame; the pixmap is disposed right after
        // its pixels are uploaded.
        dimensions = frameMonitor.getFrameDimensions();
        videoFrame = new Pixmap(data.outFrame, 0, dimensions.getWidth() * dimensions.getHeight());
        videoFrameTexture = new Texture(videoFrame);
        videoFrameTexture.setFilter(TextureFilter.Linear, TextureFilter.Linear);
        videoFrame.dispose();

        // Set up the frame texture as a rendereable sprite.
        TextureRegion region = new TextureRegion(videoFrameTexture, 0, 0, dimensions.getWidth(),
                dimensions.getHeight());
        renderableVideoFrame = new Sprite(region);
        renderableVideoFrame.setOrigin(renderableVideoFrame.getWidth() / 2,
                renderableVideoFrame.getHeight() / 2);
        if (!Ouya.runningOnOuya) {
            renderableVideoFrame.setSize(1.0f,
                    renderableVideoFrame.getHeight() / renderableVideoFrame.getWidth());
            renderableVideoFrame.rotate90(true);
            renderableVideoFrame.translate(-renderableVideoFrame.getWidth() / 2,
                    0.5f - renderableVideoFrame.getHeight());
        } else {
            // NOTE(review): getWidth()/getHeight() here look like ints, which
            // would make this INTEGER division (aspect ratio truncated) --
            // confirm Size's return types and whether a float cast is needed.
            float xSize = Gdx.graphics.getHeight() * (dimensions.getWidth() / dimensions.getHeight());
            renderableVideoFrame.setSize(xSize * ProjectConstants.OVERSCAN,
                    Gdx.graphics.getHeight() * ProjectConstants.OVERSCAN);
            renderableVideoFrame.rotate90(true);
            renderableVideoFrame.translate(-renderableVideoFrame.getWidth() / 2,
                    -renderableVideoFrame.getHeight() / 2);
        }

        // Render the frame.
        if (!Ouya.runningOnOuya)
            core.batch.setProjectionMatrix(camera.combined);
        else
            core.batch.setProjectionMatrix(pixelPerfectCamera.combined);
        core.batch.begin();
        {
            renderableVideoFrame.draw(core.batch);
        }
        core.batch.end();

        // Clear texture memory.
        videoFrameTexture.dispose();
    }

    // Save this frame as previous to avoid processing the same frame twice when network latency is high.
    // NOTE(review): dead store -- prevFrame is local, see the declaration above.
    prevFrame = frame;
}

From source file:ve.ucv.ciens.ccg.nxtar.states.InGameState.java

License:Apache License

@Override
public void render(float delta) {
    // Horizontal limits for the device-roll slider, leaving room for the
    // "correct angle" LED sprite on each side when it exists.
    final float MIN_SLIDER_X = correctAngleLedOnSprite != null
            ? -(Utils.getScreenWidthWithOverscan() / 2) + 5 + correctAngleLedOnSprite.getWidth()
            : -(Utils.getScreenWidthWithOverscan() / 2) + 5;
    final float MAX_SLIDER_X = correctAngleLedOnSprite != null
            ? (Utils.getScreenWidthWithOverscan() / 2) - 5 - correctAngleLedOnSprite.getWidth()
            : (Utils.getScreenWidthWithOverscan() / 2) - 5;
    int w, h;
    float t, xSliderPos;
    byte[] frame;
    MarkerData data;
    TextureRegion region;
    float focalPointX, focalPointY, cameraCenterX, cameraCenterY;

    // Clear the screen.
    Gdx.gl.glClearColor(1, 1, 1, 1);
    Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT);

    // Render the background, optionally through a custom shader.
    core.batch.setProjectionMatrix(pixelPerfectOrthographicCamera.combined);
    core.batch.begin();
    {
        if (backgroundShader != null) {
            core.batch.setShader(backgroundShader);
            backgroundShader.setUniform2fv("u_scaling", uScaling, 0, 2);
        }
        background.draw(core.batch);
        if (backgroundShader != null)
            core.batch.setShader(null);
    }
    core.batch.end();

    // Fetch the current video frame.
    frame = frameMonitor.getCurrentFrame();
    w = frameMonitor.getFrameDimensions().getWidth();
    h = frameMonitor.getFrameDimensions().getHeight();

    // Create the 3D perspective camera and the frame buffer object if they don't exist.
    // Both are lazily sized to the incoming video frame.
    if (perspectiveCamera == null && frameBuffer == null) {
        frameBuffer = new FrameBuffer(Format.RGBA8888, w, h, true);
        frameBuffer.getColorBufferTexture().setFilter(TextureFilter.Linear, TextureFilter.Linear);

        perspectiveCamera = new CustomPerspectiveCamera(67, w, h);
        perspectiveCamera.translate(0.0f, 0.0f, 0.0f);
        perspectiveCamera.near = NEAR;
        perspectiveCamera.far = FAR;
        perspectiveCamera.lookAt(0.0f, 0.0f, -1.0f);
        perspectiveCamera.update();
    }

    // Attempt to find the markers in the current video frame.
    data = core.cvProc.findMarkersInFrame(frame);

    // If a valid frame was fetched.
    if (data != null && data.outFrame != null) {
        try {
            // Set the camera to the correct projection using the calibrated
            // intrinsic parameters from the CV processor.
            focalPointX = core.cvProc.getFocalPointX();
            focalPointY = core.cvProc.getFocalPointY();
            cameraCenterX = core.cvProc.getCameraCenterX();
            cameraCenterY = core.cvProc.getCameraCenterY();
            perspectiveCamera.setCustomARProjectionMatrix(focalPointX, focalPointY, cameraCenterX,
                    cameraCenterY, NEAR, FAR, w, h);
            perspectiveCamera.update(perspectiveCamera.projection);

            // Update the game state. Collisions only apply in arm-control mode
            // (always on Ouya).
            if (controlMode == robot_control_mode_t.ARM_CONTROL || Ouya.runningOnOuya)
                gameWorld.getSystem(CollisionDetectionSystem.class).enableCollisions();
            else
                gameWorld.getSystem(CollisionDetectionSystem.class).disableCollisions();

            gameWorld.setDelta(Gdx.graphics.getDeltaTime() * 1000);
            gameWorld.getSystem(MarkerPositioningSystem.class).setMarkerData(data);
            gameWorld.process();

            // Decode the video frame; the pixmap is disposed once uploaded.
            videoFrame = new Pixmap(data.outFrame, 0, w * h);
            videoFrameTexture = new Texture(videoFrame);
            videoFrameTexture.setFilter(TextureFilter.Linear, TextureFilter.Linear);
            videoFrame.dispose();

            // Convert the decoded frame into a renderable texture.
            region = new TextureRegion(videoFrameTexture, 0, 0, w, h);
            if (renderableVideoFrame == null)
                renderableVideoFrame = new Sprite(region);
            else
                renderableVideoFrame.setRegion(region);
            renderableVideoFrame.setOrigin(renderableVideoFrame.getWidth() / 2,
                    renderableVideoFrame.getHeight() / 2);
            renderableVideoFrame.setPosition(0, 0);

            // Set the 3D frame buffer for rendering.
            frameBuffer.begin();
            {
                // Set OpenGL state.
                Gdx.gl.glClearColor(0, 0, 0, 0);
                Gdx.gl.glClear(GL20.GL_COLOR_BUFFER_BIT | GL20.GL_DEPTH_BUFFER_BIT);
                Gdx.gl.glDisable(GL20.GL_TEXTURE_2D);

                // Call rendering systems.
                markerRenderingSystem.begin(perspectiveCamera);
                markerRenderingSystem.process();
                markerRenderingSystem.end();

                // The robot arm is only rendered in arm-control mode (or on Ouya).
                if (controlMode.getValue() == robot_control_mode_t.ARM_CONTROL.getValue()
                        || Ouya.runningOnOuya) {
                    robotArmRenderingSystem.begin(perspectiveCamera);
                    robotArmRenderingSystem.process();
                    robotArmRenderingSystem.end();
                }
            }
            frameBuffer.end();

            // Set the frame buffer object texture to a renderable sprite.
            region = new TextureRegion(frameBuffer.getColorBufferTexture(), 0, 0, frameBuffer.getWidth(),
                    frameBuffer.getHeight());
            // FBO textures are rendered upside down; flip vertically.
            region.flip(false, true);
            if (frameBufferSprite == null)
                frameBufferSprite = new Sprite(region);
            else
                frameBufferSprite.setRegion(region);
            frameBufferSprite.setOrigin(frameBufferSprite.getWidth() / 2, frameBufferSprite.getHeight() / 2);
            frameBufferSprite.setPosition(0, 0);

            // Set the position and orientation of the renderable video frame and the frame buffer.
            if (!Ouya.runningOnOuya) {
                renderableVideoFrame.setSize(1.0f,
                        renderableVideoFrame.getHeight() / renderableVideoFrame.getWidth());
                renderableVideoFrame.rotate90(true);
                renderableVideoFrame.translate(-renderableVideoFrame.getWidth() / 2,
                        0.5f - renderableVideoFrame.getHeight());

                frameBufferSprite.setSize(1.0f, frameBufferSprite.getHeight() / frameBufferSprite.getWidth());
                frameBufferSprite.rotate90(true);
                frameBufferSprite.translate(-frameBufferSprite.getWidth() / 2,
                        0.5f - frameBufferSprite.getHeight());

            } else {
                // NOTE(review): (w / h) is INTEGER division -- the aspect
                // ratio is truncated (e.g. 640/480 -> 1). Confirm whether
                // ((float) w / h) was intended.
                float xSize = Gdx.graphics.getHeight() * (w / h);
                renderableVideoFrame.setSize(xSize * ProjectConstants.OVERSCAN,
                        Utils.getScreenHeightWithOverscan());
                renderableVideoFrame.rotate90(true);
                renderableVideoFrame.translate(-renderableVideoFrame.getWidth() / 2,
                        -renderableVideoFrame.getHeight() / 2);

                frameBufferSprite.setSize(xSize * ProjectConstants.OVERSCAN,
                        Utils.getScreenHeightWithOverscan());
                frameBufferSprite.rotate90(true);
                frameBufferSprite.translate(-frameBufferSprite.getWidth() / 2,
                        -frameBufferSprite.getHeight() / 2);
            }

            // Set the correct camera for the device.
            if (!Ouya.runningOnOuya) {
                core.batch.setProjectionMatrix(unitaryOrthographicCamera.combined);
            } else {
                core.batch.setProjectionMatrix(pixelPerfectOrthographicCamera.combined);
            }

            // Render the video frame and the frame buffer.
            core.batch.begin();
            {
                renderableVideoFrame.draw(core.batch);
                frameBufferSprite.draw(core.batch);
            }
            core.batch.end();

            // Clear the video frame from memory.
            videoFrameTexture.dispose();
        } catch (GdxRuntimeException e) {
            Gdx.app.error(TAG, CLASS_NAME + ".render(): Runtime exception caught: ", e);
        }
    }

    // Render the interface buttons (touch controls; not shown on Ouya,
    // which uses a physical controller).
    if (!Ouya.runningOnOuya) {
        core.batch.setProjectionMatrix(pixelPerfectOrthographicCamera.combined);
        core.batch.begin();
        {
            // Draw control mode button.
            if (controlMode.getValue() == robot_control_mode_t.WHEEL_CONTROL.getValue()) {
                // Draw motor control buttons.
                motorAButton.draw(core.batch);
                motorBButton.draw(core.batch);
                motorCButton.draw(core.batch);
                motorDButton.draw(core.batch);
                wheelControlButton.draw(core.batch);
            } else if (controlMode.getValue() == robot_control_mode_t.ARM_CONTROL.getValue()) {
                // Draw arm control buttons.
                armAButton.draw(core.batch);
                armBButton.draw(core.batch);
                armCButton.draw(core.batch);
                armDButton.draw(core.batch);
                armControlButton.draw(core.batch);
            } else {
                throw new IllegalStateException(
                        "Unrecognized control mode: " + Integer.toString(controlMode.getValue()));
            }

            headAButton.draw(core.batch);
            headBButton.draw(core.batch);
            headCButton.draw(core.batch);

            // Draw device rotation led and the roll slider. The slider maps
            // roll in [-60, 60] degrees onto [MIN_SLIDER_X, MAX_SLIDER_X]
            // via linear interpolation, then clamps.
            if (Utils.isDeviceRollValid()) {
                if (Math.abs(Gdx.input.getRoll()) < ProjectConstants.MAX_ABS_ROLL)
                    correctAngleLedOnSprite.draw(core.batch);
                else
                    correctAngleLedOffSprite.draw(core.batch);

                t = (Gdx.input.getRoll() + 60.0f) / 120.0f;
                xSliderPos = (MIN_SLIDER_X * t) + (MAX_SLIDER_X * (1.0f - t));
                xSliderPos = xSliderPos < MIN_SLIDER_X ? MIN_SLIDER_X
                        : (xSliderPos > MAX_SLIDER_X ? MAX_SLIDER_X : xSliderPos);
                orientationSlider.setPosition(xSliderPos, orientationSlider.getY());
                orientationSlider.draw(core.batch);
            } else {
                correctAngleLedOffSprite.draw(core.batch);
                orientationSlider.draw(core.batch);
            }
        }
        core.batch.end();
    }

    // The hint button is drawn on every device.
    core.batch.setProjectionMatrix(pixelPerfectOrthographicCamera.combined);
    core.batch.begin();
    {
        hintButton.draw(core.batch);
    }
    core.batch.end();

    fadeEffectRenderingSystem.process();
    playerSystem.process();

    data = null;
}

From source file:YOGOSec.core.gui.ProgressBar.java

License:LGPL

/**
 * Creates a progress bar inside the given bounds, backed by an RGBA8888
 * pixmap sized to the (possibly adjusted) widget dimensions.
 *
 * @param bounds   widget bounds; widths/heights under 10 are bumped up to 10.
 * @param min      minimum value of the bar's range.
 * @param max      maximum value of the bar's range.
 * @param progress current value, expected within [min, max].
 */
public ProgressBar(Rectanglef bounds, float min, float max, float progress) {
    super(bounds);
    // Enforce a minimum drawable size of 10x10 pixels.
    if (this.getWidth() < 10)
        this.setWidth(10f);
    if (this.getHeight() < 10)
        this.setHeight(10f);
    this.updateBounds();
    this.min = min;
    this.max = max;
    // Normalize progress into [0, 1]. NOTE(review): the offset uses
    // Math.abs(min), which only maps progress == min to 0 when min <= 0;
    // confirm intended behaviour for ranges with a positive minimum.
    this.progress = (progress + Math.abs(min)) / (max - min);
    this.pixmap = new Pixmap((int) this.getWidth(), (int) this.getHeight(), Pixmap.Format.RGBA8888);
    // Render the initial bar state into the pixmap.
    this.update();
}