RecordFBOActivity.java

  1. /*
  2.  * Copyright 2013 Google Inc. All rights reserved.
  3.  *
  4.  * Licensed under the Apache License, Version 2.0 (the "License");
  5.  * you may not use this file except in compliance with the License.
  6.  * You may obtain a copy of the License at
  7.  *
  8.  *      http://www.apache.org/licenses/LICENSE-2.0
  9.  *
  10.  * Unless required by applicable law or agreed to in writing, software
  11.  * distributed under the License is distributed on an "AS IS" BASIS,
  12.  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13.  * See the License for the specific language governing permissions and
  14.  * limitations under the License.
  15.  */
  16.  
  17. package com.android.grafika;
  18.  
  19. import android.media.ExifInterface;
  20. import android.net.Uri;
  21. import android.opengl.GLES20;
  22. import android.opengl.GLES30;
  23. import android.opengl.GLUtils;
  24. import android.opengl.Matrix;
  25. import android.os.Bundle;
  26. import android.os.Environment;
  27. import android.os.Handler;
  28. import android.os.Looper;
  29. import android.os.Message;
  30. import android.provider.MediaStore;
  31. import android.util.Log;
  32. import android.view.Choreographer;
  33. import android.view.Surface;
  34. import android.view.SurfaceHolder;
  35. import android.view.SurfaceView;
  36. import android.view.View;
  37. import android.widget.Button;
  38. import android.widget.RadioButton;
  39. import android.widget.TextView;
  40. import android.app.Activity;
  41. import android.content.Context;
  42. import android.database.Cursor;
  43. import android.graphics.Bitmap;
  44. import android.graphics.BitmapFactory;
  45. import android.graphics.Rect;
  46.  
  47. import com.android.grafika.gles.Drawable2d;
  48. import com.android.grafika.gles.EglCore;
  49. import com.android.grafika.gles.FlatShadedProgram;
  50. import com.android.grafika.gles.FullFrameRect;
  51. import com.android.grafika.gles.GlUtil;
  52. import com.android.grafika.gles.Sprite2d;
  53. import com.android.grafika.gles.Texture2dProgram;
  54. import com.android.grafika.gles.WindowSurface;
  55.  
  56. import java.io.File;
  57. import java.io.IOException;
  58. import java.lang.ref.WeakReference;
  59.  
  60. /**
  61.  * Demonstrates efficient display + recording of OpenGL rendering using an FBO.  This
  62.  * records only the GL surface (i.e. not the app UI, nav bar, status bar, or alert dialog).
  63.  * <p>
  64.  * This uses a plain SurfaceView, rather than GLSurfaceView, so we have full control
  65.  * over the EGL config and rendering.  When available, we use GLES 3, which allows us
  66.  * to do recording with one extra copy instead of two.
  67.  * <p>
  68.  * We use Choreographer so our animation matches vsync, and a separate rendering
  69.  * thread to keep the heavy lifting off of the UI thread.  Ideally we'd let the render
  70.  * thread receive the Choreographer events directly, but that appears to be creating
  71.  * a permanent JNI global reference to the render thread object, preventing it from
  72.  * being garbage collected (which, in turn, causes the Activity to be retained).  So
  73.  * instead we receive the vsync on the UI thread and forward it.
  74.  * <p>
  75.  * If the rendering is fairly simple, it may be more efficient to just render the scene
  76.  * twice (i.e. configure for display, call draw(), configure for video, call draw()).  If
  77.  * the video being created is at a lower resolution than the display, rendering at the lower
  78.  * resolution may produce better-looking results than a downscaling blit.
  79.  * <p>
  80.  * To reduce the impact of recording on rendering (which is probably a fancy-looking game),
  81.  * we want to perform the recording tasks on a separate thread.  The actual video encoding
  82.  * is performed in a separate process by the hardware H.264 encoder, so feeding input into
  83.  * the encoder requires little effort.  The MediaMuxer step runs on the CPU and performs
  84.  * disk I/O, so we really want to drain the encoder on a separate thread.
  85.  * <p>
  86.  * Some other examples use a pair of EGL contexts, configured to share state.  We don't want
  87.  * to do that here, because GLES3 allows us to improve performance by using glBlitFramebuffer(),
  88.  * and framebuffer objects aren't shared.  So we use a single EGL context for rendering to
  89.  * both the display and the video encoder.
  90.  * <p>
  91.  * It might appear that shifting the rendering for the encoder input to a different thread
  92.  * would be advantageous, but in practice all of the work is done by the GPU, and submitting
  93.  * the requests from different CPU cores isn't going to matter.
  94.  * <p>
  95.  * As always, we have to be careful about sharing state across threads.  By fully configuring
  96.  * the encoder before starting the encoder thread, we ensure that the new thread sees a
  97.  * fully-constructed object.  The encoder object then "lives" in the encoder thread.  The main
  98.  * thread doesn't need to talk to it directly, because all of the input goes through Surface.
  99.  * <p>
  100.  * TODO: add another bouncing rect that uses decoded video as a texture.  Useful for
  101.  * evaluating simultaneous video playback and recording.
  102.  * <p>
  103.  * TODO: show the MP4 file name somewhere in the UI so people can find it in the player
  104.  */
  105. public class RecordFBOActivity extends Activity implements SurfaceHolder.Callback,
  106.         Choreographer.FrameCallback {
  107.     private static final String TAG = MainActivity.TAG;
  108.  
  109.     // See the (lengthy) notes at the top of HardwareScalerActivity for thoughts about
  110.     // Activity / Surface lifecycle management.
  111.  
  112.     private static final int RECMETHOD_DRAW_TWICE = 0;
  113.     private static final int RECMETHOD_FBO = 1;
  114.     private static final int RECMETHOD_BLIT_FRAMEBUFFER = 2;
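    // How frames reach the encoder: DRAW_TWICE renders the scene a second time into the
    // encoder's input surface, BLIT_FRAMEBUFFER copies the display buffer with the GLES3
    // glBlitFramebuffer() call, and FBO goes through an off-screen framebuffer (in this
    // modified version the FBO path feeds a still image texture to the encoder instead).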
  115.  
  116.     private boolean mRecordingEnabled = false;          // controls button state
  117.     private boolean mBlitFramebufferAllowed = false;    // requires GLES3
  118.     private int mSelectedRecordMethod;                  // current radio button
  119.  
  120.     private RenderThread mRenderThread;
  121.  
  122.     @Override
  123.     protected void onCreate(Bundle savedInstanceState) {
  124.         super.onCreate(savedInstanceState);
  125.         setContentView(R.layout.activity_record_fbo);
  126.  
  127.         mSelectedRecordMethod = RECMETHOD_FBO;
  128.         updateControls();
  129.  
  130.         SurfaceView sv = (SurfaceView) findViewById(R.id.fboActivity_surfaceView);
  131.         sv.getHolder().addCallback(this);
  132.  
  133.         Log.d(TAG, "RecordFBOActivity: onCreate done");
  134.     }
  135.  
  136.     @Override
  137.     protected void onPause() {
  138.         super.onPause();
  139.  
  140.         // TODO: we might want to stop recording here.  As it is, we continue "recording",
  141.         //       which is pretty boring since we're not outputting any frames (test this
  142.         //       by blanking the screen with the power button).
  143.  
  144.         // If the callback was posted, remove it.  This stops the notifications.  Ideally we
  145.         // would send a message to the thread letting it know, so when it wakes up it can
  146.         // reset its notion of when the previous Choreographer event arrived.
  147.         Log.d(TAG, "onPause unhooking choreographer");
  148.         Choreographer.getInstance().removeFrameCallback(this);
  149.     }
  150.  
  151.     @Override
  152.     protected void onResume() {
  153.         super.onResume();
  154.  
  155.         // If we already have a Surface, we just need to resume the frame notifications.
  156.         if (mRenderThread != null) {
  157.             Log.d(TAG, "onResume re-hooking choreographer");
  158.             Choreographer.getInstance().postFrameCallback(this);
  159.         }
  160.  
  161.         updateControls();
  162.     }
  163.  
  164.     @Override
  165.     public void surfaceCreated(SurfaceHolder holder) {
  166.         Log.d(TAG, "surfaceCreated holder=" + holder);
  167.  
  168.         File outputFile = new File(getFilesDir(), "fbo-gl-recording.mp4");
  169.         SurfaceView sv = (SurfaceView) findViewById(R.id.fboActivity_surfaceView);
  170.         mRenderThread = new RenderThread(sv.getHolder(), new ActivityHandler(this), outputFile,
  171.                 MiscUtils.getDisplayRefreshNsec(this));
  172.         mRenderThread.setName("RecordFBO GL render");
  173.         mRenderThread.start();
  174.         mRenderThread.waitUntilReady();
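        // waitUntilReady() blocks until the render thread has created its Looper and Handler,
        // so the getHandler() calls below return a usable handler.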
  175.         mRenderThread.setRecordMethod(mSelectedRecordMethod);
  176.  
  177.         RenderHandler rh = mRenderThread.getHandler();
  178.         if (rh != null) {
  179.             rh.sendSurfaceCreated();
  180.         }
  181.  
  182.         // start the draw events
  183.         Choreographer.getInstance().postFrameCallback(this);
  184.     }
  185.  
  186.     @Override
  187.     public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
  188.         Log.d(TAG, "surfaceChanged fmt=" + format + " size=" + width + "x" + height +
  189.                 " holder=" + holder);
  190.         RenderHandler rh = mRenderThread.getHandler();
  191.         if (rh != null) {
  192.             rh.sendSurfaceChanged(format, width, height);
  193.         }
  194.     }
  195.  
  196.     @Override
  197.     public void surfaceDestroyed(SurfaceHolder holder) {
  198.         Log.d(TAG, "surfaceDestroyed holder=" + holder);
  199.  
  200.         // We need to wait for the render thread to shut down before continuing because we
  201.         // don't want the Surface to disappear out from under it mid-render.  The frame
  202.         // notifications will have been stopped back in onPause(), but there might have
  203.         // been one in progress.
  204.         //
  205.         // TODO: the RenderThread doesn't currently wait for the encoder / muxer to stop,
  206.         //       so we can't use this as an indication that the .mp4 file is complete.
  207.  
  208.         RenderHandler rh = mRenderThread.getHandler();
  209.         if (rh != null) {
  210.             rh.sendShutdown();
  211.             try {
  212.                 mRenderThread.join();
  213.             } catch (InterruptedException ie) {
  214.                 // not expected
  215.                 throw new RuntimeException("join was interrupted", ie);
  216.             }
  217.         }
  218.         mRenderThread = null;
  219.         mRecordingEnabled = false;
  220.  
  221.         // If the callback was posted, remove it.  Without this, we could get one more
  222.         // call on doFrame().
  223.         Choreographer.getInstance().removeFrameCallback(this);
  224.         Log.d(TAG, "surfaceDestroyed complete");
  225.     }
  226.  
  227.     /*
  228.      * Choreographer callback, called near vsync.
  229.      *
  230.      * @see android.view.Choreographer.FrameCallback#doFrame(long)
  231.      */
    @Override
    public void doFrame(long frameTimeNanos) {
        RenderHandler rh = mRenderThread.getHandler();
        if (rh != null) {
            Choreographer.getInstance().postFrameCallback(this);
            // Only forward every other vsync so we don't issue draws faster than needed.
            if ((callbackNr & 1) == 1) {
                rh.sendDoFrame(frameTimeNanos);
            }
        }
        callbackNr++;
    }

    // Counts Choreographer callbacks; used to issue a draw every two vsyncs.
    private long callbackNr = 0;

  242.     /**
  243.      * Updates the GLES version string.
  244.      * <p>
  245.      * Called from the render thread (via ActivityHandler) after the EGL context is created.
  246.      */
  247.     void handleShowGlesVersion(int version) {
  248.         TextView tv = (TextView) findViewById(R.id.glesVersionValue_text);
  249.         tv.setText("" + version);
  250.         if (version >= 3) {
  251.             mBlitFramebufferAllowed = true;
  252.             updateControls();
  253.         }
  254.     }
  255.  
  256.     /**
  257.      * Updates the FPS counter.
  258.      * <p>
  259.      * Called periodically from the render thread (via ActivityHandler).
  260.      */
  261.     void handleUpdateFps(int tfps, int dropped) {
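        // "tfps" arrives as fps * 1000 (see ActivityHandler.sendFpsUpdate), so dividing by
        // 1000.0f recovers the fractional frame rate for display.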
  262.         String str = getString(R.string.frameRateFormat, tfps / 1000.0f, dropped);
  263.         TextView tv = (TextView) findViewById(R.id.frameRateValue_text);
  264.         tv.setText(str);
  265.     }
  266.  
  267.     /**
  268.      * onClick handler for "record" button.
  269.      * <p>
  270.      * Ideally we'd grey out the button while in a state of transition, e.g. while the
  271.      * MediaMuxer finishes creating the file, and in the (very brief) period before the
  272.      * SurfaceView's surface is created.
  273.      */
  274.     public void clickToggleRecording(@SuppressWarnings("unused") View unused) {
  275.         Log.d(TAG, "clickToggleRecording");
  276.         RenderHandler rh = mRenderThread.getHandler();
  277.         if (rh != null) {
  278.             mRecordingEnabled = !mRecordingEnabled;
  279.             updateControls();
  280.             rh.setRecordingEnabled(mRecordingEnabled);
  281.         }
  282.     }
  283.  
  284.     /**
  285.      * onClick handler for radio buttons.
  286.      */
  287.     public void onRadioButtonClicked(View view) {
  288.         RadioButton rb = (RadioButton) view;
  289.         if (!rb.isChecked()) {
  290.             Log.d(TAG, "Got click on non-checked radio button");
  291.             return;
  292.         }
  293.  
  294.         switch (rb.getId()) {
  295.             case R.id.recDrawTwice_radio:
  296.                 mSelectedRecordMethod = RECMETHOD_DRAW_TWICE;
  297.                 break;
  298.             case R.id.recFbo_radio:
  299.                 mSelectedRecordMethod = RECMETHOD_FBO;
  300.                 break;
  301.             case R.id.recFramebuffer_radio:
  302.                 mSelectedRecordMethod = RECMETHOD_BLIT_FRAMEBUFFER;
  303.                 break;
  304.             default:
  305.                 throw new RuntimeException("Click from unknown id " + rb.getId());
  306.         }
  307.  
  308.         Log.d(TAG, "Selected rec mode " + mSelectedRecordMethod);
  309.         RenderHandler rh = mRenderThread.getHandler();
  310.         if (rh != null) {
  311.             rh.setRecordMethod(mSelectedRecordMethod);
  312.         }
  313.     }
  314.  
  315.     /**
  316.      * Updates the on-screen controls to reflect the current state of the app.
  317.      */
  318.     private void updateControls() {
  319.         Button toggleRelease = (Button) findViewById(R.id.fboRecord_button);
  320.         int id = mRecordingEnabled ?
  321.                 R.string.toggleRecordingOff : R.string.toggleRecordingOn;
  322.         toggleRelease.setText(id);
  323.  
  324.         RadioButton rb;
  325.         rb = (RadioButton) findViewById(R.id.recDrawTwice_radio);
  326.         rb.setChecked(mSelectedRecordMethod == RECMETHOD_DRAW_TWICE);
  327.         rb = (RadioButton) findViewById(R.id.recFbo_radio);
  328.         rb.setChecked(mSelectedRecordMethod == RECMETHOD_FBO);
  329.         rb = (RadioButton) findViewById(R.id.recFramebuffer_radio);
  330.         rb.setChecked(mSelectedRecordMethod == RECMETHOD_BLIT_FRAMEBUFFER);
  331.         rb.setEnabled(mBlitFramebufferAllowed);
  332.  
  333.         TextView tv = (TextView) findViewById(R.id.nowRecording_text);
  334.         if (mRecordingEnabled) {
  335.             tv.setText(getString(R.string.nowRecording));
  336.         } else {
  337.             tv.setText("");
  338.         }
  339.     }
  340.  
  341.  
  342.     /**
  343.      * Handles messages sent from the render thread to the UI thread.
  344.      * <p>
  345.      * The object is created on the UI thread, and all handlers run there.
  346.      */
  347.     static class ActivityHandler extends Handler {
  348.         private static final int MSG_GLES_VERSION = 0;
  349.         private static final int MSG_UPDATE_FPS = 1;
  350.  
  351.         // Weak reference to the Activity; only access this from the UI thread.
  352.         private WeakReference<RecordFBOActivity> mWeakActivity;
  353.  
  354.         public ActivityHandler(RecordFBOActivity activity) {
  355.             mWeakActivity = new WeakReference<RecordFBOActivity>(activity);
  356.         }
  357.  
  358.         /**
  359.          * Send the GLES version.
  360.          * <p>
  361.          * Call from non-UI thread.
  362.          */
  363.         public void sendGlesVersion(int version) {
  364.             sendMessage(obtainMessage(MSG_GLES_VERSION, version, 0));
  365.         }
  366.  
  367.         /**
  368.          * Send an FPS update.  "fps" should be in thousands of frames per second
  369.          * (i.e. fps * 1000), so we can get fractional fps even though the Handler only
  370.          * supports passing integers.
  371.          * <p>
  372.          * Call from non-UI thread.
  373.          */
  374.         public void sendFpsUpdate(int tfps, int dropped) {
  375.             sendMessage(obtainMessage(MSG_UPDATE_FPS, tfps, dropped));
  376.         }
  377.  
  378.         @Override  // runs on UI thread
  379.         public void handleMessage(Message msg) {
  380.             int what = msg.what;
  381.             //Log.d(TAG, "ActivityHandler [" + this + "]: what=" + what);
  382.  
  383.             RecordFBOActivity activity = mWeakActivity.get();
  384.             if (activity == null) {
  385.                 Log.w(TAG, "ActivityHandler.handleMessage: activity is null");
  386.                 return;
  387.             }
  388.  
  389.             switch (what) {
  390.                 case MSG_GLES_VERSION:
  391.                     activity.handleShowGlesVersion(msg.arg1);
  392.                     break;
  393.                 case MSG_UPDATE_FPS:
  394.                     activity.handleUpdateFps(msg.arg1, msg.arg2);
  395.                     break;
  396.                 default:
  397.                     throw new RuntimeException("unknown msg " + what);
  398.             }
  399.         }
  400.     }
  401.  
  402.  
  403.     /**
  404.      * This class handles all OpenGL rendering.
  405.      * <p>
  406.      * We use Choreographer to coordinate with the device vsync.  We deliver one frame
  407.      * per vsync.  We can't actually know when the frame we render will be drawn, but at
  408.      * least we get a consistent frame interval.
  409.      * <p>
  410.      * Start the render thread after the Surface has been created.
  411.      */
  412.     private static class RenderThread extends Thread {
  413.         // Object must be created on render thread to get correct Looper, but is used from
  414.         // UI thread, so we need to declare it volatile to ensure the UI thread sees a fully
  415.         // constructed object.
  416.         private volatile RenderHandler mHandler;
  417.  
  418.         // Handler we can send messages to if we want to update the app UI.
  419.         private ActivityHandler mActivityHandler;
  420.  
  421.         // Used to wait for the thread to start.
  422.         private Object mStartLock = new Object();
  423.         private boolean mReady = false;
  424.  
  425.         private volatile SurfaceHolder mSurfaceHolder;  // may be updated by UI thread
  426.         private EglCore mEglCore;
  427.         private WindowSurface mWindowSurface;
  428.         private FlatShadedProgram mProgram;
  429.  
  430.         // Orthographic projection matrix.
  431.         private float[] mDisplayProjectionMatrix = new float[16];
  432.  
  433.         private final Drawable2d mTriDrawable = new Drawable2d(Drawable2d.Prefab.TRIANGLE);
  434.         private final Drawable2d mRectDrawable = new Drawable2d(Drawable2d.Prefab.RECTANGLE);
  435.  
  436.         // One spinning triangle, one bouncing rectangle, and four edge-boxes.
  437.         private Sprite2d mTri;
  438.         private Sprite2d mRect;
        private Sprite2d[] mEdges;
  440.         private Sprite2d mRecordRect;
  441.         private float mRectVelX, mRectVelY;     // velocity, in viewport units per second
  442.         private float mInnerLeft, mInnerTop, mInnerRight, mInnerBottom;
  443.  
  444.         private final float[] mIdentityMatrix;
  445.  
  446.         // Previous frame time.
  447.         private long mPrevTimeNanos;
  448.  
  449.         // FPS / drop counter.
  450.         private long mRefreshPeriodNanos;
  451.         private long mFpsCountStartNanos;
  452.         private int mFpsCountFrame;
  453.         private int mDroppedFrames;
  454.         private boolean mPreviousWasDropped;
  455.  
  456.         // Used for off-screen rendering.
        private int mOffscreenTexture;          // color attachment of the FBO
        private int mImgTex_bb;                 // still image texture fed to the encoder
  458.         private int mFramebuffer;
  459.         private int mDepthBuffer;
  460.         private FullFrameRect mFullScreen;
  461.  
  462.         // Used for recording.
  463.         private boolean mRecordingEnabled;
  464.         private File mOutputFile;
  465.         private WindowSurface mInputWindowSurface;
  466.         private TextureMovieEncoder2 mVideoEncoder;
  467.         private int mRecordMethod;
  468.         private boolean mRecordedPrevious;
  469.         private Rect mVideoRect;
        private long frameInd = 0;              // frames handed to the encoder so far (FBO path)
  471.  
  472.         /**
  473.          * Pass in the SurfaceView's SurfaceHolder.  Note the Surface may not yet exist.
  474.          */
  475.         public RenderThread(SurfaceHolder holder, ActivityHandler ahandler, File outputFile,
  476.                 long refreshPeriodNs) {
  477.             mSurfaceHolder = holder;
  478.             mActivityHandler = ahandler;
  479.             mOutputFile = outputFile;
  480.             mRefreshPeriodNanos = refreshPeriodNs;
  481.  
  482.             mVideoRect = new Rect();
  483.  
  484.             mIdentityMatrix = new float[16];
  485.             Matrix.setIdentityM(mIdentityMatrix, 0);
  486.  
  487.             mTri = new Sprite2d(mTriDrawable);
  488.             mRect = new Sprite2d(mRectDrawable);
  489.             mEdges = new Sprite2d[4];
  490.             for (int i = 0; i < mEdges.length; i++) {
  491.                 mEdges[i] = new Sprite2d(mRectDrawable);
  492.             }
  493.             mRecordRect = new Sprite2d(mRectDrawable);
  494.         }
  495.  
  496.         /**
  497.          * Thread entry point.
  498.          * <p>
  499.          * The thread should not be started until the Surface associated with the SurfaceHolder
  500.          * has been created.  That way we don't have to wait for a separate "surface created"
  501.          * message to arrive.
  502.          */
  503.         @Override
  504.         public void run() {
  505.             Looper.prepare();
  506.             mHandler = new RenderHandler(this);
  507.             mEglCore = new EglCore(null, EglCore.FLAG_RECORDABLE | EglCore.FLAG_TRY_GLES3);
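            // FLAG_RECORDABLE requests an EGL config with EGL_RECORDABLE_ANDROID set, so the
            // surface can feed the video encoder; FLAG_TRY_GLES3 asks for a GLES 3 context and
            // falls back to GLES 2 if the device doesn't support it.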
  508.             synchronized (mStartLock) {
  509.                 mReady = true;
  510.                 mStartLock.notify();    // signal waitUntilReady()
  511.             }
  512.  
  513.             Looper.loop();
  514.  
  515.             Log.d(TAG, "looper quit");
  516.             releaseGl();
  517.             mEglCore.release();
  518.  
  519.             synchronized (mStartLock) {
  520.                 mReady = false;
  521.             }
  522.         }
  523.  
  524.         /**
  525.          * Waits until the render thread is ready to receive messages.
  526.          * <p>
  527.          * Call from the UI thread.
  528.          */
  529.         public void waitUntilReady() {
  530.             synchronized (mStartLock) {
  531.                 while (!mReady) {
  532.                     try {
  533.                         mStartLock.wait();
  534.                     } catch (InterruptedException ie) { /* not expected */ }
  535.                 }
  536.             }
  537.         }
  538.  
  539.         /**
  540.          * Shuts everything down.
  541.          */
  542.         private void shutdown() {
  543.             Log.d(TAG, "shutdown");
  544.             stopEncoder();
  545.             Looper.myLooper().quit();
  546.         }
  547.  
  548.         /**
  549.          * Returns the render thread's Handler.  This may be called from any thread.
  550.          */
  551.         public RenderHandler getHandler() {
  552.             return mHandler;
  553.         }
  554.  
  555.         /**
  556.          * Prepares the surface.
  557.          */
  558.         private void surfaceCreated() {
  559.             Surface surface = mSurfaceHolder.getSurface();
  560.             prepareGl(surface);
  561.         }
  562.  
  563.         /**
  564.          * Prepares window surface and GL state.
  565.          */
  566.         private void prepareGl(Surface surface) {
  567.             Log.d(TAG, "prepareGl");
  568.  
  569.             mWindowSurface = new WindowSurface(mEglCore, surface, false);
  570.             mWindowSurface.makeCurrent();
  571.  
  572.             // Used for blitting texture to FBO.
  573.             mFullScreen = new FullFrameRect(
  574.                     new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_2D));
  575.  
  576.             // Program used for drawing onto the screen.
  577.             mProgram = new FlatShadedProgram();
  578.  
  579.             // Set the background color.
  580.             GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
  581.  
  582.             // Disable depth testing -- we're 2D only.
  583.             GLES20.glDisable(GLES20.GL_DEPTH_TEST);
  584.  
  585.             // Don't need backface culling.  (If you're feeling pedantic, you can turn it on to
  586.             // make sure we're defining our shapes correctly.)
  587.             GLES20.glDisable(GLES20.GL_CULL_FACE);
  588.  
  589.             mActivityHandler.sendGlesVersion(mEglCore.getGlVersion());
  590.         }
  591.  
  592.        /**
  593.          * Handles changes to the size of the underlying surface.  Adjusts viewport as needed.
  594.          * Must be called before we start drawing.
  595.          * (Called from RenderHandler.)
  596.          */
  597.         private void surfaceChanged(int width, int height) {
  598.             Log.d(TAG, "surfaceChanged " + width + "x" + height);
  599.  
  600.             prepareFramebuffer(width, height);
  601.  
  602.             // Use full window.
  603.             GLES20.glViewport(0, 0, width, height);
  604.  
  605.             // Simple orthographic projection, with (0,0) in lower-left corner.
  606.             Matrix.orthoM(mDisplayProjectionMatrix, 0, 0, width, 0, height, -1, 1);
  607.  
  608.             int smallDim = Math.min(width, height);
  609.  
  610.             // Set initial shape size / position / velocity based on window size.  Movement
  611.             // has the same "feel" on all devices, but the actual path will vary depending
  612.             // on the screen proportions.  We do it here, rather than defining fixed values
  613.             // and tweaking the projection matrix, so that our squares are square.
  614.             mTri.setColor(0.1f, 0.9f, 0.1f);
  615.             mTri.setScale(smallDim / 4.0f, smallDim / 4.0f);
  616.             mTri.setPosition(width / 2.0f, height / 2.0f);
  617.             mRect.setColor(0.9f, 0.1f, 0.1f);
  618.             mRect.setScale(smallDim / 8.0f, smallDim / 8.0f);
  619.             mRect.setPosition(width / 2.0f, height / 2.0f);
  620.             mRectVelX = 1 + smallDim / 4.0f;
  621.             mRectVelY = 1 + smallDim / 5.0f;
  622.  
  623.             // left edge
  624.             float edgeWidth = 1 + width / 64.0f;
  625.             mEdges[0].setScale(edgeWidth, height);
  626.             mEdges[0].setPosition(edgeWidth / 2.0f, height / 2.0f);
  627.             // right edge
  628.             mEdges[1].setScale(edgeWidth, height);
  629.             mEdges[1].setPosition(width - edgeWidth / 2.0f, height / 2.0f);
  630.             // top edge
  631.             mEdges[2].setScale(width, edgeWidth);
  632.             mEdges[2].setPosition(width / 2.0f, height - edgeWidth / 2.0f);
  633.             // bottom edge
  634.             mEdges[3].setScale(width, edgeWidth);
  635.             mEdges[3].setPosition(width / 2.0f, edgeWidth / 2.0f);
  636.  
  637.             mRecordRect.setColor(1.0f, 1.0f, 1.0f);
  638.             mRecordRect.setScale(edgeWidth * 2f, edgeWidth * 2f);
  639.             mRecordRect.setPosition(edgeWidth / 2.0f, edgeWidth / 2.0f);
  640.  
  641.             // Inner bounding rect, used to bounce objects off the walls.
  642.             mInnerLeft = mInnerBottom = edgeWidth;
  643.             mInnerRight = width - 1 - edgeWidth;
  644.             mInnerTop = height - 1 - edgeWidth;
  645.  
  646.             Log.d(TAG, "mTri: " + mTri);
  647.             Log.d(TAG, "mRect: " + mRect);
  648.         }
  649.  
  650.         /**
  651.          * Prepares the off-screen framebuffer.
  652.          */
  653.         private void prepareFramebuffer(int width, int height) {
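            // FBO setup sequence: create a color texture, create and bind a framebuffer object,
            // add a depth renderbuffer, attach both to the FBO, then verify the result with
            // glCheckFramebufferStatus() before switching back to the default framebuffer.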
  654.             GlUtil.checkGlError("prepareFramebuffer start");
  655.  
  656.             int[] values = new int[2];
  657.  
  658.             // Create a texture object and bind it.  This will be the color buffer.
  659.             GLES20.glGenTextures(2, values, 0);
  660.             GlUtil.checkGlError("glGenTextures");
  661.             mOffscreenTexture = values[0];   // expected > 0
  662.             GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mOffscreenTexture);
  663.             GlUtil.checkGlError("glBindTexture " + mOffscreenTexture);
  664.  
  665.             // Create texture storage.
  666.             GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
  667.                     GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
  668.  
  669.             // Set parameters.  We're probably using non-power-of-two dimensions, so
  670.             // some values may not be available for use.
  671.             GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
  672.                     GLES20.GL_NEAREST);
  673.             GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
  674.                     GLES20.GL_LINEAR);
  675.             GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
  676.                     GLES20.GL_CLAMP_TO_EDGE);
  677.             GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
  678.                     GLES20.GL_CLAMP_TO_EDGE);
  679.             GlUtil.checkGlError("glTexParameter");
  680.  
  681.             // Create framebuffer object and bind it.
  682.             GLES20.glGenFramebuffers(1, values, 0);
  683.             GlUtil.checkGlError("glGenFramebuffers");
  684.             mFramebuffer = values[0];    // expected > 0
  685.             GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebuffer);
  686.             GlUtil.checkGlError("glBindFramebuffer " + mFramebuffer);
  687.  
  688.             // Create a depth buffer and bind it.
  689.             GLES20.glGenRenderbuffers(1, values, 0);
  690.             GlUtil.checkGlError("glGenRenderbuffers");
  691.             mDepthBuffer = values[0];    // expected > 0
  692.             GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, mDepthBuffer);
  693.             GlUtil.checkGlError("glBindRenderbuffer " + mDepthBuffer);
  694.  
  695.             // Allocate storage for the depth buffer.
  696.             GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16,
  697.                     width, height);
  698.             GlUtil.checkGlError("glRenderbufferStorage");
  699.  
  700.             // Attach the depth buffer and the texture (color buffer) to the framebuffer object.
  701.             GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT,
  702.                     GLES20.GL_RENDERBUFFER, mDepthBuffer);
  703.             GlUtil.checkGlError("glFramebufferRenderbuffer");
  704.             GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
  705.                     GLES20.GL_TEXTURE_2D, mOffscreenTexture, 0);
  706.             GlUtil.checkGlError("glFramebufferTexture2D");
  707.  
  708.             // See if GLES is happy with all this.
  709.             int status = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
  710.             if (status != GLES20.GL_FRAMEBUFFER_COMPLETE) {
  711.                 throw new RuntimeException("Framebuffer not complete, status=" + status);
  712.             }
  713.  
  714.             // Switch back to the default framebuffer.
  715.             GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  716.             GlUtil.checkGlError("prepareFramebuffer done");
            // Second texture from glGenTextures() above: holds the still image that the
            // FBO recording path blits into the encoder.
            mImgTex_bb = values[1];

            String imgPath = Environment.getExternalStoragePublicDirectory(
                    Environment.DIRECTORY_MOVIES) + "/android_mediaclips/" + "IMG_2299.JPG";
            // Decode the image and rotate it to match its EXIF orientation
            // (adapted from ThumbnailManager.loadImage() / adaptOrientation()).
            Bitmap bmp = adaptOrientation(loadImage(imgPath, 1280, 720), getOrientation(imgPath));

            Log.d(TAG, "uploading background image to texture " + mImgTex_bb);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mImgTex_bb);
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);
            GlUtil.checkGlError("texImage2D");
            bmp.recycle();

            // Set parameters.  We're probably using non-power-of-two dimensions, so
            // some values may not be available for use.
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,
                    GLES20.GL_NEAREST);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,
                    GLES20.GL_LINEAR);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,
                    GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,
                    GLES20.GL_CLAMP_TO_EDGE);
            GlUtil.checkGlError("glTexParameter");
        }
  755.  
  756.         /**
  757.          * Releases most of the GL resources we currently hold.
  758.          * <p>
  759.          * Does not release EglCore.
  760.          */
  761.         private void releaseGl() {
  762.             GlUtil.checkGlError("releaseGl start");
  763.  
  764.             int[] values = new int[1];
  765.  
  766.             if (mWindowSurface != null) {
  767.                 mWindowSurface.release();
  768.                 mWindowSurface = null;
  769.             }
  770.             if (mProgram != null) {
  771.                 mProgram.release();
  772.                 mProgram = null;
  773.             }
  774.             if (mOffscreenTexture > 0) {
  775.                 values[0] = mOffscreenTexture;
  776.                 GLES20.glDeleteTextures(1, values, 0);
  777.                 mOffscreenTexture = -1;
  778.             }
  779.             if (mFramebuffer > 0) {
  780.                 values[0] = mFramebuffer;
  781.                 GLES20.glDeleteFramebuffers(1, values, 0);
  782.                 mFramebuffer = -1;
  783.             }
  784.             if (mDepthBuffer > 0) {
  785.                 values[0] = mDepthBuffer;
  786.                 GLES20.glDeleteRenderbuffers(1, values, 0);
  787.                 mDepthBuffer = -1;
  788.             }
  789.             if (mFullScreen != null) {
  790.                 mFullScreen.release(false); // TODO: should be "true"; must ensure mEglCore current
  791.                 mFullScreen = null;
  792.             }
  793.  
  794.             GlUtil.checkGlError("releaseGl done");
  795.  
  796.             mEglCore.makeNothingCurrent();
  797.         }
  798.  
  799.         /**
  800.          * Updates the recording state.  Stops or starts recording as needed.
  801.          */
  802.         private void setRecordingEnabled(boolean enabled) {
  803.             if (enabled == mRecordingEnabled) {
  804.                 return;
  805.             }
  806.             if (enabled) {
  807.                 startEncoder();
  808.             } else {
  809.                 stopEncoder();
  810.             }
  811.             mRecordingEnabled = enabled;
  812.         }
  813.  
  814.         /**
  815.          * Changes the method we use to render frames to the encoder.
  816.          */
  817.         private void setRecordMethod(int recordMethod) {
  818.             Log.d(TAG, "RT: setRecordMethod " + recordMethod);
  819.             mRecordMethod = recordMethod;
  820.         }
  821.  
  822.         /**
  823.          * Creates the video encoder object and starts the encoder thread.  Creates an EGL
  824.          * surface for encoder input.
  825.          */
  826.         private void startEncoder() {
  827.             Log.d(TAG, "starting to record");
  828.             // Record at 1280x720, regardless of the window dimensions.  The encoder may
  829.             // explode if given "strange" dimensions, e.g. a width that is not a multiple
  830.             // of 16.  We can box it as needed to preserve dimensions.
            final int BIT_RATE = 7000000;   // 7 Mbps
  832.             final int VIDEO_WIDTH = 1280;
  833.             final int VIDEO_HEIGHT = 720;
  834.             int windowWidth = mWindowSurface.getWidth();
  835.             int windowHeight = mWindowSurface.getHeight();
  836.             float windowAspect = (float) windowHeight / (float) windowWidth;
  837.             int outWidth, outHeight;
  838.             if (VIDEO_HEIGHT > VIDEO_WIDTH * windowAspect) {
  839.                 // limited by narrow width; reduce height
  840.                 outWidth = VIDEO_WIDTH;
  841.                 outHeight = (int) (VIDEO_WIDTH * windowAspect);
  842.             } else {
  843.                 // limited by short height; restrict width
  844.                 outHeight = VIDEO_HEIGHT;
  845.                 outWidth = (int) (VIDEO_HEIGHT / windowAspect);
  846.             }
  847.             int offX = (VIDEO_WIDTH - outWidth) / 2;
  848.             int offY = (VIDEO_HEIGHT - outHeight) / 2;
  849.             mVideoRect.set(offX, offY, offX + outWidth, offY + outHeight);
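            // Worked example (hypothetical 1080x1920 portrait window): windowAspect is ~1.78,
            // so the else branch gives outWidth = 720 / 1.78 ~= 405 and outHeight = 720; the
            // content is pillarboxed into the 1280x720 frame at offX = 437, offY = 0.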
  850.             Log.d(TAG, "Adjusting window " + windowWidth + "x" + windowHeight +
  851.                     " to +" + offX + ",+" + offY + " " +
  852.                     mVideoRect.width() + "x" + mVideoRect.height());
  853.  
  854.             VideoEncoderCore encoderCore;
  855.             try {
  856.                 encoderCore = new VideoEncoderCore(VIDEO_WIDTH, VIDEO_HEIGHT,
  857.                         BIT_RATE, mOutputFile);
  858.             } catch (IOException ioe) {
  859.                 throw new RuntimeException(ioe);
  860.             }
  861.             mInputWindowSurface = new WindowSurface(mEglCore, encoderCore.getInputSurface(), true);
  862.             mVideoEncoder = new TextureMovieEncoder2(encoderCore);
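            // TextureMovieEncoder2 drains the encoder on its own thread; we feed it by rendering
            // into mInputWindowSurface and calling frameAvailableSoon() before each frame.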
  863.         }
  864.  
  865.         /**
  866.          * Stops the video encoder if it's running.
  867.          */
  868.         private void stopEncoder() {
  869.             if (mVideoEncoder != null) {
  870.                 Log.d(TAG, "stopping recorder, mVideoEncoder=" + mVideoEncoder);
  871.                 mVideoEncoder.stopRecording();
  872.                 // TODO: wait (briefly) until it finishes shutting down so we know file is
  873.                 //       complete, or have a callback that updates the UI
  874.                 mVideoEncoder = null;
  875.             }
  876.             if (mInputWindowSurface != null) {
  877.                 mInputWindowSurface.release();
  878.                 mInputWindowSurface = null;
  879.             }
  880.         }

        // Timestamp of the first frame handed to the encoder (FBO path); presentation times
        // are rebased relative to this so the recording's timeline starts at zero.
        private long startTime_ns = 0;

  882.         /**
  883.          * Advance state and draw frame in response to a vsync event.
  884.          */
        private void doFrame(long timeStampNanos) {
            // If we're not keeping up 60fps -- maybe something in the system is busy, maybe
            // recording is too expensive, maybe the CPU frequency governor thinks we're
            // not doing much and wants to drop the clock frequencies -- we need to drop frames
            // to catch up.  The "timeStampNanos" value is based on the system monotonic
            // clock, as is System.nanoTime(), so we can compare the values directly.
            //
            // Our clumsy collision detection isn't sophisticated enough to deal with large
            // time gaps, but it's nearly cost-free, so we go ahead and do the computation
            // either way.
            //
            // We can reduce the overhead of recording, as well as the size of the movie,
            // by recording at ~30fps instead of the display refresh rate.  As a quick hack
            // we just record every-other frame, using a "recorded previous" flag
            // (disabled in this modified version).
  899.  
  900.             update(timeStampNanos);
  901.  
  902.             long diff = System.nanoTime() - timeStampNanos;
  903.             long max = mRefreshPeriodNanos - 2000000;   // if we're within 2ms, don't bother
            if (frameInd == 0) {
                startTime_ns = timeStampNanos;
            }

  906.             if (diff > max) {
  907.                 // too much, drop a frame
  908.                 Log.d(TAG, "diff is " + (diff / 1000000.0) + " ms, max " + (max / 1000000.0) +
  909.                         ", skipping render");
  910.                 mRecordedPrevious = false;
  911.                 mPreviousWasDropped = true;
  912.                 mDroppedFrames++;
  913.                 return;
  914.             }
  915.  
  916.             boolean swapResult;
            if (!mRecordingEnabled) {   // every-other-frame skip ("|| mRecordedPrevious") disabled here
                mRecordedPrevious = false;
                // Render the scene, swap back to front.
                draw();
                swapResult = mWindowSurface.swapBuffers();
            } else {
                mRecordedPrevious = false;      // was "true" when skipping every other frame
  924.  
  925.                 // recording
  926.                 if (mRecordMethod == RECMETHOD_DRAW_TWICE) {
  927.                     //Log.d(TAG, "MODE: draw 2x");
  928.  
  929.                     // Draw for display, swap.
  930.                     draw();
  931.                     swapResult = mWindowSurface.swapBuffers();
  932.  
  933.                     // Draw for recording, swap.
  934.                     mVideoEncoder.frameAvailableSoon();
  935.                     mInputWindowSurface.makeCurrent();
  936.                     // If we don't set the scissor rect, the glClear() we use to draw the
  937.                     // light-grey background will draw outside the viewport and muck up our
  938.                     // letterboxing.  Might be better if we disabled the test immediately after
  939.                     // the glClear().  Of course, if we were clearing the frame background to
  940.                     // black it wouldn't matter.
  941.                     //
  942.                     // We do still need to clear the pixels outside the scissor rect, of course,
  943.                     // or we'll get garbage at the edges of the recording.  We can either clear
  944.                     // the whole thing and accept that there will be a lot of overdraw, or we
  945.                     // can issue multiple scissor/clear calls.  Some GPUs may have a special
  946.                     // optimization for zeroing out the color buffer.
  947.                     //
  948.                     // For now, be lazy and zero the whole thing.  At some point we need to
  949.                     // examine the performance here.
  950.                     GLES20.glClearColor(0f, 0f, 0f, 1f);
  951.                     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  952.  
  953.                     GLES20.glViewport(mVideoRect.left, mVideoRect.top,
  954.                             mVideoRect.width(), mVideoRect.height());
  955.                     GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
  956.                     GLES20.glScissor(mVideoRect.left, mVideoRect.top,
  957.                             mVideoRect.width(), mVideoRect.height());
  958.                     draw();
  959.                     GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
  960.                     mInputWindowSurface.setPresentationTime(timeStampNanos);
  961.                     mInputWindowSurface.swapBuffers();
  962.  
  963.                     // Restore.
  964.                     GLES20.glViewport(0, 0, mWindowSurface.getWidth(), mWindowSurface.getHeight());
  965.                     mWindowSurface.makeCurrent();
  966.  
  967.                 } else if (mEglCore.getGlVersion() >= 3 &&
  968.                         mRecordMethod == RECMETHOD_BLIT_FRAMEBUFFER) {
  969.                     //Log.d(TAG, "MODE: blitFramebuffer");
  970.                     // Draw the frame, but don't swap it yet.
  971.                     draw();
  972.  
  973.                     mVideoEncoder.frameAvailableSoon();
  974.                     mInputWindowSurface.makeCurrentReadFrom(mWindowSurface);
  975.                     // Clear the pixels we're not going to overwrite with the blit.  Once again,
  976.                     // this is excessive -- we don't need to clear the entire screen.
  977.                     GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
  978.                     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  979.                     GlUtil.checkGlError("before glBlitFramebuffer");
  980.                     Log.v(TAG, "glBlitFramebuffer: 0,0," + mWindowSurface.getWidth() + "," +
  981.                             mWindowSurface.getHeight() + "  " + mVideoRect.left + "," +
  982.                             mVideoRect.top + "," + mVideoRect.right + "," + mVideoRect.bottom +
  983.                             "  COLOR_BUFFER GL_NEAREST");
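                    // The read framebuffer is the display surface (set via makeCurrentReadFrom()
                    // above) and the draw framebuffer is the encoder surface, so this copies the
                    // full window into mVideoRect, scaling with GL_NEAREST.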
  984.                     GLES30.glBlitFramebuffer(
  985.                             0, 0, mWindowSurface.getWidth(), mWindowSurface.getHeight(),
  986.                             mVideoRect.left, mVideoRect.top, mVideoRect.right, mVideoRect.bottom,
  987.                             GLES30.GL_COLOR_BUFFER_BIT, GLES30.GL_NEAREST);
  988.                     int err;
  989.                     if ((err = GLES30.glGetError()) != GLES30.GL_NO_ERROR) {
  990.                         Log.w(TAG, "ERROR: glBlitFramebuffer failed: 0x" +
  991.                                 Integer.toHexString(err));
  992.                     }
  993.                     mInputWindowSurface.setPresentationTime(timeStampNanos);
  994.                     mInputWindowSurface.swapBuffers();
  995.  
  996.                     // Now swap the display buffer.
  997.                     mWindowSurface.makeCurrent();
  998.                     swapResult = mWindowSurface.swapBuffers();
  999.  
  1000.                 } else {
  1001.                     //Log.d(TAG, "MODE: offscreen + blit 2x");
  1002.                     // Render offscreen.
  1003. //                    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebuffer);
  1004. //                    GlUtil.checkGlError("glBindFramebuffer");
  1005. //                    draw();
  1006. ////                    mFullScreen.drawFrame(mImgTex_bb, mIdentityMatrix);
  1007. //                    // Blit to display.
  1008. //                    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  1009. //                    GlUtil.checkGlError("glBindFramebuffer");
  1010.  
                    // Uncomment these two lines to also render to the display.
//                    mFullScreen.drawFrame(mImgTex_bb, mIdentityMatrix);     // or mOffscreenTexture
//                    swapResult = mWindowSurface.swapBuffers();

                    // Blit the still image texture to the encoder.
                    Log.i(TAG, "Blit to encoder frame " + frameInd + " timestamp(ms): "
                            + (timeStampNanos - startTime_ns) / 1000000);
                    frameInd++;
                    mVideoEncoder.frameAvailableSoon();
                    if (frameInd < 5) {
                        mInputWindowSurface.makeCurrent();
                    }
                    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);    // again, only really need to
                    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);     //  clear pixels outside rect
                    GLES20.glViewport(mVideoRect.left, mVideoRect.top,
                            mVideoRect.width(), mVideoRect.height());
                    mFullScreen.drawFrame(mImgTex_bb, mIdentityMatrix);     // was mOffscreenTexture
                    // Rebase the presentation time so the recording's timeline starts at zero.
                    mInputWindowSurface.setPresentationTime(timeStampNanos - startTime_ns);
                    mInputWindowSurface.swapBuffers();
  1027.  
  1028. //                    // Restore previous values.
  1029. //                    GLES20.glViewport(0, 0, mWindowSurface.getWidth(), mWindowSurface.getHeight());
  1030. //                    mWindowSurface.makeCurrent();
  1031.                 }
  1032.             }
  1033.  
  1034.             mPreviousWasDropped = false;
  1035.  
  1036. //            if (!swapResult) {
  1037. //                // This can happen if the Activity stops without waiting for us to halt.
  1038. //                Log.w(TAG, "swapBuffers failed, killing renderer thread");
  1039. //                shutdown();
  1040. //                return;
  1041. //            }
  1042.  
  1043.             // Update the FPS counter.
  1044.             //
  1045.             // Ideally we'd generate something approximate quickly to make the UI look
  1046.             // reasonable, then ease into longer sampling periods.
  1047.             final int NUM_FRAMES = 120;
  1048.             final long ONE_TRILLION = 1000000000000L;
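            // NUM_FRAMES * 1e12 / elapsedNanos = (frames per second) * 1000, matching the
            // "thousands of fps" convention that sendFpsUpdate() expects.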
  1049.             if (mFpsCountStartNanos == 0) {
  1050.                 mFpsCountStartNanos = timeStampNanos;
  1051.                 mFpsCountFrame = 0;
  1052.             } else {
  1053.                 mFpsCountFrame++;
  1054.                 if (mFpsCountFrame == NUM_FRAMES) {
  1055.                     // compute thousands of frames per second
  1056.                     long elapsed = timeStampNanos - mFpsCountStartNanos;
  1057.                     mActivityHandler.sendFpsUpdate((int)(NUM_FRAMES * ONE_TRILLION / elapsed),
  1058.                             mDroppedFrames);
  1059.  
  1060.                     // reset
  1061.                     mFpsCountStartNanos = timeStampNanos;
  1062.                     mFpsCountFrame = 0;
  1063.                 }
  1064.             }
  1065.         }
  1066.        
  1067.         /**
  1068.          * We use the time delta from the previous event to determine how far everything
  1069.          * moves.  Ideally this will yield identical animation sequences regardless of
  1070.          * the device's actual refresh rate.
  1071.          */
  1072.         private void update(long timeStampNanos) {
  1073.             // Compute time from previous frame.
  1074.             long intervalNanos;
  1075.             if (mPrevTimeNanos == 0) {
  1076.                 intervalNanos = 0;
  1077.             } else {
  1078.                 intervalNanos = timeStampNanos - mPrevTimeNanos;
  1079.  
  1080.                 final long ONE_SECOND_NANOS = 1000000000L;
  1081.                 if (intervalNanos > ONE_SECOND_NANOS) {
  1082.                     // A gap this big should only happen if something paused us.  We can
  1083.                     // either cap the delta at one second, or just pretend like this is
  1084.                     // the first frame and not advance at all.
  1085.                     Log.d(TAG, "Time delta too large: " +
  1086.                             (double) intervalNanos / ONE_SECOND_NANOS + " sec");
  1087.                     intervalNanos = 0;
  1088.                 }
  1089.             }
  1090.             mPrevTimeNanos = timeStampNanos;
  1091.  
  1092.             final float ONE_BILLION_F = 1000000000.0f;
  1093.             final float elapsedSeconds = intervalNanos / ONE_BILLION_F;
  1094.  
  1095.             // Spin the triangle.  We want one full 360-degree rotation every 3 seconds,
  1096.             // or 120 degrees per second.
  1097.             final int SECS_PER_SPIN = 3;
  1098.             float angleDelta = (360.0f / SECS_PER_SPIN) * elapsedSeconds;
  1099.             mTri.setRotation(mTri.getRotation() + angleDelta);
  1100.  
  1101.             // Bounce the rect around the screen.  The rect is a 1x1 square scaled up to NxN.
  1102.             // We don't do fancy collision detection, so it's possible for the box to slightly
  1103.             // overlap the edges.  We draw the edges last, so it's not noticeable.
  1104.             float xpos = mRect.getPositionX();
  1105.             float ypos = mRect.getPositionY();
  1106.             float xscale = mRect.getScaleX();
  1107.             float yscale = mRect.getScaleY();
  1108.             xpos += mRectVelX * elapsedSeconds;
  1109.             ypos += mRectVelY * elapsedSeconds;
  1110.             if ((mRectVelX < 0 && xpos - xscale/2 < mInnerLeft) ||
  1111.                     (mRectVelX > 0 && xpos + xscale/2 > mInnerRight+1)) {
  1112.                 mRectVelX = -mRectVelX;
  1113.             }
  1114.             if ((mRectVelY < 0 && ypos - yscale/2 < mInnerBottom) ||
  1115.                     (mRectVelY > 0 && ypos + yscale/2 > mInnerTop+1)) {
  1116.                 mRectVelY = -mRectVelY;
  1117.             }
  1118.             mRect.setPosition(xpos, ypos);
  1119.         }
  1120.  
  1121.         /**
  1122.          * Draws the scene.
  1123.          */
  1124.         private void draw() {
  1125.             GlUtil.checkGlError("draw start");
  1126.  
  1127.             // Clear to a non-black color to make the content easily differentiable from
  1128.             // the pillar-/letter-boxing.
  1129.             GLES20.glClearColor(0.2f, 0.2f, 0.2f, 1.0f);
  1130.             GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  1131.  
  1132.             mTri.draw(mProgram, mDisplayProjectionMatrix);
  1133.             mRect.draw(mProgram, mDisplayProjectionMatrix);
  1134.             for (int i = 0; i < 4; i++) {
                // Dropped-frame indicator disabled here ("false &&"); the edges stay grey.
                if (false && mPreviousWasDropped) {
  1136.                     mEdges[i].setColor(1.0f, 0.0f, 0.0f);
  1137.                 } else {
  1138.                     mEdges[i].setColor(0.5f, 0.5f, 0.5f);
  1139.                 }
  1140.                 mEdges[i].draw(mProgram, mDisplayProjectionMatrix);
  1141.             }
  1142.  
  1143.             // Give a visual indication of the recording method.
  1144.             switch (mRecordMethod) {
  1145.                 case RECMETHOD_DRAW_TWICE:
  1146.                     mRecordRect.setColor(1.0f, 0.0f, 0.0f);
  1147.                     break;
  1148.                 case RECMETHOD_FBO:
  1149.                     mRecordRect.setColor(0.0f, 1.0f, 0.0f);
  1150.                     break;
  1151.                 case RECMETHOD_BLIT_FRAMEBUFFER:
  1152.                     mRecordRect.setColor(0.0f, 0.0f, 1.0f);
  1153.                     break;
  1154.                 default:
  1155.             }
  1156.             mRecordRect.draw(mProgram, mDisplayProjectionMatrix);
  1157.  
  1158.             GlUtil.checkGlError("draw done");
  1159.         }
  1160.     }
  1161.  
  1162.     /**
  1163.      * Handler for RenderThread.  Used for messages sent from the UI thread to the render thread.
  1164.      * <p>
  1165.      * The object is created on the render thread, and the various "send" methods are called
  1166.      * from the UI thread.  (A usage sketch follows this class.)
  1167.      */
  1168.     private static class RenderHandler extends Handler {
  1169.         private static final int MSG_SURFACE_CREATED = 0;
  1170.         private static final int MSG_SURFACE_CHANGED = 1;
  1171.         private static final int MSG_DO_FRAME = 2;
  1172.         private static final int MSG_RECORDING_ENABLED = 3;
  1173.         private static final int MSG_RECORD_METHOD = 4;
  1174.         private static final int MSG_SHUTDOWN = 5;
  1175.  
  1176.         // This shouldn't need to be a weak ref, since we'll go away when the Looper quits,
  1177.         // but no real harm in it.
  1178.         private WeakReference<RenderThread> mWeakRenderThread;
  1179.  
  1180.         /**
  1181.          * Call from render thread.
  1182.          */
  1183.         public RenderHandler(RenderThread rt) {
  1184.             mWeakRenderThread = new WeakReference<RenderThread>(rt);
  1185.         }
  1186.  
  1187.         /**
  1188.          * Sends the "surface created" message.
  1189.          * <p>
  1190.          * Call from UI thread.
  1191.          */
  1192.         public void sendSurfaceCreated() {
  1193.             sendMessage(obtainMessage(RenderHandler.MSG_SURFACE_CREATED));
  1194.         }
  1195.  
  1196.         /**
  1197.          * Sends the "surface changed" message, forwarding what we got from the SurfaceHolder.
  1198.          * <p>
  1199.          * Call from UI thread.
  1200.          */
  1201.         public void sendSurfaceChanged(@SuppressWarnings("unused") int format,
  1202.                 int width, int height) {
  1203.             // ignore format
  1204.             sendMessage(obtainMessage(RenderHandler.MSG_SURFACE_CHANGED, width, height));
  1205.         }
  1206.  
  1207.         /**
  1208.          * Sends the "do frame" message, forwarding the Choreographer event.
  1209.          * <p>
  1210.          * Call from UI thread.
  1211.          */
  1212.         public void sendDoFrame(long frameTimeNanos) {
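            // Pack the 64-bit timestamp into the Message's two 32-bit int args;
            // handleMessage() below reassembles it before calling doFrame().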
  1213.             sendMessage(obtainMessage(RenderHandler.MSG_DO_FRAME,
  1214.                     (int) (frameTimeNanos >> 32), (int) frameTimeNanos));
  1215.         }
  1216.  
  1217.         /**
  1218.          * Enable or disable recording.
  1219.          * <p>
  1220.          * Call from non-UI thread.
  1221.          */
  1222.         public void setRecordingEnabled(boolean enabled) {
  1223.             sendMessage(obtainMessage(MSG_RECORDING_ENABLED, enabled ? 1 : 0, 0));
  1224.         }
  1225.  
  1226.         /**
  1227.          * Set the method used to render a frame for the encoder.
  1228.          * <p>
  1229.          * Call from non-UI thread.
  1230.          */
  1231.         public void setRecordMethod(int recordMethod) {
  1232.             sendMessage(obtainMessage(MSG_RECORD_METHOD, recordMethod, 0));
  1233.         }
  1234.  
  1235.         /**
  1236.          * Sends the "shutdown" message, which tells the render thread to halt.
  1237.          * <p>
  1238.          * Call from UI thread.
  1239.          */
  1240.         public void sendShutdown() {
  1241.             sendMessage(obtainMessage(RenderHandler.MSG_SHUTDOWN));
  1242.         }
  1243.  
  1244.         @Override  // runs on RenderThread
  1245.         public void handleMessage(Message msg) {
  1246.             int what = msg.what;
  1247.             //Log.d(TAG, "RenderHandler [" + this + "]: what=" + what);
  1248.  
  1249.             RenderThread renderThread = mWeakRenderThread.get();
  1250.             if (renderThread == null) {
  1251.                 Log.w(TAG, "RenderHandler.handleMessage: weak ref is null");
  1252.                 return;
  1253.             }
  1254.  
  1255.             switch (what) {
  1256.                 case MSG_SURFACE_CREATED:
  1257.                     renderThread.surfaceCreated();
  1258.                     break;
  1259.                 case MSG_SURFACE_CHANGED:
  1260.                     renderThread.surfaceChanged(msg.arg1, msg.arg2);
  1261.                     break;
  1262.                 case MSG_DO_FRAME:
  1263.                     long timestamp = (((long) msg.arg1) << 32) |
  1264.                                      (((long) msg.arg2) & 0xffffffffL);
  1265.                     renderThread.doFrame(timestamp);
  1266.                     break;
  1267.                 case MSG_RECORDING_ENABLED:
  1268.                     renderThread.setRecordingEnabled(msg.arg1 != 0);
  1269.                     break;
  1270.                 case MSG_RECORD_METHOD:
  1271.                     renderThread.setRecordMethod(msg.arg1);
  1272.                     break;
  1273.                 case MSG_SHUTDOWN:
  1274.                     renderThread.shutdown();
  1275.                     break;
  1276.                 default:
  1277.                     throw new RuntimeException("unknown message " + what);
  1278.             }
  1279.         }
  1280.     }
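
    /*
     * Illustrative sketch only: the class name is invented for this example and nothing in
     * the activity references it.  It shows the expected calling pattern for RenderHandler,
     * i.e. a Choreographer callback on the UI thread forwarding each vsync to the render
     * thread.
     */
    private static class ChoreographerForwarderSketch implements Choreographer.FrameCallback {
        private final RenderHandler mHandler;   // handler created on the render thread

        ChoreographerForwarderSketch(RenderHandler handler) {
            mHandler = handler;
        }

        @Override   // runs on the thread that registered the callback (the UI thread here)
        public void doFrame(long frameTimeNanos) {
            // Forward the vsync timestamp to the render thread, then re-register so we
            // keep getting one callback per display frame.
            mHandler.sendDoFrame(frameTimeNanos);
            Choreographer.getInstance().postFrameCallback(this);
        }
    }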
  1281.
  1285.     /**
  1286.      * Loads a scaled-down image from storage to be used as a thumbnail.
  1287.      *
  1288.      * @param url Path of the image file.  A content URI must be resolved to a file
  1289.      *            path before calling this, so pass a file path directly when
  1290.      *            possible.
  1291.      * @param w Preferred width of the result.
  1292.      * @param h Preferred height of the result.
  1293.      * @return The Bitmap decoded from url, or null if the file cannot be decoded.
  1294.      */
  1295.     public static Bitmap loadImage(String url, int w, int h) {
  1296.  
  1297.         // Get the dimensions of the bitmap
  1298.         BitmapFactory.Options bmOptions = new BitmapFactory.Options();
  1299.         bmOptions.inJustDecodeBounds = true;
  1300.         BitmapFactory.decodeFile(url, bmOptions);
  1301.         double ow = (double) bmOptions.outWidth, oh = (double) bmOptions.outHeight;
  1302.  
  1303.         // Determine how much to scale down the image (the decoder rounds inSampleSize down to the nearest power of two)
  1304.         int scaleFactor = (int) Math.min(Math.ceil(ow / w), Math.ceil(oh / h));
  1305.  
  1306.         // Guard against a failed bounds decode (which yields scaleFactor 0); fall back to sampling every other pixel
  1307.         if (scaleFactor == 0)
  1308.             scaleFactor = 2;
  1309.  
  1310.         // Decode the image file into a Bitmap sized to fill the View
  1311.         bmOptions.inJustDecodeBounds = false;
  1312.         bmOptions.inSampleSize = scaleFactor;
  1313.         bmOptions.inPurgeable = true;   // deprecated; ignored on API 21 and later
  1314.  
  1315.         return BitmapFactory.decodeFile(url, bmOptions);
  1316.     }
  1317.  
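    /** Returns the aspect ratio as (long side / short side), so the result is always >= 1. */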
  1318.     public static float getAspectRatio(int w, int h) {
  1319.         return (Math.max(w, h) / (float) Math.min(w, h));
  1320.     }
  1321.  
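    /**
     * Decodes only the bounds of the image at the given location and returns its aspect
     * ratio (long side / short side).  Despite the parameter name, uri is used as a plain
     * file path.
     */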
  1322.     public static float getOriginalAspectRatio(String uri) {
  1323.  
  1324.         BitmapFactory.Options options = new BitmapFactory.Options();
  1325.         options.inJustDecodeBounds = true;
  1326.  
  1327.         // decodeFile() returns null here because inJustDecodeBounds is set; the dimensions land in options
  1328.         BitmapFactory.decodeFile(uri, options);
  1329.         int width = options.outWidth;
  1330.         int height = options.outHeight;
  1331.  
  1332.         return getAspectRatio(width, height);
  1333.     }
  1334.  
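    /**
     * Returns the rotation in degrees (0, 90, 180 or 270) indicated by the image's Exif
     * orientation tag, or 0 if the tag is missing or cannot be read.
     */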
  1335.     public static int getOrientation(String path) {
  1336.         if (path == null)
  1337.             return 0;
  1338.        
  1339.         try {
  1340.             Log.d(TAG, "getOrientation path: " + path);
  1341.             final ExifInterface exif = new ExifInterface(path);
  1342.             final int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
  1343.             switch (orientation) {
  1344.                 case ExifInterface.ORIENTATION_ROTATE_90:
  1345.                     return 90;
  1346.                 case ExifInterface.ORIENTATION_ROTATE_180:
  1347.                     return 180;
  1348.                 case ExifInterface.ORIENTATION_ROTATE_270:
  1349.                     return 270;
  1350.                 default:
  1351.                     return 0;
  1352.             }
  1353.         } catch (Exception e) {
  1354.             Log.e(TAG, "Error reading Exif orientation for " + path, e);
  1355.         }
  1356.  
  1357.         return 0;
  1358.     }
  1359.  
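    /**
     * Returns the bitmap rotated by r degrees (recycling the original when a new bitmap is
     * created), or the original bitmap unchanged if r is 0 or the rotation fails.
     */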
  1360.     public static Bitmap adaptOrientation(final Bitmap bitmap, int r) {
  1361.         try {
  1362.             if (r != 0) {
  1363.                 final android.graphics.Matrix m = new android.graphics.Matrix();
  1364.                 m.postRotate((float) r);
  1365.                 final Bitmap b = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), m, false);
  1366.  
  1367.                 if (b != bitmap)
  1368.                     bitmap.recycle();
  1369.  
  1370.                 return b;
  1371.             } else
  1372.                 return bitmap;
  1373.         } catch (Exception e) {     // if the rotation fails, fall back to the original bitmap
  1374.             return bitmap;
  1375.         }
  1376.     }
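
    /*
     * Convenience sketch tying the helpers above together; the method name is invented for
     * this example and is not called anywhere else in the file.  It decodes a scaled bitmap
     * with loadImage(), reads the Exif rotation with getOrientation(), and applies it with
     * adaptOrientation().
     */
    public static Bitmap loadOrientedThumbnail(String path, int w, int h) {
        Bitmap scaled = loadImage(path, w, h);
        if (scaled == null) {
            return null;    // decode failed (bad path or unsupported format)
        }
        // Rotate to match the Exif orientation flag, if present.
        return adaptOrientation(scaled, getOrientation(path));
    }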
  1377. }