+ * The VideoInfo constructor will prepare a TextureRender, create a SurfaceTexture, and
+ * then create a Surface for that SurfaceTexture. The Surface can be passed to
+ * MediaCodec.configure() to receive decoder output. When a frame arrives, we latch the
+ * texture with updateTexImage, then render the texture with GL onto whatever EGL
+ * surface is current.
+ *
+ * The (VideoInfo, clipMode) constructor does the same, but puts the renderer into
+ * clip mode: frames are cropped and scaled to the selected aspect ratio before
+ * encoding.
+ *
+ * By default, the Surface will be using a BufferQueue in asynchronous mode, so we
+ * can potentially drop frames.
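+ *
+ * Typical decode loop (sketch only; decoder, format and index are assumed to be
+ * set up elsewhere):
+ *
+ *   OutputSurfaceTwo output = new OutputSurfaceTwo(info);
+ *   decoder.configure(format, output.getSurface(), null, 0);
+ *   ...
+ *   decoder.releaseOutputBuffer(index, true); // render=true sends the frame to our Surface
+ *   output.awaitNewImage();                   // block until onFrameAvailable() fires
+ *   output.drawImage();                       // draw onto the current EGL surface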
+ */
+class OutputSurfaceTwo implements SurfaceTexture.OnFrameAvailableListener {
+ private static final String TAG = "OutputSurfaceTwo";
+ private static final boolean VERBOSE = false;
+ private static final int EGL_OPENGL_ES2_BIT = 4;
+ private EGL10 mEGL;
+ private EGLDisplay mEGLDisplay;
+ private EGLContext mEGLContext;
+ private EGLSurface mEGLSurface;
+ private SurfaceTexture mSurfaceTexture;
+ private Surface mSurface;
+ private Object mFrameSyncObject = new Object(); // guards mFrameAvailable
+ private boolean mFrameAvailable;
+ private TextureRender mTextureRender;
+
+ /**
+ * Creates an OutputSurfaceTwo that renders using the current EGL context, and a
+ * Surface that can be passed to MediaCodec.configure().
+ */
+ public OutputSurfaceTwo(VideoInfo info) {
+ if (info.width <= 0 || info.height <= 0) {
+ throw new IllegalArgumentException("invalid video size: " + info.width + "x" + info.height);
+ }
+ setup(info);
+ }
+
+ /**
+ * Creates instances of TextureRender and SurfaceTexture, and a Surface associated
+ * with the SurfaceTexture.
+ */
+ private void setup(VideoInfo info) {
+ mTextureRender = new TextureRender(info);
+ mTextureRender.surfaceCreated();
+ // Even if we don't access the SurfaceTexture after the constructor returns, we
+ // still need to keep a reference to it. The Surface doesn't retain a reference
+ // at the Java level, so if we don't either then the object can get GCed, which
+ // causes the native finalizer to run.
+ if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
+ mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
+ // This doesn't work if OutputSurfaceTwo is created on a thread that already
+ // owns a Looper.
+ //
+ // On such a thread, the SurfaceTexture constructor will create a Handler that
+ // uses that Looper. The "frame available" message is delivered there, but if
+ // that same thread is blocked in awaitNewImage() it will never be processed.
+ // For this to do anything useful, OutputSurfaceTwo must be created on a thread
+ // without a Looper, so that SurfaceTexture uses the main application Looper
+ // instead.
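+ //
+ // (On API 21+, setOnFrameAvailableListener(listener, handler) can target an
+ // explicit Handler instead of relying on the constructing thread's Looper.)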
+ //
+ // Java language note: passing "this" out of a constructor is generally unwise,
+ // but we should be able to get away with it here.
+ mSurfaceTexture.setOnFrameAvailableListener(this);
+ mSurface = new Surface(mSurfaceTexture);
+ }
+
+ /**
+ * Clip-mode constructor: output frames are cropped and scaled to the given clip mode.
+ * @param info source video info (width, height, rotation)
+ * @param clipMode one of the Constants.MODE_POR_* aspect-ratio modes
+ */
+ public OutputSurfaceTwo(VideoInfo info, int clipMode) {
+ if (info.width <= 0 || info.height <= 0) {
+ throw new IllegalArgumentException("invalid video size: " + info.width + "x" + info.height);
+ }
+ mTextureRender = new TextureRender(info);
+ mTextureRender.setClipMode(clipMode);
+ mTextureRender.surfaceCreated();
+ if (VERBOSE) Log.d(TAG, "textureID=" + mTextureRender.getTextureId());
+ mSurfaceTexture = new SurfaceTexture(mTextureRender.getTextureId());
+ mSurfaceTexture.setOnFrameAvailableListener(this);
+ mSurface = new Surface(mSurfaceTexture);
+ }
+
+ /**
+ * Discard all resources held by this class, notably the EGL context.
+ */
+ public void release() {
+ if (mEGL != null) {
+ if (mEGL.eglGetCurrentContext().equals(mEGLContext)) {
+ // Clear the current context and surface to ensure they are discarded immediately.
+ mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
+ EGL10.EGL_NO_CONTEXT);
+ }
+ mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
+ mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
+ //mEGL.eglTerminate(mEGLDisplay);
+ }
+ mSurface.release();
+ // this causes a bunch of warnings that appear harmless but might confuse someone:
+ // W BufferQueue: [unnamed-3997-2] cancelBuffer: BufferQueue has been abandoned!
+ //mSurfaceTexture.release();
+ // null everything out so future attempts to use this object will cause an NPE
+ mEGLDisplay = null;
+ mEGLContext = null;
+ mEGLSurface = null;
+ mEGL = null;
+ mTextureRender = null;
+ mSurface = null;
+ mSurfaceTexture = null;
+ }
+
+ /**
+ * Makes our EGL context and surface current.
+ */
+ public void makeCurrent() {
+ if (mEGL == null) {
+ throw new RuntimeException("not configured for makeCurrent");
+ }
+ checkEglError("before makeCurrent");
+ if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
+ throw new RuntimeException("eglMakeCurrent failed");
+ }
+ }
+
+ /**
+ * Returns the Surface that we draw onto.
+ */
+ public Surface getSurface() {
+ return mSurface;
+ }
+
+ /**
+ * Replaces the fragment shader.
+ */
+ public void changeFragmentShader(String fragmentShader) {
+ mTextureRender.changeFragmentShader(fragmentShader);
+ }
+
+ /**
+ * Latches the next buffer into the texture. Must be called from the thread that created
+ * the OutputSurface object, after the onFrameAvailable callback has signaled that new
+ * data is available.
+ */
+ public void awaitNewImage() {
+ final int TIMEOUT_MS = 500;
+ synchronized (mFrameSyncObject) {
+ while (!mFrameAvailable) {
+ try {
+ // Wait for onFrameAvailable() to signal us. Use a timeout to avoid
+ // stalling the test if it doesn't arrive.
+ mFrameSyncObject.wait(TIMEOUT_MS);
+ if (!mFrameAvailable) {
+ // TODO: if "spurious wakeup", continue while loop
+ throw new RuntimeException("Surface frame wait timed out");
+ }
+ } catch (InterruptedException ie) {
+ // shouldn't happen
+ throw new RuntimeException(ie);
+ }
+ }
+ mFrameAvailable = false;
+ }
+ // Latch the data.
+ mTextureRender.checkGlError("before updateTexImage");
+ mSurfaceTexture.updateTexImage();
+ }
+
+ /**
+ * Draws the data from SurfaceTexture onto the current EGL surface.
+ */
+ public void drawImage() {
+ mTextureRender.drawFrame(mSurfaceTexture);
+ }
+
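+ // Called on the thread that owns the SurfaceTexture's Handler (see setup());
+ // we only record the flag here and wake any thread blocked in awaitNewImage().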
+ @Override
+ public void onFrameAvailable(SurfaceTexture st) {
+ if (VERBOSE) Log.d(TAG, "new frame available");
+ synchronized (mFrameSyncObject) {
+ if (mFrameAvailable) {
+ throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
+ }
+ mFrameAvailable = true;
+ mFrameSyncObject.notifyAll();
+ }
+ }
+
+ /**
+ * Checks for EGL errors.
+ */
+ private void checkEglError(String msg) {
+ boolean failed = false;
+ int error;
+ while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
+ Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
+ failed = true;
+ }
+ if (failed) {
+ throw new RuntimeException("EGL error encountered (see log)");
+ }
+ }
+
+ public void onVideoSizeChanged(VideoInfo info) {
+ mTextureRender.onVideoSizeChanged(info);
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/example/cj/videoeditor/mediacodec/TextureRender.java b/app/src/main/java/com/example/cj/videoeditor/mediacodec/TextureRender.java
new file mode 100644
index 0000000..b860b69
--- /dev/null
+++ b/app/src/main/java/com/example/cj/videoeditor/mediacodec/TextureRender.java
@@ -0,0 +1,397 @@
+package com.example.cj.videoeditor.mediacodec;
+
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import android.graphics.SurfaceTexture;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.util.Log;
+
+
+import com.example.cj.videoeditor.Constants;
+import com.example.cj.videoeditor.MyApplication;
+import com.example.cj.videoeditor.filter.AFilter;
+import com.example.cj.videoeditor.filter.NoFilter;
+import com.example.cj.videoeditor.filter.RotationOESFilter;
+import com.example.cj.videoeditor.gpufilter.basefilter.GPUImageFilter;
+import com.example.cj.videoeditor.media.VideoInfo;
+import com.example.cj.videoeditor.utils.EasyGlUtils;
+import com.example.cj.videoeditor.utils.MatrixUtils;
+import com.example.cj.videoeditor.utils.OpenGlUtils;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+/**
+ * Code for rendering a texture onto a surface using OpenGL ES 2.0.
+ */
+class TextureRender {
+ private static final String TAG = "TextureRender";
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+ private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+
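+ // Full-screen quad, drawn as a triangle strip; the V coordinates are flipped so
+ // the video's top-left origin maps onto GL's bottom-left origin.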
+ private final float[] mTriangleVerticesData = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 1.f,
+ 1.0f, -1.0f, 0, 1.f, 1.f,
+ -1.0f, 1.0f, 0, 0.f, 0.f,
+ 1.0f, 1.0f, 0, 1.f, 0.f,
+ };
+ private FloatBuffer mTriangleVertices;
+
+ private float[] mMVPMatrix = new float[16];
+ private float[] mSTMatrix = new float[16];
+ private int mProgram;
+ private int mTextureID = -12345;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+
+ //======================clip========================
+ float[] SM = new float[16]; // transform matrix used for on-screen display
+ boolean isClipMode;
+ int curMode;
+ int clipViewWidth;
+ int clipViewHeight;
+ int clipEncodeWidth;
+ int clipEncodeHeight;
+
+
+ //======================zoom========================
+ // framebuffer used for the offscreen passes
+ private int[] fFrame = new int[1];
+ private int[] fTexture = new int[2];
+ AFilter mShow;
+ RotationOESFilter rotationFilter;
+ GPUImageFilter mGpuFilter;
+ // width/height of the first video segment (after rotation)
+ int viewWidth;
+ int viewHeight;
+ // width/height of the current video (after rotation)
+ int videoWidth;
+ int videoHeight;
+ // final displayed width/height
+ int width;
+ int height;
+ int x;
+ int y;
+ boolean videoChanged = false;
+ // info of the first video segment
+ VideoInfo info;
+
+ public TextureRender(VideoInfo info) {
+ this.info = info;
+ mTriangleVertices = ByteBuffer.allocateDirect(
+ mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mTriangleVertices.put(mTriangleVerticesData).position(0);
+ Matrix.setIdentityM(mSTMatrix, 0);
+ mShow = new NoFilter(MyApplication.getContext().getResources());
+ mShow.setMatrix(MatrixUtils.flip(MatrixUtils.getOriginalMatrix(), false, true));
+ rotationFilter = new RotationOESFilter(MyApplication.getContext().getResources());
+ }
+
+ public int getTextureId() {
+ return mTextureID;
+ }
+
+ public void drawFrame(SurfaceTexture st) {
+ if (!isClipMode) {
+ zoomDraw(st);
+ } else {
+ clipDraw(st);
+ }
+ }
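+ /**
+ * Clip-mode draw: pass 1 renders the decoder's OES texture into fTexture[0]
+ * via the shared FBO, applying the rotation and the clip show-matrix; pass 2
+ * (only if a GPU filter is set) filters into fTexture[1]; the final pass draws
+ * the result to the current surface at the encode size.
+ */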
+ public void clipDraw(SurfaceTexture st) {
+ EasyGlUtils.bindFrameTexture(fFrame[0], fTexture[0]);
+ GLES20.glViewport(0, 0, clipViewWidth, clipViewHeight);
+ rotationFilter.draw();
+ EasyGlUtils.unBindFrameBuffer();
+
+ if (mGpuFilter != null) {
+ EasyGlUtils.bindFrameTexture(fFrame[0], fTexture[1]);
+ mGpuFilter.onDrawFrame(fTexture[0]);
+ EasyGlUtils.unBindFrameBuffer();
+ }
+ GLES20.glViewport(0, 0, clipEncodeWidth, clipEncodeHeight);
+ mShow.setTextureId(fTexture[mGpuFilter == null ? 0 : 1]);
+ mShow.draw();
+ GLES20.glFinish();
+ }
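+ /**
+ * Zoom-mode draw: same FBO pipeline as clipDraw, but if the video size changed
+ * mid-stream the viewport is repositioned (see adjustVideoPosition) so the new
+ * segment is letterboxed inside the first segment's frame.
+ */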
+ public void zoomDraw(SurfaceTexture st) {
+ EasyGlUtils.bindFrameTexture(fFrame[0], fTexture[0]);
+ GLES20.glViewport(0, 0, viewWidth, viewHeight);
+ rotationFilter.draw();
+ EasyGlUtils.unBindFrameBuffer();
+
+ if (mGpuFilter != null) {
+ EasyGlUtils.bindFrameTexture(fFrame[0], fTexture[1]);
+ mGpuFilter.onDrawFrame(fTexture[0]);
+ EasyGlUtils.unBindFrameBuffer();
+ }
+
+ if (videoChanged) {
+ GLES20.glViewport(x, y, width, height);
+ }
+
+ mShow.setTextureId(fTexture[mGpuFilter == null ? 0 : 1]);
+ mShow.draw();
+ GLES20.glFinish();
+ }
+
+ public void preDraw(SurfaceTexture st) {
+ checkGlError("onDrawFrame start");
+// st.getTransformMatrix(mSTMatrix); // when the video's rotation angle is non-zero, this call makes the output orientation wrong
+ GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+ GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+ Matrix.setIdentityM(mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ checkGlError("glDrawArrays");
+ }
+
+ /**
+ * Initializes GL state. Call this after the EGL surface has been created and made current.
+ */
+ public void surfaceCreated() {
+ mProgram = createProgram(OpenGlUtils.uRes("shader/base_record_vertex.sh"),
+ OpenGlUtils.uRes("shader/base_record_fragment"));
+ if (mProgram == 0) {
+ throw new RuntimeException("failed creating program");
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aTextureCoord");
+ }
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+ }
+ muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+ checkGlError("glGetUniformLocation uSTMatrix");
+ if (muSTMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uSTMatrix");
+ }
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+ mTextureID = textures[0];
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ checkGlError("glBindTexture mTextureID");
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_LINEAR); // linear filtering makes the image smoother (anti-aliasing)
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+
+ mShow.create();
+ rotationFilter.create();
+ rotationFilter.setTextureId(mTextureID);
+ GLES20.glGenFramebuffers(1, fFrame, 0);
+
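+ // One FBO shared by both passes, with two color textures: fTexture[0] holds
+ // the rotated frame, fTexture[1] the (optional) filtered frame.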
+ if (!isClipMode) {
+ if (info.rotation == 0 || info.rotation == 180) {
+ EasyGlUtils.genTexturesWithParameter(2, fTexture, 0, GLES20.GL_RGBA, info.width, info.height);
+ viewWidth = info.width;
+ viewHeight = info.height;
+ } else {
+ EasyGlUtils.genTexturesWithParameter(2, fTexture, 0, GLES20.GL_RGBA, info.height, info.width);
+ viewWidth = info.height;
+ viewHeight = info.width;
+ }
+ } else {
+ switch (curMode) {
+ case Constants.MODE_POR_9_16:
+ clipViewWidth = Constants.mode_por_width_9_16;
+ clipViewHeight = Constants.mode_por_height_9_16;
+ clipEncodeWidth = Constants.mode_por_encode_width_9_16;
+ clipEncodeHeight = Constants.mode_por_encode_height_9_16;
+ break;
+ case Constants.MODE_POR_1_1:
+ clipViewWidth = Constants.mode_por_width_1_1;
+ clipViewHeight = Constants.mode_por_height_1_1;
+ clipEncodeWidth = Constants.mode_por_encode_width_1_1;
+ clipEncodeHeight = Constants.mode_por_encode_height_1_1;
+ break;
+ case Constants.MODE_POR_16_9:
+ clipViewWidth = Constants.mode_por_width_16_9;
+ clipViewHeight = Constants.mode_por_height_16_9;
+ clipEncodeWidth = Constants.mode_por_encode_width_16_9;
+ clipEncodeHeight = Constants.mode_por_encode_height_16_9;
+ break;
+ }
+ EasyGlUtils.genTexturesWithParameter(2, fTexture, 0, GLES20.GL_RGBA, clipViewWidth, clipViewHeight);
+ if (info.rotation == 0 || info.rotation == 180) {
+ MatrixUtils.getShowMatrix(SM, info.width, info.height, clipViewWidth, clipViewHeight);
+ } else {
+ MatrixUtils.getShowMatrix(SM, info.height, info.width, clipViewWidth, clipViewHeight);
+ }
+ rotationFilter.setMatrix(SM);
+ }
+ rotationFilter.setRotation(info.rotation);
+ }
+
+ /**
+ * Replaces the fragment shader.
+ */
+ public void changeFragmentShader(String fragmentShader) {
+ GLES20.glDeleteProgram(mProgram);
+ mProgram = createProgram(OpenGlUtils.uRes("shader/base_record_vertex.sh"), fragmentShader);
+ if (mProgram == 0) {
+ throw new RuntimeException("failed creating program");
+ }
+ }
+
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ return shader;
+ }
+
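+ /** Compiles and links a vertex/fragment shader pair; returns 0 on failure. */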
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+ if (program == 0) {
+ Log.e(TAG, "Could not create program");
+ return 0;
+ }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ return program;
+ }
+
+ public void checkGlError(String op) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(TAG, op + ": glError " + error);
+ throw new RuntimeException(op + ": glError " + error);
+ }
+ }
+
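+ /**
+ * Swaps in a new GPU filter, destroying any previous one. Display/input sizes
+ * are taken from the first segment's VideoInfo.
+ */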
+ public void addGpuFilter(GPUImageFilter filter) {
+ if (mGpuFilter != null) {
+ mGpuFilter.destroy();
+ }
+ mGpuFilter = filter;
+ if (filter != null) {
+ mGpuFilter.init();
+ mGpuFilter.onDisplaySizeChanged(info.width, info.height);
+ mGpuFilter.onInputSizeChanged(info.width, info.height);
+ }
+ }
+
+ public void onVideoSizeChanged(VideoInfo info) {
+ setVideoWidthAndHeight(info);
+ adjustVideoPosition();
+ videoChanged = true;
+ }
+
+ public void setVideoWidthAndHeight(VideoInfo info) {
+ rotationFilter.setRotation(info.rotation);
+ if (info.rotation == 0 || info.rotation == 180) {
+ this.videoWidth = info.width;
+ this.videoHeight = info.height;
+ } else {
+ this.videoWidth = info.height;
+ this.videoHeight = info.width;
+ }
+ }
+
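+ /**
+ * Aspect-fit: scales the (rotated) current video to fit inside the first
+ * segment's viewport, centering it via the x/y letterbox offsets.
+ */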
+ private void adjustVideoPosition() {
+ float w = (float) viewWidth / videoWidth;
+ float h = (float) viewHeight / videoHeight;
+ if (w < h) {
+ width = viewWidth;
+ height = (int) ((float) videoHeight * w);
+ } else {
+ width = (int) ((float) videoWidth * h);
+ height = viewHeight;
+ }
+ x = (viewWidth - width) / 2;
+ y = (viewHeight - height) / 2;
+ }
+
+ /**
+ * Puts the renderer into clip mode.
+ * @param curMode one of the Constants.MODE_POR_* aspect-ratio modes
+ */
+ public void setClipMode(int curMode) {
+ isClipMode = true;
+ this.curMode = curMode;
+ }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/example/cj/videoeditor/mediacodec/VideoClipper.java b/app/src/main/java/com/example/cj/videoeditor/mediacodec/VideoClipper.java
index 252c2d1..cf354ec 100644
--- a/app/src/main/java/com/example/cj/videoeditor/mediacodec/VideoClipper.java
+++ b/app/src/main/java/com/example/cj/videoeditor/mediacodec/VideoClipper.java
@@ -32,38 +32,39 @@ public class VideoClipper {
private String mInputVideoPath;
private String mOutputVideoPath;
- MediaCodec videoDecoder;
- MediaCodec videoEncoder;
- MediaCodec audioDecoder;
- MediaCodec audioEncoder;
-
- MediaExtractor mVideoExtractor;
- MediaExtractor mAudioExtractor;
- MediaMuxer mMediaMuxer;
- static ExecutorService executorService = Executors.newFixedThreadPool(4);
- int muxVideoTrack = -1;
- int muxAudioTrack = -1;
- int videoTrackIndex = -1;
- int audioTrackIndex = -1;
- long startPosition;
- long clipDur;
- int videoWidth;
- int videoHeight;
- int videoRotation;
- OutputSurface outputSurface = null;
- InputSurface inputSurface = null;
- MediaFormat videoFormat;
- MediaFormat audioFormat;
- GPUImageFilter mFilter;
- boolean isOpenBeauty;
- boolean videoFinish = false;
- boolean audioFinish = false;
- boolean released = false;
- long before;
- long after;
- Object lock = new Object();
- boolean muxStarted = false;
- OnVideoCutFinishListener listener;
+ private MediaCodec videoDecoder;
+ private MediaCodec videoDecoder2;
+ private MediaCodec videoEncoder;
+ private MediaCodec audioDecoder;
+ private MediaCodec audioEncoder;
+
+ private MediaExtractor mVideoExtractor;
+ private MediaExtractor mAudioExtractor;
+ private MediaMuxer mMediaMuxer;
+ private static ExecutorService executorService = Executors.newFixedThreadPool(4);
+ private int muxVideoTrack = -1;
+ private int muxAudioTrack = -1;
+ private int videoTrackIndex = -1;
+ private int audioTrackIndex = -1;
+ private long startPosition;
+ private long clipDur;
+ private int videoWidth;
+ private int videoHeight;
+ private int videoRotation;
+ private OutputSurface outputSurface = null;
+ private InputSurface inputSurface = null;
+ private MediaFormat videoFormat;
+ private MediaFormat audioFormat;
+ private GPUImageFilter mFilter;
+ private boolean isOpenBeauty;
+ private boolean videoFinish = false;
+ private boolean audioFinish = false;
+ private boolean released = false;
+ private long before;
+ private long after;
+ private Object lock = new Object();
+ private boolean muxStarted = false;
+ private OnVideoCutFinishListener listener;
// initialize the audio and video decoders and encoders
public VideoClipper() {
@@ -90,16 +91,18 @@ public void setOutputVideoPath(String outputPath) {
public void setOnVideoCutFinishListener(OnVideoCutFinishListener listener) {
this.listener = listener;
}
+
/**
* Sets the filter.
- * */
+ */
public void setFilter(GPUImageFilter filter) {
- if (filter == null ) {
+ if (filter == null) {
mFilter = null;
return;
}
mFilter = filter;
}
+
public void setFilterType(MagicFilterType type) {
if (type == null || type == MagicFilterType.NONE) {
mFilter = null;
@@ -110,8 +113,8 @@ public void setFilterType(MagicFilterType type) {
/**
* Enables the beauty (skin-smoothing) filter.
- * */
- public void showBeauty(){
+ */
+ public void showBeauty() {
isOpenBeauty = true;
}
@@ -157,7 +160,7 @@ public void run() {
long firstVideoTime = mVideoExtractor.getSampleTime();
mVideoExtractor.seekTo(firstVideoTime + startPosition, SEEK_TO_PREVIOUS_SYNC);
-
+ Log.e("hero","_____videoCliper------run");
initVideoCodec();// unified handling for now, in preparation for audio sample-rate conversion
startVideoCodec(videoDecoder, videoEncoder, mVideoExtractor, inputSurface, outputSurface, firstVideoTime, startPosition, clipDur);
@@ -210,10 +213,10 @@ private void startAudioCodec(MediaCodec decoder, MediaCodec encoder, MediaExtrac
boolean inputDone = false;
boolean decodeDone = false;
extractor.seekTo(firstSampleTime + startPosition, SEEK_TO_PREVIOUS_SYNC);
- int decodeinput=0;
- int encodeinput=0;
- int encodeoutput=0;
- long lastEncodeOutputTimeStamp=-1;
+ int decodeinput = 0;
+ int encodeinput = 0;
+ int encodeoutput = 0;
+ long lastEncodeOutputTimeStamp = -1;
while (!done) {
if (!inputDone) {
int inputIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
@@ -225,7 +228,7 @@ private void startAudioCodec(MediaCodec decoder, MediaCodec encoder, MediaExtrac
if ((dur < duration) && readSampleData > 0) {
decoder.queueInputBuffer(inputIndex, 0, readSampleData, extractor.getSampleTime(), 0);
decodeinput++;
- System.out.println("videoCliper audio decodeinput"+decodeinput+" dataSize"+readSampleData+" sampeTime"+extractor.getSampleTime());
+ System.out.println("videoCliper audio decodeinput" + decodeinput + " dataSize" + readSampleData + " sampeTime" + extractor.getSampleTime());
extractor.advance();
} else {
decoder.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
@@ -247,11 +250,16 @@ private void startAudioCodec(MediaCodec decoder, MediaCodec encoder, MediaExtrac
} else {
boolean canEncode = (info.size != 0 && info.presentationTimeUs - firstSampleTime > startPosition);
boolean endOfStream = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
- if (canEncode&&!endOfStream) {
- ByteBuffer decoderOutputBuffer = decoderOutputBuffers[index];
+ if (canEncode && !endOfStream) {
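+ // ByteBuffer[] getOutputBuffers() is deprecated on API 21+; prefer
+ // getOutputBuffer(index), which returns the buffer for one index at a time.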
+ ByteBuffer decoderOutputBuffer;
+ if (Build.VERSION.SDK_INT >= 21) {
+ decoderOutputBuffer = decoder.getOutputBuffer(index);
+ } else {
+ decoderOutputBuffer = decoderOutputBuffers[index];
+ }
int encodeInputIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
- if(encodeInputIndex>=0){
+ if (encodeInputIndex >= 0) {
ByteBuffer encoderInputBuffer = encoderInputBuffers[encodeInputIndex];
encoderInputBuffer.clear();
if (info.size < 4096) {// this looks like 16-bit mono to 16-bit stereo conversion
@@ -269,16 +277,16 @@ private void startAudioCodec(MediaCodec decoder, MediaCodec encoder, MediaExtrac
encoderInputBuffer.put(stereoBytes);
encoder.queueInputBuffer(encodeInputIndex, 0, stereoBytes.length, info.presentationTimeUs, 0);
encodeinput++;
- System.out.println("videoCliper audio encodeInput"+encodeinput+" dataSize"+info.size+" sampeTime"+info.presentationTimeUs);
- }else{
+ System.out.println("videoCliper audio encodeInput" + encodeinput + " dataSize" + info.size + " sampeTime" + info.presentationTimeUs);
+ } else {
encoderInputBuffer.put(decoderOutputBuffer);
encoder.queueInputBuffer(encodeInputIndex, info.offset, info.size, info.presentationTimeUs, 0);
encodeinput++;
- System.out.println("videoCliper audio encodeInput"+encodeinput+" dataSize"+info.size+" sampeTime"+info.presentationTimeUs);
+ System.out.println("videoCliper audio encodeInput" + encodeinput + " dataSize" + info.size + " sampeTime" + info.presentationTimeUs);
}
}
}
- if(endOfStream){
+ if (endOfStream) {
int encodeInputIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
encoder.queueInputBuffer(encodeInputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
System.out.println("videoCliper audio encodeInput end");
@@ -309,12 +317,12 @@ private void startAudioCodec(MediaCodec decoder, MediaCodec encoder, MediaExtrac
if (outputInfo.presentationTimeUs == 0 && !done) {
continue;
}
- if (outputInfo.size != 0&&outputInfo.presentationTimeUs>0) {
+ if (outputInfo.size != 0 && outputInfo.presentationTimeUs > 0) {
/*encodedData.position(outputInfo.offset);
encodedData.limit(outputInfo.offset + outputInfo.size);*/
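+ // Wait until the muxer has been started before writing encoded audio samples.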
- if(!muxStarted){
- synchronized (lock){
- if(!muxStarted){
+ if (!muxStarted) {
+ synchronized (lock) {
+ if (!muxStarted) {
try {
lock.wait();
} catch (InterruptedException e) {
@@ -323,11 +331,11 @@ private void startAudioCodec(MediaCodec decoder, MediaCodec encoder, MediaExtrac
}
}
}
- if(outputInfo.presentationTimeUs>lastEncodeOutputTimeStamp){//为了避免有问题的数据
+ if (outputInfo.presentationTimeUs > lastEncodeOutputTimeStamp) {// skip non-increasing timestamps to avoid bad data
encodeoutput++;
- System.out.println("videoCliper audio encodeOutput"+encodeoutput+" dataSize"+outputInfo.size+" sampeTime"+outputInfo.presentationTimeUs);
+ System.out.println("videoCliper audio encodeOutput" + encodeoutput + " dataSize" + outputInfo.size + " sampeTime" + outputInfo.presentationTimeUs);
mMediaMuxer.writeSampleData(muxAudioTrack, encodedData, outputInfo);
- lastEncodeOutputTimeStamp=outputInfo.presentationTimeUs;
+ lastEncodeOutputTimeStamp = outputInfo.presentationTimeUs;
}
}
@@ -343,10 +351,19 @@ private void startAudioCodec(MediaCodec decoder, MediaCodec encoder, MediaExtrac
private void initVideoCodec() {
// do not compress the video
- int encodeW = videoWidth;
- int encodeH = videoHeight;
+ VideoInfo info = new VideoInfo();
+ info.width = videoWidth;
+ info.height = videoHeight;
+ info.rotation = videoRotation;
+
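+ // For 90/270-degree sources the renderer outputs a rotated frame, so the
+ // encoder's width and height must be swapped to match.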
+ MediaFormat mediaFormat;
+ if (info.rotation == 0 || info.rotation == 180) {
+ mediaFormat = MediaFormat.createVideoFormat("video/avc", info.width, info.height);
+ } else {
+ mediaFormat = MediaFormat.createVideoFormat("video/avc", info.height, info.width);
+ }
// set the video encoding parameters
- MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", encodeW, encodeH);
+
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 3000000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
@@ -356,16 +373,11 @@ private void initVideoCodec() {
inputSurface.makeCurrent();
videoEncoder.start();
-
- VideoInfo info = new VideoInfo();
- info.width = videoWidth;
- info.height = videoHeight;
- info.rotation = videoRotation;
outputSurface = new OutputSurface(info);
- outputSurface.isBeauty(isOpenBeauty);
+// outputSurface.isBeauty(isOpenBeauty);
if (mFilter != null) {
- Log.e("hero","---gpuFilter 不为null哟----设置进outputSurface里面");
+ Log.e("hero", "---gpuFilter 不为null哟----设置进outputSurface里面");
outputSurface.addGpuFilter(mFilter);
}
@@ -462,9 +474,9 @@ private void startVideoCodec(MediaCodec decoder, MediaCodec encoder, MediaExtrac
if (outputInfo.size != 0) {
encodedData.position(outputInfo.offset);
encodedData.limit(outputInfo.offset + outputInfo.size);
- if(!muxStarted){
- synchronized (lock){
- if(!muxStarted){
+ if (!muxStarted) {
+ synchronized (lock) {
+ if (!muxStarted) {
try {
lock.wait();
} catch (InterruptedException e) {
diff --git a/app/src/main/java/com/example/cj/videoeditor/mediacodec/VideoRunnable.java b/app/src/main/java/com/example/cj/videoeditor/mediacodec/VideoRunnable.java
new file mode 100644
index 0000000..881e0f9
--- /dev/null
+++ b/app/src/main/java/com/example/cj/videoeditor/mediacodec/VideoRunnable.java
@@ -0,0 +1,368 @@
+package com.example.cj.videoeditor.mediacodec;
+
+import android.annotation.TargetApi;
+import android.media.MediaCodec;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+
+
+import com.example.cj.videoeditor.media.MediaCodecInfo;
+import com.example.cj.videoeditor.media.VideoInfo;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Created by cj on 2017/6/30.
+ * desc: video encode/decode thread
+ */
+@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
+public class VideoRunnable extends Thread {
+ private static final String MIME_TYPE = "video/avc";
+ private static final int bitRate = 3000000; // video encoding bit rate
+ private static final int frameRate = 30; // video encoding frame rate
+ private static final int frameInterval = 1; // presumably the I-frame interval in seconds (KEY_I_FRAME_INTERVAL)
+ private MediaMuxerRunnable mMediaMuxer;
+
+// private GPUImageFilter filter;
+
+ private MediaFormat videoOutputFormat;
+
+ // handles multiple video segments
+ private List