2020-07-24

一、美顏類框架以及常見問題總結(jié)

1. 美顏預覽流程

(1)在相機重構(gòu)的時候,需要將美顏合入mtk的架構(gòu),為了代碼可讀性以及降低耦合性,摒棄了之前老相機的做法,將美顏單獨作為一個模式,而不是和普通的拍照模式混在一起。這樣就需要參照mtk的代碼結(jié)構(gòu),為美顏單獨制定一個預覽容器的管理類,即EffectViewController.java, 其具體的方法都是mtk的結(jié)構(gòu),只是把預覽容器替換為了我們美顏的。

host/src/com/freeme/camera/ui/CameraAppUI.java

    // Creates the three preview managers used by the app — normal (TextureView),
    // beauty-face view, and the effect (byted beauty) view — wires touch
    // listeners to each, and selects the normal one as the initial manager.
    public void onCreate() {
        ...

        //mPreviewManager = new PreviewManager(mApp);
        //Set gesture listener to receive touch event.
        //mPreviewManager.setOnTouchListener(new OnTouchListenerImpl());
        mNormalPreviewManager = new PreviewManager(mApp, true, false);
        mBeautyFacePreviewManager = new PreviewManager(mApp, false, false);
        // Beauty-effect preview container (see PreviewManager ctor flags).
        mEffectPreviewManager = new PreviewManager(mApp, false, true);
        mNormalPreviewManager.setOnTouchListener(new OnTouchListenerImpl());
        mBeautyFacePreviewManager.setOnTouchListener(new OnTouchListenerImpl());
        mEffectPreviewManager.setOnTouchListener(new OnTouchListenerImpl());
        mPreviewManager = mNormalPreviewManager;

        ...
    }

host/src/com/freeme/camera/ui/preview/PreviewManager.java

    /**
     * Chooses the preview controller implementation from the two flags:
     * TextureViewController when isTextureView, EffectViewController when
     * isEffectView, and BeautyFaceViewController otherwise.
     */
    public PreviewManager(IApp app, boolean isTextureView, boolean isEffectView) {
        ...

        //if (enabledValue == SURFACEVIEW_ENABLED_VALUE || appVersion == DEFAULT_APP_VERSION) {
        if (isTextureView) {
            mPreviewController = new TextureViewController(app);
        } else if (isEffectView) {
            mPreviewController = new EffectViewController(app);
        } else {
            mPreviewController = new BeautyFaceViewController(app);
        }

        ...
    }

(2)美顏預覽容器管理流程

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/EffectMode.java

    // Mode resume entry point: opens the camera asynchronously (sync=false).
    public void resume(@Nonnull DeviceUsage deviceUsage) {
        ...
        prepareAndOpenCamera(false, mCameraId, false, false);

        ...
    }

    // Builds the open-camera request info (elided) and forwards it to the
    // device controller.
    private void prepareAndOpenCamera(boolean needOpenCameraSync, String cameraId,
                                      boolean needFastStartPreview, boolean isFromSelectedCamera) {
        ..
        mIDeviceController.openCamera(info);
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/device/EffectDevice2Controller.java

    /**
     * Opens the camera identified by {@code mCurrentCameraId} through the
     * device manager.
     *
     * @param sync true to open synchronously (blocking), false to open
     *             asynchronously; results arrive via {@code mDeviceCallback}.
     * @throws CameraOpenException if the open request fails.
     */
    private void doOpenCamera(boolean sync) throws CameraOpenException {
        if (!sync) {
            mCameraDeviceManager.openCamera(mCurrentCameraId, mDeviceCallback, null);
        } else {
            mCameraDeviceManager.openCameraSync(mCurrentCameraId, mDeviceCallback, null);
        }
    }

    // Camera2 device state callback. onOpened bounces the opened proxy onto
    // the mode handler so the rest of the open flow runs on the mode thread.
    public class DeviceStateCallback extends Camera2Proxy.StateCallback {

        @Override
        public void onOpened(@Nonnull Camera2Proxy camera2proxy) {
            mModeHandler.obtainMessage(MSG_DEVICE_ON_CAMERA_OPENED,
                    camera2proxy).sendToTarget();
        }
       ...
    }

    // Mode-thread handler: dispatches device events posted by DeviceStateCallback.
    private class ModeHandler extends Handler {
        ...
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case MSG_DEVICE_ON_CAMERA_OPENED:
                    // msg.obj carries the opened Camera2Proxy.
                    doCameraOpened((Camera2Proxy) msg.obj);
                    break;
                default:
                    break;
            }
        }
    }
    // Runs on the mode thread after the device opens. Proceeds only when we
    // are still in CAMERA_OPENING state and the opened device matches the
    // currently requested camera id, then publishes the chosen preview size
    // through mPreviewSizeCallback.
    public void doCameraOpened(@Nonnull Camera2Proxy camera2proxy) {
        try {
            if (CameraState.CAMERA_OPENING == getCameraState()
                    && camera2proxy != null && camera2proxy.getId().equals(mCurrentCameraId)) {
                ...
                if (mPreviewSizeCallback != null) {
                        mPreviewSizeCallback.onPreviewSizeReady(new Size(mPreviewWidth,
                                mPreviewHeight));
                }
                ...
            }
        } catch (RuntimeException e) {
            // NOTE(review): failures are only printed; the open flow is
            // abandoned silently here.
            e.printStackTrace();
        }
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/EffectMode.java

    // Device controller callback with the preview size chosen at open time;
    // simply forwards to the size-update logic.
    public void onPreviewSizeReady(Size previewSize) {
        updatePictureSizeAndPreviewSize(previewSize);
    }

    // Reacts to a new preview size only while the mode is resumed and only
    // when the size actually changed.
    private void updatePictureSizeAndPreviewSize(Size previewSize) {
        ...
        if (size != null && mIsResumed) {
            ...
            if (width != mPreviewWidth || height != mPreviewHeight) {
                onPreviewSizeChanged(width, height);
            }
        }
    }

    private void onPreviewSizeChanged(int width, int height) {
        ...
        // Note: arguments are passed swapped — the UI receives (height, width).
        mIApp.getAppUi().setPreviewSize(mPreviewHeight, mPreviewWidth, mISurfaceStatusListener);
        ...
    }

host/src/com/freeme/camera/ui/CameraAppUI.java

    // Forwards the preview size to the currently active PreviewManager,
    // hopping to the UI thread first (view sizing must happen there).
    public void setPreviewSize(final int width, final int height,
                               final ISurfaceStatusListener listener) {
        mApp.getActivity().runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mPreviewManager.updatePreviewSize(width, height, listener);
                ...
            }
        });
    }

host/src/com/freeme/camera/ui/preview/PreviewManager.java

    // Delegates the size update to whichever preview controller this manager
    // was constructed with (texture / effect / beauty-face).
    public void updatePreviewSize(int width, int height, ISurfaceStatusListener listener) {
        ...
        if (mPreviewController != null) {
            mPreviewController.updatePreviewSize(width, height, listener);
        }
    }

host/src/com/freeme/camera/ui/preview/EffectViewController.java

    // Effect-view variant: if the requested size is unchanged and the surface
    // already exists, immediately hand the EffectView's SurfaceTexture back to
    // the listener instead of recreating anything.
    public void updatePreviewSize(int width, int height, ISurfaceStatusListener listener) {
        if (mPreviewWidth == width && mPreviewHeight == height) {
            ...
            if (mIsSurfaceCreated) {
                if (listener != null) {
                    ...
                    // Hand out the preview container (the EffectView's
                    // SurfaceTexture). Dimensions are passed swapped:
                    // (mPreviewHeight, mPreviewWidth).
                    listener.surfaceAvailable(((CameraActivity) mApp.getActivity()).getEffectView().getSurfaceTexture(),
                            mPreviewHeight, mPreviewWidth);
                }
            }
            return;
        }
        ...
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/EffectMode.java

    // Receives surface availability from the preview controller, hops onto the
    // mode thread, and pushes the surface into the device controller while the
    // mode is still resumed.
    private class SurfaceChangeListener implements ISurfaceStatusListener {

        public void surfaceAvailable(Object surfaceObject, int width, int height) {
            if (mModeHandler != null) {
                mModeHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        if (mIDeviceController != null && mIsResumed) {
                            mIDeviceController.updatePreviewSurface(surfaceObject);
                        }
                    }
                });
            }
        }

        ...
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/device/EffectDevice2Controller.java

    /**
     * Installs (or clears) the preview output surface for the capture session.
     * Accepts either a SurfaceHolder or a SurfaceTexture; a null argument
     * tears the preview down. Mirrors the mtk deferred-surface flow: when the
     * surface arrives after the session was configured, it is added to the
     * first OutputConfiguration and finalized instead of rebuilding the session.
     */
    public void updatePreviewSurface(Object surfaceObject) {
        synchronized (mSurfaceHolderSync) {
            // instanceof is false for null, so the casts below are always safe.
            // (The previous "surfaceObject == null ? null : ..." guards inside
            // these branches were unreachable dead code and have been removed.)
            if (surfaceObject instanceof SurfaceHolder) {
                mPreviewSurface = ((SurfaceHolder) surfaceObject).getSurface();
            } else if (surfaceObject instanceof SurfaceTexture) {
                mPreviewSurface = new Surface((SurfaceTexture) surfaceObject);
            }
            boolean isStateReady = CameraState.CAMERA_OPENED == mCameraState;
            if (isStateReady && mCamera2Proxy != null) {
                // True when a surface arrives for the first time (nothing set before).
                boolean onlySetSurface = mSurfaceObject == null && surfaceObject != null;
                mSurfaceObject = surfaceObject;
                if (surfaceObject == null) {
                    // Surface removed: stop streaming preview frames.
                    stopPreview();
                } else if (onlySetSurface && mNeedSubSectionInitSetting) {
                    // Deferred-surface path: attach the surface to the already
                    // built output configuration and finalize it.
                    mOutputConfigs.get(0).addSurface(mPreviewSurface);
                    if (mSession != null) {
                        mSession.finalizeOutputConfigurations(mOutputConfigs);
                        mNeedFinalizeOutput = false;
                        if (CameraState.CAMERA_OPENED == getCameraState()) {
                            // Start preview, apply stage-2 settings, then restart
                            // so the new settings take effect.
                            repeatingPreview(false);
                            configSettingsByStage2();
                            repeatingPreview(false);
                        }
                    } else {
                        // Session not ready yet; finalize later when it is created.
                        mNeedFinalizeOutput = true;
                    }
                } else {
                    // Surface replaced: rebuild the capture session.
                    configureSession(false);
                }
            }
        }
    }
//后續(xù),美顏預覽容器surfaceTexture,完全按照mtk的代碼結(jié)構(gòu)進行管理。

(3)美顏效果繪制流程

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/byted/EffectView.java

    /**
     * GL render callback: pulls the latest camera frame, runs it through the
     * ByteDance effect pipeline, optionally notifies the video encoder, then
     * draws the processed texture to the preview surface.
     */
    public void onDrawFrame(GL10 gl) {
        if (mCameraChanging || mIsPaused) {
            return;
        }
        // Update the texture image to the most recent frame in the stream.
        mSurfaceTexture.updateTexImage();
        if(mPauseed){
            // Paused: clear to transparent black and skip processing. Note the
            // frame was still consumed by updateTexImage() above.
            GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
            return;
        }
        // Clear the color/depth buffers before drawing.
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        BytedEffectConstants.Rotation rotation = OrientationSensor.getOrientation();
        // Run the ByteDance beauty algorithm on the camera texture; dstTexture
        // holds the processed result.
        dstTexture = mEffectRenderHelper.processTexture(mSurfaceTextureID, rotation, getSurfaceTimeStamp());

        synchronized (this) {
            if (mVideoEncoder != null) {
                // Beauty-video recording: signal that a frame is ready to encode.
                mVideoEncoder.frameAvailableSoon();
            }
        }

        if (dstTexture != ShaderHelper.NO_TEXTURE) {
            // Draw the processed texture to the preview.
            mEffectRenderHelper.drawFrame(dstTexture);
        }
        mFrameRator.addFrameStamp();
    }

(4)常見問題及常用解決思路

  • 問題:
  1. 美顏預覽卡住,停留在上一個模式的最后一幀:因為美顏與普通模式用的預覽容器不同,在模式切換的時候,容器沒有正常的切換以及顯示。可以用AndroidStudio的布局查看器,查看effectview是否正常顯示
  2. 往美顏模式切換,預覽閃黑:這個問題的根本原因就是美顏和普通模式預覽容器不同,所以在模式切換之間加了動畫。
  3. 美顏模式下,切換攝像頭,預覽閃黑:這個問題需要調(diào)整EffectView.setCameraId 和 EffectView.setPauseed 在mtk代碼結(jié)構(gòu)里面的位置,這兩個方法的初衷就是為了在切換攝像頭的時候,停止繪制,否則會出現(xiàn)倒幀等現(xiàn)象。 就目前而言,效果可以接受。
  4. 其他的瑣碎的問題,例如美顏效果控制面板等問題,就不做介紹了,普通界面問題,好改。

2. 基于字節(jié)跳動sdk開發(fā)的美視功能介紹

(1)思路:http://www.itdecent.cn/p/9dc03b01bae3 參考這位大神的的思路,很詳細。簡單來說,就是另外開一個線程將字節(jié)跳動sdk處理后的紋理,即上文提到的dstTexture繪制到我們的錄像容器,即MediaCode.createInputSurface()

(2)以視頻流處理為例介紹一下流程,兩個線程,一個即上文所說渲染(繪制)線程,另外一個錄制線程(視頻編碼線程)

feature/mode/effectvideo/src/com/freeme/camera/feature/mode/effectvideo/EffectVideoMode.java

    // Kicks off recording on the effect surface view, delayed by 300 ms.
    private void startRecording() {
        ...
        mModeHandler.postDelayed(new Runnable() {
            @Override
            public void run() {
                // Audio recording is enabled when the "key_microphone" setting is "on".
                mSurfaceView.startRecording(mCurrentVideoFilename, EffectVideoMode.this,
                        "on".equals(mSettingManager.getSettingController().queryValue("key_microphone")), mOrientationHint);
            }
        }, 300);
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/byted/EffectView.java

    /**
     * Creates the muxer and encoders and starts recording.
     *
     * @param currentDescriptorName output file path for the muxer.
     * @param mediaMuxerListener    callback target for muxer state changes.
     * @param isRecordAudio         whether to also create an audio encoder.
     * @param orientation           orientation hint written into the file.
     */
    public void startRecording(String currentDescriptorName, MediaMuxerListener mediaMuxerListener, boolean isRecordAudio, int orientation) {
        try {
            // Muxer that interleaves the encoded audio/video tracks into the
            // target file.
            mMuxer = new MediaMuxerWrapper(currentDescriptorName, mediaMuxerListener);
            // Video encoder: the constructor registers itself with mMuxer so
            // encoded video can later be written out. (A meaningless
            // "if (true)" wrapper around this call has been removed.)
            new MediaVideoEncoder(mMuxer, mMediaEncoderListener, mImageHeight, mImageWidth);
            if (isRecordAudio) {
                // Optional audio encoder, likewise self-registering.
                new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
            }
            // Actual codec setup for all registered encoders.
            mMuxer.prepare();
            mMuxer.setOrientationHint(orientation);
            // Start capturing.
            mMuxer.startRecording();
        } catch (final IOException e) {
            Log.e(TAG, "startCapture:", e);
        }
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/encoder/MediaVideoEncoder.java    

    // Creates the video encoder and its dedicated render thread. Actual codec
    // configuration happens later in prepare().
    public MediaVideoEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener, final int width, final int height) {
        super(muxer, listener);
        if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
        mWidth = width;
        mHeight = height;
        // Render thread (RenderHandler implements Runnable) that later draws
        // the processed texture into the codec's input surface.
        mRenderHandler = RenderHandler.createHandler("VideoRenderThread");
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/glutils/RenderHandler.java

    /**
     * Factory: creates a RenderHandler and starts its dedicated render thread.
     * The thread's run() loop waits on mSync for draw requests.
     *
     * @param name thread name; falls back to TAG when empty or null.
     */
    public static final RenderHandler createHandler(final String name) {
        final RenderHandler handler = new RenderHandler();
        final String threadName = TextUtils.isEmpty(name) ? TAG : name;
        synchronized (handler.mSync) {
            new Thread(handler, threadName).start();
        }
        return handler;
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/encoder/MediaEncoder.java

    // Registers this encoder with the muxer and starts the encoder (drain)
    // thread, blocking until that thread signals readiness through mSync.
    public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
        ...
        mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
        muxer.addEncoder(this);
        mListener = listener;
        synchronized (mSync) {
            mBufferInfo = new MediaCodec.BufferInfo();
            // Encoding thread (MediaEncoder implements Runnable); run() will
            // notify mSync once it has started.
            new Thread(this, getClass().getSimpleName()).start();
            try {
                mSync.wait();
            } catch (final InterruptedException e) {
                // NOTE(review): interrupt is swallowed and the interrupt flag
                // is not restored.
            }
        }
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/encoder/MediaMuxerWrapper.java

    /**
     * Prepares whichever encoders were registered (video and/or audio).
     * Braces added to the single-statement ifs per house style.
     *
     * @throws IOException if either encoder fails to configure its codec.
     */
    public void prepare() throws IOException {
        if (mVideoEncoder != null) {
            mVideoEncoder.prepare();
        }
        if (mAudioEncoder != null) {
            mAudioEncoder.prepare();
        }
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/encoder/MediaVideoEncoder.java

    private static final String MIME_TYPE = "video/avc";

    /**
     * Configures the H.264 encoder and creates its input surface; the render
     * thread later draws the effect texture into that surface.
     */
    protected void prepare() throws IOException {
        // Standard Android encoder setup: format, color format, bit rate,
        // frame rate and key-frame interval.
        final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);    // API >= 18
        format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

        // Create and configure the video encoder.
        mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        // Key step: this surface is the "recording container" that the
        // ByteDance-processed texture gets drawn into.
        mSurface = mMediaCodec.createInputSurface();    // API >= 18
        // Start the codec.
        mMediaCodec.start();
        if (DEBUG) Log.i(TAG, "prepare finishing");
        if (mListener != null) {
            try {
                // Notify listeners that this encoder is ready.
                mListener.onPrepared(this);
            } catch (final Exception e) {
                Log.e(TAG, "prepare:", e);
            }
        }
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/byted/EffectView.java

    // Encoder-ready callbacks: hook the video encoder up to the GL thread;
    // just keep a reference to the audio encoder.
    private final MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
        @Override
        public void onPrepared(final MediaEncoder encoder) {
            if (encoder instanceof MediaVideoEncoder) {
                setVideoEncoder((MediaVideoEncoder) encoder);
            } else if (encoder instanceof MediaAudioEncoder) {
                mAudioEncoder = (MediaAudioEncoder) encoder;
            }
        }

        ...
    };

    /**
     * Runs on the GL thread and hands the video encoder everything it needs
     * to draw into the recording surface.
     */
    public void setVideoEncoder(final MediaVideoEncoder encoder) {
        queueEvent(new Runnable() {
            @Override
            public void run() {
                synchronized (this) {
                    if (encoder != null) {
                        // Three key arguments:
                        // 1. the EGLContext currently bound to this EffectView,
                        //    used to build the encoder's own EGL environment;
                        // 2. dstTexture — the texture produced by the ByteDance
                        //    beauty SDK (same one drawn to the preview);
                        // 3. mEffectRenderHelper — the same "brush" that
                        //    drawFrame(dstTexture) uses for the preview, reused
                        //    here to draw the texture into the recording surface.
                        // Context + texture + brush: all that remains is drawing.
                        encoder.setEglContext(EGL14.eglGetCurrentContext(), dstTexture, mEffectRenderHelper);
                    }
                    mVideoEncoder = encoder;
                }
            }
        });
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/byted/EffectView.java

    // Same render callback as in the preview section, shown again to highlight
    // where the encoder is told a frame is available.
    public void onDrawFrame(GL10 gl) {
        ...
        dstTexture = mEffectRenderHelper.processTexture(mSurfaceTextureID, rotation, getSurfaceTimeStamp());

        synchronized (this) {
            if (mVideoEncoder != null) {
                // Notify the recorder that a new frame is ready.
                mVideoEncoder.frameAvailableSoon();
            }
        }

        if (dstTexture != ShaderHelper.NO_TEXTURE) {
            mEffectRenderHelper.drawFrame(dstTexture);
        }
        mFrameRator.addFrameStamp();
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/encoder/MediaVideoEncoder.java

    /**
     * Notifies the encoder that a frame is ready; when the base class accepts
     * the request, also kicks the render thread to draw the current texture
     * into the codec's input surface.
     *
     * @return true if the frame request was accepted.
     */
    public boolean frameAvailableSoon() {
        // Avoid the easy-to-misread assignment-in-condition
        // "if (result = super.frameAvailableSoon())".
        final boolean result = super.frameAvailableSoon();
        if (result) {
            mRenderHandler.draw(null);
        }
        return result;
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/encoder/MediaEncoder.java

    /**
     * Requests that one more frame be drained by the encoder thread.
     * Rejected (returns false) when not capturing, stopping, or paused;
     * otherwise bumps the drain counter and wakes the encoder thread.
     */
    public boolean frameAvailableSoon() {
        synchronized (mSync) {
            final boolean accepting = mIsCapturing && !mRequestStop && !isPause;
            if (!accepting) {
                return false;
            }
            mRequestDrain++;
            mSync.notifyAll();
        }
        return true;
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/glutils/RenderHandler.java
    //渲染線程
    // Render thread loop: when a draw is requested, makes the encoder's input
    // surface current, draws the effect texture into it and swaps buffers;
    // otherwise blocks on mSync until the next request.
    public final void run() {
        ...
        for (; ; ) {
            ...

            if (localRequestDraw) {
                if ((mEglCore != null) && mTexId >= 0) {
                    mInputWindowSurface.makeCurrent();
                    
                    GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
                    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
                    // One preview frame rendered -> one frame rendered here too.
                    mEffectRenderHelper.drawFrame(mTexId);
                    mInputWindowSurface.swapBuffers();
                }
            } else {
                synchronized (mSync) {
                    try {
                        mSync.wait();
                    } catch (final InterruptedException e) {
                        // Interrupt ends the render loop.
                        break;
                    }
                }
            }
        }
        ...
    }

feature/mode/effect/src/com/freeme/camera/feature/mode/effect/encoder/MediaEncoder.java
    //編碼線程
    // Encoder thread loop: drains encoded output whenever a frame was
    // requested via frameAvailableSoon(); otherwise waits on mSync.
    public void run() {
        synchronized (mSync) {
            mRequestStop = false;
            mRequestDrain = 0;
            // Unblock the constructor, which waits for this thread to start.
            mSync.notify();
        }
        final boolean isRunning = true;
        boolean localRequestStop;
        boolean localRequestDrain;
        while (isRunning) {
            ...
            if (localRequestDrain) {
                // Pull encoded data out of MediaCodec and write it to the muxer.
                drain();
            } else {
                synchronized (mSync) {
                    try {
                        mSync.wait();
                    } catch (final InterruptedException e) {
                        // Interrupt ends the encoder loop.
                        break;
                    }
                }
            }
        } // end of while
        if (DEBUG) Log.d(TAG, "Encoder thread exiting");
        synchronized (mSync) {
            mRequestStop = true;
            mIsCapturing = false;
        }
    }


    // Drains encoded frames from MediaCodec and writes them to the muxer.
    // The loop shape follows the standard Android MediaCodec output pattern;
    // the key steps are numbered below.
    protected void drain() {
        if (mMediaCodec == null) return;
        if (isPause) return;
        ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        int encoderStatus, count = 0;
        final MediaMuxerWrapper muxer = mWeakMuxer.get();
        if (muxer == null) {
            Log.w(TAG, "muxer is unexpectedly null");
            return;
        }
        LOOP:
        while (mIsCapturing) {
            // 1. Get encoded data with a maximum timeout of TIMEOUT_USEC (=10ms).
            encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // wait 5 counts(=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
                if (!mIsEOS) {
                    if (++count > 5)
                        break LOOP;        // out of while
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                // this should not come when encoding
                // 2. Re-fetch the set of output buffers.
                encoderOutputBuffers = mMediaCodec.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
                // this status indicates the output format of the codec changed;
                // it should come only once, before the actual encoded data,
                // but never comes on Android 4.3 or less — in that case handle
                // MediaCodec.BUFFER_FLAG_CODEC_CONFIG instead.
                if (mMuxerStarted) {    // second time request is error
                    throw new RuntimeException("format changed twice");
                }
                // get output format from codec and pass it to the muxer;
                // getOutputFormat must be called after INFO_OUTPUT_FORMAT_CHANGED,
                // otherwise it crashes.
                final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
                mTrackIndex = muxer.addTrack(format);
                mMuxerStarted = true;
                if (!muxer.start()) {
                    // we should wait until the muxer is ready
                    synchronized (muxer) {
                        while (!muxer.isStarted())
                            try {
                                muxer.wait(100);
                            } catch (final InterruptedException e) {
                                break LOOP;
                            }
                    }
                }
            } else if (encoderStatus < 0) {
                // unexpected status
                if (DEBUG)
                    Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
            } else {
                // 3. encoderStatus indexes the buffer holding the encoded data.
                final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    // this never should come...may be a MediaCodec internal error
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // You should set the output format on the muxer here when
                    // targeting Android 4.3 or less, but MediaCodec#getOutputFormat
                    // cannot be called here (INFO_OUTPUT_FORMAT_CHANGED hasn't come
                    // yet), therefore the format would have to be rebuilt from the
                    // buffer data. This sample is for API>=18, so just ignore the flag.
                    if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }

                if (mBufferInfo.size != 0) {
                    // encoded data is ready, clear waiting counter
                    count = 0;
                    if (!mMuxerStarted) {
                        // muxer is not ready... this would be a programming failure.
                        throw new RuntimeException("drain:muxer hasn't started");
                    }
                    // 4. Write the encoded sample to the muxer with a monotonic PTS.
                    mBufferInfo.presentationTimeUs = getPTSUs();
                    muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    prevOutputPTSUs = mBufferInfo.presentationTimeUs;
                }
                // return buffer to encoder
                mMediaCodec.releaseOutputBuffer(encoderStatus, false);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    // when EOS come.
                    mIsCapturing = false;
                    break;      // out of while
                }
            }
        }
    }

//至此,視頻渲染以及將編碼好的數(shù)據(jù)寫入指定的視頻文件的流程就清楚了,音頻同理,比這個簡單。
//特別地:為什么要開發(fā)新美視,首先是效果,其次是tutu的sdk中渲染與資源釋放的異步問題,導致老美視十分容易報錯,已經(jīng)停止合作,我們也改不了sdk。
//現(xiàn)在的新美視,已經(jīng)穩(wěn)定,整明白了流程,后面出現(xiàn)問題,具體問題具體分析。

3. 臉萌模式簡單介紹

(1)臉萌模式原理:利用tutu美顏sdk返回的人臉坐標數(shù)據(jù),調(diào)用第三方庫libgdx,在紋理上繼續(xù)繪制臉萌圖案,libgdx也是封裝好的opengl

(2)簡單看下流程

feature/mode/beautyface/src/com/freeme/camera/feature/mode/beautyface/BeautyFaceView.java

    // Beauty-face render callback: runs the tutu filter engine on the camera
    // texture, draws the result, then (in face-cute mode) overlays the funny
    // face stickers and handles sticker capture.
    public void onDrawFrame(GL10 gl10) {
        mSurfaceTexture.updateTexImage();
        if (mPauseed) {
            return;
        }
        ...

        GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);

        // Filter-engine processing; the returned textureId is TEXTURE_2D.
        // Note width/height are taken from the swapped measured dimensions.
        int textureWidth = mMeasuredHeight;/*mDrawBounds.height();*/
        int textureHeight = mMeasuredWidth;/*mDrawBounds.width();*/

        // Downsample to reduce processing cost.
        textureHeight = (int) (textureHeight / SAMPLER_RATIO);
        textureWidth = (int) (textureWidth / SAMPLER_RATIO);

        if (mDrawBounds.width() >= 972) {
            // Extra downsampling for wide draw bounds.
            textureHeight = (int) (textureHeight / SAMPLER_RATIO);
            textureWidth = (int) (textureWidth / SAMPLER_RATIO);
        }

        // NOTE(review): the next line appears truncated/garbled in this
        // excerpt — it is not a complete statement. Check the original file.
        mMeasuredHeight, /*textureHeight*/mMeasuredWidth);
        final int textureId = mFilterEngine.processFrame(mOESTextureId, textureWidth, textureHeight);
        
        textureProgram.draw(textureId);
        
        if (mCameraActivity.getCurrentCameraMode() == FreemeSceneModeData.FREEME_SCENE_MODE_FC_ID) {
            FaceAligment[] faceAligments = mFilterEngine.getFaceFeatures();
            float deviceAngle = mFilterEngine.getDeviceAngle();
            // Draw the funny-face (face-cute) overlay.
            mFunnyFaceView.render(deviceAngle, faceAligments);
            // Handle a pending face-cute capture, if requested.
            mFunnyFaceView.capture();
        }
    }

feature/mode/facecute/src/com/freeme/camera/feature/mode/facecute/gles/FunnyFaceView.java

    // Draws the sticker overlay for this frame using the tutu SDK's face data.
    // Shows the "no face" indicator whenever no usable face is available.
    public void render(float deviceAngle, FaceAligment[] faceAligments) {
        if (!mIsShowing || mIsSwitching || mIsDispose) {
            return;
        }
        long time = System.nanoTime();
        // Frame delta in seconds, used to advance the sticker animation clock.
        deltaTime = (time - lastFrameTime) / 1000000000.0f;
        lastFrameTime = time;
        mStateTime += deltaTime;
        if (faceAligments != null && faceAligments.length > 0) {
            ...
            int faceW = (int) face.width();
            int faceH = (int) face.height();
            int abs = Math.abs(faceH - faceW);
            // Common issue: no sticker effect appears.
            // 1. The effect depends on tutu SDK face data — too far away and no
            //    face is detected, so nothing is drawn.
            // 2. The face width/height-ratio check below prevents flicker/flash;
            //    it was improved to factor in screen density (mDensity) so most
            //    devices get correct behavior.
            if (faceW < mFaceMinSizePx || faceW > mFaceMaxSizePx || abs > 70 * mDensity) {
                mCamera.showOrNotFFBNoFaceIndicator(true);
                return;
            }
            ...
            drawItem(scale, 0, angle, landmarkInfo);
            mSpriteBatch.end();
            mCamera.showOrNotFFBNoFaceIndicator(false);
        } else {
            mCamera.showOrNotFFBNoFaceIndicator(true);
        }
    }

    /**
     * Draws every sticker item in the current set for this frame, anchored to
     * the supplied face landmarks.
     */
    private void drawItem(float scale, int orientation, float angle, LandmarkInfo markInfo) {
        if (mCurrItemList == null) {
            return;
        }
        for (ItemInfo item : mCurrItemList) {
            // Pick the (looping) animation frame for the current playback time.
            TextureRegion frame = item.anim.getKeyFrame(mStateTime, true);
            AnchorInfo anchor = computeAnchorInfo(item, markInfo, scale, orientation);
            drawElements(frame, anchor, scale, orientation, angle);
        }
    }

    // Computes the final placement (elided) and draws one sticker region via
    // the libgdx sprite batch.
    private void drawElements(TextureRegion currRegion, AnchorInfo anchor, float scale,
                              int orientation, float angle) {
        ...
        // Draw the region with position, origin, size, scale and rotation.
        mSpriteBatch.draw(currRegion, x, y, orignX, orignY, orignW, orignH, scale, scale,
                finalAngle);
    }

    /**
     * Performs a one-shot capture when one has been requested, consuming the
     * mIsNeedCapture flag so the next frame does not capture again.
     */
    public void capture() {
        if (!mIsNeedCapture) {
            return;
        }
        mIsNeedCapture = false;
        handleRGB565Data();
    }

    // Reads the full frame back from the GPU and hands it off (elided) for
    // JPEG encoding/saving.
    private void handleRGB565Data() {
        long time = System.currentTimeMillis();
        final int data[] = this.getJpegDataFromGpu565(0, 0, mWidth, mHeight);
        ...
    }

    /**
     * Reads back a {@code w x h} pixel block from the current GL framebuffer
     * as packed 32-bit RGBA values (one int per pixel, native byte order).
     *
     * @param x left edge of the read region, in pixels.
     * @param y bottom edge of the read region (GL origin), in pixels.
     * @param w region width in pixels.
     * @param h region height in pixels.
     * @return w*h packed pixel values.
     */
    public int[] getJpegDataFromGpu565(int x, int y, int w, int h) {
        final int size = w * h;
        // 4 bytes per pixel (RGBA8888); native order so asIntBuffer() packs
        // each pixel into one int correctly.
        ByteBuffer buf = ByteBuffer.allocateDirect(size * 4);
        buf.order(ByteOrder.nativeOrder());
        // Synchronous GPU read-back.
        GLES20.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, buf);
        final int[] data = new int[size];
        buf.asIntBuffer().get(data);
        // (Removed the pointless "buf = null" dead store — the local goes out
        // of scope on return anyway.)
        return data;
    }
//臉萌的流程比較簡單,依賴于tutu 美顏。 常見的問題就是上面說的那個。

至此,美顏類的三種,美顏、美視、臉萌介紹完畢。

二、插件類以及常見問題總結(jié)

1. 外部插件:模特、兒童;水印、大片;掃碼

(1)外部插件框架:參考documents/FreemeOS/other/training/Camera/pluginmanager/Android插件化開發(fā).md,上一位camera負責人大廚走的時候詳細介紹過插件的來龍去脈,很詳細,自己看文檔。

(2)這里以掃碼為例,看一下

feature/mode/qrcodescan/src/com/freeme/camera/feature/mode/qrcodescan/QrCodeScanMode.java

    //camera api2,用ImageReader獲取預覽數(shù)據(jù)
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireNextImage();
        //image->plane->buffer->byte[]
        //getBytesFromImageAsType:根據(jù)要求的結(jié)果類型進行填充,二維碼需要的是亮度(Y)信息,簡單的將UV數(shù)據(jù)拼接在Y數(shù)據(jù)之后即可
        mIApp.getmPluginManagerAgent().blendOutput(CameraUtil.getBytesFromImageAsType(image, 1), FreemeSceneModeData.FREEME_SCENE_MODE_QRCODE_ID);
        image.close();
    }

common/src/com/freeme/camera/common/pluginmanager/PluginManagerAgent.java

    /**
     * Routes a frame buffer to the plugin registered for {@code mode}.
     *
     * @param jpegData raw frame data to hand to the plugin.
     * @param mode     scene-mode id used as the plugin lookup key.
     * @return the plugin's blended output, or null when no plugin handles the mode.
     */
    public byte[] blendOutput(byte[] jpegData, int mode) {
        if (mModules == null || mModules.size() == 0) {
            return null;
        }
        IPluginModuleEntry plugin = mModules.get(mode, null);
        return plugin == null ? null : plugin.blendOutput(jpegData);
    }

FreemeCameraPlugin/CameraQrCodeScan/app/src/main/java/com/freeme/cameraplugin/qrcodescan/QrCodeScan.java

    // Hands one preview buffer to the decode thread. Frames are dropped while
    // a previous decode is still in flight (mIsCoding) or before the framing
    // rect / handler exist.
    public byte[] blendOutput(byte[] jpegData) {
        if (QrCodeScanView.sFramingRect == null) {
            return super.blendOutput(jpegData);
        }
        synchronized (mDecodeHandlerObject) {
            if (mDecodeHandler != null && !mIsCoding) {
                mIsCoding = true;
                Point cameraResolution = mCameraConfigManager.getCameraResolution();
                // arg1/arg2 carry the frame resolution; obj carries the pixels.
                Message message = mDecodeHandler.obtainMessage(MSG_START_DECODE, cameraResolution.x, cameraResolution.y, jpegData);
                message.sendToTarget();
            } else {
                Log.d(TAG, "Got preview callback, but no handler for it");
            }
        }
        return super.blendOutput(jpegData);
    }

    /**
     * Decode-thread message dispatch: MSG_START_DECODE carries a raw frame
     * (obj=pixels, arg1=width, arg2=height); MSG_QUIT_DECODE stops this looper.
     */
    public void handleMessage(Message msg) {
        super.handleMessage(msg);
        final int what = msg.what;
        if (what == MSG_QUIT_DECODE) {
            Looper.myLooper().quit();
            return;
        }
        if (what == MSG_START_DECODE) {
            decode((byte[]) msg.obj, msg.arg1, msg.arg2);
        }
    }

    // Rotates the luma buffer 90 degrees, wraps it as a luminance source, and
    // runs ZXing over it. (Excerpt: elided sections and brace nesting follow
    // the original file.)
    private void decode(byte[] data, int width, int height) {
           
            Result rawResult = null;
            Log.i(TAG, "decode bate length : " + data.length + ",width : " + width + ",height : " + height);
            //modify here
            // Rotate the Y-plane 90° clockwise so portrait codes decode.
            byte[] rotatedData = new byte[data.length];
            for (int y = 0; y < height; y++) {
                for (int x = 0; x < width; x++)
                    rotatedData[x * height + height - y - 1] = data[x + y * width];
            }
            ...
            PlanarYUVLuminanceSource source = buildLuminanceSource(rotatedData, width, height, rect);
            BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
            try {
                // Recognize via Google's ZXing library.
                rawResult = multiFormatReader.decodeWithState(bitmap);
            } catch (ReaderException re) {
                // continue
            } finally {
                multiFormatReader.reset();
            }

            ...
        }
    }
//掃碼常見問題:識別不到二維碼,就我遇到的而言,都是機器的對焦有問題。讓項目檢查對焦。
//提一句資源無法加載的問題:屏幕尺寸不符合后臺判斷條件的要求。

2. 內(nèi)部插件(模式):假單反模式、人像模式

(1)原理:在預覽容器(TextureView)之上覆蓋一層BvirtualView

feature/mode/slr/src/com/freeme/camera/feature/mode/slr/BvirtualView.java

    // Draws the fake-bokeh preview: smooth the canvas, render the blurred
    // background composite, then the aperture (diaphragm) overlay on top.
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        canvas.setDrawFilter(mPFDF);
        drawTrueBgVirtualWithCanvas(canvas);
        drawDiaphragm(canvas);
    }

    // Builds one fake-bokeh frame: grab a (downsampled) preview bitmap, blur
    // it fully, blend a sharp region around the touch point back in, then
    // draw the composite scaled up onto the canvas.
    private void drawTrueBgVirtualWithCanvas(Canvas canvas) {
        ...
        // Snapshot of the current preview frame (downsampled by sampleFactor).
        Bitmap preview = ((CameraAppUI) mApp.getAppUi()).getmPreviewManager().getPreviewBitmap(sampleFactor);//mScreenShotProvider.getPreviewFrame(sampleFactor);
        ...
        if (preview != null && mBlur != null) {
            ...
            // Blur the whole frame with Android's ScriptIntrinsicBlur.
            Bitmap bgBlurBitmap = mBlur.blurBitmap(preview, mBlurDegress);
            if (SHOW_PREVIEW_DEBUG_LOG) {
                time1 = System.currentTimeMillis();
                Log.e(TAG, "blur bitmap :" + (time1 - time0) + " ms");
                time0 = System.currentTimeMillis();
            }
            // Sharp-region description: center (touch point) and inner/outer radii,
            // all scaled into the downsampled bitmap's coordinate space.
            BlurInfo info = new BlurInfo();
            info.x = (int) (mOnSingleX / apectScale);
            info.y = (int) (mOnSingleY / apectScale);
            info.inRadius = (int) (IN_SHARPNESS_RADIUS * scale / apectScale);
            info.outRadius = (int) (OUT_SHARPNESS_RADIUS * scale / apectScale);
            // Blend sharp and blurred images via the native blur library.
            // Source: https://github.com/azmohan/BvArithmetic
            SmoothBlurJni.smoothRender(bgBlurBitmap, preview, info);
            if (SHOW_PREVIEW_DEBUG_LOG) {
                time1 = System.currentTimeMillis();
                Log.e(TAG, "smooth render :" + (time1 - time0) + " ms");
            }
            Matrix matrix = new Matrix();
            matrix.setScale(apectScale, apectScale);
            // Draw the composite scaled back up to view size.
            canvas.drawBitmap(bgBlurBitmap, matrix, null);
            preview.recycle();
            bgBlurBitmap.recycle();
        }
    }
//常見問題:卡頓。 根本原因就是假單反的這一套太吃資源,在預覽之上又覆蓋了一層view
//可調(diào)小BvirtualView.java中的值來優(yōu)化
    // Tunables for the fake-bokeh overlay; lowering these reduces per-frame
    // cost (see the performance note in the surrounding article).
    private final static int IN_SHARPNESS_RADIUS = 200;
    private final static int OUT_SHARPNESS_RADIUS = 320;
    private static int REFERENCE_ASPECT_SIZE = 720;
    private static int SUPPORT_MAX_ASPECT_SIZE = 720;
//如果想從根本上優(yōu)化,可以像美顏那樣,用opengl對紋理進行模糊算法處理之后,再繪制到預覽容器上。
最后編輯于
©著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
【社區(qū)內(nèi)容提示】社區(qū)部分內(nèi)容疑似由AI輔助生成,瀏覽時請結(jié)合常識與多方信息審慎甄別。
平臺聲明:文章內(nèi)容(如有圖片或視頻亦包括在內(nèi))由作者上傳并發(fā)布,文章內(nèi)容僅代表作者本人觀點,簡書系信息發(fā)布平臺,僅提供信息存儲服務(wù)。

相關(guān)閱讀更多精彩內(nèi)容

  • 鎖 OSSpinLock 自旋鎖 實現(xiàn)機制:忙等 操作重點:原子操作1.自旋鎖2.互斥鎖3.讀寫鎖4.信號量5.條...
    王家小雷閱讀 462評論 0 0
  • 背景介紹 Kafka簡介 Kafka是一種分布式的,基于發(fā)布/訂閱的消息系統(tǒng)。主要設(shè)計目標如下: 以時間復雜度為O...
    奇妙林林閱讀 239評論 0 0
  • 集合(一) 為了解決數(shù)組的定長問題, JDK在1.2版本開發(fā)了集合框架, 集合和數(shù)組的相同點和不同點 集合是容器,...
    涼小楓閱讀 169評論 0 0
  • 用到的組件 1、通過CocoaPods安裝 2、第三方類庫安裝 3、第三方服務(wù) 友盟社會化分享組件 友盟用戶反饋 ...
    SunnyLeong閱讀 15,191評論 1 180
  • # 基礎(chǔ)知識預備 ### 1、C/C++程序編譯的四個過程 (以g++編譯器為例) - 預處理:宏的替換,還有注...
    上進的小白_閱讀 602評論 0 0

友情鏈接更多精彩內(nèi)容