当前位置:   article > 正文

基于OpenCV和OpenGL 的简易美颜相机_opencv android 相机预览实现改变除鼻子嘴巴眼睛外的颜色

opencv android 相机预览实现改变除鼻子嘴巴眼睛外的颜色

版本信息

AndroidStudio 3.5.2

OpenCV 4.1.2

OpenGL 2

OpenCV是什么

维基百科

在本Demo中,OpenCV实现面部识别功能

OpenGL是什么

维基百科

在本Demo中,OpenGL实现美颜功能

配置OpenCV环境

在AndroidStudio中新建C++项目

下载OpenCV Android版

下载OpenCV Windows版

将Android版本的OpenCV解压之后,F:\OpenCV-android-sdk\sdk\native\libs目录的内容复制到项目中, 这个目录中是.so文件。

将Windows版本的OpenCV解压之后,F:\opencv\opencv\build目录中的include内容复制到项目中,这个目录中的是头文件。

将Windows版本的OpenCV解压之后,F:\opencv\opencv\sources\samples\android\face-detection\res\raw的lbpcascade_frontalface.xml文件复制到assets, 这是人脸识别的模型数据。

复制进来大概是这个样子的。我仅复制了armeabi-v7a的,其它的没有复制。

编辑CMakeLists.txt文件,将OpenCV的库打包到native-lib.so中。

  1. cmake_minimum_required(VERSION 3.4.1)
  2. add_library(
  3. native-lib
  4. SHARED
  5. native-lib.cpp
  6. FaceTracker.cpp)
  7. include_directories(include)
  8. add_subdirectory(facealigenment)
  9. # 需要在app的build.gradle文件中声明jniLibs的位置
  10. #sourceSets {
  11. #main {
  12. #//jni库的调用会到资源文件夹下libs里面找so文件
  13. #jniLibs.srcDirs = ['libs']
  14. #}
  15. #}
  16. set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -L${CMAKE_SOURCE_DIR}/../../../libs/${ANDROID_ABI}")
  17. find_library(
  18. log-lib
  19. log)
  20. target_link_libraries(
  21. native-lib
  22. opencv_java4
  23. android
  24. seeta_fa_lib
  25. ${log-lib})

rebuild项目成功即代表配置OpenCV成功。

配置OpenGL。 因为Android本身就支持OpenGL,所以只需要在配置文件中指定版本即可。

在AndroidManifest.xml中增加

  1. <uses-feature
  2. android:glEsVersion="0x00020000"
  3. android:required="true" />

将摄像头采集的数据通过OpenGL渲染出来

OpenGL的渲染流程

顶点着色器先确定每个顶点的位置,经过图元装配与栅格化后,由片元着色器为每个片元计算颜色,最终结果写入帧缓冲并显示到屏幕。

整个渲染流程在GPU上直接完成,CPU只需与GPU交换必要的数据(如顶点坐标、纹理数据和uniform参数)。

GL语言

OpenGL编程语言-glsl基础

新建一个MyGLSurfaceView继承自GLSurfaceView

  1. import android.content.Context;
  2. import android.opengl.GLSurfaceView;
  3. import android.util.AttributeSet;
  4. /**
  5. * 作者: ZGQ
  6. * 创建时间: 2019/11/19 11:26
  7. * 注释:
  8. */
  9. public class MyGLSurfaceView extends GLSurfaceView {
  10. private MyRenderer renderer;
  11. public MyGLSurfaceView(Context context) {
  12. super(context);
  13. }
  14. public MyGLSurfaceView(Context context, AttributeSet attrs) {
  15. super(context, attrs);
  16. setEGLContextClientVersion(2);
  17. setRenderer(renderer = new MyRenderer(this));
  18. // 设置按需渲染 当我们调用requestRender的时候,请求GLThread回调一次onDrawFrame方法
  19. // 连续渲染,自动回到onDrawFrame
  20. setRenderMode(RENDERMODE_WHEN_DIRTY);
  21. }
  22. @Override
  23. protected void onDetachedFromWindow() {
  24. super.onDetachedFromWindow();
  25. if (renderer != null) {
  26. renderer.release();
  27. }
  28. }
  29. public void swichCamera(){
  30. if (renderer != null) {
  31. renderer.swichCamera();
  32. }
  33. }
  34. }

新建MyRenderer继承自GLSurfaceView.Renderer

  1. import android.graphics.SurfaceTexture;
  2. import android.hardware.Camera;
  3. import android.opengl.GLES20;
  4. import android.opengl.GLSurfaceView;
  5. import com.bigeye.demo.face.OpenCv;
  6. import com.bigeye.demo.filter.BeautyFilter;
  7. import com.bigeye.demo.filter.BigEyesFilter;
  8. import com.bigeye.demo.filter.CameraFilter;
  9. import com.bigeye.demo.filter.ScreenFilter;
  10. import com.bigeye.demo.filter.StickerFilter;
  11. import com.bigeye.demo.util.CameraHelper;
  12. import com.bigeye.demo.util.Utils;
  13. import java.io.File;
  14. import javax.microedition.khronos.egl.EGLConfig;
  15. import javax.microedition.khronos.opengles.GL10;
  16. /**
  17. * 作者: ZGQ
  18. * 创建时间: 2019/11/19 11:29
  19. * 注释:
  20. */
  21. public class MyRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener, Camera.PreviewCallback {
  22. private MyGLSurfaceView myGLSurfaceView;
  23. private CameraHelper mCameraHelper;
  24. private SurfaceTexture mSurfaceTexture;
  25. private int[] mTextures;
  26. private float[] mtx = new float[16];
  27. private CameraFilter mCameraFilter;
  28. private ScreenFilter mScreenFilter;
  29. private BigEyesFilter mBigEyesFilter;
  30. private StickerFilter mStickerFilter;
  31. private BeautyFilter beautyFilter;
  32. private File faceCascade;
  33. private File eyeCascade;
  34. private OpenCv openCv;
  35. public void swichCamera() {
  36. if (mCameraHelper != null) {
  37. mCameraHelper.switchCamera();
  38. }
  39. }
  40. public MyRenderer(MyGLSurfaceView glSurfaceView) {
  41. myGLSurfaceView = glSurfaceView;
  42. }
  43. @Override
  44. public void onSurfaceCreated(GL10 gl, EGLConfig config) {
  45. // GLES的所有操作要在GLES的Thread中执行,否则会失败
  46. mCameraFilter = new CameraFilter(myGLSurfaceView.getContext());
  47. mScreenFilter = new ScreenFilter(myGLSurfaceView.getContext());
  48. mBigEyesFilter = new BigEyesFilter(myGLSurfaceView.getContext());
  49. mStickerFilter = new StickerFilter(myGLSurfaceView.getContext());
  50. beautyFilter = new BeautyFilter(myGLSurfaceView.getContext());
  51. mTextures = new int[1];
  52. GLES20.glGenTextures(mTextures.length, mTextures, 0);
  53. mSurfaceTexture = new SurfaceTexture(mTextures[0]);
  54. mSurfaceTexture.setOnFrameAvailableListener(this);
  55. mSurfaceTexture.getTransformMatrix(mtx);
  56. mCameraFilter.setMatrix(mtx);
  57. faceCascade = Utils.copyAssets(myGLSurfaceView.getContext(), "lbpcascade_frontalface.xml");
  58. eyeCascade = Utils.copyAssets(myGLSurfaceView.getContext(), "seeta_fa_v1.1.bin");
  59. openCv = new OpenCv(faceCascade.getAbsolutePath(), eyeCascade.getAbsolutePath());
  60. }
  61. @Override
  62. public void onSurfaceChanged(GL10 gl, int width, int height) {
  63. mCameraHelper = new CameraHelper(Camera.CameraInfo.CAMERA_FACING_BACK, width, height);
  64. mCameraHelper.startPreview(mSurfaceTexture);
  65. mCameraHelper.setPreviewCallback(this);
  66. mCameraFilter.onReady(width, height);
  67. mScreenFilter.onReady(width, height);
  68. mBigEyesFilter.onReady(width, height);
  69. mStickerFilter.onReady(width, height);
  70. beautyFilter.onReady(width, height);
  71. openCv.startTrack();
  72. openCv.setCameraHelper(mCameraHelper);
  73. }
  74. @Override
  75. public void onDrawFrame(GL10 gl) {
  76. GLES20.glClearColor(0, 0, 0, 0);
  77. // 执行清空
  78. GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  79. mSurfaceTexture.updateTexImage();
  80. mSurfaceTexture.getTransformMatrix(mtx);
  81. mCameraFilter.setMatrix(mtx);
  82. // 责任链模式
  83. int textureId = mCameraFilter.onDrawFrame(mTextures[0]);
  84. // if (openCv != null) {
  85. // mBigEyesFilter.setFace(openCv.getFace());
  86. // mStickerFilter.setFace(openCv.getFace());
  87. // }
  88. // textureId = mBigEyesFilter.onDrawFrame(textureId);
  89. // textureId = mStickerFilter.onDrawFrame(textureId);
  90. textureId = beautyFilter.onDrawFrame(textureId);
  91. mScreenFilter.onDrawFrame(textureId);
  92. }
  93. @Override
  94. public void onFrameAvailable(SurfaceTexture surfaceTexture) {
  95. // 摄像头获得一帧数据,请求GL绘制
  96. myGLSurfaceView.requestRender();
  97. }
  98. public void release() {
  99. if (mCameraHelper != null) {
  100. mCameraHelper.release();
  101. }
  102. }
  103. @Override
  104. public void onPreviewFrame(byte[] data, Camera camera) {
  105. if (openCv != null) {
  106. // openCv.dector(data);
  107. }
  108. }
  109. }

AbstractFilter 过滤器

  1. import android.content.Context;
  2. import android.opengl.GLES20;
  3. import com.bigeye.demo.util.OpenGLUtils;
  4. import java.nio.ByteBuffer;
  5. import java.nio.ByteOrder;
  6. import java.nio.FloatBuffer;
  7. /**
  8. * 作者: ZGQ
  9. * 创建时间: 2019/11/19 11:57
  10. * 注释:
  11. */
  12. public abstract class AbstractFilter {
  13. // 顶点着色器
  14. protected int mVertexShaderId;
  15. // 片元着色器
  16. protected int mFragmentShaderId;
  17. // 顶点GPU地址
  18. protected FloatBuffer mTextureBuffer;
  19. // 片元GPU地址
  20. protected FloatBuffer mVertexBuffer;
  21. protected int mProgram;
  22. // 纹理id
  23. protected int vTexture;
  24. // 用于处理图像的旋转和镜像
  25. protected int vMatrix;
  26. protected int vCoord;
  27. protected int vPosition;
  28. protected int mWidth;
  29. protected int mHeight;
  30. public AbstractFilter(Context context, int vertexShaderId, int fragmentShaderId) {
  31. this.mVertexShaderId = vertexShaderId;
  32. this.mFragmentShaderId = fragmentShaderId;
  33. // 摄像头是2D的
  34. //
  35. mVertexBuffer = ByteBuffer.allocateDirect(4 * 2 * 4)
  36. .order(ByteOrder.nativeOrder())
  37. .asFloatBuffer();
  38. mVertexBuffer.clear();
  39. float[] VERTEX = {
  40. -1.0f, -1.0f,
  41. 1.0f, -1.0f,
  42. -1.0f, 1.0f,
  43. 1.0f, 1.0f
  44. };
  45. mVertexBuffer.put(VERTEX);
  46. mTextureBuffer = ByteBuffer.allocateDirect(4 * 2 * 4)
  47. .order(ByteOrder.nativeOrder())
  48. .asFloatBuffer();
  49. mTextureBuffer.clear();
  50. float[] TEXTURE = {
  51. 0.0f, 1.0f,
  52. 1.0f, 1.0f,
  53. 0.0f, 0.0f,
  54. 1.0f, 0.0f
  55. };
  56. mTextureBuffer.put(TEXTURE);
  57. init(context);
  58. initCoordinate();
  59. }
  60. public void onReady(int width, int height) {
  61. this.mWidth = width;
  62. this.mHeight = height;
  63. }
  64. private void init(Context context) {
  65. String vertexShader = OpenGLUtils.readRawTextFile(context, mVertexShaderId);
  66. String fragmentShader = OpenGLUtils.readRawTextFile(context, mFragmentShaderId);
  67. mProgram = OpenGLUtils.loadProgram(vertexShader, fragmentShader);
  68. vPosition = GLES20.glGetAttribLocation(mProgram, "vPosition");
  69. vCoord = GLES20.glGetAttribLocation(mProgram, "vCoord");
  70. vMatrix = GLES20.glGetUniformLocation(mProgram, "vMatrix");
  71. vTexture = GLES20.glGetUniformLocation(mProgram, "vTexture");
  72. }
  73. protected abstract void initCoordinate();
  74. public int onDrawFrame(int textureId) {
  75. // 设置显示窗口
  76. GLES20.glViewport(0, 0, mWidth, mHeight);
  77. // 使用程序
  78. GLES20.glUseProgram(mProgram);
  79. mVertexBuffer.position(0);
  80. GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
  81. GLES20.glEnableVertexAttribArray(vPosition);
  82. mTextureBuffer.position(0);
  83. GLES20.glVertexAttribPointer(vCoord, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
  84. GLES20.glEnableVertexAttribArray(vCoord);
  85. GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
  86. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
  87. GLES20.glUniform1i(vTexture, 0);
  88. GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
  89. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
  90. return textureId;
  91. }
  92. }

CameraFilter,将摄像头的数据传递到FBO

  1. import android.content.Context;
  2. import android.opengl.GLES11Ext;
  3. import android.opengl.GLES20;
  4. import com.bigeye.demo.R;
  5. import com.bigeye.demo.util.OpenGLUtils;
  6. /**
  7. * 作者: ZGQ
  8. * 创建时间: 2019/11/19 14:42
  9. * 注释: 与父类中不同
  10. * 1.坐标转换 旋转90度, 镜像
  11. */
  12. public class CameraFilter extends AbstractFilter {
  13. // FBO 的指针
  14. protected int[] mFrameBuffer;
  15. protected int[] mFrameBufferTextures;
  16. private float[] mMatrix;
  17. public CameraFilter(Context context) {
  18. super(context, R.raw.camera_vertex, R.raw.camera_frag);
  19. }
  20. @Override
  21. protected void initCoordinate() {
  22. mTextureBuffer.clear();
  23. // 摄像头原始坐标是颠倒的逆时针旋转90+是镜像的
  24. // float[] TEXTURE = {
  25. // 0.0f, 0.0f,
  26. // 1.0f, 0.0f,
  27. // 0.0f, 1.0f,
  28. // 1.0f, 1.0f
  29. // };
  30. // 修复代码
  31. float[] TEXTURE = {
  32. 0.0f, 0.0f,
  33. 0.0f, 1.0f,
  34. 1.0f, 0.0f,
  35. 1.0f, 1.0f
  36. };
  37. mTextureBuffer.put(TEXTURE);
  38. }
  39. @Override
  40. public void onReady(int width, int height) {
  41. super.onReady(width, height);
  42. mFrameBuffer = new int[1];
  43. // 生成FBO
  44. GLES20.glGenFramebuffers(mFrameBuffer.length, mFrameBuffer, 0);
  45. // 实例化一个纹理,目的是与FBO进行绑定,从而操作纹理,不直接操作FBO
  46. mFrameBufferTextures = new int[1];
  47. OpenGLUtils.glGenTextures(mFrameBufferTextures);
  48. // 使用全局变量
  49. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mFrameBufferTextures[0]);
  50. GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffer[0]);
  51. // 设置纹理显示参数
  52. GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, mWidth, mHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
  53. // 将纹理与FBO绑定
  54. GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0,
  55. GLES20.GL_TEXTURE_2D,
  56. mFrameBufferTextures[0], 0);
  57. // 不再使用,告诉GPU其它人可以用GL_TEXTURE_2D了
  58. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
  59. GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  60. }
  61. /**
  62. * 不同处
  63. *
  64. * @param textureId
  65. * @return
  66. */
  67. @Override
  68. public int onDrawFrame(int textureId) {
  69. // 设置显示窗口
  70. GLES20.glViewport(0, 0, mWidth, mHeight);
  71. // 不同处 1
  72. // 不调用的话就是默认的操作glSurfaceView中的纹理了,显示到屏幕上了
  73. // 这里我们只是把它画到fbo中(缓存)
  74. GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffer[0]);
  75. // 使用程序
  76. GLES20.glUseProgram(mProgram);
  77. mVertexBuffer.position(0);
  78. GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
  79. GLES20.glEnableVertexAttribArray(vPosition);
  80. mTextureBuffer.position(0);
  81. GLES20.glVertexAttribPointer(vCoord, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
  82. GLES20.glEnableVertexAttribArray(vCoord);
  83. // 不同处 2
  84. // 变换矩阵
  85. GLES20.glUniformMatrix4fv(vMatrix, 1, false, mMatrix, 0);
  86. // 不同处 3
  87. // 从摄像头拿到的数据,是所以纹理要是GLES11Ext.GL_TEXTURE_EXTERNAL_OES,不能是texture2D
  88. // 激活图层
  89. GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
  90. GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureId);
  91. GLES20.glUniform1i(vTexture, 0);
  92. // 绘制
  93. GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
  94. // 解绑
  95. GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  96. GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
  97. return mFrameBufferTextures[0];
  98. }
  99. public void setMatrix(float[] mtx) {
  100. this.mMatrix = mtx;
  101. }
  102. }

ScreenFilter将数据渲染到TextureView中

  1. /**
  2. * 作者: ZGQ
  3. * 创建时间: 2019/11/19 14:42
  4. * 注释:
  5. */
  6. public class ScreenFilter extends AbstractFilter {
  7. public ScreenFilter(Context context) {
  8. super(context, R.raw.base_vertex, R.raw.base_frag);
  9. }
  10. @Override
  11. protected void initCoordinate() {
  12. }
  13. }

人脸识别

初始化OpenCV

  1. extern "C"
  2. JNIEXPORT jlong JNICALL
  3. Java_com_bigeye_demo_face_OpenCv_init_1face_1track(JNIEnv *env, jobject thiz, jstring face_cascade_,
  4. jstring eye_cascade_) {
  5. const char *face_cascade = env->GetStringUTFChars(face_cascade_, 0);// 人脸识别模型,从assets拷贝到本机的绝对路径
  6. const char *eye_cascade = env->GetStringUTFChars(eye_cascade_, 0);
  7. FaceTracker *faceTracker = new FaceTracker(face_cascade, eye_cascade);
  8. env->ReleaseStringUTFChars(face_cascade_, face_cascade);
  9. env->ReleaseStringUTFChars(eye_cascade_, eye_cascade);
  10. return reinterpret_cast<jlong>(faceTracker);
  11. }

摄像头采集的数据,发送给OpenCV识别

  1. extern "C"
  2. JNIEXPORT jobject JNICALL
  3. Java_com_bigeye_demo_face_OpenCv_native_1detector(JNIEnv *env, jobject thiz, jlong native_facetrack,
  4. jbyteArray data_, jint camera_id, jint width,
  5. jint height) {
  6. if (native_facetrack == 0) {
  7. return nullptr;
  8. }
  9. FaceTracker *faceTracker = reinterpret_cast<FaceTracker *>(native_facetrack);
  10. jbyte *data = env->GetByteArrayElements(data_, NULL);
  11. // src 此时是NV21
  12. Mat src(height + height / 2, width, CV_8UC1, data);
  13. // 将src的NV21格式转换成RGBA格式
  14. cvtColor(src, src, COLOR_YUV2RGBA_NV21);
  15. if (camera_id == 1) {
  16. // 前置摄像头
  17. // 逆时针旋转90
  18. rotate(src, src, ROTATE_90_COUNTERCLOCKWISE);
  19. // 1-水平翻转, 0 垂直翻转
  20. flip(src, src, 1);
  21. } else {
  22. // 后置摄像头
  23. // 顺时针旋转90
  24. rotate(src, src, ROTATE_90_CLOCKWISE);
  25. }
  26. Mat gray;
  27. cvtColor(src, gray, COLOR_RGBA2GRAY);
  28. equalizeHist(gray, gray);
  29. // 识别人脸 返回识别面部特征5
  30. std::vector<cv::Rect2f> rects = faceTracker->dector(gray);
  31. env->ReleaseByteArrayElements(data_, data, 0);
  32. int imgWidth = src.cols;
  33. int imgHeight = src.rows;
  34. int ret = rects.size();
  35. if (ret) {
  36. jclass clazz = env->FindClass("com/bigeye/demo/face/Face");
  37. jmethodID construct = env->GetMethodID(clazz, "<init>", "(IIII[F)V");
  38. int size = ret * 2;
  39. jfloatArray jfloatArray1 = env->NewFloatArray(size);
  40. for (int i = 0, j = 0; i < size; j++) {
  41. float f[2] = {rects[j].x, rects[j].y};
  42. LOGE("rects[j].x = %ld , rects[j].y = %ld , x = %ld , y = %ld", rects[j].x,
  43. rects[j].y, rects[j].x / imgWidth, rects[j].y / imgHeight);
  44. env->SetFloatArrayRegion(jfloatArray1, i, 2, f);
  45. i += 2;
  46. }
  47. Rect2f faceRect = rects[0];
  48. width = faceRect.width;
  49. height = faceRect.height;
  50. jobject face = env->NewObject(clazz, construct, width, height, imgWidth, imgHeight,
  51. jfloatArray1);
  52. return face;
  53. }
  54. return nullptr;
  1. std::vector<cv::Rect2f> FaceTracker::dector(Mat src) {
  2. std::vector<cv::Rect> faces;
  3. std::vector<cv::Rect2f> rects;
  4. // 执行人脸识别
  5. tracker->process(src);
  6. // 获取识别结果
  7. tracker->getObjects(faces);
  8. seeta::FacialLandmark points[5];
  9. if (faces.size() > 0) {
  10. cv::Rect face = faces[0];
  11. rects.push_back(cv::Rect2f(face.x, face.y, face.width, face.height));
  12. ImageData image_data(src.cols, src.rows);
  13. image_data.data = src.data;
  14. seeta::Rect bbox;
  15. bbox.x = face.x;
  16. bbox.y = face.y;
  17. bbox.width = face.width;
  18. bbox.height = face.height;
  19. seeta::FaceInfo faceInfo;
  20. faceInfo.bbox = bbox;
  21. faceAlignment->PointDetectLandmarks(image_data, faceInfo, points);
  22. // 识别到特征点 5个特征点 双眼中心 , 鼻子中心 嘴巴 两个嘴角
  23. for (int i = 0; i < 5; ++i) {
  24. rects.push_back(cv::Rect2f(points[i].x, points[i].y, 0, 0));
  25. }
  26. }
  27. return rects;

 

 

写美颜的片元着色器GL程序。美颜的原理是

  1. 高斯模糊(对图像做平滑处理)
  2. 高反差保留(原图减去模糊图,提取细节)
  3. 强化细节并与原图混合
  1. precision mediump float;
  2. varying vec2 aCoord;
  3. uniform sampler2D vTexture;
  4. uniform int width;
  5. uniform int height;
  6. //
  7. vec2 blurCoordinates[20];
  8. void main(){
  9. // 1. 模糊: 平滑处理 步长
  10. vec2 singleStepOffest = vec2(1.0/float(width), 1.0/float(height));
  11. blurCoordinates[0] = aCoord.xy + singleStepOffest * vec2(0.0, -10.0);
  12. blurCoordinates[1] = aCoord.xy + singleStepOffest * vec2(0.0, 10.0);
  13. blurCoordinates[2] = aCoord.xy + singleStepOffest * vec2(-10.0, 0.0);
  14. blurCoordinates[3] = aCoord.xy + singleStepOffest * vec2(10.0, 0.0);
  15. blurCoordinates[4] = aCoord.xy + singleStepOffest * vec2(5.0, -8.0);
  16. blurCoordinates[5] = aCoord.xy + singleStepOffest * vec2(5.0, 8.0);
  17. blurCoordinates[6] = aCoord.xy + singleStepOffest * vec2(-8.0, 5.0);
  18. blurCoordinates[7] = aCoord.xy + singleStepOffest * vec2(8.0, 5.0);
  19. blurCoordinates[8] = aCoord.xy + singleStepOffest * vec2(8.0, -5.0);
  20. blurCoordinates[9] = aCoord.xy + singleStepOffest * vec2(8.0, 5.0);
  21. blurCoordinates[10] = aCoord.xy + singleStepOffest * vec2(-5.0, 8.0);
  22. blurCoordinates[11] = aCoord.xy + singleStepOffest * vec2(5.0, 8.0);
  23. blurCoordinates[12] = aCoord.xy + singleStepOffest * vec2(0.0, -6.0);
  24. blurCoordinates[13] = aCoord.xy + singleStepOffest * vec2(0.0, 6.0);
  25. blurCoordinates[14] = aCoord.xy + singleStepOffest * vec2(-6.0, 0.0);
  26. blurCoordinates[15] = aCoord.xy + singleStepOffest * vec2(6.0, 0.0);
  27. blurCoordinates[16] = aCoord.xy + singleStepOffest * vec2(-4.0, -4.0);
  28. blurCoordinates[17] = aCoord.xy + singleStepOffest * vec2(-4.0, 4.0);
  29. blurCoordinates[18] = aCoord.xy + singleStepOffest * vec2(4.0, -4.0);
  30. blurCoordinates[19] = aCoord.xy + singleStepOffest * vec2(4.0, 4.0);
  31. // 获取中心点坐标
  32. vec4 currentColor = texture2D(vTexture, aCoord);
  33. vec3 totalRGB = currentColor.rgb;
  34. for (int i = 0; i < 20; i++){
  35. totalRGB += texture2D(vTexture, blurCoordinates[i].xy).rgb;
  36. }
  37. vec4 blur = vec4(totalRGB * 1.0 / 21.0, currentColor.a);
  38. // 高通道颜色
  39. vec4 highPassColor = currentColor - blur;
  40. // 反向
  41. highPassColor.r = clamp(2.0 * highPassColor.r * highPassColor.r * 24.0, 0.0, 1.0);
  42. highPassColor.g = clamp(2.0 * highPassColor.g * highPassColor.g * 24.0, 0.0, 1.0);
  43. highPassColor.b = clamp(2.0 * highPassColor.b * highPassColor.b * 24.0, 0.0, 1.0);
  44. vec3 rgb = mix(currentColor.rgb, blur.rgb, 0.2);
  45. gl_FragColor = vec4(rgb,1.0);
  46. }

美颜过滤器BeautyFilter

  1. import com.bigeye.demo.R;
  2. /**
  3. * 作者: ZGQ
  4. * 创建时间: 2019/11/25 15:52
  5. * 注释:
  6. */
  7. public class BeautyFilter extends AbstractFrameFilter {
  8. private int width;
  9. private int height;
  10. public BeautyFilter(Context context) {
  11. super(context, R.raw.base_vertex, R.raw.beauty_frag);
  12. width = GLES20.glGetUniformLocation(mProgram, "width");
  13. height = GLES20.glGetUniformLocation(mProgram, "height");
  14. }
  15. @Override
  16. public int onDrawFrame(int textureId) {
  17. // 设置显示窗口
  18. GLES20.glViewport(0, 0, mWidth, mHeight);
  19. // 不同处 1
  20. // 不调用的话就是默认的操作glSurfaceView中的纹理了,显示到屏幕上了
  21. // 这里我们只是把它画到fbo中(缓存)
  22. GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFrameBuffer[0]);
  23. // 使用程序
  24. GLES20.glUseProgram(mProgram);
  25. GLES20.glUniform1i(width, mWidth);
  26. GLES20.glUniform1i(height, mHeight);
  27. mVertexBuffer.position(0);
  28. GLES20.glVertexAttribPointer(vPosition, 2, GLES20.GL_FLOAT, false, 0, mVertexBuffer);
  29. GLES20.glEnableVertexAttribArray(vPosition);
  30. mTextureBuffer.position(0);
  31. GLES20.glVertexAttribPointer(vCoord, 2, GLES20.GL_FLOAT, false, 0, mTextureBuffer);
  32. GLES20.glEnableVertexAttribArray(vCoord);
  33. // 激活图层
  34. GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
  35. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
  36. GLES20.glUniform1i(vTexture, 0);
  37. // 绘制
  38. GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
  39. // 解绑
  40. GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
  41. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
  42. return mFrameBufferTextures[0];
  43. }
  44. }

 

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/笔触狂放9/article/detail/222791
推荐阅读
相关标签
  

闽ICP备14008679号