将GLSurfaceView的屏幕捕获到位图

我需要能够在某个特定时刻捕捉到GLSurfaceView的图像。 我有以下代码:

 // NOTE: this is the approach that does NOT work for a GLSurfaceView.
 // The drawing cache only captures what the View hierarchy draws; a
 // GLSurfaceView renders into a separate surface layer behind a transparent
 // "hole" in the window, so the captured bitmap comes back transparent.
 relative.setDrawingCacheEnabled(true); screenshot = Bitmap.createBitmap(relative.getDrawingCache()); relative.setDrawingCacheEnabled(false); Log.v(TAG, "Screenshot height: " + screenshot.getHeight()); image.setImageBitmap(screenshot); 

GLSurfaceView包含在一个RelativeLayout中,但我也尝试过直接对GLSurfaceView进行截图。用这段代码截到的是一张透明的图像,也就是什么都没有。任何帮助将不胜感激。

SurfaceView和GLSurfaceView会在它们所在的窗口上“打孔”,以便让底层的surface显示出来。换句话说,它们在View层次中呈现为透明区域。

所以你不能通过调用GLSurfaceView.getDrawingCache()来捕获图像。

如果你想从GLSurfaceView获取图像,应该在GLSurfaceView.onDrawFrame()中调用gl.glReadPixels()。

我修补了createBitmapFromGLSurface方法,并在onDrawFrame()调用它。

(原始代码可能来自skuld的代码。)

 /**
  * Reads back the current GL color buffer and converts it into an Android Bitmap.
  *
  * glReadPixels returns the frame bottom-up in RGBA order, while Android's
  * ARGB_8888 bitmaps are top-down with red and blue swapped relative to the
  * raw GL ints, so the pixels are flipped vertically and the R/B channels
  * are exchanged before building the bitmap.
  *
  * @param x  left edge of the region to read, in pixels
  * @param y  bottom edge of the region to read, in pixels
  * @param w  region width in pixels
  * @param h  region height in pixels
  * @param gl the GL context to read from (must be current on this thread)
  * @return the captured bitmap, or null if the GL read fails
  */
 private Bitmap createBitmapFromGLSurface(int x, int y, int w, int h, GL10 gl)
         throws OutOfMemoryError {
     int[] glPixels = new int[w * h];
     int[] argbPixels = new int[w * h];
     IntBuffer readTarget = IntBuffer.wrap(glPixels);
     readTarget.position(0);
     try {
         gl.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, readTarget);
         for (int row = 0; row < h; row++) {
             int srcBase = row * w;
             // Destination row is mirrored: GL row 0 is the bottom of the image.
             int dstBase = (h - row - 1) * w;
             for (int col = 0; col < w; col++) {
                 int rgba = glPixels[srcBase + col];
                 // Swap red and blue; alpha and green stay in place.
                 int blue = (rgba >> 16) & 0xff;
                 int red = (rgba << 16) & 0x00ff0000;
                 argbPixels[dstBase + col] = (rgba & 0xff00ff00) | red | blue;
             }
         }
     } catch (GLException e) {
         // Read-back failed (e.g. no current context); signal failure to caller.
         return null;
     }
     return Bitmap.createBitmap(argbPixels, w, h, Bitmap.Config.ARGB_8888);
 }

如果你正在使用第三方库,只是把布局中定义的GLSurfaceView“传入”该库,那么下面是一个完整的解决方案。这种情况下你无法在渲染器的onDrawFrame()里插入自己的代码,这可能是一个问题…

要做到这一点,你需要把截图任务排入队列,交给GLSurfaceView在其渲染线程中执行。

 private GLSurfaceView glSurfaceView; // findById() in onCreate
 private Bitmap snapshotBitmap;

 /** Callback delivered on the UI thread once the GL frame has been captured. */
 private interface BitmapReadyCallbacks {
     void onBitmapReady(Bitmap bitmap);
 }

 /* Usage code
    captureBitmap(new BitmapReadyCallbacks() {
        @Override
        public void onBitmapReady(Bitmap bitmap) {
            someImageView.setImageBitmap(bitmap);
        }
    });
 */

 // supporting methods

 /**
  * Captures the GLSurfaceView's current frame. The read-back must happen on
  * the GL thread (where the context is current), so it is queued via
  * queueEvent(); the result is then handed back on the UI thread.
  */
 private void captureBitmap(final BitmapReadyCallbacks bitmapReadyCallbacks) {
     glSurfaceView.queueEvent(new Runnable() {
         @Override
         public void run() {
             // Grab the GL interface of whichever context is current on
             // this (render) thread -- we never see the renderer directly.
             EGL10 egl = (EGL10) EGLContext.getEGL();
             GL10 gl = (GL10) egl.eglGetCurrentContext().getGL();
             snapshotBitmap = createBitmapFromGLSurface(
                     0, 0, glSurfaceView.getWidth(), glSurfaceView.getHeight(), gl);
             // Deliver the bitmap where UI code can safely touch views.
             runOnUiThread(new Runnable() {
                 @Override
                 public void run() {
                     bitmapReadyCallbacks.onBitmapReady(snapshotBitmap);
                 }
             });
         }
     });
 }

 // from other answer in this question
 /**
  * Converts the GL color buffer into a Bitmap: flips the image vertically
  * (GL rows are bottom-up) and swaps the R/B channels (RGBA -> ARGB).
  * Returns null if the GL read fails.
  */
 private Bitmap createBitmapFromGLSurface(int x, int y, int w, int h, GL10 gl) {
     int[] rawPixels = new int[w * h];
     int[] converted = new int[w * h];
     IntBuffer wrapped = IntBuffer.wrap(rawPixels);
     wrapped.position(0);
     try {
         gl.glReadPixels(x, y, w, h, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, wrapped);
         for (int row = 0; row < h; row++) {
             int src = row * w;
             int dst = (h - row - 1) * w; // mirror rows top-to-bottom
             for (int col = 0; col < w; col++) {
                 int rgba = rawPixels[src + col];
                 int blue = (rgba >> 16) & 0xff;
                 int red = (rgba << 16) & 0x00ff0000;
                 converted[dst + col] = (rgba & 0xff00ff00) | red | blue;
             }
         }
     } catch (GLException e) {
         Log.e(TAG, "createBitmapFromGLSurface: " + e.getMessage(), e);
         return null;
     }
     return Bitmap.createBitmap(converted, w, h, Config.ARGB_8888);
 }

注意:在这段代码中,点击button时会把屏幕截图保存为图像文件存到SD卡上。我在onDrawFrame方法中用了一个布尔标志和if判断,因为渲染器会不断地调用onDrawFrame,如果不加判断,这段代码会在存储卡中保存大量图像。

MainActivity类:

 // One-shot flag: set on button press, cleared by the renderer after it
 // captures a single frame (see MyRenderer.onDrawFrame).
 protected boolean printOptionEnable = false;

 saveImageButton.setOnClickListener(new OnClickListener() {
     @Override
     public void onClick(View v) {
         Log.v("hari", "pan button clicked");
         // Arm the renderer so the next drawn frame is saved to storage.
         isSaveClick = true;
         myRenderer.printOptionEnable = isSaveClick;
     }
 });

MyRenderer类:

 // Surface dimensions cached from onSurfaceChanged so onDrawFrame can read
 // back the full frame.
 int width_surface, height_surface;

 @Override
 public void onSurfaceChanged(GL10 gl, int width, int height) {
     Log.i("JO", "onSurfaceChanged");
     // Adjust the viewport based on geometry changes, such as screen rotation.
     GLES20.glViewport(0, 0, width, height);
     width_surface = width;
     height_surface = height;
 }

 @Override
 public void onDrawFrame(GL10 gl) {
     try {
         // One-shot guard: onDrawFrame runs continuously, so without clearing
         // the flag a screenshot would be written on every single frame.
         if (printOptionEnable) {
             printOptionEnable = false;
             Log.i("hari", "printOptionEnable if condition:" + printOptionEnable);
             int w = width_surface;
             int h = height_surface;
             Log.i("hari", "w:" + w + "-----h:" + h);
             int[] b = new int[w * h];
             int[] bt = new int[w * h];
             IntBuffer buffer = IntBuffer.wrap(b);
             buffer.position(0);
             GLES20.glReadPixels(0, 0, w, h, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buffer);
             // OpenGL returns the frame bottom-up in RGBA order; flip it
             // vertically and swap the R/B channels to get Android's ARGB layout.
             for (int i = 0; i < h; i++) {
                 for (int j = 0; j < w; j++) {
                     int pix = b[i * w + j];
                     int pb = (pix >> 16) & 0xff;
                     int pr = (pix << 16) & 0x00ff0000;
                     int pix1 = (pix & 0xff00ff00) | pr | pb;
                     // BUG FIX: the original wrote bt[(hi-1)*w+j] -- "hi" does
                     // not exist (compile error). The correct vertical-flip
                     // index, as in the other answers here, is (h - i - 1).
                     bt[(h - i - 1) * w + j] = pix1;
                 }
             }
             // Build the bitmap directly from the converted pixels. (The
             // original also created a blank bitmap and copied the raw buffer
             // into it, then immediately overwrote that result -- dead code.)
             Bitmap inBitmap = Bitmap.createBitmap(bt, w, h, Bitmap.Config.ARGB_8888);
             ByteArrayOutputStream bos = new ByteArrayOutputStream();
             inBitmap.compress(CompressFormat.JPEG, 90, bos);
             byte[] bitmapdata = bos.toByteArray();

             final Calendar c = Calendar.getInstance();
             long mytimestamp = c.getTimeInMillis();
             String timeStamp = String.valueOf(mytimestamp);
             String myfile = "hari" + timeStamp + ".jpeg";
             dir_image = new File(Environment.getExternalStorageDirectory() + File.separator
                     + "printerscreenshots" + File.separator + "image");
             dir_image.mkdirs();
             FileOutputStream fos = null;
             try {
                 File tmpFile = new File(dir_image, myfile);
                 fos = new FileOutputStream(tmpFile);
                 // Write the JPEG bytes directly; the original round-tripped
                 // them through a ByteArrayInputStream copy loop for no gain.
                 fos.write(bitmapdata);
             } catch (FileNotFoundException e) {
                 e.printStackTrace();
             } catch (IOException e) {
                 e.printStackTrace();
             } finally {
                 // Ensure the stream is released even if the write fails.
                 if (fos != null) {
                     try {
                         fos.close();
                     } catch (IOException ignored) {
                         // best-effort close; nothing useful to do here
                     }
                 }
             }
             Log.v("hari", "screenshots:" + dir_image.toString());
         }
     } catch (Exception e) {
         e.printStackTrace();
     }
 }