Android 屏幕截图
1.同时按下电源键+音量下键截屏
View Codeprivate void interceptScreenshotChord() { if (mScreenshotChordEnabled && mVolumeDownKeyTriggered && mPowerKeyTriggered && !mVolumeUpKeyTriggered) { final long now = SystemClock.uptimeMillis(); if (now <= mVolumeDownKeyTime + SCREENSHOT_CHORD_DEBOUNCE_DELAY_MILLIS && now <= mPowerKeyTime + SCREENSHOT_CHORD_DEBOUNCE_DELAY_MILLIS) { mVolumeDownKeyConsumedByScreenshotChord = true; cancelPendingPowerKeyAction(); mHandler.postDelayed(mScreenshotRunnable, getScreenshotChordLongPressDelay());② } } } private final Runnable mScreenshotRunnable = new Runnable() { @Override public void run() { takeScreenshot();③ } }; @Override public int interceptKeyBeforeQueueing(KeyEvent event, int policyFlags, boolean isScreenOn) { ... final boolean down = event.getAction() == KeyEvent.ACTION_DOWN; ... // Handle special keys. switch (keyCode) { case KeyEvent.KEYCODE_VOLUME_DOWN: case KeyEvent.KEYCODE_VOLUME_UP: case KeyEvent.KEYCODE_VOLUME_MUTE: { if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN) { if (down) { if (isScreenOn && !mVolumeDownKeyTriggered && (event.getFlags() & KeyEvent.FLAG_FALLBACK) == 0) { mVolumeDownKeyTriggered = true; mVolumeDownKeyTime = event.getDownTime(); mVolumeDownKeyConsumedByScreenshotChord = false; cancelPendingPowerKeyAction(); interceptScreenshotChord();① } } else { mVolumeDownKeyTriggered = false; cancelPendingScreenshotChordAction(); } ... case KeyEvent.KEYCODE_POWER: { result &= ~ACTION_PASS_TO_USER; if (down) { if (isScreenOn && !mPowerKeyTriggered && (event.getFlags() & KeyEvent.FLAG_FALLBACK) == 0) { mPowerKeyTriggered = true; mPowerKeyTime = event.getDownTime(); interceptScreenshotChord();① } ... // Assume this is called from the Handler thread. 
/**
 * Binds to SystemUI's TakeScreenshotService and asks it, via a one-shot
 * Messenger message (what=1), to capture and save the screen. The reply from
 * SystemUI triggers unbind; a 10 s timeout runnable is the safety net.
 * Guarded by mScreenshotLock so only one capture is in flight at a time.
 */
private void takeScreenshot() {
    synchronized (mScreenshotLock) {
        // A capture is already in flight; don't start a second one.
        if (mScreenshotConnection != null) {
            return;
        }
        // Explicit component: the actual capture/save UI lives in SystemUI.
        ComponentName cn = new ComponentName("com.android.systemui",
                "com.android.systemui.screenshot.TakeScreenshotService");④
        Intent intent = new Intent();
        intent.setComponent(cn);
        ServiceConnection conn = new ServiceConnection() {
            @Override
            public void onServiceConnected(ComponentName name, IBinder service) {
                synchronized (mScreenshotLock) {
                    // Stale callback from an abandoned bind attempt — ignore.
                    if (mScreenshotConnection != this) {
                        return;
                    }
                    Messenger messenger = new Messenger(service);
                    Message msg = Message.obtain(null, 1); // what=1: "take screenshot"
                    final ServiceConnection myConn = this;
                    // Reply handler: when SystemUI answers, unbind and cancel the timeout.
                    Handler h = new Handler(mHandler.getLooper()) {
                        @Override
                        public void handleMessage(Message msg) {
                            synchronized (mScreenshotLock) {
                                if (mScreenshotConnection == myConn) {
                                    mContext.unbindService(mScreenshotConnection);
                                    mScreenshotConnection = null;
                                    mHandler.removeCallbacks(mScreenshotTimeout);
                                }
                            }
                        }
                    };
                    msg.replyTo = new Messenger(h);
                    // arg1/arg2 tell SystemUI whether the status bar and
                    // navigation bar are currently visible (1 = visible).
                    msg.arg1 = msg.arg2 = 0;
                    if (mStatusBar != null && mStatusBar.isVisibleLw())
                        msg.arg1 = 1;
                    if (mNavigationBar != null && mNavigationBar.isVisibleLw())
                        msg.arg2 = 1;
                    try {
                        messenger.send(msg);
                    } catch (RemoteException e) {
                        // Best-effort: if SystemUI died, the timeout cleans up.
                    }
                }
            }
            @Override
            public void onServiceDisconnected(ComponentName name) {}
        };
        if (mContext.bindServiceAsUser(
                intent, conn, Context.BIND_AUTO_CREATE, UserHandle.CURRENT)) {
            mScreenshotConnection = conn;
            // Safety net: give up (unbind) if no reply within 10 seconds.
            mHandler.postDelayed(mScreenshotTimeout, 10000);
        }
    }
}
// View Code
/**
 * SystemUI-side bound service. Receives the "take screenshot" message
 * (what=1) sent by PhoneWindowManager.takeScreenshot(), delegates the actual
 * capture to GlobalScreenshot, and replies to the sender when done so the
 * requester can unbind.
 */
public class TakeScreenshotService extends Service {
    private static final String TAG = "TakeScreenshotService";
    // Lazily created below and kept in a static field — presumably so the
    // GlobalScreenshot instance survives rebinds; TODO confirm intent.
    private static GlobalScreenshot mScreenshot;
    private Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case 1: // screenshot request from the window manager policy
                    final Messenger callback = msg.replyTo;
                    if (mScreenshot == null) {
                        mScreenshot = new GlobalScreenshot(TakeScreenshotService.this);
                    }
                    // arg1/arg2 > 0 mean the status/navigation bars are visible.
                    // The Runnable is invoked by GlobalScreenshot when finished.
                    mScreenshot.takeScreenshot(new Runnable() {
                        @Override
                        public void run() {
                            // Notify the requester so it can unbind this service.
                            Message reply = Message.obtain(null, 1);
                            try {
                                callback.send(reply);
                            } catch (RemoteException e) {
                                // Requester is gone; nothing useful to do.
                            }
                        }
                    }, msg.arg1 > 0, msg.arg2 > 0);
            }
        }
    };
    @Override
    public IBinder onBind(Intent intent) {
        // Expose the handler above as a Messenger binder.
        return new Messenger(mHandler).getBinder();
    }
}
View CodeSaveImageInBackgroundTask(Context context, SaveImageInBackgroundData data, NotificationManager nManager, int nId) { Resources r = context.getResources(); // Prepare all the output metadata mImageTime = System.currentTimeMillis(); String imageDate = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss").format(new Date(mImageTime)); mImageFileName = String.format(SCREENSHOT_FILE_NAME_TEMPLATE, imageDate); mScreenshotDir = new File(Environment.getExternalStoragePublicDirectory( Environment.DIRECTORY_PICTURES), SCREENSHOTS_DIR_NAME); mImageFilePath = new File(mScreenshotDir, mImageFileName).getAbsolutePath();④ // Create the large notification icon mImageWidth = data.image.getWidth(); mImageHeight = data.image.getHeight(); int iconSize = data.iconSize; final int shortSide = mImageWidth < mImageHeight ? mImageWidth : mImageHeight; Bitmap preview = Bitmap.createBitmap(shortSide, shortSide, data.image.getConfig()); Canvas c = new Canvas(preview); Paint paint = new Paint(); ColorMatrix desat = new ColorMatrix(); desat.setSaturation(0.25f); paint.setColorFilter(new ColorMatrixColorFilter(desat)); Matrix matrix = new Matrix(); matrix.postTranslate((shortSide - mImageWidth) / 2, (shortSide - mImageHeight) / 2); c.drawBitmap(data.image, matrix, paint); c.drawColor(0x40FFFFFF); c.setBitmap(null); Bitmap croppedIcon = Bitmap.createScaledBitmap(preview, iconSize, iconSize, true); // Show the intermediate notification mTickerAddSpace = !mTickerAddSpace; mNotificationId = nId; mNotificationManager = nManager; mNotificationBuilder = new Notification.Builder(context) .setTicker(r.getString(R.string.screenshot_saving_ticker) + (mTickerAddSpace ? 
"" : "")) .setContentTitle(r.getString(R.string.screenshot_saving_title)) .setContentText(r.getString(R.string.screenshot_saving_text)) .setSmallIcon(R.drawable.stat_notify_image) .setWhen(System.currentTimeMillis()); mNotificationStyle = new Notification.BigPictureStyle() .bigPicture(preview); mNotificationBuilder.setStyle(mNotificationStyle); Notification n = mNotificationBuilder.build(); n.flags |= Notification.FLAG_NO_CLEAR; mNotificationManager.notify(nId, n); // On the tablet, the large icon makes the notification appear as if it is clickable (and // on small devices, the large icon is not shown) so defer showing the large icon until // we compose the final post-save notification below. mNotificationBuilder.setLargeIcon(croppedIcon); // But we still don't set it for the expanded view, allowing the smallIcon to show here. mNotificationStyle.bigLargeIcon(null); } /** * Creates a new worker thread and saves the screenshot to the media store. */ private void saveScreenshotInWorkerThread(Runnable finisher) { SaveImageInBackgroundData data = new SaveImageInBackgroundData(); data.context = mContext; data.image = mScreenBitmap; data.iconSize = mNotificationIconSize; data.finisher = finisher; if (mSaveInBgTask != null) { mSaveInBgTask.cancel(false); } mSaveInBgTask = new SaveImageInBackgroundTask(mContext, data, mNotificationManager, SCREENSHOT_NOTIFICATION_ID).execute(data);③ } /** * Takes a screenshot of the current display and shows an animation. */ void takeScreenshot(Runnable finisher, boolean statusBarVisible, boolean navBarVisible) { // We need to orient the screenshot correctly (and the Surface api seems to take screenshots // only in the natural orientation of the device :!) 
mDisplay.getRealMetrics(mDisplayMetrics); float[] dims = {mDisplayMetrics.widthPixels, mDisplayMetrics.heightPixels}; float degrees = getDegreesForRotation(mDisplay.getRotation()); boolean requiresRotation = (degrees > 0); if (requiresRotation) { // Get the dimensions of the device in its native orientation mDisplayMatrix.reset(); mDisplayMatrix.preRotate(-degrees); mDisplayMatrix.mapPoints(dims); dims[0] = Math.abs(dims[0]); dims[1] = Math.abs(dims[1]); } // Take the screenshot mScreenBitmap = SurfaceControl.screenshot((int) dims[0], (int) dims[1]);① if (mScreenBitmap == null) { notifyScreenshotError(mContext, mNotificationManager); finisher.run(); return; } if (requiresRotation) { // Rotate the screenshot to the current orientation Bitmap ss = Bitmap.createBitmap(mDisplayMetrics.widthPixels, mDisplayMetrics.heightPixels, Bitmap.Config.ARGB_8888); Canvas c = new Canvas(ss); c.translate(ss.getWidth() / 2, ss.getHeight() / 2); c.rotate(degrees); c.translate(-dims[0] / 2, -dims[1] / 2); c.drawBitmap(mScreenBitmap, 0, 0, null); c.setBitmap(null); // Recycle the previous bitmap mScreenBitmap.recycle(); mScreenBitmap = ss; } // Optimizations mScreenBitmap.setHasAlpha(false); mScreenBitmap.prepareToDraw(); // Start the post-screenshot animation startAnimation(finisher, mDisplayMetrics.widthPixels, mDisplayMetrics.heightPixels, statusBarVisible, navBarVisible); } /** * Starts the animation after taking the screenshot */ private void startAnimation(final Runnable finisher, int w, int h, boolean statusBarVisible, boolean navBarVisible) { // Add the view for the animation mScreenshotView.setImageBitmap(mScreenBitmap); mScreenshotLayout.requestFocus(); // Setup the animation with the screenshot just taken if (mScreenshotAnimation != null) { mScreenshotAnimation.end(); mScreenshotAnimation.removeAllListeners(); } mWindowManager.addView(mScreenshotLayout, mWindowLayoutParams); ValueAnimator screenshotDropInAnim = createScreenshotDropInAnimation(); ValueAnimator 
screenshotFadeOutAnim = createScreenshotDropOutAnimation(w, h, statusBarVisible, navBarVisible); mScreenshotAnimation = new AnimatorSet(); mScreenshotAnimation.playSequentially(screenshotDropInAnim, screenshotFadeOutAnim); mScreenshotAnimation.addListener(new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { // Save the screenshot once we have a bit of time now saveScreenshotInWorkerThread(finisher);② mWindowManager.removeView(mScreenshotLayout); // Clear any references to the bitmap mScreenBitmap = null; mScreenshotView.setImageBitmap(null); } }); mScreenshotLayout.post(new Runnable() { @Override public void run() { // Play the shutter sound to notify that we've taken a screenshot mCameraSound.play(MediaActionSound.SHUTTER_CLICK); mScreenshotView.setLayerType(View.LAYER_TYPE_HARDWARE, null); mScreenshotView.buildLayer(); mScreenshotAnimation.start(); } }); }
// View Code
/**
 * Like {@link SurfaceControl#screenshot(int, int, int, int)} but includes all
 * Surfaces in the screenshot.
 *
 * @param width The desired width of the returned bitmap; the raw
 * screen will be scaled down to this size.
 * @param height The desired height of the returned bitmap; the raw
 * screen will be scaled down to this size.
 * @return Returns a Bitmap containing the screen contents, or null
 * if an error occurs. Make sure to call Bitmap.recycle() as soon as
 * possible, once its content is not needed anymore.
 */
public static Bitmap screenshot(int width, int height) {
    // TODO: should take the display as a parameter
    IBinder displayToken = SurfaceControl.getBuiltInDisplay(
            SurfaceControl.BUILT_IN_DISPLAY_ID_MAIN);
    // minLayer/maxLayer of 0 are placeholders: allLayers=true makes the
    // native side capture every layer regardless of Z range.
    return nativeScreenshot(displayToken, width, height, 0, 0, true);
}
android_view_SurfaceControl.cpp
View Codestatic void nativeScreenshot(JNIEnv* env, jclass clazz, jobject displayTokenObj, jobject surfaceObj, jint width, jint height, jint minLayer, jint maxLayer, bool allLayers) { sp<IBinder> displayToken = ibinderForJavaObject(env, displayTokenObj); if (displayToken != NULL) { sp<Surface> consumer = android_view_Surface_getSurface(env, surfaceObj); if (consumer != NULL) { if (allLayers) { minLayer = 0; maxLayer = -1; } ScreenshotClient::capture( displayToken, consumer->getIGraphicBufferProducer(), width, height, uint32_t(minLayer), uint32_t(maxLayer)); } } } static JNINativeMethod sSurfaceControlMethods[] = { ... {"nativeScreenshot", "(Landroid/os/IBinder;IIIIZ)Landroid/graphics/Bitmap;", (void*)nativeScreenshotBitmap }, {"nativeScreenshot", "(Landroid/os/IBinder;Landroid/view/Surface;IIIIZ)V", (void*)nativeScreenshot }, ...
// View Code
/*
 * Client-side capture entry point: forwards the screenshot request to
 * SurfaceFlinger through the ISurfaceComposer binder interface.
 * Returns NO_INIT if the composer service is unavailable, otherwise the
 * status reported by SurfaceFlinger::captureScreen().
 */
status_t ScreenshotClient::capture(
        const sp<IBinder>& display,
        const sp<IGraphicBufferProducer>& producer,
        uint32_t reqWidth, uint32_t reqHeight,
        uint32_t minLayerZ, uint32_t maxLayerZ) {
    sp<ISurfaceComposer> s(ComposerService::getComposerService());
    if (s == NULL) return NO_INIT;
    // Last argument (isCpuConsumer) is false here: the GPU render path is
    // requested rather than the glReadPixels/CPU-consumer fallback.
    return s->captureScreen(display, producer, reqWidth, reqHeight,
            minLayerZ, maxLayerZ, false);
}
View Codestatus_t SurfaceFlinger::captureScreen(const sp<IBinder>& display, const sp<IGraphicBufferProducer>& producer, uint32_t reqWidth, uint32_t reqHeight, uint32_t minLayerZ, uint32_t maxLayerZ, bool isCpuConsumer) { if (CC_UNLIKELY(display == 0)) return BAD_VALUE; if (CC_UNLIKELY(producer == 0)) return BAD_VALUE; class MessageCaptureScreen : public MessageBase { SurfaceFlinger* flinger; sp<IBinder> display; sp<IGraphicBufferProducer> producer; uint32_t reqWidth, reqHeight; uint32_t minLayerZ,maxLayerZ; bool useReadPixels; status_t result; public: MessageCaptureScreen(SurfaceFlinger* flinger, const sp<IBinder>& display, const sp<IGraphicBufferProducer>& producer, uint32_t reqWidth, uint32_t reqHeight, uint32_t minLayerZ, uint32_t maxLayerZ, bool useReadPixels) : flinger(flinger), display(display), producer(producer), reqWidth(reqWidth), reqHeight(reqHeight), minLayerZ(minLayerZ), maxLayerZ(maxLayerZ), useReadPixels(useReadPixels), result(PERMISSION_DENIED) { } status_t getResult() const { return result; } virtual bool handler() { Mutex::Autolock _l(flinger->mStateLock); sp<const DisplayDevice> hw(flinger->getDisplayDevice(display)); if (!useReadPixels) { result = flinger->captureScreenImplLocked(hw, producer, reqWidth, reqHeight, minLayerZ, maxLayerZ); } else { result = flinger->captureScreenImplCpuConsumerLocked(hw, producer, reqWidth, reqHeight, minLayerZ, maxLayerZ); } static_cast<GraphicProducerWrapper*>(producer->asBinder().get())->exit(result); return true; } }; // make sure to process transactions before screenshots -- a transaction // might already be pending but scheduled for VSYNC; this guarantees we // will handle it before the screenshot. When VSYNC finally arrives // the scheduled transaction will be a no-op. If no transactions are // scheduled at this time, this will end-up being a no-op as well. 
mEventQueue.invalidateTransactionNow(); bool useReadPixels = false; if (isCpuConsumer) { bool formatSupportedBytBitmap = (mEGLNativeVisualId == HAL_PIXEL_FORMAT_RGBA_8888) || (mEGLNativeVisualId == HAL_PIXEL_FORMAT_RGBX_8888); if (formatSupportedBytBitmap == false) { // the pixel format we have is not compatible with // Bitmap.java, which is the likely client of this API, // so we just revert to glReadPixels() in that case. useReadPixels = true; } if (mGpuToCpuSupported == false) { // When we know the GL->CPU path works, we can call // captureScreenImplLocked() directly, instead of using the // glReadPixels() workaround. useReadPixels = true; } } // this creates a "fake" BBinder which will serve as a "fake" remote // binder to receive the marshaled calls and forward them to the // real remote (a BpGraphicBufferProducer) sp<GraphicProducerWrapper> wrapper = new GraphicProducerWrapper(producer); // the asInterface() call below creates our "fake" BpGraphicBufferProducer // which does the marshaling work forwards to our "fake remote" above. 
sp<MessageBase> msg = new MessageCaptureScreen(this, display, IGraphicBufferProducer::asInterface( wrapper ), reqWidth, reqHeight, minLayerZ, maxLayerZ, useReadPixels); status_t res = postMessageAsync(msg); if (res == NO_ERROR) { res = wrapper->waitForResponse(); } return res; } status_t SurfaceFlinger::captureScreenImplLocked( const sp<const DisplayDevice>& hw, const sp<IGraphicBufferProducer>& producer, uint32_t reqWidth, uint32_t reqHeight, uint32_t minLayerZ, uint32_t maxLayerZ) { ATRACE_CALL(); // get screen geometry const uint32_t hw_w = hw->getWidth(); const uint32_t hw_h = hw->getHeight(); // if we have secure windows on this display, never allow the screen capture if (hw->getSecureLayerVisible()) { ALOGW("FB is protected: PERMISSION_DENIED"); return PERMISSION_DENIED; } if ((reqWidth > hw_w) || (reqHeight > hw_h)) { ALOGE("size mismatch (%d, %d) > (%d, %d)", reqWidth, reqHeight, hw_w, hw_h); return BAD_VALUE; } reqWidth = (!reqWidth) ? hw_w : reqWidth; reqHeight = (!reqHeight) ? hw_h : reqHeight; // Create a surface to render into sp<Surface> surface = new Surface(producer); ANativeWindow* const window = surface.get(); // set the buffer size to what the user requested native_window_set_buffers_user_dimensions(window, reqWidth, reqHeight); // and create the corresponding EGLSurface EGLSurface eglSurface = eglCreateWindowSurface( mEGLDisplay, mEGLConfig, window, NULL); if (eglSurface == EGL_NO_SURFACE) { ALOGE("captureScreenImplLocked: eglCreateWindowSurface() failed 0x%4x", eglGetError()); return BAD_VALUE; } if (!eglMakeCurrent(mEGLDisplay, eglSurface, eglSurface, mEGLContext)) { ALOGE("captureScreenImplLocked: eglMakeCurrent() failed 0x%4x", eglGetError()); eglDestroySurface(mEGLDisplay, eglSurface); return BAD_VALUE; } renderScreenImplLocked(hw, reqWidth, reqHeight, minLayerZ, maxLayerZ, false); // and finishing things up... 
if (eglSwapBuffers(mEGLDisplay, eglSurface) != EGL_TRUE) { ALOGE("captureScreenImplLocked: eglSwapBuffers() failed 0x%4x", eglGetError()); eglDestroySurface(mEGLDisplay, eglSurface); return BAD_VALUE; } eglDestroySurface(mEGLDisplay, eglSurface); return NO_ERROR; } status_t SurfaceFlinger::captureScreenImplCpuConsumerLocked( const sp<const DisplayDevice>& hw, const sp<IGraphicBufferProducer>& producer, uint32_t reqWidth, uint32_t reqHeight, uint32_t minLayerZ, uint32_t maxLayerZ) { ATRACE_CALL(); if (!GLExtensions::getInstance().haveFramebufferObject()) { return INVALID_OPERATION; } // get screen geometry const uint32_t hw_w = hw->getWidth(); const uint32_t hw_h = hw->getHeight(); // if we have secure windows on this display, never allow the screen capture if (hw->getSecureLayerVisible()) { ALOGW("FB is protected: PERMISSION_DENIED"); return PERMISSION_DENIED; } if ((reqWidth > hw_w) || (reqHeight > hw_h)) { ALOGE("size mismatch (%d, %d) > (%d, %d)", reqWidth, reqHeight, hw_w, hw_h); return BAD_VALUE; } reqWidth = (!reqWidth) ? hw_w : reqWidth; reqHeight = (!reqHeight) ? hw_h : reqHeight; GLuint tname; glGenRenderbuffersOES(1, &tname); glBindRenderbufferOES(GL_RENDERBUFFER_OES, tname); glRenderbufferStorageOES(GL_RENDERBUFFER_OES, GL_RGBA8_OES, reqWidth, reqHeight); // create a FBO GLuint name; glGenFramebuffersOES(1, &name); glBindFramebufferOES(GL_FRAMEBUFFER_OES, name); glFramebufferRenderbufferOES(GL_FRAMEBUFFER_OES, GL_COLOR_ATTACHMENT0_OES, GL_RENDERBUFFER_OES, tname); GLenum status = glCheckFramebufferStatusOES(GL_FRAMEBUFFER_OES); status_t result = NO_ERROR; if (status == GL_FRAMEBUFFER_COMPLETE_OES) { renderScreenImplLocked(hw, reqWidth, reqHeight, minLayerZ, maxLayerZ, true); // Below we render the screenshot into the // CpuConsumer using glReadPixels from our FBO. // Some older drivers don't support the GL->CPU path so we // have to wrap it with a CPU->CPU path, which is what // glReadPixels essentially is. 
sp<Surface> sur = new Surface(producer); ANativeWindow* window = sur.get(); if (native_window_api_connect(window, NATIVE_WINDOW_API_CPU) == NO_ERROR) { int err = 0; err = native_window_set_buffers_dimensions(window, reqWidth, reqHeight); err |= native_window_set_buffers_format(window, HAL_PIXEL_FORMAT_RGBA_8888); err |= native_window_set_usage(window, GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN); if (err == NO_ERROR) { ANativeWindowBuffer* buffer; if (native_window_dequeue_buffer_and_wait(window, &buffer) == NO_ERROR) { sp<GraphicBuffer> buf = static_cast<GraphicBuffer*>(buffer); void* vaddr; if (buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, &vaddr) == NO_ERROR) { glReadPixels(0, 0, buffer->stride, reqHeight, GL_RGBA, GL_UNSIGNED_BYTE, vaddr); buf->unlock(); } window->queueBuffer(window, buffer, -1); } } native_window_api_disconnect(window, NATIVE_WINDOW_API_CPU); } } else { ALOGE("got GL_FRAMEBUFFER_COMPLETE_OES while taking screenshot"); result = INVALID_OPERATION; } // back to main framebuffer glBindFramebufferOES(GL_FRAMEBUFFER_OES, 0); glDeleteRenderbuffersOES(1, &tname); glDeleteFramebuffersOES(1, &name); DisplayDevice::setViewportAndProjection(hw); return result; }
2.基于framebuffer显存设备/dev/graphics/fb0截屏
a.自带工具screencap:screencap.cpp
// View Code
/*
 * screencap command-line tool.
 * Usage: screencap [-p] [-d display-id] [FILENAME]
 * Tries SurfaceFlinger (ScreenshotClient) first; if that fails, falls back to
 * mmap-ing the raw framebuffer device /dev/graphics/fb0. Output is either PNG
 * (-p or a *.png filename) or a raw stream of width/height/format headers
 * followed by pixel rows.
 */
int main(int argc, char** argv) {
    ProcessState::self()->startThreadPool();
    const char* pname = argv[0];
    bool png = false;
    int32_t displayId = DEFAULT_DISPLAY_ID;
    int c;
    // Parse options: -p (PNG), -d <id> (display), -h/-? (usage).
    while ((c = getopt(argc, argv, "phd:")) != -1) {
        switch (c) {
            case 'p':
                png = true;
                break;
            case 'd':
                displayId = atoi(optarg);
                break;
            case '?':
            case 'h':
                usage(pname);
                return 1;
        }
    }
    argc -= optind;
    argv += optind;
    // Destination: stdout when no filename, otherwise the given file.
    int fd = -1;
    if (argc == 0) {
        fd = dup(STDOUT_FILENO);
    } else if (argc == 1) {
        const char* fn = argv[0];
        fd = open(fn, O_WRONLY | O_CREAT | O_TRUNC, 0664);
        if (fd == -1) {
            fprintf(stderr, "Error opening file: %s (%s)\n", fn, strerror(errno));
            return 1;
        }
        // A ".png" extension implies PNG output even without -p.
        const int len = strlen(fn);
        if (len >= 4 && 0 == strcmp(fn+len-4, ".png")) {
            png = true;
        }
    }
    if (fd == -1) {
        usage(pname);
        return 1;
    }
    void const* mapbase = MAP_FAILED;   // framebuffer mmap base (fallback path)
    ssize_t mapsize = -1;
    void const* base = 0;               // start of pixel data, whichever path
    uint32_t w, s, h, f;                // width, stride (pixels), height, format
    size_t size = 0;
    ScreenshotClient screenshot;
    // Preferred path: ask SurfaceFlinger for a composited screenshot.
    sp<IBinder> display = SurfaceComposerClient::getBuiltInDisplay(displayId);
    if (display != NULL && screenshot.update(display) == NO_ERROR) {
        base = screenshot.getPixels();
        w = screenshot.getWidth();
        h = screenshot.getHeight();
        s = screenshot.getStride();
        f = screenshot.getFormat();
        size = screenshot.getSize();
    } else {
        // Fallback path: read the raw framebuffer device directly.
        const char* fbpath = "/dev/graphics/fb0";
        int fb = open(fbpath, O_RDONLY);
        if (fb >= 0) {
            struct fb_var_screeninfo vinfo;
            if (ioctl(fb, FBIOGET_VSCREENINFO, &vinfo) == 0) {
                uint32_t bytespp;
                if (vinfoToPixelFormat(vinfo, &bytespp, &f) == NO_ERROR) {
                    // Skip to the currently displayed page (x/y offsets).
                    size_t offset =
                        (vinfo.xoffset + vinfo.yoffset*vinfo.xres) * bytespp;
                    w = vinfo.xres;
                    h = vinfo.yres;
                    s = vinfo.xres;  // stride assumed equal to visible width
                    size = w*h*bytespp;
                    mapsize = offset + size;
                    mapbase = mmap(0, mapsize, PROT_READ, MAP_PRIVATE, fb, 0);
                    if (mapbase != MAP_FAILED) {
                        base = (void const *)((char const *)mapbase + offset);
                    }
                }
            }
            close(fb);
        }
    }
    if (base) {
        if (png) {
            // Encode via Skia; row bytes = stride * bytes-per-pixel.
            SkBitmap b;
            b.setConfig(flinger2skia(f), w, h, s*bytesPerPixel(f));
            b.setPixels((void*)base);
            SkDynamicMemoryWStream stream;
            SkImageEncoder::EncodeStream(&stream, b,
                    SkImageEncoder::kPNG_Type, SkImageEncoder::kDefaultQuality);
            SkData* streamData = stream.copyToData();
            write(fd, streamData->data(), streamData->size());
            streamData->unref();
        } else {
            // Raw format: 3 little-endian 32-bit headers (w, h, format),
            // then h rows of w pixels each (stride skipped between rows).
            // This is the stream adb's framebuffer service consumes.
            write(fd, &w, 4);
            write(fd, &h, 4);
            write(fd, &f, 4);
            size_t Bpp = bytesPerPixel(f);
            for (size_t y=0 ; y<h ; y++) {
                write(fd, base, w*Bpp);
                base = (void *)((char *)base + s*Bpp);
            }
        }
    }
    close(fd);
    if (mapbase != MAP_FAILED) {
        munmap((void *)mapbase, mapsize);
    }
    return 0;
}
b.DDMS
// View Code
/**
 * Captures a new image from the device, and display it.
 */
private void updateDeviceImage(Shell shell) {
    // Busy indicator while the ADB round-trip is in flight.
    mBusyLabel.setText("Capturing..."); // no effect
    shell.setCursor(shell.getDisplay().getSystemCursor(SWT.CURSOR_WAIT));
    // Blocking call: pulls the raw framebuffer over adb (null on failure).
    mRawImage = getDeviceImage();①
    updateImageDisplay(shell);
}
/**
 * Updates the display with {@link #mRawImage}.
 * Shows a blue placeholder (and disables Save) when no image is available;
 * otherwise converts the raw RGB data into an SWT Image using the channel
 * masks reported by the device.
 * @param shell
 */
private void updateImageDisplay(Shell shell) {
    Image image;
    if (mRawImage == null) {
        Display display = shell.getDisplay();
        image = ImageLoader.createPlaceHolderArt(
                display, 320, 240, display.getSystemColor(SWT.COLOR_BLUE));
        mSave.setEnabled(false);
        mBusyLabel.setText("Screen not available");
    } else {
        // convert raw data to an Image.
        PaletteData palette = new PaletteData(
                mRawImage.getRedMask(),
                mRawImage.getGreenMask(),
                mRawImage.getBlueMask());
        ImageData imageData = new ImageData(mRawImage.width, mRawImage.height,
                mRawImage.bpp, palette, 1, mRawImage.data);
        image = new Image(getParent().getDisplay(), imageData);
        mSave.setEnabled(true);
        mBusyLabel.setText("Captured image:");
    }
    mImageLabel.setImage(image);
    mImageLabel.pack();
    shell.pack();
    // there's no way to restore old cursor; assume it's ARROW
    shell.setCursor(shell.getDisplay().getSystemCursor(SWT.CURSOR_ARROW));
}
/**
 * Grabs an image from an ADB-connected device and returns it as a {@link RawImage}.
 * Returns null (after logging a warning) on any adb/IO/timeout failure, which
 * updateImageDisplay() treats as "screen not available".
 */
private RawImage getDeviceImage() {
    try {
        return mDevice.getScreenshot();②
    } catch (IOException ioe) {
        Log.w("ddms", "Unable to get frame buffer: " + ioe.getMessage());
        return null;
    } catch (TimeoutException e) {
        Log.w("ddms", "Unable to get frame buffer: timeout ");
        return null;
    } catch (AdbCommandRejectedException e) {
        Log.w("ddms", "Unable to get frame buffer: " + e.getMessage());
        return null;
    }
}
// View Code
/**
 * Fetches a raw framebuffer snapshot of this device by issuing the
 * "framebuffer:" service request to the local adb server.
 */
@Override
public RawImage getScreenshot()
        throws TimeoutException, AdbCommandRejectedException, IOException {
    return AdbHelper.getFrameBuffer(AndroidDebugBridge.getSocketAddress(), this);③
}
// View Code
/**
 * Retrieve the frame buffer from the device.
 *
 * Protocol (over the adb server socket): send "framebuffer:", read a 4-byte
 * little-endian version, then a version-dependent header describing the pixel
 * layout, then send a 1-byte nudge, then read the raw pixel data.
 *
 * @throws TimeoutException in case of timeout on the connection.
 * @throws AdbCommandRejectedException if adb rejects the command
 * @throws IOException in case of I/O error on the connection.
 */
static RawImage getFrameBuffer(InetSocketAddress adbSockAddr, Device device)
        throws TimeoutException, AdbCommandRejectedException, IOException {
    RawImage imageParams = new RawImage();
    byte[] request = formAdbRequest("framebuffer:");④ //$NON-NLS-1$④
    byte[] nudge = { 0 };
    byte[] reply;
    SocketChannel adbChan = null;
    try {
        adbChan = SocketChannel.open(adbSockAddr);
        adbChan.configureBlocking(false);
        // if the device is not -1, then we first tell adb we're looking to talk
        // to a specific device
        setDevice(adbChan, device);
        write(adbChan, request);
        AdbResponse resp = readAdbResponse(adbChan, false /* readDiagString */);
        if (!resp.okay) {
            throw new AdbCommandRejectedException(resp.message);
        }
        // first the protocol version.
        reply = new byte[4];
        read(adbChan, reply);
        ByteBuffer buf = ByteBuffer.wrap(reply);
        buf.order(ByteOrder.LITTLE_ENDIAN);
        int version = buf.getInt();
        // get the header size (this is a count of int)
        int headerSize = RawImage.getHeaderSize(version);
        // read the header
        reply = new byte[headerSize * 4];
        read(adbChan, reply);
        buf = ByteBuffer.wrap(reply);
        buf.order(ByteOrder.LITTLE_ENDIAN);
        // fill the RawImage with the header
        if (!imageParams.readHeader(version, buf)) {
            Log.e("Screenshot", "Unsupported protocol: " + version);
            return null;
        }
        Log.d("ddms", "image params: bpp=" + imageParams.bpp + ", size="
                + imageParams.size + ", width=" + imageParams.width
                + ", height=" + imageParams.height);
        // Nudge byte tells the device side to start streaming the pixels.
        write(adbChan, nudge);
        reply = new byte[imageParams.size];
        read(adbChan, reply);
        imageParams.data = reply;
    } finally {
        // Channel is closed on every exit path, including early return null.
        if (adbChan != null) {
            adbChan.close();
        }
    }
    return imageParams;
}
View Codestatic int create_service_thread(void (*func)(int, void *), void *cookie) { stinfo *sti; adb_thread_t t; int s[2]; if(adb_socketpair(s)) { printf("cannot create service socket pair\n"); return -1; } sti = malloc(sizeof(stinfo)); if(sti == 0) fatal("cannot allocate stinfo"); sti->func = func; sti->cookie = cookie; sti->fd = s[1]; if(adb_thread_create( &t, service_bootstrap_func, sti)){ free(sti); adb_close(s[0]); adb_close(s[1]); printf("cannot create service thread\n"); return -1; } D("service thread started, %d:%d\n",s[0], s[1]); return s[0]; } int service_to_fd(const char *name) { ... else if(!strncmp(name, "framebuffer:", 12)) { ret = create_service_thread(framebuffer_service, 0);⑤ ...
// View Code
/*
 * Win32 flavor of adb's thread-creation shim: starts `func(arg)` on a new
 * thread via the CRT _beginthread(). Returns 0 on success, -1 on failure
 * (matching the pthread-based implementation on other hosts).
 */
static __inline__ int adb_thread_create( adb_thread_t *thread,
        adb_thread_func_t func, void* arg)
{
    /* 0 = default stack size; _beginthread returns (unsigned)-1L on error. */
    thread->tid = _beginthread( (win_thread_func_t)func, 0, arg );
    if (thread->tid == (unsigned)-1L) {
        return -1;
    }
    return 0;
}
// View Code
/*
 * adbd-side handler for the "framebuffer:" service (used by DDMS/screenshot2).
 * Forks and execs the `screencap` tool with its stdout redirected into a pipe,
 * reads screencap's raw header (width, height, format), translates the format
 * into a DDMS fbinfo header, then streams the pixel bytes to the client fd.
 */
void framebuffer_service(int fd, void *cookie)
{
    struct fbinfo fbinfo;
    unsigned int i;
    char buf[640];
    int fd_screencap;
    int w, h, f;
    int fds[2];

    /* NOTE(review): if pipe() fails, this goto jumps past pid's
     * initialization, yet the done: label calls waitpid(pid, ...) — an
     * uninitialized read; it also closes fds[] that were never opened. */
    if (pipe(fds) < 0) goto done;

    pid_t pid = fork();
    if (pid < 0) goto done;

    if (pid == 0) {
        /* Child: screencap's raw output (w,h,f + pixels) goes into the pipe. */
        dup2(fds[1], STDOUT_FILENO);
        close(fds[0]);
        close(fds[1]);
        const char* command = "screencap";⑥
        const char *args[2] = {command, NULL};
        execvp(command, (char**)args);
        exit(1);
    }

    fd_screencap = fds[0];

    /* read w, h & format */
    if(readx(fd_screencap, &w, 4)) goto done;
    if(readx(fd_screencap, &h, 4)) goto done;
    if(readx(fd_screencap, &f, 4)) goto done;

    fbinfo.version = DDMS_RAWIMAGE_VERSION;
    /* see hardware/hardware.h */
    /* Map the PixelFormat code from screencap to channel offsets/lengths. */
    switch (f) {
    case 1: /* RGBA_8888 */
        fbinfo.bpp = 32;
        fbinfo.size = w * h * 4;
        fbinfo.width = w;
        fbinfo.height = h;
        fbinfo.red_offset = 0;
        fbinfo.red_length = 8;
        fbinfo.green_offset = 8;
        fbinfo.green_length = 8;
        fbinfo.blue_offset = 16;
        fbinfo.blue_length = 8;
        fbinfo.alpha_offset = 24;
        fbinfo.alpha_length = 8;
        break;
    case 2: /* RGBX_8888 */
        fbinfo.bpp = 32;
        fbinfo.size = w * h * 4;
        fbinfo.width = w;
        fbinfo.height = h;
        fbinfo.red_offset = 0;
        fbinfo.red_length = 8;
        fbinfo.green_offset = 8;
        fbinfo.green_length = 8;
        fbinfo.blue_offset = 16;
        fbinfo.blue_length = 8;
        fbinfo.alpha_offset = 24;
        fbinfo.alpha_length = 0;
        break;
    case 3: /* RGB_888 */
        fbinfo.bpp = 24;
        fbinfo.size = w * h * 3;
        fbinfo.width = w;
        fbinfo.height = h;
        fbinfo.red_offset = 0;
        fbinfo.red_length = 8;
        fbinfo.green_offset = 8;
        fbinfo.green_length = 8;
        fbinfo.blue_offset = 16;
        fbinfo.blue_length = 8;
        fbinfo.alpha_offset = 24;
        fbinfo.alpha_length = 0;
        break;
    case 4: /* RGB_565 */
        fbinfo.bpp = 16;
        fbinfo.size = w * h * 2;
        fbinfo.width = w;
        fbinfo.height = h;
        fbinfo.red_offset = 11;
        fbinfo.red_length = 5;
        fbinfo.green_offset = 5;
        fbinfo.green_length = 6;
        fbinfo.blue_offset = 0;
        fbinfo.blue_length = 5;
        fbinfo.alpha_offset = 0;
        fbinfo.alpha_length = 0;
        break;
    case 5: /* BGRA_8888 */
        fbinfo.bpp = 32;
        fbinfo.size = w * h * 4;
        fbinfo.width = w;
        fbinfo.height = h;
        fbinfo.red_offset = 16;
        fbinfo.red_length = 8;
        fbinfo.green_offset = 8;
        fbinfo.green_length = 8;
        fbinfo.blue_offset = 0;
        fbinfo.blue_length = 8;
        fbinfo.alpha_offset = 24;
        fbinfo.alpha_length = 8;
        break;
    default:
        goto done;  /* unknown format: abort */
    }

    /* write header */
    if(writex(fd, &fbinfo, sizeof(fbinfo))) goto done;

    /* write data */
    /* NOTE(review): when fbinfo.size is not a multiple of sizeof(buf), this
     * loop already attempts a full-size final chunk and the remainder
     * transfer below moves size % sizeof(buf) more bytes — verify this
     * arithmetic against the byte count screencap actually produces. */
    for(i = 0; i < fbinfo.size; i += sizeof(buf)) {
        if(readx(fd_screencap, buf, sizeof(buf))) goto done;
        if(writex(fd, buf, sizeof(buf))) goto done;
    }
    if(readx(fd_screencap, buf, fbinfo.size % sizeof(buf))) goto done;
    if(writex(fd, buf, fbinfo.size % sizeof(buf))) goto done;

done:
    /* Reap the child and release both pipe ends and the client socket. */
    TEMP_FAILURE_RETRY(waitpid(pid, NULL, 0));
    close(fds[0]);
    close(fds[1]);
    close(fd);
}
c.screenshot2:Screenshot.java
// View Code
/*
 * Grab an image from an ADB-connected device.
 * Pulls the raw framebuffer via ddmlib, optionally rotates it to landscape,
 * converts it pixel-by-pixel to an ARGB BufferedImage, and writes it as PNG
 * to the given path. Exits the process (via printAndExit) on adb failure.
 */
private static void getDeviceImage(IDevice device, String filepath, boolean landscape)
        throws IOException {
    RawImage rawImage;
    try {
        rawImage = device.getScreenshot();
    } catch (TimeoutException e) {
        printAndExit("Unable to get frame buffer: timeout", true /* terminate */);
        return;
    } catch (Exception ioe) {
        printAndExit("Unable to get frame buffer: " + ioe.getMessage(), true /* terminate */);
        return;
    }

    // device/adb not available?
    if (rawImage == null)
        return;

    if (landscape) {
        rawImage = rawImage.getRotated();
    }

    // convert raw data to an Image
    BufferedImage image = new BufferedImage(rawImage.width, rawImage.height,
            BufferedImage.TYPE_INT_ARGB);

    int index = 0;
    int IndexInc = rawImage.bpp >> 3;  // bytes per pixel
    for (int y = 0 ; y < rawImage.height ; y++) {
        for (int x = 0 ; x < rawImage.width ; x++) {
            int value = rawImage.getARGB(index);
            index += IndexInc;
            image.setRGB(x, y, value);
        }
    }

    if (!ImageIO.write(image, "png", new File(filepath))) {
        throw new IOException("Failed to find png writer");
    }
}
3.robotium:ScreenshotTaker.java
View Code/** * Takes a screenshot and saves it in "/sdcard/Robotium-Screenshots/". * Requires write permission (android.permission.WRITE_EXTERNAL_STORAGE) in AndroidManifest.xml of the application under test. * * @param view the view to take screenshot of * @param name the name to give the screenshot image * @param quality the compression rate. From 0 (compress for lowest size) to 100 (compress for maximum quality). */ public void takeScreenshot(final String name, final int quality) { View decorView = getScreenshotView(); if(decorView == null) return; initScreenShotSaver(); ScreenshotRunnable runnable = new ScreenshotRunnable(decorView, name, quality); activityUtils.getCurrentActivity(false).runOnUiThread(runnable); } /** * Gets the proper view to use for a screenshot. */ private View getScreenshotView() { View decorView = viewFetcher.getRecentDecorView(viewFetcher.getWindowDecorViews()); final long endTime = SystemClock.uptimeMillis() + Timeout.getSmallTimeout(); while (decorView == null) { final boolean timedOut = SystemClock.uptimeMillis() > endTime; if (timedOut){ return null; } sleeper.sleepMini(); decorView = viewFetcher.getRecentDecorView(viewFetcher.getWindowDecorViews()); } wrapAllGLViews(decorView); return decorView; } /** * Extract and wrap the all OpenGL ES Renderer. 
*/ private void wrapAllGLViews(View decorView) { ArrayList<GLSurfaceView> currentViews = viewFetcher.getCurrentViews(GLSurfaceView.class, decorView); final CountDownLatch latch = new CountDownLatch(currentViews.size()); for (GLSurfaceView glView : currentViews) { Object renderContainer = new Reflect(glView).field("mGLThread") .type(GLSurfaceView.class).out(Object.class); Renderer renderer = new Reflect(renderContainer).field("mRenderer").out(Renderer.class); if (renderer == null) { renderer = new Reflect(glView).field("mRenderer").out(Renderer.class); renderContainer = glView; } if (renderer == null) { latch.countDown(); continue; } if (renderer instanceof GLRenderWrapper) { GLRenderWrapper wrapper = (GLRenderWrapper) renderer; wrapper.setTakeScreenshot(); wrapper.setLatch(latch); } else { GLRenderWrapper wrapper = new GLRenderWrapper(glView, renderer, latch); new Reflect(renderContainer).field("mRenderer").in(wrapper); } } try { latch.await(); } catch (InterruptedException ex) { ex.printStackTrace(); } } /** * Returns a bitmap of a given WebView. * * @param webView the webView to save a bitmap from * @return a bitmap of the given web view * */ private Bitmap getBitmapOfWebView(final WebView webView){ Picture picture = webView.capturePicture(); Bitmap b = Bitmap.createBitmap( picture.getWidth(), picture.getHeight(), Bitmap.Config.ARGB_8888); Canvas c = new Canvas(b); picture.draw(c); return b; } /** * Returns a bitmap of a given View. 
* * @param view the view to save a bitmap from * @return a bitmap of the given view * */ private Bitmap getBitmapOfView(final View view){ view.destroyDrawingCache(); view.buildDrawingCache(false); Bitmap orig = view.getDrawingCache(); Bitmap.Config config = null; if(orig != null) { config = orig.getConfig(); } if(config == null) { config = Bitmap.Config.ARGB_8888; } Bitmap b = orig.copy(config, false); view.destroyDrawingCache(); return b; } /** * Here we have a Runnable which is responsible for taking the actual screenshot, * and then posting the bitmap to a Handler which will save it. * * This Runnable is run on the UI thread. */ private class ScreenshotRunnable implements Runnable { private View view; private String name; private int quality; public ScreenshotRunnable(final View _view, final String _name, final int _quality) { view = _view; name = _name; quality = _quality; } public void run() { if(view !=null){ Bitmap b; if(view instanceof WebView){ b = getBitmapOfWebView((WebView) view); } else{ b = getBitmapOfView(view); } if(b != null) screenShotSaver.saveBitmap(b, name, quality); else Log.d(LOG_TAG, "NULL BITMAP!!"); } } } /** * This class is a Handler which deals with saving the screenshots on a separate thread. * * The screenshot logic by necessity has to run on the ui thread. However, in practice * it seems that saving a screenshot (with quality 100) takes approx twice as long * as taking it in the first place. * * Saving the screenshots in a separate thread like this will thus make the screenshot * process approx 3x faster as far as the main thread is concerned. * */ private class ScreenShotSaver extends Handler { public ScreenShotSaver(HandlerThread thread) { super(thread.getLooper()); } /** * This method posts a Bitmap with meta-data to the Handler queue. * * @param bitmap the bitmap to save * @param name the name of the file * @param quality the compression rate. From 0 (compress for lowest size) to 100 (compress for maximum quality). 
*/ public void saveBitmap(Bitmap bitmap, String name, int quality) { Message message = this.obtainMessage(); message.arg1 = quality; message.obj = bitmap; message.getData().putString("name", name); this.sendMessage(message); } /** * Here we process the Handler queue and save the bitmaps. * * @param message A Message containing the bitmap to save, and some metadata. */ public void handleMessage(Message message) { String name = message.getData().getString("name"); int quality = message.arg1; Bitmap b = (Bitmap)message.obj; if(b != null) { saveFile(name, b, quality); b.recycle(); } else { Log.d(LOG_TAG, "NULL BITMAP!!"); } } /** * Saves a file. * * @param name the name of the file * @param b the bitmap to save * @param quality the compression rate. From 0 (compress for lowest size) to 100 (compress for maximum quality). * */ private void saveFile(String name, Bitmap b, int quality){ FileOutputStream fos = null; String fileName = getFileName(name); File directory = new File(Environment.getExternalStorageDirectory() + "/Robotium-Screenshots/"); directory.mkdir(); File fileToSave = new File(directory,fileName); try { fos = new FileOutputStream(fileToSave); if (b.compress(Bitmap.CompressFormat.JPEG, quality, fos) == false) Log.d(LOG_TAG, "Compress/Write failed"); fos.flush(); fos.close(); } catch (Exception e) { Log.d(LOG_TAG, "Can't save the screenshot! Requires write permission (android.permission.WRITE_EXTERNAL_STORAGE) in AndroidManifest.xml of the application under test."); e.printStackTrace(); } } }
View Code@Override public Picture capturePicture() { if (mNativeClass == 0) return null; Picture result = new Picture(); nativeCopyBaseContentToPicture(result); return result; }
// JNI entry point: unwraps the Java android.graphics.Picture into its native
// SkPicture and forwards to the native WebView's copyBaseContentToPicture().
static void nativeCopyBaseContentToPicture(JNIEnv *env, jobject obj, jobject pict)
{
    SkPicture* picture = GraphicsJNI::getNativePicture(env, pict);
    GET_NATIVE_VIEW(env, obj)->copyBaseContentToPicture(picture);
}

// Records the base layer's content into the given SkPicture.
// Silently does nothing when there is no base layer or it has no content.
void copyBaseContentToPicture(SkPicture* picture)
{
    if (!m_baseLayer || !m_baseLayer->content())
        return;

    LayerContent* content = m_baseLayer->content();
    // Start recording at the content's full size; path bounds are used for
    // clipping during recording.
    SkCanvas* canvas = picture->beginRecording(content->width(), content->height(),
            SkPicture::kUsePathBoundsForClip_RecordingFlag);

    // clear the BaseLayerAndroid's previous matrix (set at each draw)
    SkMatrix baseMatrix;
    baseMatrix.reset();
    m_baseLayer->setMatrix(baseMatrix);

    m_baseLayer->draw(canvas, 0);

    picture->endRecording();
}
View CodeSkPicture* GraphicsJNI::getNativePicture(JNIEnv* env, jobject picture) { SkASSERT(env); SkASSERT(picture); SkASSERT(env->IsInstanceOf(picture, gPicture_class)); SkPicture* p = (SkPicture*)env->GetIntField(picture, gPicture_nativeInstanceID); SkASSERT(p); return p; }
Cafe 的 SnapshotHelper.java 使用了与上述相同的调用方法(只是没有 GL wrapper 这一步);
同样,也可以在去除状态栏之后再对 Android 设备进行屏幕截图。
4.UiDevice.takeScreenshot (File storePath)/takeScreenshot (File storePath, float scale, int quality)
/**
 * Take a screenshot of current window and store it as PNG
 *
 * Default scale of 1.0f (original size) and 90% quality is used
 * The screenshot is adjusted per screen rotation
 *
 * @param storePath where the PNG should be written to
 * @return true if screen shot is created successfully, false otherwise
 * @since API Level 17
 */
public boolean takeScreenshot(File storePath) {
    Tracer.trace(storePath);
    // Delegates to the two-argument overload with scale 1.0f and quality 90. ①
    return takeScreenshot(storePath, 1.0f, 90);
}

/**
 * Take a screenshot of current window and store it as PNG
 *
 * The screenshot is adjusted per screen rotation
 *
 * @param storePath where the PNG should be written to
 * @param scale scale the screenshot down if needed; 1.0f for original size
 * @param quality quality of the PNG compression; range: 0-100
 * @return true if screen shot is created successfully, false otherwise
 * @since API Level 17
 */
public boolean takeScreenshot(File storePath, float scale, int quality) {
    Tracer.trace(storePath, scale, quality);
    // NOTE(review): the scale parameter is accepted and traced but never
    // forwarded — the bridge call below receives only storePath and quality,
    // so the screenshot is always saved at original size. ②
    return getAutomatorBridge().takeScreenshot(storePath, quality);
}
View Codepublic boolean takeScreenshot(File storePath, int quality) { Bitmap screenshot = mUiAutomation.takeScreenshot();③ if (screenshot == null) { return false; } BufferedOutputStream bos = null; try { bos = new BufferedOutputStream(new FileOutputStream(storePath)); if (bos != null) { screenshot.compress(Bitmap.CompressFormat.PNG, quality, bos); bos.flush(); } } catch (IOException ioe) { Log.e(LOG_TAG, "failed to save screen shot to file", ioe); return false; } finally { if (bos != null) { try { bos.close(); } catch (IOException ioe) { /* ignore */ } } screenshot.recycle(); } return true; }
View Code/** * Takes a screenshot. * * @return The screenshot bitmap on success, null otherwise. */ public Bitmap takeScreenshot() { synchronized (mLock) { throwIfNotConnectedLocked(); } Display display = DisplayManagerGlobal.getInstance() .getRealDisplay(Display.DEFAULT_DISPLAY); Point displaySize = new Point(); display.getRealSize(displaySize); final int displayWidth = displaySize.x; final int displayHeight = displaySize.y; final float screenshotWidth; final float screenshotHeight; final int rotation = display.getRotation(); switch (rotation) { case ROTATION_FREEZE_0: { screenshotWidth = displayWidth; screenshotHeight = displayHeight; } break; case ROTATION_FREEZE_90: { screenshotWidth = displayHeight; screenshotHeight = displayWidth; } break; case ROTATION_FREEZE_180: { screenshotWidth = displayWidth; screenshotHeight = displayHeight; } break; case ROTATION_FREEZE_270: { screenshotWidth = displayHeight; screenshotHeight = displayWidth; } break; default: { throw new IllegalArgumentException("Invalid rotation: " + rotation); } } // Take the screenshot Bitmap screenShot = null; try { // Calling out without a lock held. screenShot = mUiAutomationConnection.takeScreenshot((int) screenshotWidth, (int) screenshotHeight);④ if (screenShot == null) { return null; } } catch (RemoteException re) { Log.e(LOG_TAG, "Error while taking screnshot!", re); return null; } // Rotate the screenshot to the current orientation if (rotation != ROTATION_FREEZE_0) { Bitmap unrotatedScreenShot = Bitmap.createBitmap(displayWidth, displayHeight, Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(unrotatedScreenShot); canvas.translate(unrotatedScreenShot.getWidth() / 2, unrotatedScreenShot.getHeight() / 2); canvas.rotate(getDegreesForRotation(rotation)); canvas.translate(- screenshotWidth / 2, - screenshotHeight / 2); canvas.drawBitmap(screenShot, 0, 0, null); canvas.setBitmap(null); screenShot = unrotatedScreenShot; } // Optimization screenShot.setHasAlpha(false); return screenShot; }
View Code@Override public Bitmap takeScreenshot(int width, int height) { synchronized (mLock) { throwIfCalledByNotTrustedUidLocked(); throwIfShutdownLocked(); throwIfNotConnectedLocked(); } final long identity = Binder.clearCallingIdentity(); try { return SurfaceControl.screenshot(width, height);⑤ } finally { Binder.restoreCallingIdentity(identity); } }
可以看到,绕来绕去又回到方法1了。