111

handler导致内存泄露

内存泄漏(Memory Leak)是指程序中已动态分配的堆内存由于某种原因程序未释放或无法释放,造成系统内存的浪费,导致程序运行速度减慢甚至系统崩溃等严重后果。

public class SampleActivity extends Activity {

// ANTI-PATTERN (kept intentionally as the illustration): this anonymous
// Handler subclass is a non-static inner class, so every instance holds an
// implicit reference to the enclosing SampleActivity.
private final Handler mLeakyHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
// ...
}
};

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);

// Post a message and delay its execution for 10 minutes.
// The queued Message references mLeakyHandler, which in turn references
// this Activity, so the Activity cannot be garbage-collected until the
// message is processed 10 minutes from now.
// NOTE: the anonymous Runnable below is also a non-static inner class
// and leaks the Activity the same way.
mLeakyHandler.postDelayed(new Runnable() {
@Override
public void run() { /* ... */ }
}, 1000 * 60 * 10);

// Go back to the previous Activity.
// Even after finish(), the pending message keeps this instance alive.
finish();
}
}


如上所示,

当 activity 被 finish 的时候,延迟发送的消息仍然会存活在 UI 线程的消息队列中,直到 10 分钟后它才被处理掉。这个消息持有 activity 的 Handler 的引用,Handler 又隐式的持有它的外部类(这里就是 SampleActivity)的引用。这个引用会一直存在直到这个消息被处理,所以垃圾回收机制就没法回收这个 activity,内存泄露就发生了。非静态的匿名类会隐式的持有外部类的引用,所以 context 会被泄露掉。需要注意的是:代码里匿名的 Runnable 子类也会导致内存泄露。


解决方案

在新的类文件中实现 Handler 的子类或者使用 static 修饰内部类。静态的内部类不会持有外部类的引用,所以 activity 不会被泄露。如果你要在 Handler 内调用外部 activity 类的方法的话,可以让 Handler 持有外部 activity 类的弱引用,这样也不会有泄露 activity 的风险。关于匿名类造成的泄露问题,我们可以用 static 修饰这个匿名类对象解决这个问题,因为静态的匿名类也不会持有它外部类的引用。
 
可以将上demo修改为:
public class SampleActivity extends Activity {

/**
* Instances of static inner classes do not hold an implicit
* reference to their outer class.
*/
private static class MyHandler extends Handler {
private final WeakReference<SampleActivity> mActivity;

public MyHandler(SampleActivity activity) {
mActivity = new WeakReference<SampleActivity>(activity);
}

@Override
public void handleMessage(Message msg) {
SampleActivity activity = mActivity.get();
if (activity != null) {
// ...
}
}
}

private final MyHandler mHandler = new MyHandler(this);

/**
* Instances of anonymous classes do not hold an implicit
* reference to their outer class when they are "static".
*/
private static final Runnable sRunnable = new Runnable() {
@Override
public void run() { /* ... */ }
};

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);

// Post a message and delay its execution for 10 minutes.
mHandler.postDelayed(sRunnable, 1000 * 60 * 10);

// Go back to the previous Activity.
finish();
}



视频获取分帧缩略图方式

1. 通过MediaMetadataRetriever来进行获取

代码较为简单,就是一个循环

 

// Extract one scaled-down thumbnail per second of video using
// MediaMetadataRetriever; each thumbnail is delivered via callBack.
MediaMetadataRetriever metadataRetriever = new MediaMetadataRetriever();
try {
    metadataRetriever.setDataSource(fileName);

    String duration = metadataRetriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION);
    Log.d(TAG, "duration = " + duration);
    int durationMs = Integer.parseInt(duration);

    // Sample one frame per second (loop variable is in milliseconds).
    for (int i = 0; i < durationMs; i += 1000) {
        long start = System.nanoTime();
        Log.d(TAG, "getFrameAtTime time = " + i);
        // getFrameAtTime expects MICROSECONDS. Multiply as long: with the
        // original int multiply, i * 1000 overflows for videos longer than
        // ~35 minutes (2^31 µs), producing garbage seek positions.
        Bitmap frameAtIndex = metadataRetriever.getFrameAtTime(i * 1000L);
        if (frameAtIndex == null) {
            // No frame could be decoded at this position; skip instead of
            // crashing with an NPE in createScaledBitmap.
            continue;
        }
        Bitmap frame = Bitmap.createScaledBitmap(frameAtIndex, frameAtIndex.getWidth() / 8, frameAtIndex.getHeight() / 8, false);
        frameAtIndex.recycle();
        long end = System.nanoTime();
        long cost = end - start;
        Log.d(TAG, "cost time in millis = " + (cost * 1f / 1000000));

        if (callBack != null) {
            callBack.onComplete(frame);
        }
    }
} finally {
    // Release native resources even when parsing or decoding throws.
    metadataRetriever.release();
}

 

2. 用MediaExtractor读取视频数据,将MediaCodec解码后的数据传递给ImageReader来进行显示。

    // Decode video frames with MediaExtractor + MediaCodec, rendering each
    // decoded frame onto an ImageReader surface so the listener below can
    // convert it into a Bitmap.
    MediaExtractor extractor = null;
MediaCodec codec = null;
try {
extractor = new MediaExtractor();
extractor.setDataSource(fileName);
int trackCount = extractor.getTrackCount();
MediaFormat videoFormat = null;
// Locate the first video track and select it for sample reading.
for (int i = 0; i < trackCount; i++) {
MediaFormat trackFormat = extractor.getTrackFormat(i);
if (trackFormat.getString(MediaFormat.KEY_MIME).contains("video")) {
videoFormat = trackFormat;
extractor.selectTrack(i);
break;
}
}
if (videoFormat == null) {
Log.d(TAG, "Can not get video format");
return;
}

int imageFormat = ImageFormat.YUV_420_888;
int colorFormat = COLOR_FormatYUV420Flexible;
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
// Request quarter-size output from the decoder to cut memory/CPU cost.
videoFormat.setInteger(MediaFormat.KEY_WIDTH, videoFormat.getInteger(MediaFormat.KEY_WIDTH) / 4);
videoFormat.setInteger(MediaFormat.KEY_HEIGHT, videoFormat.getInteger(MediaFormat.KEY_HEIGHT) / 4);

long duration = videoFormat.getLong(MediaFormat.KEY_DURATION);

codec = MediaCodec.createDecoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
ImageReader imageReader = ImageReader
.newInstance(
videoFormat.getInteger(MediaFormat.KEY_WIDTH),
videoFormat.getInteger(MediaFormat.KEY_HEIGHT),
imageFormat,
3);
final ImageReaderHandlerThread imageReaderHandlerThread = new ImageReaderHandlerThread();

// Decoded frames land on the ImageReader's surface; the listener runs on a
// dedicated handler thread and converts each Image to a Bitmap.
imageReader.setOnImageAvailableListener(new MyOnImageAvailableListener(callBack), imageReaderHandlerThread.getHandler());
codec.configure(videoFormat, imageReader.getSurface(), null, 0);
codec.start();
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
long timeOut = 5 * 1000;// dequeue timeout in MICROSECONDS = 5 ms (original comment said 10ms, which was wrong)
boolean inputDone = false;
boolean outputDone = false;
ByteBuffer[] inputBuffers = null;
// Pre-Lollipop APIs expose input buffers as an array instead of getInputBuffer(int).
if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
inputBuffers = codec.getInputBuffers();
}
// Main decode loop: feed compressed samples in, drain decoded frames out.
int count = 1;
while (!outputDone) {
if (requestStop) {
return;
}
if (!inputDone) {
//feed data
int inputBufferIndex = codec.dequeueInputBuffer(timeOut);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer;
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
inputBuffer = codec.getInputBuffer(inputBufferIndex);
} else {
inputBuffer = inputBuffers[inputBufferIndex];
}
int sampleData = extractor.readSampleData(inputBuffer, 0);
if (sampleData > 0) {
long sampleTime = extractor.getSampleTime();
codec.queueInputBuffer(inputBufferIndex, 0, sampleData, sampleTime, 0);
// Advance to the next sample: sequential stepping when interval == 0,
// otherwise seek ahead ~interval ms to the previous sync (key) frame.
if (interval == 0) {
extractor.advance();
} else {
extractor.seekTo(count * interval * 1000, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
count++;
// extractor.advance();
}
} else {
// readSampleData <= 0 means the stream is exhausted; queue an EOS buffer.
codec.queueInputBuffer(inputBufferIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
inputDone = true;
Log.d(TAG, "end of stream");
}
}
}
if (!outputDone) {
//get data
int status = codec.dequeueOutputBuffer(bufferInfo, timeOut);
if (status ==
MediaCodec.INFO_TRY_AGAIN_LATER) {
// No output available yet; keep looping.
} else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Output format is now known; nothing to do, the Surface handles it.
} else if (status == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// Legacy signal; nothing to do here either.
} else {
// A real output buffer index: check the decoder's state flags first.
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "output EOS");
outputDone = true;
}
boolean doRender = (bufferInfo.size != 0);
long presentationTimeUs = bufferInfo.presentationTimeUs;
// Throttle rendering: only render frames at least `interval` ms apart
// (timestamps are in microseconds, hence the * 1000 comparison).
if (lastPresentationTimeUs == 0) {
lastPresentationTimeUs = presentationTimeUs;
} else {
long diff = presentationTimeUs - lastPresentationTimeUs;
if (interval != 0) {
if (diff < interval * 1000) {
doRender = false;
} else {
lastPresentationTimeUs = presentationTimeUs;
}
Log.d(TAG,
"diff time in ms =" + diff / 1000);
}
}
// Frames go straight to the Surface, so no buffer copying is needed here.
Log.d(TAG, "surface decoder given buffer " + status +
" (size=" + bufferInfo.size + ")" + ",doRender = " + doRender + ", presentationTimeUs=" + presentationTimeUs);
// releaseOutputBuffer(..., true) renders the frame to the ImageReader surface.
codec.releaseOutputBuffer(status, doRender);
}
}
}
} catch (IOException e) {
e.printStackTrace();
} finally {
// Always release codec and extractor — also covers the requestStop early return.
if (codec != null) {
codec.stop();
codec.release();
}
if (extractor != null) {
extractor.release();
}
}
}

 

最后根据获取的数据流转换格式生成bitmap 即可。

// Converts each Image delivered by the ImageReader into a downscaled Bitmap
// and forwards it to the supplied callback. Declared static so it holds no
// implicit reference to an enclosing instance (see the Handler-leak section).
private static class MyOnImageAvailableListener implements ImageReader.OnImageAvailableListener {
private final BitmapCallBack callBack;

private MyOnImageAvailableListener(BitmapCallBack callBack) {
this.callBack = callBack;
}

@Override
public void onImageAvailable(ImageReader reader) {
Log.i(TAG, "in OnImageAvailable");
Image img = null;
try {
// acquireLatestImage drops stale frames and returns the newest one (or null).
img = reader.acquireLatestImage();
if (img != null) {
// The Image carries YUV planes; they are converted to an RGB Bitmap below.
Image.Plane[] planes = img.getPlanes();
if (planes[0].getBuffer() == null) {
return;
}

// Bitmap bitmap = getBitmap(img);
Bitmap bitmap = getBitmapScale(img, 8);
// Bitmap bitmap = getBitmapFromNv21(img);
if (callBack != null && bitmap != null) {
Log.d(TAG, "onComplete bitmap ");
callBack.onComplete(bitmap);
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
// Always close the Image so the ImageReader can reuse its buffer slot
// (the reader was created with only 3 buffers).
if (img != null) {
img.close();
}
}

}

// Builds an ARGB_8888 Bitmap from the Image, downscaled by `scale`.
@NonNull
private Bitmap getBitmapScale(Image img, int scale) {
int width = img.getWidth() / scale;
int height = img.getHeight() / scale;
final byte[] bytesImage = getDataFromYUV420Scale(img, scale);
Bitmap bitmap = null;
// NOTE(review): width and height appear swapped here — createBitmap(height, width).
// The total byte count matches either way, so copyPixelsFromBuffer succeeds, but
// the resulting Bitmap would be transposed unless YuvUtils emits rotated output.
// TODO: confirm against the YuvUtils implementation.
bitmap = Bitmap.createBitmap(height, width, Bitmap.Config.ARGB_8888);
bitmap.copyPixelsFromBuffer(ByteBuffer.wrap(bytesImage));
return bitmap;
}

// Converts the Image's YUV_420_888 planes into a downscaled 4-bytes-per-pixel
// array via libyuv (YuvUtils). Output length = (width/scale) * (height/scale) * 4.
private byte[] getDataFromYUV420Scale(Image image, int scale) {
int width = image.getWidth();
int height = image.getHeight();
// Read image data
Image.Plane[] planes = image.getPlanes();

byte[] argb = new byte[width / scale * height / scale * 4];

// Note on naming: when Java hands libyuv a byte[] meant to be read as RGBA,
// libyuv calls that byte order "ABGR" — libyuv's names are the byte-reversed
// view of Bitmap's RGBA. Hence the I420-to-"ABGR" conversion below yields
// the layout Bitmap.Config.ARGB_8888 / copyPixelsFromBuffer expects.
YuvUtils.yuvI420ToABGRWithScale(
argb,
planes[0].getBuffer(), planes[0].getRowStride(),
planes[1].getBuffer(), planes[1].getRowStride(),
planes[2].getBuffer(), planes[2].getRowStride(),
width, height,
scale
);
return argb;
}
}



posted on 2022-02-25 17:31  归臻  阅读(162)  评论(0编辑  收藏  举报

导航