ps流提取H264并解码播放

因为需要从海康ps流中提取H264数据并进行解码播放,才有了这篇文章.因为是视频编解码领域的纯入门新手,个别理解或者方法有误,需要自行判断,不过相关方法已经测试通过,对于
像我这样的新手还是有一定的借鉴意义的.断断续续搞了很长一段时间,把相关经验分享给各位新手.

---------------------------------------------------------------------------------------------------------

分为3个部分来说吧,仅供参考.
1. 接收并且解析RTP流部分
2. 解析ps流部分,包括解析海康部分私有格式
3. 将提取的标准H264流进行解码播放

--------------------------------------------------------------------------------------------------------

接收并且解析RTP流:

当时想到了两种方案:1.编写udp-socket来接收流,然后自己解析rtp包. 2.使用jrtplib库来接收流,自动解析rtp包. 由于第二种方案简单易用,使用了第二种方案.实验过程发现jrtplib会
出现丢包的情况,导致花屏的出现.具体丢包原因未查明.(jrtplib使用了最简单的框架,摘自jrtplib的其中一个example,未精简,未使用thread,效果还可以)

//-----------------------------------------------------------------------------
// Thread entry point: receive the RTP stream with jrtplib, reassemble PS
// frames packet by packet, extract the H.264 elementary stream from each
// completed PS frame and append it to the shared deque gDeque (guarded by
// the critical section g_cs; ownership of each allocated buffer passes to
// the consumer of the deque).
// lpparentet – unused thread parameter.
// Returns 0 as the thread exit code. Note `done` is never set, so in
// practice the loop only ends when the process tears the thread down.
//-----------------------------------------------------------------------------
DWORD WINAPI GetSocketData(LPVOID lpparentet) 
{
	// ---- jrtplib session initialisation ------------------------------------
	WSADATA dat;
	WSAStartup(MAKEWORD(2,2),&dat);
	RTPSession session;
	RTPSessionParams sessionparams;
	sessionparams.SetOwnTimestampUnit(1.0/3600.0);
	sessionparams.SetUsePollThread(true);
	RTPUDPv4TransmissionParams transparams;
	transparams.SetPortbase(6000); // local RTP port
	session.SetMaximumPacketSize(sessionparams.GetMaximumPacketSize()+1000);
	// BUG FIX: the RTP receive buffer was sized from GetRTCPReceiveBuffer()
	// (copy/paste slip) – enlarge the RTP data buffer, not the RTCP one.
	transparams.SetRTPReceiveBuffer(transparams.GetRTPReceiveBuffer()*3);
	transparams.SetRTCPReceiveBuffer(transparams.GetRTCPReceiveBuffer()*3);
	int status = session.Create(sessionparams,&transparams);
	if (status < 0)
	{
		std::cerr << RTPGetErrorString(status) << std::endl;
		exit(-1);
	}
	uint8_t localip[]={127,0,0,1};
	RTPIPv4Address addr(localip,9000);
	status = session.AddDestination(addr);
	if (status < 0)
	{
		std::cerr << RTPGetErrorString(status) << std::endl;
		exit(-1);
	}
	session.SetDefaultPayloadType(96); // dynamic payload type used by the camera
	session.SetDefaultMark(false);
	session.SetDefaultTimestampIncrement(160);
	RTPTime delay(0.02); // 20 ms poll interval

	// ---- receive / reassembly loop -----------------------------------------
	bool frameStarted = false; // true once the first PS pack header was seen
	bool done = false;
	while (!done)
	{
		session.BeginDataAccess();
		if (session.GotoFirstSource())
		{
			do
			{
				RTPPacket *packet;
				while ((packet = session.GetNextPacket()) != 0)
				{
					// Need the 12-byte RTP header plus a 4-byte start code
					// before peeking at bytes [12..15]; drop runts.
					if (packet->GetPacketLength() >= 16)
					{
						const uint8_t *p = packet->GetPacketData();
						// PS pack start code 00 00 01 BA => a new PS frame starts here.
						if (p[12]==0x00 && p[13]==0x00 && p[14]==0x01 && p[15]==0xba)
						{
							if (frameStarted)
							{
								// First finish the previously accumulated frame.
								int iPsLength=0;
								GetH246FromPs(jimbak,jimlen,&returnps,&iPsLength); // strip the PS layer
								// Hikvision quirk: drop AU delimiters (type 9), SEI (type 6)
								// and private/audio leftovers before feeding the decoder.
								// NOTE(review): nal_unit_type is the LOW 5 bits of the NAL
								// header (& 0x1f), and the payload normally begins with a
								// 00 00 00 01 start code, so returnps[0]>>5 is 0 here; the
								// condition is kept byte-identical – verify on a live stream.
								if (returnps[0]>>5==0x06 || returnps[0]>>5==0x09 ||returnps[0]>>5==0x0a || returnps[0]>>5==0x0b ||returnps[0]>>5==0x0c)
								{
									// discard this frame
								}
								else if (iPsLength > 0) // plain H.264 frame – queue it
								{
									char *h264buffer=new char[iPsLength];
									memcpy(h264buffer,returnps,iPsLength);
									BUFFERINFO bi;
									bi.h264buf=h264buffer; // ownership passes to the consumer
									bi.lLength=iPsLength;
									EnterCriticalSection(&g_cs);
									gDeque.push_back(bi);
									LeaveCriticalSection(&g_cs);
								}
							}
							// Reset the assembly buffer and start collecting the new
							// frame (a frame usually spans several RTP packets).
							// NOTE(review): jimbak is a global buffer of unknown size –
							// the appends below are unchecked; confirm its capacity
							// covers the largest possible PS frame.
							jimlen=0;
							jim=jimbak;
							memcpy(jim,(char *)packet->GetPacketData()+12,packet->GetPacketLength()-12);
							jim+=(packet->GetPacketLength()-12);
							jimlen+=packet->GetPacketLength()-12;
							// BUG FIX: the old counter `i` was bumped for EVERY packet,
							// so continuation data could be appended through an
							// uninitialised `jim` before any BA header arrived.
							frameStarted = true;
						}
						else if (frameStarted)
						{
							// Continuation packet: skip audio (00 00 01 C0) and
							// private data (00 00 01 BD); append everything else
							// behind the frame head collected so far.
							bool isAudio   = (p[12]==0x00 && p[13]==0x00 && p[14]==0x01 && p[15]==0xc0);
							bool isPrivate = (p[12]==0x00 && p[13]==0x00 && p[14]==0x01 && p[15]==0xbd);
							if (!isAudio && !isPrivate)
							{
								memcpy(jim,(char *)packet->GetPacketData()+12,packet->GetPacketLength()-12);
								jim+=(packet->GetPacketLength()-12);
								jimlen+=packet->GetPacketLength()-12;
							}
						}
					}
					session.DeletePacket(packet);
				}
			} while (session.GotoNextSource());
		}
		session.EndDataAccess();
		RTPTime::Wait(delay);
	}

	delay = RTPTime(10.0);
	session.BYEDestroy(delay,"Time's up",9);
	WSACleanup();
	return 0; // BUG FIX: the thread function previously fell off the end (UB)
}

---------------------------------------------------------------------------------------------

海康PS流解析,可参考http://blog.csdn.net/wwyyxx26/article/details/15224879#

// ---------------------------------------------------------------------------
// On-the-wire layouts for MPEG-2 Program Stream parsing.
// BUG FIX: the matching `#pragma pack(1)` push was missing (only the
// restoring `#pragma pack()` survived), so the compiler inserted padding –
// e.g. sizeof(program_stream_e) became 10 instead of the 9 bytes a PES
// header actually occupies – and every sizeof-based offset in the parsers
// below was wrong. Pack to 1 byte and restore the default afterwards.
// ---------------------------------------------------------------------------
#pragma pack(push, 1)

// 16-bit length field as it appears in the stream (big-endian); the byte[]
// view is used to swap it into host order.
union littel_endian_size
{
	unsigned short int	length;
	unsigned char		byte[2];
};

// 00 00 01 + stream id: the 4-byte start code that opens every pack.
struct pack_start_code
{
	unsigned char start_code[3];
	unsigned char stream_id[1];
};

// PS pack header (00 00 01 BA): start code, 9 fixed bytes, then one byte
// whose low 3 bits give the number of stuffing bytes that follow.
struct program_stream_pack_header
{
	pack_start_code PackStart;// 4 bytes
	unsigned char Buf[9];
	unsigned char stuffinglen;
};

// System header (00 00 01 BB): start code + 16-bit big-endian length split
// into num1 (high byte) and num2 (low byte).
struct program_stream_pack_bb_header
{
	unsigned char head[4];
	unsigned char num1;
	unsigned char num2;
};

// Program stream map (00 00 01 BC): only the fixed 6-byte prefix is
// modelled; the variable-length tail is skipped via PackLength.
struct program_stream_map
{
	pack_start_code PackStart;
	littel_endian_size PackLength;//big-endian on the wire – must byte-swap
	//program_stream_info_length
	//info
	//elementary_stream_map_length
	//elem
};

// PES packet prefix (00 00 01 E0 for video): start code, big-endian length,
// 2 flag bytes and the PES-header stuffing length.
struct program_stream_e
{
	pack_start_code		PackStart;
	littel_endian_size	PackLength;//big-endian on the wire – must byte-swap
	char				PackInfo1[2];
	unsigned char		stuffing_length;
};

#pragma pack(pop)

// Parse the PS pack header (00 00 01 BA) at `Pack`.
// On return *NextPack points past the header (and past an optional system
// header 00 00 01 BB), and *leftlength holds the bytes remaining after it.
// Returns *leftlength, or 0 when fewer than 4 bytes remain.
int inline ProgramStreamPackHeader(char* Pack, int length, char **NextPack, int *leftlength)
{
	// The low 3 bits of the 14th byte give how many stuffing bytes are
	// appended to the fixed 14-byte pack header.
	program_stream_pack_header *PsHead = (program_stream_pack_header *)Pack;
	unsigned char pack_stuffing_length = PsHead->stuffinglen & '\x07';

	*leftlength = length - sizeof(program_stream_pack_header) - pack_stuffing_length;//header + stuffing consumed
	*NextPack = Pack+sizeof(program_stream_pack_header) + pack_stuffing_length;

	// An optional system header (00 00 01 BB) may follow; skip it too.
	// NOTE(review): only num2 (the low length byte) is honoured, so system
	// headers longer than 255 bytes would be mis-skipped – confirm acceptable.
	if(*NextPack && (*NextPack)[0]=='\x00' && (*NextPack)[1]=='\x00' && (*NextPack)[2]=='\x01' && (*NextPack)[3]=='\xBB')
	{
		program_stream_pack_bb_header *pbbHeader=(program_stream_pack_bb_header *)(*NextPack);
		unsigned char bbheaderlen=pbbHeader->num2;
		(*NextPack) = (*NextPack) + sizeof(program_stream_pack_bb_header)+bbheaderlen;
		// BUG FIX: the remainder was recomputed from the original `length`,
		// ignoring the pack header and stuffing bytes already consumed above;
		// subtract the system header from the current remainder instead.
		*leftlength = *leftlength - (int)sizeof(program_stream_pack_bb_header) - bbheaderlen;
	}

	if(*leftlength<4) return 0;

	return *leftlength;
}

inline int ProgramStreamMap(char* Pack, int length, char **NextPack, int *leftlength, char **PayloadData, int *PayloadDataLen)
{
	//printf("[%s]%x %x %x %x\n", __FUNCTION__, Pack[0], Pack[1], Pack[2], Pack[3]);

	program_stream_map* PSMPack = (program_stream_map*)Pack;

	//no payload
	*PayloadData = 0;
	*PayloadDataLen = 0;

	if(length < sizeof(program_stream_map)) return 0;

	littel_endian_size psm_length;
	psm_length.byte[0] = PSMPack->PackLength.byte[1];
	psm_length.byte[1] = PSMPack->PackLength.byte[0];

	*leftlength = length - psm_length.length - sizeof(program_stream_map);

	//printf("[%s]leftlength %d\n", __FUNCTION__, *leftlength);

	if(*leftlength<=0) return 0;

	*NextPack = Pack + psm_length.length + sizeof(program_stream_map);

	return *leftlength;
}

inline int Pes(char* Pack, int length, char **NextPack, int *leftlength, char **PayloadData, int *PayloadDataLen)
{
	//printf("[%s]%x %x %x %x\n", __FUNCTION__, Pack[0], Pack[1], Pack[2], Pack[3]);
	program_stream_e* PSEPack = (program_stream_e*)Pack;

	*PayloadData = 0;
	*PayloadDataLen = 0;

	if(length < sizeof(program_stream_e)) return 0;

	littel_endian_size pse_length;
	pse_length.byte[0] = PSEPack->PackLength.byte[1];
	pse_length.byte[1] = PSEPack->PackLength.byte[0];

	*PayloadDataLen = pse_length.length - 2 - 1 - PSEPack->stuffing_length;
	if(*PayloadDataLen>0) 
		*PayloadData = Pack + sizeof(program_stream_e) + PSEPack->stuffing_length;

	*leftlength = length - pse_length.length - sizeof(pack_start_code) - sizeof(littel_endian_size);

	//printf("[%s]leftlength %d\n", __FUNCTION__, *leftlength);

	if(*leftlength<=0) return 0;

	*NextPack = Pack + sizeof(pack_start_code) + sizeof(littel_endian_size) + pse_length.length;

	return *leftlength;
}

// Strip the PS/PES layers from one complete PS frame held in `buffer`
// (`length` bytes), compacting the raw H.264 payload in place at the front
// of that same buffer.
// *h264Buffer is set to `buffer`, *h264length to the number of H.264 bytes
// produced; the byte count is also returned (0 on parse failure).
int inline GetH246FromPs(char* buffer,int length, char **h264Buffer, int *h264length)
{
	int leftlength = 0;
	char *NextPack = 0;

	*h264Buffer = buffer;
	*h264length = 0;

	// Consume the pack header (00 00 01 BA) and optional system header.
	if(ProgramStreamPackHeader(buffer, length, &NextPack, &leftlength)==0)
		return 0;

	char *PayloadData=NULL; 
	int PayloadDataLen=0;

	while(leftlength >= (int)sizeof(pack_start_code))
	{
		PayloadData=NULL;
		PayloadDataLen=0;

		if(NextPack 
			&& NextPack[0]=='\x00' 
			&& NextPack[1]=='\x00' 
			&& NextPack[2]=='\x01' 
			&& NextPack[3]=='\xE0')
		{
			// Video PES packet: append its payload to the front of `buffer`.
			int more = Pes(NextPack, leftlength, &NextPack, &leftlength, &PayloadData, &PayloadDataLen);
			if(PayloadDataLen)
			{
				// BUG FIX: source and destination live in the SAME buffer and
				// can overlap (the write cursor trails the payload by only a
				// PES-header-sized gap); memcpy on overlapping ranges is
				// undefined behaviour – use memmove.
				memmove(buffer, PayloadData, PayloadDataLen);
				buffer += PayloadDataLen;
				*h264length += PayloadDataLen;
			}
			if(more==0)
				break; // last PES packet of this frame
		}
		else if(NextPack 
			&& NextPack[0]=='\x00' 
			&& NextPack[1]=='\x00'
			&& NextPack[2]=='\x01'
			&& NextPack[3]=='\xBC')
		{
			// Program stream map: carries no video payload, just skip it.
			if(ProgramStreamMap(NextPack, leftlength, &NextPack, &leftlength, &PayloadData, &PayloadDataLen)==0)
				break;
		}
		else
		{
			// Unknown start code – stop rather than misparse.
			break;
		}
	}
	return *h264length;
}

  H264解码并播放

// One-time set-up of the FFmpeg H.264 decoder (legacy pre-0.8 API) and the
// SDL1 YUV overlay used for display.
// Globals initialised here: g_pCodecCtx, g_pavfFrame, g_pYUVavfFrame,
// g_screen, g_bmp, rect, img_convert_ctx, screen_w/screen_h.
// Returns TRUE on success, FALSE otherwise.
BOOL H264_Init_and_SDL()
{
	avcodec_init();
	av_register_all();
	AVCodec *pCodec=avcodec_find_decoder(CODEC_ID_H264);
	// BUG FIX: a missing decoder previously crashed avcodec_open(NULL codec).
	if (!pCodec)
		return FALSE;
	g_pCodecCtx=avcodec_alloc_context();
	if (!g_pCodecCtx)
		return FALSE;

	g_pCodecCtx->time_base.num = 1; // these two lines: 25 frames per second
	g_pCodecCtx->time_base.den = 25; 
	g_pCodecCtx->bit_rate = 0; // unknown – initialise to 0
	g_pCodecCtx->frame_number = 1; // one video frame per packet
	g_pCodecCtx->codec_type = CODEC_TYPE_VIDEO; 
	g_pCodecCtx->width = 1280; // expected stream width and height
	g_pCodecCtx->height = 720; 

	// BUG FIX: a failed avcodec_open used to fall through with NULL frames
	// and still run the SDL setup below.
	if (avcodec_open(g_pCodecCtx,pCodec)<0)
		return FALSE;
	g_pavfFrame=avcodec_alloc_frame();
	g_pYUVavfFrame=avcodec_alloc_frame();

	//////////////////////////////////////////////////////////////////////////
	SDL_putenv("SDL_VIDEO_WINDOW_POS=0,0");
	if(SDL_Init(SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER)) {    
		printf( "Could not initialize SDL - %s\n", SDL_GetError());   
		// BUG FIX: previously returned -1, which is non-zero and therefore
		// read as success by BOOL-testing callers.
		return FALSE;
	}   

	screen_w = g_pCodecCtx->width;  
	screen_h = g_pCodecCtx->height;  
	g_screen = SDL_SetVideoMode(screen_w, screen_h, 0,0);  

	if(!g_screen) 
	{    
		printf("SDL: could not set video mode - exiting:%s\n",SDL_GetError());    
		return FALSE;  
	}  

	g_bmp = SDL_CreateYUVOverlay(g_pCodecCtx->width, g_pCodecCtx->height,SDL_YV12_OVERLAY, g_screen);   

	rect.x = 0;      
	rect.y = 0;      
	rect.w = screen_w;      
	rect.h = screen_h;    
	//SDL End------------------------
	img_convert_ctx = sws_getContext(g_pCodecCtx->width, g_pCodecCtx->height, g_pCodecCtx->pix_fmt, g_pCodecCtx->width, g_pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL); 

	return (BOOL)(g_pavfFrame && g_pYUVavfFrame);
}

// Decode one H.264 frame and display it through the SDL YUV overlay.
// pSrcData   - encoded H.264 data for one frame
// dwDataLen  - number of bytes in pSrcData
// pDeData    - receives the decoded, tightly packed YUV420P pixels
// pnWidth, pnHeight - receive the video width and height
// Returns TRUE when a complete picture was decoded (and shown), else FALSE.
BOOL H264_Decode(const PBYTE pSrcData, const DWORD dwDataLen, PBYTE pDeData,  int * pnWidth, int * pnHeight)
{
	BOOL n_Got=FALSE;
	// Legacy FFmpeg API; n_Got becomes non-zero once a full frame is ready.
	// NOTE(review): assumes BOOL is int-sized so the (int*) cast is safe.
	avcodec_decode_video(g_pCodecCtx,g_pavfFrame,(int *)&n_Got,(unsigned __int8*)pSrcData,dwDataLen);
	if (n_Got)
	{
		*pnWidth=g_pCodecCtx->width;
		*pnHeight=g_pCodecCtx->height;
		//ASSERT(g_pCodecCtx->pix_fmt==PIX_FMT_YUV420P);
		if (g_pCodecCtx->pix_fmt!=PIX_FMT_YUV420P)
		{
			return FALSE;
		}
		// Copy the three planes (Y full size, U/V half width and height) out
		// of the decoder frame, dropping the per-line stride padding so
		// pDeData holds tightly packed YUV420P.
		int ndatalen=0;
		for (int i=0;i<3;i++)
		{
			int nShift=(i==0)?0:1; // chroma planes are subsampled by 2
			PBYTE pYUVData=(PBYTE)g_pavfFrame->data[i];
			for (int j=0;j<(g_pCodecCtx->height>>nShift);j++)
			{
				memcpy(&pDeData[ndatalen],pYUVData,(g_pCodecCtx->width >> nShift));
				pYUVData+=g_pavfFrame->linesize[i]; // advance by stride, not width
				ndatalen+=(g_pCodecCtx->width >> nShift);
			}
		}
		//////////////////////////////////////////////////////////////////////////
		// Render via SDL: the YV12 overlay stores V before U, hence the
		// swapped plane indices 1 <-> 2 below.
		SDL_LockYUVOverlay(g_bmp);  
		g_pYUVavfFrame->data[0]=g_bmp->pixels[0];  
		g_pYUVavfFrame->data[1]=g_bmp->pixels[2];  
		g_pYUVavfFrame->data[2]=g_bmp->pixels[1];       
		g_pYUVavfFrame->linesize[0]=g_bmp->pitches[0];  
		g_pYUVavfFrame->linesize[1]=g_bmp->pitches[2];     
		g_pYUVavfFrame->linesize[2]=g_bmp->pitches[1];  
		sws_scale(img_convert_ctx, g_pavfFrame->data, g_pavfFrame->linesize, 0, g_pCodecCtx->height, g_pYUVavfFrame->data, g_pYUVavfFrame->linesize);  
		SDL_UnlockYUVOverlay(g_bmp);
		SDL_DisplayYUVOverlay(g_bmp, &rect);   
		//Delay 40ms   
		//SDL_Delay(40);  

	}
	
	return n_Got;
}

  

 

posted @ 2015-10-12 15:02  woojim  阅读(10807)  评论(2编辑  收藏  举报