H264云台型网络摄像机学习
RTSP交互过程
C表示rtsp客户端,S表示rtsp服务端
1.C->S:OPTIONS request //询问S有哪些方法可用
1.S->C:OPTIONS response //S回应信息中包括提供的所有可用方法
2.C->S:DESCRIBE request //要求得到S提供的媒体初始化描述信息
2.S->C:DESCRIBE response //S回应媒体初始化描述信息,主要是sdp
3.C->S:SETUP request //设置会话的属性,以及传输模式,提醒S建立会话
3.S->C:SETUP response //S建立会话,返回会话标识符,以及会话相关信息
4.C->S:PLAY request //C请求播放
4.S->C:PLAY response //S回应该请求的信息
S->C:发送流媒体数据
5.C->S:TEARDOWN request //C请求关闭会话
5.S->C:TEARDOWN response //S回应该请求
RTSP连接、断开、视频播放、停止的工具类文件
/* * RTSPVideoUtil.h * * Created on: 2012-10-4 */ #include<sys/socket.h> #include<unistd.h> #include<ctype.h> #include<arpa/inet.h> #include<pthread.h> #include"include/rtppackage.h" #include"include/type.h" #include"include/DecodeFrame.h" //全局变量 JNIEnv *env_play; int rtsp_Socket = -1; int size = 1024; int mWidth = 352, mHeight = 288;//显示视频屏幕大小 char Session[100]; uint8_t videoDecodeData[352 * 288 * 2];//视频数据缓冲,使其与mWidth、mHeight相同 jbyteArray jarrayVideo; char *IP; int Port; /** * RTSP连接 * 返回0,连接失败;返回1,连接成功 */ int getConnectRTSP(char *ip, int port) { //LOGD("begin to connect rtsp\n"); char szResponse[1024]; char szSendBuf[1024]; int nRet = 0; int nTimeOut = 5000; IP = ip; Port = port; setsockopt(rtsp_Socket, SOL_SOCKET, SO_RCVTIMEO, (char*) &nTimeOut, sizeof(nTimeOut)); rtsp_Socket = socket(AF_INET, SOCK_STREAM, 0);//创建socket if (rtsp_Socket < 0) {//告诉Java创建Sock失败 return 0; } //Socket连接 struct sockaddr_in addrSrv; addrSrv.sin_addr.s_addr = inet_addr(ip); addrSrv.sin_family = AF_INET; addrSrv.sin_port = htons(port); if (-1 == connect(rtsp_Socket, (struct sockaddr*) &addrSrv, sizeof(struct sockaddr))) {//告诉Java创建Sock失败 LOGD("Connect失败\n"); close(rtsp_Socket); rtsp_Socket = -1; return 0; } //OPTIONS方法 char lstrOptions[1024] = "OPTIONS rtsp://%s:%d/H264 RTSP/1.0\r\n" "CSeq: 1\r\n" "\r\n"; sprintf(szSendBuf, lstrOptions, ip, port); nRet = send(rtsp_Socket, szSendBuf, 1024, 0); if (nRet == -1) { return 0; } LOGD("send1 nRet=%d,buf=%s", nRet, szSendBuf); memset(szResponse, 0, sizeof(szResponse)); nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0); LOGD("recv1 nRet=%d,response=%s", nRet, szResponse); if (nRet == -1) { return 0; } //DESCRIBE方法 char lstrDescribe[1024] = "DESCRIBE rtsp://%s:%d/H264 RTSP/1.0\r\n" "CSeq: 2\r\n" "Content-Length: 0\r\n" "\r\n"; memset(szSendBuf, 0, sizeof(szSendBuf)); sprintf(szSendBuf, lstrDescribe, ip, port); nRet = send(rtsp_Socket, szSendBuf, 1024, 0); if (nRet == -1) { return 0; } //LOGD("send2 nRet=%d,buf=%s", nRet, szSendBuf); 
memset(szResponse, 0, sizeof(szResponse)); nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0); if (nRet == -1) { return 0; } //LOGD("recv2 nRet=%d,response=%s", nRet, szResponse); //SETUP方法 char lstrSetup[1024] = "SETUP rtsp://%s:%d/H264 RTSP/1.0\r\n" "CSeq: 3\r\n" "Transport: RTP/AVP;unicast;client_port=6666-6667\r\n" "\r\n"; memset(szSendBuf, 0, sizeof(szSendBuf)); sprintf(szSendBuf, lstrSetup, ip, port); nRet = send(rtsp_Socket, szSendBuf, 1024, 0); if (nRet == -1) { return 0; } //LOGD("send3 nRet=%d,buf=%s", nRet, szSendBuf); memset(szResponse, 0, sizeof(szResponse)); nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0); if (nRet == -1) { return 0; } //LOGD("recv3 nRet=%d,response=%s", nRet, szResponse); //获取Session的内容 int i = 0, j = 0; for (; i < strlen(szResponse) - 9; i++) { if ('S' == szResponse[i] && 'e' == szResponse[i + 1] && 's' == szResponse[i + 2] && 's' == szResponse[i + 3] && 'i' == szResponse[i + 4] && 'o' == szResponse[i + 5] && 'n' == szResponse[i + 6] && ':' == szResponse[i + 7] && ' ' == szResponse[i + 8]) { while (szResponse[i + 9 + j] != '\r' && szResponse[i + 10 + j] != '\n') { Session[j] = szResponse[i + 9 + j]; j++; } break; } } //PLAY方法 char lstrPlay[1024] = "PLAY rtsp://%s:%d/H264 RTSP/1.0\r\n" "CSeq: 4\r\n" "Content-Length: 0\r\n" "Session: %s\r\n" "Range: npt=0.000-\r\n" "\r\n"; memset(szSendBuf, 0, sizeof(szSendBuf)); sprintf(szSendBuf, lstrPlay, ip, port, Session); nRet = send(rtsp_Socket, szSendBuf, 1024, 0); if (nRet == -1) { return 0; } //LOGD("send4 nRet=%d,buf=%s", nRet, szSendBuf); memset(szResponse, 0, sizeof(szResponse)); nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0); if (nRet == -1) { return 0; } //LOGD("recv4 nRet=%d,response=%s", nRet, szResponse); //LOGD("connect rtsp success\n"); return 1; } /** * 释放视频播放资源 */ void releaseResource() { close(rtsp_Socket); //释放jarrayVideo、videoVideoData内存 (env_play)->ReleaseByteArrayElements(jarrayVideo, (jbyte*) videoDecodeData, 0); } /** *RTSP断开连接 */ int DisconnectRTSP() { char 
szResponse[1024]; char szSendBuf[1024]; int nRet; //TEARDOWN方法 char teardown[1024] = "TEARDOWN rtsp://%s:%d/H264 RTSP/1.0\r\n" "CSeq: 5\r\n" "Session: %s\r\n" "\r\n"; sprintf(szSendBuf, teardown, IP, Port, Session); nRet = send(rtsp_Socket, szSendBuf, 1024, 0); if (nRet == -1) { return 0; } //LOGD("send1 nRet=%d,buf=%s", nRet, szSendBuf); memset(szResponse, 0, sizeof(szResponse)); nRet = recv(rtsp_Socket, &szResponse[0], 1024, 0); //LOGD("recv1 nRet=%d,response=%s", nRet, szResponse); if (nRet == -1) { return 0; } releaseResource(); return 1; } /** * 调用Android的SurfaceView进行播放 */ void displayVideo(int status, char *pRGBBuf, int nWidth, int nHeight, int nDepth) { //创建子线程 (jvm_save)->AttachCurrentThread(&env_play, NULL); jarrayVideo = (env_play)->NewByteArray(nWidth * nHeight * 2); //将解码完的数据转成byte数组jarrayVideo (env_play)->SetByteArrayRegion(jarrayVideo, 0, nWidth * nHeight * 2, (const jbyte *) pRGBBuf); //调用SurfaceView的方法进行播放,displayVideo_methodId方法需要在JniHelp.cpp中实例化 (env_play)->CallStaticVoidMethod(cls_save, displayVideo_methodId, status, jarrayVideo, nWidth, nHeight, nDepth); //释放jarrayVideo、videoVideoData内存 (env_play)->ReleaseByteArrayElements(jarrayVideo, (jbyte*) videoDecodeData, 0); (jvm_save)->DetachCurrentThread(); } /** * 接收数据、拼帧、解码、播放 */ void do_echo(int sockfd, struct sockaddr *pcliaddr, socklen_t client) { int hr, n, outSize = 0; int isFindIFrame = 0; CH264_RTP_UNPACK *unpack = new CH264_RTP_UNPACK(hr); socklen_t len = client; unsigned char mesg[20480]; long timebase = 0; InitDecoder(mWidth, mHeight); while (true) { /* waiting for receive data */ memset(mesg, 0, sizeof(mesg)); n = recvfrom(sockfd, mesg, 2048, 0, pcliaddr, &len); outSize = 0; //接受到视频数据开始拼桢 BYTE *pFrame = unpack->Parse_RTP_Packet(mesg, n, &outSize); if (timebase == 0 || timebase <= unpack->m_RTP_Header.ts) { timebase = unpack->m_RTP_Header.ts; } else { continue; } if (pFrame != NULL) { if (isFindIFrame == 0) { if (outSize > 1456) { isFindIFrame = 1; } else { continue; } } int outLen = 
outSize; //解码播放 if (DecoderNal(pFrame, outLen, videoDecodeData) > 0) { displayVideo(0, (char *) videoDecodeData, c->width, c->height, 0); } } } //释放变量 UninitDecoder(); } /** * 接收处理数据线程 */ void *recvThread(void *lpParam) { int sockfd; struct sockaddr_in servaddr, cliaddr; sockfd = socket(AF_INET, SOCK_DGRAM, 0); /* create a socket */ /* init servaddr */ bzero(&servaddr, sizeof(servaddr)); servaddr.sin_family = AF_INET; servaddr.sin_addr.s_addr = htonl(INADDR_ANY); servaddr.sin_port = htons(6666);//绑定1024后的端口 /* bind address and port to socket */ if (bind(sockfd, (struct sockaddr *) &servaddr, sizeof(servaddr)) == -1) { perror("bind error"); exit(1); } do_echo(sockfd, (struct sockaddr *) &cliaddr, sizeof(cliaddr)); } /** * 外部调用播放视频的方法 */ void displayRTSP() { int nSendBuf = 1024 * 10;//设置为1M pthread_t m_hRecvThread = -1; pthread_create(&m_hRecvThread, NULL, recvThread, NULL); }