A Trip to Guangzhou

  I've been at Sun Yat-sen University (中山大学) for a week now, tagging along while a senior labmate works on a project. I haven't been much help; most of the time I've just been a spare pair of hands. I did pick up some C# and wrote a socket-based video transmission module inside Unity. The speed is acceptable, but the image error rate just won't come down. The Android-to-PC video streaming I built earlier didn't stutter at all, so there is clearly still room for improvement here.

Let me record the code here.

server:

using UnityEngine;
using System.Collections;
using System.Net.Sockets;
using System.Threading;
using System.Net;
using System;
using System.IO;

public class drawTexture2D : MonoBehaviour
{
    public byte[] image;
    static byte[] recv=new byte[640*480*3];
    static int conLen=0;
    static int mLength=0;
    static Texture2D image2D;
    static Socket mServer;
    Thread mSocket = new Thread(accept);
    static bool mstop = false;
    public GameObject plane;
    static int countflag=0;
    static bool drawflag = false;
    void Start()
    {
        //image = new byte[640*480*3];
        image2D = new Texture2D(640, 480, TextureFormat.RGB24, false);
        IPAddress ip = IPAddress.Parse("192.168.1.112");
        mServer = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
        mServer.Bind(new IPEndPoint(ip, 12345));    // bind to the local IP address and port
        mServer.Listen(10);                         // allow up to 10 queued connection requests
        mSocket.IsBackground = true;
        mSocket.Start();
    }
    // Update is called once per frame
    void Update()
    {
        //if (countflag == 2)
        //{
        //    if (drawflag==true)
        //    {
        //        image2D.LoadImage(recv);
        //        countflag = 1;
        //    }
        //}
        //plane.GetComponent<Renderer>().material.mainTexture = image2D;
    }
    void OnGUI()
    {
        if (countflag == 2)
        {
            if (drawflag == true)
            {
                image2D.LoadImage(recv);
                countflag = 1;
            }
        }
        GUI.DrawTexture(new Rect(100, 100, 640, 480), image2D);
    }
    void OnDestroy()
    {
        mstop = true;
        mSocket.Abort();
    }
    static void accept()
    {
        Socket mClient = mServer.Accept();
        // mClient.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReceiveBuffer, 40000);  // set the receive buffer size
        Debug.Log("accept is ok!");
        byte[] recvheader=new byte[5];
        //recv = new byte[40000];
        while (!mstop)
        {
            // Read the 5-byte header: a '+' marker byte followed by a 4-byte frame length.
            mClient.Receive(recvheader);
            if (BitConverter.ToString(recvheader, 0, 1) != "2B") continue;   // 0x2B == '+'
            conLen = BitConverter.ToInt32(recvheader, 1);
            if (conLen < 10000 || conLen > 40000) continue;                  // discard implausible frame sizes
            mLength = conLen;
            //recv = new byte[conLen];
            // NOTE: a single Receive call may return fewer than conLen bytes; such short
            // reads are a likely source of the image errors mentioned at the top of the post.
            mClient.Receive(recv);
            //if (recv[conLen-1] ==0) continue;
            // Crude completeness check: after this loop, drawflag only reflects whether the
            // last expected byte (recv[conLen-1]) is non-zero.
            for (int i = 0; i < conLen; i++)
            {
                if (recv[i] == 0)
                {
                    drawflag = false;
                }
                else
                {
                    drawflag = true;
                }
            }
            countflag = 2;

        }
    }
}
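
One likely cause of the stubborn error rate: Socket.Receive returns as soon as any data is available, so a single call often delivers only part of a JPEG frame. Below is a minimal sketch of a receive-until-complete helper that could sit inside drawTexture2D; the ReceiveAll name and the way it would replace the plain Receive call in accept() are my own suggestion, not part of the original project.

    // Sketch: keep calling Receive until exactly count bytes have arrived.
    // Returns false if the connection closes before the frame is complete.
    static bool ReceiveAll(Socket socket, byte[] buffer, int count)
    {
        int received = 0;
        while (received < count)
        {
            int n = socket.Receive(buffer, received, count - received, SocketFlags.None);
            if (n <= 0) return false;               // remote side closed the connection
            received += n;
        }
        return true;
    }

With this, the payload read in accept() would become if (!ReceiveAll(mClient, recv, conLen)) break; and the per-byte drawflag scan could go away.
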
client:
using UnityEngine;
using System.Collections;
using System.Runtime.InteropServices;
using System.Net.Sockets;
using System.Net;
using System.Threading;

public class ImageConver : MonoBehaviour
{
    public GameObject background;                 // plane bound to the Qualcomm library's camera image display
    public static byte[] cameraImageBytes;
    public byte[] cameraImageBytes2;
    public int currentImage = 0;
    private WebCamTexture mm;
    private WebCamTexture mm2;
    public ReadCamera readCamera;
    public Texture2D cameraTexture;
    public Texture2D cameraTexture2;
    static Socket mClient;
    static IPAddress ip;
    private static int getLength1 = 0;
    private int getLength2 = 0;
    static byte[] header;
    static byte[] header1 = new byte[5];
    static byte[] header2;
    static byte[] _header;
    Thread mSocket1 = new Thread(send1);
    static bool mstop = false;
    static string test = ".";
    string mPath = @"F:\1.jpg";

    // Use this for initialization
    void Start()
    {
        cameraImageBytes = new byte[ReadCamera.WIDTH * ReadCamera.HEIGHT * 3];
        cameraImageBytes2 = new byte[ReadCamera.WIDTH * ReadCamera.HEIGHT * 3];
        cameraTexture = new Texture2D(ReadCamera.WIDTH, ReadCamera.HEIGHT, TextureFormat.RGB24, false);
        cameraTexture2 = new Texture2D(ReadCamera.WIDTH, ReadCamera.HEIGHT, TextureFormat.RGB24, false);
        header = System.Text.Encoding.ASCII.GetBytes(test);
        ip = IPAddress.Parse("127.0.0.1");
        mClient = new Socket(AddressFamily.InterNetwork, SocketType.Stream, ProtocolType.Tcp);
        mSocket1.Start();
    }

    static void send1()
    {
        mClient.Connect(new IPEndPoint(ip, 12345));   // server IP and port
        Debug.Log("connect is ok!");
        while (!mstop)
        {
            mClient.Send(header1);
            Thread.Sleep(2);
            mClient.Send(cameraImageBytes);
        }
    }

    void Update()
    {
        if (readCamera.tex != null)
        {
            mm = readCamera.tex;
            for (int i = 0; i < ReadCamera.HEIGHT; i++)
            {
                for (int j = 0; j < ReadCamera.WIDTH; j++)
                {
                    cameraTexture.SetPixel(j, i, mm.GetPixel(j, i));
                }
            }
            cameraImageBytes = cameraTexture.EncodeToJPG();
            getLength1 = cameraImageBytes.Length;
            header1 = Int2Byte(getLength1);
        }
        //if (readCamera.tex2 != null)
        //{
        //    mm2 = readCamera.tex2;
        //    for (int i = 0; i < ReadCamera.HEIGHT; i++)
        //    {
        //        for (int j = 0; j < ReadCamera.WIDTH; j++)
        //        {
        //            cameraTexture2.SetPixel(j, i, mm2.GetPixel(j, i));
        //        }
        //    }
        //    cameraImageBytes2 = cameraTexture2.EncodeToJPG();
        //    getLength2 = cameraImageBytes2.Length;
        //}
    }

    byte[] Int2Byte(int len)
    {
        byte[] rtn = new byte[5];
        rtn[0] = (byte)('+');
        rtn[1] = (byte)(len & 0xff);
        rtn[2] = (byte)((len >> 8) & 0xff);
        rtn[3] = (byte)((len >> 16) & 0xff);
        rtn[4] = (byte)((len >> 24) & 0xff);
        return rtn;
    }

    byte[] Int2Byte2(int value)
    {
        byte[] src = new byte[4];
        //src[0] = (byte)(value & 0xFF);
        //src[1] = (byte)((value & 0xFF00) >> 8);
        //src[2] = (byte)((value & 0xFF0000) >> 16);
        //src[3] = (byte)((value >> 24) & 0xFF);
        src[3] = (byte)(value >> 24);
        src[2] = (byte)(value >> 16);
        src[1] = (byte)(value >> 8);
        src[0] = (byte)value;
        return src;
    }

    void OnDestroy()
    {
        mstop = true;
        mSocket1.Abort();
    }
}
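
The client has a related weak spot: Update() overwrites cameraImageBytes and header1 on the main thread while send1() is reading them on the sender thread, so a header can go out with a stale or mismatched frame. Below is a minimal sketch of a locked handoff, assuming it is pasted into ImageConver and that Int2Byte is made static; frameLock, latestFrame, PublishFrame and SendLoop are illustrative names, not part of the original code.

    // Sketch: publish each encoded frame under a lock so the 5-byte header
    // always describes the JPEG that is actually written to the socket.
    static readonly object frameLock = new object();
    static byte[] latestFrame;

    // Call from Update() right after EncodeToJPG().
    static void PublishFrame(byte[] jpeg)
    {
        lock (frameLock)
        {
            latestFrame = jpeg;
        }
    }

    // Replacement body for the while loop in send1().
    static void SendLoop()
    {
        while (!mstop)
        {
            byte[] frame;
            lock (frameLock)
            {
                frame = latestFrame;
                latestFrame = null;                 // send each frame at most once
            }
            if (frame == null) { Thread.Sleep(2); continue; }
            mClient.Send(Int2Byte(frame.Length));   // header: '+' marker plus 4-byte length
            mClient.Send(frame);
        }
    }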

posted @ 2015-07-05 17:24  transfercai