Receiving an FFmpeg UDP Stream in Unity

  1. Attach the script below to an empty GameObject in your Unity scene:
// Proof of concept: receive raw video from FFmpeg over UDP and draw it into a Unity Texture2D.
using System;
using System.Collections.Generic;
using System.Net;
using System.Net.Sockets;
using UnityEngine;
using Debug = UnityEngine.Debug;

namespace UnityCoder.RawVideoUDP
{
    public class RawVideoReceiver : MonoBehaviour
    {
        public Material targetMat;

        UdpClient client;
        int port = 8888;
        int receiveBufferSize = 1472 * 1000;
        IPEndPoint ipEndPoint;
        object obj = null;
        AsyncCallback AC;
        byte[] receivedBytes;

        Texture2D tex;
        public int size = 256;      // must match the width/height of the incoming stream
        int imageSize = 0;          // bytes per frame (RGBA32 = 4 bytes per pixel)
        byte[] dump;                // large linear buffer that incoming datagrams are appended to
        int bufferSize = 0;
        int bufferIndex = 0;        // current write position in dump
        int bufferFrameStart = 0;   // start offset of the frame currently being assembled
        byte[] temp;                // staging buffer holding one complete frame

        Queue<int> frameIndex = new Queue<int>();  // offsets of completed frames inside dump
        int frameBufferCount = 0;
        readonly object queueLock = new object();  // the receive callback runs on a pool thread

        void Start()
        {
            tex = new Texture2D(size, size, TextureFormat.RGBA32, false, false);
            tex.filterMode = FilterMode.Point;
            tex.wrapMode = TextureWrapMode.Clamp;
            imageSize = size * size * 4;
            temp = new byte[imageSize];

            // Initialize the pixels with bright magenta so it is obvious when no frames have arrived yet.
            for (int i = 0; i < imageSize; i += 4)
            {
                temp[i] = 255;      // R
                temp[i + 1] = 0;    // G
                temp[i + 2] = 255;  // B
                temp[i + 3] = 255;  // A
            }
            tex.LoadRawTextureData(temp);
            tex.Apply(false);

            bufferSize = imageSize * 100;  // room for about 100 frames before the buffer runs out
            dump = new byte[bufferSize];
            targetMat.mainTexture = tex;

            InitializeUDPClient();
        }

        void FixedUpdate()
        {
            // If at least one complete frame has been assembled, upload the oldest one to the texture.
            int offset = -1;
            lock (queueLock)
            {
                if (frameBufferCount > 0)
                {
                    offset = frameIndex.Dequeue();
                    frameBufferCount--;
                }
            }
            if (offset >= 0)
            {
                Buffer.BlockCopy(dump, offset, temp, 0, imageSize);
                tex.LoadRawTextureData(temp);
                tex.Apply(false);
            }
        }

        void ReceivedUDPPacket(IAsyncResult result)
        {
            try
            {
                receivedBytes = client.EndReceive(result, ref ipEndPoint);
                var len = receivedBytes.Length;

                // The buffer is only used once from start to end; a real implementation
                // should wrap around (ring buffer) instead of giving up here.
                if (bufferIndex + len > bufferSize)
                {
                    Debug.LogError("Buffer exhausted, should wrap around instead..");
                    return;
                }

                Buffer.BlockCopy(receivedBytes, 0, dump, bufferIndex, len);
                bufferIndex += len;

                // Once a whole frame's worth of bytes has accumulated, queue its start offset.
                if (bufferIndex - bufferFrameStart >= imageSize)
                {
                    lock (queueLock)
                    {
                        frameIndex.Enqueue(bufferFrameStart);
                        frameBufferCount++;
                    }
                    bufferFrameStart += imageSize;
                }
            }
            catch (Exception e)
            {
                Debug.LogException(e);
            }
            client.BeginReceive(AC, obj);
        }

        public void InitializeUDPClient()
        {
            ipEndPoint = new IPEndPoint(IPAddress.Any, port);
            client = new UdpClient();
            client.Client.ReceiveBufferSize = receiveBufferSize;
            client.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, optionValue: true);
            client.ExclusiveAddressUse = false;
            client.EnableBroadcast = true;
            client.Client.Bind(ipEndPoint);
            client.DontFragment = true;
            AC = new AsyncCallback(ReceivedUDPPacket);
            client.BeginReceive(AC, obj);
            Debug.Log("Started UDP listener..");
        }

        void OnDestroy()
        {
            if (client != null)
            {
                client.Close();
            }
        }
    }
}
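
A note on how the script works (my summary, not the original author's): every incoming datagram is appended to one large linear buffer, and frames are delimited purely by byte count. At 256x256 RGBA that is 262,144 bytes per frame, split across roughly 180 datagrams of up to 1472 bytes each, so a single lost or reordered packet shifts every following frame. The dump buffer holds about 100 frames, which at 5 fps means the proof of concept stops filling the texture after roughly 20 seconds unless wrap-around is added.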

  2. Create a Plane or Quad, create a material using the Unlit/Texture shader to carry the video image, and assign that material to the object;


  3. Drag the Unlit/Texture material used on the Plane or Quad into the Target Mat field of the Raw Video Receiver component on the empty GameObject from step 1;


  4. Move the camera to a suitable position (steps 2 through 4 can also be done from code, as sketched below);
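
If you prefer to script the scene setup instead of doing steps 2 through 4 in the editor, here is a minimal sketch; the RawVideoSceneSetup name and the camera placement are my own choices, not part of the original post:

// Hypothetical helper (not in the original post): builds the Quad, the
// Unlit/Texture material, and the receiver at runtime instead of by hand.
using UnityEngine;
using UnityCoder.RawVideoUDP;

public class RawVideoSceneSetup : MonoBehaviour
{
    void Start()
    {
        // Step 2: a Quad with an Unlit/Texture material.
        var quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
        var mat = new Material(Shader.Find("Unlit/Texture"));
        quad.GetComponent<MeshRenderer>().material = mat;

        // Step 3: an empty object carrying the receiver, with Target Mat assigned.
        var receiverGo = new GameObject("RawVideoReceiver");
        var receiver = receiverGo.AddComponent<RawVideoReceiver>();
        receiver.targetMat = mat;

        // Step 4: point the main camera at the quad.
        Camera.main.transform.position = quad.transform.position - Vector3.forward * 2f;
        Camera.main.transform.LookAt(quad.transform);
    }
}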

  5. On the same machine, start an FFmpeg push stream that captures the desktop, using the following command:
ffmpeg -f gdigrab -framerate 5 -i desktop -vf scale=256:256 -pix_fmt rgba -f rawvideo udp://127.0.0.1:8888

Here -pix_fmt rgba matches the RGBA32 texture the script creates (4 bytes per pixel), and scale=256:256 matches the script's default size of 256.
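
To check the stream without Unity, the same raw video can be viewed with ffplay; run it instead of the Unity scene, since both would bind port 8888, and mirror the sender's settings:

ffplay -f rawvideo -pixel_format rgba -video_size 256x256 -framerate 5 udp://127.0.0.1:8888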

  6. Press Play in the Unity scene; the streamed desktop should appear on the Plane or Quad.
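
If the texture stays magenta after pressing Play, first confirm that datagrams reach port 8888 at all. A minimal console probe for that, assuming a plain .NET project outside Unity (this helper is illustrative and not from the original post):

using System;
using System.Net;
using System.Net.Sockets;

class UdpProbe
{
    static void Main()
    {
        // Bind the same port the Unity script listens on (stop the Unity scene first).
        using var client = new UdpClient(8888);
        var remote = new IPEndPoint(IPAddress.Any, 0);
        long total = 0;
        Console.WriteLine("Listening on udp://0.0.0.0:8888 ...");
        while (true)
        {
            byte[] packet = client.Receive(ref remote);  // blocks until a datagram arrives
            total += packet.Length;
            Console.WriteLine($"{packet.Length} bytes from {remote} (total {total})");
        }
    }
}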





Author: 艾孜尔江
Original post: https://www.cnblogs.com/ezhar/p/14221746.html

