Windows D3D11 Bitstream Media Renderer

Step 1: Create the ID3D11Texture2D

ID3D11Texture2D is the Direct3D 11 interface for a 2D texture: a GPU resource used to store and process image data. It lets the application operate on two-dimensional image data directly on the GPU, enabling efficient rendering and texture mapping. An ID3D11Texture2D can hold static image data, but it also supports updating its contents dynamically, which is essential for real-time graphics. By adjusting the size of an ID3D11Texture2D, operations such as image scaling, texture rebuilding, and dynamic texture updates can all be implemented, covering a wide range of graphics-processing needs.

public Renderer(VideoDecoder videoDecoder, IntPtr handle = new IntPtr(), int uniqueId = -1)
{
	UniqueId     = uniqueId == -1 ? Utils.GetUniqueId() : uniqueId;
	VideoDecoder = videoDecoder;
	Config       = videoDecoder.Config;

	// CPU-readable staging texture description, used to copy frames back from the GPU;
	// Width/Height stay at -1 until the actual video size is known
	singleStageDesc = new Texture2DDescription()
	{
		Usage       = ResourceUsage.Staging,
		Format      = Format.B8G8R8A8_UNorm,
		ArraySize   = 1,
		MipLevels   = 1,
		BindFlags   = BindFlags.None,
		CPUAccessFlags      = CpuAccessFlags.Read,
		SampleDescription   = new SampleDescription(1, 0),
		Width       = -1,
		Height      = -1
	};

	// Default GPU texture description, bindable both as render target and as shader resource
	singleGpuDesc = new Texture2DDescription()
	{
		Usage       = ResourceUsage.Default,
		Format      = Format.B8G8R8A8_UNorm,
		ArraySize   = 1,
		MipLevels   = 1,
		BindFlags   = BindFlags.RenderTarget | BindFlags.ShaderResource,
		SampleDescription   = new SampleDescription(1, 0)
	};

	// Keep the delegate alive so the unmanaged window-procedure pointer stays valid
	wndProcDelegate = new(WndProc);
	wndProcDelegatePtr = Marshal.GetFunctionPointerForDelegate(wndProcDelegate);
	ControlHandle = handle;
	Initialize();
}
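
To make the role of singleStageDesc concrete, here is a minimal sketch (not part of the original renderer) of the usual staging readback pattern: copy a rendered frame into the staging texture, then map it for CPU access. frameTexture is a hypothetical B8G8R8A8 GPU texture; the Vortice calls shown (CreateTexture2D, CopyResource, Map/Unmap) are the standard D3D11 wrappers.

// Hypothetical snapshot helper: GPU frame -> staging texture -> CPU memory
singleStageDesc.Width  = frameTexture.Description.Width;
singleStageDesc.Height = frameTexture.Description.Height;
using ID3D11Texture2D stage = Device.CreateTexture2D(singleStageDesc);

context.CopyResource(stage, frameTexture);                 // GPU-side copy into the staging texture
var mapped = context.Map(stage, 0, MapMode.Read, MapFlags.None);
// mapped.DataPointer now points at the pixels; rows are mapped.RowPitch bytes apart
// ... copy the rows out into a bitmap or byte[] here ...
context.Unmap(stage, 0);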

Step 2: Create the Device with D3D11CreateDevice

The D3D11CreateDevice function plays a critical role in Direct3D 11 programming: it creates the device (Device) and the device context (DeviceContext), and it is the key step of the Direct3D 11 initialization phase. Through it the developer selects the driver type, hardware acceleration (HAL) or the software reference rasterizer (REF), chooses a specific display adapter (usually the primary one), and specifies the feature levels to support, either all of them or a particular subset.

The device is a core Direct3D 11 concept: it creates resources, shader objects, state objects, and query objects, and it can check hardware capabilities and assist debugging. It is best thought of as the provider of resources, exposed through the ID3D11Device interface. The device context is what actually uses those resources and manipulates the rendering pipeline itself: it binds resources, shader objects, and state objects to the pipeline, and controls the execution of the rendering and compute pipelines. ID3D11DeviceContext comes in two kinds: the immediate context (ImmediateContext), which is linked directly to the pipeline, and the deferred context (DeferredContext), which provides a thread-safe mechanism for asynchronous, multithreaded command recording.

In short, D3D11CreateDevice is not only the entry point for creating a Direct3D 11 device but also the step that configures the rendering environment and decides how the hardware is used, which makes it essential for efficient graphics rendering.

public void Initialize(bool swapChain = true)
{
	ID3D11Device tempDevice;
	IDXGIAdapter1 adapter = null;
	var creationFlags       = DeviceCreationFlags.BgraSupport /*| DeviceCreationFlags.VideoSupport*/; // let FFmpeg fail VA setup itself if the device lacks video support
	var creationFlagsWarp   = DeviceCreationFlags.None;
	if (Environment.OSVersion.Version.Major <= 7)
	{
		// Windows 7 and earlier: simply take the first adapter the factory enumerates
		for (int i = 0; Engine.Video.Factory.EnumAdapters1(i, out adapter).Success; i++)
			break;
		// Try the full feature-level list first, then the reduced list, and finally fall back to software rendering (WARP)
		if (D3D11.D3D11CreateDevice(adapter, adapter == null ? DriverType.Hardware : DriverType.Unknown, creationFlags, featureLevelsAll, out tempDevice).Failure)
		{
			if (D3D11.D3D11CreateDevice(adapter, adapter == null ? DriverType.Hardware : DriverType.Unknown, creationFlags, featureLevels, out tempDevice).Failure)
			{
				Config.Video.GPUAdapter = "WARP";
				D3D11.D3D11CreateDevice(null, DriverType.Warp, creationFlagsWarp, featureLevels, out tempDevice).CheckError();
			}
		}                     
		Device = tempDevice.QueryInterface<ID3D11Device1>();
	}
	else
	{                  
		if (!string.IsNullOrWhiteSpace(Config.Video.GPUAdapter) && Config.Video.GPUAdapter.ToUpper() != "WARP")
		{
			// Match the configured adapter by exact description or by a "<description> luid=<luid>" regex
			for (int i = 0; Engine.Video.Factory.EnumAdapters1(i, out adapter).Success; i++)
			{
				if (adapter.Description1.Description == Config.Video.GPUAdapter)
					break;
				if (Regex.IsMatch(adapter.Description1.Description + " luid=" + adapter.Description1.Luid, Config.Video.GPUAdapter, RegexOptions.IgnoreCase))
					break;
				adapter.Dispose();
			}
			if (adapter == null)
			{                           
				Config.Video.GPUAdapter = null;
			}
		}                  
		if (!string.IsNullOrWhiteSpace(Config.Video.GPUAdapter) && Config.Video.GPUAdapter.ToUpper() == "WARP")
		{
			D3D11.D3D11CreateDevice(null, DriverType.Warp, creationFlagsWarp, featureLevels, out tempDevice).CheckError();
		}                                      
		else
		{                     
			if (D3D11.D3D11CreateDevice(adapter, adapter == null ? DriverType.Hardware : DriverType.Unknown, creationFlags, featureLevelsAll, out tempDevice).Failure)
			{
				if (D3D11.D3D11CreateDevice(adapter, adapter == null ? DriverType.Hardware : DriverType.Unknown, creationFlags, featureLevels, out tempDevice).Failure)
				{
					Config.Video.GPUAdapter = "WARP";
					D3D11.D3D11CreateDevice(null, DriverType.Warp, creationFlagsWarp, featureLevels, out tempDevice).CheckError();
				}
			}                        
		}
		Device = tempDevice.QueryInterface<ID3D11Device1>();
	}
	context = Device.ImmediateContext;               
	if (adapter == null)
	{
		Device.Tag = new Luid().ToString();
		using var deviceTmp = Device.QueryInterface<IDXGIDevice1>();
		using var adapterTmp = deviceTmp.GetAdapter();
		adapter = adapterTmp.QueryInterface<IDXGIAdapter1>();
	}
	else
	{
		Device.Tag = adapter.Description.Luid.ToString();
	}                    
	GPUAdapter = Engine.Video.GPUAdapters[adapter.Description1.Luid];
	Config.Video.MaxVerticalResolutionAuto = GPUAdapter.MaxHeight;              
	tempDevice.Dispose();
	adapter.Dispose();
	using (var mthread    = Device.QueryInterface<ID3D11Multithread>()) mthread.SetMultithreadProtected(true);
	using (var dxgidevice = Device.QueryInterface<IDXGIDevice1>())      dxgidevice.MaximumFrameLatency = 1;
	// Full-screen quad as two triangles, five floats per vertex: position (x, y, z) + texture coords (u, v)
	ReadOnlySpan<float> vertexBufferData = new float[]
	{
		-1.0f,  -1.0f,  0,      0.0f, 1.0f,
		-1.0f,   1.0f,  0,      0.0f, 0.0f,
		 1.0f,  -1.0f,  0,      1.0f, 1.0f,

		 1.0f,  -1.0f,  0,      1.0f, 1.0f,
		-1.0f,   1.0f,  0,      0.0f, 0.0f,
		 1.0f,   1.0f,  0,      1.0f, 0.0f
	};
	vertexBuffer = Device.CreateBuffer(vertexBufferData, new BufferDescription() { BindFlags = BindFlags.VertexBuffer });
	context.IASetVertexBuffer(0, vertexBuffer, sizeof(float) * 5);
	InitPS();             
	ShaderVS = Device.CreateVertexShader(ShaderCompiler.VSBlob);
	vertexLayout = Device.CreateInputLayout(inputElements, ShaderCompiler.VSBlob);
	context.IASetInputLayout(vertexLayout);
	context.IASetPrimitiveTopology(PrimitiveTopology.TriangleList);
	context.VSSetShader(ShaderVS);
	psBuffer = Device.CreateBuffer(new BufferDescription()
	{
		Usage           = ResourceUsage.Default,
		BindFlags       = BindFlags.ConstantBuffer,
		CPUAccessFlags  = CpuAccessFlags.None,
		ByteWidth       = sizeof(PSBufferType) + (16 - (sizeof(PSBufferType) % 16)) // constant buffer sizes must be multiples of 16 bytes
	});
	context.PSSetConstantBuffer(0, psBuffer);
	psBufferData.hdrmethod = HDRtoSDRMethod.None;
	context.UpdateSubresource(psBufferData, psBuffer);
	vsBuffer = Device.CreateBuffer(new BufferDescription()
	{
		Usage           = ResourceUsage.Default,
		BindFlags       = BindFlags.ConstantBuffer,
		CPUAccessFlags  = CpuAccessFlags.None,
		ByteWidth       = sizeof(VSBufferType) + (16 - (sizeof(VSBufferType) % 16))
	});
	context.VSSetConstantBuffer(0, vsBuffer);
	vsBufferData.mat = Matrix4x4.Identity;
	context.UpdateSubresource(vsBufferData, vsBuffer);      
	InitializeVideoProcessor();                                     
	if (swapChain)
	{
		if (ControlHandle != IntPtr.Zero)
			InitializeSwapChain(ControlHandle);
		else if (SwapChainWinUIClbk != null)
			InitializeWinUISwapChain();
	}
	InitializeChildSwapChain();
}
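
Initialize drives everything through the immediate context. For completeness, here is a hedged sketch of the deferred-context model mentioned in Step 2. It assumes Vortice exposes the native CreateDeferredContext/FinishCommandList/ExecuteCommandList methods under these names, and someRtv is a hypothetical render target view:

// A worker thread records commands into a deferred context...
ID3D11DeviceContext deferred = Device.CreateDeferredContext();
deferred.ClearRenderTargetView(someRtv, new Color4(0, 0, 0, 1)); // recorded, not executed yet
ID3D11CommandList commandList = deferred.FinishCommandList(false);

// ...and only the thread that owns the immediate context replays them
context.ExecuteCommandList(commandList, false);
commandList.Dispose();
deferred.Dispose();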

Step 3: Create the Video Processor with CreateVideoProcessor

CreateVideoProcessor — in this renderer the Direct3D 11 video API call ID3D11VideoDevice::CreateVideoProcessor rather than a Media Foundation function, although Media Foundation builds on the same facility — creates a video processor that encapsulates the graphics hardware's capabilities for processing uncompressed video frames. It covers operations such as video-processing blits (bit-block transfers) and frame composition, which extends its usefulness across a wide range of video-processing scenarios.

void InitializeVideoProcessor()
{      
	// Placeholder 1x1 content description; the real input/output sizes are set later per stream
	vpcd.InputWidth  = 1;
	vpcd.InputHeight = 1;
	vpcd.OutputWidth = vpcd.InputWidth;
	vpcd.OutputHeight= vpcd.InputHeight;
	outputColorSpace = new VideoProcessorColorSpace()
	{
		Usage           = 0, // 0 = playback
		RGB_Range       = 0, // 0 = full range (0-255)
		YCbCr_Matrix    = 1, // 1 = BT.709
		YCbCr_xvYCC     = 0,
		Nominal_Range   = 2  // D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255
	};
	// Processor capabilities are cached per device, keyed by the adapter LUID stored in Device.Tag
	if (VideoProcessorsCapsCache.ContainsKey(Device.Tag.ToString()))
	{
		if (VideoProcessorsCapsCache[Device.Tag.ToString()].Failed)
		{
			InitializeFilters();
			return;
		}
		vd1 = Device.QueryInterface<ID3D11VideoDevice1>();
		vc  = context.QueryInterface<ID3D11VideoContext1>();
		vd1.CreateVideoProcessorEnumerator(ref vpcd, out vpe);
		if (vpe == null)
		{
			VPFailed();
			return;
		}		
		vd1.CreateVideoProcessor(vpe, VideoProcessorsCapsCache[Device.Tag.ToString()].TypeIndex, out vp);
		InitializeFilters();
		return;
	}
	VideoProcessorCapsCache cache = new();
	VideoProcessorsCapsCache.Add(Device.Tag.ToString(), cache);
	vd1 = Device.QueryInterface<ID3D11VideoDevice1>();
	vc  = context.QueryInterface<ID3D11VideoContext1>();
	vd1.CreateVideoProcessorEnumerator(ref vpcd, out vpe);
	if (vpe == null || Device.FeatureLevel < Vortice.Direct3D.FeatureLevel.Level_10_0)
	{
		VPFailed();
		return;
	}
	var vpe1 = vpe.QueryInterface<ID3D11VideoProcessorEnumerator1>();
	bool supportHLG = vpe1.CheckVideoProcessorFormatConversion(Format.P010, ColorSpaceType.YcbcrStudioGhlgTopLeftP2020, Format.B8G8R8A8_UNorm, ColorSpaceType.RgbFullG22NoneP709);
	bool supportHDR10Limited = vpe1.CheckVideoProcessorFormatConversion(Format.P010, ColorSpaceType.YcbcrStudioG2084TopLeftP2020, Format.B8G8R8A8_UNorm, ColorSpaceType.RgbStudioG2084NoneP2020);
	var vpCaps = vpe.VideoProcessorCaps;
	// Record every filter (brightness, contrast, ...) the processor supports, together with its value range
	foreach (VideoProcessorFilterCaps filter in Enum.GetValues(typeof(VideoProcessorFilterCaps)))
	{
		if ((vpCaps.FilterCaps & filter) != 0)
		{
			vpe1.GetVideoProcessorFilterRange(ConvertFromVideoProcessorFilterCaps(filter), out var range);			
			var vf = ConvertFromVideoProcessorFilterRange(range);
			vf.Filter = (VideoFilters)filter;
			cache.Filters.Add((VideoFilters)filter, vf);
		}		
	}		
	int typeIndex = -1;
	VideoProcessorRateConversionCaps rcCap = new();
	// Prefer the first rate-conversion caps entry that supports bob deinterlacing
	for (int i = 0; i < vpCaps.RateConversionCapsCount; i++)
	{
		vpe.GetVideoProcessorRateConversionCaps(i, out rcCap);
		typeIndex = i;
		if (((VideoProcessorProcessorCaps)rcCap.ProcessorCaps & VideoProcessorProcessorCaps.DeinterlaceBob) != 0)
			break;
	}
	vpe1.Dispose();             
	cache.TypeIndex = typeIndex;
	cache.HLG = supportHLG;
	cache.HDR10Limited = supportHDR10Limited;
	cache.VideoProcessorCaps = vpCaps;
	cache.VideoProcessorRateConversionCaps = rcCap;
	vd1.CreateVideoProcessor(vpe, typeIndex, out vp);
	if (vp == null)
	{
		VPFailed();
		return;
	}                       
	cache.Failed = false;
	InitializeFilters();
}
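
Creating the processor is only half the job: per frame, the decoded texture has to be wrapped in an input view and blitted to the output view. Here is a hedged sketch using the vd1/vc/vp/vpe/vpov fields from the code above; frameTexture, vpivd (an input-view description) and inputColorSpace are assumptions that this post does not show:

// Per-frame use of the video processor (sketch)
vd1.CreateVideoProcessorInputView(frameTexture, vpe, vpivd, out var vpiv);

vc.VideoProcessorSetStreamColorSpace(vp, 0, inputColorSpace); // how to interpret the source
vc.VideoProcessorSetOutputColorSpace(vp, outputColorSpace);   // target color space

var stream = new VideoProcessorStream() { Enable = true, InputSurface = vpiv };
vc.VideoProcessorBlt(vp, vpov, 0, 1, new[] { stream });       // scale/convert/compose onto the back buffer
vpiv.Dispose();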

Step 4: Create the Render Target View with CreateRenderTargetView

The main purpose of CreateRenderTargetView is to bind a render target to the GPU so that it can serve as the output of rendering. In graphics programming, a render target is any surface that can receive rendering output: the screen, a texture, or an off-screen buffer. When we create a render target in DirectX or a similar graphics API, we must bind it so the GPU knows where to direct its rendering operations.

internal void InitializeSwapChain(IntPtr handle)
{
	lock (lockDevice)
	{           
		ControlHandle   = handle;
		RECT rect       = new();
		GetWindowRect(ControlHandle, ref rect);
		ControlWidth    = rect.Right  - rect.Left;
		ControlHeight   = rect.Bottom - rect.Top;
		if (cornerRadius == zeroCornerRadius)
		{
			// No rounded corners: a plain HWND swap chain is enough
			swapChain = Engine.Video.Factory.CreateSwapChainForHwnd(Device, handle, GetSwapChainDesc(ControlWidth, ControlHeight));
		}
		else
		{
			// Rounded corners need DirectComposition: render into a composition swap chain
			// and attach it to the window as a visual
			swapChain = Engine.Video.Factory.CreateSwapChainForComposition(Device, GetSwapChainDesc(ControlWidth, ControlHeight, true, true));
			using (var dxgiDevice = Device.QueryInterface<IDXGIDevice>())
				dCompDevice = DComp.DCompositionCreateDevice<IDCompositionDevice>(dxgiDevice);
			dCompDevice.CreateTargetForHwnd(handle, false, out dCompTarget).CheckError();
			dCompDevice.CreateVisual(out dCompVisual).CheckError();
			dCompVisual.SetContent(swapChain).CheckError();
			dCompTarget.SetRoot(dCompVisual).CheckError();
			dCompDevice.Commit().CheckError();
			int styleEx = GetWindowLong(handle, (int)WindowLongFlags.GWL_EXSTYLE).ToInt32() | WS_EX_NOREDIRECTIONBITMAP;
			SetWindowLong(handle, (int)WindowLongFlags.GWL_EXSTYLE, new IntPtr(styleEx));
		}
		backBuffer      = swapChain.GetBuffer<ID3D11Texture2D>(0);
		backBufferRtv   = Device.CreateRenderTargetView(backBuffer);
		SCDisposed      = false;          
		ResizeBuffers(ControlWidth, ControlHeight); // maybe not required (only for vp)?
	}
}

public void ResizeBuffers(int width, int height)
{
	lock (lockDevice)
	{		
		ControlWidth = width;
		ControlHeight = height;
		// Every reference to the back buffer must be released before ResizeBuffers, or the call fails
		backBufferRtv.Dispose();
		vpov?.Dispose();
		backBuffer.Dispose();
		swapChain.ResizeBuffers(0, ControlWidth, ControlHeight, Format.Unknown, SwapChainFlags.None);
		UpdateCornerRadius();
		backBuffer = swapChain.GetBuffer<ID3D11Texture2D>(0);
		backBufferRtv = Device.CreateRenderTargetView(backBuffer);
		if (videoProcessor == VideoProcessors.D3D11)
		{
			vd1.CreateVideoProcessorOutputView(backBuffer, vpe, vpovd, out vpov);
		}
		SetViewport();
	}
}
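
With backBufferRtv recreated, the per-frame output reduces to binding it, drawing the full-screen quad prepared in Initialize, and presenting the swap chain. A minimal sketch follows; frameSrv, a shader resource view over the decoded frame, is a hypothetical name:

// Bind the back-buffer RTV, draw the textured quad, present with vsync
context.OMSetRenderTargets(backBufferRtv);
context.ClearRenderTargetView(backBufferRtv, new Color4(0, 0, 0, 1));
context.PSSetShaderResource(0, frameSrv); // SRV over the decoded frame (hypothetical)
context.Draw(6, 0);                       // six vertices = the two-triangle quad
swapChain.Present(1, PresentFlags.None);  // sync interval 1 = wait for vblank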
