OSG Usage Notes (12): SSAO (Screen-Space Ambient Occlusion)

I. Basic Concepts

1. SSAO approximates indirect lighting by darkening creases, holes, and surfaces that sit very close to other geometry (such as a wall). SSAO stands for screen-space ambient occlusion: the amount of occlusion is determined from the scene's depth in screen space rather than from the real geometry, which makes it fast while still looking good.

2. How it works: random depth samples are generated inside a hemisphere oriented along the surface normal; the depths around the fragment are compared against these samples, and the result decides how much the fragment is occluded.

3. Key points:

  a. Project the sample point into screen space and look up the depth texture

  b. Sample the depth buffer

  c. If the stored depth lies in front of the sample position, the sample is occluded and the occlusion factor increases

  This means the sample density determines the quality of the final occlusion, but too many samples slow rendering down. Randomly rotating the sample kernel per pixel gives good occlusion with only a few samples (see the sketch below).
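A minimal GLSL sketch of points a–c (the complete shader, including the noise-based kernel rotation and the range check, is built in section 6 below; fragPos, tbn, ssaoKernel and positionTex are defined there):

// Sketch only: per-fragment occlusion estimate following points a-c above.
float occlusion = 0.0;
for (int i = 0; i < kernelSize; ++i)
{
	vec3 samplePos = fragPos + (tbn * ssaoKernel[i]) * ssaoRadius; // sample inside the oriented hemisphere
	vec4 offset = projMatrix * vec4(samplePos, 1.0);               // a. project the sample to screen space
	offset.xyz = offset.xyz / offset.w * 0.5 + 0.5;
	float sampleDepth = texture(positionTex, offset.xy).z;         // b. read the stored view-space depth
	occlusion += (sampleDepth >= samplePos.z ? 1.0 : 0.0);         // c. stored surface in front -> occluded
}
occlusion = 1.0 - occlusion / float(kernelSize);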

4. Steps for generating the normal-oriented hemisphere samples:

  a. Generate random 3D points distributed roughly as a hemisphere oriented along the +z axis

// Random number generator (xorshift32)
unsigned int xorshift32()
{
	static unsigned int x=1424447641;
	x^=x<<13;
	x^=x>>17;
	x^=x<<5;
	return x;
}

// Random float in [min, max]
float random(float min,float max)
{
	return min+(max-min)*(static_cast<float>(xorshift32())/static_cast<float>(0xFFFFFFFFu));
}
// Linear interpolation
float lerp(float min,float max,float t)
{
	return min*(1.0f-t)+max*t;
}
// Generate kernelSize sample points inside a +z-oriented hemisphere
osg::Vec3f* generateHemisphereSamples(int kernelSize)
{
	osg::Vec3f* kernel=new osg::Vec3f[kernelSize];
    for (int i = 0; i < kernelSize; ++i) {
        kernel[i]=osg::Vec3f(random(-0.95f,0.95f),random(-0.95f,0.95f),random(0.0f,1.0f));
        kernel[i].normalize();
        kernel[i] *= random(0.0f, 1.0f);
        // Accelerating interpolation pulls most samples toward the origin
        float scale = float(i) / float(kernelSize);
        scale = lerp(0.1f, 1.0f, scale * scale);
        kernel[i] *= scale;
    }
    return kernel;
}

  b. Spread the points within the hemisphere so that more of them end up close to the center point; the lines below (from the loop above) scale each sample toward the origin with an accelerating interpolation

kernel[i] *= random(0.0f, 1.0f);
float scale = float(i) / float(kernelSize);
scale = lerp(0.1f, 1.0f, scale * scale);
kernel[i] *= scale;

  c. Generate the noise texture

// Generate noiseSize random rotation vectors in the xy plane, remapped to [0,1]
osg::Vec3f* generateNoise(int noiseSize)
{
	osg::Vec3f* noiseData=new osg::Vec3f[noiseSize];
	for(int i=0;i<noiseSize;++i)
	{
		noiseData[i]=osg::Vec3f(random(-1.0f,1.0f),random(-1.0f,1.0f),0.0f);
		noiseData[i].normalize();
		noiseData[i]=noiseData[i]+osg::Vec3f(1.0,1.0,1.0);
		noiseData[i]=noiseData[i]/2.f;
	}
	return noiseData;
}

osg::ref_ptr<osg::Texture2D> createDataTexture(int width,int height)
{
	osg::ref_ptr<osg::Texture2D> texture=new osg::Texture2D;
	osg::Image* image=new osg::Image;
	auto data=generateNoise(width*height);
	// NO_DELETE: the image does not take ownership of the noise data
	image->setImage(width,height,1,GL_RGB32F_ARB,GL_RGB,GL_FLOAT,(unsigned char*)data,osg::Image::NO_DELETE);
	texture->setImage(image);
	texture->setWrap(osg::Texture::WRAP_S,osg::Texture::REPEAT);
	texture->setWrap(osg::Texture::WRAP_T,osg::Texture::REPEAT);
	texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::NEAREST);
	texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::NEAREST);
	return texture;
}

5. Create the deferred-rendering (G-buffer) camera and shader

/* Deferred-rendering (G-buffer) camera */
osg::ref_ptr<RttCamera> createDeferCamera(osg::Camera::BufferComponent buffer1,osg::Texture* tex1,
										  osg::Camera::BufferComponent buffer2,osg::Texture* tex2,
                                          osg::Camera::BufferComponent buffer3,osg::Texture* tex3,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setRenderTargetImplementation(osg::Camera::RenderTargetImplementation::FRAME_BUFFER_OBJECT);
	camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback);
	camera->setRenderOrder(osg::Camera::PRE_RENDER,20);
	camera->setViewport(0,0,width,height);
	if(tex1)
	{
		tex1->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex1->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer1,tex1);
	}
	if(tex2)
	{
		tex2->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex2->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer2,tex2);
	}
	if(tex3)
	{
		tex3->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex3->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer3,tex3);
	}
	/// Vertex shader
	const char* vertCode=R"(
		#version 330
		layout(location = 0) in vec3 Position;
		layout(location = 2) in vec3 normal;
		layout(location = 3) in vec2 TexCoord;
		
		uniform mat4 osg_ModelViewProjectionMatrix;
		uniform mat4 osg_ModelViewMatrix;
		uniform mat4 osg_NormalMatrix;
		
		out vec3 vNormal;
		out vec2 texCoord;
		out vec4 fragPos;
		void main()
		{
			texCoord=TexCoord;
			fragPos=osg_ModelViewMatrix*vec4(Position,1.0);
			vec4 viewNorm=transpose(inverse(osg_ModelViewMatrix))*vec4(-normal,0.0);
			vNormal=normalize(viewNorm.xyz);
			gl_Position=osg_ModelViewProjectionMatrix*vec4(Position,1.0);
		}
	)";
	
	const char* fragCode=R"(
		#version 330 core
		uniform vec3 frontCol=vec3(1.0,0.0,0.2);
        layout (location = 0) out vec4 gColor;
        layout (location = 1) out vec4 gNormal;
        layout (location = 2) out vec4 gPosition;

        in vec2 texCoord;
        in vec4 fragPos;
        in vec3 vNormal;

        void main()
        {    
            // Store the fragment position vector in the first gbuffer texture
            gPosition.xyz = fragPos.xyz;
            // Also store the per-fragment normals into the gbuffer
            gNormal = vec4(vNormal,1.0);          
            gColor=vec4(frontCol,1.0);
        }
	)";
	
	osg::ref_ptr<osg::Shader> vertShader=new osg::Shader(osg::Shader::VERTEX,vertCode);
    osg::ref_ptr<osg::Shader> fragShader=new osg::Shader(osg::Shader::FRAGMENT,fragCode);
    osg::ref_ptr<osg::Program>  program=new osg::Program;
    program->addShader(vertShader);
    program->addShader(fragShader);
    camera->getOrCreateStateSet()->setAttributeAndModes(program,osg::StateAttribute::ON|osg::StateAttribute::OVERRIDE);
    return camera;
}
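The code above uses RttCamera and FBOPostDrawCallback, which come from earlier posts in this series and are not repeated here. A minimal sketch of what they are assumed to look like (stand-ins for illustration only, not the original implementations):

// Assumed stand-ins for helper classes from earlier posts in this series.
#include <osg/Camera>

// An FBO render-to-texture camera that simply remembers its render size.
class RttCamera : public osg::Camera
{
public:
	RttCamera(int width, int height) : _width(width), _height(height) {}
protected:
	int _width, _height;
};

// Post-draw hook attached to each pass; a real implementation might unbind or
// read back the FBO here.
struct FBOPostDrawCallback : public osg::Camera::DrawCallback
{
	virtual void operator()(osg::RenderInfo& /*renderInfo*/) const {}
};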

6. Create the SSAO shader. sampleDepth is read from the position texture (the view-space depth). If it lies in front of the sample position, the sample is "inside" the geometry and contributes to the occlusion; if sampleDepth lies behind the sample position, the sample does not contribute. The rangeCheck term helps prevent false occlusion across large depth discontinuities.

osg::ref_ptr<RttCamera> createSSAOCamera(osg::Texture* positionTex,osg::Texture* normalTex,osg::Matrix& projMat,osg::Camera::BufferComponent buffer,osg::Texture* tex,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setRenderTargetImplementation(osg::Camera::RenderTargetImplementation::FRAME_BUFFER_OBJECT);
	camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback);
	camera->setRenderOrder(osg::Camera::PRE_RENDER,20);
	camera->setViewport(0,0,width,height);
	if(tex)
	{
		tex->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		
		camera->setViewport(0,0,tex->getTextureWidth(),tex->getTextureHeight());
		camera->attach(buffer,tex);
	}
	
	int noiseSize=4,kernelSize=8;
	float radius=5.f,power=3.f;
	/// Set up the camera's stateset
	auto ss=camera->getOrCreateStateSet();
	ss->addUniform(new osg::Uniform("noiseTex",0));
	ss->setTextureAttributeAndModes(0,createDataTexture(noiseSize,noiseSize));
	ss->addUniform(new osg::Uniform("positionTex",1));
	ss->setTextureAttributeAndModes(1,positionTex);
	ss->addUniform(new osg::Uniform("normalTex",2));
	ss->setTextureAttributeAndModes(2,normalTex);
	
	ss->addUniform(new osg::Uniform("ssaoRadius",radius));
	ss->addUniform(new osg::Uniform("ssaoPower",power));
	ss->addUniform(new osg::Uniform("kernelSize",kernelSize*kernelSize));
	ss->addUniform(new osg::Uniform("noiseTextureRcp",osg::Vec2(width/(float)noiseSize,height/(float)noiseSize)));
	ss->addUniform(new osg::Uniform("projMatrix",(osg::Matrixf)projMat));
	/// Fill the hemisphere sample kernel uniform array
	auto kernelUniform=new osg::Uniform(osg::Uniform::FLOAT_VEC3,"ssaoKernel",kernelSize*kernelSize);
	auto kernelData=generateHemisphereSamples(kernelSize*kernelSize);
	for(int i=0;i<kernelSize*kernelSize;i++)
	{
		kernelUniform->setElement(i,kernelData[i]);
	}
	ss->addUniform(kernelUniform);
	delete[] kernelData;
    /// Vertex shader
	const char* vertCode=R"(
		#version 330
		layout(location = 0) in vec3 Position;
		layout(location = 3) in vec2 TexCoord;
		
		uniform mat4 osg_ModelViewProjectionMatrix;
		uniform mat4 osg_ModelViewMatrix;
		uniform mat4 osg_NormalMatrix;
		
		out vec2 texCoord;
		void main()
		{
			texCoord=TexCoord;
			gl_Position=osg_ModelViewProjectionMatrix*vec4(Position,1.0);
		}
	)";
	/// Fragment shader
	const char* fragCode=R"(
		#version 330 core
		uniform sampler2D positionTex;
		uniform sampler2D normalTex;
		uniform sampler2D noiseTex;
		
		const int MAX_KERNEL_SIZE=128;
		uniform vec3 ssaoKernel[MAX_KERNEL_SIZE];
        
		uniform mat4 projMatrix;
		uniform vec2 noiseTextureRcp;
		uniform int kernelSize;
		uniform float ssaoRadius;
		uniform float ssaoPower;
		const float bias=0.0;
		
		in vec2 texCoord;
		out vec4 fragColor;
		
        void main()
        {    
            // View-space position of this fragment
            vec3 fragPos=texture(positionTex,texCoord).xyz;
            // View-space normal of this fragment
            vec3 normal=normalize(texture(normalTex,texCoord).xyz);
            // Random rotation vector, tiled over the screen
            vec3 rvec=texture(noiseTex,texCoord*noiseTextureRcp).xyz;
            // Build the tangent-space to view-space rotation matrix (Gram-Schmidt)
            vec3 tangent=normalize(rvec-dot(rvec,normal)*normal);
            vec3 bitangent=cross(tangent,normal);
            mat3 tbn=mat3(tangent,bitangent,normal);
            
            float occlusion=0.0;
            for(int i=0;i<kernelSize;++i)
            {
            	// Sample position in view space
            	vec3 _sample=fragPos+(tbn*ssaoKernel[i])*ssaoRadius;
            	// Project the sample position to screen space
            	vec4 offset=projMatrix*vec4(_sample,1.0);
            	offset.xyz/=offset.w;
            	offset.xyz=offset.xyz*0.5+0.5;
            	// Depth stored at the sample's screen position
            	float sampleDepth=texture(positionTex,offset.xy).z;
            	float dist=abs(fragPos.z-sampleDepth);
            	float rangeCheck=smoothstep(0.0,1.0,ssaoRadius/dist);
            	occlusion+=rangeCheck*(sampleDepth>=_sample.z+bias?1.0:0.0);
            }
            occlusion=1.0-(occlusion/float(kernelSize));
            fragColor=vec4(vec3(occlusion),1.0);
        }
	)";
	/// Full-screen quad vertices
	osg::ref_ptr<osg::Vec3Array> vertices= new osg::Vec3Array;
	vertices->push_back(osg::Vec3(-width,-height,0.f));
	vertices->push_back(osg::Vec3(width,-height,0.f));
	vertices->push_back(osg::Vec3(width,height,0.f));
	vertices->push_back(osg::Vec3(-width,height,0.f));
	/// Quad normal
	osg::ref_ptr<osg::Vec3Array> normals= new osg::Vec3Array;
	normals->push_back(osg::Vec3(0.0,0.0,1.f));
	/// Quad texture coordinates
	osg::ref_ptr<osg::Vec2Array> texCoords= new osg::Vec2Array;
	texCoords->push_back(osg::Vec2(0.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,1.f));
	texCoords->push_back(osg::Vec2(0.0,1.f));
	/// Quad geometry
	osg::ref_ptr<osg::Geometry> quad= new osg::Geometry;
	quad->setVertexArray(vertices);
	quad->setNormalArray(normals,osg::Array::BIND_OVERALL);
	quad->setTexCoordArray(0,texCoords);
	quad->addPrimitiveSet(new osg::DrawArrays(GL_QUADS,0,4));
	/// Quad node
	osg::ref_ptr<osg::Geode> quadGeode=new osg::Geode;
	quadGeode->addDrawable(quad);
	osg::ref_ptr<osg::Shader> vertShader=new osg::Shader(osg::Shader::VERTEX,vertCode);
    osg::ref_ptr<osg::Shader> fragShader=new osg::Shader(osg::Shader::FRAGMENT,fragCode);
    osg::ref_ptr<osg::Program>  program=new osg::Program;
    program->addShader(vertShader);
    program->addShader(fragShader);
    quadGeode->getOrCreateStateSet()->setAttributeAndModes(program,osg::StateAttribute::ON|osg::StateAttribute::OVERRIDE);
    
    camera->addChild(quadGeode);
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
    camera->setProjectionMatrix(osg::Matrix::ortho2D(-width,width,-height,height));    
	return camera;
}	

7. Create the blur shader, which averages the 4x4 neighborhood of each pixel's SSAO value to remove the noise pattern; in this implementation the same pass also applies a simple directional light to the blurred result.

osg::ref_ptr<RttCamera> createBlurCamera(osg::Texture* colorTex,osg::Texture* ssaoTex,osg::Texture* normalTex,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setRenderTargetImplementation(osg::Camera::RenderTargetImplementation::FRAME_BUFFER_OBJECT);
	camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback);
	camera->setRenderOrder(osg::Camera::POST_RENDER,100);
	camera->setViewport(0,0,width,height);

	
	/// Set up the camera's stateset
	auto ss=camera->getOrCreateStateSet();
	ss->addUniform(new osg::Uniform("colorTex",0));
	ss->setTextureAttributeAndModes(0,colorTex);
	ss->addUniform(new osg::Uniform("ssaoTex",1));
	ss->setTextureAttributeAndModes(1,ssaoTex);
	ss->addUniform(new osg::Uniform("normalTex",2));
	ss->setTextureAttributeAndModes(2,normalTex);
	
    /// Vertex shader
	const char* vertCode=R"(
		#version 330
		layout(location = 0) in vec3 Position;
		layout(location = 3) in vec2 TexCoord;
		
		uniform mat4 osg_ModelViewProjectionMatrix;
		uniform mat4 osg_ModelViewMatrix;
		uniform mat4 osg_NormalMatrix;
		
		out vec2 texCoord;
		void main()
		{
			texCoord=TexCoord;
			gl_Position=osg_ModelViewProjectionMatrix*vec4(Position,1.0);
		}
	)";
	/// Fragment shader
	const char* fragCode=R"(
		#version 330 core
		uniform sampler2D colorTex;
		uniform sampler2D ssaoTex;
		uniform sampler2D normalTex;
		
		const int blurSize=4;
		uniform mat4 osg_ModelViewMatrix;
		uniform float ambFactor=0.5;
		uniform float diffFactor=0.8;
		uniform float specFactor=0.15;
		uniform float shininess=128.0;
		// Assumed view-space light direction (pointing toward the viewer)
		uniform vec3 lightDir=vec3(0.0,0.0,1.0);
		
		vec3 calcDirLight(vec3 color,vec3 normal,float ambient,float diffuse,float specular,float strenth)
		{
			float diff=max(0.0,dot(normal,lightDir));
			float spec=pow(diff,strenth);
			return color*(ambient+diff*diffuse)+spec*specular;
		}
		in vec2 texCoord;
		out vec4 fragColor;
        void main()
        {    
            // Average the 4x4 neighborhood of the SSAO texture
            vec2 texelSize = 1.0 / vec2(textureSize(ssaoTex, 0));
   			float result = 0.0;
   			for (int i = 0; i < blurSize; ++i) 
   			{
      			for (int j = 0; j < blurSize; ++j)
                {
         			vec2 offset = (vec2(-2.0) + vec2(float(i), float(j))) * texelSize;
         			result += texture(ssaoTex, texCoord + offset).r;
      			}
   			}
 
   			result = result / float(blurSize * blurSize);
   			vec3 color=texture(colorTex,texCoord).rgb;
   			vec3 normal=texture(normalTex,texCoord).rgb;
   			vec4 viewNorm=osg_ModelViewMatrix*vec4(-normal,0.0);
   			normal=normalize(viewNorm.xyz);
            // Attenuate the ambient term with the blurred occlusion
            vec3 lightCol=calcDirLight(color,normal,ambFactor*result,diffFactor,specFactor,shininess);
            lightCol=mix(lightCol,vec3(1.0),step(normal.z,0.0));
            fragColor=vec4(lightCol,1.0);
        }
	)";
	/// Full-screen quad vertices
	osg::ref_ptr<osg::Vec3Array> vertices= new osg::Vec3Array;
	vertices->push_back(osg::Vec3(-width,-height,0.f));
	vertices->push_back(osg::Vec3(width,-height,0.f));
	vertices->push_back(osg::Vec3(width,height,0.f));
	vertices->push_back(osg::Vec3(-width,height,0.f));
	/// Quad normal
	osg::ref_ptr<osg::Vec3Array> normals= new osg::Vec3Array;
	normals->push_back(osg::Vec3(0.0,0.0,1.f));
	/// Quad texture coordinates
	osg::ref_ptr<osg::Vec2Array> texCoords= new osg::Vec2Array;
	texCoords->push_back(osg::Vec2(0.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,1.f));
	texCoords->push_back(osg::Vec2(0.0,1.f));
	/// Quad geometry
	osg::ref_ptr<osg::Geometry> quad= new osg::Geometry;
	quad->setVertexArray(vertices);
	quad->setNormalArray(normals,osg::Array::BIND_OVERALL);
	quad->setTexCoordArray(0,texCoords);
	quad->addPrimitiveSet(new osg::DrawArrays(GL_QUADS,0,4));
	/// Quad node
	osg::ref_ptr<osg::Geode> quadGeode=new osg::Geode;
	quadGeode->addDrawable(quad);
	osg::ref_ptr<osg::Shader> vertShader=new osg::Shader(osg::Shader::VERTEX,vertCode);
    osg::ref_ptr<osg::Shader> fragShader=new osg::Shader(osg::Shader::FRAGMENT,fragCode);
    osg::ref_ptr<osg::Program>  program=new osg::Program;
    program->addShader(vertShader);
    program->addShader(fragShader);
    quadGeode->getOrCreateStateSet()->setAttributeAndModes(program,osg::StateAttribute::ON|osg::StateAttribute::OVERRIDE);
    
    camera->addChild(quadGeode);
    camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
    camera->setProjectionMatrix(osg::Matrix::ortho2D(-width,width,-height,height));    
	return camera;
}	

8. Attach the G-buffer camera, the SSAO camera and the blur camera under the root node

/// First create the deferred-rendering pass, which writes the view-space position, color and normal textures
auto positionTex=createColorTexture(width,height);
auto colorTex=createColorTexture(width,height);
auto normalTex=createColorTexture(width,height); 
auto pass1=createDeferCamera(osg::Camera::COLOR_BUFFER2,positionTex, osg::Camera::COLOR_BUFFER0,colorTex,osg::Camera::COLOR_BUFFER1,normalTex,width,height);
pass1->setRenderOrder(osg::Camera::PRE_RENDER,20);
pass1->setClearColor(osg::Vec4(1.0,1.0,1.0,1.0));
pass1->addChild(model);
/// Then create the SSAO pass, which writes the SSAO texture (projMat is the main camera's projection matrix)
auto ssaoTex=createColorTexture(width,height);
auto pass2=createSSAOCamera(positionTex,normalTex,projMat,osg::Camera::COLOR_BUFFER,ssaoTex,width,height);
pass2->setRenderOrder(osg::Camera::PRE_RENDER,100);
pass2->setClearColor(osg::Vec4(1.0,1.0,1.0,1.0));
/// Finally create the blur/lighting pass
auto pass3 =createBlurCamera(colorTex,ssaoTex,normalTex,width,height);
pass3->setRenderOrder(osg::Camera::POST_RENDER,300);
pass3->setClearColor(osg::Vec4(1.0,1.0,1.0,1.0));
root->addChild(pass1);
root->addChild(pass2);
root->addChild(pass3);  
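createColorTexture is also a helper from an earlier post. The sketch below shows what the passes above assume: a float color texture usable as an FBO attachment (the internal format here is an assumption; positions and normals need a float format so they are not clamped to [0,1]). projMat is assumed to be the projection matrix of the main scene camera, e.g. viewer.getCamera()->getProjectionMatrix().

// Assumed helper: a float RGBA texture used as the render target of each pass.
#include <osg/Texture2D>

osg::ref_ptr<osg::Texture2D> createColorTexture(int width,int height)
{
	osg::ref_ptr<osg::Texture2D> texture=new osg::Texture2D;
	texture->setTextureSize(width,height);
	texture->setInternalFormat(GL_RGBA16F_ARB); // float format, so positions/normals are not clamped
	texture->setSourceFormat(GL_RGBA);
	texture->setSourceType(GL_FLOAT);
	texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
	texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
	return texture;
}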