unity玉石渲染
来源:互联网 发布:2016漳州市城镇化数据 编辑:程序博客网 时间:2024/06/12 01:05
在正常情况下,Diffuse在背光面都是纯黑的,为了模拟简单的次表面散射,引入环绕光照,环绕光照对Diffuse做出如下修改。
float diffuse = max(0,dot(L,N)); float wrap_diffuse = max(0,(dot(L,N)+_Wrap)/(1+_Wrap));//_Wrap为外部传入参数
下图是环绕光照函数的图表:
仅靠环绕光照处理过的漫反射来模拟次表面散射,其真实性还远远不够,所以我们要在上述基础上用深度映射模拟吸收。
吸收是模拟半透明材质最重要的因素之一。光线在物质中传播的越远,被散射和吸收的就越厉害,为了模拟这种效果,我们需要测量光在物质中传播的距离,估算这个距离的方法就是深度映射。深度映射的实现原理即我们要在光源的视点去渲染场景,存储光源到某个模型的表面距离到RTT中。然后在渲染玉石的shader中得到这个深度,再计算当前渲染点到光源的距离,两者的差值就是光传播的距离,得到光在物质中的传播距离后可以索引美工所创建的一维纹理,或者可以选择直接计算指数函数 。
return exp(-Dist*sigma_t);
注意:此种方案仅适用于模型处于光源和视角之间的时候,即视角在背光面。使用深度映射计算光在物体中传播的距离的示意图如下:
具体实现代码如下:
这个shader是在光源空间对物体表面深度存储
// Replacement shader that stores scene depth as seen from the light's
// viewpoint. It is rendered into an RTT by the companion C# script
// (Camera.RenderWithShader); the jade shader later samples that RTT to
// estimate how far light travels inside the object.
Shader "Custom/DepthShader"
{
    SubShader
    {
        Tags { "RenderType" = "Opaque" }

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float3 normal : NORMAL;
            };

            struct v2f
            {
                float2 depth : TEXCOORD0;  // x = clip-space z, y = clip-space w
                float4 pos   : SV_POSITION;
            };

            v2f vert(appdata input)
            {
                v2f output;
                output.pos = mul(UNITY_MATRIX_MVP, input.vertex);
                // Carry clip-space z/w to the fragment stage so the depth can
                // be recovered there after the perspective divide.
                output.depth.xy = output.pos.zw;
                return output;
            }

            float4 frag(v2f input) : SV_Target
            {
                // Perspective divide yields non-linear device depth; remap it
                // to a linear [0,1] value between the near and far planes.
                float deviceDepth = input.depth.x / input.depth.y;
                return Linear01Depth(deviceDepth);
            }
            ENDCG
        }
    }
}
配合上述代码的cs脚本如下:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Renders the scene depth from the light's point of view into a
/// <see cref="RenderTexture"/> every frame and feeds it — together with the
/// light-space matrices — to the jade material, which uses the depth
/// difference to estimate how far light travels inside the mesh.
/// NOTE(review): OnPreRender is only invoked on scripts attached to the same
/// GameObject as an enabled Camera — confirm this component lives on the
/// rendering camera.
/// </summary>
public class DepthTest : MonoBehaviour
{
    public Transform lightD;             // light transform the depth camera follows
    public Material mt;                  // jade material consuming the depth map
    public Shader depthShader;           // replacement shader ("Custom/DepthShader")
    public RenderTexture m_depthTexture; // depth map rendered from the light

    private Camera camereDepth;          // hidden camera placed at the light

    void Start()
    {
        Camera main = Camera.main;
        if (main == null || lightD == null || mt == null)
        {
            Debug.LogError("DepthTest: missing main camera, light transform or material.");
            enabled = false;
            return;
        }

        m_depthTexture = new RenderTexture((int)main.pixelWidth, (int)main.pixelHeight, 24);
        m_depthTexture.hideFlags = HideFlags.DontSave;

        GameObject go = new GameObject("camereDepth");
        go.transform.position = lightD.position;
        go.transform.rotation = lightD.rotation;
        camereDepth = go.AddComponent<Camera>();
        // Known limitation of this approach: depth precision is poor, so the
        // far clip plane must be kept small.
        camereDepth.farClipPlane = 2;
        camereDepth.enabled = false; // rendered manually via RenderWithShader

        // Clip planes never change after this point; the matrices are
        // refreshed every frame in OnPreRender so a moving light stays correct
        // (the original set them only once here, which went stale).
        mt.SetFloat("_CamNearPlane", camereDepth.nearClipPlane);
        mt.SetFloat("_CamFarPlane", camereDepth.farClipPlane);
        UpdateLightMatrices();
    }

    // Pushes the current world->light-view and light->texture matrices to the
    // material. The shader property name keeps the original (misspelled)
    // identifier "_WolrdtoLightMatrix" because the jade shader declares it
    // with exactly that spelling.
    private void UpdateLightMatrices()
    {
        mt.SetMatrix("_WolrdtoLightMatrix", camereDepth.worldToCameraMatrix);
        mt.SetMatrix("_LightTexMatrix",
            camereDepth.projectionMatrix * camereDepth.worldToCameraMatrix);
    }

    void OnPreRender()
    {
        if (depthShader == null || camereDepth == null)
        {
            return;
        }

        // Keep the depth camera glued to the light in case it moves.
        camereDepth.transform.SetPositionAndRotation(lightD.position, lightD.rotation);
        UpdateLightMatrices();

        camereDepth.targetTexture = m_depthTexture;
        camereDepth.RenderWithShader(depthShader, "");
        mt.SetTexture("_BackDepthTex", m_depthTexture);
    }

    void OnDestroy()
    {
        // Release the manually created RenderTexture — the original leaked it.
        if (m_depthTexture != null)
        {
            m_depthTexture.Release();
            Destroy(m_depthTexture);
            m_depthTexture = null;
        }
        if (camereDepth != null)
        {
            Destroy(camereDepth.gameObject);
        }
    }
}
最后物体渲染的shader如下:
// Jade shader: wrap lighting (cheap subsurface approximation) combined with a
// depth-map based absorption term. Requires the companion DepthTest script to
// supply _BackDepthTex, the light-space matrices and the clip planes.
// FIXES vs. original listing: the final closing brace was swallowed by the
// "// FallBack" comment (shader did not parse), and a dead assignment to d_i
// (immediately overwritten) has been removed. Behavior is otherwise unchanged.
Shader "Custom/wrap_diff1"
{
    Properties
    {
        _Diffuse("Diffuse", Color) = (1, 1, 1, 1)
        _Specular("Specular", Color) = (1.0, 1.0, 1.0, 1.0)
        _Shinness("Shinness", Range(8, 256)) = 128
        _Wrap("Wrap", Range(0, 1)) = 0.5                      // wrap-lighting amount
        _ScatterWidth("_ScatterWidth", Vector) = (0, 0, 0, 0) // per-channel extinction sigma_t
        _ScatterFactor("_ScatterFactor", Range(0, 1)) = 0.75  // blend between diffuse and scattering
        _MainTex("MainTex", 2D) = "white" {}
        _ScatterTex("_ScatterTex", 2D) = "white" {}
    }
    SubShader
    {
        Tags { "RenderType" = "Opaque" "Queue" = "Geometry" }

        Pass
        {
            Tags { "LightMode" = "ForwardBase" }
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            #pragma multi_compile_fwdbase
            #include "Lighting.cginc"

            fixed4 _Diffuse;
            float _Wrap;
            sampler2D _MainTex;
            float4 _MainTex_ST;
            float4 _ScatterWidth;
            float _ScatterFactor;
            sampler2D _ScatterTex;
            float4 _ScatterTex_ST;
            float4 _Specular;
            float _Shinness;

            struct a2v
            {
                float4 vertex : POSITION;
                float3 normal : NORMAL;
                float4 tangent : TANGENT;
                float2 texcoord : TEXCOORD0;
            };

            struct v2f
            {
                float4 pos : SV_POSITION;
                float3 wNormal : TEXCOORD0; // world-space normal
                float4 wPos : TEXCOORD1;    // world-space position
                float4 uv : TEXCOORD2;      // xy = _MainTex, zw = _ScatterTex
            };

            v2f vert(a2v v)
            {
                v2f o;
                o.pos = mul(UNITY_MATRIX_MVP, v.vertex);
                // Multiplying on the left by the inverse matrix transforms the
                // normal correctly under non-uniform scale.
                o.wNormal = mul(v.normal, (float3x3)unity_WorldToObject);
                o.wPos = mul(unity_ObjectToWorld, v.vertex);
                o.uv.xy = TRANSFORM_TEX(v.texcoord, _MainTex);
                o.uv.zw = TRANSFORM_TEX(v.texcoord, _ScatterTex);
                return o;
            }

            // Inputs provided by the DepthTest script (note: the property name
            // keeps the script's original spelling "_WolrdtoLightMatrix").
            sampler2D _BackDepthTex;
            float4x4 _WolrdtoLightMatrix;
            float4x4 _LightTexMatrix;
            float _CamNearPlane;
            float _CamFarPlane;

            fixed4 frag(v2f i) : SV_Target
            {
                // Depth of the shaded point as seen from the light: light-view
                // z is negative in front of the camera, so negate, then
                // normalize between the near and far planes.
                float d_o = mul(_WolrdtoLightMatrix, i.wPos).z;
                d_o = (-d_o - _CamNearPlane) / (_CamFarPlane - _CamNearPlane);

                // Project the world position into the light's clip space, then
                // map to [0,w] texture coordinates for projective sampling.
                // (scrPos.y*1 is a leftover platform-flip placeholder.)
                float4 tpos = mul(_LightTexMatrix, i.wPos);
                float4 scrPos = tpos * 0.5f;
                scrPos.xy = float2(scrPos.x, scrPos.y * 1) + scrPos.w;
                scrPos.zw = tpos.zw;

                // Linear01 depth of the surface nearest the light, as stored
                // by Custom/DepthShader.
                float4 backDepthColor = SAMPLE_DEPTH_TEXTURE(_BackDepthTex, UNITY_PROJ_COORD(scrPos));
                float d_i = backDepthColor.r;

                // Approximate distance light traveled through the object.
                // NOTE(review): d_o is normalized by (far-near) while the depth
                // map is Linear01 (normalized by far) — the scales only roughly
                // agree; confirm if precise absorption distances matter.
                float depth = d_o - d_i;
                // Beer-Lambert style absorption: exp(-sigma_t * distance).
                float3 scattering = exp(-_ScatterWidth.xyz * depth);

                float3 N = normalize(i.wNormal);
                float4 texcol = tex2D(_MainTex, i.uv.xy);
                fixed3 albedo = texcol.rgb * _Diffuse.rgb;
                fixed3 ambient = UNITY_LIGHTMODEL_AMBIENT.xyz * albedo;
                float3 L = normalize(UnityWorldSpaceLightDir(i.wPos));
                float3 V = normalize(UnityWorldSpaceViewDir(i.wPos));
                float3 H = normalize(L + V);

                // Wrap lighting: lets light reach past the terminator to fake
                // subsurface scattering on the back side.
                float wrap = (dot(L, N) + _Wrap) / (1 + _Wrap);
                float wrap_diff = max(0, wrap);
                fixed3 diffuse = _LightColor0.rgb * wrap_diff * albedo;

                // Blinn-Phong specular.
                float s = pow(max(0, dot(N, H)), _Shinness);
                float3 specular = _LightColor0.rgb * _Specular.rgb * s;

                float4 finCol = float4(0, 0, 0, 0);
                finCol.rgb = lerp(ambient + diffuse, scattering, _ScatterFactor) + specular;
                finCol.a = texcol.a;
                return finCol;
            }
            ENDCG
        }
    }
    // FallBack "Diffuse"
}
至此,我们就可以看到如下效果
下次再分享另一种方案的模拟效果。
主要参看文章如下:
https://developer.nvidia.com/gpugems/GPUGems/gpugems_ch16.html
阅读全文
1 0
- unity玉石渲染
- Unity渲染
- [Unity] Unity渲染优化
- 玉石鉴定
- Unity 线渲染器
- Unity屏幕渲染技术
- unity批次渲染设计
- Unity之渲染问题
- unity 渲染问题
- Unity渲染队列
- Unity渲染路径比较
- 优化Unity渲染器
- 优化Unity渲染器
- 【unity】渲染批处理
- Unity 渲染层级
- unity渲染层
- Unity 渲染顺序
- unity 渲染流程
- Socket通信,http和https的区别
- vue引入Mint-UI
- Linux 远程登录管理工具Putty
- 新侨移民告诉你:为什么要移民新西兰?
- Go实战--golang中使用MongoDB(mgo)
- unity玉石渲染
- maven自定义属性给properties文件赋值
- add
- DeepLearing4J环境配置与开源代码测试
- matlab imresize的用法
- No row with the given identifier exists: [com.device.entity.DcDevice]异常处理
- 用eclipse创建动态web项目手动生成web.xml方法
- 转载csdn
- python爬虫学习多进程下载图片