When building a GIS, the Earth needs an atmosphere around it, and atmospheric scattering remains an area of ongoing research in computer graphics; even so, several mature scattering algorithms exist today. I implemented atmospheric scattering based on the algorithm from Chapter 16 of GPU Gems 2: Programming Techniques for High-Performance Graphics and General-Purpose Computation (Sean O'Neil's "Accurate Atmospheric Scattering"). The result is shown in the figure.
The blue sky in the figure is the scattering effect; for the details of the algorithm, please refer to the book.
Steps:
1. Create an ellipsoid and generate its vertex array and vertex index array. The ellipsoid generation algorithm will be posted in a follow-up article; a rough sketch is given right after this list.
2. Compute the uniforms required by the algorithm, pass them to the shader, and run it (see the host-side sketches after the vertex and fragment shaders).
3. Imagery and terrain (elevation) handling will be covered in a follow-up post.
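Since the ellipsoid generation is deferred to a later post, the following is only a minimal sketch of how step 1 could be done, assuming a WebGL host written in TypeScript. The function name createEllipsoidMesh, the stack/slice counts, and the 2.5% atmosphere shell factor are my own choices for illustration, not the original code.

// Sketch: tessellate an ellipsoid into a vertex array and a triangle index
// array by walking latitude (stacks) and longitude (slices).
interface EllipsoidMesh {
  positions: Float32Array; // x, y, z per vertex, in world coordinates
  indices: Uint16Array;    // three indices per triangle
}

function createEllipsoidMesh(
  radii: [number, number, number],
  stacks: number,
  slices: number
): EllipsoidMesh {
  const positions: number[] = [];
  const indices: number[] = [];

  // Vertices: latitude from -90 to +90 degrees, longitude from 0 to 360 degrees.
  for (let i = 0; i <= stacks; i++) {
    const lat = -Math.PI / 2 + (i / stacks) * Math.PI;
    const cosLat = Math.cos(lat);
    const sinLat = Math.sin(lat);
    for (let j = 0; j <= slices; j++) {
      const lon = (j / slices) * 2 * Math.PI;
      positions.push(
        radii[0] * cosLat * Math.cos(lon),
        radii[1] * cosLat * Math.sin(lon),
        radii[2] * sinLat
      );
    }
  }

  // Indices: two triangles per latitude/longitude quad.
  const rowLength = slices + 1;
  for (let i = 0; i < stacks; i++) {
    for (let j = 0; j < slices; j++) {
      const a = i * rowLength + j;
      const b = a + rowLength;
      indices.push(a, b, a + 1, a + 1, b, b + 1);
    }
  }

  return {
    positions: new Float32Array(positions),
    indices: new Uint16Array(indices),
  };
}

// Usage: an atmosphere shell 2.5% larger than the WGS84 ellipsoid
// (the same factor is assumed for the fOuterRadius uniform below).
const wgs84 = [6378137.0, 6378137.0, 6356752.314245];
const atmosphere = createEllipsoidMesh(
  [wgs84[0] * 1.025, wgs84[1] * 1.025, wgs84[2] * 1.025],
  64,
  64
);

A 64 by 64 tessellation gives 65 * 65 = 4225 vertices, well under the 65536 limit of 16-bit indices, which is why a Uint16Array is sufficient here.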
Shader source code:
Vertex shader:
uniform vec3 u_viewerPositionWC;
uniform vec3 u_sunDirectionWC;
const float u_pi = 3.141592653589793;
uniform mat4 u_modelViewProjection;
uniform mat4 u_modelView;
#define SKY_FROM_ATMOSPHERE // camera is inside the atmosphere; define SKY_FROM_SPACE instead for a view from space
attribute vec4 position;
uniform float fCameraHeight;
uniform float fCameraHeight2;
uniform float fOuterRadius; // The outer (atmosphere) radius
uniform float fOuterRadius2; // fOuterRadius^2
uniform float fInnerRadius; // The inner (planetary) radius
uniform float fScale; // 1 / (fOuterRadius - fInnerRadius)
uniform float fScaleDepth; // The scale depth (i.e. the altitude at which the atmosphere's average density is found)
uniform float fScaleOverScaleDepth; // fScale / fScaleDepth
const float Kr = 0.0025;
const float fKr4PI = Kr * 4.0 * u_pi;
const float Km = 0.0015;
const float fKm4PI = Km * 4.0 * u_pi;
const float ESun = 15.0;
const float fKmESun = Km * ESun;
const float fKrESun = Kr * ESun;
const vec3 v3InvWavelength = vec3(
5.60204474633241, // Red = 1.0 / Math.pow(0.650, 4.0)
9.473284437923038, // Green = 1.0 / Math.pow(0.570, 4.0)
19.643802610477206); // Blue = 1.0 / Math.pow(0.475, 4.0)
const float rayleighScaleDepth = 0.25;
const int nSamples = 2;
const float fSamples = 2.0;
varying vec3 v_rayleighColor;
varying vec3 v_mieColor;
varying vec3 v_toCamera;
varying vec3 v_positionEC;
float scale(float fCos)
{
float x = 1.0 - fCos;
return fScaleDepth * exp(-0.00287 + x*(0.459 + x*(3.83 + x*(-6.80 + x*5.25))));
}
void main(void)
{
// Get the ray from the camera to the vertex and its length (which is the far point of the ray passing through the atmosphere)
vec3 v3Pos = position.xyz;
vec3 v3Ray = v3Pos - u_viewerPositionWC;
float fFar = length(v3Ray);
v3Ray /= fFar;
#ifdef SKY_FROM_SPACE
// Calculate the closest intersection of the ray with the outer atmosphere (which is the near point of the ray passing through the atmosphere)
float B = 2.0 * dot(u_viewerPositionWC, v3Ray);
float C = fCameraHeight2 - fOuterRadius2;
float fDet = max(0.0, B*B - 4.0 * C);
float fNear = 0.5 * (-B - sqrt(fDet));
// Calculate the ray's starting position, then calculate its scattering offset
vec3 v3Start = u_viewerPositionWC + v3Ray * fNear;
fFar -= fNear;
float fStartAngle = dot(v3Ray, v3Start) / fOuterRadius;
float fStartDepth = exp(-1.0 / fScaleDepth);
float fStartOffset = fStartDepth*scale(fStartAngle);
#else // SKY_FROM_ATMOSPHERE
// Calculate the ray's starting position, then calculate its scattering offset
vec3 v3Start = u_viewerPositionWC;
float fHeight = length(v3Start);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fCameraHeight));
float fStartAngle = dot(v3Ray, v3Start) / fHeight;
float fStartOffset = fDepth*scale(fStartAngle);
#endif
// Initialize the scattering loop variables
float fSampleLength = fFar / fSamples;
float fScaledLength = fSampleLength * fScale;
vec3 v3SampleRay = v3Ray * fSampleLength;
vec3 v3SamplePoint = v3Start + v3SampleRay * 0.5;
// Now loop through the sample rays
vec3 v3FrontColor = vec3(0.0, 0.0, 0.0);
for(int i=0; i<nSamples; i++)
{
float fHeight = length(v3SamplePoint);
float fDepth = exp(fScaleOverScaleDepth * (fInnerRadius - fHeight));
vec3 lightPosition = normalize(u_viewerPositionWC); // lights the atmosphere from the viewer's direction; use u_sunDirectionWC here to light it from the sun instead
float fLightAngle = dot(lightPosition, v3SamplePoint) / fHeight;
float fCameraAngle = dot(v3Ray, v3SamplePoint) / fHeight;
float fScatter = (fStartOffset + fDepth*(scale(fLightAngle) - scale(fCameraAngle)));
vec3 v3Attenuate = exp(-fScatter * (v3InvWavelength * fKr4PI + fKm4PI));
v3FrontColor += v3Attenuate * (fDepth * fScaledLength);
v3SamplePoint += v3SampleRay;
}
// Finally, scale the Mie and Rayleigh colors and set up the varying variables for the pixel shader
v_mieColor = v3FrontColor * fKmESun;
v_rayleighColor = v3FrontColor * (v3InvWavelength * fKrESun);
v_toCamera = u_viewerPositionWC - v3Pos;
v_positionEC = (u_modelView * position).xyz;
gl_Position = u_modelViewProjection * position;
}
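The vertex shader above declares a number of derived uniforms (fCameraHeight2, fOuterRadius2, fScale, fScaleOverScaleDepth) that the host must compute each frame. Before moving on to the fragment shader, here is a minimal sketch of how they might be uploaded with plain WebGL calls; the radii, the 2.5% shell factor, and the helper names are my assumptions, and the matrix uniforms (u_modelViewProjection, u_modelView) are assumed to be uploaded elsewhere with gl.uniformMatrix4fv.

// Sketch: compute and upload the scattering uniforms declared above.
// gl is a WebGLRenderingContext, program the linked shader program,
// cameraWC the camera position in world coordinates, and sunWC the
// normalized direction to the sun (both supplied by the scene).
function setAtmosphereUniforms(
  gl: WebGLRenderingContext,
  program: WebGLProgram,
  cameraWC: [number, number, number],
  sunWC: [number, number, number]
): void {
  const innerRadius = 6378137.0;           // planetary radius (WGS84 equatorial)
  const outerRadius = innerRadius * 1.025; // atmosphere shell radius
  const scaleDepth = 0.25;                 // same value as rayleighScaleDepth in the shader
  const cameraHeight = Math.hypot(cameraWC[0], cameraWC[1], cameraWC[2]);
  const scale = 1.0 / (outerRadius - innerRadius);

  const u = (name: string) => gl.getUniformLocation(program, name);

  gl.uniform3fv(u("u_viewerPositionWC"), cameraWC);
  gl.uniform3fv(u("u_sunDirectionWC"), sunWC);
  gl.uniform1f(u("fCameraHeight"), cameraHeight);
  gl.uniform1f(u("fCameraHeight2"), cameraHeight * cameraHeight);
  gl.uniform1f(u("fOuterRadius"), outerRadius);
  gl.uniform1f(u("fOuterRadius2"), outerRadius * outerRadius);
  gl.uniform1f(u("fInnerRadius"), innerRadius);
  gl.uniform1f(u("fScale"), scale);
  gl.uniform1f(u("fScaleDepth"), scaleDepth);
  gl.uniform1f(u("fScaleOverScaleDepth"), scale / scaleDepth);
}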
Fragment shader:
#ifdef GL_FRAGMENT_PRECISION_HIGH
precision highp float;
#else
precision mediump float;
#endif
const float u_infinity = 5906376272000.0;
struct u_raySegment
{
float start;
float stop;
};
const u_raySegment u_emptyRaySegment = u_raySegment(-u_infinity, -u_infinity);
const u_raySegment u_fullRaySegment = u_raySegment(0.0, u_infinity);
struct u_ray
{
vec3 origin;
vec3 direction;
};
uniform mat4 u_inverseModelView;
struct u_ellipsoid
{
vec3 center;
vec3 radii;
vec3 inverseRadii;
vec3 inverseRadiiSquared;
};
uniform mat4 u_view;
uniform vec3 u_sunDirectionWC;
u_raySegment u_rayEllipsoidIntersectionInterval(u_ray ray, u_ellipsoid ellipsoid)
{
// ray and ellipsoid center in eye coordinates. radii in model coordinates.
vec3 q = ellipsoid.inverseRadii * (u_inverseModelView * vec4(ray.origin, 1.0)).xyz;
vec3 w = ellipsoid.inverseRadii * (u_inverseModelView * vec4(ray.direction, 0.0)).xyz;
q = q - ellipsoid.inverseRadii * (u_inverseModelView * vec4(ellipsoid.center, 1.0)).xyz;
float q2 = dot(q, q);
float qw = dot(q, w);
if (q2 > 1.0) // Outside ellipsoid.
{
if (qw >= 0.0) // Looking outward or tangent (0 intersections).
{
return u_emptyRaySegment;
}
else // qw < 0.0.
{
float qw2 = qw * qw;
float difference = q2 - 1.0; // Positively valued.
float w2 = dot(w, w);
float product = w2 * difference;
if (qw2 < product) // Imaginary roots (0 intersections).
{
return u_emptyRaySegment;
}
else if (qw2 > product) // Distinct roots (2 intersections).
{
float discriminant = qw * qw - product;
float temp = -qw + sqrt(discriminant); // Avoid cancellation.
float root0 = temp / w2;
float root1 = difference / temp;
if (root0 < root1)
{
u_raySegment i = u_raySegment(root0, root1);
return i;
}
else
{
u_raySegment i = u_raySegment(root1, root0);
return i;
}
}
else // qw2 == product. Repeated roots (2 intersections).
{
float root = sqrt(difference / w2);
u_raySegment i = u_raySegment(root, root);
return i;
}
}
}
else if (q2 < 1.0) // Inside ellipsoid (2 intersections).
{
float difference = q2 - 1.0; // Negatively valued.
float w2 = dot(w, w);
float product = w2 * difference; // Negatively valued.
float discriminant = qw * qw - product;
float temp = -qw + sqrt(discriminant); // Positively valued.
u_raySegment i = u_raySegment(0.0, temp / w2);
return i;
}
else // q2 == 1.0. On ellipsoid.
{
if (qw < 0.0) // Looking inward.
{
float w2 = dot(w, w);
u_raySegment i = u_raySegment(0.0, -qw / w2);
return i;
}
else // qw >= 0.0. Looking outward or tangent.
{
return u_emptyRaySegment;
}
}
}
uniform float u_morphTime;
float u_luminance(vec3 rgb)
{
// Algorithm from Chapter 10 of Graphics Shaders.
const vec3 W = vec3(0.2125, 0.7154, 0.0721);
return dot(rgb, W);
}
bool u_isEmpty(u_raySegment interval)
{
return (interval.stop < 0.0);
}
u_ellipsoid u_getWgs84EllipsoidEC()
{
vec3 radii = vec3(6378137.0, 6378137.0, 6356752.314245);
vec3 inverseRadii = vec3(1.0 / radii.x, 1.0 / radii.y, 1.0 / radii.z);
vec3 inverseRadiiSquared = inverseRadii * inverseRadii;
u_ellipsoid temp = u_ellipsoid(u_view[3].xyz, radii, inverseRadii, inverseRadiiSquared);
return temp;
}
const float g = -0.95;
const float g2 = g * g;
varying vec3 v_rayleighColor;
varying vec3 v_mieColor;
varying vec3 v_toCamera;
varying vec3 v_positionEC;
void main (void)
{
// TODO: make arbitrary ellipsoid
u_ellipsoid ellipsoid = u_getWgs84EllipsoidEC();
vec3 direction = normalize(v_positionEC);
u_ray ray = u_ray(vec3(0.0), direction);
u_raySegment intersection = u_rayEllipsoidIntersectionInterval(ray, ellipsoid);
if (!u_isEmpty(intersection)) {
discard;
}
// Extra normalize added for Android
float fCos = dot(u_sunDirectionWC, normalize(v_toCamera)) / length(v_toCamera);
float fRayleighPhase = 0.75 * (1.0 + fCos*fCos);
float fMiePhase = 1.5 * ((1.0 - g2) / (2.0 + g2)) * (1.0 + fCos*fCos) / pow(1.0 + g2 - 2.0*g*fCos, 1.5);
const float fExposure = 2.0;
vec3 rgb = fRayleighPhase * v_rayleighColor + fMiePhase * v_mieColor;
rgb = vec3(1.0) - exp(-fExposure * rgb);
float l = u_luminance(rgb);
gl_FragColor = vec4(rgb, min(smoothstep(0.0, 0.1, l), 1.0) * smoothstep(0.0, 1.0, u_morphTime));
}
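To complete step 2 ("run the shader"), a rough sketch of the draw call is given below. It assumes the two shaders have already been compiled and linked into program, the mesh from step 1 has been uploaded into buffers, and all uniforms have been set. The state choices (no face culling, alpha blending) are my assumptions about how this sky pass is composited, since the fragment shader outputs an alpha value and discards fragments hidden behind the globe.

// Sketch: draw the atmosphere shell with the shaders above.
function drawSkyAtmosphere(
  gl: WebGLRenderingContext,
  program: WebGLProgram,
  mesh: { vertexBuffer: WebGLBuffer; indexBuffer: WebGLBuffer; indexCount: number }
): void {
  gl.useProgram(program);

  // The shell encloses the camera, so draw it without face culling and let the
  // discard in the fragment shader remove everything occluded by the globe.
  gl.disable(gl.CULL_FACE);
  gl.enable(gl.BLEND);
  gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);

  // position is the only attribute the vertex shader reads; supplying three
  // components per vertex leaves w at its default of 1.0.
  const positionLoc = gl.getAttribLocation(program, "position");
  gl.bindBuffer(gl.ARRAY_BUFFER, mesh.vertexBuffer);
  gl.enableVertexAttribArray(positionLoc);
  gl.vertexAttribPointer(positionLoc, 3, gl.FLOAT, false, 0, 0);

  gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, mesh.indexBuffer);
  gl.drawElements(gl.TRIANGLES, mesh.indexCount, gl.UNSIGNED_SHORT, 0);
}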
Original article: http://blog.csdn.net/zangle260/article/details/42744059