I've been studying WorldWind (WW) for quite a while now, and the theoretical side is mostly done. In my experience, learning WW breaks into two big steps: studying the WW framework itself, and studying WW plugins. Once you dig deeper into plugins, you inevitably have to learn Direct3D programming first; take that as a lesson from my own experience. Today I finished working through the Virtual Earth plugin, which marks my move from WW theory to WW practice. Although what I summarize here is the Virtual Earth plugin, before reading on I'd recommend first getting comfortable with Direct3D programming and with the low-level rendering of the BMNG and Globe Icon plugins; all of these are prerequisites for understanding Virtual Earth.
The Virtual Earth plugin consists of the following classes:
VirtualEarthForm: the plugin's form (UI) class
VirtualEarthPlugin: the plugin class, derived from Plugin (important)
VeReprojectTilesLayer: the renderable layer class, derived from RenderableObject (important)
VeTile: the tile class, where most of the real work happens (important)
Projection: the projection/reprojection class (important)
Search class
PushPin class
Let's start with VirtualEarthPlugin. Every plugin class must derive from Plugin.cs and must override the Load() and Unload() methods, which load and unload the plugin respectively.
Load() typically adds a menu item and a toolbar button, much like the plugins covered earlier; when you write your own plugin you can follow the same pattern.
public override void Load()
{
try
{
if (ParentApplication.WorldWindow.CurrentWorld.IsEarth)
{   // initialize the VE plugin control form
m_Form = new VirtualEarthForm(ParentApplication);
m_Form.Owner = ParentApplication;
// add the VE plugin menu item
m_MenuItem = new MenuItem("MicroSoft VirtualEarth");
m_MenuItem.Click += new EventHandler(menuItemClicked);
ParentApplication.PluginsMenu.MenuItems.Add(m_MenuItem);
//#if DEBUG
string imgPath = Path.GetDirectoryName(System.Windows.Forms.Application.ExecutablePath) + "\\Plugins\\VirtualEarth\\VirtualEarthPlugin.png";
//#else
// _pluginDir = this.PluginDirectory;
// string imgPath = this.PluginDirectory + @"\VirtualEarthPlugin.png";
//#endif
if (File.Exists(imgPath) == false)
{
Utility.Log.Write(new Exception("imgPath not found " + imgPath));
}
m_ToolbarItem = new WorldWind.WindowsControlMenuButton(
"MicroSoft VirtualEarth",
imgPath,
m_Form);
ParentApplication.WorldWindow.MenuBar.AddToolsMenuButton(m_ToolbarItem);
base.Load();
}
}
catch (Exception ex)
{
Utility.Log.Write(ex);
throw;
}
}
Unload() simply disposes of the plugin form and removes the plugin's menu item and toolbar button.
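Since the post doesn't list it, here is a minimal sketch of what such an Unload() can look like, reusing the field names from the Load() snippet above and assuming MenuBar offers a RemoveToolsMenuButton counterpart to AddToolsMenuButton; the plugin's actual cleanup may differ.
// Minimal sketch: undo what Load() set up. RemoveToolsMenuButton is assumed here.
public override void Unload()
{
    if (m_MenuItem != null)
    {
        ParentApplication.PluginsMenu.MenuItems.Remove(m_MenuItem); // remove the plugin menu item
        m_MenuItem.Dispose();
        m_MenuItem = null;
    }
    if (m_ToolbarItem != null)
    {
        ParentApplication.WorldWindow.MenuBar.RemoveToolsMenuButton(m_ToolbarItem); // remove the toolbar button
        m_ToolbarItem = null;
    }
    if (m_Form != null)
    {
        m_Form.Dispose(); // release the plugin form
        m_Form = null;
    }
    base.Unload();
}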
Now let's look at VeReprojectTilesLayer. Like the other layer classes it overrides Initialize(), Update() and Render(), but the heart of the VE plugin is Update(); it also has a few methods of its own.
Initialize() mainly instantiates the class-level objects: the projection, the VE control form, the VeTile-related state, and so on.
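For orientation, a hedged sketch of that setup pattern follows; the projection parameters and exact field assignments are illustrative, not the plugin's verbatim code.
// Hedged sketch of the Initialize() pattern; parameters and field names are illustrative.
public override void Initialize(DrawArgs drawArgs)
{
    try
    {
        // Mercator projection (Proj.4 wrapper) used to reproject VE tiles onto the globe
        this.proj = new Projection(new string[] { "proj=merc", "ellps=WGS84", "no_defs" });

        // Tile collection that Update() fills and Render() draws
        this.veTiles = ArrayList.Synchronized(new ArrayList());

        // The VE control form (veForm) is assumed to have been handed in via the constructor,
        // so Update()/Render() can read its settings (StartZoomLevel, IsDebug, IsTerrainOn, ...)

        this.isInitialized = true;
    }
    catch (Exception ex)
    {
        Utility.Log.Write(ex);
        this.isInitialized = false;
    }
}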
Render() performs the layer's 3D rendering, but the real drawing is delegated to VeTile.Render().
/// <summary>
/// Draws the layer
/// </summary>
public override void Render(DrawArgs drawArgs)
{
try
{
if (this.isOn == false)
{
return;
}
if (this.isInitialized == false)
{
return;
}
if (drawArgs.device == null)
return;
if (veTiles != null && veTiles.Count > 0)
{
//render mesh and tile(s)
bool disableZBuffer = false; //TODO where do i get this setting
//foreach(VeTile veTile in veTiles)
//{
// veTile.Render(drawArgs, disableZBuffer);
//}
// camera jitter fix
drawArgs.device.Transform.World = Matrix.Translation(
(float)-drawArgs.WorldCamera.ReferenceCenter.X,
(float)-drawArgs.WorldCamera.ReferenceCenter.Y,
(float)-drawArgs.WorldCamera.ReferenceCenter.Z
);
// Clear ZBuffer between layers (as in WW)
drawArgs.device.Clear(ClearFlags.ZBuffer, 0, 1.0f, 0);
// Render tiles (GetZoomLevelByTrueViewRange is worth noting: it derives the current zoom level from the WorldCamera's true view range)
int zoomLevel = GetZoomLevelByTrueViewRange(drawArgs.WorldCamera.TrueViewRange.Degrees);
int tileDrawn = VeTile.Render(drawArgs, disableZBuffer, veTiles, zoomLevel); // Try current level first
if(tileDrawn == 0) VeTile.Render(drawArgs, disableZBuffer, veTiles, prevLvl); // If nothing drawn, render previous level
//camera jitter fix
drawArgs.device.Transform.World = drawArgs.WorldCamera.WorldMatrix;
//Render logo
RenderDownloadProgress(drawArgs, null, 0);
}
//else pushpins only
//render PushPins
if (pushPins != null && pushPins.Count > 0)
{
RenderPushPins(drawArgs);
}
}
catch (Exception ex)
{
Utility.Log.Write(ex);
}
}
The way VE derives its zoom level is quite clever. According to the original author, VE's zoom levels run from 1 up to 19, and the angular extent covered by each level is halved at every step (powers of two). You can reuse the method as-is in your own plugins, but the idea behind it is worth understanding.
Note: the view angle per level goes 180, 90, 45, 22.5, ...
private int GetZoomLevelByTrueViewRange(double trueViewRange)
{
int maxLevel = 3; //view range of 45 degrees
int minLevel = 19;
int numLevels = minLevel - maxLevel + 1;
int retLevel = maxLevel;
for (int i = 0; i < numLevels; i++)
{
retLevel = i + maxLevel;
double viewAngle = 180;
for (int j = 0; j < i; j++)
{
viewAngle = viewAngle / 2.0;
}
if (trueViewRange >= viewAngle)
{
break;
}
}
return retLevel;
}
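Because each step simply halves the angle, the loop above is equivalent to a base-2 logarithm. A sketch of the closed-form version (not the plugin's code; floating-point rounding can shift the result by one level right at the boundaries):
// Closed-form equivalent: level = 3 + ceil(log2(180 / trueViewRange)), clamped to [3, 19].
// Assumes trueViewRange > 0.
private int GetZoomLevelClosedForm(double trueViewRange)
{
    int steps = (int)Math.Ceiling(Math.Log(180.0 / trueViewRange, 2.0));
    return Math.Max(3, Math.Min(3 + steps, 19));
}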
The single most important method in the VE plugin is Update(). VE essentially keeps refreshing imagery according to the zoom level: Update() builds the collection of objects to be drawn, and Render() then draws them. That makes Update() the most complex method in the plugin, the essence of how VE works, and the part most worth studying carefully.
/// <summary>
/// Update layer (called from worker thread)
/// </summary>
public override void Update(DrawArgs drawArgs)
{
try
{
if (this.isOn == false)
{
return;
}
//NOTE for some reason Initialize is not getting called from the Plugin Menu Load/Unload
//it does get called when the plugin loads from Startup
//not sure what is going on, so i'll just call it manually
if (this.isInitialized == false)
{
this.Initialize(drawArgs);
return;
}
//get lat, lon (camera tilt and altitude are read below as well)
double lat = drawArgs.WorldCamera.Latitude.Degrees;
double lon = drawArgs.WorldCamera.Longitude.Degrees;
double tilt = drawArgs.WorldCamera.Tilt.Degrees;
//determine zoom level
double alt = drawArgs.WorldCamera.Altitude;
//could go off distance, but this changes when view angle changes
//Angle fov = drawArgs.WorldCamera.Fov; //stays at 45 degress
//Angle viewRange = drawArgs.WorldCamera.ViewRange; //off of distance, same as TVR but changes when view angle changes
Angle tvr = drawArgs.WorldCamera.TrueViewRange; //off of altitude
//smallest altitude = 100m
//tvr = .00179663198575926
//start altitude = 12756273m
//tvr = 180
//WW _levelZeroTileSizeDegrees; get the zoom level (method described above)
//180 90 45 22.5 11.25 5.625 2.8125 1.40625 .703125 .3515625 .17578125 .087890625 0.0439453125 0.02197265625 0.010986328125 0.0054931640625
int zoomLevel = GetZoomLevelByTrueViewRange(tvr.Degrees);
//dont start VE tiles until a certain zoom level
if (zoomLevel < veForm.StartZoomLevel)
{
this.RemoveAllTiles();
this.ForceRefresh();
return;
}
//WW tiles
//double tileDegrees = GetLevelDegrees(zoomLevel);
//int row = MathEngine.GetRowFromLatitude(lat, tileDegrees);
//int col = MathEngine.GetColFromLongitude(lon, tileDegrees);
//VE tiles
double metersY;
double yMeters;
int yMetersPerPixel;
int row;
/*
//WRONG - doesn't stay centered away from equator
//int yMeters = LatitudeToYAtZoom(lat, zoomLevel); //1024
double sinLat = Math.Sin(DegToRad(lat));
metersY = earthRadius / 2 * Math.Log((1 + sinLat) / (1 - sinLat)); //0
yMeters = earthHalfCirc - metersY; //20037508.342789244
yMetersPerPixel = (int) Math.Round(yMeters / MetersPerPixel(zoomLevel));
row = yMetersPerPixel / pixelsPerTile;
*/
//CORRECT
//int xMeters = LongitudeToXAtZoom(lon, zoomLevel); //1024
double metersX = earthRadius * DegToRad(lon); //0
double xMeters = earthHalfCirc + metersX; //20037508.342789244
int xMetersPerPixel = (int)Math.Round(xMeters / MetersPerPixel(zoomLevel));
int col = xMetersPerPixel / pixelsPerTile;
//reproject - overrides row above
//this correctly keeps me on the current tile that is being viewed
UV uvCurrent = new UV(DegToRad(lon), DegToRad(lat));
uvCurrent = proj.Forward(uvCurrent);
metersY = uvCurrent.V;
yMeters = earthHalfCirc - metersY;
yMetersPerPixel = (int)Math.Round(yMeters / MetersPerPixel(zoomLevel));
row = yMetersPerPixel / pixelsPerTile;
// Note: row and col are computed so that the right tile image can later be fetched and rendered as a texture in the right place; that pipeline is analyzed step by step below.
//update mesh if VertEx changes
if (prevVe != World.Settings.VerticalExaggeration)
{
lock (veTiles.SyncRoot)
{
VeTile veTile;
for (int i = 0; i < veTiles.Count; i++)
{
veTile = (VeTile)veTiles[i];
if (veTile.VertEx != World.Settings.VerticalExaggeration)
{
veTile.CreateMesh(this.Opacity, World.Settings.VerticalExaggeration);
}
}
}
}
prevVe = World.Settings.VerticalExaggeration;
//if within previous bounds and same zoom level, then exit
if (row == prevRow && col == prevCol && zoomLevel == prevLvl && tilt == preTilt)
{
return;
}
//System.Diagnostics.Debug.WriteLine("CHANGE");
lock (veTiles.SyncRoot)
{
VeTile veTile;
for (int i = 0; i < veTiles.Count; i++)
{
veTile = (VeTile)veTiles[i];
veTile.IsNeeded = false;
}
}
//metadata
ArrayList alMetadata = null;
if (veForm.IsDebug == true)
{
alMetadata = new ArrayList();
alMetadata.Add("yMeters " + yMeters.ToString());
alMetadata.Add("metersY " + metersY.ToString());
alMetadata.Add("yMeters2 " + yMeters.ToString());
alMetadata.Add("vLat " + uvCurrent.V.ToString());
//alMetadata.Add("xMeters " + xMeters.ToString());
//alMetadata.Add("metersX " + metersX.ToString());
//alMetadata.Add("uLon " + uvCurrent.U.ToString());
}
//add the current VeTile first (this is the key step, analyzed below)
//add current tiles first
AddVeTile(drawArgs, row, col, zoomLevel, alMetadata);
//then add other tiles outwards in surrounding circles
AddNeighborTiles(drawArgs, row, col, zoomLevel, null, 1);
AddNeighborTiles(drawArgs, row, col, zoomLevel, null, 2);
AddNeighborTiles(drawArgs, row, col, zoomLevel, null, 3);
// Extend tile grid if camera tilt above some values
if(tilt > 45) AddNeighborTiles(drawArgs, row, col, zoomLevel, null, 4);
if(tilt > 60) AddNeighborTiles(drawArgs, row, col, zoomLevel, null, 5);
//if(prevLvl > zoomLevel) //zooming out
//{
//}
lock (veTiles.SyncRoot)
{
VeTile veTile;
for (int i = 0; i < veTiles.Count; i++)
{
veTile = (VeTile)veTiles[i];
if (veTile.IsNeeded == false)
{
veTile.Dispose();
veTiles.RemoveAt(i);
i--;
}
}
}
//remember the current row, col, zoom level and tilt
prevRow = row;
prevCol = col;
prevLvl = zoomLevel;
preTilt = tilt;
}
catch (Exception ex)
{
Utility.Log.Write(ex);
}
}
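The row/col arithmetic in Update() is standard Web-Mercator tiling: level n splits the world into 2^n x 2^n tiles of pixelsPerTile (256) pixels each, so meters-per-pixel falls out of the Earth's circumference. Below is a hedged sketch of the helpers this arithmetic relies on; the constants and names are chosen to match the snippet, but the plugin's actual fields may differ slightly.
using System;

// Hedged sketch of the Mercator helpers behind the Update() tile math.
static class VeTileMath
{
    public const double earthRadius = 6378137.0;                  // equatorial radius in meters
    public const double earthCirc = 2.0 * Math.PI * earthRadius;  // equatorial circumference
    public const double earthHalfCirc = earthCirc / 2.0;          // ~20,037,508.34 m
    public const int pixelsPerTile = 256;

    public static double DegToRad(double degrees) { return degrees * Math.PI / 180.0; }

    // Level n covers the full circumference with 2^n tiles of 256 pixels each
    public static double MetersPerPixel(int zoomLevel)
    {
        return earthCirc / (Math.Pow(2, zoomLevel) * pixelsPerTile);
    }

    // Longitude -> tile column, mirroring the xMeters/col computation in Update().
    // The row is computed the same way, except that the plugin obtains metersY from
    // proj.Forward() instead of the spherical formula it commented out as WRONG.
    public static int LonToCol(double lon, int zoomLevel)
    {
        double xMeters = earthHalfCirc + earthRadius * DegToRad(lon);
        return (int)Math.Round(xMeters / MetersPerPixel(zoomLevel)) / pixelsPerTile;
    }
}
For example, at zoomLevel 1 MetersPerPixel returns about 78,271.5 m and LonToCol(0, 1) returns 1, i.e. the prime meridian falls on the eastern of the two columns.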
Let's look at AddVeTile(drawArgs, row, col, zoomLevel, alMetadata) first, since it is the foundation that AddNeighborTiles(drawArgs, row, col, zoomLevel, null, 1) builds on.
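The AddVeTile body isn't reproduced in this post; its job is essentially to reuse an already-loaded tile (marking it as still needed) or to create a new one via CreateVeTile() and append it to veTiles. A hedged sketch of that pattern follows; the VeTile property names Row/Col/Level are my own assumptions.
// Hedged sketch of the AddVeTile pattern; member names Row/Col/Level are assumed.
private void AddVeTile(DrawArgs drawArgs, int row, int col, int zoomLevel, ArrayList alMetadata)
{
    // Reuse the tile if it is already in the list
    lock (veTiles.SyncRoot)
    {
        foreach (VeTile tile in veTiles)
        {
            if (tile.Row == row && tile.Col == col && tile.Level == zoomLevel)
            {
                tile.IsNeeded = true; // Update() disposes any tile left unneeded
                return;
            }
        }
    }

    // Otherwise build it: download/cache the texture, compute corners, create the mesh
    VeTile newVeTile = CreateVeTile(drawArgs, row, col, zoomLevel, alMetadata);
    lock (veTiles.SyncRoot)
    {
        veTiles.Add(newVeTile);
    }
}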
The real work happens in CreateVeTile(), which is fairly low-level. I spent a long time on it myself and will try to explain it clearly; even if you don't follow every detail, knowing what it does and how to use it is enough, there is no need to master the underlying theory.
private VeTile CreateVeTile(DrawArgs drawArgs, int row, int col, int zoomLevel, ArrayList alMetadata)
{
//create the VeTile object
VeTile newVeTile = new VeTile(row, col, zoomLevel);
//metadata
if (alMetadata != null)
{
foreach (string metadata in alMetadata)
{
newVeTile.AddMetaData(metadata);
}
}
//fetch the texture image, i.e. download it from the server (or load it from cache) for use as a texture (key step, analyzed in detail later)
//thread to download new tile(s) or just load from cache
newVeTile.GetTexture(drawArgs, pixelsPerTile);
//handle the diff projection
//meters represented by one pixel
double metersPerPixel = MetersPerPixel(zoomLevel);
//number of tiles along one edge at this level
double totalTilesPerEdge = Math.Pow(2, zoomLevel);
//total length (as I understand it, the Earth's circumference)
double totalMeters = totalTilesPerEdge * pixelsPerTile * metersPerPixel;
//(as I understand it, halfMeters is half the Earth's circumference)
double halfMeters = totalMeters / 2;
//do meters calculation in VE space
//the 0,0 origin for VE is in upper left
//compute distances in VE space, whose origin is at the upper-left
//first compute N and W in that space (this is just the inverse of the row/col computation above)
double N = row * (pixelsPerTile * metersPerPixel);
double W = col * (pixelsPerTile * metersPerPixel);
//now convert it to +/- meter coordinates for Proj.4
//the 0,0 origin for Proj.4 is 0 lat, 0 lon
//-22 to 22 million, -11 to 11 million
//new N and W in a coordinate system whose origin is at lat 0, lon 0; axes run N->S and W->E
N = halfMeters - N;
W = W - halfMeters;
//compute the tile's E and S edges
double E = W + (pixelsPerTile * metersPerPixel);
double S = N - (pixelsPerTile * metersPerPixel);
//assign the tile's UL, UR, LL and LR, i.e. its four corner coordinates (important: these feed CreateMesh)
newVeTile.UL = new UV(W, N);
newVeTile.UR = new UV(E, N);
newVeTile.LL = new UV(W, S);
newVeTile.LR = new UV(E, S);
//create mesh
byte opacity = this.Opacity; //from RenderableObject
float verticalExaggeration = World.Settings.VerticalExaggeration;
//the key CreateMesh() method; it involves some math and is not entirely easy to follow
newVeTile.CreateMesh(opacity, verticalExaggeration);
newVeTile.CreateDownloadRectangle(drawArgs, World.Settings.DownloadProgressColor.ToArgb());
return newVeTile;
}
The VeTile class is where rendering really happens; it also handles the conversions and calculations between spherical and planar coordinates, so it is the core of the VE plugin. It involves a fair amount of math, and the programming is mostly Direct3D. Let's now look at CreateMesh().
Note: once you understand CreateMesh(), you have understood most of VE.
public void CreateMesh(byte opacity, float verticalExaggeration)
{
this.vertEx = verticalExaggeration;
int opacityColor = System.Drawing.Color.FromArgb(opacity, 0, 0, 0).ToArgb();
meshPointCount = 32; //64; //96 // How many vertices for each direction in mesh (total: n^2)
//vertices = new CustomVertex.PositionColoredTextured[meshPointCount * meshPointCount];
// Build mesh with one extra row and col around the terrain for normal computation and struts
vertices = new CustomVertex.PositionNormalTextured[(meshPointCount + 2) * (meshPointCount + 2)];
int upperBound = meshPointCount - 1;
float scaleFactor = (float)1 / upperBound;
//using(Projection proj = new Projection(m_projectionParameters))
//{
double uStep = (UR.U - UL.U) / upperBound;
double vStep = (UL.V - LL.V) / upperBound;
UV curUnprojected = new UV(UL.U - uStep, UL.V + vStep);
//reproject planar coordinates back to WW spherical coordinates
// figure out latrange (for terrain detail)
UV geoUL = _proj.Inverse(m_ul);
UV geoLR = _proj.Inverse(m_lr);
double latRange = (geoUL.U - geoLR.U) * 180 / Math.PI;
//radians to degrees (the four variables below are not used further here; the original method used them, so we won't dig into that)
North = geoUL.V * 180 / Math.PI;
South = geoLR.V * 180 / Math.PI;
West = geoUL.U * 180 / Math.PI;
East = geoLR.U * 180 / Math.PI;
//radius
float meshBaseRadius = (float)_layerRadius;
UV geo;
Vector3 pos;
double height = 0;
for (int i = 0; i < meshPointCount + 2; i++)
{
for (int j = 0; j < meshPointCount + 2; j++)
{   //reproject planar (Proj.4) coordinates back to WW spherical coordinates
geo = _proj.Inverse(curUnprojected);
// Radians -> Degrees
geo.U *= 180 / Math.PI;
geo.V *= 180 / Math.PI;
if (_terrainAccessor != null)
{
if (_veForm.IsTerrainOn == true)
{
//height = heightData[i, j] * verticalExaggeration;
//original : need to fetch altitude on a per vertex basis (in VE space) to have matching tile borders (note PM)
height = verticalExaggeration * _terrainAccessor.GetElevationAt(geo.V, geo.U, Math.Abs(upperBound / latRange));
}
else
{
height = 0;
}
}
//convert spherical coordinates to Cartesian coordinates
pos = MathEngine.SphericalToCartesian(
geo.V,
geo.U,
_layerRadius + height);
//fill in the mesh vertex array
int idx = i * (meshPointCount + 2) + j;
vertices[idx].X = pos.X;
vertices[idx].Y = pos.Y;
vertices[idx].Z = pos.Z;
//double sinLat = Math.Sin(geo.V);
//vertices[idx].Z = (float) (pos.Z * sinLat);
vertices[idx].Tu = (j - 1) * scaleFactor;
vertices[idx].Tv = (i - 1) * scaleFactor;
//vertices[idx].Color = opacityColor;
curUnprojected.U += uStep;
}
curUnprojected.U = UL.U - uStep;
curUnprojected.V -= vStep;
}
//}
//build the index array that groups the mesh vertices into triangles; if you know Direct3D programming this is easy to follow
int slices = meshPointCount + 1;
indices = new short[2 * slices * slices * 3];
for (int i = 0; i < slices; i++)
{
for (int j = 0; j < slices; j++)
{
indices[(2 * 3 * i * slices) + 6 * j] = (short)(i * (meshPointCount + 2) + j);
indices[(2 * 3 * i * slices) + 6 * j + 1] = (short)((i + 1) * (meshPointCount + 2) + j);
indices[(2 * 3 * i * slices) + 6 * j + 2] = (short)(i * (meshPointCount + 2) + j + 1);
indices[(2 * 3 * i * slices) + 6 * j + 3] = (short)(i * (meshPointCount + 2) + j + 1);
indices[(2 * 3 * i * slices) + 6 * j + 4] = (short)((i + 1) * (meshPointCount + 2) + j);
indices[(2 * 3 * i * slices) + 6 * j + 5] = (short)((i + 1) * (meshPointCount + 2) + j + 1);
}
}
// Compute normals and fold struts
calculate_normals();
fold_struts(false, meshBaseRadius);
}
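calculate_normals() and fold_struts() are not reproduced in this post. The first is the usual averaged-face-normal pass over the index buffer, and the second presumably folds the outer strut ring down toward meshBaseRadius to hide cracks between neighboring tiles. A hedged sketch of the normal pass, assuming the vertices and indices arrays built above are kept as fields; the plugin's actual implementation may differ.
// Hedged sketch of the face-normal averaging that calculate_normals() presumably performs.
private void calculate_normals()
{
    // Accumulate each triangle's face normal into its three vertices
    for (int k = 0; k < indices.Length; k += 3)
    {
        Vector3 p0 = vertices[indices[k]].Position;
        Vector3 p1 = vertices[indices[k + 1]].Position;
        Vector3 p2 = vertices[indices[k + 2]].Position;
        Vector3 faceNormal = Vector3.Cross(p1 - p0, p2 - p0);
        vertices[indices[k]].Normal += faceNormal;
        vertices[indices[k + 1]].Normal += faceNormal;
        vertices[indices[k + 2]].Normal += faceNormal;
    }
    // Normalize the accumulated normals
    for (int v = 0; v < vertices.Length; v++)
    {
        vertices[v].Normal = Vector3.Normalize(vertices[v].Normal);
    }
}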
(To be continued...)