In-Depth Analysis of the SteamVR (HTC Vive) Unity Plugin (Part 12)

Published on 2017-05-10

10.16. SteamVR_PlayArea.cs

Its main job is to draw a box representing the play area inside the Unity editor. It has nothing to do with the actual play area, i.e. it does not affect the real play area configuration. The origin object of the camera rig carries this script:

 

It looks like this (with the rest of the camera rig hidden):

 

ExecuteInEditMode means the script also runs in the editor; it is not visible at runtime and exists mainly so developers can inspect things while editing. A MeshRenderer and a MeshFilter are required:

[ExecuteInEditMode, RequireComponent(typeof(MeshRenderer), typeof(MeshFilter))]
public class SteamVR_PlayArea : MonoBehaviour
{

Thickness of the border region (the blue part in the figure), 15 cm by default.
       public float borderThickness = 0.15f;

Height of the play area wireframe, 2 m by default.
       public float wireframeHeight = 2.0f;

Only draw the wireframe (the box) when the GameObject is selected.
       public bool drawWireframeWhenSelectedOnly = false;

Whether to also draw the border in the Game view. Arguably this should default to false: showing it in a real build would distract the player, and in the camera rig prefab it is set to false.
       public bool drawInGame = true;

Enum of play area size presets:
       public enum Size
       {

    Use the calibrated area size, as measured during room setup (read from the Chaperone interface)
              Calibrated,

   4m x 3m
              _400x300,

   3m x 2.25m
              _300x225,

   2m x 1.5m
              _200x150
       }

The selected play area size
       public Size size;

Border color
       public Color color = Color.cyan;

Vertex array used to generate the mesh; hidden from the Inspector. It does not really need to be public.
       [HideInInspector]
       public Vector3[] vertices;

Gets the play area bounds: the four corner points on the floor.
       public static bool GetBounds( Size size, ref HmdQuad_t pRect )
       {
             
if (size == Size.Calibrated)
              {

        If the size is set to Calibrated, read it from the Chaperone interface.
                      var initOpenVR = (!SteamVR.active && !SteamVR.usingNativeSupport);
                    
if (initOpenVR)
                     {

            Initialize OpenVR first. This doesn't feel great: why initialize here? In theory there should be one place at startup that initializes everything. This may also get called repeatedly; it could check whether OpenVR is already initialized first, although re-initializing probably does no harm.
                            var error = EVRInitError.None;
                           
OpenVR.Init(ref error, EVRApplicationType.VRApplication_Other);
                     }

                    
var chaperone = OpenVR.Chaperone;

        Fetch the play area rect directly from the Chaperone interface.
                      bool success = (chaperone != null) && chaperone.GetPlayAreaRect(ref pRect);
                    
if (!success)
                           
                             Debug.LogWarning("Failed to get Calibrated Play Area bounds!  Make sure you have tracking first, and that your space is calibrated.");

                    
if (initOpenVR)

            Hmm: shutting OpenVR down again immediately after reading the value. Is that really necessary?
                            OpenVR.Shutdown();

                    
return success;
              }
             
else
              {
                    
try
                     {

            Otherwise the size comes from the preset chosen in the Inspector; the enum names encode the size in centimeters, e.g. _400x300.
                             var str = size.ToString().Substring(1);
                           
                             var arr = str.Split(new char[] {'x'}, 2);

                           
                             // convert to half size in meters (from cm)
                            var x = float.Parse(arr[0]) / 200;
                           
var z = float.Parse(arr[1]) / 200;

            The corner order is as follows (clockwise):

           
                            pRect.vCorners0.v0 =  x;
                            pRect.vCorners0.v1 =  0;
                            pRect.vCorners0.v2 =  z;

                            pRect.vCorners1.v0 =  x;
                            pRect.vCorners1.v1 =  0;
                            pRect.vCorners1.v2 = -z;

                            pRect.vCorners2.v0 = -x;
                            pRect.vCorners2.v1 =  0;
                            pRect.vCorners2.v2 = -z;

                            pRect.vCorners3.v0 = -x;
                            pRect.vCorners3.v1 =  0;
                            pRect.vCorners3.v2 =  z;

                           
return true;
                     }
                    
catch {}
              }

             
return false;
       }
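As a quick sanity check of the /200 conversion above, here is a small sketch (not part of the plugin) of what the non-calibrated path computes for the _400x300 preset:

       // Sketch: GetBounds(Size._400x300, ref rect) without the Chaperone path.
       // "_400x300" -> "400x300" -> { "400", "300" }  (dimensions in centimeters)
       var x = float.Parse("400") / 200;   // 2.0  = half of the 4 m width, in meters
       var z = float.Parse("300") / 200;   // 1.5  = half of the 3 m depth, in meters
       // The four floor corners therefore come out as
       // ( 2, 0,  1.5), ( 2, 0, -1.5), (-2, 0, -1.5), (-2, 0,  1.5)
       // i.e. a 4 m x 3 m rectangle centered on the play area origin.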

Builds the play area mesh (the border mesh on the floor, the blue part seen in the figure above).
       public void BuildMesh()
       {
             
var rect = new HmdQuad_t();
             
if ( !GetBounds(size, ref rect ) )
                    
return;

             
var corners = new HmdVector3_t[] {rect.vCorners0, rect.vCorners1, rect.vCorners2, rect.vCorners3 };

    Build the mesh from the corner points: 8 vertices in total, ordered as in the figure:

              vertices = new Vector3[corners.Length * 2];
              for (int i = 0; i < corners.Length; i++)
              {

        First the four inner vertices:
                      var c = corners[i];

        Raised 1 cm above the floor:
                      vertices[i] = new Vector3(c.v0, 0.01f, c.v2);
              }

             
if (borderThickness == 0.0f)
              {

        A border thickness must be specified:
                     GetComponent<MeshFilter>().mesh = null;
                    
return;
              }

    Next, compute the four outer vertices. The calculation is illustrated in the figure below (using vertex 4 as an example):

              for (int i = 0; i < corners.Length; i++)
              {

        Index of the next corner:
                      int next = (i + 1) % corners.Length;

        Index of the previous corner:
                      int prev = (i + corners.Length - 1) % corners.Length;

        Direction toward the next corner (normalized):
                      var nextSegment = (vertices[next] - vertices[i]).normalized;

        Direction toward the previous corner (normalized):
                      var prevSegment = (vertices[prev] - vertices[i]).normalized;

                    
                      var vert = vertices[i];

        First move onto one of the edges, step ① in the figure. The cross product gives a vector perpendicular to both inputs (Unity uses a left-handed coordinate system) with magnitude equal to the sine of the angle between them; since the angle here is 90°, the result is already a unit vector, so multiplying by the thickness moves the point that distance in that direction.
                      vert += Vector3.Cross(nextSegment, Vector3.up) * borderThickness;

        Then move along the edge, step ② in the figure:
                      vert += Vector3.Cross(prevSegment, Vector3.down) * borderThickness;

                      vertices[corners.Length + i] = vert;
              }
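To make the two cross products concrete, here is a small verification sketch (not part of the plugin) for corner 0 of a _400x300 area, with borderThickness = 0.15:

       var inner = new Vector3(2f, 0.01f, 1.5f);                               // vertices[0]
       var nextSegment = (new Vector3(2f, 0.01f, -1.5f) - inner).normalized;   // toward corner 1: (0, 0, -1)
       var prevSegment = (new Vector3(-2f, 0.01f, 1.5f) - inner).normalized;   // toward corner 3: (-1, 0, 0)
       var outer = inner
                 + Vector3.Cross(nextSegment, Vector3.up) * 0.15f              // (+0.15, 0, 0): step ① in the figure
                 + Vector3.Cross(prevSegment, Vector3.down) * 0.15f;           // (0, 0, +0.15): step ②
       Debug.Log(outer);   // prints (2.15, 0.01, 1.65): the corner moves outward on both axes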

    With the vertices in place, connecting them into triangles in order forms the mesh, as in the figure:

   

    Wound counter-clockwise:
              var triangles = new int[]
              {
                    
0, 1, 4,
                    
1, 5, 4,
                    
1, 2, 5,
                    
2, 6, 5,
                    
2, 3, 6,
                    
3, 7, 6,
                    
3, 0, 7,
                    
0, 4, 7
              };

    A texture UV is defined for each vertex:
              var uv = new Vector2[]
              {
                    
new Vector2(0.0f, 0.0f),
                    
new Vector2(1.0f, 0.0f),
                    
new Vector2(0.0f, 0.0f),
                    
new Vector2(1.0f, 0.0f),
                    
new Vector2(0.0f, 1.0f),
                    
new Vector2(1.0f, 1.0f),
                    
new Vector2(0.0f, 1.0f),
                    
new Vector2(1.0f, 1.0f)
              };

    Per-vertex colors (the outer ring fades to fully transparent):
              var colors = new Color[]
              {
                     color,
                     color,
                     color,
                     color,
                    
                      new Color(color.r, color.g, color.b, 0.0f),
                      new Color(color.r, color.g, color.b, 0.0f),
                      new Color(color.r, color.g, color.b, 0.0f),
                      new Color(color.r, color.g, color.b, 0.0f)
              };

    Create the Mesh and hand it to the MeshFilter:
              var mesh = new Mesh();
              GetComponent<MeshFilter>().mesh = mesh;
              mesh.vertices = vertices;
              mesh.uv = uv;
              mesh.colors = colors;
              mesh.triangles = triangles;

             
              var renderer = GetComponent<MeshRenderer>();

    Use the built-in Sprites-Default.mat material; how it is obtained differs between Unity versions and between editor and player:
#if UNITY_EDITOR && !(UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)
              renderer.material = UnityEditor.AssetDatabase.GetBuiltinExtraResource<Material>("Sprites-Default.mat");
#else
              renderer.material = Resources.GetBuiltinResource<Material>("Sprites-Default.mat");
#endif
              renderer.reflectionProbeUsage = UnityEngine.Rendering.ReflectionProbeUsage.Off;
              renderer.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
              renderer.receiveShadows = false;
#if !(UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)
              renderer.lightProbeUsage = UnityEngine.Rendering.LightProbeUsage.Off;
#else
              renderer.useLightProbes = false;
#endif
       }

#if UNITY_EDITOR

unity编辑器中,能动态修改参数并能更新网格,而在运行时或者游戏窗口,则不    能动态修改

A Hashtable records the previous value of every field; when any of them changes, the mesh is rebuilt.
       Hashtable values;
      
void Update()
       {
             
if (!Application.isPlaying)
              {

        Changes are only applied while the application is not playing (this refers to the mesh; the box wireframe itself can still be modified at runtime).

        This fetches all public instance fields via reflection:
                      var fields = GetType().GetFields(System.Reflection.BindingFlags.Instance | System.Reflection.BindingFlags.Public);

                    
bool rebuild = false;

                    
                      if (values == null || (borderThickness != 0.0f && GetComponent<MeshFilter>().sharedMesh == null))
                     {

            First run:
                            rebuild= true;
                     }
                    
else
                     {
                           
foreach (var f in fields)
                            {
                                  
if (!values.Contains(f) || !f.GetValue(this).Equals(values[f]))
                                   {
                                           rebuild = true;
                                           break;
                                   }
                            }
                     }

                    
if (rebuild)
                     {

            A parameter changed, so rebuild the mesh:
                            BuildMesh();

                             values = new Hashtable();
                             foreach (var f in fields)
                                    values[f] = f.GetValue(this);
                     }
              }
       }
#endif

Gizmos. The play area box is drawn as a Gizmo. A Gizmo is a visual debugging and helper aid in the Scene view, i.e. something drawn purely to help you debug and observe. This also finally explains what the Gizmos drop-down menu in the Unity window is for:

Clearly, any script that implements OnDrawGizmos shows up in that menu. You can also see that a Camera draws a camera icon in the scene (plus a view frustum when selected) and an AudioSource draws a speaker icon; these are all Gizmos. Unchecking an entry hides the corresponding Gizmo, with the same effect as collapsing that script in the Inspector.
       void OnDrawGizmos()
       {
             
if (!drawWireframeWhenSelectedOnly)
                     DrawWireframe();
       }

When selected, it could for instance be drawn in a different color. Here a flag controls whether it is drawn only while selected:
       void OnDrawGizmosSelected()
       {
             
if (drawWireframeWhenSelectedOnly)
                     DrawWireframe();
       }

      
public void DrawWireframe()
       {
             
if (vertices == null || vertices.Length== 0)
                    
return;

   TransformVector and TransformPoint convert coordinates from local space to world space.

   offset is the vector from the bottom face up to the top face:
              var offset = transform.TransformVector(Vector3.up * wireframeHeight);
             
for (int i = 0; i < 4; i++)
              {
                    
int next = (i + 1) % 4;

                    
                      var a = transform.TransformPoint(vertices[i]);
                      var b = a + offset;
                      var c = transform.TransformPoint(vertices[next]);
                      var d = c + offset;
                      Gizmos.DrawLine(a, b);
                      Gizmos.DrawLine(a, c);
                      Gizmos.DrawLine(b, d);
              }
       }

      
public void OnEnable()
       {
             
if (Application.isPlaying)
              {

        At runtime, whether the border is shown is determined by drawInGame:
                      GetComponent<MeshRenderer>().enabled = drawInGame;

                    
                      // No need to remain enabled at runtime.
                      // Anyone that wants to change properties at runtime
                      // should call BuildMesh themselves.

        The component disables itself at runtime. If you need to change parameters while playing, call BuildMesh manually (see the sketch after this method). Because the generated mesh has already been assigned to the MeshFilter on this GameObject, it keeps rendering even with the script disabled.
                      enabled = false;

                    
                      // If we want the configured bounds of the user,
                      // we need to wait for tracking.

        If the play area size is Calibrated, we have to wait asynchronously for the device to be ready (more precisely, for the Chaperone interface to become usable), which is done with a coroutine:
                      if (drawInGame && size == Size.Calibrated)
                             StartCoroutine("UpdateBounds");
              }
       }
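Since the component disables itself in play mode, a script that wants to change the play area at runtime has to rebuild the mesh explicitly. A hypothetical sketch (the method name and values below are my own example, not part of the plugin):

       void SetSmallPlayArea(SteamVR_PlayArea playArea)
       {
              playArea.size = SteamVR_PlayArea.Size._300x225;
              playArea.color = Color.red;
              playArea.BuildMesh();   // regenerate the border mesh with the new settings
       }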

The coroutine: once the Chaperone interface is available, fetch the bounds and build the mesh.
       IEnumerator UpdateBounds()
       {
              GetComponent<MeshFilter>().mesh = null; // clear existing

              var chaperone = OpenVR.Chaperone;
             
if (chaperone == null)

       yield break means none of the following statements run, i.e. the coroutine simply ends. So if the Chaperone interface cannot be obtained, we just stop. The coroutine's only purpose here is to wait asynchronously for calibration to finish.
                     yield break;

             
while (chaperone.GetCalibrationState() != ChaperoneCalibrationState.OK)
                    
yield return null;

              BuildMesh();
       }
}

10.17. SteamVR_Render.cs

This is the class that actually renders the SteamVR_Camera (more precisely, the whole scene, including the various Cameras and Overlays), arguably the most important one. It is a MonoBehaviour, so it must (or at least can) be attached to a GameObject, but it is only used on the [SteamVR] prefab (as we will see below, if it is referenced and none exists, a GameObject is created automatically and the script is added to it).

 

public class SteamVR_Render : MonoBehaviour
{

Whether to pause the game while the dashboard is visible (the pause simply stops time).
       public bool pauseGameWhenDashboardIsVisible = true;

Lock the physics update rate to the HMD render frequency.
       public bool lockPhysicsUpdateRateToRenderFrequency = true;
    External camera. The external camera is used to film mixed-reality video combining the real world with the virtual one. It does not need to be assigned manually; it is created automatically if the config file below exists in the project directory.
       public SteamVR_ExternalCamera externalCamera;

External camera config file
       public string externalCameraConfigPath = "externalcamera.cfg";

#if (UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)

In the 5.x code path you can specify, per eye, which extra layers to show:
       public LayerMask leftMask, rightMask;

The camera mask, used to mask out meshes while rendering:
       SteamVR_CameraMask cameraMask;
#endif

Tracking space type; defaults to standing:
       public ETrackingUniverseOrigin trackingSpace = ETrackingUniverseOrigin.TrackingUniverseStanding;

Which eye is currently being rendered
       static public EVREye eye { get; private set; }

Singleton
       static private SteamVR_Render _instance;
      
static public SteamVR_Render instance
       {
             
get
              {
                    
if (_instance == null)
                     {

             On first access, if the scene already contains a SteamVR_Render (the [SteamVR] prefab ships with one, so dropping that prefab into the scene is enough), just pick it up.
                             _instance = GameObject.FindObjectOfType<SteamVR_Render>();

                           
if (_instance == null)

                If the scene has none yet, an empty GameObject named "[SteamVR]" is created automatically and the SteamVR_Render script is added to it.
                                    _instance = new GameObject("[SteamVR]").AddComponent<SteamVR_Render>();
                     }
                    
return _instance;
              }
       }
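A small usage sketch: merely touching the singleton is enough to get a "[SteamVR]" object with SteamVR_Render attached if the scene does not already contain one. The seated/standing switch below is just an example of something you might configure through it:

       var render = SteamVR_Render.instance;   // finds or creates the "[SteamVR]" object
       render.trackingSpace = ETrackingUniverseOrigin.TrackingUniverseSeated;   // e.g. switch to seated tracking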

      
void OnDestroy()
       {
               _instance = null;
       }

On application quit, set a flag so that things can be released safely.
       static private bool isQuitting;
      
void OnApplicationQuit()
       {
               isQuitting = true;
               SteamVR.SafeDispose();
       }

Adds a SteamVR_Camera. Called from SteamVR_Camera.OnEnable.
       static public void Add(SteamVR_Camera vrcam)
       {
             
if (!isQuitting)
                     instance.AddInternal(vrcam);
       }

Removes a SteamVR_Camera; called from SteamVR_Camera.OnDisable.
       static public void Remove(SteamVR_Camera vrcam)
       {
             
              if (!isQuitting && _instance != null)
                     instance.RemoveInternal(vrcam);
       }

Gets the top camera (rendered last, greatest depth)
       static public SteamVR_Camera Top()
       {
             
if (!isQuitting)
                    
return instance.TopInternal();

             
return null;
       }

Array of SteamVR_Cameras
       private SteamVR_Camera[] cameras = new SteamVR_Camera[0];

Adds a SteamVR_Camera
       void AddInternal(SteamVR_Camera vrcam)
       {

    A SteamVR_Camera always sits on a Unity Camera:
               var camera = vrcam.GetComponent<Camera>();

     Grow the camera array by one:
               var length = cameras.Length;
             
var sorted = new SteamVR_Camera[length + 1];
             
int insert = 0;

     The camera array is sorted by depth: the smaller the depth, the earlier it appears (the smaller the index). The main camera's depth normally defaults to -1. Usually there is only one camera; for why there can sometimes be more, see: http://www.aiweibang.com/yuedu/89820078.html
               for (int i = 0; i < length; i++)
               {
                    
                      var c = cameras[i].GetComponent<Camera>();
                      if (i == insert && c.depth > camera.depth)
                             sorted[insert++] = vrcam;

                      sorted[insert++] = cameras[i];
              }
             
              if (insert == length)
                      sorted[insert] = vrcam;

              cameras = sorted;

#if (UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)

     In the 5.x path the script is only enabled here (i.e. once the scene has at least one SteamVR_Camera):
               enabled = true;
#endif
       }

Removes a SteamVR_Camera
       void RemoveInternal(SteamVR_Camera vrcam)
       {
             
              var length = cameras.Length;
              int count = 0;
              for (int i = 0; i < length; i++)
              {
                    
                      var c = cameras[i];
                      if (c == vrcam)

             Can the same camera really be added more than once?
                             ++count;
              }
             
if (count == 0)
                    
return;

             
              var sorted = new SteamVR_Camera[length - count];
              int insert = 0;
              for (int i = 0; i < length; i++)
              {
                      var c = cameras[i];
                      if (c != vrcam)
                             sorted[insert++] = c;
              }

               cameras = sorted;
       }

Gets the top camera, i.e. the last one in the array, rendered last, with the greatest depth.
       SteamVR_Camera TopInternal()
       {
             
if (cameras.Length > 0)
                    
                      return cameras[cameras.Length - 1];

             
return null;
       }

Poses of all tracked devices
       public TrackedDevicePose_t[] poses = new TrackedDevicePose_t[OpenVR.k_unMaxTrackedDeviceCount];

These are the poses predicted a couple of frames ahead (the "game poses"); an empty array is declared here, i.e. we are not interested in the prediction.
       public TrackedDevicePose_t[] gamePoses = new TrackedDevicePose_t[0];

Pauses rendering (e.g. during a scene transition). This is more drastic than pauseGameWhenDashboardIsVisible: it stops rendering entirely.
       static private bool _pauseRendering;
      
static public bool pauseRendering
       {
             
get { return _pauseRendering; }
             
set
              {
                      _pauseRendering = value;
#if !(UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)

        For non-5.x versions, also tell the compositor to suspend rendering:
                      var compositor = OpenVR.Compositor;
                      if (compositor != null)
                             compositor.SuspendRendering(value);
#endif
              }
       }
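A typical use is suspending rendering around a scene load. A hypothetical helper (a sketch; assumes the Unity 5.3+ SceneManager API):

       IEnumerator LoadLevel(string sceneName)
       {
              SteamVR_Render.pauseRendering = true;    // stop submitting frames to the compositor
              var op = UnityEngine.SceneManagement.SceneManager.LoadSceneAsync(sceneName);
              while (!op.isDone)
                     yield return null;
              SteamVR_Render.pauseRendering = false;   // resume once the new scene is ready
       }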

The render coroutine, essentially a conditional Update.
       private IEnumerator RenderLoop()
       {
             
while (true)
              {

        This is an infinite loop; each iteration starts by waiting until the frame has finished rendering but has not yet been presented. WaitForEndOfFrame waits until all cameras and GUI have rendered, right before the frame is shown on screen, so the code that follows can still operate on what is about to be displayed.
                     yield return new WaitForEndOfFrame();

                    
if (pauseRendering)

            If rendering is paused, just keep waiting:
                            continue;

        Now the compositor comes into play. Rather than going through the global SteamVR instance, it is fetched directly from OpenVR; what comes back is of course the same object:
                     var compositor = OpenVR.Compositor;
                    
if (compositor!= null)
                     {
                           
if (!compositor.CanRenderScene())

                If the compositor cannot currently render the scene (which requires that this process holds scene focus), keep waiting:
                                   continue;

            Set the tracking space type, seated or standing; this affects the height of the viewpoint:
                            compositor.SetTrackingSpace(trackingSpace);

#if (UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)

        QueueEventOnRenderThread notifies openvr_api.dll asynchronously through Unity's native plugin mechanism. The exact meaning of k_nRenderEventID_WaitGetPoses needs a closer look later.
                             SteamVR_Utils.QueueEventOnRenderThread(SteamVR.Unity.k_nRenderEventID_WaitGetPoses);

                             // Hack to flush render event that was queued in Update (this ensures WaitGetPoses has returned before we grab the new values).

        EventWriteString appears to be nothing more than logging.
                             SteamVR.Unity.EventWriteString("[UnityMain] GetNativeTexturePtr - Begin");

        Here the code simply calls SteamVR_Camera.GetSceneTexture(...).GetNativeTexturePtr(); as the comment above says, this is a small trick to force the queued render event to flush, which guarantees the previous WaitGetPoses has returned so the new values can be read:
                             SteamVR_Camera.GetSceneTexture(cameras[0].GetComponent<Camera>().hdr).GetNativeTexturePtr();
                             SteamVR.Unity.EventWriteString("[UnityMain] GetNativeTexturePtr - End");

        GetLastPoses retrieves the values returned by the previous WaitGetPoses; gamePoses holds the predicted poses, which are ignored here. Once the tracked-device poses have been obtained, they are broadcast to all listeners via the custom "new_poses" event.
                             compositor.GetLastPoses(poses, gamePoses);
                           
SteamVR_Utils.Event.Send("new_poses", poses);
                           
SteamVR_Utils.Event.Send("new_poses_applied");
#endif
                     }

        Update the overlay:
                     var overlay = SteamVR_Overlay.instance;
                    
if (overlay != null)
                            overlay.UpdateOverlay();

        Render the external camera:
                     RenderExternalCamera();

#if (UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)

        In the 5.x path, render the left and right eyes:
                      var vr = SteamVR.instance;
                      RenderEye(vr, EVREye.Eye_Left);
                      RenderEye(vr, EVREye.Eye_Right);

                    
                      // Move cameras back to head position so they can be tracked reliably

        Reset each SteamVR_Camera's local transform (relative to the head) so it can be tracked. Strictly speaking this reset is not necessary: the later modification happens in RenderEye and assigns values directly rather than accumulating them, so it would not matter. In fact the values assigned there also turn out to be (0,0,0) and (0,0,0,1); the reason for that is discussed below.
                      foreach (var c in cameras)
                      {
                             c.transform.localPosition = Vector3.zero;
                             c.transform.localRotation = Quaternion.identity;
                      }

                    
                      if (cameraMask != null)

             If a camera mask exists, clear its mesh (on the MeshFilter):
                            cameraMask.Clear();
#endif
              }
       }

 

To summarize, the render loop (executed every frame) is: wait for Unity to finish rendering the frame -> set the tracking space -> fetch the tracked-device poses and broadcast them -> update the overlay -> render the external camera -> render the left and right eyes -> reset the camera transforms -> clear the camera mask.

#if (UNITY_5_3 ||UNITY_5_2 || UNITY_5_1 || UNITY_5_0)
      
void RenderEye(SteamVR vr, EVREye eye)
       {
             
int i = (int)eye;
             
SteamVR_Render.eye = eye;

             
              if (cameraMask != null)

        This builds the mask mesh for the current eye:
                      cameraMask.Set(vr, eye);

             
foreach (var c in cameras)
              {

        Set the camera's local transform. The values come from the SteamVR.eyes array, which in turn comes from IVRSystem.GetEyeToHeadTransform and is updated in SteamVR.OnNewPose. In theory the eyes are offset from the head (assuming the HMD pose represents the head), and the two eyes see with some parallax so that a stereo image can form; so vr.eyes should differ between the left and right eye. Yet printing the values shows they are identical, both (0,0,0) and (0,0,0,1). Why? Presumably because the per-eye offset is applied inside openvr_api.dll (or the runtime, or the hardware); this is only a guess based on the Vive, and other hardware such as the Oculus may behave differently.
                      c.transform.localPosition = vr.eyes[i].pos;
                      c.transform.localRotation = vr.eyes[i].rot;

                    
                      // Update position to keep from getting culled

        Why does assigning the camera's position to the CameraMask (and hence to its MeshFilter/MeshRenderer) keep it from being culled?
                      cameraMask.transform.position = c.transform.position;

                    
                      var camera = c.GetComponent<Camera>();

        Set the rendering camera's target texture to SteamVR_Camera's scene texture, so the rendered scene ends up in that texture:
                      camera.targetTexture = SteamVR_Camera.GetSceneTexture(camera.hdr);

        Temporarily save the camera's culling settings and restore them after rendering. A camera's cullingMask is a per-layer visibility setting, as shown in the figure:

        Up to 32 layers are supported; a 32-bit integer encodes whether each layer is rendered. Each object can be assigned to a layer via the Layer field in its Inspector, and new layers can be added.
                      int cullingMask = camera.cullingMask;
                    
if (eye == EVREye.Eye_Left)
                     {

             When rendering the left eye, strip out the right-eye-only layers and add the left-eye-only ones:
                             camera.cullingMask &= ~rightMask;
                             camera.cullingMask |= leftMask;
                     }
                    
else
                     {
                 When rendering the right eye, strip out the left-eye-only layers and add the right-eye-only ones:
                             camera.cullingMask &= ~leftMask;
                             camera.cullingMask |= rightMask;
                     }

        Call the camera's Render manually, so the rendered image ends up in SteamVR_Camera's texture. Rendering has to be triggered manually because the camera was disabled in SteamVR_Camera.OnEnable:
                     camera.Render();

        Restore the cullingMask:
                      camera.cullingMask = cullingMask;

        The camera's transform should also be restored; it could be done here, but it is actually restored in RenderLoop above.
              }
       }
#endif
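For the 5.x masks above, a hypothetical configuration sketch (the layer name "LeftEyeOnly" is my own example, not something the plugin defines): put an object, say a HUD element, on a dedicated layer and mark that layer as left-eye-only:

       var render = SteamVR_Render.instance;
       int leftOnlyLayer = LayerMask.NameToLayer("LeftEyeOnly");   // hypothetical user-defined layer
       render.leftMask = 1 << leftOnlyLayer;    // forced on for the left eye, forced off for the right
       render.rightMask = 0;                    // nothing extra for the right eye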

Renders the external camera.
       void RenderExternalCamera()
       {
             
if (externalCamera == null)
                    
return;

             
if (!externalCamera.gameObject.activeInHierarchy)
                    
return;

    The external camera has a frame-skip parameter: its frame rate does not need to be as high as the HMD's (the Vive renders at 90 fps, far more than the video needs), so a frame-skip value makes the external camera render only every few Vive frames, which helps performance.
              var frameSkip = (int)Mathf.Max(externalCamera.config.frameSkip, 0.0f);

     This is where frames are skipped; Time.frameCount is the total number of frames since startup:
               if (Time.frameCount % (frameSkip + 1) != 0)
                    
return;

             
              // Keep external camera relative to the most relevant vr camera.

    Attach the external camera to the top VR camera (not really an attachment; it just copies that camera's settings):
              externalCamera.AttachToCamera(TopInternal());

    Render the near and far parts (foreground and background) separately, drawn directly into the companion window on the PC:
              externalCamera.RenderNear();
              externalCamera.RenderFar();
       }
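A quick numeric sketch of the frame-skip logic, assuming the Vive's 90 Hz render rate:

       int frameSkip = 2;                            // value that would come from externalcamera.cfg
       float hmdHz = 90f;
       float externalHz = hmdHz / (frameSkip + 1);   // 30 Hz: rendered on frames 0, 3, 6, ...
       bool renderThisFrame = (Time.frameCount % (frameSkip + 1)) == 0;   // the same test used above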

      
float sceneResolutionScale = 1.0f, timeScale = 1.0f;

Called when the application's input focus changes (gained or lost).
       private void OnInputFocus(params object[] args)
       {
             
bool hasFocus = (bool)args[0];
             
if (hasFocus)
              {

        When focus is regained, restore the time scale and the scene resolution scale:
                     if (pauseGameWhenDashboardIsVisible)
                     {
                           
                             Time.timeScale = timeScale;
                      }

                      SteamVR_Camera.sceneResolutionScale = sceneResolutionScale;
              }
             
else
              {

        On losing focus, if pausing while the dashboard is visible is enabled, the time scale is set to 0. Time.timeScale can speed up or slow down the game; setting it to 0 effectively stops time, so the game is paused.
                     if (pauseGameWhenDashboardIsVisible)
                     {
                             timeScale = Time.timeScale;
                             Time.timeScale = 0.0f;
                     }

        Here the scene resolution scale is cut to half: while the app does not have focus (e.g. the dashboard is up), the scene is rendered at half resolution.
                      sceneResolutionScale = SteamVR_Camera.sceneResolutionScale;
                      SteamVR_Camera.sceneResolutionScale = 0.5f;
              }
       }
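To illustrate what the Time.timeScale = 0 pause does and does not affect, a small sketch (assumes the script sits on an object that also has a CanvasGroup; both are my own example, not part of the plugin):

       void Update()
       {
              // Frozen while the dashboard is open, because scaled time stops advancing:
              transform.Rotate(0f, 90f * Time.deltaTime, 0f);
              // Keeps animating regardless, because it uses unscaled time (useful for pause overlays):
              GetComponent<CanvasGroup>().alpha = Mathf.PingPong(Time.unscaledTime, 1f);
       }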

Quit handler: the response to the custom "Quit" event. That event is polled via PollNextEvent in Update below and re-broadcast; its source should be SteamVR itself.
       void OnQuit(params object[] args)
       {
#if UNITY_EDITOR

    The code below uses reflection to make the Unity editor stop playing, a trick worth learning:
              foreach (System.Reflection.Assembly a in System.AppDomain.CurrentDomain.GetAssemblies())
              {
                    
                      var t = a.GetType("UnityEditor.EditorApplication");
                      if (t != null)
                      {
                             t.GetProperty("isPlaying").SetValue(null, false, null);
                           
break;
                     }
              }
#else

    Outside the editor, just quit the application:
              Application.Quit();
#endif
       }

      
void OnEnable()
       {

     When the script is enabled, start the RenderLoop coroutine:
              StartCoroutine("RenderLoop");

     Register the event listeners:
               SteamVR_Utils.Event.Listen("input_focus", OnInputFocus);
             
SteamVR_Utils.Event.Listen("Quit", OnQuit);
       }

      
void OnDisable()
       {

     Stop all coroutines and remove the event listeners:
               StopAllCoroutines();
               SteamVR_Utils.Event.Remove("input_focus", OnInputFocus);
             
SteamVR_Utils.Event.Remove("Quit", OnQuit);
       }

      
void Awake()
       {
#if (UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)

    The 5.x path automatically adds a SteamVR_CameraMask component on a child object:
               var go = new GameObject("cameraMask");
               go.transform.parent = transform;
               cameraMask = go.AddComponent<SteamVR_CameraMask>();
#endif
              if (externalCamera == null && System.IO.File.Exists(externalCameraConfigPath))
              {

        If an "externalcamera.cfg" file exists (in the game's root directory, or the Unity project root), the SteamVR_ExternalCamera prefab is instantiated automatically:
                     var prefab = Resources.Load<GameObject>("SteamVR_ExternalCamera");
                    
                      var instance = Instantiate(prefab);
                      instance.gameObject.name = "ExternalCamera";

       The SteamVR_ExternalCamera script sits on a child node of the prefab named Controller, because the external camera is positioned with the help of a controller:
                      externalCamera = instance.transform.GetChild(0).GetComponent<SteamVR_ExternalCamera>();

         Read the configuration:
                      externalCamera.configPath = externalCameraConfigPath;
                     externalCamera.ReadConfig();
              }
       }
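For reference, a minimal externalcamera.cfg might look like the following. The key names are assumed to mirror the fields of SteamVR_ExternalCamera's config struct (position and rotation relative to the tracked controller, field of view, clip planes, frame skip); verify them against your plugin version:

       x=0
       y=0
       z=0
       rx=0
       ry=0
       rz=0
       fov=60
       near=0.01
       far=100
       frameSkip=2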

      
void FixedUpdate()
       {
#if (UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)
              // We want to call this as soon after Present as possible.

       TODO: what exactly does k_nRenderEventID_PostPresentHandoff do?
              SteamVR_Utils.QueueEventOnRenderThread(SteamVR.Unity.k_nRenderEventID_PostPresentHandoff);
#endif
       }

#if !(UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)

Non-5.x versions use SteamVR_UpdatePoses to update the tracked-device poses; in 5.x this is done inside RenderLoop instead.
       private SteamVR_UpdatePoses poseUpdater;
#endif

       void Update()
       {
#if !(UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)

     The non-5.x path creates a SteamVR_UpdatePoses object here (why is this not done in OnEnable?):
               if (poseUpdater == null)
               {
                      var go = new GameObject("poseUpdater");
                      go.transform.parent = transform;
                      poseUpdater = go.AddComponent<SteamVR_UpdatePoses>();
              }
#else
              if (cameras.Length == 0)
              {

        If the scene contains no SteamVR_Camera, there is nothing special to render. Doing the check here isn't great; it would be cleaner to default enabled to false, enable it in Add(Camera) and disable it in Remove(Camera).
                      enabled = false;
                    
return;
              }

             
               // If our FixedUpdate rate doesn't match our render framerate, then catch the handoff here.

       Both FixedUpdate and Update queue this event. As the comment says, it is queued again here in case the FixedUpdate rate does not match the render frame rate; understanding how that works requires knowing what the event actually does (TODO).
               SteamVR_Utils.QueueEventOnRenderThread(SteamVR.Unity.k_nRenderEventID_PostPresentHandoff);
#endif
               // Force controller update in case no one else called this frame to ensure prevState gets updated.

     Update the state and pose of every tracked device. This could be called from several places, e.g. RenderLoop and FixedUpdate, as well as here in Update:
              SteamVR_Controller.Update();

             
               // Dispatch any OpenVR events.

     Below, OpenVR events are polled and re-dispatched as custom events:
              var system = OpenVR.System;
             
if (system != null)
              {
                    
var vrEvent = new VREvent_t();
                    
var size = (uint)System.Runtime.InteropServices.Marshal.SizeOf(typeof(VREvent_t));
                    
for (int i = 0; i < 64; i++)
                     {
                           
if (!system.PollNextEvent(ref vrEvent, size))
                                  
break;

                           
switch ((EVREventType)vrEvent.eventType)
                            {

                Another app has taken input focus (e.g. the dashboard):
                                    case EVREventType.VREvent_InputFocusCaptured: // another app has taken focus (likely dashboard)
                                          if (vrEvent.data.process.oldPid == 0)
                                          {

                        Why check whether the old process id is 0? A value of 0 presumably denotes the current process, so the notification is only sent when it is this process that lost focus; after all, more than one app may be running.

                        Send the custom focus event:
                                                 SteamVR_Utils.Event.Send("input_focus", false);
                                          }
                                         
break;

                Another app has released input focus:
                                    case EVREventType.VREvent_InputFocusReleased: // that app has released input focus
                                          if (vrEvent.data.process.pid == 0)
                                          {

                        The process id of the app gaining focus is 0 (meaning the current process):
                                                 SteamVR_Utils.Event.Send("input_focus", true);
                                          }
                                         
break;

                Show render models (TODO: why is there even an event for this?):
                                   case EVREventType.VREvent_ShowRenderModels:
                                         
SteamVR_Utils.Event.Send("hide_render_models", false);
                                         
break;

                Hide render models:
                                   case EVREventType.VREvent_HideRenderModels:
                                         
SteamVR_Utils.Event.Send("hide_render_models", true);
                                         
break;
                                  
default:

                    Anything else is re-broadcast as a custom event under its own name (with the "VREvent_" prefix stripped). So the many events whose sender cannot be found anywhere in the code, e.g. TrackedDeviceRoleChanged and Quit, are sent from here; note that these external events follow a different naming convention (see the sketch after this event loop).
                                           var name = System.Enum.GetName(typeof(EVREventType), vrEvent.eventType);
                                         
if (name != null)
                                                
                                                  SteamVR_Utils.Event.Send(name.Substring(8) /*strip VREvent_*/, vrEvent);
                                         
break;
                            }
                     }
              }
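Because the default branch strips the "VREvent_" prefix and re-broadcasts the event under the remaining name, any script can consume these events by name. A sketch (TrackedDeviceRoleChanged is one such forwarded event; the payload is the raw VREvent_t sent above):

       void OnEnable()  { SteamVR_Utils.Event.Listen("TrackedDeviceRoleChanged", OnRoleChanged); }
       void OnDisable() { SteamVR_Utils.Event.Remove("TrackedDeviceRoleChanged", OnRoleChanged); }

       void OnRoleChanged(params object[] args)
       {
              var vrEvent = (VREvent_t)args[0];
              Debug.Log("Controller role changed, device index: " + vrEvent.trackedDeviceIndex);
       }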

             
               // Ensure various settings to minimize latency.

     A handful of latency-minimizing settings are applied here; in this respect SteamVR_Render acts as the application-level configuration point.

     Target frame rate; -1 means the platform default:
               Application.targetFrameRate = -1;

     Keep running in the background, so the companion window does not need focus:
               Application.runInBackground = true; // don't require companion window focus

     Maximum number of frames the graphics driver may queue up:
               QualitySettings.maxQueuedFrames = -1;

     VSync count; 0 means do not wait for VSync. This applies to the companion window:
               QualitySettings.vSyncCount = 0; // this applies to the companion window

               if (lockPhysicsUpdateRateToRenderFrequency && Time.timeScale > 0.0f)
              {

        Derive the fixed timestep from the render frame rate:
                      var vr = SteamVR.instance;
                      if (vr != null)
                      {
                             var timing = new Compositor_FrameTiming();
                             timing.m_nSize = (uint)System.Runtime.InteropServices.Marshal.SizeOf(typeof(Compositor_FrameTiming));
                             vr.compositor.GetFrameTiming(ref timing, 0);

            This is the same value as Fixed Timestep under Edit -> Project Settings -> Time, i.e. the setting that controls how often FixedUpdate runs. At the Vive's 90 Hz display frequency, for example, Time.fixedDeltaTime becomes 1/90, roughly 0.0111 s.
                             Time.fixedDeltaTime = Time.timeScale / vr.hmd_DisplayFrequency;
                             Time.maximumDeltaTime = Time.fixedDeltaTime * timing.m_nNumFramePresents;
                     }
              }
       }
}

 

So SteamVR_Render has three roles: it drives the render flow (the final rendered image ends up in SteamVR_Camera's texture), it dispatches events, and it applies some global settings.

 
