In-Depth Analysis of the SteamVR (HTC Vive) Unity Plugin (Part 10)

Published 2017-05-04

10.6.  SteamVR_ControllerManager.cs

This is the controller (wand) manager. Its main job is to manage the controllers' device indices, because indices change as devices connect and disconnect; it also hides the controllers when input focus is lost. Attach the script to the top-level parent of the tracked objects it should manage (the ones carrying SteamVR_TrackedObject). In the example scene example.unity it sits on the origin object, which has the two Controllers and the HMD underneath it, roughly like this:
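(An approximate hierarchy sketch reconstructed from the description above — the exact names come from the shipped example scene, so check them there:)

origin                    <- SteamVR_ControllerManager attached here
├── Controller (left)     <- SteamVR_TrackedObject
├── Controller (right)    <- SteamVR_TrackedObject
└── head (HMD camera)     <- SteamVR_TrackedObject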

 

public class SteamVR_ControllerManager : MonoBehaviour
{

These two fields hold the left and right controller objects; in the plugin's example.unity scene they are Controller (left) and Controller (right), as in the figure above.
       public GameObject left, right;

Additional controllers can also be assigned here:
       public GameObject[] objects; // populate with objects you want to assign to additional controllers

Tracked-device indices assigned to the entries of the objects array.
       uint[] indices; // assigned

Per-device-index connection state (tracked for controllers only; other devices such as the HMD are not recorded here).
       bool[] connected = new bool[OpenVR.k_unMaxTrackedDeviceCount]; // controllers only

       // cached roles - may or may not be connected

Cached indices of the left and right controllers.
       uint leftIndex = OpenVR.k_unTrackedDeviceIndexInvalid;
       uint rightIndex = OpenVR.k_unTrackedDeviceIndexInvalid;

      
       void Awake()
       {
              // Add left and right entries to the head of the list so we only have to operate on the list itself.

    In the end the left and right controllers are also appended to the objects array so that everything can be handled uniformly. Additional objects can be added in the Inspector.
              var additional = (this.objects != null) ? this.objects.Length : 0;
              var objects = new GameObject[2 + additional];
              indices = new uint[2 + additional];
              objects[0] = right;
              indices[0] = OpenVR.k_unTrackedDeviceIndexInvalid;
              objects[1] = left;
              indices[1] = OpenVR.k_unTrackedDeviceIndexInvalid;
              for (int i = 0; i < additional; i++)
              {
                     objects[2 + i] = this.objects[i];
                     indices[2 + i] = OpenVR.k_unTrackedDeviceIndexInvalid;
              }
              this.objects = objects;
       }

      
       void OnEnable()
       {

    Start with every object deactivated; each one is activated later when its device actually connects. (The original note here said "enable all objects", but the code clearly calls SetActive(false).)
              for (int i = 0; i < objects.Length; i++)
              {
                     var obj = objects[i];
                     if (obj != null)
                            obj.SetActive(false);
              }

    Refresh the state once on enable. The same method is also registered below as the listener for controller role changes (roughly: which physical controller counts as left and which as right).
              OnTrackedDeviceRoleChanged();

    On first enable, replay the current connection state of every device.
              for (int i = 0; i < SteamVR.connected.Length; i++)
                     if (SteamVR.connected[i])
                            OnDeviceConnected(i, true);

    Register the event listeners.
              SteamVR_Utils.Event.Listen("input_focus", OnInputFocus);
              SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);

    The "TrackedDeviceRoleChanged" event originates in openvr_api.dll: no C# script sends it under that name directly; SteamVR_Render polls the native VREvent queue and re-broadcasts each event under its enum name, which is where this string comes from.
              SteamVR_Utils.Event.Listen("TrackedDeviceRoleChanged", OnTrackedDeviceRoleChanged);
       }
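For context, the plugin's event helper is a simple string-keyed broadcast. A minimal sketch of subscribing to the same device_connected event from your own script (the component name is hypothetical; the Listen/Remove calls and the handler signature mirror the ones used above):

using UnityEngine;

public class DeviceConnectionLogger : MonoBehaviour
{
       void OnEnable()
       {
              // Same event name and handler signature as SteamVR_ControllerManager uses.
              SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);
       }

       void OnDisable()
       {
              SteamVR_Utils.Event.Remove("device_connected", OnDeviceConnected);
       }

       private void OnDeviceConnected(params object[] args)
       {
              var index = (int)args[0];      // device index
              var connected = (bool)args[1]; // true = connected, false = disconnected
              Debug.Log("Device " + index + (connected ? " connected" : " disconnected"));
       }
}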

      
       void OnDisable()
       {

    Remove the event listeners when the component is disabled.
              SteamVR_Utils.Event.Remove("input_focus", OnInputFocus);
              SteamVR_Utils.Event.Remove("device_connected", OnDeviceConnected);
              SteamVR_Utils.Event.Remove("TrackedDeviceRoleChanged", OnTrackedDeviceRoleChanged);
       }

These two labels are used when hiding a controller. Hiding works by creating an empty GameObject, reparenting the controller under it, and setting the empty object's active state to false; the empty object is named "hidden(left)" or "hidden(right)".
       static string[] labels = { "left", "right" };

      
       // Hide controllers when the dashboard is up.

As the comment says: the controllers are hidden while the dashboard is up, because the dashboard renders its own controllers.
       private void OnInputFocus(params object[] args)
       {

    The first argument says whether the application has input focus, i.e. whether controller input currently reaches this scene. When the dashboard is shown, it captures the input focus.
              bool hasFocus = (bool)args[0];
              if (hasFocus)
              {
                     for (int i = 0; i < objects.Length; i++)
                     {
                            var obj = objects[i];
                            if (obj != null)
                            {

                Only the first two entries use the left/right labels; the rest are labeled by number.
                                   var label = (i < 2) ? labels[i] : (i - 1).ToString();
                                   ShowObject(obj.transform, "hidden(" + label + ")");
                            }
                     }
              }
              else
              {
                     for (int i = 0; i < objects.Length; i++)
                     {
                            var obj = objects[i];
                            if (obj != null)
                            {
                                   var label = (i < 2) ? labels[i] : (i - 1).ToString();

                When focus is lost, hide the controllers.
                                   HideObject(obj.transform, "hidden(" + label + ")");
                            }
                     }
              }
       }

      
       // Reparents to a new object and deactivates that object (this allows
       // us to call SetActive in OnDeviceConnected independently).

Hiding a controller: create an empty GameObject, make it the controller's parent, then deactivate that empty object.
       private void HideObject(Transform t, string name)
       {
              var hidden = new GameObject(name).transform;
              hidden.parent = t.parent;
              t.parent = hidden;
              hidden.gameObject.SetActive(false);
       }

       private void ShowObject(Transform t, string name)
       {
              var hidden = t.parent;
              if (hidden.gameObject.name != name)
                     return;
              t.parent = hidden.parent;
              Destroy(hidden.gameObject);
       }

Assigns a tracked-device index to the entry at the given position in the objects array.
       private void SetTrackedDeviceIndex(int objectIndex, uint trackedDeviceIndex)
       {
              // First make sure no one else is already using this index.

    If the tracked-device index is already in use by another entry, deactivate that entry and reset its index first.
              if (trackedDeviceIndex != OpenVR.k_unTrackedDeviceIndexInvalid)
              {
                     for (int i = 0; i < objects.Length; i++)
                     {
                            if (i != objectIndex && indices[i] == trackedDeviceIndex)
                            {
                                   var obj = objects[i];
                                   if (obj != null)
                                          obj.SetActive(false);

                                   indices[i] = OpenVR.k_unTrackedDeviceIndexInvalid;
                            }
                     }
              }

              // Only set when changed.
              if (trackedDeviceIndex != indices[objectIndex])
              {
                     indices[objectIndex] = trackedDeviceIndex;

                     var obj = objects[objectIndex];
                     if (obj != null)
                     {
                            if (trackedDeviceIndex == OpenVR.k_unTrackedDeviceIndexInvalid)
                                   obj.SetActive(false);
                            else
                            {
                                   obj.SetActive(true);

                BroadcastMessage calls the named method on every MonoBehaviour on this object and all of its children. Here it calls SetDeviceIndex on every script attached to the controller object. This is why the index fields on the controllers appear unset in the Inspector: they are filled in at runtime through this broadcast (see the sketch after this method).
                                   obj.BroadcastMessage("SetDeviceIndex", (int)trackedDeviceIndex, SendMessageOptions.DontRequireReceiver);
                            }
                     }
              }
       }
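To illustrate the broadcast, here is a minimal sketch of a receiver. SteamVR_TrackedObject implements SetDeviceIndex in this spirit; the component below is a hypothetical extra listener you could put on a controller to react to index changes:

using UnityEngine;

public class DeviceIndexWatcher : MonoBehaviour
{
       public int deviceIndex = -1;

       // Called via BroadcastMessage from SteamVR_ControllerManager.SetTrackedDeviceIndex.
       // SendMessageOptions.DontRequireReceiver means it is fine if a script lacks this method.
       private void SetDeviceIndex(int index)
       {
              deviceIndex = index;
              Debug.Log(name + " is now tracked device " + index);
       }
}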

      
       // Keep track of assigned roles.

Called when tracked-device roles change; the actual handling happens in Refresh().
       private void OnTrackedDeviceRoleChanged(params object[] args)
       {
              Refresh();
       }

      
       // Keep track of connected controller indices.

Notification that a tracked device connected or disconnected.
       private void OnDeviceConnected(params object[] args)
       {

    The first argument of the device_connected event is the device index.
              var index = (uint)(int)args[0];
              bool changed = this.connected[index];
              this.connected[index] = false;

    The second argument says whether the device is now connected.
              var connected = (bool)args[1];
              if (connected)
              {
                     var system = OpenVR.System;
                     if (system != null && system.GetTrackedDeviceClass(index) == ETrackedDeviceClass.Controller)
                     {

            Only devices whose class is Controller are recorded as connected.
                            this.connected[index] = true;
                            changed = !changed; // if we clear and set the same index, nothing has changed
                     }
              }

    changed starts out as the previous state; if the flag is cleared and then set again for a controller that was already connected, the toggle cancels out and Refresh is skipped.
              if (changed)
                     Refresh();
       }

Refreshes the index assignments.
       public void Refresh()
       {
              int objectIndex = 0;

              var system = OpenVR.System;
              if (system != null)
              {

        The index for each hand is looked up by its controller role (left hand / right hand), so the left/right indices can change. In practice, simply swapping the controllers between hands does not change their indices, but restarting a controller forces it to reconnect, and its index may change then.
                     leftIndex = system.GetTrackedDeviceIndexForControllerRole(ETrackedControllerRole.LeftHand);
                     rightIndex = system.GetTrackedDeviceIndexForControllerRole(ETrackedControllerRole.RightHand);
              }

              // If neither role has been assigned yet, try hooking up at least the right controller.
              if (leftIndex == OpenVR.k_unTrackedDeviceIndexInvalid && rightIndex == OpenVR.k_unTrackedDeviceIndexInvalid)
              {

        If neither hand index can be resolved, fall back to the first connected controller and treat it as the right controller.
                     for (uint deviceIndex = 0; deviceIndex < connected.Length; deviceIndex++)
                     {
                            if (connected[deviceIndex])
                            {
                                   SetTrackedDeviceIndex(objectIndex++, deviceIndex);
                                   break;
                            }
                     }
              }
              else
              {

        If at least one hand index was resolved, assign the first two entries (right controller first, then left), provided the device in question is actually connected.
                     SetTrackedDeviceIndex(objectIndex++, (rightIndex < connected.Length && connected[rightIndex]) ? rightIndex : OpenVR.k_unTrackedDeviceIndexInvalid);
                     SetTrackedDeviceIndex(objectIndex++, (leftIndex < connected.Length && connected[leftIndex]) ? leftIndex : OpenVR.k_unTrackedDeviceIndexInvalid);

                     // Assign out any additional controllers only after both left and right have been assigned.
                     if (leftIndex != OpenVR.k_unTrackedDeviceIndexInvalid && rightIndex != OpenVR.k_unTrackedDeviceIndexInvalid)
                     {

            Additional controllers are assigned only once both left and right have been resolved; they are the ones added through the Inspector (figure omitted).
                            for (uint deviceIndex = 0; deviceIndex < connected.Length; deviceIndex++)
                            {
                                   if (objectIndex >= objects.Length)
                                          break;

                                   if (!connected[deviceIndex])
                                          continue;

                                   if (deviceIndex != leftIndex && deviceIndex != rightIndex)
                                   {
                                          SetTrackedDeviceIndex(objectIndex++, deviceIndex);
                                   }
                            }
                     }
              }

              // Reset the rest.

    Every remaining entry is reset to the invalid index.
              while (objectIndex < objects.Length)
              {
                     SetTrackedDeviceIndex(objectIndex++, OpenVR.k_unTrackedDeviceIndexInvalid);
              }
       }
}

10.7.  SteamVR_Ears.cs

AudioListener一起使用。在SteamVR_Camera脚本中Expand后会自动添加到一个与eye平级的ears物体上(当原始相机上有绑定AudioListener时)。如图:

 

Requires an AudioListener on the same object.

[RequireComponent(typeof(AudioListener))]
public class SteamVR_Ears : MonoBehaviour
{

The SteamVR_Camera reference. It does not need to be assigned in the Inspector; SteamVR_Camera sets it.
       public SteamVR_Camera vrcam;

Whether room speakers are being used. The value is read from IVRSettings, the global VR settings store backed by the file C:\ProgramData\Steam\config\steamvr.vrsettings (the format is documented at https://developer.valvesoftware.com/wiki/SteamVR/steamvr.vrsettings). You normally do not edit it by hand; most options can be changed in SteamVR's settings UI (figure omitted).

However, the two options used below do not show up in the settings UI, even though the page linked above documents them, so you may have to edit the config file directly; an illustrative snippet follows.
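As an illustration only, the relevant entries in steamvr.vrsettings would look roughly like this — the file is JSON, and the key names below are inferred from the OpenVR constants used in the code (k_pch_SteamVR_UsingSpeakers_Bool, k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float), so verify the exact names against your SteamVR version:

{
   "steamvr" : {
      "usingSpeakers" : true,
      "speakersForwardYawOffsetDegrees" : 0.0
   }
}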
       bool usingSpeakers;

This is also read from IVRSettings. The original note guessed it was some kind of audio "range", but judging from the setting name used below (SpeakersForwardYawOffsetDegrees) it is a yaw offset: which way the physical speakers face relative to the play area's forward direction, in degrees around the Y axis.
       Quaternion offset;

If speakers are in use, listen for pose updates (HMD/controllers) and update the listener orientation accordingly.
       private void OnNewPosesApplied(params object[] args)
       {

    origin is the topmost parent of the SteamVR_Camera, i.e. the original camera rig position.
              var origin = vrcam.origin;
              var baseRotation = origin != null ? origin.rotation : Quaternion.identity;

    The listener's rotation is pinned to the rig's rotation combined with the speaker yaw offset (orientation only, no position). In other words, with room speakers the AudioListener does not turn with the head: sounds pan according to where things are in the room rather than where the head is facing.
              transform.rotation = baseRotation * offset;
       }

      
       void OnEnable()
       {
              usingSpeakers = false;

              var settings = OpenVR.Settings;
              if (settings != null)
              {

        Read the "using speakers" flag and the yaw offset from IVRSettings.
                     var error = EVRSettingsError.None;
                     if (settings.GetBool(OpenVR.k_pch_SteamVR_Section, OpenVR.k_pch_SteamVR_UsingSpeakers_Bool, false, ref error))
                     {
                            usingSpeakers = true;

                            var yawOffset = settings.GetFloat(OpenVR.k_pch_SteamVR_Section, OpenVR.k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float, 0.0f, ref error);

            The angle is a rotation about the Y axis, i.e. around the player.
                            offset = Quaternion.Euler(0.0f, yawOffset, 0.0f);
                     }
              }

              if (usingSpeakers)

        If speakers are in use, listen for pose updates.
                     SteamVR_Utils.Event.Listen("new_poses_applied", OnNewPosesApplied);
       }

      
       void OnDisable()
       {

    Stop listening for pose updates when disabled.
              if (usingSpeakers)
                     SteamVR_Utils.Event.Remove("new_poses_applied", OnNewPosesApplied);
       }
}

 

10.8.  SteamVR_ExternalCamera.cs

This is used for filming the player with an external (physical) camera in order to produce mixed-reality videos that composite the real player into the virtual game world. See section 7.10 on the SteamVR_ExternalCamera prefab for related videos and discussion. The script is used by the SteamVR_ExternalCamera prefab under Resources and by SteamVR_Render: it lives on SteamVR_ExternalCamera.prefab and is also tied to the Camera on the camera rig's eye object, as shown in the figure (omitted).

 

public class SteamVR_ExternalCamera : MonoBehaviour
{

The external camera configuration (much of it simply mirrors Unity Camera parameters). Plain primitive fields are used so the config file stays trivial to load, since the file is just key=value lines.
       public struct Config
       {

    These three are a position; in principle a Vector3 would do.
              public float x, y, z;

    These three are Euler angles.
              public float rx, ry, rz;
              public float fov;
              public float near, far;
              public float sceneResolutionScale;
              public float frameSkip;
              public float nearOffset, farOffset;
              public float hmdOffset;
              public bool disableStandardAssets;
       }

      
public Config config;

Path of the (local) config file.
       public string configPath;

ReadConfig loads the configuration from that file. As the code shows, the format is plain text, one key=value per line, where key is a field name of the Config struct above. A sample config file:

x=0
y=0
z=0
rx=0
ry=0
rz=0
fov=60
near=0.1
far=100
//m=-0.999059,0.015577,-0.040472,-0.0127,-0.016016,-0.999816,0.010544,0.1799,-0.040301,0.011183,0.999125,-0.0846
sceneResolutionScale=0.5
       public void ReadConfig()
       {
             
              try
              {
                     var mCam = new HmdMatrix34_t();
                     var readCamMatrix = false;

                     object c = config; // box
                     var lines = System.IO.File.ReadAllLines(configPath);
                     foreach (var line in lines)
                     {
                            var split = line.Split('=');
                            if (split.Length == 2)
                            {
                                   var key = split[0];
                                   if (key == "m")
                                   {

                    A calibrated camera matrix is given as 12 comma-separated numbers (a 3x4 matrix).
                                          var values = split[1].Split(',');
                                          if (values.Length == 12)
                                          {
                                                 mCam.m0 = float.Parse(values[0]);
                                                 mCam.m1 = float.Parse(values[1]);
                                                 mCam.m2 = float.Parse(values[2]);
                                                 mCam.m3 = float.Parse(values[3]);
                                                 mCam.m4 = float.Parse(values[4]);
                                                 mCam.m5 = float.Parse(values[5]);
                                                 mCam.m6 = float.Parse(values[6]);
                                                 mCam.m7 = float.Parse(values[7]);
                                                 mCam.m8 = float.Parse(values[8]);
                                                 mCam.m9 = float.Parse(values[9]);
                                                 mCam.m10 = float.Parse(values[10]);
                                                 mCam.m11 = float.Parse(values[11]);
                                                 readCamMatrix = true;
                                          }
                                   }
                                   else if (key == "disableStandardAssets")
                                   {

                    Note the technique: look up a struct field by name via reflection and set it on the boxed copy.
                                          var field = c.GetType().GetField(key);
                                          if (field != null)
                                                 field.SetValue(c, bool.Parse(split[1]));
                                   }
                                   else
                                   {
                                          var field = c.GetType().GetField(key);
                                          if (field != null)
                                                 field.SetValue(c, float.Parse(split[1]));
                                   }
                            }
                     }
                     config = (Config)c; // unbox

                     // Convert calibrated camera matrix settings.
                     if (readCamMatrix)
                     {

            Convert the matrix into a position and a rotation.
                            var t = new SteamVR_Utils.RigidTransform(mCam);
                            config.x = t.pos.x;
                            config.y = t.pos.y;
                            config.z = t.pos.z;
                            var angles = t.rot.eulerAngles;
                            config.rx = angles.x;
                            config.ry = angles.y;
                            config.rz = angles.z;
                     }
              }
              catch { }
       }
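The box/unbox detail above is easy to miss: FieldInfo.SetValue on a struct only works on a boxed copy. A minimal standalone sketch of the same technique (struct and field names are hypothetical):

using System;

struct Settings
{
       public float fov;
}

class Demo
{
       static void Main()
       {
              var settings = new Settings();

              object boxed = settings;                      // box: SetValue needs a reference
              var field = typeof(Settings).GetField("fov");
              field.SetValue(boxed, 75.0f);                 // modifies the boxed copy
              settings = (Settings)boxed;                   // unbox to get the change back

              Console.WriteLine(settings.fov);              // 75
       }
}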

This camera is a copy of the Camera on eye.
       Camera cam;

target is the head transform.
       Transform target;

A quad used for clipping; as the comment in AttachToCamera notes, using the camera's own clip planes for this causes problems with shadows. A Quad is a flat two-dimensional primitive, similar to a Plane but simpler.
       GameObject clipQuad;

Material of the clip quad.
       Material clipMaterial;

Hooks this script up to the VR camera rig.
       public void AttachToCamera(SteamVR_Camera vrcam)
       {
              if (target == vrcam.head)

        Already attached.
                     return;

    target is set to head.
              target = vrcam.head;

    In the SteamVR_ExternalCamera prefab this script sits on "Controller" (an extra controller is strapped to the physical camera so its tracking can position the external camera); the Controller's parent is the prefab root. The code below reparents that root under origin, i.e. as a sibling of head. The Controller also carries a SteamVR_TrackedObject, which does the actual positioning.
              var root = transform.parent;
              var origin = vrcam.head.parent;
              root.parent = origin;
              root.localPosition = Vector3.zero;
              root.localRotation = Quaternion.identity;
              root.localScale = Vector3.one;

             
              // Make a copy of the eye camera to pick up any camera fx.

    Makes a copy of the Camera that eye carries (the original camera), so any camera effects carry over. (A plain Unity Camera could also have been preset in the prefab, with parameters copied from eye.) Note the trick: disable the SteamVR_Camera component first, Instantiate, then re-enable it.
              vrcam.enabled = false;
              var go = Instantiate(vrcam.gameObject);
              vrcam.enabled = true;
              go.name = "camera";

    Remove the SteamVR_Camera and SteamVR_CameraFlip components from the copy.
              DestroyImmediate(go.GetComponent<SteamVR_Camera>());
              DestroyImmediate(go.GetComponent<SteamVR_CameraFlip>());

    Set the field of view from the config, disable occlusion culling, and disable the camera itself: it is rendered manually later, driven by SteamVR_Render.
              cam = go.GetComponent<Camera>();
              cam.fieldOfView = config.fov;
              cam.useOcclusionCulling = false;
              cam.enabled = false; // manually rendered

    Different materials with different shaders for the different output passes.
              colorMat = new Material(Shader.Find("Custom/SteamVR_ColorOut"));
              alphaMat = new Material(Shader.Find("Custom/SteamVR_AlphaOut"));
              clipMaterial = new Material(Shader.Find("Custom/SteamVR_ClearAll"));

    Make the copied Camera a child of the Controller.
              var offset = go.transform;
              offset.parent = transform;

    Apply the configured local position and rotation.
              offset.localPosition = new Vector3(config.x, config.y, config.z);
              offset.localRotation = Quaternion.Euler(config.rx, config.ry, config.rz);
              offset.localScale = Vector3.one;

             
              // Strip children of cloned object (AudioListener in particular).

    Remove all children of the copied camera (the AudioListener in particular).
              while (offset.childCount > 0)
                     DestroyImmediate(offset.GetChild(0).gameObject);

              // Setup clipping quad (using camera clip causes problems with shadows).

    Create a quad used for clipping; clipping with the camera's near plane would cause shadow problems.
              clipQuad = GameObject.CreatePrimitive(PrimitiveType.Quad);
              clipQuad.name = "ClipQuad";

    Remove the MeshCollider.
              DestroyImmediate(clipQuad.GetComponent<MeshCollider>());

              var clipRenderer = clipQuad.GetComponent<MeshRenderer>();

    Assign the clip material, which (from above) uses the ClearAll shader. TODO: what exactly does ClearAll do? The shader still needs a closer look.
              clipRenderer.material = clipMaterial;

    No shadows.
              clipRenderer.shadowCastingMode = ShadowCastingMode.Off;
              clipRenderer.receiveShadows = false;
#if !(UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0)
              clipRenderer.lightProbeUsage = LightProbeUsage.Off;
#else
              clipRenderer.useLightProbes = false;
#endif
              clipRenderer.reflectionProbeUsage = ReflectionProbeUsage.Off;

    Make the quad a child of the copied Camera.
              var clipTransform = clipQuad.transform;
              clipTransform.parent = offset;

    Scale it up to a huge size (in the XY plane).
              clipTransform.localScale = new Vector3(1000.0f, 1000.0f, 1.0f);
              clipTransform.localRotation = Quaternion.identity;

    Disabled by default, just like the camera, since rendering is manual.

    Note that a component's enabled flag and a GameObject's active state are different things.
              clipQuad.SetActive(false);
       }

Returns the distance from the external camera to the head (the target), measured along the camera's horizontal forward direction. The math: build a vertical plane through the target position (pushed hmdOffset metres along the target's horizontal forward) whose normal is the camera's horizontal forward; the signed distance from the camera position to that plane, negated, is how far in front of the camera the player stands. The result is clamped into the camera's near/far range.
       public float GetTargetDistance()
       {
              if (target == null)
                     return config.near + 0.01f;

              var offset = cam.transform;
              var forward = new Vector3(offset.forward.x, 0.0f, offset.forward.z).normalized;
              var targetPos = target.position + new Vector3(target.forward.x, 0.0f, target.forward.z).normalized * config.hmdOffset;

              var distance = -(new Plane(forward, targetPos)).GetDistanceToPoint(offset.position);
              return Mathf.Clamp(distance, config.near + 0.01f, config.far - 0.01f);
       }

      
Material colorMat, alphaMat;

RenderNear renders the near (foreground) pass. The PC screen ends up divided into four quadrants: the top-left is the foreground, i.e. the part of the scene in front of the clip quad set up above (the quad is placed at the player's distance, so this pass captures what lies between the external camera and the player); the top-right is the same foreground rendered as an alpha matte; the bottom-left is the background, i.e. the full scene rendered from the external camera; and the bottom-right is the regular Game view normally seen in the PC companion window.

The final video is composited in layers (figure omitted): in the example, the mountains and sun at the back are the in-game scene, the person in the middle is the external camera footage (keyed out with a green screen), and the flowers on top are the foreground layer.

So the "near plane" rendered here is the foreground layer; see the layout sketch below.
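Putting that description together with the Rect values used by RenderNear and RenderFar below, the companion window is laid out roughly like this:

+---------------------+---------------------+
| foreground (color)  | foreground (alpha)  |   <- RenderNear
+---------------------+---------------------+
| background scene    | regular game view   |   <- RenderFar / relocated game cameras
+---------------------+---------------------+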
      
       public void RenderNear()
       {

    The render target is a quarter of the screen.
              var w = Screen.width / 2;
              var h = Screen.height / 2;

              if (cam.targetTexture == null || cam.targetTexture.width != w || cam.targetTexture.height != h)
              {
                     cam.targetTexture = new RenderTexture(w, h, 24, RenderTextureFormat.ARGB32);
                     cam.targetTexture.antiAliasing = QualitySettings.antiAliasing == 0 ? 1 : QualitySettings.antiAliasing;
              }

    Set the camera's near and far clip planes. Both are ordinary Camera parameters that could be set in the Inspector, although we normally leave them alone. A camera's clipping volume is bounded by six planes; in the Scene view it is drawn converging at the camera. Because near is usually very small it looks like a simple pyramid, but with a larger near value you can see it is actually a truncated pyramid, i.e. a frustum (figures omitted).

              cam.nearClipPlane = config.near;
              cam.farClipPlane = config.far;

    Temporarily save and override some camera parameters.
              var clearFlags = cam.clearFlags;
              var backgroundColor = cam.backgroundColor;

    Temporarily switch the clear flags to clearing with a solid color.
              cam.clearFlags = CameraClearFlags.Color;

    Also set the clear color. Color.clear is fully transparent black, (0, 0, 0, 0); as a background it shows up as black, and clearing the alpha to 0 is what lets the alpha pass below produce a matte of the foreground.
              cam.backgroundColor = Color.clear;

    The clip quad is pushed out to the player's distance (the head distance plus nearOffset, clamped to the near/far range). Everything beyond the quad is covered by it, so this pass captures only what lies between the external camera and the player — exactly the foreground layer that may occlude the player (figure omitted).
              float dist = Mathf.Clamp(GetTargetDistance() + config.nearOffset, config.near, config.far);

    The clip quad's parent is the copied Camera.
              var clipParent = clipQuad.transform.parent;
              clipQuad.transform.position = clipParent.position + clipParent.forward * dist;

             
              MonoBehaviour[] behaviours = null;
              bool[] wasEnabled = null;
              if (config.disableStandardAssets)
              {

        Disable all of Unity's standard-asset scripts on the camera for this pass.
                     behaviours = cam.gameObject.GetComponents<MonoBehaviour>();
                     wasEnabled = new bool[behaviours.Length];
                     for (int i = 0; i < behaviours.Length; i++)
                     {
                            var behaviour = behaviours[i];

            Standard-asset scripts are recognised by their namespace, which starts with "UnityStandardAssets.".
                            if (behaviour.enabled && behaviour.GetType().ToString().StartsWith("UnityStandardAssets."))
                            {
                                   behaviour.enabled = false;
                                   wasEnabled[i] = true;
                            }
                     }
              }

    With all camera parameters set up, call Render() manually to render one frame: effectively a camera with custom position, frustum and clear settings, rendering exactly what it can see.
              clipQuad.SetActive(true);
              cam.Render();
              clipQuad.SetActive(false);

    Restore the camera's original configuration.
              if (behaviours != null)
              {
                     for (int i = 0; i < behaviours.Length; i++)
                     {
                            if (wasEnabled[i])
                            {
                                   behaviours[i].enabled = true;
                            }
                     }
              }

              cam.clearFlags = clearFlags;
              cam.backgroundColor = backgroundColor;

    The camera rendered into a RenderTexture; here that texture (the foreground) is drawn into the top-left and top-right quarters of the screen with two different shaders: plain color on the left, alpha matte on the right.
              Graphics.DrawTexture(new Rect(0, 0, w, h), cam.targetTexture, colorMat);
              Graphics.DrawTexture(new Rect(w, 0, w, h), cam.targetTexture, alphaMat);
       }

RenderFar renders the far (background) pass, i.e. the whole scene, drawn into the bottom-left quadrant. RenderNear and RenderFar are called back to back from SteamVR_Render, so the camera state set up in the near pass carries over.
       public void RenderFar()
       {
              cam.nearClipPlane = config.near;
              cam.farClipPlane = config.far;
              cam.Render();

              var w = Screen.width / 2;
              var h = Screen.height / 2;

    The far pass is drawn into the bottom-left quarter of the screen.
              Graphics.DrawTexture(new Rect(0, h, w, h), cam.targetTexture, colorMat);
       }

      
       void OnGUI()
       {
              // Necessary for Graphics.DrawTexture to work even though we don't do anything here.
       }

      
       Camera[] cameras;
       Rect[] cameraRects;
       float sceneResolutionScale;

      
       void OnEnable()
       {
              // Move game view cameras to lower-right quadrant.

    Note the comment: the game-view cameras are moved into the lower-right quadrant, which is how the regular game view ends up in the bottom-right.

    This finds every loaded Camera in the scene.
              cameras = FindObjectsOfType<Camera>() as Camera[];
              if (cameras != null)
              {
                     var numCameras = cameras.Length;
                     cameraRects = new Rect[numCameras];
                     for (int i = 0; i < numCameras; i++)
                     {

            Every camera's viewport rect is saved. Then, except for the external camera itself, cameras that render to a targetTexture, and SteamVR_Camera cameras, each camera gets its viewport set to (0.5, 0.0, 0.5, 0.5), the bottom-right quarter of the screen (compare the Viewport Rect field in a Camera's Inspector and the resulting Game view). Viewport coordinates have their origin at the bottom-left, i.e. (0, 0) is the bottom-left corner, and the rect is simply the region of the screen the camera renders into.
                            var cam = cameras[i];
                            cameraRects[i] = cam.rect;

                            if (cam == this.cam)
                                   continue;

                            if (cam.targetTexture != null)
                                   continue;

                            if (cam.GetComponent<SteamVR_Camera>() != null)
                                   continue;

                            cam.rect = new Rect(0.5f, 0.0f, 0.5f, 0.5f);
                     }
              }

              if (config.sceneResolutionScale > 0.0f)
              {

        Save the current scene resolution scale so it can be restored later, then apply the configured one (0.5 by default).
                     sceneResolutionScale = SteamVR_Camera.sceneResolutionScale;
                     SteamVR_Camera.sceneResolutionScale = config.sceneResolutionScale;
              }
       }

      
       void OnDisable()
       {
              // Restore game view cameras.

    Restore the game-view cameras' viewports and the scene resolution scale.
              if (cameras != null)
              {
                     var numCameras = cameras.Length;
                     for (int i = 0; i < numCameras; i++)
                     {
                            var cam = cameras[i];
                            if (cam != null)
                                   cam.rect = cameraRects[i];
                     }
                     cameras = null;
                     cameraRects = null;
              }

              if (config.sceneResolutionScale > 0.0f)
              {
                     SteamVR_Camera.sceneResolutionScale = sceneResolutionScale;
              }
       }
}
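For context, this is roughly how SteamVR_Render drives the two passes once per frame. The sketch below is simplified from memory and omits details such as the frameSkip handling, so treat the exact calls (externalCamera, TopInternal) as assumptions and check SteamVR_Render.cs in your plugin version:

// Inside SteamVR_Render (simplified sketch, not the verbatim source):
void RenderExternalCamera()
{
       if (externalCamera == null || !externalCamera.gameObject.activeInHierarchy)
              return;

       externalCamera.AttachToCamera(TopInternal()); // top-most SteamVR_Camera in the scene
       externalCamera.RenderNear();                  // foreground color + alpha matte (top half)
       externalCamera.RenderFar();                   // full scene (bottom-left quadrant)
}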

10.9.  SteamVR_Fade.cs

This script fades a camera's rendered image (optionally including the overlay) in and out. It is used by SteamVR_Stats.cs, which in turn is used by the Status prefab.

 

The header comment is fairly detailed; notes on the parts that are now outdated follow right after it.

//
// Purpose: CameraFade script adapted to work with SteamVR.
//
// Usage:   Add to your top level SteamVR_Camera (the one with ApplyDistortion
//          checked) and drag a reference to this component into SteamVR_Camera
//          RenderComponents list.  Then call the static helper function
//          SteamVR_Fade.Start with the desired color and duration.
//          Use a duration of zero to set the start color.
//
// Example: Fade down from black over one second.
//          SteamVR_Fade.Start(Color.black, 0);
//          SteamVR_Fade.Start(Color.clear, 1);
//
// Note:    This component is provided to fade out a single camera layer's
//          scene view.  If instead you want to fade the entire view, use:
//          SteamVR_Fade.View(Color.black, 1);
//          (Does not affect the game view, however.)
//
//=====================================================================

Purpose: a camera-fade script adapted for SteamVR.

Usage: add it to the top-level SteamVR_Camera (the one with ApplyDistortion checked — that option no longer exists, so this part of the comment is outdated) and drag a reference to this component into SteamVR_Camera's RenderComponents list (also gone). Then call the static helper SteamVR_Fade.Start with the desired color and duration; a duration of zero sets the start color immediately.

Example: to fade in from black over one second:

SteamVR_Fade.Start(Color.black, 0);

SteamVR_Fade.Start(Color.clear, 1);

Note: this component fades a single camera layer's scene view. To fade the entire view, use SteamVR_Fade.View(Color.black, 1) instead — which, however, does not affect the game view.
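As a usage sketch built directly on the calls quoted above, a coroutine that fades the camera layer out to black and back in could look like this (illustrative component name and durations):

using System.Collections;
using UnityEngine;

public class FadeExample : MonoBehaviour
{
       IEnumerator Start()
       {
              // Fade out to black over one second...
              SteamVR_Fade.Start(Color.black, 1.0f);
              yield return new WaitForSeconds(2.0f);

              // ...then fade back in over one second.
              SteamVR_Fade.Start(Color.clear, 1.0f);
       }
}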


using UnityEngine;
using Valve.VR;

public class SteamVR_Fade : MonoBehaviour
{

The current fade color; defaults to fully transparent black.
       private Color currentColor = new Color(0, 0, 0, 0);  // default starting color: black and fully transparent

The target color; defaults to fully transparent black.
       private Color targetColor = new Color(0, 0, 0, 0);   // default target color: black and fully transparent

The color step per second, computed as (target - current) / duration; each frame the current color moves by deltaColor * Time.deltaTime.
       private Color deltaColor = new Color(0, 0, 0, 0);    // the delta-color is basically the "speed / second" at which the current color should change

Whether to fade the overlay as well.
       private bool fadeOverlay = false;

Sets the target color and starts a fade.
       static public void Start(Color newColor, float duration, bool fadeOverlay = false)
       {

    Implemented by sending a custom "fade" event.
              SteamVR_Utils.Event.Send("fade", newColor, duration, fadeOverlay);
       }

Fades the entire view. This bypasses the event mechanism and asks the compositor directly.
       static public void View(Color newColor, float duration)
       {
              var compositor = OpenVR.Compositor;
              if (compositor != null)

        No start color is needed; the compositor fades from whatever the current color is.
                     compositor.FadeToColor(duration, newColor.r, newColor.g, newColor.b, newColor.a, false);
       }

A compile-time switch for testing the whole-view fade: each press of the space bar runs it once. Interestingly it is used exactly like Start above — is the compositor's internal implementation the same?
#if TEST_FADE_VIEW
       void Update()
       {
              if (Input.GetKeyDown(KeyCode.Space))
              {
                     SteamVR_Fade.View(Color.black, 0);
                     SteamVR_Fade.View(Color.clear, 1);
              }
       }
#endif

Calling Start ends up here via the event mechanism (effectively a direct call, just decoupled in ordering).
       public void OnStartFade(params object[] args)
       {
              var newColor = (Color)args[0];
              var duration = (float)args[1];

              fadeOverlay = (args.Length > 2) && (bool)args[2];

              if (duration > 0.0f)
              {
                     targetColor = newColor;

        Compute the color change per second. Subtracting two colors subtracts each component.
                     deltaColor = (targetColor - currentColor) / duration;
              }
              else
              {
                     currentColor = newColor;
              }
       }
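A quick worked example of that arithmetic: fading from fully transparent (0, 0, 0, 0) to opaque black (0, 0, 0, 1) with duration = 2 gives deltaColor = (0, 0, 0, 0.5), i.e. the alpha grows by 0.5 per second; OnPostRender below then adds deltaColor * Time.deltaTime every frame until the target is reached.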

The fade itself is drawn with a material whose shader does the blending.
       static Material fadeMaterial = null;

      
       void OnEnable()
       {
              if (fadeMaterial == null)
              {

        SteamVR_Fade.shader is used for the fade.
                     fadeMaterial = new Material(Shader.Find("Custom/SteamVR_Fade"));
              }

    Listen for the custom "fade" event.
              SteamVR_Utils.Event.Listen("fade", OnStartFade);

    Send a "fade_ready" event. Searching the code, nothing listens for it; it is simply there for whoever is interested to subscribe to.
              SteamVR_Utils.Event.Send("fade_ready");
       }

      
       void OnDisable()
       {
              SteamVR_Utils.Event.Remove("fade", OnStartFade);
       }

OnPostRender中做渐变,此时Camera已经渲染完。它要求当前物体上要有Camera   组件
       void OnPostRender()
       {
             
if (currentColor!= targetColor)
              {
                    
// if thedifference between the current alpha and the desired alpha is smaller thandelta-alpha * deltaTime, then we're pretty much done fading:
                     if (Mathf.Abs(currentColor.a- targetColor.a) < Mathf.Abs(deltaColor.a) * Time.deltaTime)
                     {

            最后一次颜色变化了。Time.deltaTime是自上帧以来的时间
                            currentColor= targetColor;
                            deltaColor=
new Color(0, 0, 0, 0);
                     }
                    
else
                     {
                            currentColor+= deltaColor *
Time.deltaTime;
                     }

                    
if (fadeOverlay)
                     {

            如果要针对overlay做渐变,是直接修改overlayalpha值。不过从                         这里看它是与视图的alpha值相反的。也就是说效果是如果视图渐隐,                            overlay就是渐现,而视图渐现,overlay就渐隐。
                            var overlay = SteamVR_Overlay.instance;
                           
if (overlay != null)
                            {
                                   overlay.alpha=
1.0f -currentColor.a;
                            }
                     }
              }

             
if (currentColor.a > 0 && fadeMaterial)
              {

        通过材质中的渐变shader来做实际的渐变操作。因为是在OnPostRender                     处理,实际上就是在前面渲染好的图像上做一层融合(或者说遮罩)
                     GL.PushMatrix();
                    
GL.LoadOrtho();
                     fadeMaterial.SetPass(
0);
                    
GL.Begin(GL.QUADS);
                    
GL.Color(currentColor);
                    
GL.Vertex3(0, 0, 0);
                    
GL.Vertex3(1, 0, 0);
                    
GL.Vertex3(1, 1, 0);
                    
GL.Vertex3(0, 1, 0);
                    
GL.End();
                    
GL.PopMatrix();
              }
       }
}

10.10.       SteamVR_Frustum.cs

As the name suggests, this script is about the frustum (view volume). It generates a frustum mesh: create an empty GameObject, add this script, and the GameObject will display a frustum. By itself that is not particularly useful; its real purpose is to visualize the scanning volume of a tracked device, typically a base station.

 

Note the ExecuteInEditMode attribute: scripts normally only run in play mode, but with this attribute they also run in edit mode. The script also requires a MeshRenderer and a MeshFilter on the same object.

[ExecuteInEditMode, RequireComponent(typeof(MeshRenderer), typeof(MeshFilter))]
public class SteamVR_Frustum : MonoBehaviour
{

    The index of the tracked device, settable in the Inspector — this is what ties the script to a particular device.
       public SteamVR_TrackedObject.EIndex index;

A view volume has four field-of-view angles: left, right, top and bottom (figure omitted).
       public float fovLeft = 45, fovRight = 45, fovTop = 45, fovBottom = 45, nearZ = 0.5f, farZ = 2.5f;

Rebuilds the mesh. The parameters above can be edited in the Inspector, or read back from the hardware (see OnDeviceConnected below).
       public void UpdateModel()
       {
              fovLeft = Mathf.Clamp(fovLeft, 1, 89);
              fovRight = Mathf.Clamp(fovRight, 1, 89);
              fovTop = Mathf.Clamp(fovTop, 1, 89);
              fovBottom = Mathf.Clamp(fovBottom, 1, 89);
              farZ = Mathf.Max(farZ, nearZ + 0.01f);
              nearZ = Mathf.Clamp(nearZ, 0.01f, farZ - 0.01f);

              var lsin = Mathf.Sin(-fovLeft * Mathf.Deg2Rad);
              var rsin = Mathf.Sin(fovRight * Mathf.Deg2Rad);
              var tsin = Mathf.Sin(fovTop * Mathf.Deg2Rad);
              var bsin = Mathf.Sin(-fovBottom * Mathf.Deg2Rad);

              var lcos = Mathf.Cos(-fovLeft * Mathf.Deg2Rad);
              var rcos = Mathf.Cos(fovRight * Mathf.Deg2Rad);
              var tcos = Mathf.Cos(fovTop * Mathf.Deg2Rad);
              var bcos = Mathf.Cos(-fovBottom * Mathf.Deg2Rad);

    Compute the eight corner vertices of the view volume (a truncated pyramid); a short worked example follows the array.
              var corners = new Vector3[] {
                     new Vector3(lsin * nearZ / lcos, tsin * nearZ / tcos, nearZ), //tln
                     new Vector3(rsin * nearZ / rcos, tsin * nearZ / tcos, nearZ), //trn
                     new Vector3(rsin * nearZ / rcos, bsin * nearZ / bcos, nearZ), //brn
                     new Vector3(lsin * nearZ / lcos, bsin * nearZ / bcos, nearZ), //bln
                     new Vector3(lsin * farZ  / lcos, tsin * farZ  / tcos, farZ ), //tlf
                     new Vector3(rsin * farZ  / rcos, tsin * farZ  / tcos, farZ ), //trf
                     new Vector3(rsin * farZ  / rcos, bsin * farZ  / bcos, farZ ), //brf
                     new Vector3(lsin * farZ  / lcos, bsin * farZ  / bcos, farZ ), //blf
              };
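    A quick worked example: each coordinate is just tan(fov) scaled by the plane's depth, since sin/cos = tan. With fovLeft = 45° and nearZ = 0.5, the left edge of the near face sits at x = -tan(45°) * 0.5 = -0.5; on the far face at farZ = 2.5 the same edge is at x = -2.5, which is what gives the mesh its pyramid shape.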

    Build the triangle list. A box has six faces with two triangles each, described here by indices into the corners array above (0-3 are the near face, 4-7 the far face, in the order shown by the //tln … //blf comments). Only the left, right, top and bottom faces are triangulated; the near and far faces are commented out, so the volume is open at both ends. Each face also appears twice with opposite winding, so it is visible from both sides.
              var triangles = new int[] {
              //     0, 1, 2, 0, 2, 3, // near
              //     0, 2, 1, 0, 3, 2, // near
              //     4, 5, 6, 4, 6, 7, // far
              //     4, 6, 5, 4, 7, 6, // far
                     0, 4, 7, 0, 7, 3, // left
                     0, 7, 4, 0, 3, 7, // left
                     1, 5, 6, 1, 6, 2, // right
                     1, 6, 5, 1, 2, 6, // right
                     0, 4, 5, 0, 5, 1, // top
                     0, 5, 4, 0, 1, 5, // top
                     2, 3, 7, 2, 7, 6, // bottom
                     2, 7, 3, 2, 6, 7, // bottom
              };

    Note this way of building a mesh by hand (generating the vertices and normals explicitly); it is the sort of thing you do all the time when first learning OpenGL.
              int j = 0;
              var vertices = new Vector3[triangles.Length];
              var normals = new Vector3[triangles.Length];
              for (int i = 0; i < triangles.Length / 3; i++)
              {
                     var a = corners[triangles[i * 3 + 0]];
                     var b = corners[triangles[i * 3 + 1]];
                     var c = corners[triangles[i * 3 + 2]];
                     var n = Vector3.Cross(b - a, c - a).normalized;
                     normals[i * 3 + 0] = n;
                     normals[i * 3 + 1] = n;
                     normals[i * 3 + 2] = n;
                     vertices[i * 3 + 0] = a;
                     vertices[i * 3 + 1] = b;
                     vertices[i * 3 + 2] = c;

        The triangle indices are renumbered sequentially because each triangle now has its own copy of its vertices: a Unity mesh stores one normal per vertex, so flat per-face shading means faces cannot share vertices. (Reading the generated Mesh back confirms this.)
                     triangles[i * 3 + 0] = j++;
                     triangles[i * 3 + 1] = j++;
                     triangles[i * 3 + 2] = j++;
              }

              var mesh = new Mesh();
              mesh.vertices = vertices;
              mesh.normals = normals;
              mesh.triangles = triangles;

    Finally the mesh is assigned to the MeshFilter; only meshes assigned there get rendered.
              GetComponent<MeshFilter>().mesh = mesh;
       }

Notification that a tracked device connected or disconnected.
       private void OnDeviceConnected(params object[] args)
       {
              var i = (int)args[0];
              if (i != (int)index)
                     return;

              GetComponent<MeshFilter>().mesh = null;

              var connected = (bool)args[1];
              if (connected)
              {
                     var system = OpenVR.System;
                     if (system != null && system.GetTrackedDeviceClass((uint)i) == ETrackedDeviceClass.TrackingReference)
                     {

            TrackingReference is the device class of the base stations. So of all the values you can pick for index in the Inspector, only one that refers to a base station actually produces a frustum; its field-of-view angles and tracking range are then read from the device's properties.
                            var error = ETrackedPropertyError.TrackedProp_Success;
                            var result = system.GetFloatTrackedDeviceProperty((uint)i, ETrackedDeviceProperty.Prop_FieldOfViewLeftDegrees_Float, ref error);
                            if (error == ETrackedPropertyError.TrackedProp_Success)
                                   fovLeft = result;

                            result = system.GetFloatTrackedDeviceProperty((uint)i, ETrackedDeviceProperty.Prop_FieldOfViewRightDegrees_Float, ref error);
                            if (error == ETrackedPropertyError.TrackedProp_Success)
                                   fovRight = result;

                            result = system.GetFloatTrackedDeviceProperty((uint)i, ETrackedDeviceProperty.Prop_FieldOfViewTopDegrees_Float, ref error);
                            if (error == ETrackedPropertyError.TrackedProp_Success)
                                   fovTop = result;

                            result = system.GetFloatTrackedDeviceProperty((uint)i, ETrackedDeviceProperty.Prop_FieldOfViewBottomDegrees_Float, ref error);
                            if (error == ETrackedPropertyError.TrackedProp_Success)
                                   fovBottom = result;

                            result = system.GetFloatTrackedDeviceProperty((uint)i, ETrackedDeviceProperty.Prop_TrackingRangeMinimumMeters_Float, ref error);
                            if (error == ETrackedPropertyError.TrackedProp_Success)
                                   nearZ = result;

                            result = system.GetFloatTrackedDeviceProperty((uint)i, ETrackedDeviceProperty.Prop_TrackingRangeMaximumMeters_Float, ref error);
                            if (error == ETrackedPropertyError.TrackedProp_Success)
                                   farZ = result;

                            UpdateModel();
                     }
              }
       }

      
       void OnEnable()
       {
              GetComponent<MeshFilter>().mesh = null;

    Listen for device connection events.
              SteamVR_Utils.Event.Listen("device_connected", OnDeviceConnected);
       }

      
       void OnDisable()
       {
              SteamVR_Utils.Event.Remove("device_connected", OnDeviceConnected);
              GetComponent<MeshFilter>().mesh = null;
       }

#if UNITY_EDITOR

In the editor the model is rebuilt continuously, so parameter changes made in the Inspector show up immediately.
       void Update()
       {
              if (!Application.isPlaying)
                     UpdateModel();
       }
#endif
}

 

If you add an empty GameObject to the scene and attach this script, you get a frustum mesh like the one in the figure (omitted).

If you set index to the base station's index, add a SteamVR_TrackedObject to the same GameObject, and set its origin to [CameraRig], you will find that the frustum sits exactly at the base station's position (press the system button to see where the base stations are), and that the frustum is precisely the base station's scanning volume. A setup sketch follows.
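A minimal sketch of that setup done in code rather than in the editor. The device index is an assumption — look up which index your base station actually has (for example via SteamVR_TrackedObject's Inspector dropdown) — and note that the frustum mesh is (re)built when the device_connected event fires, so setting this up in the editor as described above is usually simpler:

using UnityEngine;

public class FrustumSetupExample : MonoBehaviour
{
       public Transform cameraRig; // drag [CameraRig] here

       void Start()
       {
              var go = new GameObject("BaseStationFrustum");
              go.transform.SetParent(cameraRig, false);

              // Let the tracked-object component position the frustum at the base station.
              var tracked = go.AddComponent<SteamVR_TrackedObject>();
              tracked.origin = cameraRig;
              tracked.index = SteamVR_TrackedObject.EIndex.Device3; // assumed index of a base station

              // RequireComponent adds MeshFilter/MeshRenderer automatically; give it a material so it is visible.
              var frustum = go.AddComponent<SteamVR_Frustum>();
              frustum.index = tracked.index;
              go.GetComponent<MeshRenderer>().sharedMaterial = new Material(Shader.Find("Standard"));
       }
}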

10.11.       SteamVR_GameView.cs

This is the companion window shown on the PC. The component is added to head in the camera rig; head also carries a Camera component, and this GameView script drives that Camera. The Camera is set not to clear, and its CullingMask is Nothing, so by default it sees nothing at all (figure omitted): the camera preview is pitch black. What it shows is produced by the code below through manual rendering.

 

Requires a Camera component.

[RequireComponent(typeof(Camera))]
public class SteamVR_GameView : MonoBehaviour
{

Note that the whole class body is compiled out in Unity 5.4 and later — it is deprecated there.
#if (UNITY_5_3 || UNITY_5_2 || UNITY_5_1 || UNITY_5_0) // DEPRECATED in Unity 5.4+

scale is how much larger the game view is compared with what is seen in the HMD.
       public float scale = 1.5f;

Whether to draw the overlay in the game view as well.
       public bool drawOverlay = true;

Material (shader) used to draw the overlay.
       static Material overlayMaterial;

      
       void OnEnable()
       {
              if (overlayMaterial == null)
              {
                     overlayMaterial = new Material(Shader.Find("Custom/SteamVR_Overlay"));
              }
       }

OnPostRender only runs on an object that has a Camera, and only for that camera.
       void OnPostRender()
       {

    Post-process the final image.
              var vr = SteamVR.instance;
              var camera = GetComponent<Camera>();

    The game-view camera's aspect ratio may differ from the HMD's.
              var aspect = scale * camera.aspect / vr.aspect;

              var x0 = -scale;
              var x1 = scale;
              var y0 = aspect;
              var y1 = -aspect;

    This uses SteamVR_Camera's blitMaterial; its exact purpose was discussed in the SteamVR_Camera analysis — roughly, it can blit out the per-eye images from the stereo scene texture.
              var blitMaterial = SteamVR_Camera.blitMaterial;

    And it uses SteamVR_Camera's scene texture, which SteamVR_Render renders into, so at this point it holds the rendered frame.
              blitMaterial.mainTexture = SteamVR_Camera.GetSceneTexture(camera.hdr);

              GL.PushMatrix();
              GL.LoadOrtho();
#if !(UNITY_5_0)
              blitMaterial.SetPass(0);
#else
              blitMaterial.SetPass(QualitySettings.activeColorSpace == ColorSpace.Linear ? 1 : 0);
#endif

    Draw the scene texture obtained above as a quad on the current screen (camera).
              GL.Begin(GL.QUADS);
              GL.TexCoord2(0.0f, 0.0f); GL.Vertex3(x0, y0, 0);
              GL.TexCoord2(1.0f, 0.0f); GL.Vertex3(x1, y0, 0);
              GL.TexCoord2(1.0f, 1.0f); GL.Vertex3(x1, y1, 0);
              GL.TexCoord2(0.0f, 1.0f); GL.Vertex3(x0, y1, 0);
              GL.End();
              GL.PopMatrix();

              var overlay = SteamVR_Overlay.instance;
              if (overlay && overlay.texture && overlayMaterial && drawOverlay)
              {

        If the overlay should be shown as well, do the same thing with the overlay texture: draw it onto the current screen (camera).
                     var texture = overlay.texture;
                     overlayMaterial.mainTexture = texture;

        Preserve the aspect ratio.
                     var u0 = 0.0f;
                     var v0 = 1.0f - (float)Screen.height / texture.height;
                     var u1 = (float)Screen.width / texture.width;
                     var v1 = 1.0f;

                     GL.PushMatrix();
                     GL.LoadOrtho();
#if !(UNITY_5_0)
                     overlayMaterial.SetPass(QualitySettings.activeColorSpace == ColorSpace.Linear ? 1 : 0);
#else
                     overlayMaterial.SetPass(0);
#endif
                     GL.Begin(GL.QUADS);
                     GL.TexCoord2(u0, v0); GL.Vertex3(-1, -1, 0);
                     GL.TexCoord2(u1, v0); GL.Vertex3( 1, -1, 0);
                     GL.TexCoord2(u1, v1); GL.Vertex3( 1,  1, 0);
                     GL.TexCoord2(u0, v1); GL.Vertex3(-1,  1, 0);
                     GL.End();
                     GL.PopMatrix();
              }
       }
#endif
}

 
