Updated Marshalled data types #199

Open · wants to merge 2 commits into base: master
5 changes: 5 additions & 0 deletions OSVR-Unity/Assets/OSVRUnity/src/Math.cs
@@ -81,6 +81,11 @@ public static Rect ConvertViewportRenderManager(OSVR.ClientKit.Viewport viewport
{
//Unity expects normalized coordinates, not pixel coordinates
//@todo below assumes left and right eyes split the screen in half horizontally
if(viewport.Width == 0 || viewport.Height == 0)
{
Debug.LogError("[OSVR-Unity] Viewport width/height is 0. Avoiding divide by zero error, returning default viewport.");
return new Rect(0, 0, 0.5f, 1);
}
return new Rect(viewport.Left / viewport.Width, viewport.Bottom / viewport.Height, viewport.Width / viewport.Width, 1);
}

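A brief usage sketch of the guarded conversion above (the viewport values are assumed for illustration, and Math refers to the class this file defines, OSVR.Unity namespace assumed): RenderManager reports viewports in pixels, the helper normalizes them against the viewport's own width and height, and an empty viewport now yields the left-half default instead of a divide-by-zero.

OSVR.ClientKit.Viewport vp = new OSVR.ClientKit.Viewport();
vp.Left = 960; vp.Bottom = 0; vp.Width = 960; vp.Height = 1080;  // right half of a 1920x1080 target, in pixels (assumed)
Rect normalized = Math.ConvertViewportRenderManager(vp);         // (1, 0, 1, 1) with the math above
Rect fallback = Math.ConvertViewportRenderManager(new OSVR.ClientKit.Viewport()); // zero size -> default Rect(0, 0, 0.5f, 1)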
37 changes: 21 additions & 16 deletions OSVR-Unity/Assets/OSVRUnity/src/OsvrRenderManager.cs
@@ -72,7 +72,7 @@ private struct OSVR_ViewportDescription

//Create and Register RenderBuffers
[DllImport(PluginName)]
private static extern Byte
private static extern Byte
ConstructRenderBuffers();

//Create a RenderManager object in the plugin, passing in a ClientContext
@@ -82,11 +82,11 @@ private static extern Byte

[DllImport(PluginName)]
private static extern OSVR.ClientKit.Pose3
GetEyePose(int eye);
GetEyePose(Byte eye);

[DllImport(PluginName)]
private static extern OSVR_ProjectionMatrix
GetProjectionMatrix(int eye);
GetProjectionMatrix(Byte eye);

//get the render event function that we'll call every frame via GL.IssuePluginEvent
[DllImport(PluginName)]
@@ -95,7 +95,7 @@ private static extern IntPtr

[DllImport(PluginName)]
private static extern OSVR_ViewportDescription
GetViewport(int eye);
GetViewport(Byte eye);

// Allow for calling into the debug console from C++
[DllImport(PluginName)]
@@ -108,8 +108,8 @@ private static extern void
// @todo native code may change the return type to OSVR_ReturnCode.
// If so, change the return type here to Byte
[DllImport(PluginName)]
private static extern int
SetColorBufferFromUnity(System.IntPtr texturePtr, int eye);
private static extern int
SetColorBufferFromUnity(System.IntPtr texturePtr, Byte eye);
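The extern declarations above narrow the eye parameter from int to Byte so the managed signatures match the parameter width the native plugin is assumed to expect: with default P/Invoke marshalling a C# int crosses as a 32-bit argument, while Byte crosses as 8 bits. A minimal sketch of the pattern, with a purely hypothetical native export (the plugin's real C++ signatures are not part of this diff):

// Assumed native side, for illustration only:  extern "C" OSVR_Pose3 GetEyePose(uint8_t eye);
internal static class EyeIndexMarshalSketch
{
    [DllImport("osvrUnityRenderingPlugin")]
    private static extern OSVR.ClientKit.Pose3 GetEyePose(Byte eye);

    internal static OSVR.ClientKit.Pose3 PoseForEye(uint eyeIndex)
    {
        return GetEyePose((Byte)eyeIndex);  // callers narrow their uint/int index explicitly
    }
}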

[DllImport(PluginName)]
private static extern void
@@ -236,16 +236,16 @@ public void ClearRoomToWorldTransform()
}

//Get the pose of a given eye from RenderManager
public OSVR.ClientKit.Pose3 GetRenderManagerEyePose(int eye)
public OSVR.ClientKit.Pose3 GetRenderManagerEyePose(Byte eye)
{
return GetEyePose(eye);
}

//Get the viewport of a given eye from RenderManager
public OSVR.ClientKit.Viewport GetEyeViewport(int eye)
public OSVR.ClientKit.Viewport GetEyeViewport(Byte eye)
{
OSVR.ClientKit.Viewport v = new OSVR.ClientKit.Viewport();
OSVR_ViewportDescription viewportDescription = GetViewport(eye);
OSVR_ViewportDescription viewportDescription = GetViewport((Byte)eye);
v.Left = (int)viewportDescription.left;
v.Bottom = (int)viewportDescription.lower;
v.Width = (int)viewportDescription.width;
@@ -254,17 +254,22 @@ public OSVR.ClientKit.Viewport GetEyeViewport(int eye)
}

//Get the projection matrix of a given eye from RenderManager
public Matrix4x4 GetEyeProjectionMatrix(int eye)
public Matrix4x4 GetEyeProjectionMatrix(Byte eye)
{
OSVR_ProjectionMatrix pm = GetProjectionMatrix(eye);
OSVR_ProjectionMatrix pm = GetProjectionMatrix((Byte)eye);
return PerspectiveOffCenter((float)pm.left, (float)pm.right, (float)pm.bottom, (float)pm.top, (float)pm.nearClip, (float)pm.farClip);

}

//Returns a Unity Matrix4x4 from the provided boundaries
//from http://docs.unity3d.com/ScriptReference/Camera-projectionMatrix.html
static Matrix4x4 PerspectiveOffCenter(float left, float right, float bottom, float top, float near, float far)
{
if (right - left == 0 || top - bottom == 0 || far - near == 0)
{
Debug.LogError("Aborting Projection Matrix calculation to avoid DivideByZero error.");
return new Matrix4x4();
}
float x = 2.0F * near / (right - left);
float y = 2.0F * near / (top - bottom);
float a = (right + left) / (right - left);
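The rest of PerspectiveOffCenter is collapsed in this diff. For context, this is a sketch of the full off-center construction as shown on the Unity documentation page cited in the comment above (the collapsed lines presumably match it), with the new zero-denominator guard from this PR at the top of the body:

static Matrix4x4 PerspectiveOffCenter(float left, float right, float bottom, float top, float near, float far)
{
    // guard from this PR: bail out on degenerate bounds instead of dividing by zero
    if (right - left == 0 || top - bottom == 0 || far - near == 0)
    {
        return new Matrix4x4();
    }
    float x = 2.0F * near / (right - left);
    float y = 2.0F * near / (top - bottom);
    float a = (right + left) / (right - left);
    float b = (top + bottom) / (top - bottom);
    float c = -(far + near) / (far - near);
    float d = -(2.0F * far * near) / (far - near);
    float e = -1.0F;
    Matrix4x4 m = new Matrix4x4();
    m[0, 0] = x; m[0, 1] = 0; m[0, 2] = a; m[0, 3] = 0;
    m[1, 0] = 0; m[1, 1] = y; m[1, 2] = b; m[1, 3] = 0;
    m[2, 0] = 0; m[2, 1] = 0; m[2, 2] = c; m[2, 3] = d;
    m[3, 0] = 0; m[3, 1] = 0; m[3, 2] = e; m[3, 3] = 0;
    return m;
}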
@@ -303,16 +308,16 @@ public int CreateRenderManager(OSVR.ClientKit.ClientContext clientContext)
catch (DllNotFoundException e)
{
result = -1;
Debug.LogError("[OSVR-Unity] Could not load " + e.Message +
"\nosvrUnityRenderingPlugin.dll, or one of its dependencies, is missing from the project " +
Debug.LogError("[OSVR-Unity] Could not load " + e.Message +
"\nosvrUnityRenderingPlugin.dll, or one of its dependencies, is missing from the project " +
"or architecture doesn't match.\n");
}
return result;
}

//Pass pointer to eye-camera RenderTexture to the Unity Rendering Plugin
public void SetEyeColorBuffer(IntPtr colorBuffer, int eye)
{
public void SetEyeColorBuffer(IntPtr colorBuffer, Byte eye)
{
SetColorBufferFromUnity(colorBuffer, eye);
}

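Taken together, the public wrappers in this file now carry the eye index as Byte end to end. A condensed usage sketch (the instance names and eye value are assumptions, including the 0 = left / 1 = right convention):

Byte eye = 1;
OsvrRenderManager rm = renderManagerInstance;                 // assumed reference to this component
OSVR.ClientKit.Pose3 pose = rm.GetRenderManagerEyePose(eye);
OSVR.ClientKit.Viewport viewport = rm.GetEyeViewport(eye);
Matrix4x4 projection = rm.GetEyeProjectionMatrix(eye);
rm.SetEyeColorBuffer(eyeTexture.GetNativeTexturePtr(), eye);  // eyeTexture is an assumed RenderTexture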
42 changes: 21 additions & 21 deletions OSVR-Unity/Assets/OSVRUnity/src/VREye.cs
@@ -40,16 +40,16 @@ public class VREye : MonoBehaviour
private VRSurface[] _surfaces; //the surfaces associated with this eye
private uint _surfaceCount;
private uint _eyeIndex;


#endregion
#region Public Variables
public uint EyeIndex
{
get { return _eyeIndex; }
set { _eyeIndex = value; }
}
public VRSurface[] Surfaces { get { return _surfaces; } }
public VRSurface[] Surfaces { get { return _surfaces; } }
public uint SurfaceCount { get { return _surfaceCount; } }
public VRViewer Viewer
{
@@ -75,7 +75,7 @@ void Init()
{
//cache:
cachedTransform = transform;
}
}
#endregion

// Updates the position and rotation of the eye
@@ -117,23 +117,23 @@ public void UpdateSurfaces()
//get viewport from ClientKit and set surface viewport
if (Viewer.DisplayController.UseRenderManager)
{
viewport = Viewer.DisplayController.RenderManager.GetEyeViewport((int)EyeIndex);
viewport = Viewer.DisplayController.RenderManager.GetEyeViewport((Byte)EyeIndex);
surface.SetViewportRect(Math.ConvertViewportRenderManager(viewport));

//get projection matrix from RenderManager and set surface projection matrix
surface.SetProjectionMatrix(Viewer.DisplayController.RenderManager.GetEyeProjectionMatrix((int)EyeIndex));
surface.SetProjectionMatrix(Viewer.DisplayController.RenderManager.GetEyeProjectionMatrix((Byte)EyeIndex));

surface.Render();
}
else
{
//get viewport from ClientKit and set surface viewport
viewport = Viewer.DisplayController.DisplayConfig.GetRelativeViewportForViewerEyeSurface(
Viewer.ViewerIndex, (byte)_eyeIndex, surfaceIndex);
Viewer.ViewerIndex, (Byte)_eyeIndex, surfaceIndex);

int displayInputIndex = Viewer.DisplayController.DisplayConfig.GetViewerEyeSurfaceDisplayInputIndex(Viewer.ViewerIndex, (byte)_eyeIndex, surfaceIndex);
int displayInputIndex = Viewer.DisplayController.DisplayConfig.GetViewerEyeSurfaceDisplayInputIndex(Viewer.ViewerIndex, (Byte)_eyeIndex, surfaceIndex);
int numDisplayInputs = Viewer.DisplayController.DisplayConfig.GetNumDisplayInputs();
surface.SetViewportRect(Math.ConvertViewport(viewport, Viewer.DisplayController.DisplayConfig.GetDisplayDimensions((byte)displayInputIndex),
surface.SetViewportRect(Math.ConvertViewport(viewport, Viewer.DisplayController.DisplayConfig.GetDisplayDimensions((Byte)displayInputIndex),
numDisplayInputs, (int)_eyeIndex, (int)Viewer.DisplayController.TotalDisplayWidth));

//get projection matrix from ClientKit and set surface projection matrix
Expand All @@ -145,7 +145,7 @@ public void UpdateSurfaces()

//render the surface
surface.Render();
}
}

}
}
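A note on the casts above: the C# byte keyword is an alias for System.Byte, so the (byte) and (Byte) casts compile to exactly the same thing; the substantive change is on the RenderManager path, where the eye index used to be widened to int before crossing into native code. A small sketch of the narrowing pattern (values assumed):

uint eyeIndex = 1;                  // stored as uint on the eye component
Byte nativeEye = (Byte)eyeIndex;    // RenderManager path (GetEyeViewport, GetEyeProjectionMatrix)
byte clientKitEye = (byte)eyeIndex; // ClientKit path; same 8-bit type, different spelling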
@@ -209,14 +209,14 @@ public void CreateSurfaces(uint surfaceCount)
//get distortion parameters
OSVR.ClientKit.RadialDistortionParameters distortionParameters =
Viewer.DisplayController.DisplayConfig.GetViewerEyeSurfaceRadialDistortion(
Viewer.ViewerIndex, (byte)_eyeIndex, surfaceIndex);
Viewer.ViewerIndex, (byte)_eyeIndex, surfaceIndex);
surface.SetDistortion(distortionParameters);
}

//render manager
if (Viewer.DisplayController.UseRenderManager)
{
surface.SetViewport(Viewer.DisplayController.RenderManager.GetEyeViewport((int)EyeIndex));
surface.SetViewport(Viewer.DisplayController.RenderManager.GetEyeViewport((Byte)EyeIndex));

//create a RenderTexture for this eye's camera to render into
RenderTexture renderTexture = new RenderTexture(surface.Viewport.Width, surface.Viewport.Height, 24, RenderTextureFormat.Default);
@@ -247,7 +247,7 @@ public void CreateSurfaces(uint surfaceCount)

//distortion
bool useDistortion = Viewer.DisplayController.DisplayConfig.DoesViewerEyeSurfaceWantDistortion(Viewer.ViewerIndex, (byte)_eyeIndex, surfaceIndex);
if(useDistortion)
if (useDistortion)
{
//@todo figure out which type of distortion to use
//right now, there is only one option, SurfaceRadialDistortion
@@ -257,22 +257,22 @@
Viewer.ViewerIndex, (byte)_eyeIndex, surfaceIndex);

surface.SetDistortion(distortionParameters);
}
}

//render manager
if(Viewer.DisplayController.UseRenderManager)
if (Viewer.DisplayController.UseRenderManager)
{
//Set the surfaces viewport from RenderManager
surface.SetViewport(Viewer.DisplayController.RenderManager.GetEyeViewport((int)EyeIndex));
surface.SetViewport(Viewer.DisplayController.RenderManager.GetEyeViewport((Byte)EyeIndex));

//create a RenderTexture for this eye's camera to render into
RenderTexture renderTexture = new RenderTexture(surface.Viewport.Width, surface.Viewport.Height, 24, RenderTextureFormat.Default);
if (QualitySettings.antiAliasing > 0)
{
renderTexture.antiAliasing = QualitySettings.antiAliasing;
}
surface.SetRenderTexture(renderTexture);
}
surface.SetRenderTexture(renderTexture);
}
}
}
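A note on the antialiasing guard in the texture setup above: QualitySettings.antiAliasing reports 0 when MSAA is disabled, while a RenderTexture only accepts sample counts of 1, 2, 4 or 8, so the value is copied across only when it is non-zero. A minimal sketch (texture size and depth bits are assumed):

RenderTexture rt = new RenderTexture(1280, 720, 24, RenderTextureFormat.Default);
if (QualitySettings.antiAliasing > 0)
{
    rt.antiAliasing = QualitySettings.antiAliasing;   // 2, 4 or 8 from the quality settings
}
// otherwise leave the default of 1 (no MSAA); 0 is not a valid sample count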

@@ -284,7 +284,7 @@ private void CopyCamera(Camera srcCamera, Camera destCamera)
destCamera.CopyFrom(srcCamera);
destCamera.depth = 0;
//@todo Copy other components attached to the DisplayController?
}
}
}
}
}
7 changes: 4 additions & 3 deletions OSVR-Unity/Assets/OSVRUnity/src/VRSurface.cs
@@ -26,6 +26,7 @@
using UnityEngine.Rendering;
using System.Collections;
using System.Runtime.InteropServices;
using System;

namespace OSVR
{
@@ -48,7 +49,7 @@ public class VRSurface : MonoBehaviour
public Camera Camera { get { return _camera; } set { _camera = value; } }
public uint SurfaceIndex { get { return _surfaceIndex; } set { _surfaceIndex = value; } }
public VREye Eye { get { return _eye; } set { _eye = value; } }
public OSVR.ClientKit.Viewport Viewport { get { return _viewport;} set {_viewport = value;} }
public OSVR.ClientKit.Viewport Viewport { get { return _viewport; } set { _viewport = value; } }

[HideInInspector]
public K1RadialDistortion DistortionEffect
@@ -137,9 +138,9 @@ public void SetRenderTexture(RenderTexture rt)
RenderToTexture = rt;
Camera.targetTexture = RenderToTexture;
RenderTexture.active = RenderToTexture;

//Set the native texture pointer so we can access this texture from the plugin
Eye.Viewer.DisplayController.RenderManager.SetEyeColorBuffer(RenderToTexture.GetNativeTexturePtr(), (int)Eye.EyeIndex);
Eye.Viewer.DisplayController.RenderManager.SetEyeColorBuffer(RenderToTexture.GetNativeTexturePtr(), (Byte)Eye.EyeIndex);
}
public RenderTexture GetRenderTexture()
{
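For context on the SetRenderTexture change above: the eye camera renders into the RenderTexture, and its native pointer is registered with the RenderManager plugin, now keyed by a Byte eye index; the plugin then presents the registered buffers when the render event fires. A minimal sketch of that hand-off (the event id and instance names are assumptions; GetRenderEventFunc is the extern declared in OsvrRenderManager.cs):

// One-time setup: register this eye's texture with the native plugin.
surface.SetRenderTexture(renderTexture);        // sets Camera.targetTexture and forwards
                                                // renderTexture.GetNativeTexturePtr() with (Byte)Eye.EyeIndex

// Per frame: let the native plugin present the registered buffers on the render thread.
GL.IssuePluginEvent(GetRenderEventFunc(), 0);   // event id 0 is an assumption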