diff --git a/hellocardboard-android/src/main/AndroidManifest.xml b/hellocardboard-android/src/main/AndroidManifest.xml index 1318cc44..14008db6 100644 --- a/hellocardboard-android/src/main/AndroidManifest.xml +++ b/hellocardboard-android/src/main/AndroidManifest.xml @@ -18,10 +18,10 @@ android:supportsRtl="true" android:theme="@style/AppTheme"> diff --git a/hellocardboard-ios/HelloCardboardViewController.mm b/hellocardboard-ios/HelloCardboardViewController.mm index 9d94406a..e587736e 100644 --- a/hellocardboard-ios/HelloCardboardViewController.mm +++ b/hellocardboard-ios/HelloCardboardViewController.mm @@ -85,9 +85,14 @@ - (BOOL)prefersHomeIndicatorAutoHidden { return true; } +- (void)viewLayoutMarginsDidChange { + [super viewLayoutMarginsDidChange]; + _updateParams = YES; +} + - (UIInterfaceOrientationMask)supportedInterfaceOrientations { - // Cardboard only supports landscape right orientation for inserting the phone in the viewer. - return UIInterfaceOrientationMaskLandscapeRight; + // Cardboard supports all screen orientations except Portrait Upside Down + return UIInterfaceOrientationMaskAllButUpsideDown; } - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { @@ -119,13 +124,6 @@ - (BOOL)updateCardboardParams { int height = screenRect.size.height * screenScale; int width = screenRect.size.width * screenScale; - // Rendering coordinates asumes landscape orientation. - if (height > width) { - int temp = height; - height = width; - width = temp; - } - // Create CardboardLensDistortion. CardboardLensDistortion_destroy(_cardboardLensDistortion); _cardboardLensDistortion = diff --git a/sdk/CMakeLists.txt b/sdk/CMakeLists.txt index 6a222dbd..ae8bfa9b 100644 --- a/sdk/CMakeLists.txt +++ b/sdk/CMakeLists.txt @@ -50,6 +50,8 @@ file(GLOB screen_params_srcs "screen_params/android/*.cc") file(GLOB device_params_srcs "device_params/android/*.cc") # Rendering Sources file(GLOB rendering_srcs "rendering/opengl_*.cc") +# 6DoF sources +file(GLOB sixdof_srcs "sixdof/*.cc") # === Cardboard Unity JNI === file(GLOB cardboard_unity_jni_srcs "unity/android/*.cc") @@ -73,6 +75,7 @@ add_library(cardboard_api SHARED ${screen_params_srcs} ${device_params_srcs} ${rendering_srcs} + ${sixdof_srcs} # Cardboard Unity JNI sources ${cardboard_unity_jni_srcs} # Cardboard Unity Wrapper sources @@ -81,8 +84,9 @@ add_library(cardboard_api SHARED ${cardboard_xr_provider_srcs}) # Includes -target_include_directories(cardboard_api - PRIVATE ../third_party/unity_plugin_api) +target_include_directories(cardboard_api PRIVATE + ../third_party/unity_plugin_api + include) # Build target_link_libraries(cardboard_api diff --git a/sdk/cardboard.cc b/sdk/cardboard.cc index ffef56b5..3ad5c706 100644 --- a/sdk/cardboard.cc +++ b/sdk/cardboard.cc @@ -119,6 +119,13 @@ void Cardboard_initializeAndroid(JavaVM* vm, jobject context) { } #endif +CardboardScreenOrientation CardboardScreenParameters_getScreenOrientation() { + if (CARDBOARD_IS_NOT_INITIALIZED()) { + return kUnknown; + } + return cardboard::screen_params::getScreenOrientation(); +} + CardboardLensDistortion* CardboardLensDistortion_create( const uint8_t* encoded_device_params, int size, int display_width, int display_height) { @@ -313,6 +320,18 @@ void CardboardHeadTracker_getPose(CardboardHeadTracker* head_tracker, std::memcpy(orientation, &out_orientation[0], 4 * sizeof(float)); } +// Aryzon 6DoF +void CardboardHeadTracker_addSixDoFData(CardboardHeadTracker* head_tracker, + int64_t timestamp_ns, + float* position, + float* orientation) { + if (CARDBOARD_IS_NOT_INITIALIZED() 
      || CARDBOARD_IS_ARG_NULL(head_tracker)) {
+    return;
+  }
+
+  static_cast<cardboard::HeadTracker*>(head_tracker)
+      ->AddSixDoFData(timestamp_ns, position, orientation);
+}
+
 void CardboardQrCode_getSavedDeviceParams(uint8_t** encoded_device_params,
                                           int* size) {
   if (CARDBOARD_IS_NOT_INITIALIZED() ||
diff --git a/sdk/head_tracker.cc b/sdk/head_tracker.cc
index f859e076..b7625c9a 100644
--- a/sdk/head_tracker.cc
+++ b/sdk/head_tracker.cc
@@ -19,28 +19,48 @@
 #include "util/logging.h"
 #include "util/vector.h"
 #include "util/vectorutils.h"
+#include "screen_params.h"
 
 namespace cardboard {
 
-// TODO(b/135488467): Support different screen orientations.
-const Rotation HeadTracker::kEkfToHeadTrackerRotation =
-    Rotation::FromYawPitchRoll(-M_PI / 2.0, 0, -M_PI / 2.0);
-
-const Rotation HeadTracker::kSensorToDisplayRotation =
-    Rotation::FromAxisAndAngle(Vector3(0, 0, 1), M_PI / 2.0);
+// Aryzon 6DoF
+constexpr int kRotationSamples = 10;
+constexpr int kPositionSamples = 6;
+// Maximum time difference between the latest pose state timestamp and the
+// latest 6DoF timestamp. If more time than this has passed, the last known
+// 6DoF position is used instead.
+constexpr int64_t kMaxSixDoFTimeDifference = 200000000;
+constexpr float kReduceBiasRate = 0.05;
 
 HeadTracker::HeadTracker()
     : is_tracking_(false),
       sensor_fusion_(new SensorFusionEkf()),
       latest_gyroscope_data_({0, 0, Vector3::Zero()}),
       accel_sensor_(new SensorEventProducer<AccelerometerData>()),
-      gyro_sensor_(new SensorEventProducer<GyroscopeData>()) {
+      gyro_sensor_(new SensorEventProducer<GyroscopeData>()),
+      // Aryzon 6DoF
+      rotation_data_(new RotationData(kRotationSamples)),
+      position_data_(new PositionData(kPositionSamples)) {
   on_accel_callback_ = [&](const AccelerometerData& event) {
     OnAccelerometerData(event);
   };
   on_gyro_callback_ = [&](const GyroscopeData& event) {
     OnGyroscopeData(event);
   };
+
+  switch (screen_params::getScreenOrientation()) {
+    case kLandscapeLeft:
+      ekf_to_head_tracker_ =
+          Rotation::FromYawPitchRoll(-M_PI / 2.0, 0, -M_PI / 2.0);
+      break;
+    case kLandscapeRight:
+      ekf_to_head_tracker_ =
+          Rotation::FromYawPitchRoll(M_PI / 2.0, 0, M_PI / 2.0);
+      break;
+    default:  // Portrait and PortraitUpsideDown
+      ekf_to_head_tracker_ =
+          Rotation::FromYawPitchRoll(M_PI / 2.0, M_PI / 2.0, M_PI / 2.0);
+      break;
+  }
+  ekf_to_sixDoF_ = Rotation::Identity();
+  smooth_ekf_to_sixDoF_ = Rotation::Identity();
+  steady_start_ = Rotation::Identity();
+  steady_frames_ = -1;
 }
 
 HeadTracker::~HeadTracker() { UnregisterCallbacks(); }
@@ -58,35 +78,79 @@ void HeadTracker::Pause() {
   event.data = Vector3::Zero();
   OnGyroscopeData(event);
 
-  is_tracking_ = false;
 }
 
 void HeadTracker::Resume() {
+  if (!is_tracking_) {
+    RegisterCallbacks();
+  }
+  steady_frames_ = -1;
+  steady_start_ = Rotation::Identity();
   is_tracking_ = true;
-  RegisterCallbacks();
 }
 
 void HeadTracker::GetPose(int64_t timestamp_ns,
                           std::array<float, 3>& out_position,
                           std::array<float, 4>& out_orientation) const {
-  const Rotation predicted_rotation =
-      sensor_fusion_->PredictRotation(timestamp_ns);
-
-  // In order to update our pose as the sensor changes, we begin with the
-  // inverse default orientation (the orientation returned by a reset sensor),
-  // apply the current sensor transformation, and then transform into display
-  // space.
-  const Vector4 orientation = (kSensorToDisplayRotation * predicted_rotation *
-                               kEkfToHeadTrackerRotation)
-                                  .GetQuaternion();
-
-  out_orientation[0] = static_cast<float>(orientation[0]);
-  out_orientation[1] = static_cast<float>(orientation[1]);
-  out_orientation[2] = static_cast<float>(orientation[2]);
-  out_orientation[3] = static_cast<float>(orientation[3]);
-
-  out_position = ApplyNeckModel(out_orientation, 1.0);
+  Rotation sensor_to_display;
+
+  switch (screen_params::getScreenOrientation()) {
+    case kLandscapeLeft:
+      sensor_to_display = Rotation::FromAxisAndAngle(Vector3(0, 0, 1), M_PI / 2.0);
+      break;
+    case kLandscapeRight:
+      sensor_to_display = Rotation::FromAxisAndAngle(Vector3(0, 0, 1), -M_PI / 2.0);
+      break;
+    default:  // Portrait and PortraitUpsideDown
+      sensor_to_display = Rotation::FromAxisAndAngle(Vector3(0, 0, 1), 0.);
+      break;
+  }
+
+  const RotationState rotation_state = sensor_fusion_->GetLatestRotationState();
+  const Rotation unpredicted_rotation = rotation_state.sensor_from_start_rotation;
+  const Rotation predicted_rotation = sensor_fusion_->PredictRotation(timestamp_ns);
+
+  const Rotation adjusted_unpredicted_rotation =
+      (sensor_to_display * unpredicted_rotation * ekf_to_head_tracker_);
+
+  const Rotation adjusted_rotation =
+      (sensor_to_display * predicted_rotation * ekf_to_head_tracker_);
+
+  // Save a rotation sample with timestamp to be used in AddSixDoFData().
+  rotation_data_->AddSample(adjusted_unpredicted_rotation.GetQuaternion(),
+                            rotation_state.timestamp);
+
+  if (position_data_->IsValid() &&
+      rotation_state.timestamp - position_data_->GetLatestTimestamp() <
+          kMaxSixDoFTimeDifference) {
+    // 6DoF was recently updated.
+    const Vector4 orientation =
+        (adjusted_rotation * smooth_ekf_to_sixDoF_).GetQuaternion();
+
+    out_orientation[0] = static_cast<float>(orientation[0]);
+    out_orientation[1] = static_cast<float>(orientation[1]);
+    out_orientation[2] = static_cast<float>(orientation[2]);
+    out_orientation[3] = static_cast<float>(orientation[3]);
+
+    Vector3 p = position_data_->GetExtrapolatedForTimeStamp(timestamp_ns);
+    out_position = {(float)p[0], (float)p[1], (float)p[2]};
+  } else {
+    // 6DoF was not recently updated.
+    const Vector4 orientation = adjusted_rotation.GetQuaternion();
+
+    out_orientation[0] = static_cast<float>(orientation[0]);
+    out_orientation[1] = static_cast<float>(orientation[1]);
+    out_orientation[2] = static_cast<float>(orientation[2]);
+    out_orientation[3] = static_cast<float>(orientation[3]);
+
+    out_position = ApplyNeckModel(out_orientation, 1.0);
+    if (position_data_->IsValid()) {
+      // Apply the last known 6DoF position if 6DoF data was previously added,
+      // while still applying the neck model.
+      Vector3 last_known_position_ = position_data_->GetLatestData();
+      out_position[0] += (float)last_known_position_[0];
+      out_position[1] += (float)last_known_position_[1];
+      out_position[2] += (float)last_known_position_[2];
+    }
+  }
 }
 
 void HeadTracker::RegisterCallbacks() {
@@ -114,4 +178,63 @@ void HeadTracker::OnGyroscopeData(const GyroscopeData& event) {
   sensor_fusion_->ProcessGyroscopeSample(event);
 }
 
+Rotation ShortestRotation(Rotation a, Rotation b) {
+  Vector4 aQ = a.GetQuaternion();
+  Vector4 bQ = b.GetQuaternion();
+
+  if (Dot(aQ, bQ) < 0) {
+    return -a * Rotation::FromQuaternion(-bQ);
+  } else {
+    return -a * b;
+  }
+}
+
+// Aryzon 6DoF
+void HeadTracker::AddSixDoFData(int64_t timestamp_ns, float* pos,
+                                float* orientation) {
+  if (!is_tracking_) {
+    return;
+  }
+  if (position_data_->GetLatestTimestamp() != timestamp_ns) {
+    position_data_->AddSample(Vector3(pos[0], pos[1], pos[2]), timestamp_ns);
+  }
+
+  // There will be a difference in rotation between the EKF and the 6DoF
+  // tracker. The 6DoF sensor is the 'truth' but is slower than the EKF.
+  // When the device is steady, the difference between the rotations is saved,
+  // and smooth_ekf_to_sixDoF_ is slowly adjusted to close the gap between the
+  // EKF and the 6DoF tracker. This value is used in GetPose().
+
+  if (position_data_->IsValid() && rotation_data_->IsValid()) {
+    if ((steady_frames_ == 30 || steady_frames_ < 0) &&
+        rotation_data_->GetLatestTimeStamp() > timestamp_ns) {
+      // Match rotation timestamps of the EKF to the 6DoF tracker by
+      // interpolating the saved EKF rotations. The 6DoF timestamp should be
+      // before the latest rotation_data timestamp, otherwise extrapolation
+      // would be needed, which is less accurate.
+      const Rotation ekf_at_time_of_sixDoF = Rotation::FromQuaternion(
+          rotation_data_->GetInterpolatedForTimeStamp(timestamp_ns));
+      const Rotation six_DoF_rotation = Rotation::FromQuaternion(
+          Vector4(orientation[0], orientation[1], orientation[2], orientation[3]));
+
+      ekf_to_sixDoF_ = ShortestRotation(ekf_at_time_of_sixDoF, six_DoF_rotation);
+    } else if (steady_frames_ == 0) {
+      steady_start_ = Rotation::FromQuaternion(rotation_data_->GetLatestData());
+    }
+
+    const Rotation steady_difference =
+        steady_start_ * -Rotation::FromQuaternion(rotation_data_->GetLatestData());
+
+    if (steady_difference.GetQuaternion()[3] > 0.9995) {
+      steady_frames_ += 1;
+    } else {
+      steady_frames_ = 0;
+    }
+
+    const Rotation bias_to_fill = ShortestRotation(smooth_ekf_to_sixDoF_, ekf_to_sixDoF_);
+    Vector3 axis;
+    double angle;
+    bias_to_fill.GetAxisAndAngle(&axis, &angle);
+
+    const Rotation add_to_bias = Rotation::FromAxisAndAngle(axis, angle * kReduceBiasRate);
+
+    smooth_ekf_to_sixDoF_ *= add_to_bias;
+  }
+}
+
 }  // namespace cardboard
diff --git a/sdk/head_tracker.h b/sdk/head_tracker.h
index f20b3aad..e8fdac45 100644
--- a/sdk/head_tracker.h
+++ b/sdk/head_tracker.h
@@ -26,6 +26,13 @@
 #include "sensors/sensor_fusion_ekf.h"
 #include "util/rotation.h"
 
+// Aryzon 6DoF
+#include "sixdof/rotation_data.h"
+#include "sixdof/position_data.h"
+
+// Aryzon multiple orientations
+#include "screen_params.h"
+
 namespace cardboard {
 
 // HeadTracker encapsulates pose tracking by connecting sensors
@@ -46,6 +53,12 @@ class HeadTracker {
   // TODO(b/135488467): Support different display to sensor orientations.
   void GetPose(int64_t timestamp_ns, std::array<float, 3>& out_position,
                std::array<float, 4>& out_orientation) const;
+
+  // Function to be called when receiving 6DoF pose data.
+  //
+  // Aryzon 6DoF
+  // @param timestamp_ns timestamp of the sample in nanoseconds.
+  // @param position pointer to three floats holding the (x, y, z) position.
+  // @param orientation pointer to four floats holding the orientation quaternion.
+ void AddSixDoFData(int64_t timestamp_ns, float* position, float* orientation); private: // Function called when receiving AccelerometerData. @@ -81,9 +94,17 @@ class HeadTracker { // Callback functions registered to the input SingleTypeEventProducer. std::function on_accel_callback_; std::function on_gyro_callback_; - - static const Rotation kEkfToHeadTrackerRotation; - static const Rotation kSensorToDisplayRotation; + + // Aryzon 6DoF + RotationData *rotation_data_; + PositionData *position_data_; + + Rotation ekf_to_sixDoF_; + Rotation smooth_ekf_to_sixDoF_; + Rotation ekf_to_head_tracker_; + + float steady_frames_; + Rotation steady_start_; }; } // namespace cardboard diff --git a/sdk/include/cardboard.h b/sdk/include/cardboard.h index 48f456e8..5243ae24 100644 --- a/sdk/include/cardboard.h +++ b/sdk/include/cardboard.h @@ -42,6 +42,20 @@ typedef enum CardboardEye { kRight = 1, } CardboardEye; +/// Enum to distinguish device screen orientations +typedef enum CardboardScreenOrientation { + /// Landscape Left orientation. + kLandscapeLeft = 0, + /// Portrait orientation. + kPortrait = 1, + /// Landscape Right orientation. + kLandscapeRight = 2, + /// Portrait Upside Down orientation. + kPortraitUpsideDown = 3, + /// Orientation unknown + kUnknown = -1, +} CardboardScreenOrientation; + /// Struct representing a 3D mesh with 3D vertices and corresponding UV /// coordinates. typedef struct CardboardMesh { @@ -196,6 +210,20 @@ void Cardboard_initializeAndroid(JavaVM* vm, jobject context); /// @} +///////////////////////////////////////////////////////////////////////////// +// Screen Parameters +///////////////////////////////////////////////////////////////////////////// +/// @defgroup screen-parameters Screen Parameters +/// @brief This module calculates the screen size and current screen orientation +/// @{ +/// Returns the current device screen orientation +/// +/// @pre If the SDK is not initialized, this function will always return kUnknown +/// +/// @return CardboardScreenOrientation enum +CardboardScreenOrientation CardboardScreenParameters_getScreenOrientation(); +/// @} + ///////////////////////////////////////////////////////////////////////////// // Lens Distortion ///////////////////////////////////////////////////////////////////////////// @@ -503,6 +531,22 @@ void CardboardHeadTracker_getPose(CardboardHeadTracker* head_tracker, int64_t timestamp_ns, float* position, float* orientation); +/// Aryzon 6DoF +/// Sends through the event with pose and timestamp data from 6DoF tracker +/// +/// @pre @p head_tracker Must not be null. +/// When it is unmet, a call to this function results in a no-op. +/// +/// @param[in] head_tracker Head tracker object pointer. +/// @param[in] timestamp_ns The timestamp for the data in +/// nanoseconds in system monotonic clock. +/// @param[out] position 3 floats for (x, y, z). 
+/// @param[out] orientation 4 floats for quaternion +void CardboardHeadTracker_addSixDoFData(CardboardHeadTracker* head_tracker, + int64_t timestamp_ns, + float* position, + float* orientation); + /// @} ///////////////////////////////////////////////////////////////////////////// diff --git a/sdk/screen_params.h b/sdk/screen_params.h index 5ab02405..630abcf0 100644 --- a/sdk/screen_params.h +++ b/sdk/screen_params.h @@ -20,14 +20,21 @@ #include #endif +#include "cardboard.h" + namespace cardboard { namespace screen_params { static constexpr float kMetersPerInch = 0.0254f; + #ifdef __ANDROID__ void initializeAndroid(JavaVM* vm, jobject context); #endif void getScreenSizeInMeters(int width_pixels, int height_pixels, float* out_width_meters, float* out_height_meters); + +/// @brief Returns the current screen orientation. +CardboardScreenOrientation getScreenOrientation(); + } // namespace screen_params } // namespace cardboard diff --git a/sdk/screen_params/android/java/com/google/cardboard/sdk/screenparams/ScreenParamsUtils.java b/sdk/screen_params/android/java/com/google/cardboard/sdk/screenparams/ScreenParamsUtils.java index ba30dbd1..9af43d15 100644 --- a/sdk/screen_params/android/java/com/google/cardboard/sdk/screenparams/ScreenParamsUtils.java +++ b/sdk/screen_params/android/java/com/google/cardboard/sdk/screenparams/ScreenParamsUtils.java @@ -19,17 +19,25 @@ import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.util.DisplayMetrics; +import android.view.Display; +import android.view.OrientationEventListener; +import android.view.Surface; import android.view.WindowManager; +import android.content.res.Configuration; +import android.hardware.SensorManager; /** Utility methods to manage the screen parameters. */ public abstract class ScreenParamsUtils { + /** Holds the screen orientation. */ + private static ScreenOrientation screenOrientation; + /** Holds the screen pixel density. */ public static class ScreenPixelDensity { /** The exact number of pixels per inch in the x direction. */ public final float xdpi; /** The exact number of pixels per inch in the y direction. */ public final float ydpi; - + /** * Constructor. * @@ -68,4 +76,89 @@ public static ScreenPixelDensity getScreenPixelDensity(Context context) { } return new ScreenPixelDensity(displayMetrics.xdpi, displayMetrics.ydpi); } + + private static class ScreenOrientation { + + public static final int LANDSCAPE_LEFT = 0; + public static final int PORTRAIT = 1; + public static final int LANDSCAPE_RIGHT = 2; + public static final int PORTRAIT_UPSIDE_DOWN = 3; + public static final int UNKNOWN = -1; + + private static Context context; + private static int orientation; + private static int previousOrientation; + private static OrientationEventListener orientationEventListener; + + public ScreenOrientation(final Context context) { + this.context = context; + this.orientation = getCurrentOrientation(context); + + this.orientationEventListener = new OrientationEventListener(context, SensorManager.SENSOR_DELAY_UI) { + public void onOrientationChanged(int orientationIn) { + + // A call to getCurrentOrientation() can take a long time so we do not want to call it every frame. + // context.getResources().getConfiguration().orientation detects landscape to portrait changes, this + // is used to detect a change between any landscape (left or right) and any portrait (up or down). + // It does not tell us the difference between landscape left to landscape right. 
We therefore do + // another check near the range where a rotation change may occur from landscape left to right and vice + // versa. This is between 60 and 120 for landscape right and between 240 and 300 for landscape left. + + int newOrientation = context.getResources().getConfiguration().orientation; + if (orientationIn != ORIENTATION_UNKNOWN) { + if (previousOrientation != newOrientation) { + // Device rotate to/from landscape to/from portrait + orientation = getCurrentOrientation(ScreenOrientation.context); + } else if (orientationIn >= 60 && orientationIn <= 120 && orientation != LANDSCAPE_RIGHT) { + // Device possibly rotated from landscape left to landscape right rotation + orientation = getCurrentOrientation(ScreenOrientation.context); + } else if (orientationIn <= 300 && orientationIn >= 240 && orientation != LANDSCAPE_LEFT) { + // Device possibly rotated from landscape right to landscape left rotation + orientation = getCurrentOrientation(ScreenOrientation.context); + } + previousOrientation = newOrientation; + } + } + }; + if (this.orientationEventListener.canDetectOrientation()) { + this.orientationEventListener.enable(); + } + } + } + + public static int getScreenOrientation(Context context) { + if (screenOrientation == null) { + screenOrientation = new ScreenOrientation(context); + } + return screenOrientation.orientation; + } + + // This call takes a long time so we don't call this every frame, only when rotation changes + private static int getCurrentOrientation (Context context) { + Display defaultDisplay; + + if (VERSION.SDK_INT <= VERSION_CODES.Q) { + defaultDisplay = ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE)) + .getDefaultDisplay(); + } else { + defaultDisplay = context.getDisplay(); + } + + int orientation = context.getResources().getConfiguration().orientation; + int rotation = defaultDisplay.getRotation(); + + if (orientation == Configuration.ORIENTATION_LANDSCAPE) { + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) { + return ScreenOrientation.LANDSCAPE_LEFT; + } + return ScreenOrientation.LANDSCAPE_RIGHT; + } else if (orientation == Configuration.ORIENTATION_PORTRAIT) { + if (rotation == Surface.ROTATION_0 || rotation == Surface.ROTATION_90) { + return ScreenOrientation.PORTRAIT; + } + return ScreenOrientation.PORTRAIT_UPSIDE_DOWN; + } + // Unexpected orientation value. 
+ return ScreenOrientation.UNKNOWN; + } } diff --git a/sdk/screen_params/android/screen_params.cc b/sdk/screen_params/android/screen_params.cc index 31566afd..ad69d0dd 100644 --- a/sdk/screen_params/android/screen_params.cc +++ b/sdk/screen_params/android/screen_params.cc @@ -29,6 +29,8 @@ jobject context_; jclass screen_pixel_density_class_; jclass screen_params_utils_class_; +jclass screen_orientation_class_; + struct DisplayMetrics { float xdpi; float ydpi; @@ -43,6 +45,12 @@ void LoadJNIResources(JNIEnv* env) { cardboard::jni::LoadJClass(env, "com/google/cardboard/sdk/screenparams/" "ScreenParamsUtils$ScreenPixelDensity"))); + + screen_orientation_class_ = reinterpret_cast(env->NewGlobalRef( + cardboard::jni::LoadJClass(env, + "com/google/cardboard/sdk/screenparams/" + "ScreenParamsUtils$ScreenOrientation"))); + } DisplayMetrics getDisplayMetrics() { @@ -84,5 +92,20 @@ void getScreenSizeInMeters(int width_pixels, int height_pixels, *out_height_meters = (height_pixels / display_metrics.ydpi) * kMetersPerInch; } +CardboardScreenOrientation getScreenOrientation() { + + JNIEnv* env; + cardboard::jni::LoadJNIEnv(vm_, &env); + + jmethodID get_screen_orientation_method = env->GetStaticMethodID(screen_params_utils_class_, + "getScreenOrientation", + "(Landroid/content/Context;)I"); + + const int screen_orientation = env->CallStaticIntMethod( + screen_params_utils_class_, get_screen_orientation_method, context_); + + return static_cast(screen_orientation); +} + } // namespace screen_params } // namespace cardboard diff --git a/sdk/screen_params/ios/screen_params.mm b/sdk/screen_params/ios/screen_params.mm index 1ecfe80f..ce3ddf8b 100644 --- a/sdk/screen_params/ios/screen_params.mm +++ b/sdk/screen_params/ios/screen_params.mm @@ -18,6 +18,74 @@ #import #import +@interface ScreenOrientationHelper : NSObject { + +} +@property CardboardScreenOrientation orientation; +-(void) orientationChanged:(NSNotification *)note; +@end + +@implementation ScreenOrientationHelper { + +} +@synthesize orientation; + +-(id)init +{ + self = [super init]; + if (self) { + UIDevice *device = [UIDevice currentDevice]; + [self setOrientationValue:device]; + [device beginGeneratingDeviceOrientationNotifications]; + + [[NSNotificationCenter defaultCenter] + addObserver:self selector:@selector(orientationChanged:) + name:UIDeviceOrientationDidChangeNotification + object:device]; + } + return self; +} + +-(void) orientationChanged:(NSNotification *)note { + [self setOrientationValue:note.object]; +} + +-(void)setOrientationValue:(UIDevice *)device { + UIDeviceOrientation orientation = [device orientation]; + + if (!UIDeviceOrientationIsValidInterfaceOrientation(orientation)) { + return; + } + UIInterfaceOrientationMask supportedInterfaces = -1; + for (UIWindow *window in [[UIApplication sharedApplication] windows]) { + if (!window.hidden && window.subviews.count > 0) { + for (int i=0;i +#include +#include "util/logging.h" + +namespace cardboard { + +PositionData::PositionData(size_t buffer_size) : buffer_size_(buffer_size) {} + +void PositionData::AddSample(const Vector3& sample, const int64_t timestamp_ns) { + + buffer_.push_back(sample); + + if (buffer_.size() > buffer_size_) { + buffer_.pop_front(); + } + + timestamp_buffer_.push_back(timestamp_ns); + if (timestamp_buffer_.size() > buffer_size_) { + timestamp_buffer_.pop_front(); + } +} + +bool PositionData::IsValid() const { return buffer_.size() == buffer_size_; } + +long long PositionData::GetLatestTimestamp() const { + if (timestamp_buffer_.size() > 0) { + return 
      timestamp_buffer_[timestamp_buffer_.size() - 1];
+  }
+  return 0;
+}
+
+Vector3 PositionData::GetLatestData() const {
+  if (buffer_.size() > 0) {
+    return buffer_[buffer_.size() - 1];
+  }
+  return Vector3::Zero();
+}
+
+Vector3 PositionData::GetExtrapolatedForTimeStamp(const int64_t timestamp_ns) {
+  if (!IsValid() || buffer_size_ < 2) {
+    return {0.0, 0.0, 0.0};
+  }
+
+  if (timestamp_ns > timestamp_buffer_[buffer_size_ - 1]) {
+    const Vector3 v0 = (buffer_[buffer_size_-1] - buffer_[buffer_size_-2]) / (timestamp_buffer_[buffer_size_-1] - timestamp_buffer_[buffer_size_-2]);
+    const Vector3 v1 = (buffer_[buffer_size_-2] - buffer_[buffer_size_-3]) / (timestamp_buffer_[buffer_size_-2] - timestamp_buffer_[buffer_size_-3]);
+    const Vector3 v2 = (buffer_[buffer_size_-3] - buffer_[buffer_size_-4]) / (timestamp_buffer_[buffer_size_-3] - timestamp_buffer_[buffer_size_-4]);
+    const Vector3 v3 = (buffer_[buffer_size_-4] - buffer_[buffer_size_-5]) / (timestamp_buffer_[buffer_size_-4] - timestamp_buffer_[buffer_size_-5]);
+    const Vector3 v4 = (buffer_[buffer_size_-5] - buffer_[buffer_size_-6]) / (timestamp_buffer_[buffer_size_-5] - timestamp_buffer_[buffer_size_-6]);
+
+    const Vector3 v = (v0 + v1 + v2 + v3 + v4) / 5;
+
+    //printf("v\t%f\t%llu\t%llu\t%llu\t%f\t%f\t%f\n", v[0] * 1000000,timestamp_buffer_[buffer_size_ - 1],timestamp_buffer_[buffer_size_ - 2],timestamp_ns, newPoss[0], newPoss[1], newPoss[2]);
+    return buffer_[buffer_size_-1] + v * (timestamp_ns - timestamp_buffer_[buffer_size_ - 1]);
+  }
+  return buffer_[buffer_size_-1];
+}
+
+void PositionData::Reset() {
+  buffer_.clear();
+  timestamp_buffer_.clear();
+}
+
+}  // namespace cardboard
diff --git a/sdk/sixdof/position_data.h b/sdk/sixdof/position_data.h
new file mode 100644
index 00000000..086baa4c
--- /dev/null
+++ b/sdk/sixdof/position_data.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2021 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Aryzon 6DoF
+
+#ifndef position_data_h
+#define position_data_h
+
+#include <deque>
+
+#include "util/vector.h"
+
+namespace cardboard {
+
+// This class holds a buffer of position data samples with corresponding
+// timestamp samples.
+class PositionData {
+ public:
+  // Creates a buffer to hold position data of size buffer_size.
+  // @param buffer_size number of samples to buffer.
+  explicit PositionData(size_t buffer_size);
+
+  // Adds a sample to buffer_; if buffer_ is full, the oldest sample is dropped.
+  void AddSample(const Vector3& sample, const int64_t timestamp_ns);
+
+  // Returns true if the buffer holds buffer_size samples, false otherwise.
+  bool IsValid() const;
+
+  // Returns the latest value stored in the internal buffer.
+  Vector3 GetLatestData() const;
+
+  // Returns the latest timestamp value stored in the internal timestamp buffer.
+  long long GetLatestTimestamp() const;
+
+  // Returns the position extrapolated from data stored in the internal buffers.
+  // A buffer size of at least six samples is required to work.
+  // It returns a zero Vector3 when not fully initialised.
+  // @param timestamp_ns the time in nanoseconds to get a position value for.
+  Vector3 GetExtrapolatedForTimeStamp(const int64_t timestamp_ns);
+
+  // Clears the internal buffers.
+  void Reset();
+
+ private:
+  const size_t buffer_size_;
+  std::deque<Vector3> buffer_;
+  std::deque<int64_t> timestamp_buffer_;
+};
+
+}  // namespace cardboard
+
+#endif  /* position_data_h */
diff --git a/sdk/sixdof/rotation_data.cc b/sdk/sixdof/rotation_data.cc
new file mode 100644
index 00000000..1c3cce20
--- /dev/null
+++ b/sdk/sixdof/rotation_data.cc
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2021 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "sixdof/rotation_data.h"
+
+#include
+#include
+
+namespace cardboard {
+
+RotationData::RotationData(size_t buffer_size) : buffer_size_(buffer_size) {}
+
+void RotationData::AddSample(const Vector4& sample, const int64_t timestamp_ns) {
+  buffer_.push_back(sample);
+  if (buffer_.size() > buffer_size_) {
+    buffer_.pop_front();
+  }
+
+  timestamp_buffer_.push_back(timestamp_ns);
+  if (timestamp_buffer_.size() > buffer_size_) {
+    timestamp_buffer_.pop_front();
+  }
+}
+
+bool RotationData::IsValid() const { return buffer_.size() == buffer_size_; }
+
+Vector4 RotationData::GetLatestData() const {
+  if (buffer_.size() > 0) {
+    return buffer_[buffer_.size() - 1];
+  }
+  return {0.0, 0.0, 0.0, 1.0};
+}
+
+int64_t RotationData::GetLatestTimeStamp() const {
+  if (timestamp_buffer_.size() > 0) {
+    return timestamp_buffer_[timestamp_buffer_.size() - 1];
+  }
+  return 0;
+}
+
+Vector4 RotationData::GetInterpolatedForTimeStamp(const int64_t timestamp_ns) const {
+  if (!IsValid()) {
+    return {0.0, 0.0, 0.0, 1.0};
+  }
+  int64_t smaller = -1;
+  int64_t larger = -1;
+
+  bool did_pass_larger = false;
+  int i = 0;
+
+  while (!did_pass_larger && static_cast<size_t>(i) < buffer_size_) {
+    int64_t current_ts = timestamp_buffer_[i];
+    if (current_ts <= timestamp_ns) {
+      smaller = current_ts;
+    } else {
+      larger = current_ts;
+      did_pass_larger = true;
+    }
+    i++;
+  }
+
+  if (smaller > 0 && larger > 0) {
+    // The loop leaves i one past the sample with the larger timestamp, so the
+    // bracketing samples are buffer_[i-2] and buffer_[i-1].
+    const float interpolation_value = (timestamp_ns - smaller) / (double)(larger - smaller);
+    return buffer_[i-2] + interpolation_value * (buffer_[i-1] - buffer_[i-2]);
+  }
+
+  return buffer_[buffer_size_-1];
+}
+
+}  // namespace cardboard
diff --git a/sdk/sixdof/rotation_data.h b/sdk/sixdof/rotation_data.h
new file mode 100644
index 00000000..28cede82
--- /dev/null
+++ b/sdk/sixdof/rotation_data.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2021 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef rotation_data_h +#define rotation_data_h + +#include + +#include "util/vector.h" + +namespace cardboard { + +// Fixed window FIFO mean filter for vectors of the given dimension. +class RotationData { + public: + // Create a buffer to hold rotation data of size buffer_size. + // @param buffer_size size of samples to buffer. + explicit RotationData(size_t buffer_size); + + // Add sample to buffer_ if buffer_ is full it drop the oldest sample. + void AddSample(const Vector4& sample, const int64_t timestamp_ns); + + // Returns true if buffer has buffer_size sample, false otherwise. + bool IsValid() const; + + // Returns the latest value stored in the internal buffer. + Vector4 GetLatestData() const; + + // Returns the latest value stored in the internal timestamp buffer. + int64_t GetLatestTimeStamp() const; + + // Returns a rotation linearly interpolated from the data stored in the internal buffer. + // It returns an identity rotation when not fully initialised. + // It returns the last value added to the buffer when the requested timestamp is + // outside the buffered timestamps. + // @param timestamp_ns the time in nanoseconds to get a rotation value for. + Vector4 GetInterpolatedForTimeStamp(const int64_t timestamp_ns) const; + + private: + const size_t buffer_size_; + std::deque buffer_; + std::deque timestamp_buffer_; +}; + +} // namespace cardboard + + +#endif /* rotation_data_h */ diff --git a/sdk/unity/xr_provider/input.cc b/sdk/unity/xr_provider/input.cc index 808a2f7b..4bcf6262 100644 --- a/sdk/unity/xr_provider/input.cc +++ b/sdk/unity/xr_provider/input.cc @@ -57,8 +57,8 @@ class CardboardInputProvider { UnityXRInputProvider input_provider; input_provider.userData = nullptr; input_provider.Tick = [](UnitySubsystemHandle, void*, - UnityXRInputUpdateType) { - return GetInstance()->Tick(); + UnityXRInputUpdateType updateType) { + return GetInstance()->Tick(updateType); }; input_provider.FillDeviceDefinition = [](UnitySubsystemHandle, void*, UnityXRInternalInputDeviceId device_id, @@ -126,16 +126,27 @@ class CardboardInputProvider { cardboard_api_->PauseHeadTracker(); } - UnitySubsystemErrorCode Tick() { - std::array out_orientation; - std::array out_position; - cardboard_api_->GetHeadTrackerPose(out_position.data(), - out_orientation.data()); - // TODO(b/151817737): Compute pose position within SDK with custom rotation. - head_pose_ = cardboard::unity::CardboardRotationToUnityPose(out_orientation); + // Aryzon 6DoF changed to include updateType and only update onBeforeRender to save computation power + UnitySubsystemErrorCode Tick(UnityXRInputUpdateType updateType) { + if (updateType == kUnityXRInputUpdateTypeBeforeRender) { + std::array out_orientation; + std::array out_position; + cardboard_api_->GetHeadTrackerPose(out_position.data(), + out_orientation.data()); + // TODO(b/151817737): Compute pose position within SDK with custom rotation. 
+ //head_pose_ = cardboard::unity::CardboardRotationToUnityPose(out_orientation); + + // Aryzon 6DoF changed to include position + head_pose_ = cardboard::unity::CardboardPoseToUnityPose(out_orientation, out_position); + } return kUnitySubsystemErrorCodeSuccess; } + // Aryzon 6DoF + void Add6DoF(int64_t timestamp_ns, float* position, float* orientation) { + cardboard_api_->AddSixDoFData(timestamp_ns, position, orientation); + } + UnitySubsystemErrorCode FillDeviceDefinition( UnityXRInternalInputDeviceId device_id, UnityXRInputDeviceDefinition* definition) { @@ -253,3 +264,10 @@ UnitySubsystemErrorCode LoadInput(IUnityInterfaces* xr_interfaces) { } void UnloadInput() { CardboardInputProvider::GetInstance().reset(); } + +// Aryzon 6DoF +extern "C" { + void CardboardUnity_AddSixDoFData(CardboardInputProvider* ptr, int64_t timestamp_ns, float* position, float* orientation) { + ptr->GetInstance()->Add6DoF(timestamp_ns, position, orientation); + } +} diff --git a/sdk/unity/xr_provider/math_tools.cc b/sdk/unity/xr_provider/math_tools.cc index ed4fdb72..37948e92 100644 --- a/sdk/unity/xr_provider/math_tools.cc +++ b/sdk/unity/xr_provider/math_tools.cc @@ -132,6 +132,33 @@ UnityXRPose CardboardRotationToUnityPose(const std::array& rotation) { return result; } +// Aryzon 6DoF added CardboardPoseToUnityPose to include also position instead of only rotation +UnityXRPose CardboardPoseToUnityPose(const std::array& rotation, const std::array& position) { + UnityXRPose result; + + // Sets Unity Pose's rotation. Unity expects forward as positive z axis, + // whereas OpenGL expects forward as negative z. + result.rotation.x = rotation.at(0); + result.rotation.y = rotation.at(1); + result.rotation.z = -rotation.at(2); + result.rotation.w = rotation.at(3); + + result.position.x = position.at(0); + result.position.y = position.at(1); + result.position.z = -position.at(2); + + // Computes Unity Pose's position. + // Reasoning: It's easier to compute the position directly applying the neck + // model to Unity's rotation instead of using the one provided by the SDK. To + // use the provided position we should perform the following computation: + // 1. Compute inverse rotation quaternion (OpenGL's coordinates frame). + // 2. Apply the inverse rotation to the provided position. + // 3. Modify the position vector to suit Unity's coordinates frame. + // 4. Apply the new rotation (Unity's coordinates frame). + + return result; +} + // TODO(b/155113586): refactor this function to be part of the same // transformation as the above. UnityXRPose CardboardTransformToUnityPose( diff --git a/sdk/unity/xr_provider/math_tools.h b/sdk/unity/xr_provider/math_tools.h index ce482f41..39e41a00 100644 --- a/sdk/unity/xr_provider/math_tools.h +++ b/sdk/unity/xr_provider/math_tools.h @@ -30,6 +30,13 @@ namespace unity { /// @returns A UnityXRPose from Cardboard @p rotation. UnityXRPose CardboardRotationToUnityPose(const std::array& rotation); +/// Aryzon 6DoF +/// @brief Creates a UnityXRPose from a Cardboard rotation. +/// @param rotation A Cardboard rotation quaternion expressed as [x, y, z, w]. +/// @param position A Cardboard position vector expressed as [x, y, z]. +/// @returns A UnityXRPose from Cardboard @p rotation. +UnityXRPose CardboardPoseToUnityPose(const std::array& rotation, const std::array& position); + /// @brief Creates a UnityXRPose from a Cardboard transformation matrix. /// @param transform A 4x4 float transformation matrix. /// @returns A UnityXRPose from Cardboard @p transform. 
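The new CardboardPoseToUnityPose above mirrors the z components because Cardboard poses are expressed in a right-handed, OpenGL-style frame (forward is -z), while Unity uses a left-handed frame (forward is +z). A minimal standalone sketch of that same component mapping, independent of the Unity XR headers; UnityPoseSketch is an illustrative stand-in for UnityXRPose, not an SDK type:

    #include <array>

    // Stand-in for UnityXRPose, used only to illustrate the mapping.
    struct UnityPoseSketch {
      std::array<float, 3> position;
      std::array<float, 4> rotation;  // Quaternion stored as [x, y, z, w].
    };

    UnityPoseSketch ToUnityPoseSketch(const std::array<float, 4>& rotation,
                                      const std::array<float, 3>& position) {
      UnityPoseSketch result;
      // Same mapping as CardboardPoseToUnityPose: keep x and y, mirror z for
      // both the quaternion and the translation.
      result.rotation = {rotation[0], rotation[1], -rotation[2], rotation[3]};
      result.position = {position[0], position[1], -position[2]};
      return result;
    }
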
diff --git a/sdk/unity/xr_unity_plugin/cardboard_xr_unity.cc b/sdk/unity/xr_unity_plugin/cardboard_xr_unity.cc index b0f9a75e..595b8ee5 100644 --- a/sdk/unity/xr_unity_plugin/cardboard_xr_unity.cc +++ b/sdk/unity/xr_unity_plugin/cardboard_xr_unity.cc @@ -141,6 +141,20 @@ class CardboardApi::CardboardApiImpl { position, orientation); } + // Aryzon 6DoF + void AddSixDoFData(int64_t timestamp_nano, float* position, float* orientation) { + //LOGW("Head tracker was queried when setting 6DoF data."); + if (head_tracker_ == nullptr) { + LOGW("Uninitialized head tracker was queried when setting 6DoF data."); + return; + } + // Convert from Unity space to Cardboard space + position[2] = -position[2]; + orientation[2] = -orientation[2]; + + CardboardHeadTracker_addSixDoFData(head_tracker_.get(), timestamp_nano, position, orientation); + } + static void ScanDeviceParams() { CardboardQrCode_scanQrCodeAndSaveDeviceParams(); } @@ -276,6 +290,7 @@ class CardboardApi::CardboardApiImpl { int viewport_height) { unity_screen_params_ = ScreenParams{ width, height, viewport_x, viewport_y, viewport_width, viewport_height}; + SetDeviceParametersChanged(); } static void GetUnityScreenParams(int* width, int* height) { @@ -511,6 +526,11 @@ void CardboardApi::GetHeadTrackerPose(float* position, float* orientation) { p_impl_->GetHeadTrackerPose(position, orientation); } +// Aryzon 6DoF +void CardboardApi::AddSixDoFData(int64_t timestamp_nano, float *position, float *orientation) { + p_impl_->AddSixDoFData(timestamp_nano, position, orientation); +} + void CardboardApi::ScanDeviceParams() { CardboardApiImpl::ScanDeviceParams(); } void CardboardApi::UpdateDeviceParams() { p_impl_->UpdateDeviceParams(); } diff --git a/sdk/unity/xr_unity_plugin/cardboard_xr_unity.h b/sdk/unity/xr_unity_plugin/cardboard_xr_unity.h index dd2e1ad5..2d47d7f7 100644 --- a/sdk/unity/xr_unity_plugin/cardboard_xr_unity.h +++ b/sdk/unity/xr_unity_plugin/cardboard_xr_unity.h @@ -64,6 +64,13 @@ class CardboardApi { // TODO(b/154305848): Move argument types to std::array*. void GetHeadTrackerPose(float* position, float* orientation); + /// Aryzon 6DoF + /// @brief Add a 6DoF pose sample to the HeadTracker module. + /// @param[in] timestamp_nano A timestamp of the moment the 6DoF data was captured in nanoseconds + /// @param[in] position A pointer to an array with three floats that holds the 6DoF position + /// @param[in] orientation A pointer to an array with four floats that holds the 6DoF orientation + void AddSixDoFData(int64_t timestamp_nano, float* position, float* orientation); + /// @brief Triggers a device parameters scan. /// @pre When using Android, the pointer to `JavaVM` must be previously set. void ScanDeviceParams(); diff --git a/sdk/unity/xr_unity_plugin/metal_renderer.mm b/sdk/unity/xr_unity_plugin/metal_renderer.mm index 790455fb..7b7fc63f 100644 --- a/sdk/unity/xr_unity_plugin/metal_renderer.mm +++ b/sdk/unity/xr_unity_plugin/metal_renderer.mm @@ -182,51 +182,39 @@ void CreateRenderTexture(RenderTexture* render_texture, int screen_width, int screen_height) override { id mtl_device = metal_interface_->MetalDevice(); - // Create texture color buffer. + // Create texture color buffer. NSDictionary* color_surface_attribs = @{ - (NSString*)kIOSurfaceIsGlobal : @YES, + // (NSString*)kIOSurfaceIsGlobal : @YES, // Seems to not be necessary and less safe, maybe doesn't work with earlier versions of Unity? 
(NSString*)kIOSurfaceWidth : @(screen_width / 2), (NSString*)kIOSurfaceHeight : @(screen_height), (NSString*)kIOSurfaceBytesPerElement : @4u }; - color_surface_ = IOSurfaceCreate((CFDictionaryRef)color_surface_attribs); - MTLTextureDescriptor* texture_color_buffer_descriptor = [MTLTextureDescriptorClass new]; - texture_color_buffer_descriptor.textureType = MTLTextureType2D; - texture_color_buffer_descriptor.width = screen_width / 2; - texture_color_buffer_descriptor.height = screen_height; - texture_color_buffer_descriptor.pixelFormat = MTLPixelFormatRGBA8Unorm; - texture_color_buffer_descriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead; - color_texture_ = [mtl_device newTextureWithDescriptor:texture_color_buffer_descriptor - iosurface:color_surface_ - plane:0]; - render_texture->color_buffer = reinterpret_cast(color_surface_); - - // When using Metal, texture depth buffer is unused. - render_texture->depth_buffer = 0; - - // Create a black texture. It is used to hide a rendering previously performed by Unity. - // TODO(b/185478026): Prevent Unity from drawing a monocular scene when using Metal. - MTLTextureDescriptor* black_texture_descriptor = [MTLTextureDescriptorClass new]; - black_texture_descriptor.textureType = MTLTextureType2D; - black_texture_descriptor.width = screen_width; - black_texture_descriptor.height = screen_height; - black_texture_descriptor.pixelFormat = MTLPixelFormatRGBA8Unorm; - black_texture_descriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead; - black_texture_ = [mtl_device newTextureWithDescriptor:black_texture_descriptor]; - - std::vector black_texture_data(screen_width * screen_height, 0xFF000000); - MTLRegion region = MTLRegionMake2D(0, 0, screen_width, screen_height); - [black_texture_ replaceRegion:region - mipmapLevel:0 - withBytes:reinterpret_cast(black_texture_data.data()) - bytesPerRow:4 * screen_width]; - - black_texture_vertices_buffer_ = [mtl_device newBufferWithBytes:vertices - length:sizeof(vertices) - options:MTLResourceStorageModeShared]; - black_texture_uvs_buffer_ = [mtl_device newBufferWithBytes:uvs - length:sizeof(uvs) - options:MTLResourceStorageModeShared]; + + MTLTextureDescriptor* texture_color_buffer_descriptor = [MTLTextureDescriptorClass new]; + texture_color_buffer_descriptor.textureType = MTLTextureType2D; + texture_color_buffer_descriptor.width = screen_width / 2; + texture_color_buffer_descriptor.height = screen_height; + texture_color_buffer_descriptor.pixelFormat = MTLPixelFormatRGBA8Unorm; + texture_color_buffer_descriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderWrite; // MTLTextureUsageShaderRead is less optimized. + + IOSurfaceRef color_surface = IOSurfaceCreate((CFDictionaryRef)color_surface_attribs); + + render_texture->color_buffer = reinterpret_cast(color_surface); + + if (eyeIndex == 0) { + color_texture_left_ = [mtl_device newTextureWithDescriptor:texture_color_buffer_descriptor + iosurface:color_surface + plane:0]; + eyeIndex = 1; + } else { + color_texture_right_ = [mtl_device newTextureWithDescriptor:texture_color_buffer_descriptor + iosurface:color_surface + plane:0]; + eyeIndex = 0; + } + + // When using Metal, texture depth buffer is unused. 
+ render_texture->depth_buffer = 0; } void DestroyRenderTexture(RenderTexture* render_texture) override { @@ -237,30 +225,24 @@ void DestroyRenderTexture(RenderTexture* render_texture) override { void RenderEyesToDisplay(CardboardDistortionRenderer* renderer, const ScreenParams& screen_params, const CardboardEyeTextureDescription* left_eye, const CardboardEyeTextureDescription* right_eye) override { - // Render black texture. It is used to hide a rendering previously performed by Unity. - // TODO(b/185478026): Prevent Unity from drawing a monocular scene when using Metal. - RenderBlackTexture(screen_params.width, screen_params.height); const CardboardDistortionRendererTargetConfig target_config{ - reinterpret_cast(CFBridgingRetain(metal_interface_->CurrentCommandEncoder())), - screen_params.width, screen_params.height}; + reinterpret_cast(CFBridgingRetain(metal_interface_->CurrentCommandEncoder())), + screen_params.width, screen_params.height}; - // An IOSurfaceRef was passed to Unity for drawing, but a reference to an id using - // it must be passed to the SDK. - CFTypeRef color_texture = CFBridgingRetain(color_texture_); CardboardEyeTextureDescription left_eye_description = *left_eye; - left_eye_description.texture = reinterpret_cast(color_texture); CardboardEyeTextureDescription right_eye_description = *right_eye; - right_eye_description.texture = reinterpret_cast(color_texture); + left_eye_description.texture = reinterpret_cast(color_texture_left_); + right_eye_description.texture = reinterpret_cast(color_texture_right_); + CardboardDistortionRenderer_renderEyeToDisplay( - renderer, reinterpret_cast(&target_config), screen_params.viewport_x, - screen_params.viewport_y, screen_params.viewport_width, screen_params.viewport_height, - &left_eye_description, &right_eye_description); + renderer, reinterpret_cast(&target_config), screen_params.viewport_x, + screen_params.viewport_y, screen_params.viewport_width, screen_params.viewport_height, + &left_eye_description, &right_eye_description); - CFBridgingRelease(color_texture); CFBridgingRelease(reinterpret_cast(target_config.render_command_encoder)); - } +} private: static constexpr float Lerp(float start, float end, float val) { @@ -293,47 +275,22 @@ void RenderWidget(id mtl_render_command_encoder, int sc vertexCount:4]; } - void RenderBlackTexture(int screen_width, int screen_height) { - // Get Metal current render command encoder. 
- id mtl_render_command_encoder_ = - static_cast>(metal_interface_->CurrentCommandEncoder()); - - [mtl_render_command_encoder_ setRenderPipelineState:mtl_render_pipeline_state_]; - - [mtl_render_command_encoder_ - setViewport:(MTLViewport){0.0, 0.0, static_cast(screen_width), - static_cast(screen_height), 0.0, 1.0}]; - - [mtl_render_command_encoder_ setVertexBuffer:black_texture_vertices_buffer_ - offset:0 - atIndex:VertexInputIndexPosition]; - - [mtl_render_command_encoder_ setVertexBuffer:black_texture_uvs_buffer_ - offset:0 - atIndex:VertexInputIndexTexCoords]; - - [mtl_render_command_encoder_ setFragmentTexture:black_texture_ - atIndex:FragmentInputIndexTexture]; - - [mtl_render_command_encoder_ drawPrimitives:MTLPrimitiveTypeTriangleStrip - vertexStart:0 - vertexCount:4]; - } - constexpr static float vertices[] = {-1, -1, 1, -1, -1, 1, 1, 1}; constexpr static float uvs[] = {0, 0, 1, 0, 0, 1, 1, 1}; IUnityGraphicsMetalV1* metal_interface_{nullptr}; id mtl_render_pipeline_state_; - IOSurfaceRef color_surface_; - id color_texture_; + id color_texture_left_; + id color_texture_right_; id black_texture_; id black_texture_vertices_buffer_; id black_texture_uvs_buffer_; bool are_widgets_setup_{false}; + + int eyeIndex = 0; }; } // namespace diff --git a/sixdof-unity/SixDoFCardboardStartup.cs b/sixdof-unity/SixDoFCardboardStartup.cs new file mode 100644 index 00000000..244cc16c --- /dev/null +++ b/sixdof-unity/SixDoFCardboardStartup.cs @@ -0,0 +1,134 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Runtime.InteropServices; + +using UnityEngine; +using UnityEngine.XR; + +namespace Google.XR.Cardboard +{ + /// + /// Initializes Cardboard XR Plugin for 6DoF use. + /// + public class SixDoFCardboardStartup : MonoBehaviour + { + private static IntPtr _inputPointer; + public static IntPtr inputPointer + { + get { if (isStarted) { return _inputPointer; } else { return IntPtr.Zero; } } + set { _inputPointer = value; } + } + + private static IntPtr _displayPointer; + public static IntPtr displayPointer + { + get { if (isStarted) { return _displayPointer; } else { return IntPtr.Zero; } } + set { _displayPointer = value; } + } + + private XRLoader loader; + + public static bool isInitialized = false; + public static bool isStarted = false; + + private string inputMatch = "Input"; + + private void Start() + { + StartCardboard(); + } + + public void StartCardboard() + { + // Configures the app to not shut down the screen and sets the brightness to maximum. + // Brightness control is expected to work only in iOS, see: + // https://docs.unity3d.com/ScriptReference/Screen-brightness.html. 
+ Screen.sleepTimeout = SleepTimeout.NeverSleep; + Screen.brightness = 1.0f; + + if (!loader) + { + loader = new XRLoader(); + } +#if !UNITY_EDITOR + loader.Initialize(); +#endif + loader.Start(); + ConnectCardboardInputSystem(); + + isStarted = true; + + ReloadDeviceParams(); + + if (!Api.HasDeviceParams()) + { + Api.ScanDeviceParams(); + } + } + + public void StopCardboard() + { + if (loader) + { + loader.Stop(); + loader.Deinitialize(); + } + isStarted = false; + } + + public void ReloadDeviceParams() + { + if (!isStarted) + { + return; + } + Api.ReloadDeviceParams(); + } + + public void Update() + { + if (!isStarted) + { + return; + } + + if (Api.IsGearButtonPressed) + { + Api.ScanDeviceParams(); + } + + if (Api.IsCloseButtonPressed) + { + Application.Quit(); + } + + if (Api.HasNewDeviceParams()) + { + Api.ReloadDeviceParams(); + } + + Api.UpdateScreenParams(); + } + + private void ConnectCardboardInputSystem() + { + List inputs = new List(); + SubsystemManager.GetSubsystemDescriptors(inputs); + + foreach (var d in inputs) + { + if (d.id.Equals(inputMatch)) + { + XRInputSubsystem inputInst = d.Create(); + + if (inputInst != null) + { + GCHandle handle = GCHandle.Alloc(inputInst); + inputPointer = GCHandle.ToIntPtr(handle); + } + } + } + } + } +} diff --git a/sixdof-unity/SixDoFPoseDriver.cs b/sixdof-unity/SixDoFPoseDriver.cs new file mode 100644 index 00000000..63365076 --- /dev/null +++ b/sixdof-unity/SixDoFPoseDriver.cs @@ -0,0 +1,290 @@ + +using System; +using System.Collections; +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; +using UnityEngine; +using UnityEngine.XR; +using UnityEngine.XR.ARFoundation; + +namespace Google.XR.Cardboard +{ + public class SixDoFPoseDriver : MonoBehaviour + { +#if UNITY_ANDROID + public const string CardboardApi = "cardboard_api"; +#elif UNITY_IOS + public const string CardboardApi = "__Internal"; +#else + public const string CardboardApi = "NOT_AVAILABLE"; +#endif + + [DllImport(CardboardApi)] + private static extern void CardboardUnity_AddSixDoFData(IntPtr ptr, Int64 timestamp, [In] float[] position, [In] float[] orientation); + + private ARCameraManager _arCameraManager; + public ARCameraManager arCameraManager + { + get { + if (!_arCameraManager) + { + foreach (Camera camera in Camera.allCameras) + { + ARCameraManager _cameraManager = camera.gameObject.GetComponent(); + if (_cameraManager && _cameraManager.enabled) + { + _arCameraManager = _cameraManager; + break; + } + } + } + if (!_arCameraManager) + { + Debug.LogError("[Aryzon] No ARCameraManager found, make sure there is one attached to an active camera."); + } + return _arCameraManager; + } + set { _arCameraManager = value; } + } + + internal struct NullablePose + { + internal Vector3? position; + internal Quaternion? 
rotation; + } + + public void OnEnable() + { + Application.onBeforeRender += OnBeforeRender; + arCameraManager.frameReceived += ArCameraManager_frameReceived; +#if UNITY_2020_1_OR_NEWER + List devices = new List(); + InputDevices.GetDevicesWithCharacteristics(InputDeviceCharacteristics.TrackedDevice, devices); + foreach (var device in devices) + { + if (device.characteristics.HasFlag(InputDeviceCharacteristics.TrackedDevice)) + { + CheckConnectedDevice(device, false); + } + } + + InputDevices.deviceConnected += OnInputDeviceConnected; +#endif // UNITY_UNITY_2020_1_OR_NEWER + } + + public void OnDisable() + { + Application.onBeforeRender -= OnBeforeRender; + arCameraManager.frameReceived -= ArCameraManager_frameReceived; + +#if UNITY_2020_1_OR_NEWER + InputDevices.deviceConnected -= OnInputDeviceConnected; +#endif // UNITY_UNITY_2020_1_OR_NEWER + } + + void OnBeforeRender() => PerformUpdate(); + + void PerformUpdate() + { +#if !UNITY_EDITOR + var updatedPose = GetPoseData(); + + if (updatedPose.position.HasValue) + { + transform.localPosition = updatedPose.position.Value; + } + if (updatedPose.rotation.HasValue) + { + transform.localRotation = updatedPose.rotation.Value; + } +#endif + } + +#if UNITY_2020_1_OR_NEWER + static internal InputDevice? s_InputTrackingDevice = null; + static internal InputDevice? s_CardboardHMDInputTrackingDevice = null; + + void OnInputDeviceConnected(InputDevice device) => CheckConnectedDevice(device); + + void CheckConnectedDevice(InputDevice device, bool displayWarning = true) + { + var positionSuccess = false; + var rotationSuccess = false; + if (!(positionSuccess = device.TryGetFeatureValue(CommonUsages.centerEyePosition, out Vector3 position))) + positionSuccess = device.TryGetFeatureValue(CommonUsages.colorCameraPosition, out position); + if (!(rotationSuccess = device.TryGetFeatureValue(CommonUsages.centerEyeRotation, out Quaternion rotation))) + rotationSuccess = device.TryGetFeatureValue(CommonUsages.colorCameraRotation, out rotation); + + if (positionSuccess && rotationSuccess) + { + if (s_InputTrackingDevice == null) + { + s_InputTrackingDevice = device; + } + else if (s_CardboardHMDInputTrackingDevice == null && device.name == "Cardboard HMD") + { + s_CardboardHMDInputTrackingDevice = device; + } + } + } + +#else + static internal List nodeStates = new List(); +#endif // UNITY_2020_1_OR_NEWER + + public void AddSixDoFData(Vector3 position, Quaternion rotation, long timestampNs) + { + float[] positionArray = { position.x, position.y, position.z }; + float[] rotationArray = { rotation.x, rotation.y, rotation.z, rotation.w }; + CardboardUnity_AddSixDoFData(SixDoFCardboardStartup.inputPointer, timestampNs, positionArray, rotationArray); + } + + private void ArCameraManager_frameReceived(ARCameraFrameEventArgs obj) + { + if (!SixDoFCardboardStartup.isStarted) + { + return; + } +#if UNITY_2020_1_OR_NEWER + + if (s_InputTrackingDevice != null) + { + var pose = Pose.identity; + var positionSuccess = false; + var rotationSuccess = false; + + if (!(positionSuccess = s_InputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.centerEyePosition, out pose.position))) + positionSuccess = s_InputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.colorCameraPosition, out pose.position); + if (!(rotationSuccess = s_InputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.centerEyeRotation, out pose.rotation))) + rotationSuccess = s_InputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.colorCameraRotation, out pose.rotation); + + if (positionSuccess && 
rotationSuccess) + { + AddSixDoFData(pose.position, pose.rotation, (long)obj.timestampNs); + } + } +#else + UnityEngine.XR.InputTracking.GetNodeStates(nodeStates); + foreach (var nodeState in nodeStates) + { + if (nodeState.nodeType == UnityEngine.XR.XRNode.CenterEye) + { + var pose = Pose.identity; + var positionSuccess = nodeState.TryGetPosition(out pose.position); + var rotationSuccess = nodeState.TryGetRotation(out pose.rotation); + + if (positionSuccess && rotationSuccess) + { + AddSixDoFData(pose.position, pose.rotation, (long)obj.timestampNs); + } + break; + } + } +#endif + } + + static internal NullablePose GetPoseData() + { + NullablePose resultPose = new NullablePose(); + +#if UNITY_2020_1_OR_NEWER + if (!SixDoFCardboardStartup.isStarted && s_CardboardHMDInputTrackingDevice != null) + { + s_CardboardHMDInputTrackingDevice = null; + } + + if (s_CardboardHMDInputTrackingDevice != null) + { + var pose = Pose.identity; + var positionSuccess = false; + var rotationSuccess = false; + + if (!(positionSuccess = s_CardboardHMDInputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.centerEyePosition, out pose.position))) + positionSuccess = s_CardboardHMDInputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.colorCameraPosition, out pose.position); + if (!(rotationSuccess = s_CardboardHMDInputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.centerEyeRotation, out pose.rotation))) + rotationSuccess = s_CardboardHMDInputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.colorCameraRotation, out pose.rotation); + + if (positionSuccess) + resultPose.position = pose.position; + if (rotationSuccess) + resultPose.rotation = pose.rotation; + + Debug.Log("x: " + pose.position.x + " y: " + pose.position.y + " z: " + pose.position.z); + + if (positionSuccess || rotationSuccess) + return resultPose; + } + else if (s_InputTrackingDevice != null) + { + var pose = Pose.identity; + var positionSuccess = false; + var rotationSuccess = false; + + if (!(positionSuccess = s_InputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.centerEyePosition, out pose.position))) + positionSuccess = s_InputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.colorCameraPosition, out pose.position); + if (!(rotationSuccess = s_InputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.centerEyeRotation, out pose.rotation))) + rotationSuccess = s_InputTrackingDevice.Value.TryGetFeatureValue(CommonUsages.colorCameraRotation, out pose.rotation); + + if (positionSuccess) + resultPose.position = pose.position; + if (rotationSuccess) + resultPose.rotation = pose.rotation; + + if (positionSuccess || rotationSuccess) + return resultPose; + } +#else + UnityEngine.XR.InputTracking.GetNodeStates(nodeStates); + + List states = new List(); + + if (!SixDoFCardboardStartup.isStarted) + { + foreach (UnityEngine.XR.XRNodeState nodeState in nodeStates) + { + if (nodeState.nodeType == UnityEngine.XR.XRNode.CenterEye) + { + var pose = Pose.identity; + var positionSuccess = nodeState.TryGetPosition(out pose.position); + var rotationSuccess = nodeState.TryGetRotation(out pose.rotation); + + if (positionSuccess) + resultPose.position = pose.position; + if (rotationSuccess) + resultPose.rotation = pose.rotation; + + break; + } + } + } + else + { + foreach (UnityEngine.XR.XRNodeState nodeState in nodeStates) + { + if (nodeState.nodeType == UnityEngine.XR.XRNode.CenterEye) + { + states.Add(nodeState); + } + } + + if (nodeStates.Count > 0) + { + UnityEngine.XR.XRNodeState nodeState = nodeStates[nodeStates.Count - 1]; + var pose = 
Pose.identity; + var positionSuccess = nodeState.TryGetPosition(out pose.position); + var rotationSuccess = nodeState.TryGetRotation(out pose.rotation); + + if (positionSuccess) + resultPose.position = pose.position; + if (rotationSuccess) + resultPose.rotation = pose.rotation; + + return resultPose; + } + } +#endif // UNITY_2020_1_OR_NEWER + return resultPose; + } + } +}
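
On the native side, this change boils down to feeding externally tracked poses into the head tracker through the new C entry point and reading the fused pose back as before. A rough sketch of that call sequence against the public C API in cardboard.h; the pose values and timestamp are placeholders, and it assumes the tracker was created with CardboardHeadTracker_create(), resumed with CardboardHeadTracker_resume(), and (on Android) that Cardboard_initializeAndroid() was called first:

    #include <cstdint>

    #include "cardboard.h"

    // Feed one 6DoF sample from an external tracker (e.g. ARKit/ARCore) and
    // then query the fused pose.
    void FeedAndReadPoseOnce(CardboardHeadTracker* head_tracker, int64_t now_ns) {
      // Position in meters and orientation as an [x, y, z, w] quaternion,
      // expressed in the SDK's OpenGL-style coordinate frame.
      float tracked_position[3] = {0.0f, 1.6f, 0.0f};
      float tracked_orientation[4] = {0.0f, 0.0f, 0.0f, 1.0f};
      CardboardHeadTracker_addSixDoFData(head_tracker, now_ns, tracked_position,
                                         tracked_orientation);

      // The returned pose now extrapolates the 6DoF position and blends the
      // 6DoF orientation with the EKF output (see HeadTracker::GetPose above).
      float out_position[3];
      float out_orientation[4];
      CardboardHeadTracker_getPose(head_tracker, now_ns, out_position,
                                   out_orientation);
    }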