diff --git a/CMakeLists.txt b/CMakeLists.txt
new file mode 100644
index 00000000..718db04b
--- /dev/null
+++ b/CMakeLists.txt
@@ -0,0 +1,127 @@
+cmake_minimum_required(VERSION 2.8.3)
+project(astra_camera)
+
+find_package(catkin REQUIRED COMPONENTS camera_info_manager dynamic_reconfigure image_transport nodelet sensor_msgs roscpp message_generation)
+
+find_package(Boost REQUIRED COMPONENTS system thread)
+
+
+#MACHINE = $(shell uname -m)
+execute_process(COMMAND uname -m OUTPUT_VARIABLE MACHINES OUTPUT_STRIP_TRAILING_WHITESPACE)
+execute_process(COMMAND getconf LONG_BIT OUTPUT_VARIABLE MACHINES_BIT OUTPUT_STRIP_TRAILING_WHITESPACE)
+MESSAGE(STATUS "ORBBEC Machine : ${MACHINES}")
+MESSAGE(STATUS "ORBBEC Machine Bits : ${MACHINES_BIT}")
+IF ( (${MACHINES} MATCHES "x86_64") AND (${MACHINES_BIT} MATCHES "64") )
+ set(HOST_PLATFORM "x64")
+# for schroot environment
+ELSEIF ( (${MACHINES} MATCHES "x86_64") AND (${MACHINES_BIT} MATCHES "32") )
+ set(HOST_PLATFORM "x86")
+ELSEIF ( ${MACHINES} MATCHES "x86" )
+ set(HOST_PLATFORM "x86")
+ELSEIF ( ${MACHINES} MATCHES "i686" )
+ set(HOST_PLATFORM "x86")
+ELSEIF ( ${MACHINES} MATCHES "i386" )
+ set(HOST_PLATFORM "x86")
+ELSEIF ( ${MACHINES} MATCHES "arm" )
+ set(HOST_PLATFORM "arm")
+ELSEIF ( (${MACHINES} MATCHES "aarch64") AND (${MACHINES_BIT} MATCHES "64") )
+ set(HOST_PLATFORM "arm64")
+ELSEIF ( (${MACHINES} MATCHES "aarch64") AND (${MACHINES_BIT} MATCHES "32") )
+ set(HOST_PLATFORM "arm")
+ENDIF ()
+
+MESSAGE(STATUS "ORRBEC : ${HOST_PLATFORM}")
+
+generate_dynamic_reconfigure_options(cfg/Astra.cfg)
+add_service_files(FILES
+ GetSerial.srv)
+generate_messages()
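+# GetSerial.srv is assumed to consist of an empty request and a single
+# response field (sketch of the expected layout):
+#   ---
+#   string serial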
+
+catkin_package(
+ INCLUDE_DIRS include
+ LIBRARIES astra_wrapper
+ CATKIN_DEPENDS camera_info_manager dynamic_reconfigure image_transport nodelet sensor_msgs roscpp message_runtime
+ DEPENDS libastra
+)
+
+set(ORRBEC_OPENNI2_REDIST "${CMAKE_CURRENT_SOURCE_DIR}/include/openni2_redist/${HOST_PLATFORM}")
+link_directories(${ORRBEC_OPENNI2_REDIST})
+
+#MESSAGE(STATUS "ORRBEC : ${ORRBEC_OPENNI2_REDIST}")
+
+include_directories(include
+ ${catkin_INCLUDE_DIRS}
+ ${Boost_INCLUDE_DIRS}
+ ${CMAKE_CURRENT_SOURCE_DIR}/include/openni2
+ )
+
+
+add_library(astra_wrapper
+ src/astra_convert.cpp
+ src/astra_device.cpp
+ src/astra_device_info.cpp
+ src/astra_timer_filter.cpp
+ src/astra_frame_listener.cpp
+ src/astra_device_manager.cpp
+ src/astra_exception.cpp
+ src/astra_video_mode.cpp
+)
+target_link_libraries(astra_wrapper ${catkin_LIBRARIES} -lOpenNI2 -L${ORRBEC_OPENNI2_REDIST}
+ ${Boost_LIBRARIES} )
+
+add_library(astra_driver_lib
+ src/astra_driver.cpp
+)
+target_link_libraries(astra_driver_lib astra_wrapper ${catkin_LIBRARIES} ${Boost_LIBRARIES} )
+add_dependencies(astra_driver_lib ${PROJECT_NAME}_gencfg)
+
+add_library(astra_camera_nodelet
+ ros/astra_camera_nodelet.cpp
+)
+target_link_libraries(astra_camera_nodelet astra_driver_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} )
+add_dependencies(astra_camera_nodelet ${PROJECT_NAME}_gencfg)
+
+add_executable(astra_camera_node
+ ros/astra_camera_node.cpp
+)
+target_link_libraries(astra_camera_node astra_driver_lib ${catkin_LIBRARIES} ${Boost_LIBRARIES} )
+add_dependencies(astra_camera_node ${PROJECT_NAME}_gencfg ${PROJECT_NAME}_generate_messages_cpp)
+
+add_executable(astra_list_devices
+ src/list_devices.cpp
+)
+target_link_libraries(astra_list_devices astra_wrapper)
+
+add_executable(astra_test_wrapper test/test_wrapper.cpp )
+target_link_libraries(astra_test_wrapper astra_wrapper ${Boost_LIBRARIES})
+if (UNIX AND NOT APPLE)
+ add_executable(astra_usb_reset src/usb_reset.c)
+ set(ADDITIONAL_EXECUTABLES "astra_usb_reset")
+endif()
+
+install(TARGETS astra_wrapper astra_camera_nodelet astra_camera_node astra_list_devices astra_driver_lib ${ADDITIONAL_EXECUTABLES}
+ ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+ LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION}
+ RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}
+ )
+
+install(DIRECTORY include/${PROJECT_NAME}/
+ DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION}
+)
+
+# add xml file
+install(FILES astra_nodelets.xml
+ DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}
+)
+
+# copy runtime files to bin; wildcards (*.*) are not supported
+#execute_process(COMMAND cp -R ${CMAKE_CURRENT_SOURCE_DIR}/include/openni2_redist/OpenNI2 ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/
+# COMMAND cp -R ${CMAKE_CURRENT_SOURCE_DIR}/include/openni2_redist/libOpenNI2.jni.so ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/
+# COMMAND cp -R ${CMAKE_CURRENT_SOURCE_DIR}/include/openni2_redist/libOpenNI2.so ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/
+# COMMAND cp -R ${CMAKE_CURRENT_SOURCE_DIR}/include/openni2_redist/OpenNI.ini ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/
+# COMMAND cp -R ${CMAKE_CURRENT_SOURCE_DIR}/include/openni2_redist/org.openni.jar ${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/
+#)
+
+
+
diff --git a/COPYRIGHT.txt b/COPYRIGHT.txt
new file mode 100644
index 00000000..85d2b68b
--- /dev/null
+++ b/COPYRIGHT.txt
@@ -0,0 +1,33 @@
+This project includes source code derived from the openni2_camera project by Willow Garage.
+The original license of the openni2_camera project is included below:
+
+/*
+ * Copyright (c) 2013, Willow Garage, Inc.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Willow Garage, Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Julius Kammerl (jkammerl@willowgarage.com)
+ */
diff --git a/astra_nodelets.xml b/astra_nodelets.xml
new file mode 100644
index 00000000..0b1bd850
--- /dev/null
+++ b/astra_nodelets.xml
@@ -0,0 +1,9 @@
+<library path="lib/libastra_camera_nodelet">
+  <class name="astra_camera/AstraDriverNodelet"
+         type="astra_camera::AstraDriverNodelet"
+         base_class_type="nodelet::Nodelet">
+    <description>
+        Astra camera driver nodelet.
+    </description>
+  </class>
+</library>
diff --git a/cfg/Astra.cfg b/cfg/Astra.cfg
new file mode 100644
index 00000000..245cca7c
--- /dev/null
+++ b/cfg/Astra.cfg
@@ -0,0 +1,49 @@
+#! /usr/bin/env python
+
+PACKAGE='astra_camera'
+
+from dynamic_reconfigure.parameter_generator_catkin import *
+
+gen = ParameterGenerator()
+
+# TODO Only offer modes supported by known hardware
+output_mode_enum = gen.enum([ gen.const( "SXGA_30Hz", int_t, 1, "1280x1024@30Hz"),
+ gen.const( "SXGA_15Hz", int_t, 2, "1280x1024@15Hz"),
+ gen.const( "XGA_30Hz", int_t, 3, "1280x720@30Hz"),
+ gen.const( "XGA_15Hz", int_t, 4, "1280x720@15Hz"),
+ gen.const( "VGA_30Hz", int_t, 5, "640x480@30Hz"),
+ gen.const( "VGA_25Hz", int_t, 6, "640x480@25Hz"),
+ gen.const( "QVGA_25Hz", int_t, 7, "320x240@25Hz"),
+ gen.const( "QVGA_30Hz", int_t, 8, "320x240@30Hz"),
+ gen.const( "QVGA_60Hz", int_t, 9, "320x240@60Hz"),
+ gen.const( "QQVGA_25Hz", int_t, 10, "160x120@25Hz"),
+ gen.const( "QQVGA_30Hz", int_t, 11, "160x120@30Hz"),
+ gen.const( "QQVGA_60Hz", int_t, 12, "160x120@60Hz")],
+ "output mode")
+
+
+gen.add("ir_mode", int_t, 0, "Video mode for IR camera", 5, 1, 12, edit_method = output_mode_enum)
+gen.add("color_mode", int_t, 0, "Video mode for color camera", 5, 1, 12, edit_method = output_mode_enum)
+gen.add("depth_mode", int_t, 0, "Video mode for depth camera", 5, 1, 12, edit_method = output_mode_enum)
+
+gen.add("depth_registration", bool_t, 0, "Depth data registration", True)
+gen.add("color_depth_synchronization", bool_t, 0, "Synchronization of color and depth camera", False)
+gen.add("auto_exposure", bool_t, 0, "Auto-Exposure", True)
+gen.add("auto_white_balance", bool_t, 0, "Auto-White-Balance", True)
+
+gen.add("data_skip", int_t, 0, "Skip N images for every image published (rgb/depth/depth_registered/ir)", 0, 0, 10)
+
+gen.add("ir_time_offset", double_t, 0, "ir image time offset in seconds", -0.033, -1.0, 1.0 );
+gen.add("color_time_offset", double_t, 0, "color image time offset in seconds", -0.033, -1.0, 1.0 );
+gen.add("depth_time_offset", double_t, 0, "depth image time offset in seconds", -0.033, -1.0, 1.0 );
+
+gen.add("depth_ir_offset_x", double_t, 0, "X offset between IR and depth images", 5.0, -10.0, 10.0)
+gen.add("depth_ir_offset_y", double_t, 0, "Y offset between IR and depth images", 4.0, -10.0, 10.0)
+
+gen.add("z_offset_mm", int_t, 0, "Z offset in mm", 0, -200, 200)
+gen.add("z_scaling", double_t, 0, "Scaling factor for depth values", 1.0, 0.5, 1.5)
+
+gen.add("use_device_time", bool_t, 0, "Use internal timer of OpenNI device", True)
+
+exit(gen.generate(PACKAGE, "Astra", "Astra"))
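+# Example runtime usage (a sketch; assumes the driver node runs as /camera/driver):
+#   rosrun dynamic_reconfigure dynparam set /camera/driver depth_mode 5
+#   rosrun dynamic_reconfigure dynparam set /camera/driver depth_registration true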
+
diff --git a/include/astra_camera/astra_convert.h b/include/astra_camera/astra_convert.h
new file mode 100644
index 00000000..3a259a83
--- /dev/null
+++ b/include/astra_camera/astra_convert.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+
+#ifndef ASTRA_CONVERT_H_
+#define ASTRA_CONVERT_H_
+
+#include "astra_camera/astra_device_info.h"
+#include "astra_camera/astra_video_mode.h"
+
+#include "openni2/OpenNI.h"
+
+#include <vector>
+
+namespace astra_wrapper
+{
+
+const AstraDeviceInfo astra_convert(const openni::DeviceInfo* pInfo);
+
+const AstraVideoMode astra_convert(const openni::VideoMode& input);
+const openni::VideoMode astra_convert(const AstraVideoMode& input);
+
+const std::vector<AstraDeviceInfo> astra_convert(const openni::Array<openni::DeviceInfo>& input);
+}
+
+#endif
diff --git a/include/astra_camera/astra_device.h b/include/astra_camera/astra_device.h
new file mode 100644
index 00000000..bb16bfb1
--- /dev/null
+++ b/include/astra_camera/astra_device.h
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#ifndef ASTRA_DEVICE_H
+#define ASTRA_DEVICE_H
+
+#include "astra_camera/astra_video_mode.h"
+
+#include "astra_camera/astra_exception.h"
+
+#include <boost/shared_ptr.hpp>
+#include <boost/cstdint.hpp>
+#include <boost/bind.hpp>
+#include <boost/function.hpp>
+
+#include <sensor_msgs/Image.h>
+
+#include <string>
+#include <vector>
+
+namespace openni
+{
+class Device;
+class DeviceInfo;
+class VideoStream;
+class SensorInfo;
+}
+
+namespace astra_wrapper
+{
+
+typedef boost::function<void(sensor_msgs::ImagePtr image)> FrameCallbackFunction;
+
+class AstraFrameListener;
+
+class AstraDevice
+{
+public:
+ AstraDevice(const std::string& device_URI) throw (AstraException);
+ virtual ~AstraDevice();
+
+ const std::string getUri() const;
+ const std::string getVendor() const;
+ const std::string getName() const;
+ uint16_t getUsbVendorId() const;
+ uint16_t getUsbProductId() const;
+
+ const std::string getStringID() const;
+
+ bool isValid() const;
+
+ bool hasIRSensor() const;
+ bool hasColorSensor() const;
+ bool hasDepthSensor() const;
+
+ void startIRStream();
+ void startColorStream();
+ void startDepthStream();
+
+ void stopAllStreams();
+
+ void stopIRStream();
+ void stopColorStream();
+ void stopDepthStream();
+
+ bool isIRStreamStarted();
+ bool isColorStreamStarted();
+ bool isDepthStreamStarted();
+
+ bool isImageRegistrationModeSupported() const;
+ void setImageRegistrationMode(bool enabled) throw (AstraException);
+ void setDepthColorSync(bool enabled) throw (AstraException);
+
+ const AstraVideoMode getIRVideoMode() throw (AstraException);
+ const AstraVideoMode getColorVideoMode() throw (AstraException);
+ const AstraVideoMode getDepthVideoMode() throw (AstraException);
+
+ const std::vector<AstraVideoMode>& getSupportedIRVideoModes() const;
+ const std::vector<AstraVideoMode>& getSupportedColorVideoModes() const;
+ const std::vector<AstraVideoMode>& getSupportedDepthVideoModes() const;
+
+ bool isIRVideoModeSupported(const AstraVideoMode& video_mode) const;
+ bool isColorVideoModeSupported(const AstraVideoMode& video_mode) const;
+ bool isDepthVideoModeSupported(const AstraVideoMode& video_mode) const;
+
+ void setIRVideoMode(const AstraVideoMode& video_mode) throw (AstraException);
+ void setColorVideoMode(const AstraVideoMode& video_mode) throw (AstraException);
+ void setDepthVideoMode(const AstraVideoMode& video_mode) throw (AstraException);
+
+ void setIRFrameCallback(FrameCallbackFunction callback);
+ void setColorFrameCallback(FrameCallbackFunction callback);
+ void setDepthFrameCallback(FrameCallbackFunction callback);
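+ // A callback is typically bound to a driver method with boost::bind, e.g. (sketch):
+ //   device->setColorFrameCallback(boost::bind(&AstraDriver::newColorFrameCallback, &driver, _1));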
+
+ float getIRFocalLength (int output_y_resolution) const;
+ float getColorFocalLength (int output_y_resolution) const;
+ float getDepthFocalLength (int output_y_resolution) const;
+
+ void setAutoExposure(bool enable) throw (AstraException);
+ void setAutoWhiteBalance(bool enable) throw (AstraException);
+
+ bool getAutoExposure() const;
+ bool getAutoWhiteBalance() const;
+
+ void setUseDeviceTimer(bool enable);
+
+protected:
+ void shutdown();
+
+ boost::shared_ptr<openni::VideoStream> getIRVideoStream() const throw (AstraException);
+ boost::shared_ptr<openni::VideoStream> getColorVideoStream() const throw (AstraException);
+ boost::shared_ptr<openni::VideoStream> getDepthVideoStream() const throw (AstraException);
+
+ boost::shared_ptr<openni::Device> openni_device_;
+ boost::shared_ptr<openni::DeviceInfo> device_info_;
+
+ boost::shared_ptr<AstraFrameListener> ir_frame_listener;
+ boost::shared_ptr<AstraFrameListener> color_frame_listener;
+ boost::shared_ptr<AstraFrameListener> depth_frame_listener;
+
+ mutable boost::shared_ptr<openni::VideoStream> ir_video_stream_;
+ mutable boost::shared_ptr<openni::VideoStream> color_video_stream_;
+ mutable boost::shared_ptr<openni::VideoStream> depth_video_stream_;
+
+ mutable std::vector<AstraVideoMode> ir_video_modes_;
+ mutable std::vector<AstraVideoMode> color_video_modes_;
+ mutable std::vector<AstraVideoMode> depth_video_modes_;
+
+ bool ir_video_started_;
+ bool color_video_started_;
+ bool depth_video_started_;
+
+ bool image_registration_activated_;
+
+ bool use_device_time_;
+
+};
+
+std::ostream& operator << (std::ostream& stream, const AstraDevice& device);
+
+}
+
+#endif /* ASTRA_DEVICE_H */
diff --git a/include/astra_camera/astra_device_info.h b/include/astra_camera/astra_device_info.h
new file mode 100644
index 00000000..3fd0e795
--- /dev/null
+++ b/include/astra_camera/astra_device_info.h
@@ -0,0 +1,55 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#ifndef ASTRA_DEVICE_INFO_H_
+#define ASTRA_DEVICE_INFO_H_
+
+#include <ostream>
+
+#include <string>
+
+namespace astra_wrapper
+{
+
+struct AstraDeviceInfo
+{
+ std::string uri_;
+ std::string vendor_;
+ std::string name_;
+ uint16_t vendor_id_;
+ uint16_t product_id_;
+};
+
+std::ostream& operator << (std::ostream& stream, const AstraDeviceInfo& device_info);
+
+}
+
+#endif /* ASTRA_DEVICE_INFO_H_ */
diff --git a/include/astra_camera/astra_device_manager.h b/include/astra_camera/astra_device_manager.h
new file mode 100644
index 00000000..6143c85d
--- /dev/null
+++ b/include/astra_camera/astra_device_manager.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#ifndef ASTRA_DEVICE_MANAGER_H_
+#define ASTRA_DEVICE_MANAGER_H_
+
+#include "astra_camera/astra_device_info.h"
+
+#include <boost/shared_ptr.hpp>
+
+#include <vector>
+#include <string>
+#include <ostream>
+
+namespace astra_wrapper
+{
+
+class AstraDeviceListener;
+class AstraDevice;
+
+class AstraDeviceManager
+{
+public:
+ AstraDeviceManager();
+ virtual ~AstraDeviceManager();
+
+ static boost::shared_ptr<AstraDeviceManager> getSingelton();
+
+ boost::shared_ptr<std::vector<AstraDeviceInfo> > getConnectedDeviceInfos() const;
+ boost::shared_ptr<std::vector<std::string> > getConnectedDeviceURIs() const;
+ std::size_t getNumOfConnectedDevices() const;
+
+ boost::shared_ptr<AstraDevice> getAnyDevice();
+ boost::shared_ptr<AstraDevice> getDevice(const std::string& device_URI);
+
+ std::string getSerial(const std::string& device_URI) const;
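+
+ // Typical usage (sketch):
+ //   boost::shared_ptr<AstraDevice> device =
+ //       AstraDeviceManager::getSingelton()->getAnyDevice();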
+
+protected:
+ boost::shared_ptr<AstraDeviceListener> device_listener_;
+
+ static boost::shared_ptr<AstraDeviceManager> singelton_;
+};
+
+
+std::ostream& operator <<(std::ostream& stream, const AstraDeviceManager& device_manager);
+
+}
+
+#endif
diff --git a/include/astra_camera/astra_driver.h b/include/astra_camera/astra_driver.h
new file mode 100644
index 00000000..c47957a8
--- /dev/null
+++ b/include/astra_camera/astra_driver.h
@@ -0,0 +1,179 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#ifndef ASTRA_DRIVER_H
+#define ASTRA_DRIVER_H
+
+#include <boost/shared_ptr.hpp>
+#include <boost/cstdint.hpp>
+#include <boost/bind.hpp>
+#include <boost/function.hpp>
+
+#include <sensor_msgs/Image.h>
+
+#include <dynamic_reconfigure/server.h>
+#include <astra_camera/AstraConfig.h>
+
+#include <image_transport/image_transport.h>
+#include <camera_info_manager/camera_info_manager.h>
+
+#include <string>
+#include <vector>
+
+#include "astra_camera/astra_device_manager.h"
+#include "astra_camera/astra_device.h"
+#include "astra_camera/astra_video_mode.h"
+#include "astra_camera/GetSerial.h"
+
+#include <ros/ros.h>
+
+namespace astra_wrapper
+{
+
+class AstraDriver
+{
+public:
+ AstraDriver(ros::NodeHandle& n, ros::NodeHandle& pnh);
+
+private:
+ typedef astra_camera::AstraConfig Config;
+ typedef dynamic_reconfigure::Server<Config> ReconfigureServer;
+
+ void newIRFrameCallback(sensor_msgs::ImagePtr image);
+ void newColorFrameCallback(sensor_msgs::ImagePtr image);
+ void newDepthFrameCallback(sensor_msgs::ImagePtr image);
+
+ // Methods to get calibration parameters for the various cameras
+ sensor_msgs::CameraInfoPtr getDefaultCameraInfo(int width, int height, double f) const;
+ sensor_msgs::CameraInfoPtr getColorCameraInfo(int width, int height, ros::Time time) const;
+ sensor_msgs::CameraInfoPtr getIRCameraInfo(int width, int height, ros::Time time) const;
+ sensor_msgs::CameraInfoPtr getDepthCameraInfo(int width, int height, ros::Time time) const;
+
+ void readConfigFromParameterServer();
+
+ // resolves non-URI device IDs to URIs, e.g. '#1' is resolved to the URI of the first device
+ std::string resolveDeviceURI(const std::string& device_id) throw(AstraException);
+ void initDevice();
+
+ void advertiseROSTopics();
+
+ void colorConnectCb();
+ void depthConnectCb();
+ void irConnectCb();
+
+ bool getSerialCb(astra_camera::GetSerialRequest& req, astra_camera::GetSerialResponse& res);
+
+ void configCb(Config &config, uint32_t level);
+
+ void applyConfigToOpenNIDevice();
+
+ void genVideoModeTableMap();
+ int lookupVideoModeFromDynConfig(int mode_nr, AstraVideoMode& video_mode);
+
+ sensor_msgs::ImageConstPtr rawToFloatingPointConversion(sensor_msgs::ImageConstPtr raw_image);
+
+ void setIRVideoMode(const AstraVideoMode& ir_video_mode);
+ void setColorVideoMode(const AstraVideoMode& color_video_mode);
+ void setDepthVideoMode(const AstraVideoMode& depth_video_mode);
+
+ ros::NodeHandle& nh_;
+ ros::NodeHandle& pnh_;
+
+ boost::shared_ptr<AstraDeviceManager> device_manager_;
+ boost::shared_ptr<AstraDevice> device_;
+
+ std::string device_id_;
+
+ /** \brief get_serial server*/
+ ros::ServiceServer get_serial_server;
+
+ /** \brief reconfigure server*/
+ boost::shared_ptr<ReconfigureServer> reconfigure_server_;
+ bool config_init_;
+
+ boost::mutex connect_mutex_;
+ // published topics
+ image_transport::CameraPublisher pub_color_;
+ image_transport::CameraPublisher pub_depth_;
+ image_transport::CameraPublisher pub_depth_raw_;
+ image_transport::CameraPublisher pub_ir_;
+ ros::Publisher pub_projector_info_;
+
+ /** \brief Camera info manager objects. */
+ boost::shared_ptr<camera_info_manager::CameraInfoManager> color_info_manager_, ir_info_manager_;
+
+ AstraVideoMode ir_video_mode_;
+ AstraVideoMode color_video_mode_;
+ AstraVideoMode depth_video_mode_;
+
+ std::string ir_frame_id_;
+ std::string color_frame_id_;
+ std::string depth_frame_id_;
+
+ std::string color_info_url_, ir_info_url_;
+
+ bool color_depth_synchronization_;
+ bool depth_registration_;
+
+ std::map<int, AstraVideoMode> video_modes_lookup_;
+
+ // dynamic reconfigure config
+ double depth_ir_offset_x_;
+ double depth_ir_offset_y_;
+ int z_offset_mm_;
+ double z_scaling_;
+
+ ros::Duration ir_time_offset_;
+ ros::Duration color_time_offset_;
+ ros::Duration depth_time_offset_;
+
+ int data_skip_;
+
+ int data_skip_ir_counter_;
+ int data_skip_color_counter_;
+ int data_skip_depth_counter_;
+
+ bool auto_exposure_;
+ bool auto_white_balance_;
+
+ bool ir_subscribers_;
+ bool color_subscribers_;
+ bool depth_subscribers_;
+ bool depth_raw_subscribers_;
+
+ bool use_device_time_;
+
+ Config old_config_;
+};
+
+}
+
+#endif
diff --git a/include/astra_camera/astra_exception.h b/include/astra_camera/astra_exception.h
new file mode 100644
index 00000000..d45f1217
--- /dev/null
+++ b/include/astra_camera/astra_exception.h
@@ -0,0 +1,90 @@
+/*
+ * Software License Agreement (BSD License)
+ *
+ * Copyright (c) 2016, Orbbec Ltd.
+ * Tim Liu
+ *
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
+ * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+#ifndef __ASTRA_EXCEPTION__
+#define __ASTRA_EXCEPTION__
+
+#include <cstdarg>
+#include <cstdio>
+#include <exception>
+#include <string>
+
+#if defined _WIN32 && defined _MSC_VER
+# define __PRETTY_FUNCTION__ __FUNCTION__
+#endif
+#define THROW_OPENNI_EXCEPTION(format,...) throwOpenNIException( __PRETTY_FUNCTION__, __FILE__, __LINE__, format , ##__VA_ARGS__ )
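+// Usage (sketch): THROW_OPENNI_EXCEPTION("Initialize failed\n%s", openni::OpenNI::getExtendedError());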
+
+namespace astra_wrapper
+{
+/**
+ * @brief General exception class
+ * @author Suat Gedikli
+ * @date 02.january 2011
+ */
+class AstraException : public std::exception
+{
+public:
+ AstraException(const std::string& function_name,
+ const std::string& file_name,
+ unsigned line_number,
+ const std::string& message) throw ();
+
+ virtual ~AstraException() throw ();
+ AstraException & operator=(const AstraException& exception) throw ();
+ virtual const char* what() const throw ();
+
+ const std::string& getFunctionName() const throw ();
+ const std::string& getFileName() const throw ();
+ unsigned getLineNumber() const throw ();
+
+protected:
+ std::string function_name_;
+ std::string file_name_;
+ unsigned line_number_;
+ std::string message_;
+ std::string message_long_;
+};
+
+inline void throwOpenNIException(const char* function, const char* file, unsigned line, const char* format, ...)
+{
+ static char msg[1024];
+ va_list args;
+ va_start(args, format);
+ vsnprintf(msg, sizeof(msg), format, args); // bounded, unlike vsprintf, so msg cannot overflow
+ va_end(args);
+ throw AstraException(function, file, line, msg);
+}
+} // namespace astra_wrapper
+#endif
diff --git a/include/astra_camera/astra_frame_listener.h b/include/astra_camera/astra_frame_listener.h
new file mode 100644
index 00000000..91f5af88
--- /dev/null
+++ b/include/astra_camera/astra_frame_listener.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#ifndef ASTRA_FRAME_LISTENER_H_
+#define ASTRA_FRAME_LISTENER_H_
+
+#include "astra_camera/astra_device.h"
+
+#include <sensor_msgs/Image.h>
+
+#include <boost/shared_ptr.hpp>
+
+#include "openni2/OpenNI.h"
+
+namespace astra_wrapper
+{
+
+class AstraTimerFilter;
+
+class AstraFrameListener : public openni::VideoStream::NewFrameListener
+{
+public:
+ AstraFrameListener();
+
+ virtual ~AstraFrameListener()
+ { };
+
+ void onNewFrame(openni::VideoStream& stream);
+
+ void setCallback(FrameCallbackFunction& callback)
+ {
+ callback_ = callback;
+ }
+
+ void setUseDeviceTimer(bool enable);
+
+private:
+ openni::VideoFrameRef m_frame;
+
+ FrameCallbackFunction callback_;
+
+ bool user_device_timer_;
+ boost::shared_ptr<AstraTimerFilter> timer_filter_;
+
+ double prev_time_stamp_;
+};
+
+}
+
+#endif
diff --git a/include/astra_camera/astra_timer_filter.h b/include/astra_camera/astra_timer_filter.h
new file mode 100644
index 00000000..e5fba484
--- /dev/null
+++ b/include/astra_camera/astra_timer_filter.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#ifndef ASTRA_TIME_FILTER_H_
+#define ASTRA_TIME_FILTER_H_
+
+#include <deque>
+
+#include "openni2/OpenNI.h"
+
+namespace astra_wrapper
+{
+
+class AstraTimerFilter
+{
+public:
+ AstraTimerFilter(std::size_t filter_len);
+ virtual ~AstraTimerFilter();
+
+ void addSample(double sample);
+
+ double getMedian();
+ double getMovingAvg();
+
+ void clear();
+
+private:
+ std::size_t filter_len_;
+
+ std::deque buffer_;
+};
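+
+// Used by AstraFrameListener to smooth the offset between device and host timestamps,
+// e.g. (sketch):
+//   filter.addSample(host_time - device_time);
+//   double offset = filter.getMedian();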
+
+}
+
+#endif
diff --git a/include/astra_camera/astra_video_mode.h b/include/astra_camera/astra_video_mode.h
new file mode 100644
index 00000000..75621a4a
--- /dev/null
+++ b/include/astra_camera/astra_video_mode.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#ifndef ASTRA_VIDEO_MODE_H_
+#define ASTRA_VIDEO_MODE_H_
+
+#include <cstddef>
+#include <ostream>
+
+namespace astra_wrapper
+{
+
+// copied from OniEnums.h
+typedef enum
+{
+ // Depth
+ PIXEL_FORMAT_DEPTH_1_MM = 100,
+ PIXEL_FORMAT_DEPTH_100_UM = 101,
+ PIXEL_FORMAT_SHIFT_9_2 = 102,
+ PIXEL_FORMAT_SHIFT_9_3 = 103,
+
+ // Color
+ PIXEL_FORMAT_RGB888 = 200,
+ PIXEL_FORMAT_YUV422 = 201,
+ PIXEL_FORMAT_GRAY8 = 202,
+ PIXEL_FORMAT_GRAY16 = 203,
+ PIXEL_FORMAT_JPEG = 204,
+} PixelFormat;
+
+struct AstraVideoMode
+{
+ std::size_t x_resolution_;
+ std::size_t y_resolution_;
+ double frame_rate_;
+ PixelFormat pixel_format_;
+};
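+
+// Example (sketch): a 640x480@30Hz depth mode would be
+//   AstraVideoMode vga_30 = { 640, 480, 30.0, PIXEL_FORMAT_DEPTH_1_MM };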
+
+std::ostream& operator << (std::ostream& stream, const AstraVideoMode& video_mode);
+
+bool operator==(const AstraVideoMode& video_mode_a, const AstraVideoMode& video_mode_b);
+bool operator!=(const AstraVideoMode& video_mode_a, const AstraVideoMode& video_mode_b);
+
+}
+
+#endif
diff --git a/include/openni2/Android-Arm/OniPlatformAndroid-Arm.h b/include/openni2/Android-Arm/OniPlatformAndroid-Arm.h
new file mode 100644
index 00000000..0aae276b
--- /dev/null
+++ b/include/openni2/Android-Arm/OniPlatformAndroid-Arm.h
@@ -0,0 +1,43 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_PLATFORM_ANDROID_ARM_H_
+#define _ONI_PLATFORM_ANDROID_ARM_H_
+
+// Start with Linux-x86, and override what's different
+#include "../Linux-x86/OniPlatformLinux-x86.h"
+
+//---------------------------------------------------------------------------
+// Platform Basic Definition
+//---------------------------------------------------------------------------
+#undef ONI_PLATFORM
+#undef ONI_PLATFORM_STRING
+
+#define ONI_PLATFORM ONI_PLATFORM_ANDROID_ARM
+#define ONI_PLATFORM_STRING "Android-Arm"
+
+#ifdef HAVE_ANDROID_OS
+ #define ONI_PLATFORM_ANDROID_OS
+
+ #undef ONI_PLATFORM_STRING
+ #define ONI_PLATFORM_STRING "AndroidOS-Arm"
+#endif
+
+#endif //_ONI_PLATFORM_ANDROID_ARM_H_
diff --git a/include/openni2/Driver/OniDriverAPI.h b/include/openni2/Driver/OniDriverAPI.h
new file mode 100644
index 00000000..c41e1f66
--- /dev/null
+++ b/include/openni2/Driver/OniDriverAPI.h
@@ -0,0 +1,378 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_DRIVER_API_H_
+#define _ONI_DRIVER_API_H_
+
+#include "OniPlatform.h"
+#include "OniCTypes.h"
+#include "OniCProperties.h"
+#include "OniDriverTypes.h"
+#include <stdarg.h>
+
+namespace oni { namespace driver {
+
+class DeviceBase;
+class StreamBase;
+
+typedef void (ONI_CALLBACK_TYPE* DeviceConnectedCallback)(const OniDeviceInfo*, void* pCookie);
+typedef void (ONI_CALLBACK_TYPE* DeviceDisconnectedCallback)(const OniDeviceInfo*, void* pCookie);
+typedef void (ONI_CALLBACK_TYPE* DeviceStateChangedCallback)(const OniDeviceInfo* deviceId, int errorState, void* pCookie);
+typedef void (ONI_CALLBACK_TYPE* NewFrameCallback)(StreamBase* streamId, OniFrame*, void* pCookie);
+typedef void (ONI_CALLBACK_TYPE* PropertyChangedCallback)(void* sender, int propertyId, const void* data, int dataSize, void* pCookie);
+
+class StreamServices : public OniStreamServices
+{
+public:
+ int getDefaultRequiredFrameSize()
+ {
+ return OniStreamServices::getDefaultRequiredFrameSize(streamServices);
+ }
+
+ OniFrame* acquireFrame()
+ {
+ return OniStreamServices::acquireFrame(streamServices);
+ }
+
+ void addFrameRef(OniFrame* pFrame)
+ {
+ OniStreamServices::addFrameRef(streamServices, pFrame);
+ }
+
+ void releaseFrame(OniFrame* pFrame)
+ {
+ OniStreamServices::releaseFrame(streamServices, pFrame);
+ }
+};
+
+class StreamBase
+{
+public:
+ StreamBase() : m_newFrameCallback(NULL), m_propertyChangedCallback(NULL) {}
+ virtual ~StreamBase() {}
+
+ virtual void setServices(StreamServices* pStreamServices) { m_pServices = pStreamServices; }
+
+ virtual OniStatus setProperty(int /*propertyId*/, const void* /*data*/, int /*dataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;}
+ virtual OniStatus getProperty(int /*propertyId*/, void* /*data*/, int* /*pDataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;}
+ virtual OniBool isPropertySupported(int /*propertyId*/) {return FALSE;}
+ virtual OniStatus invoke(int /*commandId*/, void* /*data*/, int /*dataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;}
+ virtual OniBool isCommandSupported(int /*commandId*/) {return FALSE;}
+
+ virtual int getRequiredFrameSize() { return getServices().getDefaultRequiredFrameSize(); }
+
+ virtual OniStatus start() = 0;
+ virtual void stop() = 0;
+
+ virtual void setNewFrameCallback(NewFrameCallback handler, void* pCookie) { m_newFrameCallback = handler; m_newFrameCallbackCookie = pCookie; }
+ virtual void setPropertyChangedCallback(PropertyChangedCallback handler, void* pCookie) { m_propertyChangedCallback = handler; m_propertyChangedCookie = pCookie; }
+
+ virtual void notifyAllProperties() { return; }
+
+ virtual OniStatus convertDepthToColorCoordinates(StreamBase* /*colorStream*/, int /*depthX*/, int /*depthY*/, OniDepthPixel /*depthZ*/, int* /*pColorX*/, int* /*pColorY*/) { return ONI_STATUS_NOT_SUPPORTED; }
+
+protected:
+ void raiseNewFrame(OniFrame* pFrame) { (*m_newFrameCallback)(this, pFrame, m_newFrameCallbackCookie); }
+ void raisePropertyChanged(int propertyId, const void* data, int dataSize) { (*m_propertyChangedCallback)(this, propertyId, data, dataSize, m_propertyChangedCookie); }
+
+ StreamServices& getServices() { return *m_pServices; }
+
+private:
+ StreamServices* m_pServices;
+ NewFrameCallback m_newFrameCallback;
+ void* m_newFrameCallbackCookie;
+ PropertyChangedCallback m_propertyChangedCallback;
+ void* m_propertyChangedCookie;
+};
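+
+// A concrete StreamBase implementation typically publishes frames through the stream
+// services, e.g. (sketch):
+//   OniFrame* pFrame = getServices().acquireFrame();
+//   // ... fill pFrame->data, width, height, timestamp ...
+//   raiseNewFrame(pFrame);
+//   getServices().releaseFrame(pFrame);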
+
+class DeviceBase
+{
+public:
+ DeviceBase() {}
+ virtual ~DeviceBase() {}
+
+ virtual OniStatus getSensorInfoList(OniSensorInfo** pSensorInfos, int* numSensors) = 0;
+
+ virtual StreamBase* createStream(OniSensorType) = 0;
+ virtual void destroyStream(StreamBase* pStream) = 0;
+
+ virtual OniStatus setProperty(int /*propertyId*/, const void* /*data*/, int /*dataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;}
+ virtual OniStatus getProperty(int /*propertyId*/, void* /*data*/, int* /*pDataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;}
+ virtual OniBool isPropertySupported(int /*propertyId*/) {return FALSE;}
+ virtual OniStatus invoke(int /*commandId*/, void* /*data*/, int /*dataSize*/) {return ONI_STATUS_NOT_IMPLEMENTED;}
+ virtual OniBool isCommandSupported(int /*commandId*/) {return FALSE;}
+ virtual OniStatus tryManualTrigger() {return ONI_STATUS_OK;}
+
+ virtual void setPropertyChangedCallback(PropertyChangedCallback handler, void* pCookie) { m_propertyChangedCallback = handler; m_propertyChangedCookie = pCookie; }
+ virtual void notifyAllProperties() { return; }
+
+ virtual OniBool isImageRegistrationModeSupported(OniImageRegistrationMode mode) { return (mode == ONI_IMAGE_REGISTRATION_OFF); }
+
+protected:
+ void raisePropertyChanged(int propertyId, const void* data, int dataSize) { (*m_propertyChangedCallback)(this, propertyId, data, dataSize, m_propertyChangedCookie); }
+
+private:
+ PropertyChangedCallback m_propertyChangedCallback;
+ void* m_propertyChangedCookie;
+};
+
+class DriverServices
+{
+public:
+ DriverServices(OniDriverServices* pDriverServices) : m_pDriverServices(pDriverServices) {}
+
+ void errorLoggerAppend(const char* format, ...)
+ {
+ va_list args;
+ va_start(args, format);
+ m_pDriverServices->errorLoggerAppend(m_pDriverServices->driverServices, format, args);
+ va_end(args);
+ }
+
+ void errorLoggerClear()
+ {
+ m_pDriverServices->errorLoggerClear(m_pDriverServices->driverServices);
+ }
+
+ void log(int severity, const char* file, int line, const char* mask, const char* message)
+ {
+ m_pDriverServices->log(m_pDriverServices->driverServices, severity, file, line, mask, message);
+ }
+
+private:
+ OniDriverServices* m_pDriverServices;
+};
+
+class DriverBase
+{
+public:
+ DriverBase(OniDriverServices* pDriverServices) : m_services(pDriverServices)
+ {}
+
+ virtual ~DriverBase() {}
+
+ virtual OniStatus initialize(DeviceConnectedCallback connectedCallback, DeviceDisconnectedCallback disconnectedCallback, DeviceStateChangedCallback deviceStateChangedCallback, void* pCookie)
+ {
+ m_deviceConnectedEvent = connectedCallback;
+ m_deviceDisconnectedEvent = disconnectedCallback;
+ m_deviceStateChangedEvent = deviceStateChangedCallback;
+ m_pCookie = pCookie;
+ return ONI_STATUS_OK;
+ }
+
+ virtual DeviceBase* deviceOpen(const char* uri, const char* mode) = 0;
+ virtual void deviceClose(DeviceBase* pDevice) = 0;
+
+ virtual void shutdown() = 0;
+
+ virtual OniStatus tryDevice(const char* /*uri*/) { return ONI_STATUS_ERROR;}
+
+ virtual void* enableFrameSync(StreamBase** /*pStreams*/, int /*streamCount*/) { return NULL; }
+ virtual void disableFrameSync(void* /*frameSyncGroup*/) {}
+
+protected:
+ void deviceConnected(const OniDeviceInfo* pInfo) { (m_deviceConnectedEvent)(pInfo, m_pCookie); }
+ void deviceDisconnected(const OniDeviceInfo* pInfo) { (m_deviceDisconnectedEvent)(pInfo, m_pCookie); }
+ void deviceStateChanged(const OniDeviceInfo* pInfo, int errorState) { (m_deviceStateChangedEvent)(pInfo, errorState, m_pCookie); }
+
+ DriverServices& getServices() { return m_services; }
+
+private:
+ DeviceConnectedCallback m_deviceConnectedEvent;
+ DeviceDisconnectedCallback m_deviceDisconnectedEvent;
+ DeviceStateChangedCallback m_deviceStateChangedEvent;
+ void* m_pCookie;
+
+ DriverServices m_services;
+};
+
+}} // oni::driver
+
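+// A driver module instantiates the export glue below exactly once, e.g. (sketch,
+// MyVendorDriver being a hypothetical subclass of oni::driver::DriverBase):
+//   ONI_EXPORT_DRIVER(MyVendorDriver);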
+#define ONI_EXPORT_DRIVER(DriverClass) \
+ \
+oni::driver::DriverBase* g_pDriver = NULL; \
+ \
+/* As Driver */ \
+ONI_C_API_EXPORT void oniDriverCreate(OniDriverServices* driverServices) { \
+ g_pDriver = XN_NEW(DriverClass, driverServices); \
+} \
+ONI_C_API_EXPORT void oniDriverDestroy() \
+{ \
+ g_pDriver->shutdown(); \
+ XN_DELETE(g_pDriver); g_pDriver = NULL; \
+} \
+ONI_C_API_EXPORT OniStatus oniDriverInitialize(oni::driver::DeviceConnectedCallback deviceConnectedCallback, \
+ oni::driver::DeviceDisconnectedCallback deviceDisconnectedCallback, \
+ oni::driver::DeviceStateChangedCallback deviceStateChangedCallback, \
+ void* pCookie) \
+{ \
+ return g_pDriver->initialize(deviceConnectedCallback, deviceDisconnectedCallback, deviceStateChangedCallback, pCookie); \
+} \
+ \
+ONI_C_API_EXPORT OniStatus oniDriverTryDevice(const char* uri) \
+{ \
+ return g_pDriver->tryDevice(uri); \
+} \
+ \
+/* As Device */ \
+ONI_C_API_EXPORT oni::driver::DeviceBase* oniDriverDeviceOpen(const char* uri, const char* mode) \
+{ \
+ return g_pDriver->deviceOpen(uri, mode); \
+} \
+ONI_C_API_EXPORT void oniDriverDeviceClose(oni::driver::DeviceBase* pDevice) \
+{ \
+ g_pDriver->deviceClose(pDevice); \
+} \
+ \
+ONI_C_API_EXPORT OniStatus oniDriverDeviceGetSensorInfoList(oni::driver::DeviceBase* pDevice, OniSensorInfo** pSensorInfos, \
+ int* numSensors) \
+{ \
+ return pDevice->getSensorInfoList(pSensorInfos, numSensors); \
+} \
+ \
+ONI_C_API_EXPORT oni::driver::StreamBase* oniDriverDeviceCreateStream(oni::driver::DeviceBase* pDevice, \
+ OniSensorType sensorType) \
+{ \
+ return pDevice->createStream(sensorType); \
+} \
+ \
+ONI_C_API_EXPORT void oniDriverDeviceDestroyStream(oni::driver::DeviceBase* pDevice, oni::driver::StreamBase* pStream) \
+{ \
+ return pDevice->destroyStream(pStream); \
+} \
+ \
+ONI_C_API_EXPORT OniStatus oniDriverDeviceSetProperty(oni::driver::DeviceBase* pDevice, int propertyId, \
+ const void* data, int dataSize) \
+{ \
+ return pDevice->setProperty(propertyId, data, dataSize); \
+} \
+ONI_C_API_EXPORT OniStatus oniDriverDeviceGetProperty(oni::driver::DeviceBase* pDevice, int propertyId, \
+ void* data, int* pDataSize) \
+{ \
+ return pDevice->getProperty(propertyId, data, pDataSize); \
+} \
+ONI_C_API_EXPORT OniBool oniDriverDeviceIsPropertySupported(oni::driver::DeviceBase* pDevice, int propertyId) \
+{ \
+ return pDevice->isPropertySupported(propertyId); \
+} \
+ONI_C_API_EXPORT void oniDriverDeviceSetPropertyChangedCallback(oni::driver::DeviceBase* pDevice, \
+ oni::driver::PropertyChangedCallback handler, void* pCookie) \
+{ \
+ pDevice->setPropertyChangedCallback(handler, pCookie); \
+} \
+ONI_C_API_EXPORT void oniDriverDeviceNotifyAllProperties(oni::driver::DeviceBase* pDevice) \
+{ \
+ pDevice->notifyAllProperties(); \
+} \
+ONI_C_API_EXPORT OniStatus oniDriverDeviceInvoke(oni::driver::DeviceBase* pDevice, int commandId, \
+ void* data, int dataSize) \
+{ \
+ return pDevice->invoke(commandId, data, dataSize); \
+} \
+ONI_C_API_EXPORT OniBool oniDriverDeviceIsCommandSupported(oni::driver::DeviceBase* pDevice, int commandId) \
+{ \
+ return pDevice->isCommandSupported(commandId); \
+} \
+ONI_C_API_EXPORT OniStatus oniDriverDeviceTryManualTrigger(oni::driver::DeviceBase* pDevice) \
+{ \
+ return pDevice->tryManualTrigger(); \
+} \
+ONI_C_API_EXPORT OniBool oniDriverDeviceIsImageRegistrationModeSupported(oni::driver::DeviceBase* pDevice, \
+ OniImageRegistrationMode mode) \
+{ \
+ return pDevice->isImageRegistrationModeSupported(mode); \
+} \
+ \
+/* As Stream */ \
+ONI_C_API_EXPORT void oniDriverStreamSetServices(oni::driver::StreamBase* pStream, OniStreamServices* pServices) \
+{ \
+ pStream->setServices((oni::driver::StreamServices*)pServices); \
+} \
+ \
+ONI_C_API_EXPORT OniStatus oniDriverStreamSetProperty(oni::driver::StreamBase* pStream, int propertyId, \
+ const void* data, int dataSize) \
+{ \
+ return pStream->setProperty(propertyId, data, dataSize); \
+} \
+ONI_C_API_EXPORT OniStatus oniDriverStreamGetProperty(oni::driver::StreamBase* pStream, int propertyId, void* data, \
+ int* pDataSize) \
+{ \
+ return pStream->getProperty(propertyId, data, pDataSize); \
+} \
+ONI_C_API_EXPORT OniBool oniDriverStreamIsPropertySupported(oni::driver::StreamBase* pStream, int propertyId) \
+{ \
+ return pStream->isPropertySupported(propertyId); \
+} \
+ONI_C_API_EXPORT void oniDriverStreamSetPropertyChangedCallback(oni::driver::StreamBase* pStream, \
+ oni::driver::PropertyChangedCallback handler, void* pCookie) \
+{ \
+ pStream->setPropertyChangedCallback(handler, pCookie); \
+} \
+ONI_C_API_EXPORT void oniDriverStreamNotifyAllProperties(oni::driver::StreamBase* pStream) \
+{ \
+ pStream->notifyAllProperties(); \
+} \
+ONI_C_API_EXPORT OniStatus oniDriverStreamInvoke(oni::driver::StreamBase* pStream, int commandId, \
+ void* data, int dataSize) \
+{ \
+ return pStream->invoke(commandId, data, dataSize); \
+} \
+ONI_C_API_EXPORT OniBool oniDriverStreamIsCommandSupported(oni::driver::StreamBase* pStream, int commandId) \
+{ \
+ return pStream->isCommandSupported(commandId); \
+} \
+ \
+ONI_C_API_EXPORT OniStatus oniDriverStreamStart(oni::driver::StreamBase* pStream) \
+{ \
+ return pStream->start(); \
+} \
+ONI_C_API_EXPORT void oniDriverStreamStop(oni::driver::StreamBase* pStream) \
+{ \
+ pStream->stop(); \
+} \
+ \
+ONI_C_API_EXPORT int oniDriverStreamGetRequiredFrameSize(oni::driver::StreamBase* pStream) \
+{ \
+ return pStream->getRequiredFrameSize(); \
+} \
+ \
+ONI_C_API_EXPORT void oniDriverStreamSetNewFrameCallback(oni::driver::StreamBase* pStream, \
+ oni::driver::NewFrameCallback handler, void* pCookie) \
+{ \
+ pStream->setNewFrameCallback(handler, pCookie); \
+} \
+ \
+ONI_C_API_EXPORT OniStatus oniDriverStreamConvertDepthToColorCoordinates(oni::driver::StreamBase* pDepthStream, \
+ oni::driver::StreamBase* pColorStream, int depthX, int depthY, OniDepthPixel depthZ, int* pColorX, int* pColorY) \
+{ \
+ return pDepthStream->convertDepthToColorCoordinates(pColorStream, depthX, depthY, depthZ, pColorX, pColorY); \
+} \
+ \
+ONI_C_API_EXPORT void* oniDriverEnableFrameSync(oni::driver::StreamBase** pStreams, int streamCount) \
+{ \
+ return g_pDriver->enableFrameSync(pStreams, streamCount); \
+} \
+ \
+ONI_C_API_EXPORT void oniDriverDisableFrameSync(void* frameSyncGroup) \
+{ \
+ return g_pDriver->disableFrameSync(frameSyncGroup); \
+} \
+
+#endif // _ONI_DRIVER_API_H_
diff --git a/include/openni2/Driver/OniDriverTypes.h b/include/openni2/Driver/OniDriverTypes.h
new file mode 100644
index 00000000..fe8cd44c
--- /dev/null
+++ b/include/openni2/Driver/OniDriverTypes.h
@@ -0,0 +1,54 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_DRIVER_TYPES_H_
+#define _ONI_DRIVER_TYPES_H_
+
+#include <OniCTypes.h>
+#include <stdarg.h>
+
+#define ONI_STREAM_PROPERTY_PRIVATE_BASE XN_MAX_UINT16
+
+typedef struct
+{
+ int dataSize;
+ void* data;
+} OniGeneralBuffer;
+
+/////// DriverServices
+struct OniDriverServices
+{
+ void* driverServices;
+ void (ONI_CALLBACK_TYPE* errorLoggerAppend)(void* driverServices, const char* format, va_list args);
+ void (ONI_CALLBACK_TYPE* errorLoggerClear)(void* driverServices);
+ void (ONI_CALLBACK_TYPE* log)(void* driverServices, int severity, const char* file, int line, const char* mask, const char* message);
+};
+
+struct OniStreamServices
+{
+ void* streamServices;
+ int (ONI_CALLBACK_TYPE* getDefaultRequiredFrameSize)(void* streamServices);
+ OniFrame* (ONI_CALLBACK_TYPE* acquireFrame)(void* streamServices); // returns a frame with size corresponding to getRequiredFrameSize()
+ void (ONI_CALLBACK_TYPE* addFrameRef)(void* streamServices, OniFrame* pframe);
+ void (ONI_CALLBACK_TYPE* releaseFrame)(void* streamServices, OniFrame* pframe);
+};
+
+
+#endif // _ONI_DRIVER_TYPES_H_
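The OniStreamServices block above is the contract a driver-side stream uses to obtain frame buffers from OpenNI. Below is a minimal sketch of how such an implementation might call it; the helper name and the fields to be filled in are hypothetical, only the service calls come from the struct definition.

#include "OniDriverTypes.h"
#include <stddef.h>

/* Sketch: acquire a frame buffer from OpenNI and hand it back to the caller.   */
/* pServices is the OniStreamServices instance OpenNI passed to this stream.    */
static OniFrame* acquireStampedFrame(OniStreamServices* pServices)
{
    /* Buffer size follows getDefaultRequiredFrameSize() or the stream's own override. */
    OniFrame* pFrame = pServices->acquireFrame(pServices->streamServices);
    if (pFrame == NULL)
        return NULL;

    /* The driver would now fill pFrame->data, width, height, stride, timestamp, ... */
    /* Any extra owner takes and later drops a reference through the same services:  */
    pServices->addFrameRef(pServices->streamServices, pFrame);
    pServices->releaseFrame(pServices->streamServices, pFrame);

    return pFrame; /* still holds the reference obtained from acquireFrame() */
}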
diff --git a/include/openni2/Linux-Arm/OniPlatformLinux-Arm.h b/include/openni2/Linux-Arm/OniPlatformLinux-Arm.h
new file mode 100644
index 00000000..fb96323b
--- /dev/null
+++ b/include/openni2/Linux-Arm/OniPlatformLinux-Arm.h
@@ -0,0 +1,36 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_PLATFORM_LINUX_ARM_H_
+#define _ONI_PLATFORM_LINUX_ARM_H_
+
+// Start with Linux-x86, and override what's different
+#include "../Linux-x86/OniPlatformLinux-x86.h"
+
+//---------------------------------------------------------------------------
+// Platform Basic Definition
+//---------------------------------------------------------------------------
+#undef ONI_PLATFORM
+#undef ONI_PLATFORM_STRING
+#define ONI_PLATFORM ONI_PLATFORM_LINUX_ARM
+#define ONI_PLATFORM_STRING "Linux-Arm"
+
+#endif //_ONI_PLATFORM_LINUX_ARM_H_
+
diff --git a/include/openni2/Linux-x86/OniPlatformLinux-x86.h b/include/openni2/Linux-x86/OniPlatformLinux-x86.h
new file mode 100644
index 00000000..e5980f35
--- /dev/null
+++ b/include/openni2/Linux-x86/OniPlatformLinux-x86.h
@@ -0,0 +1,102 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_PLATFORM_LINUX_X86_H_
+#define _ONI_PLATFORM_LINUX_X86_H_
+
+//---------------------------------------------------------------------------
+// Prerequisites
+//---------------------------------------------------------------------------
+
+//---------------------------------------------------------------------------
+// Includes
+//---------------------------------------------------------------------------
+#include <stdio.h>
+#include <stdarg.h>
+#include <stdlib.h>
+#include <stdint.h>
+#include <string.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+//---------------------------------------------------------------------------
+// Platform Basic Definition
+//---------------------------------------------------------------------------
+#define ONI_PLATFORM ONI_PLATFORM_LINUX_X86
+#define ONI_PLATFORM_STRING "Linux-x86"
+
+//---------------------------------------------------------------------------
+// Platform Capabilities
+//---------------------------------------------------------------------------
+#define ONI_PLATFORM_ENDIAN_TYPE ONI_PLATFORM_IS_LITTLE_ENDIAN
+
+#define ONI_PLATFORM_SUPPORTS_DYNAMIC_LIBS 1
+
+//---------------------------------------------------------------------------
+// Memory
+//---------------------------------------------------------------------------
+/** The default memory alignment. */
+#define ONI_DEFAULT_MEM_ALIGN 16
+
+/** The thread static declarator (using TLS). */
+#define ONI_THREAD_STATIC __thread
+
+//---------------------------------------------------------------------------
+// Files
+//---------------------------------------------------------------------------
+/** The maximum allowed file path size (in bytes). */
+#define ONI_FILE_MAX_PATH 256
+
+//---------------------------------------------------------------------------
+// Call back
+//---------------------------------------------------------------------------
+/** The std call type. */
+#define ONI_STDCALL __stdcall
+
+/** The call back calling convention. */
+#define ONI_CALLBACK_TYPE
+
+/** The C and C++ calling convention. */
+#define ONI_C_DECL
+
+//---------------------------------------------------------------------------
+// Macros
+//---------------------------------------------------------------------------
+/** Returns the date and time at compile time. */
+#define ONI_TIMESTAMP __DATE__ " " __TIME__
+
+/** Converts n into a pre-processor string. */
+#define ONI_STRINGIFY(n) ONI_STRINGIFY_HELPER(n)
+#define ONI_STRINGIFY_HELPER(n) #n
+
+//---------------------------------------------------------------------------
+// API Export/Import Macros
+//---------------------------------------------------------------------------
+/** Indicates an exported shared library function. */
+#define ONI_API_EXPORT __attribute__ ((visibility("default")))
+
+/** Indicates an imported shared library function. */
+#define ONI_API_IMPORT
+
+/** Indicates a deprecated function */
+#define ONI_API_DEPRECATED(msg) __attribute__((warning("This function is deprecated: " msg)))
+
+#endif //_ONI_PLATFORM_LINUX_X86_H_
+
diff --git a/include/openni2/MacOSX/OniPlatformMacOSX.h b/include/openni2/MacOSX/OniPlatformMacOSX.h
new file mode 100644
index 00000000..251256eb
--- /dev/null
+++ b/include/openni2/MacOSX/OniPlatformMacOSX.h
@@ -0,0 +1,42 @@
+/*****************************************************************************
+* *
+* PrimeSense PSCommon Library *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of PSCommon. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_PLATFORM_MACOSX_H_
+#define _ONI_PLATFORM_MACOSX_H_
+
+// Start with Linux-x86, and override what's different
+#include "../Linux-x86/OniPlatformLinux-x86.h"
+
+#include <sys/time.h>
+
+#undef ONI_PLATFORM
+#undef ONI_PLATFORM_STRING
+#define ONI_PLATFORM ONI_PLATFORM_MACOSX
+#define ONI_PLATFORM_STRING "MacOSX"
+
+#define ONI_PLATFORM_HAS_NO_TIMED_OPS
+#define ONI_PLATFORM_HAS_NO_CLOCK_GETTIME
+#define ONI_PLATFORM_HAS_NO_SCHED_PARAM
+#define ONI_PLATFORM_HAS_BUILTIN_SEMUN
+
+#undef ONI_THREAD_STATIC
+#define ONI_THREAD_STATIC
+
+#endif //_ONI_PLATFORM_MACOSX_H_
diff --git a/include/openni2/OniCAPI.h b/include/openni2/OniCAPI.h
new file mode 100644
index 00000000..aea426d4
--- /dev/null
+++ b/include/openni2/OniCAPI.h
@@ -0,0 +1,259 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_C_API_H_
+#define _ONI_C_API_H_
+
+#include "OniPlatform.h"
+#include "OniCTypes.h"
+#include "OniCProperties.h"
+#include "OniVersion.h"
+
+/******************************************** General APIs */
+
+/** Initialize OpenNI2. Use ONI_API_VERSION as the version. */
+ONI_C_API OniStatus oniInitialize(int apiVersion);
+/** Shutdown OpenNI2 */
+ONI_C_API void oniShutdown();
+
+/**
+ * Get the list of currently connected devices.
+ * Each device is represented by its OniDeviceInfo.
+ * pDevices will be allocated inside.
+ */
+ONI_C_API OniStatus oniGetDeviceList(OniDeviceInfo** pDevices, int* pNumDevices);
+/** Release previously allocated device list */
+ONI_C_API OniStatus oniReleaseDeviceList(OniDeviceInfo* pDevices);
+
+ONI_C_API OniStatus oniRegisterDeviceCallbacks(OniDeviceCallbacks* pCallbacks, void* pCookie, OniCallbackHandle* pHandle);
+ONI_C_API void oniUnregisterDeviceCallbacks(OniCallbackHandle handle);
+
+/** Wait for any of the streams to have a new frame */
+ONI_C_API OniStatus oniWaitForAnyStream(OniStreamHandle* pStreams, int numStreams, int* pStreamIndex, int timeout);
+
+/** Get the current version of OpenNI2 */
+ONI_C_API OniVersion oniGetVersion();
+
+/** Translate from format to number of bytes per pixel. Will return 0 for formats in which the number of bytes per pixel isn't fixed. */
+ONI_C_API int oniFormatBytesPerPixel(OniPixelFormat format);
+
+/** Get internal error */
+ONI_C_API const char* oniGetExtendedError();
+
+/******************************************** Device APIs */
+
+/** Open a device. Uri can be taken from the matching OniDeviceInfo. */
+ONI_C_API OniStatus oniDeviceOpen(const char* uri, OniDeviceHandle* pDevice);
+/** Close a device */
+ONI_C_API OniStatus oniDeviceClose(OniDeviceHandle device);
+
+/** Get the possible configurations available for a specific source, or NULL if the source does not exist. */
+ONI_C_API const OniSensorInfo* oniDeviceGetSensorInfo(OniDeviceHandle device, OniSensorType sensorType);
+
+/** Get the OniDeviceInfo of a certain device. */
+ONI_C_API OniStatus oniDeviceGetInfo(OniDeviceHandle device, OniDeviceInfo* pInfo);
+
+/** Create a new stream in the device. The stream will originate from the source. */
+ONI_C_API OniStatus oniDeviceCreateStream(OniDeviceHandle device, OniSensorType sensorType, OniStreamHandle* pStream);
+
+ONI_C_API OniStatus oniDeviceEnableDepthColorSync(OniDeviceHandle device);
+ONI_C_API void oniDeviceDisableDepthColorSync(OniDeviceHandle device);
+ONI_C_API OniBool oniDeviceGetDepthColorSyncEnabled(OniDeviceHandle device);
+
+/** Set property in the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */
+ONI_C_API OniStatus oniDeviceSetProperty(OniDeviceHandle device, int propertyId, const void* data, int dataSize);
+/** Get property in the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */
+ONI_C_API OniStatus oniDeviceGetProperty(OniDeviceHandle device, int propertyId, void* data, int* pDataSize);
+/** Check if the property is supported by the device. Use the properties listed in OniTypes.h: ONI_DEVICE_PROPERTY_..., or specific ones supplied by the device. */
+ONI_C_API OniBool oniDeviceIsPropertySupported(OniDeviceHandle device, int propertyId);
+/** Invoke an internal functionality of the device. */
+ONI_C_API OniStatus oniDeviceInvoke(OniDeviceHandle device, int commandId, void* data, int dataSize);
+/** Check if a command is supported, for invoke */
+ONI_C_API OniBool oniDeviceIsCommandSupported(OniDeviceHandle device, int commandId);
+
+ONI_C_API OniBool oniDeviceIsImageRegistrationModeSupported(OniDeviceHandle device, OniImageRegistrationMode mode);
+
+/** @internal */
+ONI_C_API OniStatus oniDeviceOpenEx(const char* uri, const char* mode, OniDeviceHandle* pDevice);
+
+/******************************************** Stream APIs */
+
+/** Destroy an existing stream */
+ONI_C_API void oniStreamDestroy(OniStreamHandle stream);
+
+/** Get the OniSensorInfo of the given stream. */
+ONI_C_API const OniSensorInfo* oniStreamGetSensorInfo(OniStreamHandle stream);
+
+/** Start generating data from the stream. */
+ONI_C_API OniStatus oniStreamStart(OniStreamHandle stream);
+/** Stop generating data from the stream. */
+ONI_C_API void oniStreamStop(OniStreamHandle stream);
+
+/** Get the next frame from the stream. This call blocks until the stream has a new frame. To bound the wait, use oniWaitForAnyStream() first. */
+ONI_C_API OniStatus oniStreamReadFrame(OniStreamHandle stream, OniFrame** pFrame);
+
+/** Register a callback to when the stream has a new frame. */
+ONI_C_API OniStatus oniStreamRegisterNewFrameCallback(OniStreamHandle stream, OniNewFrameCallback handler, void* pCookie, OniCallbackHandle* pHandle);
+/** Unregister a previously registered callback to when the stream has a new frame. */
+ONI_C_API void oniStreamUnregisterNewFrameCallback(OniStreamHandle stream, OniCallbackHandle handle);
+
+/** Set property in the stream. Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */
+ONI_C_API OniStatus oniStreamSetProperty(OniStreamHandle stream, int propertyId, const void* data, int dataSize);
+/** Get property in the stream. Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */
+ONI_C_API OniStatus oniStreamGetProperty(OniStreamHandle stream, int propertyId, void* data, int* pDataSize);
+/** Check if the property is supported by the stream. Use the properties listed in OniTypes.h: ONI_STREAM_PROPERTY_..., or specific ones supplied by the device for its streams. */
+ONI_C_API OniBool oniStreamIsPropertySupported(OniStreamHandle stream, int propertyId);
+/** Invoke an internal functionality of the stream. */
+ONI_C_API OniStatus oniStreamInvoke(OniStreamHandle stream, int commandId, void* data, int dataSize);
+/** Check if a command is supported, for invoke */
+ONI_C_API OniBool oniStreamIsCommandSupported(OniStreamHandle stream, int commandId);
+/** Sets the stream buffer allocation functions. Note that this function may only be called while stream is not started. */
+ONI_C_API OniStatus oniStreamSetFrameBuffersAllocator(OniStreamHandle stream, OniFrameAllocBufferCallback alloc, OniFrameFreeBufferCallback free, void* pCookie);
+
+////
+/** Mark another user of the frame. */
+ONI_C_API void oniFrameAddRef(OniFrame* pFrame);
+/** Mark that the frame is no longer needed. */
+ONI_C_API void oniFrameRelease(OniFrame* pFrame);
+
+// ONI_C_API OniStatus oniConvertRealWorldToProjective(OniStreamHandle stream, OniFloatPoint3D* pRealWorldPoint, OniFloatPoint3D* pProjectivePoint);
+// ONI_C_API OniStatus oniConvertProjectiveToRealWorld(OniStreamHandle stream, OniFloatPoint3D* pProjectivePoint, OniFloatPoint3D* pRealWorldPoint);
+
+/**
+ * Creates a recorder that records to a file.
+ * @param [in] fileName The name of the file that will contain the recording.
+ * @param [out] pRecorder Points to the handle to the newly created recorder.
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniCreateRecorder(const char* fileName, OniRecorderHandle* pRecorder);
+
+/**
+ * Attaches a stream to a recorder. The number of streams that can be attached
+ * is effectively unlimited. A stream cannot be attached after recording has
+ * started; doing so makes oniRecorderAttachStream return an error.
+ * @param [in] recorder The handle to the recorder.
+ * @param [in] stream The handle to the stream.
+ * @param [in] allowLossyCompression Allows/denies lossy compression
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniRecorderAttachStream(
+ OniRecorderHandle recorder,
+ OniStreamHandle stream,
+ OniBool allowLossyCompression);
+
+/**
+ * Starts recording. At least one stream must be attached to the recorder;
+ * otherwise oniRecorderStart returns an error.
+ * @param[in] recorder The handle to the recorder.
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniRecorderStart(OniRecorderHandle recorder);
+
+/**
+ * Stops recording. You can resume recording via oniRecorderStart.
+ * @param[in] recorder The handle to the recorder.
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API void oniRecorderStop(OniRecorderHandle recorder);
+
+/**
+ * Stops recording if needed, and destroys a recorder.
+ * @param [in,out] recorder The handle to the recorder, the handle will be
+ * invalidated (nullified) when the function returns.
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniRecorderDestroy(OniRecorderHandle* pRecorder);
+
+ONI_C_API OniStatus oniCoordinateConverterDepthToWorld(OniStreamHandle depthStream, float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ);
+
+ONI_C_API OniStatus oniCoordinateConverterWorldToDepth(OniStreamHandle depthStream, float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ);
+
+ONI_C_API OniStatus oniCoordinateConverterDepthToColor(OniStreamHandle depthStream, OniStreamHandle colorStream, int depthX, int depthY, OniDepthPixel depthZ, int* pColorX, int* pColorY);
+
+/******************************************** Log APIs */
+
+/**
+ * Change the log output folder
+
+ * @param const char * strOutputFolder [in] path to the desirebale folder
+ *
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniSetLogOutputFolder(const char* strOutputFolder);
+
+/**
+ * Get the current log file name
+
+ * @param char * strFileName [out] holds the returned file name
+ * @param int nBufferSize [in] size of strFileName
+ *
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniGetLogFileName(char* strFileName, int nBufferSize);
+
+/**
+ * Set the minimum severity for log entries to be produced
+
+ * @param int nMinSeverity [in] Minimum severity of log entries to produce
+ *
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniSetLogMinSeverity(int nMinSeverity);
+
+/**
+ * Configures if log entries will be printed to console.
+
+ * @param OniBool bConsoleOutput [in] TRUE to print log entries to console, FALSE otherwise.
+ *
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniSetLogConsoleOutput(OniBool bConsoleOutput);
+
+/**
+ * Configures if log entries will be printed to a log file.
+
+ * @param OniBool bFileOutput [in] TRUE to print log entries to the file, FALSE otherwise.
+ *
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniSetLogFileOutput(OniBool bFileOutput);
+
+#if ONI_PLATFORM == ONI_PLATFORM_ANDROID_ARM
+/**
+ * Configures if log entries will be printed to the Android log.
+
+ * @param OniBool bAndroidOutput [in] TRUE to print log entries to the Android log, FALSE otherwise.
+ *
+ * @retval ONI_STATUS_OK Upon successful completion.
+ * @retval ONI_STATUS_ERROR Upon any kind of failure.
+ */
+ONI_C_API OniStatus oniSetLogAndroidOutput(OniBool bAndroidOutput);
+#endif
+#endif // _ONI_C_API_H_
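As a rough orientation for the C API above, here is a minimal sketch that opens a device, reads a single depth frame, and shuts everything down. Passing NULL as the URI to mean "any device" mirrors the C++ ANY_DEVICE convention and is an assumption here; every call and struct field used comes from the headers in this diff.

#include "OniCAPI.h"
#include <stdio.h>

int readOneDepthFrame(void)
{
    OniDeviceHandle device = NULL;
    OniStreamHandle depth = NULL;
    OniFrame* pFrame = NULL;

    if (oniInitialize(ONI_API_VERSION) != ONI_STATUS_OK)
    {
        printf("init failed: %s\n", oniGetExtendedError());
        return 1;
    }

    /* NULL URI: let OpenNI pick any connected device (assumption, see note above). */
    if (oniDeviceOpen(NULL, &device) == ONI_STATUS_OK &&
        oniDeviceCreateStream(device, ONI_SENSOR_DEPTH, &depth) == ONI_STATUS_OK &&
        oniStreamStart(depth) == ONI_STATUS_OK &&
        oniStreamReadFrame(depth, &pFrame) == ONI_STATUS_OK)
    {
        printf("got %dx%d depth frame, %d bytes\n", pFrame->width, pFrame->height, pFrame->dataSize);
        oniFrameRelease(pFrame);
    }

    if (depth != NULL)  { oniStreamStop(depth); oniStreamDestroy(depth); }
    if (device != NULL) oniDeviceClose(device);
    oniShutdown();
    return 0;
}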
diff --git a/include/openni2/OniCEnums.h b/include/openni2/OniCEnums.h
new file mode 100644
index 00000000..d7f513b8
--- /dev/null
+++ b/include/openni2/OniCEnums.h
@@ -0,0 +1,84 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_C_ENUMS_H_
+#define _ONI_C_ENUMS_H_
+
+/** Possible failure values */
+typedef enum
+{
+ ONI_STATUS_OK = 0,
+ ONI_STATUS_ERROR = 1,
+ ONI_STATUS_NOT_IMPLEMENTED = 2,
+ ONI_STATUS_NOT_SUPPORTED = 3,
+ ONI_STATUS_BAD_PARAMETER = 4,
+ ONI_STATUS_OUT_OF_FLOW = 5,
+ ONI_STATUS_NO_DEVICE = 6,
+ ONI_STATUS_TIME_OUT = 102,
+} OniStatus;
+
+/** The source of the stream */
+typedef enum
+{
+ ONI_SENSOR_IR = 1,
+ ONI_SENSOR_COLOR = 2,
+ ONI_SENSOR_DEPTH = 3,
+
+} OniSensorType;
+
+/** All available formats of the output of a stream */
+typedef enum
+{
+ // Depth
+ ONI_PIXEL_FORMAT_DEPTH_1_MM = 100,
+ ONI_PIXEL_FORMAT_DEPTH_100_UM = 101,
+ ONI_PIXEL_FORMAT_SHIFT_9_2 = 102,
+ ONI_PIXEL_FORMAT_SHIFT_9_3 = 103,
+
+ // Color
+ ONI_PIXEL_FORMAT_RGB888 = 200,
+ ONI_PIXEL_FORMAT_YUV422 = 201,
+ ONI_PIXEL_FORMAT_GRAY8 = 202,
+ ONI_PIXEL_FORMAT_GRAY16 = 203,
+ ONI_PIXEL_FORMAT_JPEG = 204,
+ ONI_PIXEL_FORMAT_YUYV = 205,
+} OniPixelFormat;
+
+typedef enum
+{
+ ONI_DEVICE_STATE_OK = 0,
+ ONI_DEVICE_STATE_ERROR = 1,
+ ONI_DEVICE_STATE_NOT_READY = 2,
+ ONI_DEVICE_STATE_EOF = 3
+} OniDeviceState;
+
+typedef enum
+{
+ ONI_IMAGE_REGISTRATION_OFF = 0,
+ ONI_IMAGE_REGISTRATION_DEPTH_TO_COLOR = 1,
+} OniImageRegistrationMode;
+
+enum
+{
+ ONI_TIMEOUT_NONE = 0,
+ ONI_TIMEOUT_FOREVER = -1,
+};
+
+#endif // _ONI_C_ENUMS_H_
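Most calls in the C API return one of these OniStatus codes, so a common pattern is a small checking helper. A sketch follows; the helper name is hypothetical, and oniGetExtendedError() is the function declared in OniCAPI.h above.

#include "OniCAPI.h"
#include <stdio.h>

/* Sketch: report a failed OniStatus and return whether the call succeeded. */
static int checkRc(OniStatus rc, const char* what)
{
    if (rc == ONI_STATUS_OK)
        return 1;
    if (rc == ONI_STATUS_TIME_OUT)
        printf("%s timed out\n", what);
    else
        printf("%s failed (%d): %s\n", what, (int)rc, oniGetExtendedError());
    return 0;
}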
diff --git a/include/openni2/OniCProperties.h b/include/openni2/OniCProperties.h
new file mode 100644
index 00000000..da13d58e
--- /dev/null
+++ b/include/openni2/OniCProperties.h
@@ -0,0 +1,68 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_C_PROPERTIES_H_
+#define _ONI_C_PROPERTIES_H_
+
+// Device properties
+enum
+{
+ ONI_DEVICE_PROPERTY_FIRMWARE_VERSION = 0, // By implementation
+ ONI_DEVICE_PROPERTY_DRIVER_VERSION = 1, // OniVersion
+ ONI_DEVICE_PROPERTY_HARDWARE_VERSION = 2, // int
+ ONI_DEVICE_PROPERTY_SERIAL_NUMBER = 3, // string
+ ONI_DEVICE_PROPERTY_ERROR_STATE = 4, // ??
+ ONI_DEVICE_PROPERTY_IMAGE_REGISTRATION = 5, // OniImageRegistrationMode
+
+ // Files
+ ONI_DEVICE_PROPERTY_PLAYBACK_SPEED = 100, // float
+ ONI_DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED = 101, // OniBool
+};
+
+// Stream properties
+enum
+{
+ ONI_STREAM_PROPERTY_CROPPING = 0, // OniCropping*
+ ONI_STREAM_PROPERTY_HORIZONTAL_FOV = 1, // float: radians
+ ONI_STREAM_PROPERTY_VERTICAL_FOV = 2, // float: radians
+ ONI_STREAM_PROPERTY_VIDEO_MODE = 3, // OniVideoMode*
+
+ ONI_STREAM_PROPERTY_MAX_VALUE = 4, // int
+ ONI_STREAM_PROPERTY_MIN_VALUE = 5, // int
+
+ ONI_STREAM_PROPERTY_STRIDE = 6, // int
+ ONI_STREAM_PROPERTY_MIRRORING = 7, // OniBool
+
+ ONI_STREAM_PROPERTY_NUMBER_OF_FRAMES = 8, // int
+
+ // Camera
+ ONI_STREAM_PROPERTY_AUTO_WHITE_BALANCE = 100, // OniBool
+ ONI_STREAM_PROPERTY_AUTO_EXPOSURE = 101, // OniBool
+ ONI_STREAM_PROPERTY_EXPOSURE = 102, // int
+ ONI_STREAM_PROPERTY_GAIN = 103, // int
+};
+
+// Device commands (for Invoke)
+enum
+{
+ ONI_DEVICE_COMMAND_SEEK = 1, // OniSeek
+};
+
+#endif // _ONI_C_PROPERTIES_H_
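These IDs plug into the generic property calls declared in OniCAPI.h; the expected data type for each one follows the trailing comments above. A small sketch, assuming the stream handle is already open:

#include "OniCAPI.h"
#include <stdio.h>

/* Sketch: read the horizontal field of view and turn mirroring off. */
static void tweakStream(OniStreamHandle stream)
{
    float fov = 0.0f;
    int size = (int)sizeof(fov); /* in/out: buffer size on input, actual size on output */
    if (oniStreamGetProperty(stream, ONI_STREAM_PROPERTY_HORIZONTAL_FOV, &fov, &size) == ONI_STATUS_OK)
        printf("horizontal FOV: %f rad\n", fov);

    OniBool mirror = FALSE;
    oniStreamSetProperty(stream, ONI_STREAM_PROPERTY_MIRRORING, &mirror, (int)sizeof(mirror));
}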
diff --git a/include/openni2/OniCTypes.h b/include/openni2/OniCTypes.h
new file mode 100644
index 00000000..12246949
--- /dev/null
+++ b/include/openni2/OniCTypes.h
@@ -0,0 +1,193 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_TYPES_H_
+#define _ONI_TYPES_H_
+
+#include "OniPlatform.h"
+#include "OniCEnums.h"
+
+/** Basic types **/
+typedef int OniBool;
+
+#ifndef TRUE
+#define TRUE 1
+#endif //TRUE
+#ifndef FALSE
+#define FALSE 0
+#endif //FALSE
+
+#define ONI_MAX_STR 256
+#define ONI_MAX_SENSORS 10
+
+struct OniCallbackHandleImpl;
+typedef struct OniCallbackHandleImpl* OniCallbackHandle;
+
+/** Holds an OpenNI version number, which consists of four separate numbers in the format: @c major.minor.maintenance.build. For example: 2.0.0.20. */
+typedef struct
+{
+ /** Major version number, incremented for major API restructuring. */
+ int major;
+ /** Minor version number, incremented when significant new features added. */
+ int minor;
+ /** Maintenance build number, incremented for new releases that primarily provide minor bug fixes. */
+ int maintenance;
+ /** Build number. Incremented for each new API build. Generally not shown on the installer and download site. */
+ int build;
+} OniVersion;
+
+typedef int OniHardwareVersion;
+
+/** Description of the output: format and resolution */
+typedef struct
+{
+ OniPixelFormat pixelFormat;
+ int resolutionX;
+ int resolutionY;
+ int fps;
+} OniVideoMode;
+
+/** List of supported video modes by a specific source */
+typedef struct
+{
+ OniSensorType sensorType;
+ int numSupportedVideoModes;
+ OniVideoMode *pSupportedVideoModes;
+} OniSensorInfo;
+
+/** Basic description of a device */
+typedef struct
+{
+ char uri[ONI_MAX_STR];
+ char vendor[ONI_MAX_STR];
+ char name[ONI_MAX_STR];
+ uint16_t usbVendorId;
+ uint16_t usbProductId;
+} OniDeviceInfo;
+
+struct _OniDevice;
+typedef struct _OniDevice* OniDeviceHandle;
+
+struct _OniStream;
+typedef struct _OniStream* OniStreamHandle;
+
+struct _OniRecorder;
+typedef struct _OniRecorder* OniRecorderHandle;
+
+/** All information of the current frame */
+typedef struct
+{
+ int dataSize;
+ void* data;
+
+ OniSensorType sensorType;
+ uint64_t timestamp;
+ int frameIndex;
+
+ int width;
+ int height;
+
+ OniVideoMode videoMode;
+ OniBool croppingEnabled;
+ int cropOriginX;
+ int cropOriginY;
+
+ int stride;
+} OniFrame;
+
+typedef void (ONI_CALLBACK_TYPE* OniNewFrameCallback)(OniStreamHandle stream, void* pCookie);
+typedef void (ONI_CALLBACK_TYPE* OniGeneralCallback)(void* pCookie);
+typedef void (ONI_CALLBACK_TYPE* OniDeviceInfoCallback)(const OniDeviceInfo* pInfo, void* pCookie);
+typedef void (ONI_CALLBACK_TYPE* OniDeviceStateCallback)(const OniDeviceInfo* pInfo, OniDeviceState deviceState, void* pCookie);
+
+typedef void* (ONI_CALLBACK_TYPE* OniFrameAllocBufferCallback)(int size, void* pCookie);
+typedef void (ONI_CALLBACK_TYPE* OniFrameFreeBufferCallback)(void* data, void* pCookie);
+
+typedef struct
+{
+ OniDeviceInfoCallback deviceConnected;
+ OniDeviceInfoCallback deviceDisconnected;
+ OniDeviceStateCallback deviceStateChanged;
+} OniDeviceCallbacks;
+
+typedef struct
+{
+ int enabled;
+ int originX;
+ int originY;
+ int width;
+ int height;
+} OniCropping;
+
+// Pixel types
+/**
+Pixel type used to store depth images.
+*/
+typedef uint16_t OniDepthPixel;
+
+/**
+Pixel type used to store 16-bit grayscale images
+*/
+typedef uint16_t OniGrayscale16Pixel;
+
+/**
+Pixel type used to store 8-bit grayscale/bayer images
+*/
+typedef uint8_t OniGrayscale8Pixel;
+
+#pragma pack (push, 1)
+
+/** Holds the value of a single color image pixel in 24-bit RGB format. */
+typedef struct
+{
+ /* Red value of this pixel. */
+ uint8_t r;
+ /* Green value of this pixel. */
+ uint8_t g;
+ /* Blue value of this pixel. */
+ uint8_t b;
+} OniRGB888Pixel;
+
+/**
+ Holds the value of two pixels in YUV422 format (Luminance/Chrominance,16-bits/pixel).
+ The first pixel has the values y1, u, v.
+ The second pixel has the values y2, u, v.
+*/
+typedef struct
+{
+ /** First chrominance value for two pixels, stored as blue luminance difference signal. */
+ uint8_t u;
+ /** Overall luminance value of first pixel. */
+ uint8_t y1;
+ /** Second chrominance value for two pixels, stored as red luminance difference signal. */
+ uint8_t v;
+ /** Overall luminance value of second pixel. */
+ uint8_t y2;
+} OniYUV422DoublePixel;
+
+#pragma pack (pop)
+
+typedef struct
+{
+ int frameIndex;
+ OniStreamHandle stream;
+} OniSeek;
+
+#endif // _ONI_TYPES_H_
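Because OniFrame exposes the raw pixel buffer together with a per-row stride, row addressing has to go through byte arithmetic. A sketch, assuming a frame whose video mode is ONI_PIXEL_FORMAT_DEPTH_1_MM so each pixel is an OniDepthPixel:

#include "OniCTypes.h"
#include <stddef.h>

/* Sketch: depth value at pixel (x, y) of an ONI_PIXEL_FORMAT_DEPTH_1_MM frame. */
static OniDepthPixel depthAt(const OniFrame* pFrame, int x, int y)
{
    /* stride is the length of one row in bytes, not in pixels. */
    const char* pRow = (const char*)pFrame->data + (size_t)y * (size_t)pFrame->stride;
    return ((const OniDepthPixel*)pRow)[x];
}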
diff --git a/include/openni2/OniEnums.h b/include/openni2/OniEnums.h
new file mode 100644
index 00000000..018f2227
--- /dev/null
+++ b/include/openni2/OniEnums.h
@@ -0,0 +1,86 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_ENUMS_H_
+#define _ONI_ENUMS_H_
+
+namespace openni
+{
+
+/** Possible failure values */
+typedef enum
+{
+ STATUS_OK = 0,
+ STATUS_ERROR = 1,
+ STATUS_NOT_IMPLEMENTED = 2,
+ STATUS_NOT_SUPPORTED = 3,
+ STATUS_BAD_PARAMETER = 4,
+ STATUS_OUT_OF_FLOW = 5,
+ STATUS_NO_DEVICE = 6,
+ STATUS_TIME_OUT = 102,
+} Status;
+
+/** The source of the stream */
+typedef enum
+{
+ SENSOR_IR = 1,
+ SENSOR_COLOR = 2,
+ SENSOR_DEPTH = 3,
+
+} SensorType;
+
+/** All available formats of the output of a stream */
+typedef enum
+{
+ // Depth
+ PIXEL_FORMAT_DEPTH_1_MM = 100,
+ PIXEL_FORMAT_DEPTH_100_UM = 101,
+ PIXEL_FORMAT_SHIFT_9_2 = 102,
+ PIXEL_FORMAT_SHIFT_9_3 = 103,
+
+ // Color
+ PIXEL_FORMAT_RGB888 = 200,
+ PIXEL_FORMAT_YUV422 = 201,
+ PIXEL_FORMAT_GRAY8 = 202,
+ PIXEL_FORMAT_GRAY16 = 203,
+ PIXEL_FORMAT_JPEG = 204,
+ PIXEL_FORMAT_YUYV = 205,
+} PixelFormat;
+
+typedef enum
+{
+ DEVICE_STATE_OK = 0,
+ DEVICE_STATE_ERROR = 1,
+ DEVICE_STATE_NOT_READY = 2,
+ DEVICE_STATE_EOF = 3
+} DeviceState;
+
+typedef enum
+{
+ IMAGE_REGISTRATION_OFF = 0,
+ IMAGE_REGISTRATION_DEPTH_TO_COLOR = 1,
+} ImageRegistrationMode;
+
+static const int TIMEOUT_NONE = 0;
+static const int TIMEOUT_FOREVER = -1;
+
+} // namespace openni
+
+#endif // _ONI_ENUMS_H_
diff --git a/include/openni2/OniPlatform.h b/include/openni2/OniPlatform.h
new file mode 100644
index 00000000..deb3e476
--- /dev/null
+++ b/include/openni2/OniPlatform.h
@@ -0,0 +1,72 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_PLATFORM_H_
+#define _ONI_PLATFORM_H_
+
+// Supported platforms
+#define ONI_PLATFORM_WIN32 1
+#define ONI_PLATFORM_LINUX_X86 2
+#define ONI_PLATFORM_LINUX_ARM 3
+#define ONI_PLATFORM_MACOSX 4
+#define ONI_PLATFORM_ANDROID_ARM 5
+
+#if (defined _WIN32)
+# ifndef RC_INVOKED
+# if _MSC_VER < 1300
+# error OpenNI Platform Abstraction Layer - Win32 - Microsoft Visual Studio version below 2003 (7.0) are not supported!
+# endif
+# endif
+# include "Win32/OniPlatformWin32.h"
+#elif defined (ANDROID) && defined (__arm__)
+# include "Android-Arm/OniPlatformAndroid-Arm.h"
+#elif (__linux__ && (i386 || __x86_64__))
+# include "Linux-x86/OniPlatformLinux-x86.h"
+#elif (__linux__ && (__arm__ || __aarch64__) )
+# include "Linux-Arm/OniPlatformLinux-Arm.h"
+#elif _ARC
+# include "ARC/OniPlaformARC.h"
+#elif (__APPLE__)
+# include "MacOSX/OniPlatformMacOSX.h"
+#else
+# error Xiron Platform Abstraction Layer - Unsupported Platform!
+#endif
+
+#ifdef __cplusplus
+# define ONI_C extern "C"
+# define ONI_C_API_EXPORT ONI_C ONI_API_EXPORT
+# define ONI_C_API_IMPORT ONI_C ONI_API_IMPORT
+# define ONI_CPP_API_EXPORT ONI_API_EXPORT
+# define ONI_CPP_API_IMPORT ONI_API_IMPORT
+#else // __cplusplus
+# define ONI_C_API_EXPORT ONI_API_EXPORT
+# define ONI_C_API_IMPORT ONI_API_IMPORT
+#endif // __cplusplus
+
+#ifdef ASTRA_EXPORT
+# define ONI_C_API ONI_C_API_EXPORT
+# define ONI_CPP_API ONI_CPP_API_EXPORT
+#else // ASTRA_EXPORT
+# define ONI_C_API ONI_C_API_IMPORT
+# define ONI_CPP_API ONI_CPP_API_IMPORT
+#endif // ASTRA_EXPORT
+
+
+#endif // _ONI_PLATFORM_H_
diff --git a/include/openni2/OniProperties.h b/include/openni2/OniProperties.h
new file mode 100644
index 00000000..19b0805a
--- /dev/null
+++ b/include/openni2/OniProperties.h
@@ -0,0 +1,73 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_PROPERTIES_H_
+#define _ONI_PROPERTIES_H_
+
+namespace openni
+{
+
+// Device properties
+enum
+{
+ DEVICE_PROPERTY_FIRMWARE_VERSION = 0, // string
+ DEVICE_PROPERTY_DRIVER_VERSION = 1, // OniVersion
+ DEVICE_PROPERTY_HARDWARE_VERSION = 2, // int
+ DEVICE_PROPERTY_SERIAL_NUMBER = 3, // string
+ DEVICE_PROPERTY_ERROR_STATE = 4, // ??
+ DEVICE_PROPERTY_IMAGE_REGISTRATION = 5, // OniImageRegistrationMode
+
+ // Files
+ DEVICE_PROPERTY_PLAYBACK_SPEED = 100, // float
+ DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED = 101, // OniBool
+};
+
+// Stream properties
+enum
+{
+ STREAM_PROPERTY_CROPPING = 0, // OniCropping*
+ STREAM_PROPERTY_HORIZONTAL_FOV = 1, // float: radians
+ STREAM_PROPERTY_VERTICAL_FOV = 2, // float: radians
+ STREAM_PROPERTY_VIDEO_MODE = 3, // OniVideoMode*
+
+ STREAM_PROPERTY_MAX_VALUE = 4, // int
+ STREAM_PROPERTY_MIN_VALUE = 5, // int
+
+ STREAM_PROPERTY_STRIDE = 6, // int
+ STREAM_PROPERTY_MIRRORING = 7, // OniBool
+
+ STREAM_PROPERTY_NUMBER_OF_FRAMES = 8, // int
+
+ // Camera
+ STREAM_PROPERTY_AUTO_WHITE_BALANCE = 100, // OniBool
+ STREAM_PROPERTY_AUTO_EXPOSURE = 101, // OniBool
+ STREAM_PROPERTY_EXPOSURE = 102, // int
+ STREAM_PROPERTY_GAIN = 103, // int
+
+};
+
+// Device commands (for Invoke)
+enum
+{
+ DEVICE_COMMAND_SEEK = 1, // OniSeek
+};
+
+} // namespace openni
+#endif // _ONI_PROPERTIES_H_
diff --git a/include/openni2/OniVersion.h b/include/openni2/OniVersion.h
new file mode 100644
index 00000000..7aa13ed5
--- /dev/null
+++ b/include/openni2/OniVersion.h
@@ -0,0 +1,43 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#include "OniPlatform.h"
+
+#define ONI_VERSION_MAJOR 2
+#define ONI_VERSION_MINOR 2
+#define ONI_VERSION_MAINTENANCE 0
+#define ONI_VERSION_BUILD 33
+
+/** OpenNI version (in brief string format): "Major.Minor.Maintenance (Build)" */
+#define ONI_BRIEF_VERSION_STRING \
+ ONI_STRINGIFY(ONI_VERSION_MAJOR) "." \
+ ONI_STRINGIFY(ONI_VERSION_MINOR) "." \
+ ONI_STRINGIFY(ONI_VERSION_MAINTENANCE) \
+ " (Build " ONI_STRINGIFY(ONI_VERSION_BUILD) ")"
+
+/** OpenNI version (in numeric format): (OpenNI major version * 100000000 + OpenNI minor version * 1000000 + OpenNI maintenance version * 10000 + OpenNI build version). */
+#define ONI_VERSION (ONI_VERSION_MAJOR*100000000 + ONI_VERSION_MINOR*1000000 + ONI_VERSION_MAINTENANCE*10000 + ONI_VERSION_BUILD)
+#define ONI_CREATE_API_VERSION(major, minor) ((major)*1000 + (minor))
+#define ONI_API_VERSION ONI_CREATE_API_VERSION(ONI_VERSION_MAJOR, ONI_VERSION_MINOR)
+
+/** OpenNI version (in string format): "Major.Minor.Maintenance.Build-Platform (MMM DD YYYY HH:MM:SS)". */
+#define ONI_VERSION_STRING \
+ ONI_BRIEF_VERSION_STRING "-" \
+ ONI_PLATFORM_STRING " (" ONI_TIMESTAMP ")"
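For the version numbers defined above (2.2.0.33), the macros work out as follows; this is just the arithmetic spelled out, not part of the header:

/* ONI_VERSION     = 2*100000000 + 2*1000000 + 0*10000 + 33 = 202000033 */
/* ONI_API_VERSION = ONI_CREATE_API_VERSION(2, 2) = 2*1000 + 2 = 2002   */
/* ONI_BRIEF_VERSION_STRING expands to "2.2.0 (Build 33)"               */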
diff --git a/include/openni2/OpenNI.h b/include/openni2/OpenNI.h
new file mode 100644
index 00000000..52324b4e
--- /dev/null
+++ b/include/openni2/OpenNI.h
@@ -0,0 +1,2750 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _OPENNI_H_
+#define _OPENNI_H_
+
+#include "OniPlatform.h"
+#include "OniProperties.h"
+#include "OniEnums.h"
+
+#include "OniCAPI.h"
+#include "OniCProperties.h"
+
+/**
+openni is the namespace of the entire C++ API of OpenNI
+*/
+namespace openni
+{
+
+/** Pixel type used to store depth images. */
+typedef uint16_t DepthPixel;
+
+/** Pixel type used to store IR images. */
+typedef uint16_t Grayscale16Pixel;
+
+// structs
+/** Holds an OpenNI version number, which consists of four separate numbers in the format: @c major.minor.maintenance.build. For example: 2.0.0.20. */
+typedef struct
+{
+ /** Major version number, incremented for major API restructuring. */
+ int major;
+ /** Minor version number, incremented when significant new features added. */
+ int minor;
+ /** Maintenance build number, incremented for new releases that primarily provide minor bug fixes. */
+ int maintenance;
+ /** Build number. Incremented for each new API build. Generally not shown on the installer and download site. */
+ int build;
+} Version;
+
+/** Holds the value of a single color image pixel in 24-bit RGB format. */
+typedef struct
+{
+ /* Red value of this pixel. */
+ uint8_t r;
+ /* Green value of this pixel. */
+ uint8_t g;
+ /* Blue value of this pixel. */
+ uint8_t b;
+} RGB888Pixel;
+
+/**
+ Holds the value of two pixels in YUV422 format (Luminance/Chrominance,16-bits/pixel).
+ The first pixel has the values y1, u, v.
+ The second pixel has the values y2, u, v.
+*/
+typedef struct
+{
+ /** First chrominance value for two pixels, stored as blue luminance difference signal. */
+ uint8_t u;
+ /** Overall luminance value of first pixel. */
+ uint8_t y1;
+ /** Second chrominance value for two pixels, stored as red luminance difference signal. */
+ uint8_t v;
+ /** Overall luminance value of second pixel. */
+ uint8_t y2;
+} YUV422DoublePixel;
+
+/** This special URI can be passed to @ref Device::open() when the application has no concern for a specific device. */
+#if ONI_PLATFORM != ONI_PLATFORM_WIN32
+#pragma GCC diagnostic ignored "-Wunused-variable"
+#pragma GCC diagnostic push
+#endif
+static const char* ANY_DEVICE = NULL;
+#if ONI_PLATFORM != ONI_PLATFORM_WIN32
+#pragma GCC diagnostic pop
+#endif
+
+/**
+Provides a simple array class used throughout the API. Wraps a primitive array
+of objects, holding the elements and their count.
+*/
+template <class T>
+class Array
+{
+public:
+ /**
+ Default constructor. Creates an empty Array and sets the element count to zero.
+ */
+ Array() : m_data(NULL), m_count(0), m_owner(false) {}
+
+ /**
+ Constructor. Creates new Array from an existing primitive array of known size.
+
+ @tparam [in] T Object type this Array will contain.
+ @param [in] data Pointer to a primitive array of objects of type T.
+ @param [in] count Number of elements in the primitive array pointed to by data.
+ */
+ Array(const T* data, int count) : m_owner(false) { _setData(data, count); }
+
+ /**
+ Destructor. Destroys the Array object.
+ */
+ ~Array()
+ {
+ clear();
+ }
+
+ /**
+ Getter function for the Array size.
+ @returns Current number of elements in the Array.
+ */
+ int getSize() const { return m_count; }
+
+ /**
+ Implements the array indexing operator for the Array class.
+ */
+ const T& operator[](int index) const {return m_data[index];}
+
+ /**
+ @internal
+ Setter function for data. Causes this array to wrap an existing primitive array
+ of specified type. The optional data ownership flag controls whether the primitive
+ array this Array wraps will be destroyed when this Array is deconstructed.
+ @param [in] T Type of objects array will contain.
+ @param [in] data Pointer to first object in list.
+ @param [in] count Number of objects in list.
+ @param [in] isOwner Optional flag to indicate data ownership
+ */
+ void _setData(const T* data, int count, bool isOwner = false)
+ {
+ clear();
+ m_count = count;
+ m_owner = isOwner;
+ if (!isOwner)
+ {
+ m_data = data;
+ }
+ else
+ {
+ m_data = new T[count];
+ memcpy((void*)m_data, data, count*sizeof(T));
+ }
+ }
+
+private:
+ Array(const Array&);
+ Array& operator=(const Array&);
+
+ void clear()
+ {
+ if (m_owner && m_data != NULL)
+ delete []m_data;
+ m_owner = false;
+ m_data = NULL;
+ m_count = 0;
+ }
+
+ const T* m_data;
+ int m_count;
+ bool m_owner;
+};
+
+// Forward declaration of all
+class SensorInfo;
+class VideoStream;
+class VideoFrameRef;
+class Device;
+class OpenNI;
+class CameraSettings;
+class PlaybackControl;
+
+/**
+Encapsulates a group of settings for a @ref VideoStream. Settings stored include
+frame rate, resolution, and pixel format.
+
+This class is used as an input for changing the settings of a @ref VideoStream,
+as well as an output for reporting the current settings of that class. It is also used
+by @ref SensorInfo to report available video modes of a stream.
+
+Recommended practice is to use @ref SensorInfo::getSupportedVideoModes()
+to obtain a list of valid video modes, and then to use items from that list to pass
+new settings to @ref VideoStream. This is much less likely to produce an
+invalid video mode than instantiating and manually changing objects of this
+class.
+*/
+class VideoMode : private OniVideoMode
+{
+public:
+ /**
+ Default constructor, creates an empty VideoMode object. Application programs should, in most
+ cases, use the copy constructor to copy an existing valid video mode. This is much less
+ error prone than creating and attempting to configure a new VideoMode from scratch.
+ */
+ VideoMode()
+ {}
+
+ /**
+ Copy constructor, creates a new VideoMode identical to an existing VideoMode.
+
+ @param [in] other Existing VideoMode to copy.
+ */
+ VideoMode(const VideoMode& other)
+ {
+ *this = other;
+ }
+
+ /**
+ Assignment operator. Sets the pixel format, frame rate, and resolution of this
+ VideoMode to equal that of a different VideoMode.
+
+ @param [in] other Existing VideoMode to copy settings from.
+ */
+ VideoMode& operator=(const VideoMode& other)
+ {
+ setPixelFormat(other.getPixelFormat());
+ setResolution(other.getResolutionX(), other.getResolutionY());
+ setFps(other.getFps());
+
+ return *this;
+ }
+
+ /**
+ Getter function for the pixel format of this VideoMode.
+ @returns Current pixel format setting of this VideoMode.
+ */
+ PixelFormat getPixelFormat() const { return (PixelFormat)pixelFormat; }
+
+ /**
+ Getter function for the X resolution of this VideoMode.
+ @returns Current horizontal resolution of this VideoMode, in pixels.
+ */
+ int getResolutionX() const { return resolutionX; }
+
+ /**
+ Getter function for the Y resolution of this VideoMode.
+ @returns Current vertical resolution of this VideoMode, in pixels.
+ */
+ int getResolutionY() const {return resolutionY;}
+
+ /**
+ Getter function for the frame rate of this VideoMode.
+ @returns Current frame rate, measured in frames per second.
+ */
+ int getFps() const { return fps; }
+
+ /**
+ Setter function for the pixel format of this VideoMode. Application use of this
+ function is not recommended. Instead, use @ref SensorInfo::getSupportedVideoModes()
+ to obtain a list of valid video modes.
+ @param [in] format Desired new pixel format for this VideoMode.
+ */
+ void setPixelFormat(PixelFormat format) { this->pixelFormat = (OniPixelFormat)format; }
+
+ /**
+ Setter function for the resolution of this VideoMode. Application use of this
+ function is not recommended. Instead, use @ref SensorInfo::getSupportedVideoModes() to
+ obtain a list of valid video modes.
+ @param [in] resolutionX Desired new horizontal resolution in pixels.
+ @param [in] resolutionY Desired new vertical resolution in pixels.
+ */
+ void setResolution(int resolutionX, int resolutionY)
+ {
+ this->resolutionX = resolutionX;
+ this->resolutionY = resolutionY;
+ }
+
+ /**
+ Setter function for the frame rate. Application use of this function is not recommended.
+ Instead, use @ref SensorInfo::getSupportedVideoModes() to obtain a list of valid
+ video modes.
+ @param [in] fps Desired new frame rate, measured in frames per second.
+ */
+ void setFps(int fps) { this->fps = fps; }
+
+ friend class SensorInfo;
+ friend class VideoStream;
+ friend class VideoFrameRef;
+};
+
+/**
+The SensorInfo class encapsulates all info related to a specific sensor in a specific
+device.
+A @ref Device object holds a SensorInfo object for each sensor it contains.
+A @ref VideoStream object holds one SensorInfo object, describing the sensor used to produce that stream.
+
+A given SensorInfo object will contain the type of the sensor (Depth, IR or Color), and
+a list of all video modes that the sensor can support. Each available video mode will have a single
+VideoMode object that can be queried to get the details of that mode.
+
+SensorInfo objects should be the only source of VideoMode objects for the vast majority of
+application programs.
+
+Application programs will never directly instantiate objects of type SensorInfo. In fact, no
+public constructors are provided. SensorInfo objects should be obtained either from a Device or @ref VideoStream,
+and in turn be used to provide available video modes for that sensor.
+*/
+class SensorInfo
+{
+public:
+ /**
+ Provides the sensor type of the sensor this object is associated with.
+ @returns Type of the sensor.
+ */
+ SensorType getSensorType() const { return (SensorType)m_pInfo->sensorType; }
+
+ /**
+ Provides a list of video modes that this sensor can support. This function is the
+ recommended method to be used by applications to obtain @ref VideoMode objects.
+
+ @returns Reference to an array of @ref VideoMode objects, one for each supported
+ video mode.
+ */
+ const Array<VideoMode>& getSupportedVideoModes() const { return m_videoModes; }
+
+private:
+ SensorInfo(const SensorInfo&);
+ SensorInfo& operator=(const SensorInfo&);
+
+ SensorInfo() : m_pInfo(NULL), m_videoModes(NULL, 0) {}
+
+ SensorInfo(const OniSensorInfo* pInfo) : m_pInfo(NULL), m_videoModes(NULL, 0)
+ {
+ _setInternal(pInfo);
+ }
+
+ void _setInternal(const OniSensorInfo* pInfo)
+ {
+ m_pInfo = pInfo;
+ if (pInfo == NULL)
+ {
+ m_videoModes._setData(NULL, 0);
+ }
+ else
+ {
+ m_videoModes._setData(static_cast<VideoMode*>(pInfo->pSupportedVideoModes), pInfo->numSupportedVideoModes);
+ }
+ }
+
+ const OniSensorInfo* m_pInfo;
+ Array<VideoMode> m_videoModes;
+
+ friend class VideoStream;
+ friend class Device;
+};
+
+/**
+The DeviceInfo class encapsulates info related to a specific device.
+
+Applications will generally obtain objects of this type via calls to @ref OpenNI::enumerateDevices() or
+@ref openni::Device::getDeviceInfo(), and then use the various accessor functions to obtain specific
+information on that device.
+
+There should be no reason for application code to instantiate this object directly.
+*/
+class DeviceInfo : private OniDeviceInfo
+{
+public:
+ /**
+ Returns the device URI. URI can be used by @ref Device::open to open a specific device.
+ The URI string format is determined by the driver.
+ */
+ const char* getUri() const { return uri; }
+ /** Returns the vendor name for this device. */
+ const char* getVendor() const { return vendor; }
+ /** Returns the device name for this device. */
+ const char* getName() const { return name; }
+ /** Returns the USB VID code for this device. */
+ uint16_t getUsbVendorId() const { return usbVendorId; }
+ /** Returns the USB PID code for this device. */
+ uint16_t getUsbProductId() const { return usbProductId; }
+
+ friend class Device;
+ friend class OpenNI;
+};
+
+/**
+The @ref VideoFrameRef class encapsulates a single video frame - the output of a @ref VideoStream at a specific time.
+The data contained will be a single frame of color, IR, or depth video, along with associated meta data.
+
+An object of type @ref VideoFrameRef does not actually hold the data of the frame, but only a reference to it. The
+reference can be released by destroying the @ref VideoFrameRef object, or by calling the @ref release() method. The
+actual data of the frame is freed when the last reference to it is released.
+
+The usual way to obtain @ref VideoFrameRef objects is by a call to @ref VideoStream::readFrame().
+
+All data referenced by a @ref VideoFrameRef is stored as a primitive array of pixels. Each pixel will be
+of a type according to the configured pixel format (see @ref VideoMode).
+*/
+class VideoFrameRef
+{
+public:
+ /**
+ Default constructor. Creates a new empty @ref VideoFrameRef object.
+ This object will be invalid until initialized by a call to @ref VideoStream::readFrame().
+ */
+ VideoFrameRef()
+ {
+ m_pFrame = NULL;
+ }
+
+ /**
+ Destroy this object and release the reference to the frame.
+ */
+ ~VideoFrameRef()
+ {
+ release();
+ }
+
+ /**
+ Copy constructor. Creates a new @ref VideoFrameRef object. The newly created
+ object will reference the same frame current object references.
+ @param [in] other Another @ref VideoFrameRef object.
+ */
+ VideoFrameRef(const VideoFrameRef& other) : m_pFrame(NULL)
+ {
+ _setFrame(other.m_pFrame);
+ }
+
+ /**
+ Make this @ref VideoFrameRef object reference the same frame that the @c other frame references.
+ If this object referenced another frame before calling this method, the previous frame will be released.
+ @param [in] other Another @ref VideoFrameRef object.
+ */
+ VideoFrameRef& operator=(const VideoFrameRef& other)
+ {
+ _setFrame(other.m_pFrame);
+ return *this;
+ }
+
+ /**
+ Getter function for the size of the data contained by this object. Useful primarily
+ when allocating buffers.
+ @returns Current size of data pointed to by this object, measured in bytes.
+ */
+ inline int getDataSize() const
+ {
+ return m_pFrame->dataSize;
+ }
+
+ /**
+ Getter function for the array of data pointed to by this object.
+ @returns Pointer to the actual frame data array. Type of data
+ pointed to can be determined according to the pixel format (can be obtained by calling @ref getVideoMode()).
+ */
+ inline const void* getData() const
+ {
+ return m_pFrame->data;
+ }
+
+ /**
+ Getter function for the sensor type used to produce this frame. Used to determine whether
+ this is an IR, Color or Depth frame. See the @ref SensorType enumeration for all possible return
+ values from this function.
+ @returns The type of sensor used to produce this frame.
+ */
+ inline SensorType getSensorType() const
+ {
+ return (SensorType)m_pFrame->sensorType;
+ }
+
+ /**
+ Returns a reference to the @ref VideoMode object assigned to this frame. This object describes
+ the video mode the sensor was configured to when the frame was produced and can be used
+ to determine the pixel format and resolution of the data. It will also provide the frame rate
+ that the sensor was running at when it recorded this frame.
+ @returns Reference to the @ref VideoMode assigned to this frame.
+ */
+ inline const VideoMode& getVideoMode() const
+ {
+ return static_cast<const VideoMode&>(m_pFrame->videoMode);
+ }
+
+ /**
+ Provides a timestamp for the frame. The 'zero' point for this stamp
+ is implementation specific, but all streams from the same device are guaranteed to use the same zero.
+ This value can therefore be used to compute time deltas between frames from the same device,
+ regardless of whether they are from the same stream.
+ @returns Timestamp of frame, measured in microseconds from an arbitrary zero
+ */
+ inline uint64_t getTimestamp() const
+ {
+ return m_pFrame->timestamp;
+ }
+
+ /**
+ Frames are provided sequential frame ID numbers by the sensor that produced them. If frame
+ synchronization has been enabled for a device via @ref Device::setDepthColorSyncEnabled(), then frame
+ numbers for corresponding frames of depth and color are guaranteed to match.
+
+ If frame synchronization is not enabled, then there is no guarantee of matching frame indexes between
+ @ref VideoStream "VideoStreams". In the latter case, applications should use timestamps instead of frame indexes to
+ align frames in time.
+ @returns Index number for this frame.
+ */
+ inline int getFrameIndex() const
+ {
+ return m_pFrame->frameIndex;
+ }
+
+ /**
+ Gives the current width of this frame, measured in pixels. If cropping is enabled, this will be
+ the width of the cropping window. If cropping is not enabled, then this will simply be equal to
+ the X resolution of the @ref VideoMode used to produce this frame.
+ @returns Width of this frame in pixels.
+ */
+ inline int getWidth() const
+ {
+ return m_pFrame->width;
+ }
+
+ /**
+ Gives the current height of this frame, measured in pixels. If cropping is enabled, this will
+ be the height of the cropping window. If cropping is not enabled, then this will simply be equal
+ to the Y resolution of the @ref VideoMode used to produce this frame.
+ @returns Height of this frame in pixels.
+ */
+ inline int getHeight() const
+ {
+ return m_pFrame->height;
+ }
+
+ /**
+ Indicates whether cropping was enabled when the frame was produced.
+ @return true if cropping is enabled, false otherwise
+ */
+ inline bool getCroppingEnabled() const
+ {
+ return m_pFrame->croppingEnabled == TRUE;
+ }
+
+ /**
+ Indicates the X coordinate of the upper left corner of the crop window.
+ @return Distance of crop origin from left side of image, in pixels.
+ */
+ inline int getCropOriginX() const
+ {
+ return m_pFrame->cropOriginX;
+ }
+
+ /**
+ Indicates the Y coordinate of the upper left corner of the crop window.
+ @return Distance of crop origin from top of image, in pixels.
+ */
+ inline int getCropOriginY() const
+ {
+ return m_pFrame->cropOriginY;
+ }
+
+ /**
+ Gives the length of one row of pixels, measured in bytes. Primarily useful
+ for indexing the array which contains the data.
+ @returns Stride of the array which contains the image for this frame, in bytes
+ */
+ inline int getStrideInBytes() const
+ {
+ return m_pFrame->stride;
+ }
+
+ /**
+ Check if this object references an actual frame.
+ */
+ inline bool isValid() const
+ {
+ return m_pFrame != NULL;
+ }
+
+ /**
+ Release the reference to the frame. Once this method is called, the object becomes invalid, and no method
+ should be called other than the assignment operator, or passing this object to a @ref VideoStream::readFrame() call.
+ */
+ void release()
+ {
+ if (m_pFrame != NULL)
+ {
+ oniFrameRelease(m_pFrame);
+ m_pFrame = NULL;
+ }
+ }
+
+ /** @internal */
+ void _setFrame(OniFrame* pFrame)
+ {
+ setReference(pFrame);
+ if (pFrame != NULL)
+ {
+ oniFrameAddRef(pFrame);
+ }
+ }
+
+ /** @internal */
+ OniFrame* _getFrame()
+ {
+ return m_pFrame;
+ }
+
+private:
+ friend class VideoStream;
+ inline void setReference(OniFrame* pFrame)
+ {
+ // Initial - don't addref. This is the reference from OpenNI
+ release();
+ m_pFrame = pFrame;
+ }
+
+ OniFrame* m_pFrame; // const!!?
+};
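+
+/*
+ Illustrative sketch (not part of the OpenNI API itself): reading a frame and walking its
+ pixel data through a VideoFrameRef. It assumes a started depth VideoStream named
+ `depthStream` (see the VideoStream class below) configured with PIXEL_FORMAT_DEPTH_1_MM,
+ so each pixel can be read as an openni::DepthPixel. getStrideInBytes() is used for row
+ indexing because a row may be padded beyond width * sizeof(pixel).
+
+   openni::VideoFrameRef frame;
+   if (depthStream.readFrame(&frame) == openni::STATUS_OK && frame.isValid())
+   {
+       const char* pData = static_cast<const char*>(frame.getData());
+       for (int y = 0; y < frame.getHeight(); ++y)
+       {
+           const openni::DepthPixel* pRow =
+               reinterpret_cast<const openni::DepthPixel*>(pData + y * frame.getStrideInBytes());
+           for (int x = 0; x < frame.getWidth(); ++x)
+           {
+               openni::DepthPixel depth = pRow[x]; // depth in millimeters; 0 means no reading
+               (void)depth;
+           }
+       }
+   }
+*/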
+
+/**
+The @ref VideoStream object encapsulates a single video stream from a device. Once created, it is used to start data flow
+from the device, and to read individual frames of data. This is the central class used to obtain data in OpenNI. It
+provides the ability to manually read data in a polling loop, as well as providing events and a Listener class that can be
+used to implement event-driven data acquisition.
+
+Aside from the video data frames themselves, the class offers a number of functions used for obtaining information about a
+@ref VideoStream. Field of view, available video modes, and minimum and maximum valid pixel values can all be obtained.
+
+In addition to obtaining data, the @ref VideoStream object is used to set all configuration properties that apply to a specific
+stream (rather than to an entire device). In particular, it is used to control cropping, mirroring, and video modes.
+
+A pointer to a valid, initialized device that provides the desired stream type is required to create a stream.
+
+Several video streams can be created to stream data from the same sensor. This is useful if several components of an application
+need to read frames separately.
+
+While some devices might allow different streams
+from the same sensor to have different configurations, most devices will have a single configuration for the sensor,
+shared by all streams.
+*/
+class VideoStream
+{
+public:
+ /**
+ The @ref VideoStream::NewFrameListener class is provided to allow the implementation of event driven frame reading. To use
+ it, create a class that inherits from it and override the onNewFrame() method. Then, register
+ an instance of your class with an active @ref VideoStream using the @ref VideoStream::addNewFrameListener() function. Once this is done, the
+ event handler function you implemented will be called whenever a new frame becomes available. You may call
+ @ref VideoStream::readFrame() from within the event handler.
+ */
+ class NewFrameListener
+ {
+ public:
+ /**
+ Default constructor.
+ */
+ NewFrameListener() : m_callbackHandle(NULL)
+ {
+ }
+
+ virtual ~NewFrameListener()
+ {
+ }
+
+ /**
+ Derived classes should implement this function to handle new frames.
+ */
+ virtual void onNewFrame(VideoStream&) = 0;
+
+ private:
+ friend class VideoStream;
+
+ static void ONI_CALLBACK_TYPE callback(OniStreamHandle streamHandle, void* pCookie)
+ {
+ NewFrameListener* pListener = (NewFrameListener*)pCookie;
+ VideoStream stream;
+ stream._setHandle(streamHandle);
+ pListener->onNewFrame(stream);
+ stream._setHandle(NULL);
+ }
+ OniCallbackHandle m_callbackHandle;
+ };
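+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): a minimal NewFrameListener
+  implementation registered on a created stream named `stream`. The class and variable
+  names are placeholders; onNewFrame() is typically invoked from an internal OpenNI
+  thread, so the handler should return quickly.
+
+    class PrintingListener : public openni::VideoStream::NewFrameListener
+    {
+    public:
+        virtual void onNewFrame(openni::VideoStream& stream)
+        {
+            openni::VideoFrameRef frame;
+            if (stream.readFrame(&frame) == openni::STATUS_OK)
+            {
+                printf("frame %d: %dx%d\n", frame.getFrameIndex(), frame.getWidth(), frame.getHeight());
+            }
+        }
+    };
+
+    PrintingListener listener;
+    stream.addNewFrameListener(&listener);
+    // ... later ...
+    stream.removeNewFrameListener(&listener);
+ */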
+
+ class FrameAllocator
+ {
+ public:
+ virtual ~FrameAllocator() {}
+ virtual void* allocateFrameBuffer(int size) = 0;
+ virtual void freeFrameBuffer(void* data) = 0;
+
+ private:
+ friend class VideoStream;
+
+ static void* ONI_CALLBACK_TYPE allocateFrameBufferCallback(int size, void* pCookie)
+ {
+ FrameAllocator* pThis = (FrameAllocator*)pCookie;
+ return pThis->allocateFrameBuffer(size);
+ }
+
+ static void ONI_CALLBACK_TYPE freeFrameBufferCallback(void* data, void* pCookie)
+ {
+ FrameAllocator* pThis = (FrameAllocator*)pCookie;
+ pThis->freeFrameBuffer(data);
+ }
+ };
+
+ /**
+ Default constructor. Creates a new, non-valid @ref VideoStream object. The object created will be invalid until its create() function
+ is called with a valid Device.
+ */
+ VideoStream() : m_stream(NULL), m_sensorInfo(), m_pCameraSettings(NULL), m_isOwner(true)
+ {}
+
+ /**
+ Handle constructor. Creates a VideoStream object based on the given initialized handle.
+ This object will not destroy the underlying handle when @ref destroy() or the destructor is called.
+ */
+ explicit VideoStream(OniStreamHandle handle) : m_stream(NULL), m_sensorInfo(), m_pCameraSettings(NULL), m_isOwner(false)
+ {
+ _setHandle(handle);
+ }
+
+ /**
+ Destructor. The destructor calls the destroy() function, but it is considered a best practice for applications to
+ call destroy() manually on any @ref VideoStream that they run create() on.
+ */
+ ~VideoStream()
+ {
+ destroy();
+ }
+
+ /**
+ Checks to see if this object has been properly initialized and currently points to a valid stream.
+ @returns true if this object has been previously initialized, false otherwise.
+ */
+ bool isValid() const
+ {
+ return m_stream != NULL;
+ }
+
+ /**
+ Creates a stream of frames from a specific sensor type of a specific device. You must supply a reference to a
+ Device that supplies the sensor type requested. You can use @ref Device::hasSensor() to check whether a
+ given sensor is available on your target device before calling create().
+
+ @param [in] device A reference to the @ref Device you want to create the stream on.
+ @param [in] sensorType The type of sensor the stream should produce data from.
+ @returns Status code indicating success or failure for this operation.
+ */
+ inline Status create(const Device& device, SensorType sensorType);
+
+ /**
+ Destroy this stream. This function is currently called automatically by the destructor, but it is
+ considered a best practice for applications to manually call this function on any @ref VideoStream that they
+ call create() for.
+ */
+ inline void destroy();
+
+ /**
+ Provides the @ref SensorInfo object associated with the sensor that is producing this @ref VideoStream. Note that
+ the returned @ref SensorInfo will not contain valid information if the stream has not yet been initialized with the create() function.
+
+ @ref SensorInfo is useful primarily as a means of learning which video modes are valid for this VideoStream.
+
+ @returns Reference to the SensorInfo object associated with the sensor providing this stream.
+ */
+ const SensorInfo& getSensorInfo() const
+ {
+ return m_sensorInfo;
+ }
+
+ /**
+ Starts data generation from this video stream.
+ */
+ Status start()
+ {
+ if (!isValid())
+ {
+ return STATUS_ERROR;
+ }
+
+ return (Status)oniStreamStart(m_stream);
+ }
+
+ /**
+ Stops data generation from this video stream.
+ */
+ void stop()
+ {
+ if (!isValid())
+ {
+ return;
+ }
+
+ oniStreamStop(m_stream);
+ }
+
+ /**
+ Read the next frame from this video stream, delivered as a @ref VideoFrameRef. This is the primary
+ method for manually obtaining frames of video data.
+ If no new frame is available, the call will block until one is available.
+ To avoid blocking, use a @ref VideoStream::NewFrameListener to implement an event-driven architecture. Another
+ alternative is to use @ref OpenNI::waitForAnyStream() to wait for new frames from several streams.
+
+ @param [out] pFrame Pointer to a @ref VideoFrameRef object to hold the reference to the new frame.
+ @returns Status code to indicate success or failure of this function.
+ */
+ Status readFrame(VideoFrameRef* pFrame)
+ {
+ if (!isValid())
+ {
+ return STATUS_ERROR;
+ }
+
+ OniFrame* pOniFrame;
+ Status rc = (Status)oniStreamReadFrame(m_stream, &pOniFrame);
+
+ pFrame->setReference(pOniFrame);
+ return rc;
+ }
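+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): the usual
+  create/start/readFrame/stop lifecycle, polling for frames in a loop. It assumes an
+  already opened Device named `device` that provides a depth sensor; error handling is
+  reduced to simple status checks.
+
+    openni::VideoStream depth;
+    if (depth.create(device, openni::SENSOR_DEPTH) == openni::STATUS_OK &&
+        depth.start() == openni::STATUS_OK)
+    {
+        for (int i = 0; i < 100; ++i)
+        {
+            openni::VideoFrameRef frame;
+            if (depth.readFrame(&frame) != openni::STATUS_OK)  // blocks until a frame arrives
+            {
+                break;
+            }
+            // ... use frame ...
+        }
+        depth.stop();
+    }
+    depth.destroy();
+ */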
+
+ /**
+ Adds a new Listener to receive this VideoStream's onNewFrame event. See @ref VideoStream::NewFrameListener for
+ more information on implementing an event-driven frame reading architecture. An instance of a listener can be added to only one source.
+
+ @param [in] pListener Pointer to a @ref VideoStream::NewFrameListener object (or a derivative) that will respond to this event.
+ @returns Status code indicating success or failure of the operation.
+ */
+ Status addNewFrameListener(NewFrameListener* pListener)
+ {
+ if (!isValid())
+ {
+ return STATUS_ERROR;
+ }
+
+ return (Status)oniStreamRegisterNewFrameCallback(m_stream, pListener->callback, pListener, &pListener->m_callbackHandle);
+ }
+
+ /**
+ Removes a Listener from this video stream's list of listeners. The removed listener will no longer receive new frame events from this stream.
+ @param [in] pListener Pointer to the listener object to be removed.
+ */
+ void removeNewFrameListener(NewFrameListener* pListener)
+ {
+ if (!isValid())
+ {
+ return;
+ }
+
+ oniStreamUnregisterNewFrameCallback(m_stream, pListener->m_callbackHandle);
+ pListener->m_callbackHandle = NULL;
+ }
+
+ /**
+ Sets the frame buffers allocator for this video stream.
+ @param [in] pAllocator Pointer to the frame buffers allocator object. Pass NULL to return to default frame allocator.
+ @returns ONI_STATUS_OUT_OF_FLOW if the stream is streaming; the frame buffers allocator cannot be set while the stream is streaming.
+ */
+ Status setFrameBuffersAllocator(FrameAllocator* pAllocator)
+ {
+ if (!isValid())
+ {
+ return STATUS_ERROR;
+ }
+
+ if (pAllocator == NULL)
+ {
+ return (Status)oniStreamSetFrameBuffersAllocator(m_stream, NULL, NULL, NULL);
+ }
+ else
+ {
+ return (Status)oniStreamSetFrameBuffersAllocator(m_stream, pAllocator->allocateFrameBufferCallback, pAllocator->freeFrameBufferCallback, pAllocator);
+ }
+ }
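+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): a minimal FrameAllocator that
+  hands out plain heap buffers, installed before the stream is started (the allocator
+  cannot be changed while the stream is streaming). The class name is a placeholder; a
+  real allocator would typically recycle buffers from a pool.
+
+    class HeapFrameAllocator : public openni::VideoStream::FrameAllocator
+    {
+    public:
+        virtual void* allocateFrameBuffer(int size) { return new char[size]; }
+        virtual void freeFrameBuffer(void* data)    { delete[] static_cast<char*>(data); }
+    };
+
+    HeapFrameAllocator allocator;
+    stream.setFrameBuffersAllocator(&allocator);   // before stream.start()
+    // ...
+    stream.setFrameBuffersAllocator(NULL);         // restore the default allocator
+ */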
+
+ /**
+ @internal
+ Get an internal handle. This handle can be used via the C API.
+ */
+ OniStreamHandle _getHandle() const
+ {
+ return m_stream;
+ }
+
+ /**
+ Gets an object through which several camera settings can be configured.
+ @returns NULL if the stream doesn't support camera settings.
+ */
+ CameraSettings* getCameraSettings() {return m_pCameraSettings;}
+
+ /**
+ General function for obtaining the value of stream specific properties.
+ There are convenience functions available for all commonly used properties, so it is not
+ expected that applications will make direct use of the getProperty function very often.
+
+ @param [in] propertyId The numerical ID of the property to be queried.
+ @param [out] data Place to store the value of the property.
+ @param [in,out] dataSize IN: Size of the buffer passed in the @c data argument. OUT: the actual written size.
+ @returns Status code indicating success or failure of this operation.
+ */
+ Status getProperty(int propertyId, void* data, int* dataSize) const
+ {
+ if (!isValid())
+ {
+ return STATUS_ERROR;
+ }
+
+ return (Status)oniStreamGetProperty(m_stream, propertyId, data, dataSize);
+ }
+
+ /**
+ General function for setting the value of stream specific properties.
+ There are convenience functions available for all commonly used properties, so it is not
+ expected that applications will make direct use of the setProperty function very often.
+
+ @param [in] propertyId The numerical ID of the property to be set.
+ @param [in] data Place to store the data to be written to the property.
+ @param [in] dataSize Size of the data to be written to the property.
+ @returns Status code indicating success or failure of this operation.
+ */
+ Status setProperty(int propertyId, const void* data, int dataSize)
+ {
+ if (!isValid())
+ {
+ return STATUS_ERROR;
+ }
+
+ return (Status)oniStreamSetProperty(m_stream, propertyId, data, dataSize);
+ }
+
+ /**
+ Get the current video mode information for this video stream.
+ This includes its resolution, fps and stream format.
+
+ @returns Current video mode information for this video stream.
+ */
+ VideoMode getVideoMode() const
+ {
+ VideoMode videoMode;
+ getProperty(STREAM_PROPERTY_VIDEO_MODE, static_cast<OniVideoMode*>(&videoMode));
+ return videoMode;
+ }
+
+ /**
+ Changes the current video mode of this stream. Recommended practice is to use @ref Device::getSensorInfo(), and
+ then @ref SensorInfo::getSupportedVideoModes() to obtain a list of valid video mode settings for this stream. Then,
+ pass a valid @ref VideoMode to @ref setVideoMode to ensure correct operation.
+
+ @param [in] videoMode Desired new video mode for this stream.
+ @returns Status code indicating success or failure of this operation.
+ */
+ Status setVideoMode(const VideoMode& videoMode)
+ {
+ return setProperty(STREAM_PROPERTY_VIDEO_MODE, static_cast<const OniVideoMode&>(videoMode));
+ }
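+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): selecting a video mode as
+  recommended above, by scanning the modes reported by SensorInfo::getSupportedVideoModes()
+  for a desired resolution and pixel format before calling setVideoMode(). The target
+  values are arbitrary examples; `depth` is an assumed, already created stream.
+
+    const openni::Array<openni::VideoMode>& modes = depth.getSensorInfo().getSupportedVideoModes();
+    for (int i = 0; i < modes.getSize(); ++i)
+    {
+        if (modes[i].getResolutionX() == 640 && modes[i].getResolutionY() == 480 &&
+            modes[i].getPixelFormat() == openni::PIXEL_FORMAT_DEPTH_1_MM)
+        {
+            depth.setVideoMode(modes[i]);
+            break;
+        }
+    }
+ */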
+
+ /**
+ Provides the maximum possible value for pixels obtained by this stream. This is most useful for
+ getting the maximum possible value of depth streams.
+ @returns Maximum possible pixel value.
+ */
+ int getMaxPixelValue() const
+ {
+ int maxValue;
+ Status rc = getProperty(STREAM_PROPERTY_MAX_VALUE, &maxValue);
+ if (rc != STATUS_OK)
+ {
+ return 0;
+ }
+ return maxValue;
+ }
+
+ /**
+ Provides the smallest possible value for pixels obtained by this VideoStream. This is most useful
+ for getting the minimum possible value that will be reported by a depth stream.
+ @returns Minimum possible pixel value that can come from this stream.
+ */
+ int getMinPixelValue() const
+ {
+ int minValue;
+ Status rc = getProperty(STREAM_PROPERTY_MIN_VALUE, &minValue);
+ if (rc != STATUS_OK)
+ {
+ return 0;
+ }
+ return minValue;
+ }
+
+ /**
+ Checks whether this stream supports cropping.
+ @returns true if the stream supports cropping, false if it does not.
+ */
+ bool isCroppingSupported() const
+ {
+ return isPropertySupported(STREAM_PROPERTY_CROPPING);
+ }
+
+ /**
+ Obtains the current cropping settings for this stream.
+ @param [out] pOriginX X coordinate of the upper left corner of the cropping window
+ @param [out] pOriginY Y coordinate of the upper left corner of the cropping window
+ @param [out] pWidth Horizontal width of the cropping window, in pixels
+ @param [out] pHeight Vertical height of the cropping window, in pixels
+ @returns true if cropping is currently enabled, false if it is not.
+ */
+ bool getCropping(int* pOriginX, int* pOriginY, int* pWidth, int* pHeight) const
+ {
+ OniCropping cropping;
+ bool enabled = false;
+
+ Status rc = getProperty(STREAM_PROPERTY_CROPPING, &cropping);
+
+ if (rc == STATUS_OK)
+ {
+ *pOriginX = cropping.originX;
+ *pOriginY = cropping.originY;
+ *pWidth = cropping.width;
+ *pHeight = cropping.height;
+ enabled = (cropping.enabled == TRUE);
+ }
+
+ return enabled;
+ }
+
+ /**
+ Changes the cropping settings for this stream. You can use the @ref isCroppingSupported()
+ function to make sure cropping is supported before calling this function.
+ @param [in] originX New X coordinate of the upper left corner of the cropping window.
+ @param [in] originY New Y coordinate of the upper left corner of the cropping window.
+ @param [in] width New horizontal width for the cropping window, in pixels.
+ @param [in] height New vertical height for the cropping window, in pixels.
+ @returns Status code indicating success or failure of this operation.
+ */
+ Status setCropping(int originX, int originY, int width, int height)
+ {
+ OniCropping cropping;
+ cropping.enabled = true;
+ cropping.originX = originX;
+ cropping.originY = originY;
+ cropping.width = width;
+ cropping.height = height;
+ return setProperty(STREAM_PROPERTY_CROPPING, cropping);
+ }
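+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): enabling a crop window only
+  after verifying support, then reading it back and disabling it. The window coordinates
+  are arbitrary examples on an assumed stream named `depth`.
+
+    if (depth.isCroppingSupported())
+    {
+        depth.setCropping(80, 60, 320, 240);   // originX, originY, width, height
+
+        int x, y, w, h;
+        if (depth.getCropping(&x, &y, &w, &h))
+        {
+            printf("cropping: %dx%d at (%d,%d)\n", w, h, x, y);
+        }
+        depth.resetCropping();
+    }
+ */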
+
+ /**
+ Disables cropping.
+ @returns Status code indicating success or failure of this operation.
+ */
+ Status resetCropping()
+ {
+ OniCropping cropping;
+ cropping.enabled = false;
+ return setProperty(STREAM_PROPERTY_CROPPING, cropping);
+ }
+
+ /**
+ Check whether mirroring is currently turned on for this stream.
+ @returns true if mirroring is currently enabled, false otherwise.
+ */
+ bool getMirroringEnabled() const
+ {
+ OniBool enabled;
+ Status rc = getProperty(STREAM_PROPERTY_MIRRORING, &enabled);
+ if (rc != STATUS_OK)
+ {
+ return false;
+ }
+ return enabled == TRUE;
+ }
+
+ /**
+ Enable or disable mirroring for this stream.
+ @param [in] isEnabled true to enable mirroring, false to disable it.
+ @returns Status code indicating the success or failure of this operation.
+ */
+ Status setMirroringEnabled(bool isEnabled)
+ {
+ return setProperty(STREAM_PROPERTY_MIRRORING, isEnabled ? TRUE : FALSE);
+ }
+
+ /**
+ Gets the horizontal field of view of frames received from this stream.
+ @returns Horizontal field of view, in radians.
+ */
+ float getHorizontalFieldOfView() const
+ {
+ float horizontal = 0;
+ getProperty(STREAM_PROPERTY_HORIZONTAL_FOV, &horizontal);
+ return horizontal;
+ }
+
+ /**
+ Gets the vertical field of view of frames received from this stream.
+ @returns Vertical field of view, in radians.
+ */
+ float getVerticalFieldOfView() const
+ {
+ float vertical = 0;
+ getProperty(STREAM_PROPERTY_VERTICAL_FOV, &vertical);
+ return vertical;
+ }
+
+ /**
+ Function for setting a value of a stream property using an arbitrary input type.
+ There are convenience functions available for all commonly used properties, so it is not
+ expected that applications will make direct use of this function very often.
+ @tparam [in] T Data type of the value to be passed to the property.
+ @param [in] propertyId The numerical ID of the property to be set.
+ @param [in] value Data to be sent to the property.
+ @returns Status code indicating success or failure of this operation.
+ */
+ template <class T>
+ Status setProperty(int propertyId, const T& value)
+ {
+ return setProperty(propertyId, &value, sizeof(T));
+ }
+
+ /**
+ Function for getting the value from a property using an arbitrary output type.
+ There are convenience functions available for all commonly used properties, so it is not
+ expected that applications will make direct use of this function very often.
+ @tparam [in] T Data type of the value to be read.
+ @param [in] propertyId The numerical ID of the property to be read.
+ @param [in, out] value Pointer to a place to store the value read from the property.
+ @returns Status code indicating success or failure of this operation.
+ */
+ template <class T>
+ Status getProperty(int propertyId, T* value) const
+ {
+ int size = sizeof(T);
+ return getProperty(propertyId, value, &size);
+ }
+
+ /**
+ Checks if a specific property is supported by the video stream.
+ @param [in] propertyId Property to be checked.
+ @returns true if the property is supported, false otherwise.
+ */
+ bool isPropertySupported(int propertyId) const
+ {
+ if (!isValid())
+ {
+ return false;
+ }
+
+ return oniStreamIsPropertySupported(m_stream, propertyId) == TRUE;
+ }
+
+ /**
+ Invokes a command that takes an arbitrary data type as its input. It is not expected that
+ application code will need this function frequently, as all commonly used properties have
+ higher level functions provided.
+ @param [in] commandId Numerical code of the property to be invoked.
+ @param [in] data Data to be passed to the property.
+ @param [in] dataSize size of the buffer passed in @c data.
+ @returns Status code indicating success or failure of this operation.
+ */
+ Status invoke(int commandId, void* data, int dataSize)
+ {
+ if (!isValid())
+ {
+ return STATUS_ERROR;
+ }
+
+ return (Status)oniStreamInvoke(m_stream, commandId, data, dataSize);
+ }
+
+ /**
+ Invokes a command that takes an arbitrary data type as its input. It is not expected that
+ application code will need this function frequently, as all commonly used properties have
+ higher level functions provided.
+ @tparam [in] T Type of data to be passed to the property.
+ @param [in] commandId Numerical code of the property to be invoked.
+ @param [in] value Data to be passed to the property.
+ @returns Status code indicating success or failure of this operation.
+ */
+ template <class T>
+ Status invoke(int commandId, T& value)
+ {
+ return invoke(commandId, &value, sizeof(T));
+ }
+
+ /**
+ Checks if a specific command is supported by the video stream.
+ @param [in] commandId Command to be checked.
+ @returns true if the command is supported, false otherwise.
+ */
+ bool isCommandSupported(int commandId) const
+ {
+ if (!isValid())
+ {
+ return false;
+ }
+
+ return (Status)oniStreamIsCommandSupported(m_stream, commandId) == TRUE;
+ }
+
+private:
+ friend class Device;
+
+ void _setHandle(OniStreamHandle stream)
+ {
+ m_sensorInfo._setInternal(NULL);
+ m_stream = stream;
+
+ if (stream != NULL)
+ {
+ m_sensorInfo._setInternal(oniStreamGetSensorInfo(m_stream));
+ }
+ }
+
+private:
+ VideoStream(const VideoStream& other);
+ VideoStream& operator=(const VideoStream& other);
+
+ OniStreamHandle m_stream;
+ SensorInfo m_sensorInfo;
+ CameraSettings* m_pCameraSettings;
+ bool m_isOwner;
+};
+
+/**
+The Device object abstracts a specific device; either a single hardware device, or a file
+device holding a recording from a hardware device. It offers the ability to connect to
+the device, and obtain information about its configuration and the data streams it can offer.
+
+It provides the means to query and change all configuration parameters that apply to the
+device as a whole. This includes enabling depth/color image registration and frame
+synchronization.
+
+Devices are used when creating and initializing @ref VideoStream "VideoStreams" -- you will need a valid pointer to
+a Device in order to use the VideoStream.create() function. This, along with configuration, is
+the primary use of this class for application developers.
+
+Before devices can be created, @ref OpenNI::initialize() must have been run to make the device drivers
+on the system available to the API.
+*/
+class Device
+{
+public:
+ /**
+ Default constructor. Creates a new empty Device object. This object will be invalid until it is initialized by
+ calling its open() function.
+ */
+ Device() : m_pPlaybackControl(NULL), m_device(NULL), m_isOwner(true)
+ {
+ clearSensors();
+ }
+
+ /**
+ Handle constructor. Creates a Device object based on the given initialized handle.
+ This object will not destroy the underlying handle when @ref close() or the destructor is called.
+ */
+ explicit Device(OniDeviceHandle handle) : m_pPlaybackControl(NULL), m_device(NULL), m_isOwner(false)
+ {
+ _setHandle(handle);
+ }
+
+ /**
+ The destructor calls the @ref close() function, but it is considered a best practice for applications to
+ call @ref close() manually on any @ref Device that they run @ref open() on.
+ */
+ ~Device()
+ {
+ if (m_device != NULL)
+ {
+ close();
+ }
+ }
+
+ /**
+ Opens a device. This can either open a device chosen arbitrarily from all devices
+ on the system, or open a specific device selected by passing this function the device URI.
+
+ To open any device, simply pass the constant @ref ANY_DEVICE to this function. If multiple
+ devices are connected to the system, then one of them will be opened. This procedure is most
+ useful when it is known that exactly one device is (or can be) connected to the system. In that case,
+ requesting a list of all devices and iterating through it would be a waste of effort.
+
+ If multiple devices are (or may be) connected to a system, then a URI will be required to select
+ a specific device to open. There are two ways to obtain a URI: from a DeviceConnected event, or
+ by calling @ref OpenNI::enumerateDevices().
+
+ In the case of a DeviceConnected event, the @ref OpenNI::DeviceConnectedListener will be provided with a DeviceInfo object
+ as an argument to its @ref OpenNI::DeviceConnectedListener::onDeviceConnected "onDeviceConnected()" function.
+ The DeviceInfo.getUri() function can then be used to obtain the URI.
+
+ If the application is not using event handlers, then it can also call the static function
+ @ref OpenNI::enumerateDevices(). This will return an array of @ref DeviceInfo objects, one for each device
+ currently available to the system. The application can then iterate through this list and
+ select the desired device. The URI is again obtained via the @ref DeviceInfo::getUri() function.
+
+ Standard codes of type Status are returned indicating whether opening was successful.
+
+ @param [in] uri String containing the URI of the device to be opened, or @ref ANY_DEVICE.
+ @returns Status code with the outcome of the open operation.
+
+ @remark For opening a recording file, pass the file path as a uri.
+ */
+ inline Status open(const char* uri);
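+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): opening a device after
+  OpenNI::initialize() has succeeded, either the first device available or a specific
+  device chosen from the enumeration by URI.
+
+    openni::Device device;
+
+    // Option 1: open whichever device is available.
+    if (device.open(openni::ANY_DEVICE) != openni::STATUS_OK)
+    {
+        printf("open failed: %s\n", openni::OpenNI::getExtendedError());
+    }
+
+    // Option 2 (instead of option 1): enumerate and open a specific device by URI.
+    // openni::Array<openni::DeviceInfo> devices;
+    // openni::OpenNI::enumerateDevices(&devices);
+    // if (devices.getSize() > 0)
+    // {
+    //     device.open(devices[0].getUri());
+    // }
+ */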
+
+ /**
+ Closes the device. This properly closes any files or shuts down hardware, as appropriate. This
+ function is currently called by the destructor if not called manually by application code, but it
+ is considered a best practice to manually close any device that was opened.
+ */
+ inline void close();
+
+ /**
+ Provides information about this device in the form of a DeviceInfo object. This object can
+ be used to access the URI of the device, as well as various USB descriptor strings that might
+ be useful to an application.
+
+ Note that valid device info will not be available if this device has not yet been opened. If you are
+ trying to obtain a URI to open a device, use OpenNI::enumerateDevices() instead.
+ @returns DeviceInfo object for this Device
+ */
+ const DeviceInfo& getDeviceInfo() const
+ {
+ return m_deviceInfo;
+ }
+
+ /**
+ This function checks to see if one of the specific sensor types defined in @ref SensorType is
+ available on this device. This allows an application to, for example, query for the presence
+ of a depth sensor, or color sensor.
+ @param [in] sensorType The type of sensor to query for.
+ @returns true if the Device supports the sensor queried, false otherwise.
+ */
+ bool hasSensor(SensorType sensorType)
+ {
+ int i;
+ for (i = 0; (i < ONI_MAX_SENSORS) && (m_aSensorInfo[i].m_pInfo != NULL); ++i)
+ {
+ if (m_aSensorInfo[i].getSensorType() == sensorType)
+ {
+ return true;
+ }
+ }
+
+ if (i == ONI_MAX_SENSORS)
+ {
+ return false;
+ }
+
+ const OniSensorInfo* pInfo = oniDeviceGetSensorInfo(m_device, (OniSensorType)sensorType);
+
+ if (pInfo == NULL)
+ {
+ return false;
+ }
+
+ m_aSensorInfo[i]._setInternal(pInfo);
+
+ return true;
+ }
+
+ /**
+ Get the @ref SensorInfo for a specific sensor type on this device. The @ref SensorInfo
+ is useful primarily for determining which video modes are supported by the sensor.
+ @param [in] sensorType The type of sensor to get information about.
+ @returns SensorInfo object corresponding to the sensor type specified, or NULL if such a sensor
+ is not available from this device.
+ */
+ const SensorInfo* getSensorInfo(SensorType sensorType)
+ {
+ int i;
+ for (i = 0; (i < ONI_MAX_SENSORS) && (m_aSensorInfo[i].m_pInfo != NULL); ++i)
+ {
+ if (m_aSensorInfo[i].getSensorType() == sensorType)
+ {
+ return &m_aSensorInfo[i];
+ }
+ }
+
+ // not found. check to see we have additional space
+ if (i == ONI_MAX_SENSORS)
+ {
+ return NULL;
+ }
+
+ const OniSensorInfo* pInfo = oniDeviceGetSensorInfo(m_device, (OniSensorType)sensorType);
+ if (pInfo == NULL)
+ {
+ return NULL;
+ }
+
+ m_aSensorInfo[i]._setInternal(pInfo);
+ return &m_aSensorInfo[i];
+ }
+
+ /**
+ @internal
+ Get an internal handle. This handle can be used via the C API.
+ */
+ OniDeviceHandle _getHandle() const
+ {
+ return m_device;
+ }
+
+ /**
+ Gets an object through which playback of a file device can be controlled.
+ @returns NULL if this device is not a file device.
+ */
+ PlaybackControl* getPlaybackControl() {return m_pPlaybackControl;}
+
+ /**
+ Get the value of a general property of the device.
+ There are convenience functions for all the commonly used properties, such as
+ image registration and frame synchronization. It is expected for this reason
+ that this function will rarely be directly used by applications.
+
+ @param [in] propertyId Numerical ID of the property you would like to check.
+ @param [out] data Place to store the value of the property.
+ @param [in,out] dataSize IN: Size of the buffer passed in the @c data argument. OUT: the actual written size.
+ @returns Status code indicating results of this operation.
+ */
+ Status getProperty(int propertyId, void* data, int* dataSize) const
+ {
+ return (Status)oniDeviceGetProperty(m_device, propertyId, data, dataSize);
+ }
+
+ /**
+ Sets the value of a general property of the device.
+ There are convenience functions for all the commonly used properties, such as
+ image registration and frame synchronization. It is expected for this reason
+ that this function will rarely be directly used by applications.
+
+ @param [in] propertyId The numerical ID of the property to be set.
+ @param [in] data Place to store the data to be written to the property.
+ @param [in] dataSize Size of the data to be written to the property.
+ @returns Status code indicating results of this operation.
+ */
+ Status setProperty(int propertyId, const void* data, int dataSize)
+ {
+ return (Status)oniDeviceSetProperty(m_device, propertyId, data, dataSize);
+ }
+
+ /**
+ Checks to see if this device can support registration of color video and depth video.
+ Image registration is used to properly superimpose two images from cameras located at different
+ points in space. Please see the OpenNI 2.0 Programmer's Guide for more information about
+ registration.
+ @returns true if image registration is supported by this device, false otherwise.
+ */
+ bool isImageRegistrationModeSupported(ImageRegistrationMode mode) const
+ {
+ return (oniDeviceIsImageRegistrationModeSupported(m_device, (OniImageRegistrationMode)mode) == TRUE);
+ }
+
+ /**
+ Gets the current image registration mode of this device.
+ Image registration is used to properly superimpose two images from cameras located at different
+ points in space. Please see the OpenNI 2.0 Programmer's Guide for more information about
+ registration.
+ @returns Current image registration mode. See @ref ImageRegistrationMode for possible return values.
+ */
+ ImageRegistrationMode getImageRegistrationMode() const
+ {
+ ImageRegistrationMode mode;
+ Status rc = getProperty(DEVICE_PROPERTY_IMAGE_REGISTRATION, &mode);
+ if (rc != STATUS_OK)
+ {
+ return IMAGE_REGISTRATION_OFF;
+ }
+ return mode;
+ }
+
+ /**
+ Sets the image registration on this device.
+ Image registration is used to properly superimpose two images from cameras located at different
+ points in space. Please see the OpenNI 2.0 Programmer's Guide for more information about
+ registration.
+
+ See @ref ImageRegistrationMode for a list of valid settings to pass to this function.
+
+ It is a good practice to first check if the mode is supported by calling @ref isImageRegistrationModeSupported().
+
+ @param [in] mode Desired new value for the image registration mode.
+ @returns Status code for the operation.
+ */
+ Status setImageRegistrationMode(ImageRegistrationMode mode)
+ {
+ return setProperty(DEVICE_PROPERTY_IMAGE_REGISTRATION, mode);
+ }
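+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): enabling depth-to-color
+  registration on an opened device named `device`, after checking that the mode is
+  supported as recommended above.
+
+    if (device.isImageRegistrationModeSupported(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR))
+    {
+        device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
+    }
+    else
+    {
+        device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_OFF);
+    }
+ */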
+
+ /**
+ Checks whether this Device object is currently connected to an actual file or hardware device.
+ @returns true if the Device is connected, false otherwise.
+ */
+ bool isValid() const
+ {
+ return m_device != NULL;
+ }
+
+ /**
+ Checks whether this device is a file device (i.e. a recording).
+ @returns true if this is a file device, false otherwise.
+ */
+ bool isFile() const
+ {
+ return isPropertySupported(DEVICE_PROPERTY_PLAYBACK_SPEED) &&
+ isPropertySupported(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED) &&
+ isCommandSupported(DEVICE_COMMAND_SEEK);
+ }
+
+ /**
+ Used to turn the depth/color frame synchronization feature on and off. When frame synchronization
+ is enabled, the device will deliver depth and image frames that are separated in time
+ by some maximum value. When disabled, the phase difference between depth and image frame
+ generation cannot be guaranteed.
+ @param [in] isEnabled Set to TRUE to enable synchronization, FALSE to disable it
+ @returns Status code indicating success or failure of this operation
+ */
+ Status setDepthColorSyncEnabled(bool isEnabled)
+ {
+ Status rc = STATUS_OK;
+
+ if (isEnabled)
+ {
+ rc = (Status)oniDeviceEnableDepthColorSync(m_device);
+ }
+ else
+ {
+ oniDeviceDisableDepthColorSync(m_device);
+ }
+
+ return rc;
+ }
+
+ bool getDepthColorSyncEnabled()
+ {
+ return oniDeviceGetDepthColorSyncEnabled(m_device) == TRUE;
+ }
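+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): enabling depth/color frame
+  synchronization so that corresponding depth and color frames carry matching frame
+  indexes (see VideoFrameRef::getFrameIndex()). Assumes started streams `depth` and
+  `color` created on `device`.
+
+    if (device.setDepthColorSyncEnabled(true) == openni::STATUS_OK)
+    {
+        openni::VideoFrameRef depthFrame, colorFrame;
+        depth.readFrame(&depthFrame);
+        color.readFrame(&colorFrame);
+        // With sync enabled, frames that belong together are expected to report the
+        // same getFrameIndex(); without it, timestamps should be used to align frames.
+    }
+ */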
+
+ /**
+ Sets a property that takes an arbitrary data type as its input. It is not expected that
+ application code will need this function frequently, as all commonly used properties have
+ higher level functions provided.
+
+ @tparam T Type of data to be passed to the property.
+ @param [in] propertyId The numerical ID of the property to be set.
+ @param [in] value Place to store the data to be written to the property.
+ @returns Status code indicating success or failure of this operation.
+ */
+ template <class T>
+ Status setProperty(int propertyId, const T& value)
+ {
+ return setProperty(propertyId, &value, sizeof(T));
+ }
+
+ /**
+ Checks a property that provides an arbitrary data type as its output. It is not expected that
+ application code will need this function frequently, as all commonly used properties have
+ higher level functions provided.
+ @tparam [in] T Data type of the value to be read.
+ @param [in] propertyId The numerical ID of the property to be read.
+ @param [in, out] value Pointer to a place to store the value read from the property.
+ @returns Status code indicating success or failure of this operation.
+ */
+ template <class T>
+ Status getProperty(int propertyId, T* value) const
+ {
+ int size = sizeof(T);
+ return getProperty(propertyId, value, &size);
+ }
+
+ /**
+ Checks if a specific property is supported by the device.
+ @param [in] propertyId Property to be checked.
+ @returns true if the property is supported, false otherwise.
+ */
+ bool isPropertySupported(int propertyId) const
+ {
+ return oniDeviceIsPropertySupported(m_device, propertyId) == TRUE;
+ }
+
+ /**
+ Invokes a command that takes an arbitrary data type as its input. It is not expected that
+ application code will need this function frequently, as all commonly used properties have
+ higher level functions provided.
+ @param [in] commandId Numerical code of the property to be invoked.
+ @param [in] data Data to be passed to the property.
+ @param [in] dataSize size of the buffer passed in @c data.
+ @returns Status code indicating success or failure of this operation.
+ */
+ Status invoke(int commandId, void* data, int dataSize)
+ {
+ return (Status)oniDeviceInvoke(m_device, commandId, data, dataSize);
+ }
+
+ /**
+ Invokes a command that takes an arbitrary data type as its input. It is not expected that
+ application code will need this function frequently, as all commonly used properties have
+ higher level functions provided.
+ @tparam [in] T Type of data to be passed to the property.
+ @param [in] propertyId Numerical code of the property to be invoked.
+ @param [in] value Data to be passed to the property.
+ @returns Status code indicating success or failure of this operation.
+ */
+ template <class T>
+ Status invoke(int propertyId, T& value)
+ {
+ return invoke(propertyId, &value, sizeof(T));
+ }
+
+ /**
+ Checks if a specific command is supported by the device.
+ @param [in] commandId Command to be checked.
+ @returns true if the command is supported, false otherwise.
+ */
+ bool isCommandSupported(int commandId) const
+ {
+ return oniDeviceIsCommandSupported(m_device, commandId) == TRUE;
+ }
+
+ /** @internal **/
+ inline Status _openEx(const char* uri, const char* mode);
+
+private:
+ Device(const Device&);
+ Device& operator=(const Device&);
+
+ void clearSensors()
+ {
+ for (int i = 0; i < ONI_MAX_SENSORS; ++i)
+ {
+ m_aSensorInfo[i]._setInternal(NULL);
+ }
+ }
+
+ inline Status _setHandle(OniDeviceHandle deviceHandle);
+
+private:
+ PlaybackControl* m_pPlaybackControl;
+
+ OniDeviceHandle m_device;
+ DeviceInfo m_deviceInfo;
+ SensorInfo m_aSensorInfo[ONI_MAX_SENSORS];
+
+ bool m_isOwner;
+};
+
+/**
+ * The PlaybackControl class provides access to a set of functions specific to playing back
+ * a recording from a file device.
+ *
+ * When playing a stream back from a recording instead of playing from a live device,
+ * it is possible to vary playback speed, change the current time location (i.e.
+ * fast forward / rewind / seek), specify whether the playback should be repeated at the end
+ * of the recording, and query the total size of the recording.
+ *
+ * Since none of these functions make sense in the context of a physical device, they are
+ * split out into a separate playback control class. To use it, simply open your file device
+ * and obtain its PlaybackControl object via @ref Device::getPlaybackControl().
+ */
+class PlaybackControl
+{
+public:
+
+ /**
+ * Destructor. Destroys a PlaybackControl object. The destructor presently detaches
+ * from its recording automatically, but it is considered a best practice for applications to
+ * manually detach from any device that was attached to.
+ */
+ ~PlaybackControl()
+ {
+ detach();
+ }
+
+ /**
+ * Getter function for the current playback speed of this device.
+ *
+ * This value is expressed as a multiple of the speed the original
+ * recording was taken at. For example, if the original recording was at 30fps, and
+ * playback speed is set to 0.5, then the recording will play at 15fps. If playback speed
+ * is set to 2.0, then the recording will play back at 60fps.
+ *
+ * In addition, there are two "special" values. A playback speed of 0.0 indicates that the
+ * playback should occur as fast as the system is capable of returning frames. This is
+ * most useful when testing algorithms on large datasets, as it enables playback to be
+ * done at a much higher rate than would otherwise be possible.
+ *
+ * A value of -1 indicates that speed is "manual". In this mode, new frames will only
+ * become available when an application manually reads them. If used in a polling loop,
+ * this setting also enables systems to read and process frames limited only by
+ * available processing speeds.
+ *
+ * @returns Current playback speed of the device, measured as ratio of recording speed.
+ */
+ float getSpeed() const
+ {
+ if (!isValid())
+ {
+ return 0.0f;
+ }
+ float speed;
+ Status rc = m_pDevice->getProperty(DEVICE_PROPERTY_PLAYBACK_SPEED, &speed);
+ if (rc != STATUS_OK)
+ {
+ return 1.0f;
+ }
+ return speed;
+ }
+ /**
+ * Setter function for the playback speed of the device. For a full explanation of
+ * what this value means @see PlaybackControl::getSpeed().
+ *
+ * @param [in] speed Desired new value of playback speed, as ratio of original recording.
+ * @returns Status code indicating success or failure of this operation.
+ */
+ Status setSpeed(float speed)
+ {
+ if (!isValid())
+ {
+ return STATUS_NO_DEVICE;
+ }
+ return m_pDevice->setProperty(DEVICE_PROPERTY_PLAYBACK_SPEED, speed);
+ }
+
+ /**
+ * Gets the current repeat setting of the file device.
+ *
+ * @returns true if repeat is enabled, false if not enabled.
+ */
+ bool getRepeatEnabled() const
+ {
+ if (!isValid())
+ {
+ return false;
+ }
+
+ OniBool repeat;
+ Status rc = m_pDevice->getProperty(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED, &repeat);
+ if (rc != STATUS_OK)
+ {
+ return false;
+ }
+
+ return repeat == TRUE;
+ }
+
+ /**
+ * Changes the current repeat mode of the device. If repeat mode is turned on, then the recording will
+ * begin playback again at the beginning after the last frame is read. If turned off, no more frames
+ * will become available after last frame is read.
+ *
+ * @param [in] repeat New value for repeat -- true to enable, false to disable
+ * @returns Status code indicating success or failure of this operations.
+ */
+ Status setRepeatEnabled(bool repeat)
+ {
+ if (!isValid())
+ {
+ return STATUS_NO_DEVICE;
+ }
+
+ return m_pDevice->setProperty(DEVICE_PROPERTY_PLAYBACK_REPEAT_ENABLED, repeat ? TRUE : FALSE);
+ }
+
+ /**
+ * Seeks within a VideoStream to a given FrameID. Note that when this function is called on one
+ * stream, all other streams will also be changed to the corresponding place in the recording. The FrameIDs
+ * of different streams may not match, since FrameIDs may differ for streams that are not synchronized, but
+ * the recording will set all streams to the same moment in time.
+ *
+ * @param [in] stream Stream for which the frameIndex value is valid.
+ * @param [in] frameIndex Frame index to move playback to
+ * @returns Status code indicating success or failure of this operation
+ */
+ Status seek(const VideoStream& stream, int frameIndex)
+ {
+ if (!isValid())
+ {
+ return STATUS_NO_DEVICE;
+ }
+ OniSeek seek;
+ seek.frameIndex = frameIndex;
+ seek.stream = stream._getHandle();
+ return m_pDevice->invoke(DEVICE_COMMAND_SEEK, seek);
+ }
+
+ /**
+ * Provides a count of frames that this recording contains for a given stream. This is useful
+ * both to determine the length of the recording, and to ensure that a valid Frame Index is set when using
+ * the @ref PlaybackControl::seek() function.
+ *
+ * @param [in] stream The video stream to count frames for
+ * @returns Number of frames in provided @ref VideoStream, or 0 if the stream is not part of the recording
+ */
+ int getNumberOfFrames(const VideoStream& stream) const
+ {
+ int numOfFrames = -1;
+ Status rc = stream.getProperty(STREAM_PROPERTY_NUMBER_OF_FRAMES, &numOfFrames);
+ if (rc != STATUS_OK)
+ {
+ return 0;
+ }
+ return numOfFrames;
+ }
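+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): controlling playback of a
+  recording. Assumes `device` was opened with the path of a recording file (see
+  Device::open()) and `depth` is a VideoStream created on it.
+
+    openni::PlaybackControl* pPlayback = device.getPlaybackControl();
+    if (pPlayback != NULL)   // NULL for live (non-file) devices
+    {
+        pPlayback->setSpeed(0.5f);          // half speed; 0.0 = as fast as possible, -1 = manual
+        pPlayback->setRepeatEnabled(true);  // loop back to the start after the last frame
+
+        int numFrames = pPlayback->getNumberOfFrames(depth);
+        if (numFrames > 0)
+        {
+            pPlayback->seek(depth, numFrames / 2);   // jump to the middle of the recording
+        }
+    }
+ */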
+
+ bool isValid() const
+ {
+ return m_pDevice != NULL;
+ }
+private:
+ Status attach(Device* device)
+ {
+ if (!device->isValid() || !device->isFile())
+ {
+ return STATUS_ERROR;
+ }
+
+ detach();
+ m_pDevice = device;
+
+ return STATUS_OK;
+ }
+ void detach()
+ {
+ m_pDevice = NULL;
+ }
+
+ friend class Device;
+ PlaybackControl(Device* pDevice) : m_pDevice(NULL)
+ {
+ if (pDevice != NULL)
+ {
+ attach(pDevice);
+ }
+ }
+
+ Device* m_pDevice;
+};
+
+class CameraSettings
+{
+public:
+ // setters
+ Status setAutoExposureEnabled(bool enabled)
+ {
+ return setProperty(STREAM_PROPERTY_AUTO_EXPOSURE, enabled ? TRUE : FALSE);
+ }
+ Status setAutoWhiteBalanceEnabled(bool enabled)
+ {
+ return setProperty(STREAM_PROPERTY_AUTO_WHITE_BALANCE, enabled ? TRUE : FALSE);
+ }
+
+ bool getAutoExposureEnabled() const
+ {
+ OniBool enabled = FALSE;
+
+ Status rc = getProperty(STREAM_PROPERTY_AUTO_EXPOSURE, &enabled);
+ return rc == STATUS_OK && enabled == TRUE;
+ }
+ bool getAutoWhiteBalanceEnabled() const
+ {
+ OniBool enabled = FALSE;
+
+ Status rc = getProperty(STREAM_PROPERTY_AUTO_WHITE_BALANCE, &enabled);
+ return rc == STATUS_OK && enabled == TRUE;
+ }
+
+ Status setGain(int gain)
+ {
+ return setProperty(STREAM_PROPERTY_GAIN, gain);
+ }
+ Status setExposure(int exposure)
+ {
+ return setProperty(STREAM_PROPERTY_EXPOSURE, exposure);
+ }
+ int getGain()
+ {
+ int gain;
+ Status rc = getProperty(STREAM_PROPERTY_GAIN, &gain);
+ if (rc != STATUS_OK)
+ {
+ return 100;
+ }
+ return gain;
+ }
+ int getExposure()
+ {
+ int exposure;
+ Status rc = getProperty(STREAM_PROPERTY_EXPOSURE, &exposure);
+ if (rc != STATUS_OK)
+ {
+ return 0;
+ }
+ return exposure;
+ }
+
+ bool isValid() const {return m_pStream != NULL;}
+private:
+ template <class T>
+ Status getProperty(int propertyId, T* value) const
+ {
+ if (!isValid()) return STATUS_NOT_SUPPORTED;
+
+ return m_pStream->getProperty(propertyId, value);
+ }
+ template <class T>
+ Status setProperty(int propertyId, const T& value)
+ {
+ if (!isValid()) return STATUS_NOT_SUPPORTED;
+
+ return m_pStream->setProperty(propertyId, value);
+ }
+
+ friend class VideoStream;
+ CameraSettings(VideoStream* pStream)
+ {
+ m_pStream = pStream;
+ }
+
+ VideoStream* m_pStream;
+};
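+
+/*
+ Illustrative sketch (not part of the OpenNI API itself): adjusting exposure on a color
+ stream through CameraSettings. Assumes a created color VideoStream named `color`;
+ getCameraSettings() returns NULL when the stream does not expose these controls, so the
+ pointer is checked first. Exposure and gain units are device specific.
+
+   openni::CameraSettings* pSettings = color.getCameraSettings();
+   if (pSettings != NULL)
+   {
+       pSettings->setAutoExposureEnabled(false);
+       pSettings->setAutoWhiteBalanceEnabled(false);
+       pSettings->setExposure(50);
+       pSettings->setGain(100);
+   }
+*/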
+
+
+/**
+ * The OpenNI class is a static entry point to the library. It is used by every OpenNI 2.0
+ * application to initialize the SDK and drivers to enable creation of valid device objects.
+ *
+ * It also defines listener classes and events that enable event-driven notification of
+ * device connection, device disconnection, and device configuration changes.
+ *
+ * In addition, it gives access to SDK version information and provides a function that allows
+ * you to wait for data to become available on any one of a list of streams (as opposed to
+ * waiting for data on one specific stream with functions provided by the VideoStream class)
+ *
+*/
+class OpenNI
+{
+public:
+
+ /**
+ * The OpenNI::DeviceConnectedListener class provides a means of registering for, and responding to
+ * when a device is connected.
+ *
+ * onDeviceConnected is called whenever a new device is connected to the system (i.e. this event
+ * would be triggered when a new sensor is manually plugged into the host system running the
+ * application)
+ *
+ * To use this class, you should write a new class that inherits from it, and override the
+ * onDeviceConnected method. Once you instantiate your class, use the
+ * OpenNI::addDeviceConnectedListener() function to add your listener object to OpenNI's list of listeners. Your
+ * handler function will then be called whenever the event occurs. A OpenNI::removeDeviceConnectedListener()
+ * function is also provided, if you want to have your class stop listening to these events for any
+ * reason.
+ */
+ class DeviceConnectedListener
+ {
+ public:
+ DeviceConnectedListener()
+ {
+ m_deviceConnectedCallbacks.deviceConnected = deviceConnectedCallback;
+ m_deviceConnectedCallbacks.deviceDisconnected = NULL;
+ m_deviceConnectedCallbacks.deviceStateChanged = NULL;
+ m_deviceConnectedCallbacksHandle = NULL;
+ }
+
+ virtual ~DeviceConnectedListener()
+ {
+ }
+
+ /**
+ * Callback function for the onDeviceConnected event. This function will be
+ * called whenever this event occurs. When this happens, a pointer to the @ref DeviceInfo
+ * object for the newly connected device will be supplied. Note that once a
+ * device is removed, if it was opened by a @ref Device object, that object can no longer be
+ * used to access the device, even if it was reconnected. Once a device was reconnected,
+ * @ref Device::open() should be called again in order to use this device.
+ *
+ * If you wish to open the new device as it is connected, simply query the provided DeviceInfo
+ * object to obtain the URI of the device, and pass this URI to the @ref Device::open() function.
+ */
+ virtual void onDeviceConnected(const DeviceInfo*) = 0;
+ private:
+ static void ONI_CALLBACK_TYPE deviceConnectedCallback(const OniDeviceInfo* pInfo, void* pCookie)
+ {
+ DeviceConnectedListener* pListener = (DeviceConnectedListener*)pCookie;
+ pListener->onDeviceConnected(static_cast<const DeviceInfo*>(pInfo));
+ }
+
+ friend class OpenNI;
+ OniDeviceCallbacks m_deviceConnectedCallbacks;
+ OniCallbackHandle m_deviceConnectedCallbacksHandle;
+
+ };
+ /**
+ * The OpenNI::DeviceDisconnectedListener class provides a means of registering for, and responding to
+ * when a device is disconnected.
+ *
+ * onDeviceDisconnected is called when a device is removed from the system. Note that once a
+ * device is removed, if it was opened by a @ref Device object, that object can no longer be
+ * used to access the device, even if it was reconnected. Once a device was reconnected,
+ * @ref Device::open() should be called again in order to use this device.
+ *
+ * To use this class, you should write a new class that inherits from it, and override the
+ * onDeviceDisconnected method. Once you instantiate your class, use the
+ * OpenNI::addDeviceDisconnectedListener() function to add your listener object to OpenNI's list of listeners. Your
+ * handler function will then be called whenever the event occurs. A OpenNI::removeDeviceDisconnectedListener()
+ * function is also provided, if you want to have your class stop listening to these events for any
+ * reason.
+ */
+ class DeviceDisconnectedListener
+ {
+ public:
+ DeviceDisconnectedListener()
+ {
+ m_deviceDisconnectedCallbacks.deviceConnected = NULL;
+ m_deviceDisconnectedCallbacks.deviceDisconnected = deviceDisconnectedCallback;
+ m_deviceDisconnectedCallbacks.deviceStateChanged = NULL;
+ m_deviceDisconnectedCallbacksHandle = NULL;
+ }
+
+ virtual ~DeviceDisconnectedListener()
+ {
+ }
+
+ /**
+ * Callback function for the onDeviceDisconnected event. This function will be
+ * called whenever this event occurs. When this happens, a pointer to the DeviceInfo
+ * object for the newly disconnected device will be supplied. Note that once a
+ * device is removed, if it was opened by a @ref Device object, that object can no longer be
+ * used to access the device, even if it was reconnected. Once a device was reconnected,
+ * @ref Device::open() should be called again in order to use this device.
+ */
+ virtual void onDeviceDisconnected(const DeviceInfo*) = 0;
+ private:
+ static void ONI_CALLBACK_TYPE deviceDisconnectedCallback(const OniDeviceInfo* pInfo, void* pCookie)
+ {
+ DeviceDisconnectedListener* pListener = (DeviceDisconnectedListener*)pCookie;
+ pListener->onDeviceDisconnected(static_cast<const DeviceInfo*>(pInfo));
+ }
+
+ friend class OpenNI;
+ OniDeviceCallbacks m_deviceDisconnectedCallbacks;
+ OniCallbackHandle m_deviceDisconnectedCallbacksHandle;
+ };
+ /**
+ * The OpenNI::DeviceStateChangedListener class provides a means of registering for, and responding to
+ * when a device's state is changed.
+ *
+ * onDeviceStateChanged is triggered whenever the state of a connected device is changed.
+ *
+ * To use this class, you should write a new class that inherits from it, and override the
+ * onDeviceStateChanged method. Once you instantiate your class, use the
+ * OpenNI::addDeviceStateChangedListener() function to add your listener object to OpenNI's list of listeners. Your
+ * handler function will then be called whenever the event occurs. A OpenNI::removeDeviceStateChangedListener()
+ * function is also provided, if you want to have your class stop listening to these events for any
+ * reason.
+ */
+ class DeviceStateChangedListener
+ {
+ public:
+ DeviceStateChangedListener()
+ {
+ m_deviceStateChangedCallbacks.deviceConnected = NULL;
+ m_deviceStateChangedCallbacks.deviceDisconnected = NULL;
+ m_deviceStateChangedCallbacks.deviceStateChanged = deviceStateChangedCallback;
+ m_deviceStateChangedCallbacksHandle = NULL;
+ }
+
+ virtual ~DeviceStateChangedListener()
+ {
+ }
+
+ /**
+ * Callback function for the onDeviceStateChanged event. This function will be
+ * called whenever this event occurs. When this happens, a pointer to a DeviceInfo
+ * object for the affected device will be supplied, as well as the new DeviceState
+ * value of that device.
+ */
+ virtual void onDeviceStateChanged(const DeviceInfo*, DeviceState) = 0;
+ private:
+ static void ONI_CALLBACK_TYPE deviceStateChangedCallback(const OniDeviceInfo* pInfo, OniDeviceState state, void* pCookie)
+ {
+ DeviceStateChangedListener* pListener = (DeviceStateChangedListener*)pCookie;
+ pListener->onDeviceStateChanged(static_cast<const DeviceInfo*>(pInfo), DeviceState(state));
+ }
+
+ friend class OpenNI;
+ OniDeviceCallbacks m_deviceStateChangedCallbacks;
+ OniCallbackHandle m_deviceStateChangedCallbacksHandle;
+ };
+
+ /**
+ Initialize the library.
+ This will load all available drivers, and see which devices are available.
+ It is forbidden to call any other method in OpenNI before calling @ref initialize().
+ */
+ static Status initialize()
+ {
+ return (Status)oniInitialize(ONI_API_VERSION); // provide version of API, to make sure proper struct sizes are used
+ }
+
+ /**
+ Stop using the library. Unload all drivers, close all streams and devices.
+ Once @ref shutdown has been called, no other calls to OpenNI are allowed.
+ */
+ static void shutdown()
+ {
+ oniShutdown();
+ }
+
+ /**
+ * Returns the version of OpenNI
+ */
+ static Version getVersion()
+ {
+ OniVersion oniVersion = oniGetVersion();
+ Version version;
+ version.major = oniVersion.major;
+ version.minor = oniVersion.minor;
+ version.maintenance = oniVersion.maintenance;
+ version.build = oniVersion.build;
+ return version;
+ }
+
+ /**
+ * Retrieves the calling thread's last extended error information. The last extended error information is maintained
+ * on a per-thread basis. Multiple threads do not overwrite each other's last extended error information.
+ *
+ * The extended error information is cleared on every call to an OpenNI method, so you should call this method
+ * immediately after a call to an OpenNI method which has failed.
+ */
+ static const char* getExtendedError()
+ {
+ return oniGetExtendedError();
+ }
+
+ /**
+ Fills up an array of @ref DeviceInfo objects with devices that are available.
+ @param [in,out] deviceInfoList An array to be filled with devices.
+ */
+ static void enumerateDevices(Array<DeviceInfo>* deviceInfoList)
+ {
+ OniDeviceInfo* m_pDeviceInfos;
+ int m_deviceInfoCount;
+ oniGetDeviceList(&m_pDeviceInfos, &m_deviceInfoCount);
+ deviceInfoList->_setData((DeviceInfo*)m_pDeviceInfos, m_deviceInfoCount, true);
+ oniReleaseDeviceList(m_pDeviceInfos);
+ }
+
+ /**
+ Wait for a new frame from any of the streams provided. The function blocks until any of the streams
+ has a new frame available, or the timeout has passed.
+ @param [in] pStreams An array of streams to wait for.
+ @param [in] streamCount The number of streams in @c pStreams
+ @param [out] pReadyStreamIndex The index of the first stream that has a new frame available.
+ @param [in] timeout [Optional] A timeout before returning if no stream has new data. Default value is @ref TIMEOUT_FOREVER.
+ */
+ static Status waitForAnyStream(VideoStream** pStreams, int streamCount, int* pReadyStreamIndex, int timeout = TIMEOUT_FOREVER)
+ {
+ static const int ONI_MAX_STREAMS = 50;
+ OniStreamHandle streams[ONI_MAX_STREAMS];
+
+ if (streamCount > ONI_MAX_STREAMS)
+ {
+ printf("Too many streams for wait: %d > %d\n", streamCount, ONI_MAX_STREAMS);
+ return STATUS_BAD_PARAMETER;
+ }
+
+ *pReadyStreamIndex = -1;
+ for (int i = 0; i < streamCount; ++i)
+ {
+ if (pStreams[i] != NULL)
+ {
+ streams[i] = pStreams[i]->_getHandle();
+ }
+ else
+ {
+ streams[i] = NULL;
+ }
+ }
+ Status rc = (Status)oniWaitForAnyStream(streams, streamCount, pReadyStreamIndex, timeout);
+
+ return rc;
+ }
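+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): waiting on several streams at
+  once and reading from whichever one signals first. Assumes started streams `depth` and
+  `color`; the 2000 ms timeout is an arbitrary example.
+
+    openni::VideoStream* streams[] = { &depth, &color };
+    int readyIndex = -1;
+    if (openni::OpenNI::waitForAnyStream(streams, 2, &readyIndex, 2000) == openni::STATUS_OK)
+    {
+        openni::VideoFrameRef frame;
+        streams[readyIndex]->readFrame(&frame);   // should not block: a frame is already waiting
+    }
+ */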
+
+ /**
+ * Add a listener to the list of objects that receive the event when a device is connected. See the
+ * @ref OpenNI::DeviceConnectedListener class for details on utilizing the events provided by OpenNI.
+ *
+ * @param pListener Pointer to the Listener to be added to the list
+ * @returns Status code indicating success or failure of this operation.
+ */
+ static Status addDeviceConnectedListener(DeviceConnectedListener* pListener)
+ {
+ if (pListener->m_deviceConnectedCallbacksHandle != NULL)
+ {
+ return STATUS_ERROR;
+ }
+ return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceConnectedCallbacks, pListener, &pListener->m_deviceConnectedCallbacksHandle);
+ }
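+
+ /*
+  Illustrative sketch (not part of the OpenNI API itself): reacting to hot-plugged
+  devices with a DeviceConnectedListener. The class name is a placeholder; the listener
+  object must stay alive until it is removed with removeDeviceConnectedListener().
+
+    class OpenOnConnect : public openni::OpenNI::DeviceConnectedListener
+    {
+    public:
+        virtual void onDeviceConnected(const openni::DeviceInfo* pInfo)
+        {
+            printf("device connected: %s\n", pInfo->getUri());
+        }
+    };
+
+    OpenOnConnect listener;
+    openni::OpenNI::addDeviceConnectedListener(&listener);
+    // ... later ...
+    openni::OpenNI::removeDeviceConnectedListener(&listener);
+ */
+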
+ /**
+ * Add a listener to the list of objects that receive the event when a device is disconnected. See the
+ * @ref OpenNI::DeviceDisconnectedListener class for details on utilizing the events provided by OpenNI.
+ *
+ * @param pListener Pointer to the Listener to be added to the list
+ * @returns Status code indicating success or failure of this operation.
+ */
+ static Status addDeviceDisconnectedListener(DeviceDisconnectedListener* pListener)
+ {
+ if (pListener->m_deviceDisconnectedCallbacksHandle != NULL)
+ {
+ return STATUS_ERROR;
+ }
+ return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceDisconnectedCallbacks, pListener, &pListener->m_deviceDisconnectedCallbacksHandle);
+ }
+ /**
+ * Add a listener to the list of objects that receive the event when a device's state changes. See the
+ * @ref OpenNI::DeviceStateChangedListener class for details on utilizing the events provided by OpenNI.
+ *
+ * @param pListener Pointer to the Listener to be added to the list
+ * @returns Status code indicating success or failure of this operation.
+ */
+ static Status addDeviceStateChangedListener(DeviceStateChangedListener* pListener)
+ {
+ if (pListener->m_deviceStateChangedCallbacksHandle != NULL)
+ {
+ return STATUS_ERROR;
+ }
+ return (Status)oniRegisterDeviceCallbacks(&pListener->m_deviceStateChangedCallbacks, pListener, &pListener->m_deviceStateChangedCallbacksHandle);
+ }
+ /**
+ * Remove a listener from the list of objects that receive the event when a device is connected. See
+ * the @ref OpenNI::DeviceConnectedListener class for details on utilizing the events provided by OpenNI.
+ *
+ * @param pListener Pointer to the Listener to be removed from the list
+ */
+ static void removeDeviceConnectedListener(DeviceConnectedListener* pListener)
+ {
+ oniUnregisterDeviceCallbacks(pListener->m_deviceConnectedCallbacksHandle);
+ pListener->m_deviceConnectedCallbacksHandle = NULL;
+ }
+ /**
+ * Remove a listener from the list of objects that receive the event when a device is disconnected. See
+ * the @ref OpenNI::DeviceDisconnectedListener class for details on utilizing the events provided by OpenNI.
+ *
+ * @param pListener Pointer to the Listener to be removed from the list
+ */
+ static void removeDeviceDisconnectedListener(DeviceDisconnectedListener* pListener)
+ {
+ oniUnregisterDeviceCallbacks(pListener->m_deviceDisconnectedCallbacksHandle);
+ pListener->m_deviceDisconnectedCallbacksHandle = NULL;
+ }
+ /**
+ * Remove a listener from the list of objects that receive the event when a device's state changes. See
+ * the @ref OpenNI::DeviceStateChangedListener class for details on utilizing the events provided by OpenNI.
+ *
+ * @param pListener Pointer to the Listener to be removed from the list
+ */
+ static void removeDeviceStateChangedListener(DeviceStateChangedListener* pListener)
+ {
+ oniUnregisterDeviceCallbacks(pListener->m_deviceStateChangedCallbacksHandle);
+ pListener->m_deviceStateChangedCallbacksHandle = NULL;
+ }
+
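+ /* A sketch of the listener pattern used by the add/remove methods above, assuming
+ DeviceConnectedListener exposes the virtual onDeviceConnected(const DeviceInfo*)
+ callback declared earlier in this header:
+
+ @code
+ class MyConnectedListener : public openni::OpenNI::DeviceConnectedListener
+ {
+ public:
+     virtual void onDeviceConnected(const openni::DeviceInfo* pInfo)
+     {
+         printf("Device connected: %s\n", pInfo->getUri());
+     }
+ };
+
+ MyConnectedListener listener;
+ openni::OpenNI::addDeviceConnectedListener(&listener);
+ // ... later, before the listener goes out of scope ...
+ openni::OpenNI::removeDeviceConnectedListener(&listener);
+ @endcode
+ */
+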
+ /**
+ * Change the log output folder
+
+ * @param const char * strLogOutputFolder [in] Path of the folder to which log files will be written
+ *
+ * @retval STATUS_OK Upon successful completion.
+ * @retval STATUS_ERROR Upon any kind of failure.
+ */
+ static Status setLogOutputFolder(const char *strLogOutputFolder)
+ {
+ return (Status)oniSetLogOutputFolder(strLogOutputFolder);
+ }
+
+ /**
+ * Get current log file name
+
+ * @param char * strFileName [out] returned file name buffer
+ * @param int nBufferSize [in] Buffer size
+ *
+ * @retval STATUS_OK Upon successful completion.
+ * @retval STATUS_ERROR Upon any kind of failure.
+ */
+ static Status getLogFileName(char *strFileName, int nBufferSize)
+ {
+ return (Status)oniGetLogFileName(strFileName, nBufferSize);
+ }
+
+ /**
+ * Set the minimum severity of log entries to be produced
+
+ * @param int nMinSeverity [in] Minimum severity; entries below this level are not logged
+ *
+ * @retval STATUS_OK Upon successful completion.
+ * @retval STATUS_ERROR Upon any kind of failure.
+ */
+ static Status setLogMinSeverity(int nMinSeverity)
+ {
+ return(Status) oniSetLogMinSeverity(nMinSeverity);
+ }
+
+ /**
+ * Configures whether log entries will be printed to the console.
+
+ * @param const OniBool bConsoleOutput [in] TRUE to print log entries to console, FALSE otherwise.
+ *
+ * @retval STATUS_OK Upon successful completion.
+ * @retval STATUS_ERROR Upon any kind of failure.
+ */
+ static Status setLogConsoleOutput(bool bConsoleOutput)
+ {
+ return (Status)oniSetLogConsoleOutput(bConsoleOutput);
+ }
+
+ /**
+ * Configures whether log entries will be printed to a file.
+
+ * @param const OniBool bFileOutput [in] TRUE to print log entries to a file, FALSE otherwise.
+ *
+ * @retval STATUS_OK Upon successful completion.
+ * @retval STATUS_ERROR Upon any kind of failure.
+ */
+ static Status setLogFileOutput(bool bFileOutput)
+ {
+ return (Status)oniSetLogFileOutput(bFileOutput);
+ }
+
+ #if ONI_PLATFORM == ONI_PLATFORM_ANDROID_ARM
+ /**
+ * Configures whether log entries will be printed to the Android log.
+
+ * @param OniBool bAndroidOutput [in] TRUE to print log entries to the Android log, FALSE otherwise.
+ *
+ * @retval STATUS_OK Upon successful completion.
+ * @retval STATUS_ERROR Upon any kind of failure.
+ */
+
+ static Status setLogAndroidOutput(bool bAndroidOutput)
+ {
+ return (Status)oniSetLogAndroidOutput(bAndroidOutput);
+ }
+ #endif
+
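+ /* A small configuration sketch for the logging helpers above; severity values follow
+ the 0 - Verbose, 1 - Info, 2 - Warning, 3 - Error convention used by OpenNI.ini, and
+ the output folder here is only an example:
+
+ @code
+ openni::OpenNI::setLogOutputFolder("./openni_logs");   // any writable folder
+ openni::OpenNI::setLogMinSeverity(2);                   // warnings and errors only
+ openni::OpenNI::setLogConsoleOutput(true);
+ openni::OpenNI::setLogFileOutput(true);
+ @endcode
+ */
+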
+private:
+ OpenNI()
+ {
+ }
+};
+
+/**
+The CoordinateConverter class converts points between the different coordinate systems.
+
+Depth and World coordinate systems
+
+OpenNI applications commonly use two different coordinate systems to represent depth. These two systems are referred to as Depth
+and World representation.
+
+Depth coordinates are the native data representation. In this system, the frame is a map (two dimensional array), and each pixel is
+assigned a depth value. This depth value represents the distance between the camera plane and whatever object is in the given
+pixel. The X and Y coordinates are simply the location in the map, where the origin is the top-left corner of the field of view.
+
+World coordinates superimpose a more familiar 3D Cartesian coordinate system on the world, with the camera lens at the origin.
+In this system, every point is specified by three coordinates -- x, y and z. The x axis of this system is along a line that passes
+through the infrared projector and CMOS imager of the camera. The y axis is parallel to the front face of the camera, and
+perpendicular to the x axis (it will also be perpendicular to the ground if the camera is upright and level). The z axis
+runs into the scene, perpendicular to both the x and y axis. From the perspective of the camera, an object moving from
+left to right is moving along the increasing x axis. An object moving up is moving along the increasing y axis, and an object
+moving away from the camera is moving along the increasing z axis.
+
+Mathematically, the Depth coordinate system is the projection of the scene on the CMOS. If the sensor's angular field of view and
+resolution are known, then an angular size can be calculated for each pixel. This is how the conversion algorithms work. The
+dependence of this calculation on FoV and resolution is the reason that a @ref VideoStream pointer must be provided to these
+functions. The @ref VideoStream pointer is used to determine parameters for the specific points to be converted.
+
+Since the Depth coordinate system is projective, the apparent size of objects in depth coordinates (measured in pixels)
+will increase as an object moves closer to the sensor. The size of objects in the World coordinate system is independent of
+distance from the sensor.
+
+Note that converting from Depth to World coordinates is relatively expensive computationally. It is generally not practical to convert
+the entire raw depth map to World coordinates. A better approach is to have your computer vision algorithm work in Depth
+coordinates for as long as possible, and to convert only a few specific points to World coordinates right before output.
+
+Note that when converting from Depth to World or vice versa, the Z value remains the same.
+*/
+class CoordinateConverter
+{
+public:
+ /**
+ Converts a single point from the World coordinate system to the Depth coordinate system.
+ @param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates
+ @param [in] worldX The X coordinate of the point to be converted, measured in millimeters in World coordinates
+ @param [in] worldY The Y coordinate of the point to be converted, measured in millimeters in World coordinates
+ @param [in] worldZ The Z coordinate of the point to be converted, measured in millimeters in World coordinates
+ @param [out] pDepthX Pointer to a place to store the X coordinate of the output value, measured in pixels with 0 at far left of image
+ @param [out] pDepthY Pointer to a place to store the Y coordinate of the output value, measured in pixels with 0 at top of image
+ @param [out] pDepthZ Pointer to a place to store the Z(depth) coordinate of the output value, measured in the @ref PixelFormat of depthStream
+ */
+ static Status convertWorldToDepth(const VideoStream& depthStream, float worldX, float worldY, float worldZ, int* pDepthX, int* pDepthY, DepthPixel* pDepthZ)
+ {
+ float depthX, depthY, depthZ;
+ Status rc = (Status)oniCoordinateConverterWorldToDepth(depthStream._getHandle(), worldX, worldY, worldZ, &depthX, &depthY, &depthZ);
+ *pDepthX = (int)depthX;
+ *pDepthY = (int)depthY;
+ *pDepthZ = (DepthPixel)depthZ;
+ return rc;
+ }
+
+ /**
+ Converts a single point from the World coordinate system to a floating point representation of the Depth coordinate system
+ @param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates
+ @param [in] worldX The X coordinate of the point to be converted, measured in millimeters in World coordinates
+ @param [in] worldY The Y coordinate of the point to be converted, measured in millimeters in World coordinates
+ @param [in] worldZ The Z coordinate of the point to be converted, measured in millimeters in World coordinates
+ @param [out] pDepthX Pointer to a place to store the X coordinate of the output value, measured in pixels with 0.0 at far left of the image
+ @param [out] pDepthY Pointer to a place to store the Y coordinate of the output value, measured in pixels with 0.0 at the top of the image
+ @param [out] pDepthZ Pointer to a place to store the Z(depth) coordinate of the output value, measured in millimeters with 0.0 at the camera lens
+ */
+ static Status convertWorldToDepth(const VideoStream& depthStream, float worldX, float worldY, float worldZ, float* pDepthX, float* pDepthY, float* pDepthZ)
+ {
+ return (Status)oniCoordinateConverterWorldToDepth(depthStream._getHandle(), worldX, worldY, worldZ, pDepthX, pDepthY, pDepthZ);
+ }
+
+ /**
+ Converts a single point from the Depth coordinate system to the World coordinate system.
+ @param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates
+ @param [in] depthX The X coordinate of the point to be converted, measured in pixels with 0 at the far left of the image
+ @param [in] depthY The Y coordinate of the point to be converted, measured in pixels with 0 at the top of the image
+ @param [in] depthZ the Z(depth) coordinate of the point to be converted, measured in the @ref PixelFormat of depthStream
+ @param [out] pWorldX Pointer to a place to store the X coordinate of the output value, measured in millimeters in World coordinates
+ @param [out] pWorldY Pointer to a place to store the Y coordinate of the output value, measured in millimeters in World coordinates
+ @param [out] pWorldZ Pointer to a place to store the Z coordinate of the output value, measured in millimeters in World coordinates
+ */
+ static Status convertDepthToWorld(const VideoStream& depthStream, int depthX, int depthY, DepthPixel depthZ, float* pWorldX, float* pWorldY, float* pWorldZ)
+ {
+ return (Status)oniCoordinateConverterDepthToWorld(depthStream._getHandle(), float(depthX), float(depthY), float(depthZ), pWorldX, pWorldY, pWorldZ);
+ }
+
+ /**
+ Converts a single point from a floating point representation of the Depth coordinate system to the World coordinate system.
+ @param [in] depthStream Reference to an openni::VideoStream that will be used to determine the format of the Depth coordinates
+ @param [in] depthX The X coordinate of the point to be converted, measured in pixels with 0.0 at the far left of the image
+ @param [in] depthY The Y coordinate of the point to be converted, measured in pixels with 0.0 at the top of the image
+ @param [in] depthZ Z(depth) coordinate of the point to be converted, measured in the @ref PixelFormat of depthStream
+ @param [out] pWorldX Pointer to a place to store the X coordinate of the output value, measured in millimeters in World coordinates
+ @param [out] pWorldY Pointer to a place to store the Y coordinate of the output value, measured in millimeters in World coordinates
+ @param [out] pWorldZ Pointer to a place to store the Z coordinate of the output value, measured in millimeters in World coordinates
+ */
+ static Status convertDepthToWorld(const VideoStream& depthStream, float depthX, float depthY, float depthZ, float* pWorldX, float* pWorldY, float* pWorldZ)
+ {
+ return (Status)oniCoordinateConverterDepthToWorld(depthStream._getHandle(), depthX, depthY, depthZ, pWorldX, pWorldY, pWorldZ);
+ }
+
+ /**
+ For a given depth point, provides the coordinates of the corresponding color value. Useful for superimposing the depth and color images.
+ This operation is the same as turning on registration, but is performed on a single pixel rather than the whole image.
+ @param [in] depthStream Reference to an openni::VideoStream that produced the depth value
+ @param [in] colorStream Reference to an openni::VideoStream in which we want to find the corresponding color pixel
+ @param [in] depthX X value of the depth point, given in Depth coordinates and measured in pixels
+ @param [in] depthY Y value of the depth point, given in Depth coordinates and measured in pixels
+ @param [in] depthZ Z(depth) value of the depth point, given in the @ref PixelFormat of depthStream
+ @param [out] pColorX The X coordinate of the color pixel that overlaps the given depth pixel, measured in pixels
+ @param [out] pColorY The Y coordinate of the color pixel that overlaps the given depth pixel, measured in pixels
+ */
+ static Status convertDepthToColor(const VideoStream& depthStream, const VideoStream& colorStream, int depthX, int depthY, DepthPixel depthZ, int* pColorX, int* pColorY)
+ {
+ return (Status)oniCoordinateConverterDepthToColor(depthStream._getHandle(), colorStream._getHandle(), depthX, depthY, depthZ, pColorX, pColorY);
+ }
+};
+
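+/* A minimal sketch of the Depth-to-World conversion described above, assuming
+depthStream is a running depth stream and (depthX, depthY, depthZ) come from a
+depth frame:
+
+@code
+float wx, wy, wz;
+openni::Status rc = openni::CoordinateConverter::convertDepthToWorld(
+    depthStream, depthX, depthY, depthZ, &wx, &wy, &wz);
+if (rc == openni::STATUS_OK)
+{
+    printf("World coordinates: (%.1f, %.1f, %.1f) mm\n", wx, wy, wz);
+}
+@endcode
+*/
+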
+/**
+ * The Recorder class is used to record streams to an ONI file.
+ *
+ * After a recorder is instantiated, it must be initialized with a specific filename where
+ * the recording will be stored. The recorder is then attached to one or more streams. Once
+ * this is complete, the recorder can be told to start recording. The recorder will store
+ * every frame from every stream to the specified file. Later, this file can be used to
+ * initialize a file Device, and used to play back the same data that was recorded.
+ *
+ * Opening a file device is done by passing its path as the uri to the @ref Device::open() method.
+ *
+ * @see PlaybackControl for options available to play a recorded file.
+ *
+ */
+class Recorder
+{
+public:
+ /**
+ * Creates a recorder. The recorder is not valid, i.e. @ref isValid() returns
+ * false. You must initialize the recorder before use with @ref create().
+ */
+ Recorder() : m_recorder(NULL)
+ {
+ }
+
+ /**
+ * Destroys a recorder. This will also stop recording.
+ */
+ ~Recorder()
+ {
+ destroy();
+ }
+
+ /**
+ * Initializes a recorder. You can initialize the recorder only once. Attempts
+ * to initialize it more than once will result in an error code being returned.
+ *
+ * Initialization assigns the recorder to an output file that will be used for
+ * recording. Before use, the @ref attach() function must also be used to assign input
+ * data to the Recorder.
+ *
+ * @param [in] fileName The name of a file which will contain the recording.
+ * @returns Status code which indicates success or failure of the operation.
+ */
+ Status create(const char* fileName)
+ {
+ if (!isValid())
+ {
+ return (Status)oniCreateRecorder(fileName, &m_recorder);
+ }
+ return STATUS_ERROR;
+ }
+
+ /**
+ * Verifies if the recorder is valid, i.e. if one can record with this recorder. A
+ * recorder object is not valid until the @ref create() method is called.
+ *
+ * @returns true if the recorder has been initialized, false otherwise.
+ */
+ bool isValid() const
+ {
+ return NULL != getHandle();
+ }
+
+ /**
+ * Attaches a stream to the recorder. Note that this does not start recording; you
+ * should start it explicitly using the @ref start() method. Once the recording
+ * process has started, no more streams can be attached to the recorder.
+ *
+ * @param [in] stream The stream to be recorded.
+ * @param [in] allowLossyCompression [Optional] If this value is true, the recorder may use
+ * lossy compression, which means that when the recording is played back, there may
+ * be small differences from the original frames. Default value is false.
+ */
+ Status attach(VideoStream& stream, bool allowLossyCompression = false)
+ {
+ if (!isValid() || !stream.isValid())
+ {
+ return STATUS_ERROR;
+ }
+ return (Status)oniRecorderAttachStream(
+ m_recorder,
+ stream._getHandle(),
+ allowLossyCompression);
+ }
+
+ /**
+ * Starts recording.
+ * Once this method is called, the recorder will take all subsequent frames from the attached streams
+ * and store them in the file.
+ * You may not attach additional streams once recording has started.
+ */
+ Status start()
+ {
+ if (!isValid())
+ {
+ return STATUS_ERROR;
+ }
+ return (Status)oniRecorderStart(m_recorder);
+ }
+
+ /**
+ * Stops recording. You may use @ref start() to resume the recording.
+ */
+ void stop()
+ {
+ if (isValid())
+ {
+ oniRecorderStop(m_recorder);
+ }
+ }
+
+ /**
+ Destroys the recorder object.
+ */
+ void destroy()
+ {
+ if (isValid())
+ {
+ oniRecorderDestroy(&m_recorder);
+ }
+ }
+
+private:
+ Recorder(const Recorder&);
+ Recorder& operator=(const Recorder&);
+
+ /**
+ * Returns a handle of this recorder.
+ */
+ OniRecorderHandle getHandle() const
+ {
+ return m_recorder;
+ }
+
+
+ OniRecorderHandle m_recorder;
+};
+
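+/* A sketch of the full Recorder workflow, assuming depthStream and colorStream are
+valid running streams; the output file name is arbitrary, and the resulting .oni file
+can later be opened with Device::open() as noted above:
+
+@code
+openni::Recorder recorder;
+if (recorder.create("capture.oni") == openni::STATUS_OK)
+{
+    recorder.attach(depthStream);          // attach streams before start()
+    recorder.attach(colorStream, true);    // allow lossy compression for color
+    recorder.start();
+    // ... read frames for a while ...
+    recorder.stop();
+    recorder.destroy();
+}
+@endcode
+*/
+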
+// Implementation
+Status VideoStream::create(const Device& device, SensorType sensorType)
+{
+ OniStreamHandle streamHandle;
+ Status rc = (Status)oniDeviceCreateStream(device._getHandle(), (OniSensorType)sensorType, &streamHandle);
+ if (rc != STATUS_OK)
+ {
+ return rc;
+ }
+
+ m_isOwner = true;
+ _setHandle(streamHandle);
+
+ if (isPropertySupported(STREAM_PROPERTY_AUTO_WHITE_BALANCE) && isPropertySupported(STREAM_PROPERTY_AUTO_EXPOSURE))
+ {
+ m_pCameraSettings = new CameraSettings(this);
+ }
+
+ return STATUS_OK;
+}
+
+void VideoStream::destroy()
+{
+ if (!isValid())
+ {
+ return;
+ }
+
+ if (m_pCameraSettings != NULL)
+ {
+ delete m_pCameraSettings;
+ m_pCameraSettings = NULL;
+ }
+
+ if (m_stream != NULL)
+ {
+ if(m_isOwner)
+ oniStreamDestroy(m_stream);
+ m_stream = NULL;
+ }
+}
+
+Status Device::open(const char* uri)
+{
+ //If we are not the owners, we stick with our own device
+ if(!m_isOwner)
+ {
+ if(isValid()){
+ return STATUS_OK;
+ }else{
+ return STATUS_OUT_OF_FLOW;
+ }
+ }
+
+ OniDeviceHandle deviceHandle;
+ Status rc = (Status)oniDeviceOpen(uri, &deviceHandle);
+ if (rc != STATUS_OK)
+ {
+ return rc;
+ }
+
+ _setHandle(deviceHandle);
+
+ return STATUS_OK;
+}
+
+Status Device::_openEx(const char* uri, const char* mode)
+{
+ //If we are not the owners, we stick with our own device
+ if(!m_isOwner)
+ {
+ if(isValid()){
+ return STATUS_OK;
+ }else{
+ return STATUS_OUT_OF_FLOW;
+ }
+ }
+
+ OniDeviceHandle deviceHandle;
+ Status rc = (Status)oniDeviceOpenEx(uri, mode, &deviceHandle);
+ if (rc != STATUS_OK)
+ {
+ return rc;
+ }
+
+ _setHandle(deviceHandle);
+
+ return STATUS_OK;
+}
+
+Status Device::_setHandle(OniDeviceHandle deviceHandle)
+{
+ if (m_device == NULL)
+ {
+ m_device = deviceHandle;
+
+ clearSensors();
+
+ oniDeviceGetInfo(m_device, &m_deviceInfo);
+
+ if (isFile())
+ {
+ m_pPlaybackControl = new PlaybackControl(this);
+ }
+
+ // Read deviceInfo
+ return STATUS_OK;
+ }
+
+ return STATUS_OUT_OF_FLOW;
+}
+
+void Device::close()
+{
+ if (m_pPlaybackControl != NULL)
+ {
+ delete m_pPlaybackControl;
+ m_pPlaybackControl = NULL;
+ }
+
+ if (m_device != NULL)
+ {
+ if(m_isOwner)
+ {
+ oniDeviceClose(m_device);
+ }
+
+ m_device = NULL;
+ }
+}
+
+
+}
+
+#endif // _OPEN_NI_HPP_
diff --git a/include/openni2/Win32/OniPlatformWin32.h b/include/openni2/Win32/OniPlatformWin32.h
new file mode 100644
index 00000000..23bd81db
--- /dev/null
+++ b/include/openni2/Win32/OniPlatformWin32.h
@@ -0,0 +1,139 @@
+/*****************************************************************************
+* *
+* OpenNI 2.x Alpha *
+* Copyright (C) 2012 PrimeSense Ltd. *
+* *
+* This file is part of OpenNI. *
+* *
+* Licensed under the Apache License, Version 2.0 (the "License"); *
+* you may not use this file except in compliance with the License. *
+* You may obtain a copy of the License at *
+* *
+* http://www.apache.org/licenses/LICENSE-2.0 *
+* *
+* Unless required by applicable law or agreed to in writing, software *
+* distributed under the License is distributed on an "AS IS" BASIS, *
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
+* See the License for the specific language governing permissions and *
+* limitations under the License. *
+* *
+*****************************************************************************/
+#ifndef _ONI_PLATFORM_WIN32_H_
+#define _ONI_PLATFORM_WIN32_H_
+
+//---------------------------------------------------------------------------
+// Prerequisites
+//---------------------------------------------------------------------------
+#ifndef WINVER // Allow use of features specific to Windows XP or later
+ #define WINVER 0x0501
+#endif
+#ifndef _WIN32_WINNT // Allow use of features specific to Windows XP or later
+ #define _WIN32_WINNT 0x0501
+#endif
+#ifndef _WIN32_WINDOWS // Allow use of features specific to Windows 98 or later
+ #define _WIN32_WINDOWS 0x0410
+#endif
+#ifndef _WIN32_IE // Allow use of features specific to IE 6.0 or later
+ #define _WIN32_IE 0x0600
+#endif
+#define WIN32_LEAN_AND_MEAN // Exclude rarely-used stuff from Windows headers
+
+// Undeprecate CRT functions
+#ifndef _CRT_SECURE_NO_DEPRECATE
+ #define _CRT_SECURE_NO_DEPRECATE 1
+#endif
+
+//---------------------------------------------------------------------------
+// Includes
+//---------------------------------------------------------------------------
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+#include
+
+#if _MSC_VER < 1600 // Visual Studio 2008 and older doesn't have stdint.h...
+typedef signed char int8_t;
+typedef short int16_t;
+typedef int int32_t;
+typedef __int64 int64_t;
+
+typedef unsigned char uint8_t;
+typedef unsigned short uint16_t;
+typedef unsigned int uint32_t;
+typedef unsigned __int64 uint64_t;
+#else
+#include <stdint.h>
+#endif
+
+//---------------------------------------------------------------------------
+// Platform Basic Definition
+//---------------------------------------------------------------------------
+#define ONI_PLATFORM ONI_PLATFORM_WIN32
+#define ONI_PLATFORM_STRING "Win32"
+
+//---------------------------------------------------------------------------
+// Platform Capabilities
+//---------------------------------------------------------------------------
+#define ONI_PLATFORM_ENDIAN_TYPE ONI_PLATFORM_IS_LITTLE_ENDIAN
+
+#define ONI_PLATFORM_SUPPORTS_DYNAMIC_LIBS 1
+
+//---------------------------------------------------------------------------
+// Memory
+//---------------------------------------------------------------------------
+/** The default memory alignment. */
+#define ONI_DEFAULT_MEM_ALIGN 16
+
+/** The thread static declarator (using TLS). */
+#define ONI_THREAD_STATIC __declspec(thread)
+
+//---------------------------------------------------------------------------
+// Files
+//---------------------------------------------------------------------------
+/** The maximum allowed file path size (in bytes). */
+#define ONI_FILE_MAX_PATH MAX_PATH
+
+//---------------------------------------------------------------------------
+// Call backs
+//---------------------------------------------------------------------------
+/** The std call type. */
+#define ONI_STDCALL __stdcall
+
+/** The call back calling convention. */
+#define ONI_CALLBACK_TYPE ONI_STDCALL
+
+/** The C and C++ calling convention. */
+#define ONI_C_DECL __cdecl
+
+//---------------------------------------------------------------------------
+// Macros
+//---------------------------------------------------------------------------
+/** Returns the date and time at compile time. */
+#define ONI_TIMESTAMP __DATE__ " " __TIME__
+
+/** Converts n into a pre-processor string. */
+#define ONI_STRINGIFY(n) ONI_STRINGIFY_HELPER(n)
+#define ONI_STRINGIFY_HELPER(n) #n
+
+//---------------------------------------------------------------------------
+// API Export/Import Macros
+//---------------------------------------------------------------------------
+/** Indicates an exported shared library function. */
+#define ONI_API_EXPORT __declspec(dllexport)
+
+/** Indicates an imported shared library function. */
+#define ONI_API_IMPORT __declspec(dllimport)
+
+/** Indicates a deprecated function */
+#if _MSC_VER < 1400 // Before VS2005 there was no support for declspec deprecated...
+ #define ONI_API_DEPRECATED(msg)
+#else
+ #define ONI_API_DEPRECATED(msg) __declspec(deprecated(msg))
+#endif
+
+#endif //_ONI_PLATFORM_WIN32_H_
diff --git a/include/openni2_redist/arm/OpenNI.ini b/include/openni2_redist/arm/OpenNI.ini
new file mode 100644
index 00000000..d6fc79d1
--- /dev/null
+++ b/include/openni2_redist/arm/OpenNI.ini
@@ -0,0 +1,17 @@
+[Log]
+; 0 - Verbose; 1 - Info; 2 - Warning; 3 - Error. Default - None
+;Verbosity=0
+;LogToConsole=1
+;LogToFile=1
+;LogToAndroidLog=1
+
+[Device]
+;Override=
+;RecordTo=
+
+[Drivers]
+; Location of the drivers, relative to OpenNI shared library location. When not provided, "OpenNI2/Drivers" will be used.
+;Repository=OpenNI2/Drivers
+
+; List of drivers to load, separated by commas. When not provided, OpenNI will try to load each shared library in Repository.
+;List=
diff --git a/include/openni2_redist/arm/OpenNI2/Drivers/OniFile.ini b/include/openni2_redist/arm/OpenNI2/Drivers/OniFile.ini
new file mode 100644
index 00000000..487c3191
--- /dev/null
+++ b/include/openni2_redist/arm/OpenNI2/Drivers/OniFile.ini
@@ -0,0 +1,7 @@
+;---------------- Player Configuration -------------------
+[Player]
+; Speed. 1.0 - 1.0 * fps (default). Any real value R greater than 0 can be used.
+;Speed=1.0
+
+; Repeat. 1 - on (default), 0 - off
+;Repeat=1
\ No newline at end of file
diff --git a/include/openni2_redist/arm/OpenNI2/Drivers/Orbbec.ini b/include/openni2_redist/arm/OpenNI2/Drivers/Orbbec.ini
new file mode 100644
index 00000000..705c3396
--- /dev/null
+++ b/include/openni2_redist/arm/OpenNI2/Drivers/Orbbec.ini
@@ -0,0 +1,158 @@
+;---------------- Sensor Default Configuration -------------------
+[Device]
+; Mirroring. 0 - Off (default), 1 - On
+;Mirror=1
+
+; FrameSync. 0 - Off (default), 1 - On
+;FrameSync=1
+
+; Stream Data Timestamps. 0 - milliseconds, 1 - microseconds (default)
+;HighResTimestamps=1
+
+; Stream Data Timestamps Source. 0 - Firmware (default), 1 - Host
+;HostTimestamps=0
+
+; A filter for the firmware log. Default is determined by firmware.
+;FirmwareLogFilter=0
+
+; Automatic firmware log retrieval. 0 - Off (default), or the number of milliseconds between log retrieval operations.
+;FirmwareLogInterval=1000
+
+; Print firmware log to console when automatic firmware log retrieval is on. 0 - Off (default), 1 - On
+;FirmwareLogPrint=1
+
+; Is APC enabled. 0 - Off, 1 - On (default)
+;APCEnabled=1
+
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines), 3 - ISO endpoints for low-bandwidth depth
+UsbInterface=2
+
+[Depth]
+; Output format. 100 - 1mm depth values (default), 102 - u9.2 Shift values.
+;OutputFormat=100
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA, 1 - VGA, 4 - QQVGA. Default: Arm - 4, other platforms - 0
+Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Min depth cutoff. 0-10000 mm (default is 0)
+;MinDepthValue=0
+
+; Max depth cutoff. 0-10000 mm (default is 10000)
+;MaxDepthValue=10000
+
+; Input format. 0 - Uncompressed 16-bit, 1 - PS Compression, 3 - Packed 11-bit, 4 - Packed 12-bit. Default: Arm - 4, other platforms - 3
+InputFormat=3
+
+; Registration. 0 - Off (default), 1 - On
+;Registration=1
+
+; Registration Type. 0 - Don't care (default), 1 - use hardware acceleration, 2 - perform in software
+;RegistrationType=0
+
+; Hole Filler. 0 - Off, 1 - On (default)
+;HoleFilter=0
+
+; White Balance. 0 - Off, 1 - On (default)
+;WhiteBalancedEnabled=0
+
+; Gain. 0-50 (0 - Auto, 1 - Min., 50 - Max.). Default value is set by firmware.
+;Gain=0
+
+; Close Range Mode. 0 - Off (default), 1 - On
+;CloseRange=1
+
+; GMC Mode. 0 - Off, 1 - On (default)
+;GMCMode=0
+
+; GMC Debug. 0 - Off (default), 1 - On
+;GMCDebug=1
+
+; Depth Auto Gain Region-of-Interest. Default values are set by firmware.
+;DepthAGCBin0MinDepth=500
+;DepthAGCBin0MaxDepth=800
+;DepthAGCBin1MinDepth=1500
+;DepthAGCBin1MaxDepth=1800
+;DepthAGCBin2MinDepth=2500
+;DepthAGCBin2MaxDepth=2800
+;DepthAGCBin3MinDepth=3500
+;DepthAGCBin3MaxDepth=3800
+
+; Wavelength Correction Mechanism. 0 - Off (default), 1 - On
+;WavelengthCorrection=1
+
+; Wavelength Correction debug info. 0 - Off (default), 1 - On
+;WavelengthCorrectionDebug=1
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=2
+
+; Cropping area
+[Depth.Cropping]
+OffsetX=0
+OffsetY=0
+SizeX=320
+SizeY=240
+;Enabled=1
+
+[Image]
+; Output format. 200 - RGB888 (default), 201 - YUV422, 202 - Gray8 (2.0 MP only), 205 - YUYV
+;OutputFormat=201
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA (1.3MP), 3 - UXGA (2.0MP), 14 - 720p, 15 - 1280x960
+Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Input format. 0 - Compressed 8-bit BAYER (1.3MP or 2.0MP only), 1 - Compressed YUV422 (default in BULK), 2 - Jpeg, 5 - Uncompressed YUV422 (default in ISO), 6 - Uncompressed 8-bit BAYER (1.3MP or 2.0MP only), 7 - Uncompressed YUYV
+InputFormat=5
+
+; Anti Flicker. 0 - Off (default), 50 - 50Hz, 60 - 60 Hz.
+;Flicker=50
+
+; Image quality when using Jpeg. 1-10 (1 - Lowest, 10 - Highest (default))
+;Quality=10
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Image.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[IR]
+; Output format. 200 - RGB888, 203 - Grayscale 16-bit (default)
+;OutputFormat=203
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA(1.3MP)
+;Resolution=0
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[IR.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
diff --git a/include/openni2_redist/arm/OpenNI2/Drivers/PS1080.ini b/include/openni2_redist/arm/OpenNI2/Drivers/PS1080.ini
new file mode 100644
index 00000000..c7c23484
--- /dev/null
+++ b/include/openni2_redist/arm/OpenNI2/Drivers/PS1080.ini
@@ -0,0 +1,158 @@
+;---------------- Sensor Default Configuration -------------------
+[Device]
+; Mirroring. 0 - Off (default), 1 - On
+;Mirror=1
+
+; FrameSync. 0 - Off (default), 1 - On
+;FrameSync=1
+
+; Stream Data Timestamps. 0 - milliseconds, 1 - microseconds (default)
+;HighResTimestamps=1
+
+; Stream Data Timestamps Source. 0 - Firmware (default), 1 - Host
+;HostTimestamps=0
+
+; A filter for the firmware log. Default is determined by firmware.
+;FirmwareLogFilter=0
+
+; Automatic firmware log retrieval. 0 - Off (default), or the number of milliseconds between log retrieval operations.
+;FirmwareLogInterval=1000
+
+; Print firmware log to console when automatic firmware log retrieval is on. 0 - Off (default), 1 - On
+;FirmwareLogPrint=1
+
+; Is APC enabled. 0 - Off, 1 - On (default)
+;APCEnabled=1
+
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines), 3 - ISO endpoints for low-bandwidth depth
+;UsbInterface=2
+
+[Depth]
+; Output format. 100 - 1mm depth values (default), 102 - u9.2 Shift values.
+;OutputFormat=102
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA, 1 - VGA, 4 - QQVGA. Default: Arm - 4, other platforms - 0
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Min depth cutoff. 0-10000 mm (default is 0)
+;MinDepthValue=0
+
+; Max depth cutoff. 0-10000 mm (default is 10000)
+;MaxDepthValue=10000
+
+; Input format. 0 - Uncompressed 16-bit, 1 - PS Compression, 3 - Packed 11-bit, 4 - Packed 12-bit. Default: Arm - 4, other platforms - 3
+;InputFormat=1
+
+; Registration. 0 - Off (default), 1 - On
+;Registration=1
+
+; Registration Type. 0 - Don't care (default), 1 - use hardware acceleration, 2 - perform in software
+;RegistrationType=0
+
+; Hole Filler. 0 - Off, 1 - On (default)
+;HoleFilter=1
+
+; White Balance. 0 - Off, 1 - On (default)
+;WhiteBalancedEnabled=1
+
+; Gain. 0-50 (0 - Auto, 1 - Min., 50 - Max.). Default value is set by firmware.
+;Gain=0
+
+; Close Range Mode. 0 - Off (default), 1 - On
+;CloseRange=0
+
+; GMC Mode. 0 - Off, 1 - On (default)
+;GMCMode=0
+
+; GMC Debug. 0 - Off (default), 1 - On
+;GMCDebug=1
+
+; Depth Auto Gain Region-of-Interest. Default values are set by firmware.
+;DepthAGCBin0MinDepth=500
+;DepthAGCBin0MaxDepth=800
+;DepthAGCBin1MinDepth=1500
+;DepthAGCBin1MaxDepth=1800
+;DepthAGCBin2MinDepth=2500
+;DepthAGCBin2MaxDepth=2800
+;DepthAGCBin3MinDepth=3500
+;DepthAGCBin3MaxDepth=3800
+
+; Wavelength Correction Mechanism. 0 - Off (default), 1 - On
+;WavelengthCorrection=1
+
+; Wavelength Correction debug info. 0 - Off (default), 1 - On
+;WavelengthCorrectionDebug=1
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Depth.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[Image]
+; Output format. 200 - RGB888 (default), 201 - YUV422, 202 - Gray8 (2.0 MP only), 205 - YUYV
+;OutputFormat=200
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA (1.3MP), 3 - UXGA (2.0MP), 14 - 720p, 15 - 1280x960
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Input format. 0 - Compressed 8-bit BAYER (1.3MP or 2.0MP only), 1 - Compressed YUV422 (default in BULK), 2 - Jpeg, 5 - Uncompressed YUV422 (default in ISO), 6 - Uncompressed 8-bit BAYER (1.3MP or 2.0MP only), 7 - Uncompressed YUYV
+;InputFormat=5
+
+; Anti Flicker. 0 - Off (default), 50 - 50Hz, 60 - 60 Hz.
+;Flicker=50
+
+; Image quality when using Jpeg. 1-10 (1 - Lowest, 10 - Highest (default))
+;Quality=10
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Image.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[IR]
+; Output format. 200 - RGB888, 203 - Grayscale 16-bit (default)
+;OutputFormat=203
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA(1.3MP)
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[IR.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
diff --git a/include/openni2_redist/arm/OpenNI2/Drivers/PSLink.ini b/include/openni2_redist/arm/OpenNI2/Drivers/PSLink.ini
new file mode 100644
index 00000000..577629b7
--- /dev/null
+++ b/include/openni2_redist/arm/OpenNI2/Drivers/PSLink.ini
@@ -0,0 +1,48 @@
+;---------------- PSLink Driver Default Configuration -------------------
+
+[Device]
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines)
+;UsbInterface=2
+
+; Enable firmware logs. 0 - Off (default), 1 - On
+;FirmwareLog=1
+
+[Depth]
+; Allows dumping all frames to files. 0 - Off (default), 1 - On
+;DumpData=1
+
+; Allow flipping the frame horizontally. 0 - Off, 1 - On (default)
+;Mirror=0
+
+; Compression of the data passed from device to host. 0 - None, 2 - 16z, 6 - 11-bit packed, 7 - 12-bit packed. Default is set by the firmware
+;Compression=2
+
+[Depth.VideoMode]
+; Pixel Format. 100 - Depth 1 mm, 101 - Depth 100 um, 102 - Shifts 9.2, 103 - Shifts 9.3
+;PixelFormat=100
+; Requested X resolution
+;XResolution=320
+; Requested Y resolution
+;YResolution=240
+; Requested FPS
+;FPS=30
+
+[IR]
+; Allows dumping all frames to files. 0 - Off (default), 1 - On
+;DumpData=1
+
+; Allow flipping the frame horizontally. 0 - Off, 1 - On (default)
+;Mirror=0
+
+; Compression of the data passed from device to host. 0 - None, 5 - 10-bit packed. Default is set by the firmware
+;Compression=5
+
+[IR.VideoMode]
+; Pixel Format. 200 - RGB888, 202 - Grayscale 8-bit, 203 - Grayscale 16-bit
+;PixelFormat=200
+; Requested X resolution
+;XResolution=320
+; Requested Y resolution
+;YResolution=240
+; Requested FPS
+;FPS=30
diff --git a/include/openni2_redist/arm/OpenNI2/Drivers/libORBBEC.so b/include/openni2_redist/arm/OpenNI2/Drivers/libORBBEC.so
new file mode 100644
index 00000000..c31efcd0
Binary files /dev/null and b/include/openni2_redist/arm/OpenNI2/Drivers/libORBBEC.so differ
diff --git a/include/openni2_redist/arm/OpenNI2/Drivers/libOniFile.so b/include/openni2_redist/arm/OpenNI2/Drivers/libOniFile.so
new file mode 100644
index 00000000..38e9db5e
Binary files /dev/null and b/include/openni2_redist/arm/OpenNI2/Drivers/libOniFile.so differ
diff --git a/include/openni2_redist/arm/OpenNI2/Drivers/libPSLink.so b/include/openni2_redist/arm/OpenNI2/Drivers/libPSLink.so
new file mode 100644
index 00000000..80e13f04
Binary files /dev/null and b/include/openni2_redist/arm/OpenNI2/Drivers/libPSLink.so differ
diff --git a/include/openni2_redist/arm/libOpenNI2.jni.so b/include/openni2_redist/arm/libOpenNI2.jni.so
new file mode 100644
index 00000000..092a8f05
Binary files /dev/null and b/include/openni2_redist/arm/libOpenNI2.jni.so differ
diff --git a/include/openni2_redist/arm/libOpenNI2.so b/include/openni2_redist/arm/libOpenNI2.so
new file mode 100644
index 00000000..5f0feade
Binary files /dev/null and b/include/openni2_redist/arm/libOpenNI2.so differ
diff --git a/include/openni2_redist/arm/org.openni.jar b/include/openni2_redist/arm/org.openni.jar
new file mode 100644
index 00000000..6edc1395
Binary files /dev/null and b/include/openni2_redist/arm/org.openni.jar differ
diff --git a/include/openni2_redist/arm64/OpenNI.ini b/include/openni2_redist/arm64/OpenNI.ini
new file mode 100644
index 00000000..d6fc79d1
--- /dev/null
+++ b/include/openni2_redist/arm64/OpenNI.ini
@@ -0,0 +1,17 @@
+[Log]
+; 0 - Verbose; 1 - Info; 2 - Warning; 3 - Error. Default - None
+;Verbosity=0
+;LogToConsole=1
+;LogToFile=1
+;LogToAndroidLog=1
+
+[Device]
+;Override=
+;RecordTo=
+
+[Drivers]
+; Location of the drivers, relative to OpenNI shared library location. When not provided, "OpenNI2/Drivers" will be used.
+;Repository=OpenNI2/Drivers
+
+; List of drivers to load, separated by commas. When not provided, OpenNI will try to load each shared library in Repository.
+;List=
diff --git a/include/openni2_redist/arm64/OpenNI2/Drivers/OniFile.ini b/include/openni2_redist/arm64/OpenNI2/Drivers/OniFile.ini
new file mode 100644
index 00000000..487c3191
--- /dev/null
+++ b/include/openni2_redist/arm64/OpenNI2/Drivers/OniFile.ini
@@ -0,0 +1,7 @@
+;---------------- Player Configuration -------------------
+[Player]
+; Speed. 1.0 - 1.0 * fps (default). Any real value R greater than 0 can be used.
+;Speed=1.0
+
+; Repeat. 1 - on (default), 0 - off
+;Repeat=1
\ No newline at end of file
diff --git a/include/openni2_redist/arm64/OpenNI2/Drivers/PS1080.ini b/include/openni2_redist/arm64/OpenNI2/Drivers/PS1080.ini
new file mode 100644
index 00000000..c7c23484
--- /dev/null
+++ b/include/openni2_redist/arm64/OpenNI2/Drivers/PS1080.ini
@@ -0,0 +1,158 @@
+;---------------- Sensor Default Configuration -------------------
+[Device]
+; Mirroring. 0 - Off (default), 1 - On
+;Mirror=1
+
+; FrameSync. 0 - Off (default), 1 - On
+;FrameSync=1
+
+; Stream Data Timestamps. 0 - milliseconds, 1 - microseconds (default)
+;HighResTimestamps=1
+
+; Stream Data Timestamps Source. 0 - Firmware (default), 1 - Host
+;HostTimestamps=0
+
+; A filter for the firmware log. Default is determined by firmware.
+;FirmwareLogFilter=0
+
+; Automatic firmware log retrieval. 0 - Off (default), or the number of milliseconds between log retrieval operations.
+;FirmwareLogInterval=1000
+
+; Print firmware log to console when automatic firmware log retrieval is on. 0 - Off (default), 1 - On
+;FirmwareLogPrint=1
+
+; Is APC enabled. 0 - Off, 1 - On (default)
+;APCEnabled=1
+
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines), 3 - ISO endpoints for low-bandwidth depth
+;UsbInterface=2
+
+[Depth]
+; Output format. 100 - 1mm depth values (default), 102 - u9.2 Shift values.
+;OutputFormat=102
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA, 1 - VGA, 4 - QQVGA. Default: Arm - 4, other platforms - 0
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Min depth cutoff. 0-10000 mm (default is 0)
+;MinDepthValue=0
+
+; Max depth cutoff. 0-10000 mm (default is 10000)
+;MaxDepthValue=10000
+
+; Input format. 0 - Uncompressed 16-bit, 1 - PS Compression, 3 - Packed 11-bit, 4 - Packed 12-bit. Default: Arm - 4, other platforms - 3
+;InputFormat=1
+
+; Registration. 0 - Off (default), 1 - On
+;Registration=1
+
+; Registration Type. 0 - Don't care (default), 1 - use hardware acceleration, 2 - perform in software
+;RegistrationType=0
+
+; Hole Filler. 0 - Off, 1 - On (default)
+;HoleFilter=1
+
+; White Balance. 0 - Off, 1 - On (default)
+;WhiteBalancedEnabled=1
+
+; Gain. 0-50 (0 - Auto, 1 - Min., 50 - Max.). Default value is set by firmware.
+;Gain=0
+
+; Close Range Mode. 0 - Off (default), 1 - On
+;CloseRange=0
+
+; GMC Mode. 0 - Off, 1 - On (default)
+;GMCMode=0
+
+; GMC Debug. 0 - Off (default), 1 - On
+;GMCDebug=1
+
+; Depth Auto Gain Region-of-Interest. Default values are set by firmware.
+;DepthAGCBin0MinDepth=500
+;DepthAGCBin0MaxDepth=800
+;DepthAGCBin1MinDepth=1500
+;DepthAGCBin1MaxDepth=1800
+;DepthAGCBin2MinDepth=2500
+;DepthAGCBin2MaxDepth=2800
+;DepthAGCBin3MinDepth=3500
+;DepthAGCBin3MaxDepth=3800
+
+; Wavelength Correction Mechanism. 0 - Off (default), 1 - On
+;WavelengthCorrection=1
+
+; Wavelength Correction debug info. 0 - Off (default), 1 - On
+;WavelengthCorrectionDebug=1
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Depth.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[Image]
+; Output format. 200 - RGB888 (default), 201 - YUV422, 202 - Gray8 (2.0 MP only), 205 - YUYV
+;OutputFormat=200
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA (1.3MP), 3 - UXGA (2.0MP), 14 - 720p, 15 - 1280x960
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Input format. 0 - Compressed 8-bit BAYER (1.3MP or 2.0MP only), 1 - Compressed YUV422 (default in BULK), 2 - Jpeg, 5 - Uncompressed YUV422 (default in ISO), 6 - Uncompressed 8-bit BAYER (1.3MP or 2.0MP only), 7 - Uncompressed YUYV
+;InputFormat=5
+
+; Anti Flicker. 0 - Off (default), 50 - 50Hz, 60 - 60 Hz.
+;Flicker=50
+
+; Image quality when using Jpeg. 1-10 (1 - Lowest, 10 - Highest (default))
+;Quality=10
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Image.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[IR]
+; Output format. 200 - RGB888, 203 - Grayscale 16-bit (default)
+;OutputFormat=203
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA(1.3MP)
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[IR.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
diff --git a/include/openni2_redist/arm64/OpenNI2/Drivers/PSLink.ini b/include/openni2_redist/arm64/OpenNI2/Drivers/PSLink.ini
new file mode 100644
index 00000000..577629b7
--- /dev/null
+++ b/include/openni2_redist/arm64/OpenNI2/Drivers/PSLink.ini
@@ -0,0 +1,48 @@
+;---------------- PSLink Driver Default Configuration -------------------
+
+[Device]
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines)
+;UsbInterface=2
+
+; Enable firmware logs. 0 - Off (default), 1 - On
+;FirmwareLog=1
+
+[Depth]
+; Allows dumping all frames to files. 0 - Off (default), 1 - On
+;DumpData=1
+
+; Allow flipping the frame horizontally. 0 - Off, 1 - On (default)
+;Mirror=0
+
+; Compression of the data passed from device to host. 0 - None, 2 - 16z, 6 - 11-bit packed, 7 - 12-bit packed. Default is set by the firmware
+;Compression=2
+
+[Depth.VideoMode]
+; Pixel Format. 100 - Depth 1 mm, 101 - Depth 100 um, 102 - Shifts 9.2, 103 - Shifts 9.3
+;PixelFormat=100
+; Requested X resolution
+;XResolution=320
+; Requested Y resolution
+;YResolution=240
+; Requested FPS
+;FPS=30
+
+[IR]
+; Allows dumping all frames to files. 0 - Off (default), 1 - On
+;DumpData=1
+
+; Allow flipping the frame horizontally. 0 - Off, 1 - On (default)
+;Mirror=0
+
+; Compression of the data passed from device to host. 0 - None, 5 - 10-bit packed. Default is set by the firmware
+;Compression=5
+
+[IR.VideoMode]
+; Pixel Format. 200 - RGB888, 202 - Grayscale 8-bit, 203 - Grayscale 16-bit
+;PixelFormat=200
+; Requested X resolution
+;XResolution=320
+; Requested Y resolution
+;YResolution=240
+; Requested FPS
+;FPS=30
diff --git a/include/openni2_redist/arm64/OpenNI2/Drivers/libOniFile.so b/include/openni2_redist/arm64/OpenNI2/Drivers/libOniFile.so
new file mode 100644
index 00000000..24e9cff5
Binary files /dev/null and b/include/openni2_redist/arm64/OpenNI2/Drivers/libOniFile.so differ
diff --git a/include/openni2_redist/arm64/OpenNI2/Drivers/libPSLink.so b/include/openni2_redist/arm64/OpenNI2/Drivers/libPSLink.so
new file mode 100644
index 00000000..a185a581
Binary files /dev/null and b/include/openni2_redist/arm64/OpenNI2/Drivers/libPSLink.so differ
diff --git a/include/openni2_redist/arm64/OpenNI2/Drivers/liborbbec.so b/include/openni2_redist/arm64/OpenNI2/Drivers/liborbbec.so
new file mode 100644
index 00000000..e854ead9
Binary files /dev/null and b/include/openni2_redist/arm64/OpenNI2/Drivers/liborbbec.so differ
diff --git a/include/openni2_redist/arm64/OpenNI2/Drivers/orbbec.ini b/include/openni2_redist/arm64/OpenNI2/Drivers/orbbec.ini
new file mode 100644
index 00000000..705c3396
--- /dev/null
+++ b/include/openni2_redist/arm64/OpenNI2/Drivers/orbbec.ini
@@ -0,0 +1,158 @@
+;---------------- Sensor Default Configuration -------------------
+[Device]
+; Mirroring. 0 - Off (default), 1 - On
+;Mirror=1
+
+; FrameSync. 0 - Off (default), 1 - On
+;FrameSync=1
+
+; Stream Data Timestamps. 0 - milliseconds, 1 - microseconds (default)
+;HighResTimestamps=1
+
+; Stream Data Timestamps Source. 0 - Firmware (default), 1 - Host
+;HostTimestamps=0
+
+; A filter for the firmware log. Default is determined by firmware.
+;FirmwareLogFilter=0
+
+; Automatic firmware log retrieval. 0 - Off (default), or the number of milliseconds between log retrieval operations.
+;FirmwareLogInterval=1000
+
+; Print firmware log to console when automatic firmware log retrieval is on. 0 - Off (default), 1 - On
+;FirmwareLogPrint=1
+
+; Is APC enabled. 0 - Off, 1 - On (default)
+;APCEnabled=1
+
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines), 3 - ISO endpoints for low-bandwidth depth
+UsbInterface=2
+
+[Depth]
+; Output format. 100 - 1mm depth values (default), 102 - u9.2 Shift values.
+;OutputFormat=100
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA, 1 - VGA, 4 - QQVGA. Default: Arm - 4, other platforms - 0
+Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Min depth cutoff. 0-10000 mm (default is 0)
+;MinDepthValue=0
+
+; Max depth cutoff. 0-10000 mm (default is 10000)
+;MaxDepthValue=10000
+
+; Input format. 0 - Uncompressed 16-bit, 1 - PS Compression, 3 - Packed 11-bit, 4 - Packed 12-bit. Default: Arm - 4, other platforms - 3
+InputFormat=3
+
+; Registration. 0 - Off (default), 1 - On
+;Registration=1
+
+; Registration Type. 0 - Don't care (default), 1 - use hardware acceleration, 2 - perform in software
+;RegistrationType=0
+
+; Hole Filler. 0 - Off, 1 - On (default)
+;HoleFilter=0
+
+; White Balance. 0 - Off, 1 - On (default)
+;WhiteBalancedEnabled=0
+
+; Gain. 0-50 (0 - Auto, 1 - Min., 50 - Max.). Default value is set by firmware.
+;Gain=0
+
+; Close Range Mode. 0 - Off (default), 1 - On
+;CloseRange=1
+
+; GMC Mode. 0 - Off, 1 - On (default)
+;GMCMode=0
+
+; GMC Debug. 0 - Off (default), 1 - On
+;GMCDebug=1
+
+; Depth Auto Gain Region-of-Interest. Default values are set by firmware.
+;DepthAGCBin0MinDepth=500
+;DepthAGCBin0MaxDepth=800
+;DepthAGCBin1MinDepth=1500
+;DepthAGCBin1MaxDepth=1800
+;DepthAGCBin2MinDepth=2500
+;DepthAGCBin2MaxDepth=2800
+;DepthAGCBin3MinDepth=3500
+;DepthAGCBin3MaxDepth=3800
+
+; Wavelength Correction Mechanism. 0 - Off (default), 1 - On
+;WavelengthCorrection=1
+
+; Wavelength Correction debug info. 0 - Off (default), 1 - On
+;WavelengthCorrectionDebug=1
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=2
+
+; Cropping area
+[Depth.Cropping]
+OffsetX=0
+OffsetY=0
+SizeX=320
+SizeY=240
+;Enabled=1
+
+[Image]
+; Output format. 200 - RGB888 (default), 201 - YUV422, 202 - Gray8 (2.0 MP only), 205 - YUYV
+;OutputFormat=201
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA (1.3MP), 3 - UXGA (2.0MP), 14 - 720p, 15 - 1280x960
+Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Input format. 0 - Compressed 8-bit BAYER (1.3MP or 2.0MP only), 1 - Compressed YUV422 (default in BULK), 2 - Jpeg, 5 - Uncompressed YUV422 (default in ISO), 6 - Uncompressed 8-bit BAYER (1.3MP or 2.0MP only), 7 - Uncompressed YUYV
+InputFormat=5
+
+; Anti Flicker. 0 - Off (default), 50 - 50Hz, 60 - 60 Hz.
+;Flicker=50
+
+; Image quality when using Jpeg. 1-10 (1 - Lowest, 10 - Highest (default))
+;Quality=10
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Image.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[IR]
+; Output format. 200 - RGB888, 203 - Grayscale 16-bit (default)
+;OutputFormat=203
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA(1.3MP)
+;Resolution=0
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[IR.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
diff --git a/include/openni2_redist/arm64/libOpenNI2.jni.so b/include/openni2_redist/arm64/libOpenNI2.jni.so
new file mode 100644
index 00000000..63f519f3
Binary files /dev/null and b/include/openni2_redist/arm64/libOpenNI2.jni.so differ
diff --git a/include/openni2_redist/arm64/libOpenNI2.so b/include/openni2_redist/arm64/libOpenNI2.so
new file mode 100644
index 00000000..fa62d17c
Binary files /dev/null and b/include/openni2_redist/arm64/libOpenNI2.so differ
diff --git a/include/openni2_redist/arm64/org.openni.jar b/include/openni2_redist/arm64/org.openni.jar
new file mode 100644
index 00000000..4f98b2fb
Binary files /dev/null and b/include/openni2_redist/arm64/org.openni.jar differ
diff --git a/include/openni2_redist/x64/OpenNI.ini b/include/openni2_redist/x64/OpenNI.ini
new file mode 100644
index 00000000..3375b84d
--- /dev/null
+++ b/include/openni2_redist/x64/OpenNI.ini
@@ -0,0 +1,14 @@
+[Log]
+; 0 - Verbose; 1 - Info; 2 - Warning; 3 - Error. Default - None
+Verbosity=3
+LogToConsole=0
+LogToFile=0
+
+[Device]
+;Override=""
+
+[Drivers]
+; Location of the drivers specified by a relative path based on OpenNI's shared library or an absolute path.
+; Path separator "/" can be used to be portable for any platforms.
+; Default - OpenNI2/Drivers
+;Repository=OpenNI2/Drivers
diff --git a/include/openni2_redist/x64/OpenNI2/Drivers/Orbbec.ini b/include/openni2_redist/x64/OpenNI2/Drivers/Orbbec.ini
new file mode 100644
index 00000000..9027fe0e
--- /dev/null
+++ b/include/openni2_redist/x64/OpenNI2/Drivers/Orbbec.ini
@@ -0,0 +1,158 @@
+;---------------- Sensor Default Configuration -------------------
+[Device]
+; Mirroring. 0 - Off (default), 1 - On
+;Mirror=1
+
+; FrameSync. 0 - Off (default), 1 - On
+;FrameSync=1
+
+; Stream Data Timestamps. 0 - milliseconds, 1 - microseconds (default)
+;HighResTimestamps=1
+
+; Stream Data Timestamps Source. 0 - Firmware (default), 1 - Host
+;HostTimestamps=0
+
+; A filter for the firmware log. Default is determined by firmware.
+;FirmwareLogFilter=0
+
+; Automatic firmware log retrieval. 0 - Off (default), or the number of milliseconds between log retrieval operations.
+;FirmwareLogInterval=1000
+
+; Print firmware log to console when automatic firmware log retrieval is on. 0 - Off (default), 1 - On
+;FirmwareLogPrint=1
+
+; Is APC enabled. 0 - Off, 1 - On (default)
+;APCEnabled=1
+
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines), 3 - ISO endpoints for low-bandwidth depth
+UsbInterface=2
+
+[Depth]
+; Output format. 100 - 1mm depth values (default), 102 - u9.2 Shift values.
+;OutputFormat=102
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA, 1 - VGA, 4 - QQVGA. Default: Arm - 4, other platforms - 0
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Min depth cutoff. 0-10000 mm (default is 0)
+;MinDepthValue=0
+
+; Max depth cutoff. 0-10000 mm (default is 10000)
+;MaxDepthValue=10000
+
+; Input format. 0 - Uncompressed 16-bit, 1 - PS Compression, 3 - Packed 11-bit, 4 - Packed 12-bit. Default: Arm - 4, other platforms - 3
+;InputFormat=1
+
+; Registration. 0 - Off (default), 1 - On
+;Registration=1
+
+; Registration Type. 0 - Don't care (default), 1 - use hardware acceleration, 2 - perform in software
+;RegistrationType=0
+
+; Hole Filler. 0 - Off, 1 - On (default)
+;HoleFilter=1
+
+; White Balance. 0 - Off, 1 - On (default)
+;WhiteBalancedEnabled=1
+
+; Gain. 0-50 (0 - Auto, 1 - Min., 50 - Max.). Default value is set by firmware.
+;Gain=0
+
+; Close Range Mode. 0 - Off (default), 1 - On
+;CloseRange=0
+
+; GMC Mode. 0 - Off, 1 - On (default)
+;GMCMode=0
+
+; GMC Debug. 0 - Off (default), 1 - On
+;GMCDebug=1
+
+; Depth Auto Gain Region-of-Interest. Default values are set by firmware.
+;DepthAGCBin0MinDepth=500
+;DepthAGCBin0MaxDepth=800
+;DepthAGCBin1MinDepth=1500
+;DepthAGCBin1MaxDepth=1800
+;DepthAGCBin2MinDepth=2500
+;DepthAGCBin2MaxDepth=2800
+;DepthAGCBin3MinDepth=3500
+;DepthAGCBin3MaxDepth=3800
+
+; Wavelength Correction Mechanism. 0 - Off (default), 1 - On
+;WavelengthCorrection=1
+
+; Wavelength Correction debug info. 0 - Off (default), 1 - On
+;WavelengthCorrectionDebug=1
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Depth.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[Image]
+; Output format. 200 - RGB888 (default), 201 - YUV422, 202 - Gray8 (2.0 MP only), 205 - YUYV
+;OutputFormat=200
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA (1.3MP), 3 - UXGA (2.0MP), 14 - 720p, 15 - 1280x960
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Input format. 0 - Compressed 8-bit BAYER (1.3MP or 2.0MP only), 1 - Compressed YUV422 (default in BULK), 2 - Jpeg, 5 - Uncompressed YUV422 (default in ISO), 6 - Uncompressed 8-bit BAYER (1.3MP or 2.0MP only), 7 - Uncompressed YUYV
+InputFormat=5
+
+; Anti Flicker. 0 - Off (default), 50 - 50Hz, 60 - 60 Hz.
+;Flicker=50
+
+; Image quality when using Jpeg. 1-10 (1 - Lowest, 10 - Highest (default))
+;Quality=10
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Image.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[IR]
+; Output format. 200 - RGB888, 203 - Grayscale 16-bit (default)
+;OutputFormat=203
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA(1.3MP)
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[IR.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
diff --git a/include/openni2_redist/x64/OpenNI2/Drivers/libORBBEC.so b/include/openni2_redist/x64/OpenNI2/Drivers/libORBBEC.so
new file mode 100644
index 00000000..c3fe0592
Binary files /dev/null and b/include/openni2_redist/x64/OpenNI2/Drivers/libORBBEC.so differ
diff --git a/include/openni2_redist/x64/OpenNI2/Drivers/libOniFile.so b/include/openni2_redist/x64/OpenNI2/Drivers/libOniFile.so
new file mode 100644
index 00000000..d8ad0d18
Binary files /dev/null and b/include/openni2_redist/x64/OpenNI2/Drivers/libOniFile.so differ
diff --git a/include/openni2_redist/x64/libOpenNI2.jni.so b/include/openni2_redist/x64/libOpenNI2.jni.so
new file mode 100644
index 00000000..75b8769f
Binary files /dev/null and b/include/openni2_redist/x64/libOpenNI2.jni.so differ
diff --git a/include/openni2_redist/x64/libOpenNI2.so b/include/openni2_redist/x64/libOpenNI2.so
new file mode 100644
index 00000000..7a27646c
Binary files /dev/null and b/include/openni2_redist/x64/libOpenNI2.so differ
diff --git a/include/openni2_redist/x64/org.openni.jar b/include/openni2_redist/x64/org.openni.jar
new file mode 100644
index 00000000..67cd8e00
Binary files /dev/null and b/include/openni2_redist/x64/org.openni.jar differ
diff --git a/include/openni2_redist/x86/OpenNI.ini b/include/openni2_redist/x86/OpenNI.ini
new file mode 100644
index 00000000..3375b84d
--- /dev/null
+++ b/include/openni2_redist/x86/OpenNI.ini
@@ -0,0 +1,14 @@
+[Log]
+; 0 - Verbose; 1 - Info; 2 - Warning; 3 - Error. Default - None
+Verbosity=3
+LogToConsole=0
+LogToFile=0
+
+[Device]
+;Override=""
+
+[Drivers]
+; Location of the drivers specified by a relative path based on OpenNI's shared library or an absolute path.
+; Path separator "/" can be used to be portable for any platforms.
+; Default - OpenNI2/Drivers
+;Repository=OpenNI2/Drivers
diff --git a/include/openni2_redist/x86/OpenNI2/Drivers/Orbbec.ini b/include/openni2_redist/x86/OpenNI2/Drivers/Orbbec.ini
new file mode 100644
index 00000000..9027fe0e
--- /dev/null
+++ b/include/openni2_redist/x86/OpenNI2/Drivers/Orbbec.ini
@@ -0,0 +1,158 @@
+;---------------- Sensor Default Configuration -------------------
+[Device]
+; Mirroring. 0 - Off (default), 1 - On
+;Mirror=1
+
+; FrameSync. 0 - Off (default), 1 - On
+;FrameSync=1
+
+; Stream Data Timestamps. 0 - milliseconds, 1 - microseconds (default)
+;HighResTimestamps=1
+
+; Stream Data Timestamps Source. 0 - Firmware (default), 1 - Host
+;HostTimestamps=0
+
+; A filter for the firmware log. Default is determined by firmware.
+;FirmwareLogFilter=0
+
+; Automatic firmware log retrieval. 0 - Off (default), or the number of milliseconds between log retrieval operations.
+;FirmwareLogInterval=1000
+
+; Print firmware log to console when automatic firmware log retrieval is on. 0 - Off (default), 1 - On
+;FirmwareLogPrint=1
+
+; Is APC enabled. 0 - Off, 1 - On (default)
+;APCEnabled=1
+
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines), 3 - ISO endpoints for low-bandwidth depth
+UsbInterface=2
+
+[Depth]
+; Output format. 100 - 1mm depth values (default), 102 - u9.2 Shift values.
+;OutputFormat=102
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA, 1 - VGA, 4 - QQVGA. Default: Arm - 4, other platforms - 0
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Min depth cutoff. 0-10000 mm (default is 0)
+;MinDepthValue=0
+
+; Max depth cutoff. 0-10000 mm (default is 10000)
+;MaxDepthValue=10000
+
+; Input format. 0 - Uncompressed 16-bit, 1 - PS Compression, 3 - Packed 11-bit, 4 - Packed 12-bit. Default: Arm - 4, other platforms - 3
+;InputFormat=1
+
+; Registration. 0 - Off (default), 1 - On
+;Registration=1
+
+; Registration Type. 0 - Don't care (default), 1 - use hardware acceleration, 2 - perform in software
+;RegistrationType=0
+
+; Hole Filler. 0 - Off, 1 - On (default)
+;HoleFilter=1
+
+; White Balance. 0 - Off, 1 - On (default)
+;WhiteBalancedEnabled=1
+
+; Gain. 0-50 (0 - Auto, 1 - Min., 50 - Max.). Default value is set by firmware.
+;Gain=0
+
+; Close Range Mode. 0 - Off (default), 1 - On
+;CloseRange=0
+
+; GMC Mode. 0 - Off, 1 - On (default)
+;GMCMode=0
+
+; GMC Debug. 0 - Off (default), 1 - On
+;GMCDebug=1
+
+; Depth Auto Gain Region-of-Interest. Default values are set by firmware.
+;DepthAGCBin0MinDepth=500
+;DepthAGCBin0MaxDepth=800
+;DepthAGCBin1MinDepth=1500
+;DepthAGCBin1MaxDepth=1800
+;DepthAGCBin2MinDepth=2500
+;DepthAGCBin2MaxDepth=2800
+;DepthAGCBin3MinDepth=3500
+;DepthAGCBin3MaxDepth=3800
+
+; Wavelength Correction Mechanism. 0 - Off (default), 1 - On
+;WavelengthCorrection=1
+
+; Wavelength Correction debug info. 0 - Off (default), 1 - On
+;WavelengthCorrectionDebug=1
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Depth.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[Image]
+; Output format. 200 - RGB888 (default), 201 - YUV422, 202 - Gray8 (2.0 MP only), 205 - YUYV
+;OutputFormat=200
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA (1.3MP), 3 - UXGA (2.0MP), 14 - 720p, 15 - 1280x960
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Input format. 0 - Compressed 8-bit BAYER (1.3MP or 2.0MP only), 1 - Compressed YUV422 (default in BULK), 2 - Jpeg, 5 - Uncompressed YUV422 (default in ISO), 6 - Uncompressed 8-bit BAYER (1.3MP or 2.0MP only), 7 - Uncompressed YUYV
+InputFormat=5
+
+; Anti Flicker. 0 - Off (default), 50 - 50Hz, 60 - 60 Hz.
+;Flicker=50
+
+; Image quality when using Jpeg. 1-10 (1 - Lowest, 10 - Highest (default))
+;Quality=10
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[Image.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
+
+[IR]
+; Output format. 200 - RGB888, 203 - Grayscale 16-bit (default)
+;OutputFormat=203
+
+; Is stream mirrored. 0 - Off, 1 - On
+;Mirror=1
+
+; 0 - QVGA (default), 1 - VGA, 2 - SXGA(1.3MP)
+;Resolution=1
+
+; Frames per second (default is 30)
+;FPS=30
+
+; Cropping mode. 1 - Normal (default), 2 - Increased FPS, 3 - Software only
+;CroppingMode=1
+
+; Cropping area
+[IR.Cropping]
+;OffsetX=0
+;OffsetY=0
+;SizeX=320
+;SizeY=240
+;Enabled=1
diff --git a/include/openni2_redist/x86/OpenNI2/Drivers/PSLink.ini b/include/openni2_redist/x86/OpenNI2/Drivers/PSLink.ini
new file mode 100644
index 00000000..705a7bfb
--- /dev/null
+++ b/include/openni2_redist/x86/OpenNI2/Drivers/PSLink.ini
@@ -0,0 +1,45 @@
+;---------------- PSLink Driver Default Configuration -------------------
+
+[Device]
+; USB interface to be used. 0 - FW Default, 1 - ISO endpoints (default on Windows), 2 - BULK endpoints (default on Linux/Mac/Android machines)
+;UsbInterface=2
+
+[Depth]
+; Allows dumping all frames to files. 0 - Off (default), 1 - On
+;DumpData=1
+
+; Allow flipping the frame horizontally. 0 - Off, 1 - On (default)
+;Mirror=0
+
+; Compression of the data passed from device to host. 0 - None, 2 - 16z, 6 - 11-bit packed, 7 - 12-bit packed. Default is set by the firmware
+;Compression=2
+
+[Depth.VideoMode]
+; Pixel Format. 100 - Depth 1 mm, 101 - Depth 100 um, 102 - Shifts 9.2, 103 - Shifts 9.3
+;PixelFormat=100
+; Requested X resolution
+;XResolution=320
+; Requested Y resolution
+;YResolution=240
+; Requested FPS
+;FPS=30
+
+[IR]
+; Allows dumping all frames to files. 0 - Off (default), 1 - On
+;DumpData=1
+
+; Allow flipping the frame horizontally. 0 - Off, 1 - On (default)
+;Mirror=0
+
+; Compression of the data passed from device to host. 0 - None, 5 - 10-bit packed. Default is set by the firmware
+;Compression=5
+
+[IR.VideoMode]
+; Pixel Format. 200 - RGB888, 202 - Grayscale 8-bit, 203 - Grayscale 16-bit
+;PixelFormat=200
+; Requested X resolution
+;XResolution=320
+; Requested Y resolution
+;YResolution=240
+; Requested FPS
+;FPS=30
diff --git a/include/openni2_redist/x86/OpenNI2/Drivers/libORBBEC.so b/include/openni2_redist/x86/OpenNI2/Drivers/libORBBEC.so
new file mode 100644
index 00000000..54e8b9d0
Binary files /dev/null and b/include/openni2_redist/x86/OpenNI2/Drivers/libORBBEC.so differ
diff --git a/include/openni2_redist/x86/OpenNI2/Drivers/libOniFile.so b/include/openni2_redist/x86/OpenNI2/Drivers/libOniFile.so
new file mode 100644
index 00000000..9c52819e
Binary files /dev/null and b/include/openni2_redist/x86/OpenNI2/Drivers/libOniFile.so differ
diff --git a/include/openni2_redist/x86/OpenNI2/Drivers/libPSLink.so b/include/openni2_redist/x86/OpenNI2/Drivers/libPSLink.so
new file mode 100644
index 00000000..b57e72ac
Binary files /dev/null and b/include/openni2_redist/x86/OpenNI2/Drivers/libPSLink.so differ
diff --git a/include/openni2_redist/x86/libOpenNI2.jni.so b/include/openni2_redist/x86/libOpenNI2.jni.so
new file mode 100644
index 00000000..a75dfb02
Binary files /dev/null and b/include/openni2_redist/x86/libOpenNI2.jni.so differ
diff --git a/include/openni2_redist/x86/libOpenNI2.so b/include/openni2_redist/x86/libOpenNI2.so
new file mode 100644
index 00000000..dae6fd49
Binary files /dev/null and b/include/openni2_redist/x86/libOpenNI2.so differ
diff --git a/include/openni2_redist/x86/org.openni.jar b/include/openni2_redist/x86/org.openni.jar
new file mode 100644
index 00000000..4635d821
Binary files /dev/null and b/include/openni2_redist/x86/org.openni.jar differ
diff --git a/package.xml b/package.xml
new file mode 100644
index 00000000..b0e161a3
--- /dev/null
+++ b/package.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+<package>
+ <name>astra_camera</name>
+ <version>0.1.0</version>
+ <description>Drivers for Orbbec Astra Devices.</description>
+ <maintainer email="liuhua@orbbec.com">Tim Liu</maintainer>
+ <license>BSD</license>
+
+ <author>Tim Liu</author>
+
+ <buildtool_depend>catkin</buildtool_depend>
+
+ <build_depend>libastra-dev</build_depend>
+ <build_depend>camera_info_manager</build_depend>
+ <build_depend>nodelet</build_depend>
+ <build_depend>sensor_msgs</build_depend>
+ <build_depend>roscpp</build_depend>
+ <build_depend>dynamic_reconfigure</build_depend>
+ <build_depend>image_transport</build_depend>
+ <build_depend>message_generation</build_depend>
+
+ <run_depend>libastra-dev</run_depend>
+ <run_depend>camera_info_manager</run_depend>
+ <run_depend>nodelet</run_depend>
+ <run_depend>dynamic_reconfigure</run_depend>
+ <run_depend>sensor_msgs</run_depend>
+ <run_depend>roscpp</run_depend>
+ <run_depend>image_transport</run_depend>
+ <run_depend>message_runtime</run_depend>
+
+ <export>
+ <nodelet plugin="${prefix}/astra_nodelets.xml"/>
+ </export>
+</package>
diff --git a/ros/astra_camera_node.cpp b/ros/astra_camera_node.cpp
new file mode 100644
index 00000000..19b9daea
--- /dev/null
+++ b/ros/astra_camera_node.cpp
@@ -0,0 +1,45 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#include "astra_camera/astra_driver.h"
+
+int main(int argc, char **argv){
+
+ ros::init(argc, argv, "astra_camera");
+ ros::NodeHandle n;
+ ros::NodeHandle pnh("~");
+
+ astra_wrapper::AstraDriver drv(n, pnh);
+
+ ros::spin();
+
+ return 0;
+}
diff --git a/ros/astra_camera_nodelet.cpp b/ros/astra_camera_nodelet.cpp
new file mode 100644
index 00000000..0ff34e97
--- /dev/null
+++ b/ros/astra_camera_nodelet.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#include "astra_camera/astra_driver.h"
+#include <nodelet/nodelet.h>
+
+namespace astra_camera
+{
+
+class AstraDriverNodelet : public nodelet::Nodelet
+{
+public:
+ AstraDriverNodelet() {};
+
+ ~AstraDriverNodelet() {}
+
+private:
+ virtual void onInit()
+ {
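+ // Create the driver using the nodelet's public and private node handles.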
+ lp.reset(new astra_wrapper::AstraDriver(getNodeHandle(), getPrivateNodeHandle()));
+ };
+
+ boost::shared_ptr<astra_wrapper::AstraDriver> lp;
+};
+
+}
+
+#include <pluginlib/class_list_macros.h>
+PLUGINLIB_DECLARE_CLASS(astra_camera, AstraDriverNodelet, astra_camera::AstraDriverNodelet, nodelet::Nodelet);
diff --git a/src/astra_convert.cpp b/src/astra_convert.cpp
new file mode 100644
index 00000000..63474f63
--- /dev/null
+++ b/src/astra_convert.cpp
@@ -0,0 +1,100 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#include "astra_camera/astra_convert.h"
+#include "astra_camera/astra_exception.h"
+
+#include <vector>
+
+#include <string>
+
+namespace astra_wrapper
+{
+
+const AstraDeviceInfo astra_convert(const openni::DeviceInfo* pInfo)
+{
+ if (!pInfo)
+ THROW_OPENNI_EXCEPTION("astra_convert called with zero pointer\n");
+
+ AstraDeviceInfo output;
+
+ output.name_ = pInfo->getName();
+ output.uri_ = pInfo->getUri();
+ output.vendor_ = pInfo->getVendor();
+ output.product_id_ = pInfo->getUsbProductId();
+ output.vendor_id_ = pInfo->getUsbVendorId();
+
+ return output;
+}
+
+
+const AstraVideoMode astra_convert(const openni::VideoMode& input)
+{
+ AstraVideoMode output;
+
+ output.x_resolution_ = input.getResolutionX();
+ output.y_resolution_ = input.getResolutionY();
+ output.frame_rate_ = input.getFps();
+ output.pixel_format_ = static_cast<PixelFormat>(input.getPixelFormat());
+
+ return output;
+}
+
+const openni::VideoMode astra_convert(const AstraVideoMode& input)
+{
+
+ openni::VideoMode output;
+
+ output.setResolution(input.x_resolution_, input.y_resolution_);
+ output.setFps(input.frame_rate_);
+ output.setPixelFormat(static_cast<openni::PixelFormat>(input.pixel_format_));
+
+ return output;
+}
+
+
+const std::vector<AstraVideoMode> astra_convert(const openni::Array<openni::VideoMode>& input)
+{
+ std::vector<AstraVideoMode> output;
+
+ int size = input.getSize();
+
+ output.reserve(size);
+
+ for (int i=0; i<size; ++i)
+ output.push_back(astra_convert(input[i]));
+
+ return output;
+}
+
+}
diff --git a/src/astra_device.cpp b/src/astra_device.cpp
new file mode 100644
--- /dev/null
+++ b/src/astra_device.cpp
+#include "openni2/OpenNI.h"
+
+#include <boost/lexical_cast.hpp>
+#include <boost/algorithm/string/replace.hpp>
+#include "astra_camera/astra_device.h"
+#include "astra_camera/astra_exception.h"
+#include "astra_camera/astra_convert.h"
+#include "astra_camera/astra_frame_listener.h"
+
+#include <boost/shared_ptr.hpp>
+#include <boost/make_shared.hpp>
+
+#include <string>
+
+namespace astra_wrapper
+{
+
+AstraDevice::AstraDevice(const std::string& device_URI) throw (AstraException) :
+ openni_device_(),
+ ir_video_started_(false),
+ color_video_started_(false),
+ depth_video_started_(false),
+ image_registration_activated_(false),
+ use_device_time_(false)
+{
+ openni::Status rc = openni::OpenNI::initialize();
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Initialize failed\n%s\n", openni::OpenNI::getExtendedError());
+
+ openni_device_ = boost::make_shared<openni::Device>();
+
+ if (device_URI.length() > 0)
+ {
+ rc = openni_device_->open(device_URI.c_str());
+ }
+ else
+ {
+ rc = openni_device_->open(openni::ANY_DEVICE);
+ }
+
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Device open failed\n%s\n", openni::OpenNI::getExtendedError());
+
+ device_info_ = boost::make_shared<openni::DeviceInfo>();
+ *device_info_ = openni_device_->getDeviceInfo();
+
+ ir_frame_listener = boost::make_shared<AstraFrameListener>();
+ color_frame_listener = boost::make_shared<AstraFrameListener>();
+ depth_frame_listener = boost::make_shared<AstraFrameListener>();
+
+}
+
+AstraDevice::~AstraDevice()
+{
+ stopAllStreams();
+
+ shutdown();
+
+ openni_device_->close();
+}
+
+const std::string AstraDevice::getUri() const
+{
+ return std::string(device_info_->getUri());
+}
+
+const std::string AstraDevice::getVendor() const
+{
+ return std::string(device_info_->getVendor());
+}
+
+const std::string AstraDevice::getName() const
+{
+ return std::string(device_info_->getName());
+}
+
+uint16_t AstraDevice::getUsbVendorId() const
+{
+ return device_info_->getUsbVendorId();
+}
+
+uint16_t AstraDevice::getUsbProductId() const
+{
+ return device_info_->getUsbProductId();
+}
+
+const std::string AstraDevice::getStringID() const
+{
+ std::string ID_str = getName() + "_" + getVendor();
+
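+ // Strip characters that are not valid in ROS graph resource names; the result is used as the camera name for camera_info_manager.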
+ boost::replace_all(ID_str, "/", "");
+ boost::replace_all(ID_str, ".", "");
+ boost::replace_all(ID_str, "@", "");
+
+ return ID_str;
+}
+
+bool AstraDevice::isValid() const
+{
+ return (openni_device_.get() != 0) && openni_device_->isValid();
+}
+
+float AstraDevice::getIRFocalLength(int output_y_resolution) const
+{
+ float focal_length = 0.0f;
+ boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();
+
+ if (stream)
+ {
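+ // Pinhole model: focal length in pixels = image height / (2 * tan(vertical field of view / 2)).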
+ focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
+ }
+
+ return focal_length;
+}
+
+float AstraDevice::getColorFocalLength(int output_y_resolution) const
+{
+ float focal_length = 0.0f;
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ if (stream)
+ {
+ focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
+ }
+
+ return focal_length;
+}
+
+float AstraDevice::getDepthFocalLength(int output_y_resolution) const
+{
+ float focal_length = 0.0f;
+ boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();
+
+ if (stream)
+ {
+ focal_length = (float)output_y_resolution / (2 * tan(stream->getVerticalFieldOfView() / 2));
+ }
+
+ return focal_length;
+}
+
+bool AstraDevice::isIRVideoModeSupported(const AstraVideoMode& video_mode) const
+{
+ getSupportedIRVideoModes();
+
+ bool supported = false;
+
+ std::vector<AstraVideoMode>::const_iterator it = ir_video_modes_.begin();
+ std::vector<AstraVideoMode>::const_iterator it_end = ir_video_modes_.end();
+
+ while (it != it_end && !supported)
+ {
+ supported = (*it == video_mode);
+ ++it;
+ }
+
+ return supported;
+}
+
+bool AstraDevice::isColorVideoModeSupported(const AstraVideoMode& video_mode) const
+{
+ getSupportedColorVideoModes();
+
+ bool supported = false;
+
+ std::vector<AstraVideoMode>::const_iterator it = color_video_modes_.begin();
+ std::vector<AstraVideoMode>::const_iterator it_end = color_video_modes_.end();
+
+ while (it != it_end && !supported)
+ {
+ supported = (*it == video_mode);
+ ++it;
+ }
+
+ return supported;
+}
+
+bool AstraDevice::isDepthVideoModeSupported(const AstraVideoMode& video_mode) const
+{
+ getSupportedDepthVideoModes();
+
+ bool supported = false;
+
+ std::vector<AstraVideoMode>::const_iterator it = depth_video_modes_.begin();
+ std::vector<AstraVideoMode>::const_iterator it_end = depth_video_modes_.end();
+
+ while (it != it_end && !supported)
+ {
+ supported = (*it == video_mode);
+ ++it;
+ }
+
+ return supported;
+
+}
+
+bool AstraDevice::hasIRSensor() const
+{
+ return openni_device_->hasSensor(openni::SENSOR_IR);
+}
+
+bool AstraDevice::hasColorSensor() const
+{
+ return openni_device_->hasSensor(openni::SENSOR_COLOR);
+}
+
+bool AstraDevice::hasDepthSensor() const
+{
+ return openni_device_->hasSensor(openni::SENSOR_DEPTH);
+}
+
+void AstraDevice::startIRStream()
+{
+ boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();
+
+ if (stream)
+ {
+ stream->setMirroringEnabled(false);
+ stream->start();
+ stream->addNewFrameListener(ir_frame_listener.get());
+ ir_video_started_ = true;
+ }
+
+}
+
+void AstraDevice::startColorStream()
+{
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ if (stream)
+ {
+ stream->setMirroringEnabled(false);
+ stream->start();
+ stream->addNewFrameListener(color_frame_listener.get());
+ color_video_started_ = true;
+ }
+}
+void AstraDevice::startDepthStream()
+{
+ boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();
+
+ if (stream)
+ {
+ stream->setMirroringEnabled(false);
+ stream->start();
+ stream->addNewFrameListener(depth_frame_listener.get());
+ depth_video_started_ = true;
+ }
+}
+
+void AstraDevice::stopAllStreams()
+{
+ stopIRStream();
+ stopColorStream();
+ stopDepthStream();
+}
+
+void AstraDevice::stopIRStream()
+{
+ if (ir_video_stream_.get() != 0)
+ {
+ ir_video_started_ = false;
+
+ ir_video_stream_->removeNewFrameListener(ir_frame_listener.get());
+
+ ir_video_stream_->stop();
+ }
+}
+void AstraDevice::stopColorStream()
+{
+ if (color_video_stream_.get() != 0)
+ {
+ color_video_started_ = false;
+
+ color_video_stream_->removeNewFrameListener(color_frame_listener.get());
+
+ color_video_stream_->stop();
+ }
+}
+void AstraDevice::stopDepthStream()
+{
+ if (depth_video_stream_.get() != 0)
+ {
+ depth_video_started_ = false;
+
+ depth_video_stream_->removeNewFrameListener(depth_frame_listener.get());
+
+ depth_video_stream_->stop();
+ }
+}
+
+void AstraDevice::shutdown()
+{
+ if (ir_video_stream_.get() != 0)
+ ir_video_stream_->destroy();
+
+ if (color_video_stream_.get() != 0)
+ color_video_stream_->destroy();
+
+ if (depth_video_stream_.get() != 0)
+ depth_video_stream_->destroy();
+
+}
+
+bool AstraDevice::isIRStreamStarted()
+{
+ return ir_video_started_;
+}
+bool AstraDevice::isColorStreamStarted()
+{
+ return color_video_started_;
+}
+bool AstraDevice::isDepthStreamStarted()
+{
+ return depth_video_started_;
+}
+
+const std::vector<AstraVideoMode>& AstraDevice::getSupportedIRVideoModes() const
+{
+ boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();
+
+ ir_video_modes_.clear();
+
+ if (stream)
+ {
+ const openni::SensorInfo& sensor_info = stream->getSensorInfo();
+
+ ir_video_modes_ = astra_convert(sensor_info.getSupportedVideoModes());
+ }
+
+ return ir_video_modes_;
+}
+
+const std::vector<AstraVideoMode>& AstraDevice::getSupportedColorVideoModes() const
+{
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ color_video_modes_.clear();
+
+ if (stream)
+ {
+ const openni::SensorInfo& sensor_info = stream->getSensorInfo();
+
+ color_video_modes_ = astra_convert(sensor_info.getSupportedVideoModes());
+ }
+
+ return color_video_modes_;
+}
+
+const std::vector<AstraVideoMode>& AstraDevice::getSupportedDepthVideoModes() const
+{
+ boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();
+
+ depth_video_modes_.clear();
+
+ if (stream)
+ {
+ const openni::SensorInfo& sensor_info = stream->getSensorInfo();
+
+ depth_video_modes_ = astra_convert(sensor_info.getSupportedVideoModes());
+ }
+
+ return depth_video_modes_;
+}
+
+bool AstraDevice::isImageRegistrationModeSupported() const
+{
+ return openni_device_->isImageRegistrationModeSupported(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
+}
+
+void AstraDevice::setImageRegistrationMode(bool enabled) throw (AstraException)
+{
+ if (isImageRegistrationModeSupported())
+ {
+ image_registration_activated_ = enabled;
+ if (enabled)
+ {
+ openni::Status rc = openni_device_->setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Enabling image registration mode failed: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+ else
+ {
+ openni::Status rc = openni_device_->setImageRegistrationMode(openni::IMAGE_REGISTRATION_OFF);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Enabling image registration mode failed: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+ }
+}
+
+void AstraDevice::setDepthColorSync(bool enabled) throw (AstraException)
+{
+ openni::Status rc = openni_device_->setDepthColorSyncEnabled(enabled);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Enabling depth color synchronization failed: \n%s\n", openni::OpenNI::getExtendedError());
+}
+
+const AstraVideoMode AstraDevice::getIRVideoMode() throw (AstraException)
+{
+ AstraVideoMode ret;
+
+ boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();
+
+ if (stream)
+ {
+ openni::VideoMode video_mode = stream->getVideoMode();
+
+ ret = astra_convert(video_mode);
+ }
+ else
+ THROW_OPENNI_EXCEPTION("Could not create video stream.");
+
+ return ret;
+}
+
+const AstraVideoMode AstraDevice::getColorVideoMode() throw (AstraException)
+{
+ AstraVideoMode ret;
+
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ if (stream)
+ {
+ openni::VideoMode video_mode = stream->getVideoMode();
+
+ ret = astra_convert(video_mode);
+ }
+ else
+ THROW_OPENNI_EXCEPTION("Could not create video stream.");
+
+ return ret;
+}
+
+const AstraVideoMode AstraDevice::getDepthVideoMode() throw (AstraException)
+{
+ AstraVideoMode ret;
+
+ boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();
+
+ if (stream)
+ {
+ openni::VideoMode video_mode = stream->getVideoMode();
+
+ ret = astra_convert(video_mode);
+ }
+ else
+ THROW_OPENNI_EXCEPTION("Could not create video stream.");
+
+ return ret;
+}
+
+void AstraDevice::setIRVideoMode(const AstraVideoMode& video_mode) throw (AstraException)
+{
+ boost::shared_ptr<openni::VideoStream> stream = getIRVideoStream();
+
+ if (stream)
+ {
+ const openni::VideoMode videoMode = astra_convert(video_mode);
+ const openni::Status rc = stream->setVideoMode(videoMode);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Couldn't set IR video mode: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+}
+
+void AstraDevice::setColorVideoMode(const AstraVideoMode& video_mode) throw (AstraException)
+{
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ if (stream)
+ {
+ const openni::VideoMode videoMode = astra_convert(video_mode);
+ const openni::Status rc = stream->setVideoMode(videoMode);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Couldn't set color video mode: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+}
+
+void AstraDevice::setDepthVideoMode(const AstraVideoMode& video_mode) throw (AstraException)
+{
+ boost::shared_ptr<openni::VideoStream> stream = getDepthVideoStream();
+
+ if (stream)
+ {
+ const openni::VideoMode videoMode = astra_convert(video_mode);
+ const openni::Status rc = stream->setVideoMode(videoMode);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Couldn't set depth video mode: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+}
+
+void AstraDevice::setAutoExposure(bool enable) throw (AstraException)
+{
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ if (stream)
+ {
+ openni::CameraSettings* camera_settings = stream->getCameraSettings();
+ if (camera_settings)
+ {
+ const openni::Status rc = camera_settings->setAutoExposureEnabled(enable);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Couldn't set auto exposure: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+
+ }
+}
+void AstraDevice::setAutoWhiteBalance(bool enable) throw (AstraException)
+{
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ if (stream)
+ {
+ openni::CameraSettings* camera_settings = stream->getCameraSettings();
+ if (camera_settings)
+ {
+ const openni::Status rc = camera_settings->setAutoWhiteBalanceEnabled(enable);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Couldn't set auto white balance: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+
+ }
+}
+
+bool AstraDevice::getAutoExposure() const
+{
+ bool ret = false;
+
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ if (stream)
+ {
+ openni::CameraSettings* camera_settings = stream->getCameraSettings();
+ if (camera_settings)
+ ret = camera_settings->getAutoExposureEnabled();
+ }
+
+ return ret;
+}
+bool AstraDevice::getAutoWhiteBalance() const
+{
+ bool ret = false;
+
+ boost::shared_ptr<openni::VideoStream> stream = getColorVideoStream();
+
+ if (stream)
+ {
+ openni::CameraSettings* camera_settings = stream->getCameraSettings();
+ if (camera_settings)
+ ret = camera_settings->getAutoWhiteBalanceEnabled();
+ }
+
+ return ret;
+}
+
+void AstraDevice::setUseDeviceTimer(bool enable)
+{
+ if (ir_frame_listener)
+ ir_frame_listener->setUseDeviceTimer(enable);
+
+ if (color_frame_listener)
+ color_frame_listener->setUseDeviceTimer(enable);
+
+ if (depth_frame_listener)
+ depth_frame_listener->setUseDeviceTimer(enable);
+}
+
+void AstraDevice::setIRFrameCallback(FrameCallbackFunction callback)
+{
+ ir_frame_listener->setCallback(callback);
+}
+
+void AstraDevice::setColorFrameCallback(FrameCallbackFunction callback)
+{
+ color_frame_listener->setCallback(callback);
+}
+
+void AstraDevice::setDepthFrameCallback(FrameCallbackFunction callback)
+{
+ depth_frame_listener->setCallback(callback);
+}
+
+boost::shared_ptr<openni::VideoStream> AstraDevice::getIRVideoStream() const throw (AstraException)
+{
+ if (ir_video_stream_.get() == 0)
+ {
+ if (hasIRSensor())
+ {
+ ir_video_stream_ = boost::make_shared<openni::VideoStream>();
+
+ const openni::Status rc = ir_video_stream_->create(*openni_device_, openni::SENSOR_IR);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Couldn't create IR video stream: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+ }
+ return ir_video_stream_;
+}
+
+boost::shared_ptr<openni::VideoStream> AstraDevice::getColorVideoStream() const throw (AstraException)
+{
+ if (color_video_stream_.get() == 0)
+ {
+ if (hasColorSensor())
+ {
+ color_video_stream_ = boost::make_shared<openni::VideoStream>();
+
+ const openni::Status rc = color_video_stream_->create(*openni_device_, openni::SENSOR_COLOR);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Couldn't create color video stream: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+ }
+ return color_video_stream_;
+}
+
+boost::shared_ptr<openni::VideoStream> AstraDevice::getDepthVideoStream() const throw (AstraException)
+{
+ if (depth_video_stream_.get() == 0)
+ {
+ if (hasDepthSensor())
+ {
+ depth_video_stream_ = boost::make_shared<openni::VideoStream>();
+
+ const openni::Status rc = depth_video_stream_->create(*openni_device_, openni::SENSOR_DEPTH);
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Couldn't create depth video stream: \n%s\n", openni::OpenNI::getExtendedError());
+ }
+ }
+ return depth_video_stream_;
+}
+
+std::ostream& operator <<(std::ostream& stream, const AstraDevice& device)
+{
+
+ stream << "Device info (" << device.getUri() << ")" << std::endl;
+ stream << " Vendor: " << device.getVendor() << std::endl;
+ stream << " Name: " << device.getName() << std::endl;
+ stream << " USB Vendor ID: " << device.getUsbVendorId() << std::endl;
+ stream << " USB Product ID: " << device.getUsbVendorId() << std::endl << std::endl;
+
+ if (device.hasIRSensor())
+ {
+ stream << "IR sensor video modes:" << std::endl;
+ const std::vector<AstraVideoMode>& video_modes = device.getSupportedIRVideoModes();
+
+ std::vector<AstraVideoMode>::const_iterator it = video_modes.begin();
+ std::vector<AstraVideoMode>::const_iterator it_end = video_modes.end();
+ for (; it != it_end; ++it)
+ stream << " - " << *it << std::endl;
+ }
+ else
+ {
+ stream << "No IR sensor available" << std::endl;
+ }
+
+ if (device.hasColorSensor())
+ {
+ stream << "Color sensor video modes:" << std::endl;
+ const std::vector<AstraVideoMode>& video_modes = device.getSupportedColorVideoModes();
+
+ std::vector<AstraVideoMode>::const_iterator it = video_modes.begin();
+ std::vector<AstraVideoMode>::const_iterator it_end = video_modes.end();
+ for (; it != it_end; ++it)
+ stream << " - " << *it << std::endl;
+ }
+ else
+ {
+ stream << "No Color sensor available" << std::endl;
+ }
+
+ if (device.hasDepthSensor())
+ {
+ stream << "Depth sensor video modes:" << std::endl;
+ const std::vector<AstraVideoMode>& video_modes = device.getSupportedDepthVideoModes();
+
+ std::vector<AstraVideoMode>::const_iterator it = video_modes.begin();
+ std::vector<AstraVideoMode>::const_iterator it_end = video_modes.end();
+ for (; it != it_end; ++it)
+ stream << " - " << *it << std::endl;
+ }
+ else
+ {
+ stream << "No Depth sensor available" << std::endl;
+ }
+
+ return stream;
+}
+
+}
diff --git a/src/astra_device_info.cpp b/src/astra_device_info.cpp
new file mode 100644
index 00000000..8b55148f
--- /dev/null
+++ b/src/astra_device_info.cpp
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#include "astra_camera/astra_device_info.h"
+
+namespace astra_wrapper
+{
+
+
+std::ostream& operator << (std::ostream& stream, const AstraDeviceInfo& device_info) {
+ stream << "Uri: " << device_info.uri_ << " (Vendor: " << device_info.vendor_ <<
+ ", Name: " << device_info.name_ <<
+ ", Vendor ID: " << std::hex << device_info.vendor_id_ <<
+ ", Product ID: " << std::hex << device_info.product_id_ <<
+ ")" << std::endl;
+ return stream;
+}
+
+
+
+} //namespace astra_wrapper
diff --git a/src/astra_device_manager.cpp b/src/astra_device_manager.cpp
new file mode 100644
index 00000000..98e4c14d
--- /dev/null
+++ b/src/astra_device_manager.cpp
@@ -0,0 +1,276 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#include "astra_camera/astra_device_manager.h"
+#include "astra_camera/astra_convert.h"
+#include "astra_camera/astra_device.h"
+#include "astra_camera/astra_exception.h"
+
+#include <boost/make_shared.hpp>
+
+#include <ros/ros.h>
+
+#include <set>
+#include <string>
+
+#include "openni2/OpenNI.h"
+
+namespace astra_wrapper
+{
+
+class AstraDeviceInfoComparator
+{
+public:
+ bool operator()(const AstraDeviceInfo& di1, const AstraDeviceInfo& di2)
+ {
+ return (di1.uri_.compare(di2.uri_) < 0);
+ }
+};
+
+typedef std::set<AstraDeviceInfo, AstraDeviceInfoComparator> DeviceSet;
+
+class AstraDeviceListener : public openni::OpenNI::DeviceConnectedListener,
+ public openni::OpenNI::DeviceDisconnectedListener,
+ public openni::OpenNI::DeviceStateChangedListener
+{
+public:
+ AstraDeviceListener() :
+ openni::OpenNI::DeviceConnectedListener(),
+ openni::OpenNI::DeviceDisconnectedListener(),
+ openni::OpenNI::DeviceStateChangedListener()
+ {
+ openni::OpenNI::addDeviceConnectedListener(this);
+ openni::OpenNI::addDeviceDisconnectedListener(this);
+ openni::OpenNI::addDeviceStateChangedListener(this);
+
+ // get list of currently connected devices
+ openni::Array<openni::DeviceInfo> device_info_list;
+ openni::OpenNI::enumerateDevices(&device_info_list);
+
+ for (int i = 0; i < device_info_list.getSize(); ++i)
+ {
+ onDeviceConnected(&device_info_list[i]);
+ }
+ }
+
+ ~AstraDeviceListener()
+ {
+ openni::OpenNI::removeDeviceConnectedListener(this);
+ openni::OpenNI::removeDeviceDisconnectedListener(this);
+ openni::OpenNI::removeDeviceStateChangedListener(this);
+ }
+
+ virtual void onDeviceStateChanged(const openni::DeviceInfo* pInfo, openni::DeviceState state)
+ {
+ ROS_INFO("Device \"%s\" error state changed to %d\n", pInfo->getUri(), state);
+
+ switch (state)
+ {
+ case openni::DEVICE_STATE_OK:
+ onDeviceConnected(pInfo);
+ break;
+ case openni::DEVICE_STATE_ERROR:
+ case openni::DEVICE_STATE_NOT_READY:
+ case openni::DEVICE_STATE_EOF:
+ default:
+ onDeviceDisconnected(pInfo);
+ break;
+ }
+ }
+
+ virtual void onDeviceConnected(const openni::DeviceInfo* pInfo)
+ {
+ boost::mutex::scoped_lock l(device_mutex_);
+
+ const AstraDeviceInfo device_info_wrapped = astra_convert(pInfo);
+
+ ROS_INFO("Device \"%s\" found.", pInfo->getUri());
+
+ // make sure it does not exist in set before inserting
+ device_set_.erase(device_info_wrapped);
+ device_set_.insert(device_info_wrapped);
+ }
+
+
+ virtual void onDeviceDisconnected(const openni::DeviceInfo* pInfo)
+ {
+ boost::mutex::scoped_lock l(device_mutex_);
+
+ ROS_WARN("Device \"%s\" disconnected\n", pInfo->getUri());
+
+ const AstraDeviceInfo device_info_wrapped = astra_convert(pInfo);
+ device_set_.erase(device_info_wrapped);
+ }
+
+ boost::shared_ptr<std::vector<std::string> > getConnectedDeviceURIs()
+ {
+ boost::mutex::scoped_lock l(device_mutex_);
+
+ boost::shared_ptr<std::vector<std::string> > result = boost::make_shared<std::vector<std::string> >();
+
+ result->reserve(device_set_.size());
+
+ std::set<AstraDeviceInfo, AstraDeviceInfoComparator>::const_iterator it;
+ std::set<AstraDeviceInfo, AstraDeviceInfoComparator>::const_iterator it_end = device_set_.end();
+
+ for (it = device_set_.begin(); it != it_end; ++it)
+ result->push_back(it->uri_);
+
+ return result;
+ }
+
+ boost::shared_ptr<std::vector<AstraDeviceInfo> > getConnectedDeviceInfos()
+ {
+ boost::mutex::scoped_lock l(device_mutex_);
+
+ boost::shared_ptr<std::vector<AstraDeviceInfo> > result = boost::make_shared<std::vector<AstraDeviceInfo> >();
+
+ result->reserve(device_set_.size());
+
+ DeviceSet::const_iterator it;
+ DeviceSet::const_iterator it_end = device_set_.end();
+
+ for (it = device_set_.begin(); it != it_end; ++it)
+ result->push_back(*it);
+
+ return result;
+ }
+
+ std::size_t getNumOfConnectedDevices()
+ {
+ boost::mutex::scoped_lock l(device_mutex_);
+
+ return device_set_.size();
+ }
+
+ boost::mutex device_mutex_;
+ DeviceSet device_set_;
+};
+
+//////////////////////////////////////////////////////////////////////////
+
+boost::shared_ptr<AstraDeviceManager> AstraDeviceManager::singelton_;
+
+AstraDeviceManager::AstraDeviceManager()
+{
+ openni::Status rc = openni::OpenNI::initialize();
+ if (rc != openni::STATUS_OK)
+ THROW_OPENNI_EXCEPTION("Initialize failed\n%s\n", openni::OpenNI::getExtendedError());
+
+ device_listener_ = boost::make_shared<AstraDeviceListener>();
+}
+
+AstraDeviceManager::~AstraDeviceManager()
+{
+}
+
+boost::shared_ptr<AstraDeviceManager> AstraDeviceManager::getSingelton()
+{
+ if (singelton_.get()==0)
+ singelton_ = boost::make_shared<AstraDeviceManager>();
+
+ return singelton_;
+}
+
+boost::shared_ptr<std::vector<AstraDeviceInfo> > AstraDeviceManager::getConnectedDeviceInfos() const
+{
+ return device_listener_->getConnectedDeviceInfos();
+}
+
+boost::shared_ptr<std::vector<std::string> > AstraDeviceManager::getConnectedDeviceURIs() const
+{
+ return device_listener_->getConnectedDeviceURIs();
+}
+
+std::size_t AstraDeviceManager::getNumOfConnectedDevices() const
+{
+ return device_listener_->getNumOfConnectedDevices();
+}
+
+std::string AstraDeviceManager::getSerial(const std::string& Uri) const
+{
+ openni::Device openni_device;
+ std::string ret;
+
+ // we need to open the device to query the serial number
+ if (Uri.length() > 0 && openni_device.open(Uri.c_str()) == openni::STATUS_OK)
+ {
+ int serial_len = 100;
+ char serial[serial_len];
+
+ openni::Status rc = openni_device.getProperty(openni::DEVICE_PROPERTY_SERIAL_NUMBER, serial, &serial_len);
+ if (rc == openni::STATUS_OK)
+ ret = serial;
+ else
+ {
+ THROW_OPENNI_EXCEPTION("Serial number query failed: %s", openni::OpenNI::getExtendedError());
+ }
+ // close the device again
+ openni_device.close();
+ }
+ else
+ {
+ THROW_OPENNI_EXCEPTION("Device open failed: %s", openni::OpenNI::getExtendedError());
+ }
+ return ret;
+}
+
+boost::shared_ptr<AstraDevice> AstraDeviceManager::getAnyDevice()
+{
+ return boost::make_shared<AstraDevice>("");
+}
+boost::shared_ptr<AstraDevice> AstraDeviceManager::getDevice(const std::string& device_URI)
+{
+ return boost::make_shared<AstraDevice>(device_URI);
+}
+
+
+std::ostream& operator << (std::ostream& stream, const AstraDeviceManager& device_manager) {
+
+ boost::shared_ptr<std::vector<AstraDeviceInfo> > device_info = device_manager.getConnectedDeviceInfos();
+
+ std::vector<AstraDeviceInfo>::const_iterator it;
+ std::vector<AstraDeviceInfo>::const_iterator it_end = device_info->end();
+
+ for (it = device_info->begin(); it != it_end; ++it)
+ {
+ stream << "Uri: " << it->uri_ << " (Vendor: " << it->vendor_ <<
+ ", Name: " << it->name_ <<
+ ", Vendor ID: " << it->vendor_id_ <<
+ ", Product ID: " << it->product_id_ <<
+ ")" << std::endl;
+ }
+
+ return stream;
+}
+
+
+} //namespace astra_wrapper
diff --git a/src/astra_driver.cpp b/src/astra_driver.cpp
new file mode 100644
index 00000000..7bb65aa5
--- /dev/null
+++ b/src/astra_driver.cpp
@@ -0,0 +1,917 @@
+/*
+ * Copyright (c) 2016, Orbbec Ltd.
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * * Neither the name of the Orbbec Ltd. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Author: Tim Liu (liuhua@orbbec.com)
+ */
+
+#include "astra_camera/astra_driver.h"
+#include "astra_camera/astra_exception.h"
+
+#include <sensor_msgs/image_encodings.h>
+#include <sensor_msgs/distortion_models.h>
+
+#include <boost/date_time/posix_time/posix_time.hpp>
+#include <boost/thread/thread.hpp>
+
+namespace astra_wrapper
+{
+
+AstraDriver::AstraDriver(ros::NodeHandle& n, ros::NodeHandle& pnh) :
+ nh_(n),
+ pnh_(pnh),
+ device_manager_(AstraDeviceManager::getSingelton()),
+ config_init_(false),
+ data_skip_ir_counter_(0),
+ data_skip_color_counter_(0),
+ data_skip_depth_counter_ (0),
+ ir_subscribers_(false),
+ color_subscribers_(false),
+ depth_subscribers_(false),
+ depth_raw_subscribers_(false)
+{
+
+ genVideoModeTableMap();
+
+ readConfigFromParameterServer();
+
+ initDevice();
+
+ // Initialize dynamic reconfigure
+ reconfigure_server_.reset(new ReconfigureServer(pnh_));
+ reconfigure_server_->setCallback(boost::bind(&AstraDriver::configCb, this, _1, _2));
+
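+ // setCallback() above immediately invokes configCb() with the current configuration; wait here until that first call has set config_init_.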
+ while (!config_init_)
+ {
+ ROS_DEBUG("Waiting for dynamic reconfigure configuration.");
+ boost::this_thread::sleep(boost::posix_time::milliseconds(100));
+ }
+ ROS_DEBUG("Dynamic reconfigure configuration received.");
+
+ advertiseROSTopics();
+
+}
+
+void AstraDriver::advertiseROSTopics()
+{
+
+ // Allow remapping namespaces rgb, ir, depth, depth_registered
+ ros::NodeHandle color_nh(nh_, "rgb");
+ image_transport::ImageTransport color_it(color_nh);
+ ros::NodeHandle ir_nh(nh_, "ir");
+ image_transport::ImageTransport ir_it(ir_nh);
+ ros::NodeHandle depth_nh(nh_, "depth");
+ image_transport::ImageTransport depth_it(depth_nh);
+ ros::NodeHandle depth_raw_nh(nh_, "depth");
+ image_transport::ImageTransport depth_raw_it(depth_raw_nh);
+ // Advertise all published topics
+
+ // Prevent connection callbacks from executing until we've set all the publishers. Otherwise
+ // connectCb() can fire while we're advertising (say) "depth/image_raw", but before we actually
+ // assign to pub_depth_raw_. Then pub_depth_raw_.getNumSubscribers() returns 0, and we fail to start
+ // the depth generator.
+ boost::lock_guard<boost::mutex> lock(connect_mutex_);
+
+ // Asus Xtion PRO does not have an RGB camera
+ if (device_->hasColorSensor())
+ {
+ image_transport::SubscriberStatusCallback itssc = boost::bind(&AstraDriver::colorConnectCb, this);
+ ros::SubscriberStatusCallback rssc = boost::bind(&AstraDriver::colorConnectCb, this);
+ pub_color_ = color_it.advertiseCamera("image", 1, itssc, itssc, rssc, rssc);
+ }
+
+ if (device_->hasIRSensor())
+ {
+ image_transport::SubscriberStatusCallback itssc = boost::bind(&AstraDriver::irConnectCb, this);
+ ros::SubscriberStatusCallback rssc = boost::bind(&AstraDriver::irConnectCb, this);
+ pub_ir_ = ir_it.advertiseCamera("image", 1, itssc, itssc, rssc, rssc);
+ }
+
+ if (device_->hasDepthSensor())
+ {
+ image_transport::SubscriberStatusCallback itssc = boost::bind(&AstraDriver::depthConnectCb, this);
+ ros::SubscriberStatusCallback rssc = boost::bind(&AstraDriver::depthConnectCb, this);
+ pub_depth_raw_ = depth_it.advertiseCamera("image_raw", 1, itssc, itssc, rssc, rssc);
+ pub_depth_ = depth_raw_it.advertiseCamera("image", 1, itssc, itssc, rssc, rssc);
+ }
+
+ ////////// CAMERA INFO MANAGER
+
+ // Pixel offset between depth and IR images.
+ // By default assume offset of (5,4) from 9x7 correlation window.
+ // NOTE: These are now (temporarily?) dynamically reconfigurable, to allow tweaking.
+ //param_nh.param("depth_ir_offset_x", depth_ir_offset_x_, 5.0);
+ //param_nh.param("depth_ir_offset_y", depth_ir_offset_y_, 4.0);
+
+ // The camera names are set to [rgb|depth]_[serial#], e.g. depth_B00367707227042B.
+ // camera_info_manager substitutes this for ${NAME} in the URL.
+ std::string serial_number = device_->getStringID();
+ std::string color_name, ir_name;
+
+ color_name = "rgb_" + serial_number;
+ ir_name = "depth_" + serial_number;
+
+ // Load the saved calibrations, if they exist
+ color_info_manager_ = boost::make_shared<camera_info_manager::CameraInfoManager>(color_nh, color_name, color_info_url_);
+ ir_info_manager_ = boost::make_shared<camera_info_manager::CameraInfoManager>(ir_nh, ir_name, ir_info_url_);
+
+ get_serial_server = nh_.advertiseService("get_serial", &AstraDriver::getSerialCb,this);
+
+}
+
+bool AstraDriver::getSerialCb(astra_camera::GetSerialRequest& req, astra_camera::GetSerialResponse& res) {
+ res.serial = device_manager_->getSerial(device_->getUri());
+ return true;
+}
+
+void AstraDriver::configCb(Config &config, uint32_t level)
+{
+ bool stream_reset = false;
+
+ depth_ir_offset_x_ = config.depth_ir_offset_x;
+ depth_ir_offset_y_ = config.depth_ir_offset_y;
+ z_offset_mm_ = config.z_offset_mm;
+ z_scaling_ = config.z_scaling;
+
+ ir_time_offset_ = ros::Duration(config.ir_time_offset);
+ color_time_offset_ = ros::Duration(config.color_time_offset);
+ depth_time_offset_ = ros::Duration(config.depth_time_offset);
+
+ if (lookupVideoModeFromDynConfig(config.ir_mode, ir_video_mode_)<0)
+ {
+ ROS_ERROR("Undefined IR video mode received from dynamic reconfigure");
+ exit(-1);
+ }
+
+ if (lookupVideoModeFromDynConfig(config.color_mode, color_video_mode_)<0)
+ {
+ ROS_ERROR("Undefined color video mode received from dynamic reconfigure");
+ exit(-1);
+ }
+
+ if (lookupVideoModeFromDynConfig(config.depth_mode, depth_video_mode_)<0)
+ {
+ ROS_ERROR("Undefined depth video mode received from dynamic reconfigure");
+ exit(-1);
+ }
+
+ // assign pixel format
+
+ ir_video_mode_.pixel_format_ = PIXEL_FORMAT_GRAY16;
+ color_video_mode_.pixel_format_ = PIXEL_FORMAT_RGB888;
+ depth_video_mode_.pixel_format_ = PIXEL_FORMAT_DEPTH_1_MM;
+
+ color_depth_synchronization_ = config.color_depth_synchronization;
+ depth_registration_ = config.depth_registration;
+
+ auto_exposure_ = config.auto_exposure;
+ auto_white_balance_ = config.auto_white_balance;
+
+ use_device_time_ = config.use_device_time;
+
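+ // data_skip_ is used as a modulo divisor in the frame callbacks: a data_skip of 0 publishes every frame, N publishes every (N+1)-th frame.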
+ data_skip_ = config.data_skip+1;
+
+ applyConfigToOpenNIDevice();
+
+ config_init_ = true;
+
+ old_config_ = config;
+}
+
+void AstraDriver::setIRVideoMode(const AstraVideoMode& ir_video_mode)
+{
+ if (device_->isIRVideoModeSupported(ir_video_mode))
+ {
+ if (ir_video_mode != device_->getIRVideoMode())
+ {
+ device_->setIRVideoMode(ir_video_mode);
+ }
+
+ }
+ else
+ {
+ ROS_ERROR_STREAM("Unsupported IR video mode - " << ir_video_mode);
+ }
+}
+void AstraDriver::setColorVideoMode(const AstraVideoMode& color_video_mode)
+{
+ if (device_->isColorVideoModeSupported(color_video_mode))
+ {
+ if (color_video_mode != device_->getColorVideoMode())
+ {
+ device_->setColorVideoMode(color_video_mode);
+ }
+ }
+ else
+ {
+ ROS_ERROR_STREAM("Unsupported color video mode - " << color_video_mode);
+ }
+}
+void AstraDriver::setDepthVideoMode(const AstraVideoMode& depth_video_mode)
+{
+ if (device_->isDepthVideoModeSupported(depth_video_mode))
+ {
+ if (depth_video_mode != device_->getDepthVideoMode())
+ {
+ device_->setDepthVideoMode(depth_video_mode);
+ }
+ }
+ else
+ {
+ ROS_ERROR_STREAM("Unsupported depth video mode - " << depth_video_mode);
+ }
+}
+
+void AstraDriver::applyConfigToOpenNIDevice()
+{
+
+ data_skip_ir_counter_ = 0;
+ data_skip_color_counter_= 0;
+ data_skip_depth_counter_ = 0;
+
+ setIRVideoMode(ir_video_mode_);
+ setColorVideoMode(color_video_mode_);
+ setDepthVideoMode(depth_video_mode_);
+
+ if (device_->isImageRegistrationModeSupported())
+ {
+ try
+ {
+ if (!config_init_ || (old_config_.depth_registration != depth_registration_))
+ device_->setImageRegistrationMode(depth_registration_);
+ }
+ catch (const AstraException& exception)
+ {
+ ROS_ERROR("Could not set image registration. Reason: %s", exception.what());
+ }
+ }
+
+ try
+ {
+ if (!config_init_ || (old_config_.color_depth_synchronization != color_depth_synchronization_))
+ device_->setDepthColorSync(color_depth_synchronization_);
+ }
+ catch (const AstraException& exception)
+ {
+ ROS_ERROR("Could not set color depth synchronization. Reason: %s", exception.what());
+ }
+
+ try
+ {
+ if (!config_init_ || (old_config_.auto_exposure != auto_exposure_))
+ device_->setAutoExposure(auto_exposure_);
+ }
+ catch (const AstraException& exception)
+ {
+ ROS_ERROR("Could not set auto exposure. Reason: %s", exception.what());
+ }
+
+ try
+ {
+ if (!config_init_ || (old_config_.auto_white_balance != auto_white_balance_))
+ device_->setAutoWhiteBalance(auto_white_balance_);
+ }
+ catch (const AstraException& exception)
+ {
+ ROS_ERROR("Could not set auto white balance. Reason: %s", exception.what());
+ }
+
+ device_->setUseDeviceTimer(use_device_time_);
+
+}
+
+void AstraDriver::colorConnectCb()
+{
+ boost::lock_guard<boost::mutex> lock(connect_mutex_);
+
+ color_subscribers_ = pub_color_.getNumSubscribers() > 0;
+
+ if (color_subscribers_ && !device_->isColorStreamStarted())
+ {
+ // Can't stream IR and RGB at the same time. Give RGB preference.
+ if (device_->isIRStreamStarted())
+ {
+ ROS_ERROR("Cannot stream RGB and IR at the same time. Streaming RGB only.");
+ ROS_INFO("Stopping IR stream.");
+ device_->stopIRStream();
+ }
+
+ device_->setColorFrameCallback(boost::bind(&AstraDriver::newColorFrameCallback, this, _1));
+
+ ROS_INFO("Starting color stream.");
+ device_->startColorStream();
+
+ }
+ else if (!color_subscribers_ && device_->isColorStreamStarted())
+ {
+ ROS_INFO("Stopping color stream.");
+ device_->stopColorStream();
+
+ // Start IR if it's been blocked on RGB subscribers
+ bool need_ir = pub_ir_.getNumSubscribers() > 0;
+ if (need_ir && !device_->isIRStreamStarted())
+ {
+ device_->setIRFrameCallback(boost::bind(&AstraDriver::newIRFrameCallback, this, _1));
+
+ ROS_INFO("Starting IR stream.");
+ device_->startIRStream();
+ }
+ }
+}
+
+void AstraDriver::depthConnectCb()
+{
+ boost::lock_guard<boost::mutex> lock(connect_mutex_);
+
+ depth_subscribers_ = pub_depth_.getNumSubscribers() > 0;
+ depth_raw_subscribers_ = pub_depth_raw_.getNumSubscribers() > 0;
+
+ bool need_depth = depth_subscribers_ || depth_raw_subscribers_;
+
+ if (need_depth && !device_->isDepthStreamStarted())
+ {
+ device_->setDepthFrameCallback(boost::bind(&AstraDriver::newDepthFrameCallback, this, _1));
+
+ ROS_INFO("Starting depth stream.");
+ device_->startDepthStream();
+ }
+ else if (!need_depth && device_->isDepthStreamStarted())
+ {
+ ROS_INFO("Stopping depth stream.");
+ device_->stopDepthStream();
+ }
+}
+
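+// Connection callback for the IR topic: the IR stream is only started if the
+// color stream is idle; otherwise the request is deferred until colorConnectCb
+// hands the sensor back (see above).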
+void AstraDriver::irConnectCb()
+{
+ boost::lock_guard<boost::mutex> lock(connect_mutex_);
+
+ ir_subscribers_ = pub_ir_.getNumSubscribers() > 0;
+
+ if (ir_subscribers_ && !device_->isIRStreamStarted())
+ {
+ // Can't stream IR and RGB at the same time
+ if (device_->isColorStreamStarted())
+ {
+ ROS_ERROR("Cannot stream RGB and IR at the same time. Streaming RGB only.");
+ }
+ else
+ {
+ device_->setIRFrameCallback(boost::bind(&AstraDriver::newIRFrameCallback, this, _1));
+
+ ROS_INFO("Starting IR stream.");
+ device_->startIRStream();
+ }
+ }
+ else if (!ir_subscribers_ && device_->isIRStreamStarted())
+ {
+ ROS_INFO("Stopping IR stream.");
+ device_->stopIRStream();
+ }
+}
+
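+// Frame callbacks. Each stream keeps its own skip counter so that only every
+// data_skip_-th frame is stamped with its frame_id and time offset and then
+// published, and only while the corresponding topic has subscribers.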
+void AstraDriver::newIRFrameCallback(sensor_msgs::ImagePtr image)
+{
+ if ((++data_skip_ir_counter_)%data_skip_==0)
+ {
+ data_skip_ir_counter_ = 0;
+
+ if (ir_subscribers_)
+ {
+ image->header.frame_id = ir_frame_id_;
+ image->header.stamp = image->header.stamp + ir_time_offset_;
+
+ pub_ir_.publish(image, getIRCameraInfo(image->width, image->height, image->header.stamp));
+ }
+ }
+}
+
+void AstraDriver::newColorFrameCallback(sensor_msgs::ImagePtr image)
+{
+ if ((++data_skip_color_counter_)%data_skip_==0)
+ {
+ data_skip_color_counter_ = 0;
+
+ if (color_subscribers_)
+ {
+ image->header.frame_id = color_frame_id_;
+ image->header.stamp = image->header.stamp + color_time_offset_;
+
+ pub_color_.publish(image, getColorCameraInfo(image->width, image->height, image->header.stamp));
+ }
+ }
+}
+
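+// Depth frames additionally get the optional z_offset_mm_ / z_scaling_
+// corrections applied to every non-zero pixel. Depending on depth_registration_
+// the image is published in the color or depth optical frame; a floating-point
+// version is derived via rawToFloatingPointConversion() for non-raw subscribers.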
+void AstraDriver::newDepthFrameCallback(sensor_msgs::ImagePtr image)
+{
+ if ((++data_skip_depth_counter_)%data_skip_==0)
+ {
+
+ data_skip_depth_counter_ = 0;
+
+ if (depth_raw_subscribers_||depth_subscribers_)
+ {
+ image->header.stamp = image->header.stamp + depth_time_offset_;
+
+ if (z_offset_mm_ != 0)
+ {
+ uint16_t* data = reinterpret_cast<uint16_t*>(&image->data[0]);
+ for (unsigned int i = 0; i < image->width * image->height; ++i)
+ if (data[i] != 0)
+ data[i] += z_offset_mm_;
+ }
+
+ if (fabs(z_scaling_ - 1.0) > 1e-6)
+ {
+ uint16_t* data = reinterpret_cast<uint16_t*>(&image->data[0]);
+ for (unsigned int i = 0; i < image->width * image->height; ++i)
+ if (data[i] != 0)
+ data[i] = static_cast<uint16_t>(data[i] * z_scaling_);
+ }
+
+ sensor_msgs::CameraInfoPtr cam_info;
+
+ if (depth_registration_)
+ {
+ image->header.frame_id = color_frame_id_;
+ cam_info = getColorCameraInfo(image->width,image->height, image->header.stamp);
+ } else
+ {
+ image->header.frame_id = depth_frame_id_;
+ cam_info = getDepthCameraInfo(image->width,image->height, image->header.stamp);
+ }
+
+ if (depth_raw_subscribers_)
+ {
+ pub_depth_raw_.publish(image, cam_info);
+ }
+
+ if (depth_subscribers_ )
+ {
+ sensor_msgs::ImageConstPtr floating_point_image = rawToFloatingPointConversion(image);
+ pub_depth_.publish(floating_point_image, cam_info);
+ }
+ }
+ }
+}
+
+// Methods to get calibration parameters for the various cameras
+sensor_msgs::CameraInfoPtr AstraDriver::getDefaultCameraInfo(int width, int height, double f) const
+{
+ sensor_msgs::CameraInfoPtr info = boost::make_shared<sensor_msgs::CameraInfo>();
+
+ info->width = width;
+ info->height = height;
+
+ // No distortion
+ info->D.resize(5, 0.0);
+ info->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
+
+ // Simple camera matrix: square pixels (fx = fy), principal point at center
+ info->K.assign(0.0);
+ info->K[0] = info->K[4] = f;
+ info->K[2] = (width / 2) - 0.5;
+ // Aspect ratio for the camera center on Astra (and other devices?) is 4/3
+ // This formula keeps the principal point the same in VGA and SXGA modes
+ info->K[5] = (width * (3./8.)) - 0.5;
+ info->K[8] = 1.0;
+
+ // No separate rectified image plane, so R = I
+ info->R.assign(0.0);
+ info->R[0] = info->R[4] = info->R[8] = 1.0;
+
+ // Then P=K(I|0) = (K|0)
+ info->P.assign(0.0);
+ info->P[0] = info->P[5] = f; // fx, fy
+ info->P[2] = info->K[2]; // cx
+ info->P[6] = info->K[5]; // cy
+ info->P[10] = 1.0;
+
+ return info;
+}
+
+/// @todo Use binning/ROI properly in publishing camera infos
+sensor_msgs::CameraInfoPtr AstraDriver::getColorCameraInfo(int width, int height, ros::Time time) const
+{
+ sensor_msgs::CameraInfoPtr info;
+
+ if (color_info_manager_->isCalibrated())
+ {
+ info = boost::make_shared<sensor_msgs::CameraInfo>(color_info_manager_->getCameraInfo());
+ if ( info->width != width )
+ {
+ // Use uncalibrated values
+ ROS_WARN_ONCE("Image resolution doesn't match calibration of the RGB camera. Using default parameters.");
+ info = getDefaultCameraInfo(width, height, device_->getColorFocalLength(height));
+ }
+ }
+ else
+ {
+ // If uncalibrated, fill in default values
+ info = getDefaultCameraInfo(width, height, device_->getColorFocalLength(height));
+ }
+
+ // Fill in header
+ info->header.stamp = time;
+ info->header.frame_id = color_frame_id_;
+
+ return info;
+}
+
+
+sensor_msgs::CameraInfoPtr AstraDriver::getIRCameraInfo(int width, int height, ros::Time time) const
+{
+ sensor_msgs::CameraInfoPtr info;
+
+ if (ir_info_manager_->isCalibrated())
+ {
+ info = boost::make_shared<sensor_msgs::CameraInfo>(ir_info_manager_->getCameraInfo());
+ if ( info->width != width )
+ {
+ // Use uncalibrated values
+ ROS_WARN_ONCE("Image resolution doesn't match calibration of the IR camera. Using default parameters.");
+ info = getDefaultCameraInfo(width, height, device_->getIRFocalLength(height));
+ }
+ }
+ else
+ {
+ // If uncalibrated, fill in default values
+ info = getDefaultCameraInfo(width, height, device_->getDepthFocalLength(height));
+ }
+
+ // Fill in header
+ info->header.stamp = time;
+ info->header.frame_id = depth_frame_id_;
+
+ return info;
+}
+
+sensor_msgs::CameraInfoPtr AstraDriver::getDepthCameraInfo(int width, int height, ros::Time time) const
+{
+ // The depth image has essentially the same intrinsics as the IR image, BUT the
+ // principal point is offset by half the size of the hardware correlation window
+ // (probably 9x9 or 9x7 in 640x480 mode). See http://www.ros.org/wiki/kinect_calibration/technical
+
+ double scaling = (double)width / 640;
+
+ sensor_msgs::CameraInfoPtr info = getIRCameraInfo(width, height, time);
+ info->K[2] -= depth_ir_offset_x_*scaling; // cx
+ info->K[5] -= depth_ir_offset_y_*scaling; // cy
+ info->P[2] -= depth_ir_offset_x_*scaling; // cx
+ info->P[6] -= depth_ir_offset_y_*scaling; // cy
+
+ /// @todo Could put this in projector frame so as to encode the baseline in P[3]
+ return info;
+}
+
+void AstraDriver::readConfigFromParameterServer()
+{
+ if (!pnh_.getParam("device_id", device_id_))
+ {
+ ROS_WARN ("~device_id is not set! Using first device.");
+ device_id_ = "#1";
+ }
+
+ // Camera TF frames
+ pnh_.param("ir_frame_id", ir_frame_id_, std::string("/openni_ir_optical_frame"));
+ pnh_.param("rgb_frame_id", color_frame_id_, std::string("/openni_rgb_optical_frame"));
+ pnh_.param("depth_frame_id", depth_frame_id_, std::string("/openni_depth_optical_frame"));
+
+ ROS_DEBUG("ir_frame_id = '%s' ", ir_frame_id_.c_str());
+ ROS_DEBUG("rgb_frame_id = '%s' ", color_frame_id_.c_str());
+ ROS_DEBUG("depth_frame_id = '%s' ", depth_frame_id_.c_str());
+
+ pnh_.param("rgb_camera_info_url", color_info_url_, std::string());
+ pnh_.param("depth_camera_info_url", ir_info_url_, std::string());
+
+}
+
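+// Resolves the ~device_id parameter to an OpenNI device URI. Supported forms:
+// "#<n>" (the n-th detected device), "<bus>@<number>" (USB bus/device number),
+// a device serial number, or any unique substring of a device URI.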
+std::string AstraDriver::resolveDeviceURI(const std::string& device_id) throw(AstraException)
+{
+ // retrieve available device URIs, they look like this: "1d27/0601@1/5"
+ // which is <vendor ID>/<product ID>@<bus number>/<device number>
+ boost::shared_ptr<std::vector<std::string> > available_device_URIs =
+ device_manager_->getConnectedDeviceURIs();
+
+ // look for '#<number>' format
+ if (device_id.size() > 1 && device_id[0] == '#')
+ {
+ std::istringstream device_number_str(device_id.substr(1));
+ int device_number;
+ device_number_str >> device_number;
+ int device_index = device_number - 1; // #1 refers to first device
+ if (device_index >= available_device_URIs->size() || device_index < 0)
+ {
+ THROW_OPENNI_EXCEPTION(
+ "Invalid device number %i, there are %zu devices connected.",
+ device_number, available_device_URIs->size());
+ }
+ else
+ {
+ return available_device_URIs->at(device_index);
+ }
+ }
+ // look for '<bus>@<number>' format
+ //   <bus>    is the usb bus id, typically starting at 1
+ //   <number> is the device number, for consistency with astra_camera, these start at 1
+ //            although 0 specifies "any device on this bus"
+ else if (device_id.size() > 1 && device_id.find('@') != std::string::npos && device_id.find('/') == std::string::npos)
+ {
+ // get index of @ character
+ size_t index = device_id.find('@');
+ if (index <= 0)
+ {
+ THROW_OPENNI_EXCEPTION(
+ "%s is not a valid device URI, you must give the bus number before the @.",
+ device_id.c_str());
+ }
+ if (index >= device_id.size() - 1)
+ {
+ THROW_OPENNI_EXCEPTION(
+ "%s is not a valid device URI, you must give a number after the @, specify 0 for first device",
+ device_id.c_str());
+ }
+
+ // pull out device number on bus
+ std::istringstream device_number_str(device_id.substr(index+1));
+ int device_number;
+ device_number_str >> device_number;
+
+ // reorder to @<bus>
+ std::string bus = device_id.substr(0, index);
+ bus.insert(0, "@");
+
+ for (size_t i = 0; i < available_device_URIs->size(); ++i)
+ {
+ std::string s = (*available_device_URIs)[i];
+ if (s.find(bus) != std::string::npos)
+ {
+ // this matches our bus, check device number
+ --device_number;
+ if (device_number <= 0)
+ return s;
+ }
+ }
+
+ THROW_OPENNI_EXCEPTION("Device not found %s", device_id.c_str());
+ }
+ else
+ {
+ // check if the device id given matches a serial number of a connected device
+ for(std::vector<std::string>::const_iterator it = available_device_URIs->begin();
+ it != available_device_URIs->end(); ++it)
+ {
+ try {
+ std::string serial = device_manager_->getSerial(*it);
+ if (serial.size() > 0 && device_id == serial)
+ return *it;
+ }
+ catch (const AstraException& exception)
+ {
+ ROS_WARN("Could not query serial number of device \"%s\":", exception.what());
+ }
+ }
+
+ // everything else is treated as part of the device_URI
+ bool match_found = false;
+ std::string matched_uri;
+ for (size_t i = 0; i < available_device_URIs->size(); ++i)
+ {
+ std::string s = (*available_device_URIs)[i];
+ if (s.find(device_id) != std::string::npos)
+ {
+ if (!match_found)
+ {
+ matched_uri = s;
+ match_found = true;
+ }
+ else
+ {
+ // more than one match
+ THROW_OPENNI_EXCEPTION("Two devices match the given device id '%s': %s and %s.", device_id.c_str(), matched_uri.c_str(), s.c_str());
+ }
+ }
+ }
+ return matched_uri;
+ }
+
+ return "INVALID";
+}
+
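+// Blocks until a device matching device_id_ can be opened, retrying every few
+// seconds while ROS is running, then waits for the device to report it is valid.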
+void AstraDriver::initDevice()
+{
+ while (ros::ok() && !device_)
+ {
+ try
+ {
+ std::string device_URI = resolveDeviceURI(device_id_);
+ device_ = device_manager_->getDevice(device_URI);
+ }
+ catch (const AstraException& exception)
+ {
+ if (!device_)
+ {
+ ROS_INFO("No matching device found.... waiting for devices. Reason: %s", exception.what());
+ boost::this_thread::sleep(boost::posix_time::seconds(3));
+ continue;
+ }
+ else
+ {
+ ROS_ERROR("Could not retrieve device. Reason: %s", exception.what());
+ exit(-1);
+ }
+ }
+ }
+
+ while (ros::ok() && !device_->isValid())
+ {
+ ROS_DEBUG("Waiting for device initialization..");
+ boost::this_thread::sleep(boost::posix_time::milliseconds(100));
+ }
+
+}
+
+void AstraDriver::genVideoModeTableMap()
+{
+ /*
+ * # Video modes defined by dynamic reconfigure:
+output_mode_enum = gen.enum([ gen.const( "SXGA_30Hz", int_t, 1, "1280x1024@30Hz"),
+ gen.const( "SXGA_15Hz", int_t, 2, "1280x1024@15Hz"),
+ gen.const( "XGA_30Hz", int_t, 3, "1280x720@30Hz"),
+ gen.const( "XGA_15Hz", int_t, 4, "1280x720@15Hz"),
+ gen.const( "VGA_30Hz", int_t, 5, "640x480@30Hz"),
+ gen.const( "VGA_25Hz", int_t, 6, "640x480@25Hz"),
+ gen.const( "QVGA_25Hz", int_t, 7, "320x240@25Hz"),
+ gen.const( "QVGA_30Hz", int_t, 8, "320x240@30Hz"),
+ gen.const( "QVGA_60Hz", int_t, 9, "320x240@60Hz"),
+ gen.const( "QQVGA_25Hz", int_t, 10, "160x120@25Hz"),
+ gen.const( "QQVGA_30Hz", int_t, 11, "160x120@30Hz"),
+ gen.const( "QQVGA_60Hz", int_t, 12, "160x120@60Hz")],
+ "output mode")
+ */
+
+ video_modes_lookup_.clear();
+
+ AstraVideoMode video_mode;
+
+ // SXGA_30Hz
+ video_mode.x_resolution_ = 1280;
+ video_mode.y_resolution_ = 1024;
+ video_mode.frame_rate_ = 30;
+
+ video_modes_lookup_[1] = video_mode;
+
+ // SXGA_15Hz
+ video_mode.x_resolution_ = 1280;
+ video_mode.y_resolution_ = 1024;
+ video_mode.frame_rate_ = 15;
+
+ video_modes_lookup_[2] = video_mode;
+
+ // XGA_30Hz
+ video_mode.x_resolution_ = 1280;
+ video_mode.y_resolution_ = 720;
+ video_mode.frame_rate_ = 30;
+
+ video_modes_lookup_[3] = video_mode;
+
+ // XGA_15Hz
+ video_mode.x_resolution_ = 1280;
+ video_mode.y_resolution_ = 720;
+ video_mode.frame_rate_ = 15;
+
+ video_modes_lookup_[4] = video_mode;
+
+ // VGA_30Hz
+ video_mode.x_resolution_ = 640;
+ video_mode.y_resolution_ = 480;
+ video_mode.frame_rate_ = 30;
+
+ video_modes_lookup_[5] = video_mode;
+
+ // VGA_25Hz
+ video_mode.x_resolution_ = 640;
+ video_mode.y_resolution_ = 480;
+ video_mode.frame_rate_ = 25;
+
+ video_modes_lookup_[6] = video_mode;
+
+ // QVGA_25Hz
+ video_mode.x_resolution_ = 320;
+ video_mode.y_resolution_ = 240;
+ video_mode.frame_rate_ = 25;
+
+ video_modes_lookup_[7] = video_mode;
+
+ // QVGA_30Hz
+ video_mode.x_resolution_ = 320;
+ video_mode.y_resolution_ = 240;
+ video_mode.frame_rate_ = 30;
+
+ video_modes_lookup_[8] = video_mode;
+
+ // QVGA_60Hz
+ video_mode.x_resolution_ = 320;
+ video_mode.y_resolution_ = 240;
+ video_mode.frame_rate_ = 60;
+
+ video_modes_lookup_[9] = video_mode;
+
+ // QQVGA_25Hz
+ video_mode.x_resolution_ = 160;
+ video_mode.y_resolution_ = 120;
+ video_mode.frame_rate_ = 25;
+
+ video_modes_lookup_[10] = video_mode;
+
+ // QQVGA_30Hz
+ video_mode.x_resolution_ = 160;
+ video_mode.y_resolution_ = 120;
+ video_mode.frame_rate_ = 30;
+
+ video_modes_lookup_[11] = video_mode;
+
+ // QQVGA_60Hz
+ video_mode.x_resolution_ = 160;
+ video_mode.y_resolution_ = 120;
+ video_mode.frame_rate_ = 60;
+
+ video_modes_lookup_[12] = video_mode;
+
+}
+
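+// Maps a dynamic_reconfigure enum value onto an AstraVideoMode; returns 0 on
+// success and -1 if mode_nr is not in the lookup table.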
+int AstraDriver::lookupVideoModeFromDynConfig(int mode_nr, AstraVideoMode& video_mode)
+{
+ int ret = -1;
+
+ std::map<int, AstraVideoMode>::const_iterator it;
+
+ it = video_modes_lookup_.find(mode_nr);
+
+ if (it!=video_modes_lookup_.end())
+ {
+ video_mode = it->second;
+ ret = 0;
+ }
+
+ return ret;
+}
+
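+// Converts the raw 16UC1 depth image into a floating-point (32FC1) image;
+// invalid (zero) readings become NaN.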
+sensor_msgs::ImageConstPtr AstraDriver::rawToFloatingPointConversion(sensor_msgs::ImageConstPtr raw_image)
+{
+ static const float bad_point = std::numeric_limits<float>::quiet_NaN ();
+
+ sensor_msgs::ImagePtr new_image = boost::make_shared