diff --git a/src/modules/newtek/interop/Processing.NDI.DynamicLoad.h b/src/modules/newtek/interop/Processing.NDI.DynamicLoad.h index 8979a1f7e9..fc6e2916d8 100644 --- a/src/modules/newtek/interop/Processing.NDI.DynamicLoad.h +++ b/src/modules/newtek/interop/Processing.NDI.DynamicLoad.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -27,555 +27,558 @@ // //*********************************************************************************************************** -typedef struct NDIlib_v5 -{ // V1.5 - union - { bool(*initialize)(void); +typedef struct NDIlib_v5 { + // V1.5 + union { + bool(*initialize)(void); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_initialize)(void); }; - union - { void(*destroy)(void); + union { + void(*destroy)(void); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_destroy)(void); }; - union - { const char* (*version)(void); + union { + const char* (*version)(void); PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_version)(void); }; - union - { bool(*is_supported_CPU)(void); + union { + bool(*is_supported_CPU)(void); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_is_supported_CPU)(void); }; - union - { PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t(*find_create)(const NDIlib_find_create_t* p_create_settings); + union { + PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t(*find_create)(const NDIlib_find_create_t* p_create_settings); PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t(*NDIlib_find_create)(const NDIlib_find_create_t* p_create_settings); }; - union - { NDIlib_find_instance_t(*find_create_v2)(const NDIlib_find_create_t* p_create_settings); + union { + NDIlib_find_instance_t(*find_create_v2)(const NDIlib_find_create_t* p_create_settings); PROCESSINGNDILIB_DEPRECATED NDIlib_find_instance_t(*NDIlib_find_create_v2)(const NDIlib_find_create_t* p_create_settings); }; - union - { void(*find_destroy)(NDIlib_find_instance_t p_instance); + union { + void(*find_destroy)(NDIlib_find_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_find_destroy)(NDIlib_find_instance_t p_instance); }; - union - { const NDIlib_source_t* (*find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms); + union { + const NDIlib_source_t* (*find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms); PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_find_get_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms); }; - union - { NDIlib_send_instance_t(*send_create)(const 
NDIlib_send_create_t* p_create_settings); + union { + NDIlib_send_instance_t(*send_create)(const NDIlib_send_create_t* p_create_settings); PROCESSINGNDILIB_DEPRECATED NDIlib_send_instance_t(*NDIlib_send_create)(const NDIlib_send_create_t* p_create_settings); }; - union - { void(*send_destroy)(NDIlib_send_instance_t p_instance); + + union { + void(*send_destroy)(NDIlib_send_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_destroy)(NDIlib_send_instance_t p_instance); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + union { + PROCESSINGNDILIB_DEPRECATED void(*send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_send_video)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + + union { + PROCESSINGNDILIB_DEPRECATED void(*send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_send_video_async)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); + union { + PROCESSINGNDILIB_DEPRECATED void(*send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_send_audio)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); }; - union - { void(*send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + union { + void(*send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_send_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); }; - union - { NDIlib_frame_type_e(*send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); + union { + NDIlib_frame_type_e(*send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e(*NDIlib_send_capture)(NDIlib_send_instance_t p_instance, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); }; - union - { void(*send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + union { + void(*send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_free_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); }; - union - { bool(*send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms); + union { + bool(*send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_send_get_tally)(NDIlib_send_instance_t p_instance, NDIlib_tally_t* p_tally, uint32_t timeout_in_ms); }; - union - { int(*send_get_no_connections)(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms); + union { + int(*send_get_no_connections)(NDIlib_send_instance_t p_instance, 
uint32_t timeout_in_ms); PROCESSINGNDILIB_DEPRECATED int(*NDIlib_send_get_no_connections)(NDIlib_send_instance_t p_instance, uint32_t timeout_in_ms); }; - union - { void(*send_clear_connection_metadata)(NDIlib_send_instance_t p_instance); + union { + void(*send_clear_connection_metadata)(NDIlib_send_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_clear_connection_metadata)(NDIlib_send_instance_t p_instance); }; - union - { void(*send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + union { + void(*send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_add_connection_metadata)(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); }; - union - { void(*send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source); + union { + void(*send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_set_failover)(NDIlib_send_instance_t p_instance, const NDIlib_source_t* p_failover_source); }; - union - { PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t(*recv_create_v2)(const NDIlib_recv_create_t* p_create_settings); + union { + PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t(*recv_create_v2)(const NDIlib_recv_create_t* p_create_settings); PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t(*NDIlib_recv_create_v2)(const NDIlib_recv_create_t* p_create_settings); }; - union - { PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t(*recv_create)(const NDIlib_recv_create_t* p_create_settings); + union { + PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t(*recv_create)(const NDIlib_recv_create_t* p_create_settings); PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t(*NDIlib_recv_create)(const NDIlib_recv_create_t* p_create_settings); }; - union - { void(*recv_destroy)(NDIlib_recv_instance_t p_instance); + union { + void(*recv_destroy)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_destroy)(NDIlib_recv_instance_t p_instance); }; - union - { PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e(*recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); + union { + PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e(*recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e(*NDIlib_recv_capture)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_t* p_video_data, NDIlib_audio_frame_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); + union { + PROCESSINGNDILIB_DEPRECATED void(*recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_free_video)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); + union { + PROCESSINGNDILIB_DEPRECATED 
void(*recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_free_audio)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); }; - union - { void(*recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + union { + void(*recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_free_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); }; - union - { bool(*recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + union { + bool(*recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_send_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); }; - union - { bool(*recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally); + union { + bool(*recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_set_tally)(NDIlib_recv_instance_t p_instance, const NDIlib_tally_t* p_tally); }; - union - { void(*recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped); + union { + void(*recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_get_performance)(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped); }; - union - { void(*recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total); + union { + void(*recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_get_queue)(NDIlib_recv_instance_t p_instance, NDIlib_recv_queue_t* p_total); }; - union - { void(*recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance); + union { + void(*recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_clear_connection_metadata)(NDIlib_recv_instance_t p_instance); }; - union - { void(*recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); + union { + void(*recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_add_connection_metadata)(NDIlib_recv_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); }; - union - { int(*recv_get_no_connections)(NDIlib_recv_instance_t p_instance); + union { + int(*recv_get_no_connections)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED int(*NDIlib_recv_get_no_connections)(NDIlib_recv_instance_t p_instance); }; - union - { NDIlib_routing_instance_t(*routing_create)(const NDIlib_routing_create_t* p_create_settings); + + union { + NDIlib_routing_instance_t(*routing_create)(const NDIlib_routing_create_t* p_create_settings); PROCESSINGNDILIB_DEPRECATED NDIlib_routing_instance_t(*NDIlib_routing_create)(const NDIlib_routing_create_t* p_create_settings); }; - union - { void(*routing_destroy)(NDIlib_routing_instance_t p_instance); + + union { + 
void(*routing_destroy)(NDIlib_routing_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_routing_destroy)(NDIlib_routing_instance_t p_instance); }; - union - { bool(*routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source); + union { + bool(*routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_routing_change)(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source); }; - union - { bool(*routing_clear)(NDIlib_routing_instance_t p_instance); + union { + bool(*routing_clear)(NDIlib_routing_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_routing_clear)(NDIlib_routing_instance_t p_instance); }; - union - { void(*util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data); + + union { + void(*util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_send_send_audio_interleaved_16s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); + union { + PROCESSINGNDILIB_DEPRECATED void(*util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_to_interleaved_16s)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst); + union { + PROCESSINGNDILIB_DEPRECATED void(*util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_from_interleaved_16s)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_t* p_dst); }; // V2 - union - { bool(*find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms); + union { + bool(*find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_find_wait_for_sources)(NDIlib_find_instance_t p_instance, uint32_t timeout_in_ms); }; - union - { const NDIlib_source_t* (*find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources); + union { + const NDIlib_source_t* (*find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources); PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_find_get_current_sources)(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); + union { + PROCESSINGNDILIB_DEPRECATED void(*util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_to_interleaved_32f)(const NDIlib_audio_frame_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); }; - union - { PROCESSINGNDILIB_DEPRECATED void(*util_audio_from_interleaved_32f)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst); + union { + 
PROCESSINGNDILIB_DEPRECATED void(*util_audio_from_interleaved_32f)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_from_interleaved_32f)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_t* p_dst); }; - union - { void(*util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data); + union { + void(*util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_send_send_audio_interleaved_32f)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data); }; // V3 - union - { void(*recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + union { + void(*recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_free_video_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); }; - union - { void(*recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); + union { + void(*recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_free_audio_v2)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); }; - union - { NDIlib_frame_type_e(*recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + union { + NDIlib_frame_type_e(*recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e(*NDIlib_recv_capture_v2)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v2_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. 
}; - union - { void(*send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + union { + void(*send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_send_video_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); }; - union - { void(*send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); + union { + void(*send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_send_video_async_v2)(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); }; - union - { void(*send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); + union { + void(*send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_send_audio_v2)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); }; - union - { void(*util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); + union { + void(*util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_to_interleaved_16s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); }; - union - { void(*util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + union { + void(*util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_from_interleaved_16s_v2)(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); }; - union - { void(*util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); + union { + void(*util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_to_interleaved_32f_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); }; - union - { void(*util_audio_from_interleaved_32f_v2)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + union { + void(*util_audio_from_interleaved_32f_v2)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_from_interleaved_32f_v2)(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst); }; // V3.01 - union - { void(*recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string); + union { + void(*recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_free_string)(NDIlib_recv_instance_t p_instance, const char* p_string); }; - union - { bool(*recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance); + union { + bool(*recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_is_supported)(NDIlib_recv_instance_t p_instance); }; - union - { // This functionality is now provided via external 
NDI recording, see SDK documentation. + union { + // This functionality is now provided via external NDI recording, see SDK documentation. PROCESSINGNDILIB_DEPRECATED bool(*recv_recording_is_supported)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_recording_is_supported)(NDIlib_recv_instance_t p_instance); }; - union - { const char*(*recv_get_web_control)(NDIlib_recv_instance_t p_instance); - PROCESSINGNDILIB_DEPRECATED const char*(*NDIlib_recv_get_web_control)(NDIlib_recv_instance_t p_instance); + union { + const char* (*recv_get_web_control)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_get_web_control)(NDIlib_recv_instance_t p_instance); }; - union - { bool(*recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value); + union { + bool(*recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_zoom)(NDIlib_recv_instance_t p_instance, const float zoom_value); }; - union - { bool(*recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed); + union { + bool(*recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_zoom_speed)(NDIlib_recv_instance_t p_instance, const float zoom_speed); }; - union - { bool(*recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value); + union { + bool(*recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_pan_tilt)(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value); }; - union - { bool(*recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed); + union { + bool(*recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_pan_tilt_speed)(NDIlib_recv_instance_t p_instance, const float pan_speed, const float tilt_speed); }; - union - { bool(*recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no); + union { + bool(*recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_store_preset)(NDIlib_recv_instance_t p_instance, const int preset_no); }; - union - { bool(*recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed); + union { + bool(*recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_recall_preset)(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed); }; - union - { bool(*recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance); + union { + bool(*recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_auto_focus)(NDIlib_recv_instance_t p_instance); }; - union - { bool(*recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value); + union { + bool(*recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_focus)(NDIlib_recv_instance_t p_instance, const float focus_value); }; - union - { bool(*recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed); + union { + 
bool(*recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_focus_speed)(NDIlib_recv_instance_t p_instance, const float focus_speed); }; - union - { bool(*recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance); + union { + bool(*recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_white_balance_auto)(NDIlib_recv_instance_t p_instance); }; - union - { bool(*recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance); + union { + bool(*recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_white_balance_indoor)(NDIlib_recv_instance_t p_instance); }; - union - { bool(*recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance); + union { + bool(*recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_white_balance_outdoor)(NDIlib_recv_instance_t p_instance); }; - union - { bool(*recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance); + union { + bool(*recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_white_balance_oneshot)(NDIlib_recv_instance_t p_instance); }; - union - { bool(*recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue); + union { + bool(*recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_white_balance_manual)(NDIlib_recv_instance_t p_instance, const float red, const float blue); }; - union - { bool(*recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance); + union { + bool(*recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_exposure_auto)(NDIlib_recv_instance_t p_instance); }; - union - { bool(*recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level); + union { + bool(*recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_exposure_manual)(NDIlib_recv_instance_t p_instance, const float exposure_level); }; - union - { // This functionality is now provided via external NDI recording, see SDK documentation. + union { + // This functionality is now provided via external NDI recording, see SDK documentation. PROCESSINGNDILIB_DEPRECATED bool(*recv_recording_start)(NDIlib_recv_instance_t p_instance, const char* p_filename_hint); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_recording_start)(NDIlib_recv_instance_t p_instance, const char* p_filename_hint); }; - union - { // This functionality is now provided via external NDI recording, see SDK documentation. + union { + // This functionality is now provided via external NDI recording, see SDK documentation. PROCESSINGNDILIB_DEPRECATED bool(*recv_recording_stop)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_recording_stop)(NDIlib_recv_instance_t p_instance); }; - union - { // This functionality is now provided via external NDI recording, see SDK documentation. + union { + // This functionality is now provided via external NDI recording, see SDK documentation. 
PROCESSINGNDILIB_DEPRECATED bool(*recv_recording_set_audio_level)(NDIlib_recv_instance_t p_instance, const float level_dB); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_recording_set_audio_level)(NDIlib_recv_instance_t p_instance, const float level_dB); }; - union - { // This functionality is now provided via external NDI recording, see SDK documentation. + union { // This functionality is now provided via external NDI recording, see SDK documentation. PROCESSINGNDILIB_DEPRECATED bool(*recv_recording_is_recording)(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_recording_is_recording)(NDIlib_recv_instance_t p_instance); }; - union - { // This functionality is now provided via external NDI recording, see SDK documentation. - PROCESSINGNDILIB_DEPRECATED const char*(*recv_recording_get_filename)(NDIlib_recv_instance_t p_instance); - PROCESSINGNDILIB_DEPRECATED const char*(*NDIlib_recv_recording_get_filename)(NDIlib_recv_instance_t p_instance); + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED const char* (*recv_recording_get_filename)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_recording_get_filename)(NDIlib_recv_instance_t p_instance); }; - union - { // This functionality is now provided via external NDI recording, see SDK documentation. - PROCESSINGNDILIB_DEPRECATED const char*(*recv_recording_get_error)(NDIlib_recv_instance_t p_instance); - PROCESSINGNDILIB_DEPRECATED const char*(*NDIlib_recv_recording_get_error)(NDIlib_recv_instance_t p_instance); + union { + // This functionality is now provided via external NDI recording, see SDK documentation. + PROCESSINGNDILIB_DEPRECATED const char* (*recv_recording_get_error)(NDIlib_recv_instance_t p_instance); + PROCESSINGNDILIB_DEPRECATED const char* (*NDIlib_recv_recording_get_error)(NDIlib_recv_instance_t p_instance); }; - union - { // This functionality is now provided via external NDI recording, see SDK documentation. + union { + // This functionality is now provided via external NDI recording, see SDK documentation. 
PROCESSINGNDILIB_DEPRECATED bool(*recv_recording_get_times)(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times); PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_recording_get_times)(NDIlib_recv_instance_t p_instance, NDIlib_recv_recording_time_t* p_times); }; // V3.1 - union - { NDIlib_recv_instance_t(*recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings); + union { + NDIlib_recv_instance_t(*recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings); PROCESSINGNDILIB_DEPRECATED NDIlib_recv_instance_t(*NDIlib_recv_create_v3)(const NDIlib_recv_create_v3_t* p_create_settings); }; // V3.5 - union - { void(*recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src); + union { + void(*recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_connect)(NDIlib_recv_instance_t p_instance, const NDIlib_source_t* p_src); }; // V3.6 - union - { NDIlib_framesync_instance_t(*framesync_create)(NDIlib_recv_instance_t p_receiver); + union { + NDIlib_framesync_instance_t(*framesync_create)(NDIlib_recv_instance_t p_receiver); PROCESSINGNDILIB_DEPRECATED NDIlib_framesync_instance_t(*NDIlib_framesync_create)(NDIlib_recv_instance_t p_receiver); }; - union - { void(*framesync_destroy)(NDIlib_framesync_instance_t p_instance); + union { + void(*framesync_destroy)(NDIlib_framesync_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_framesync_destroy)(NDIlib_framesync_instance_t p_instance); }; - union - { void(*framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples); + union { + void(*framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_framesync_capture_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data, int sample_rate, int no_channels, int no_samples); }; - union - { void(*framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data); + union { + void(*framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_framesync_free_audio)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data); }; - union - { void(*framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type); + union { + void(*framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_framesync_capture_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_frame_format_type_e field_type); }; - union - { void(*framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data); + union { + void(*framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_framesync_free_video)(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data); }; - union - { void(*util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data); + union { + 
void(*util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_send_send_audio_interleaved_32s)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data); }; - union - { void(*util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst); + union { + void(*util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_to_interleaved_32s_v2)(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst); }; - union - { void(*util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); + union { + void(*util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_audio_from_interleaved_32s_v2)(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); }; // V3.8 - union - { const NDIlib_source_t* (*send_get_source_name)(NDIlib_send_instance_t p_instance); + union { + const NDIlib_source_t* (*send_get_source_name)(NDIlib_send_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_send_get_source_name)(NDIlib_send_instance_t p_instance); }; // V4.0 - union - { void(*send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); + union { + void(*send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_send_send_audio_v3)(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); }; - union - { void(*util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216); + union { + void(*util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_V210_to_P216)(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216); }; - union - { void(*util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210); + union { + void(*util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_util_P216_to_V210)(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210); }; // V4.1 - union - { int (*routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms); + union { + int (*routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms); PROCESSINGNDILIB_DEPRECATED int(*NDIlib_routing_get_no_connections)(NDIlib_routing_instance_t p_instance, uint32_t timeout_in_ms); }; - union - { const NDIlib_source_t* (*routing_get_source_name)(NDIlib_routing_instance_t p_instance); + union { + const NDIlib_source_t* (*routing_get_source_name)(NDIlib_routing_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* (*NDIlib_routing_get_source_name)(NDIlib_routing_instance_t p_instance); }; - union - { NDIlib_frame_type_e(*recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, 
uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + union { + NDIlib_frame_type_e(*recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e(*NDIlib_recv_capture_v3)(NDIlib_recv_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data, NDIlib_audio_frame_v3_t* p_audio_data, NDIlib_metadata_frame_t* p_metadata, uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. }; - union - { void(*recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); + union { + void(*recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_recv_free_audio_v3)(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); }; - union - { void(*framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples); + union { + void(*framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_framesync_capture_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data, int sample_rate, int no_channels, int no_samples); }; - union - { void(*framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data); + union { + void(*framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data); PROCESSINGNDILIB_DEPRECATED void(*NDIlib_framesync_free_audio_v2)(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data); }; - union - { int(*framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance); + union { + int(*framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance); PROCESSINGNDILIB_DEPRECATED int(*NDIlib_framesync_audio_queue_depth)(NDIlib_framesync_instance_t p_instance); }; - // v4.5 - union - { bool(*recv_ptz_exposure_manual_v2)(NDIlib_framesync_instance_t p_instance, const float iris, const float gain, const float shutter_speed); - PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_exposure_manual_v2)(NDIlib_framesync_instance_t p_instance, const float iris, const float gain, const float shutter_speed); + // v5 + union { + bool(*recv_ptz_exposure_manual_v2)(NDIlib_recv_instance_t p_instance, const float iris, const float gain, const float shutter_speed); + PROCESSINGNDILIB_DEPRECATED bool(*NDIlib_recv_ptz_exposure_manual_v2)(NDIlib_recv_instance_t p_instance, const float iris, const float gain, const float shutter_speed); }; - } NDIlib_v5; typedef struct NDIlib_v5 NDIlib_v4_5; @@ -583,20 +586,21 @@ typedef struct NDIlib_v5 NDIlib_v4; typedef struct NDIlib_v5 NDIlib_v3; typedef struct NDIlib_v5 NDIlib_v2; -// Load the library +// Load the library. PROCESSINGNDILIB_API const NDIlib_v5* NDIlib_v5_load(void); PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED const NDIlib_v4_5* NDIlib_v4_5_load(void); -// Load the library +// Load the library. PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED const NDIlib_v4* NDIlib_v4_load(void); -// Load the library +// Load the library. 
PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED const NDIlib_v3* NDIlib_v3_load(void); +// Load the library. PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED const NDIlib_v2* NDIlib_v2_load(void); diff --git a/src/modules/newtek/interop/Processing.NDI.Find.h b/src/modules/newtek/interop/Processing.NDI.Find.h index 2f2d4fc1e6..f3f1a06e90 100644 --- a/src/modules/newtek/interop/Processing.NDI.Find.h +++ b/src/modules/newtek/interop/Processing.NDI.Find.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -27,17 +27,18 @@ // //*********************************************************************************************************** -// Structures and type definitions required by NDI finding -// The reference to an instance of the finder -typedef void* NDIlib_find_instance_t; +// Structures and type definitions required by NDI finding. +// The reference to an instance of the finder. +struct NDIlib_find_instance_type; +typedef struct NDIlib_find_instance_type* NDIlib_find_instance_t; -// The creation structure that is used when you are creating a finder -typedef struct NDIlib_find_create_t -{ // Do we want to include the list of NDI sources that are running on the local machine? If TRUE then +// The creation structure that is used when you are creating a finder. +typedef struct NDIlib_find_create_t { + // Do we want to include the list of NDI sources that are running on the local machine? If TRUE then // local sources will be visible, if FALSE then they will not. bool show_local_sources; - // Which groups do you want to search in for sources + // Which groups do you want to search in for sources. const char* p_groups; // The list of additional IP addresses that exist that we should query for sources on. 
For instance, if @@ -49,9 +50,12 @@ typedef struct NDIlib_find_create_t const char* p_extra_ips; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_find_create_t(bool show_local_sources_ = true, const char* p_groups_ = NULL, const char* p_extra_ips_ = NULL); + NDIlib_find_create_t( + bool show_local_sources_ = true, + const char* p_groups_ = NULL, + const char* p_extra_ips_ = NULL + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_find_create_t; //*********************************************************************************************************** diff --git a/src/modules/newtek/interop/Processing.NDI.FrameSync.h b/src/modules/newtek/interop/Processing.NDI.FrameSync.h index d7527a7705..6a8b193e92 100644 --- a/src/modules/newtek/interop/Processing.NDI.FrameSync.h +++ b/src/modules/newtek/interop/Processing.NDI.FrameSync.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -74,7 +74,8 @@ // so that they can be ISO edited, then you might want a frame-sync. // The type instance for a frame-synchronizer. -typedef void* NDIlib_framesync_instance_t; +struct NDIlib_framesync_instance_type; +typedef struct NDIlib_framesync_instance_type* NDIlib_framesync_instance_t; // Create a frame synchronizer instance that can be used to get frames from a receiver. Once this receiver // has been bound to a frame-sync then you should use it in order to receive video frames. You can continue @@ -101,36 +102,30 @@ void NDIlib_framesync_destroy(NDIlib_framesync_instance_t p_instance); // // NDIlib_framesync_capture_audio(p_instance, p_audio_data, 0, 0, 0); // -// will return in p_audio_data the current received audio format if there is one or sample-rate and +// will return in p_audio_data the current received audio format if there is one or sample_rate and // no_channels equal to zero if there is not one. At any time you can specify sample_rate and no_channels as // zero and it will return the current received audio format. // PROCESSINGNDILIB_API -void NDIlib_framesync_capture_audio(// The frame sync instance data. - NDIlib_framesync_instance_t p_instance, - // The destination audio buffer that you wish to have filled in. - NDIlib_audio_frame_v2_t* p_audio_data, - // Your desired sample rate, number of channels and the number of desired samples. 
- int sample_rate, int no_channels, int no_samples); +void NDIlib_framesync_capture_audio( + NDIlib_framesync_instance_t p_instance, + NDIlib_audio_frame_v2_t* p_audio_data, + int sample_rate, int no_channels, int no_samples +); PROCESSINGNDILIB_API -void NDIlib_framesync_capture_audio_v2(// The frame sync instance data. - NDIlib_framesync_instance_t p_instance, - // The destination audio buffer that you wish to have filled in. - NDIlib_audio_frame_v3_t* p_audio_data, - // Your desired sample rate, number of channels and the number of desired samples. - int sample_rate, int no_channels, int no_samples); +void NDIlib_framesync_capture_audio_v2( + NDIlib_framesync_instance_t p_instance, + NDIlib_audio_frame_v3_t* p_audio_data, + int sample_rate, int no_channels, int no_samples +); // Free audio returned by NDIlib_framesync_capture_audio. PROCESSINGNDILIB_API -void NDIlib_framesync_free_audio(// The frame sync instance data. - NDIlib_framesync_instance_t p_instance, - // The destination audio buffer that you wish to have filled in. - NDIlib_audio_frame_v2_t* p_audio_data); +void NDIlib_framesync_free_audio(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v2_t* p_audio_data); + +// Free audio returned by NDIlib_framesync_capture_audio_v2. PROCESSINGNDILIB_API -void NDIlib_framesync_free_audio_v2(// The frame sync instance data. - NDIlib_framesync_instance_t p_instance, - // The destination audio buffer that you wish to have filled in. - NDIlib_audio_frame_v3_t* p_audio_data); +void NDIlib_framesync_free_audio_v2(NDIlib_framesync_instance_t p_instance, NDIlib_audio_frame_v3_t* p_audio_data); // This function will tell you the approximate current depth of the audio queue to give you an indication // of the number of audio samples you can request. Note that if you should treat the results of this function @@ -166,17 +161,12 @@ int NDIlib_framesync_audio_queue_depth(NDIlib_framesync_instance_t p_instance); // format, or black. // PROCESSINGNDILIB_API -void NDIlib_framesync_capture_video(// The frame sync instance data. - NDIlib_framesync_instance_t p_instance, - // The destination video buffer that you wish to have filled in. - NDIlib_video_frame_v2_t* p_video_data, - // The frame type that you would prefer, all effort is made to match these. - NDIlib_frame_format_type_e field_type NDILIB_CPP_DEFAULT_VALUE(NDIlib_frame_format_type_progressive)); +void NDIlib_framesync_capture_video( + NDIlib_framesync_instance_t p_instance, + NDIlib_video_frame_v2_t* p_video_data, + NDIlib_frame_format_type_e field_type NDILIB_CPP_DEFAULT_VALUE(NDIlib_frame_format_type_progressive) +); // Free audio returned by NDIlib_framesync_capture_video. -// PROCESSINGNDILIB_API -void NDIlib_framesync_free_video(// The frame sync instance data. - NDIlib_framesync_instance_t p_instance, - // The destination video buffer that you wish to have filled in. - NDIlib_video_frame_v2_t* p_video_data); +void NDIlib_framesync_free_video(NDIlib_framesync_instance_t p_instance, NDIlib_video_frame_v2_t* p_video_data); diff --git a/src/modules/newtek/interop/Processing.NDI.Lib.cplusplus.h b/src/modules/newtek/interop/Processing.NDI.Lib.cplusplus.h index aa5a8cde73..25e6485a01 100644 --- a/src/modules/newtek/interop/Processing.NDI.Lib.cplusplus.h +++ b/src/modules/newtek/interop/Processing.NDI.Lib.cplusplus.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. 
Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including diff --git a/src/modules/newtek/interop/Processing.NDI.Lib.h b/src/modules/newtek/interop/Processing.NDI.Lib.h index 7f2580a4a9..35b7cbd6d5 100644 --- a/src/modules/newtek/interop/Processing.NDI.Lib.h +++ b/src/modules/newtek/interop/Processing.NDI.Lib.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -50,18 +50,18 @@ # ifdef _WIN64 # define NDILIB_LIBRARY_NAME "Processing.NDI.Lib.x64.dll" # define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V5" -# define NDILIB_REDIST_URL "http://new.tk/NDIRedistV5" +# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV5" # else // _WIN64 # define NDILIB_LIBRARY_NAME "Processing.NDI.Lib.x86.dll" # define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V5" -# define NDILIB_REDIST_URL "http://new.tk/NDIRedistV5" +# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV5" # endif // _WIN64 # endif // PROCESSINGNDILIB_EXPORTS # else // _WIN32 # ifdef __APPLE__ # define NDILIB_LIBRARY_NAME "libndi.dylib" # define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V5" -# define NDILIB_REDIST_URL "http://new.tk/NDIRedistV5Apple" +# define NDILIB_REDIST_URL "http://ndi.link/NDIRedistV5Apple" # else // __APPLE__ # define NDILIB_LIBRARY_NAME "libndi.so.5" # define NDILIB_REDIST_FOLDER "NDI_RUNTIME_DIR_V5" @@ -103,7 +103,7 @@ # endif // __cplusplus #endif // NDILIB_CPP_DEFAULT_VALUE -// Data structures shared by multiple SDKs +// Data structures shared by multiple SDKs. 
#include "Processing.NDI.compat.h" #include "Processing.NDI.structs.h" @@ -126,34 +126,34 @@ const char* NDIlib_version(void); PROCESSINGNDILIB_API bool NDIlib_is_supported_CPU(void); -// The finding (discovery API) +// The finding (discovery API). #include "Processing.NDI.Find.h" -// The receiving video and audio API +// The receiving video and audio API. #include "Processing.NDI.Recv.h" // Extensions to support PTZ control, etc... #include "Processing.NDI.Recv.ex.h" -// The sending video API +// The sending video API. #include "Processing.NDI.Send.h" -// The routing of inputs API +// The routing of inputs API. #include "Processing.NDI.Routing.h" -// Utility functions +// Utility functions. #include "Processing.NDI.utilities.h" -// Deprecated structures and functions +// Deprecated structures and functions. #include "Processing.NDI.deprecated.h" -// The frame synchronizer +// The frame synchronizer. #include "Processing.NDI.FrameSync.h" -// Dynamic loading used for OSS libraries +// Dynamic loading used for OSS libraries. #include "Processing.NDI.DynamicLoad.h" -// The C++ implementations +// The C++ implementations. #if NDILIB_CPP_DEFAULT_CONSTRUCTORS #include "Processing.NDI.Lib.cplusplus.h" #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS diff --git a/src/modules/newtek/interop/Processing.NDI.Recv.ex.h b/src/modules/newtek/interop/Processing.NDI.Recv.ex.h index 7fb9c9c00b..1b633c5021 100644 --- a/src/modules/newtek/interop/Processing.NDI.Recv.ex.h +++ b/src/modules/newtek/interop/Processing.NDI.Recv.ex.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -29,7 +29,7 @@ // Has this receiver got PTZ control. Note that it might take a second or two after the connection for this // value to be set. To avoid the need to poll this function, you can know when the value of this function -// might have changed when the NDILib_recv_capture* call would return NDIlib_frame_type_status_change +// might have changed when the NDILib_recv_capture* call would return NDIlib_frame_type_status_change. PROCESSINGNDILIB_API bool NDIlib_recv_ptz_is_supported(NDIlib_recv_instance_t p_instance); @@ -48,24 +48,24 @@ bool NDIlib_recv_ptz_is_supported(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED bool NDIlib_recv_recording_is_supported(NDIlib_recv_instance_t p_instance); -// PTZ Controls +// PTZ Controls. // Zoom to an absolute value. // zoom_value = 0.0 (zoomed in) ... 
1.0 (zoomed out) PROCESSINGNDILIB_API bool NDIlib_recv_ptz_zoom(NDIlib_recv_instance_t p_instance, const float zoom_value); -// Zoom at a particular speed +// Zoom at a particular speed. // zoom_speed = -1.0 (zoom outwards) ... +1.0 (zoom inwards) PROCESSINGNDILIB_API bool NDIlib_recv_ptz_zoom_speed(NDIlib_recv_instance_t p_instance, const float zoom_speed); -// Set the pan and tilt to an absolute value +// Set the pan and tilt to an absolute value. // pan_value = -1.0 (left) ... 0.0 (centered) ... +1.0 (right) // tilt_value = -1.0 (bottom) ... 0.0 (centered) ... +1.0 (top) PROCESSINGNDILIB_API bool NDIlib_recv_ptz_pan_tilt(NDIlib_recv_instance_t p_instance, const float pan_value, const float tilt_value); -// Set the pan and tilt direction and speed +// Set the pan and tilt direction and speed. // pan_speed = -1.0 (moving right) ... 0.0 (stopped) ... +1.0 (moving left) // tilt_speed = -1.0 (down) ... 0.0 (stopped) ... +1.0 (moving up) PROCESSINGNDILIB_API @@ -78,11 +78,11 @@ bool NDIlib_recv_ptz_store_preset(NDIlib_recv_instance_t p_instance, const int p // Recall a preset, including position, focus, etc... // preset_no = 0 ... 99 -// speed = 0.0(as slow as possible) ... 1.0(as fast as possible) The speed at which to move to the new preset +// speed = 0.0(as slow as possible) ... 1.0(as fast as possible) The speed at which to move to the new preset. PROCESSINGNDILIB_API bool NDIlib_recv_ptz_recall_preset(NDIlib_recv_instance_t p_instance, const int preset_no, const float speed); -// Put the camera in auto-focus +// Put the camera in auto-focus. PROCESSINGNDILIB_API bool NDIlib_recv_ptz_auto_focus(NDIlib_recv_instance_t p_instance); @@ -91,50 +91,53 @@ bool NDIlib_recv_ptz_auto_focus(NDIlib_recv_instance_t p_instance); PROCESSINGNDILIB_API bool NDIlib_recv_ptz_focus(NDIlib_recv_instance_t p_instance, const float focus_value); -// Focus at a particular speed +// Focus at a particular speed. // focus_speed = -1.0 (focus outwards) ... +1.0 (focus inwards) PROCESSINGNDILIB_API bool NDIlib_recv_ptz_focus_speed(NDIlib_recv_instance_t p_instance, const float focus_speed); -// Put the camera in auto white balance mode +// Put the camera in auto white balance mode. PROCESSINGNDILIB_API bool NDIlib_recv_ptz_white_balance_auto(NDIlib_recv_instance_t p_instance); -// Put the camera in indoor white balance +// Put the camera in indoor white balance. PROCESSINGNDILIB_API bool NDIlib_recv_ptz_white_balance_indoor(NDIlib_recv_instance_t p_instance); -// Put the camera in indoor white balance +// Put the camera in outdoor white balance. PROCESSINGNDILIB_API bool NDIlib_recv_ptz_white_balance_outdoor(NDIlib_recv_instance_t p_instance); -// Use the current brightness to automatically set the current white balance +// Use the current brightness to automatically set the current white balance. PROCESSINGNDILIB_API bool NDIlib_recv_ptz_white_balance_oneshot(NDIlib_recv_instance_t p_instance); -// Set the manual camera white balance using the R, B values +// Set the manual camera white balance using the R, B values. // red = 0.0(not red) ... 1.0(very red) // blue = 0.0(not blue) ... 1.0(very blue) PROCESSINGNDILIB_API bool NDIlib_recv_ptz_white_balance_manual(NDIlib_recv_instance_t p_instance, const float red, const float blue); -// Put the camera in auto-exposure mode +// Put the camera in auto-exposure mode. PROCESSINGNDILIB_API bool NDIlib_recv_ptz_exposure_auto(NDIlib_recv_instance_t p_instance); -// Manually set the camera exposure iris +// Manually set the camera exposure iris.
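Taken together, these PTZ calls (and the exposure controls that continue below) form a simple imperative API. A minimal usage sketch, assuming pRecv is an already connected receiver whose source reports PTZ support:

#include "Processing.NDI.Lib.h"

// Recenter and reset a PTZ camera behind an existing NDI receiver instance.
static void recenter_camera(NDIlib_recv_instance_t pRecv)
{
    // Support may only become known after NDIlib_frame_type_status_change is reported.
    if (!NDIlib_recv_ptz_is_supported(pRecv))
        return;

    NDIlib_recv_ptz_zoom(pRecv, 1.0f);           // 0.0 = zoomed in ... 1.0 = zoomed out
    NDIlib_recv_ptz_pan_tilt(pRecv, 0.0f, 0.0f); // center both pan and tilt

    NDIlib_recv_ptz_auto_focus(pRecv);           // let the camera drive focus...
    NDIlib_recv_ptz_exposure_auto(pRecv);        // ...and exposure
}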
// exposure_level = 0.0(dark) ... 1.0(light) PROCESSINGNDILIB_API bool NDIlib_recv_ptz_exposure_manual(NDIlib_recv_instance_t p_instance, const float exposure_level); -// Manually set the camera exposure parameters +// Manually set the camera exposure parameters. // iris = 0.0(dark) ... 1.0(light) // gain = 0.0(dark) ... 1.0(light) // shutter_speed = 0.0(slow) ... 1.0(fast) PROCESSINGNDILIB_API -bool NDIlib_recv_ptz_exposure_manual_v2(NDIlib_recv_instance_t p_instance, const float iris, const float gain, const float shutter_speed); +bool NDIlib_recv_ptz_exposure_manual_v2( + NDIlib_recv_instance_t p_instance, + const float iris, const float gain, const float shutter_speed +); -// Recording control +// Recording control. // This will start recording.If the recorder was already recording then the message is ignored.A filename is // passed in as a "hint".Since the recorder might already be recording(or might not allow complete // flexibility over its filename), the filename might or might not be used.If the filename is empty, or not @@ -151,7 +154,7 @@ bool NDIlib_recv_recording_start(NDIlib_recv_instance_t p_instance, const char* PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED bool NDIlib_recv_recording_stop(NDIlib_recv_instance_t p_instance); -// This will control the audio level for the recording.dB is specified in decibels relative to the reference +// This will control the audio level for the recording. dB is specified in decibels relative to the reference // level of the source. Not all recording sources support controlling audio levels.For instance, a digital // audio device would not be able to avoid clipping on sources already at the wrong level, thus might not // support this message. @@ -185,20 +188,20 @@ const char* NDIlib_recv_recording_get_filename(NDIlib_recv_instance_t p_instance PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED const char* NDIlib_recv_recording_get_error(NDIlib_recv_instance_t p_instance); -// In order to get the duration +// In order to get the duration. typedef struct NDIlib_recv_recording_time_t -{ // The number of actual video frames recorded. +{ + // The number of actual video frames recorded. int64_t no_frames; - // The starting time and current largest time of the record, in UTC time, at 100ns unit intervals. This - // allows you to know the record time irrespective of frame-rate. For instance, last_time - start_time - // would give you the recording length in 100ns intervals. + // The starting time and current largest time of the record, in UTC time, at 100-nanosecond unit + // intervals. This allows you to know the record time irrespective of frame rate. For instance, + // last_time - start_time would give you the recording length in 100-nanosecond intervals. int64_t start_time, last_time; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS NDIlib_recv_recording_time_t(void); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_recv_recording_time_t; // Get the current recording times. diff --git a/src/modules/newtek/interop/Processing.NDI.Recv.h b/src/modules/newtek/interop/Processing.NDI.Recv.h index 05c6676e7e..c053bddc31 100644 --- a/src/modules/newtek/interop/Processing.NDI.Recv.h +++ b/src/modules/newtek/interop/Processing.NDI.Recv.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. 
Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -29,20 +29,21 @@ // Structures and type definitions required by NDI finding. // The reference to an instance of the receiver. -typedef void* NDIlib_recv_instance_t; +struct NDIlib_recv_instance_type; +typedef struct NDIlib_recv_instance_type* NDIlib_recv_instance_t; -typedef enum NDIlib_recv_bandwidth_e -{ NDIlib_recv_bandwidth_metadata_only = -10, // Receive metadata. +typedef enum NDIlib_recv_bandwidth_e { + NDIlib_recv_bandwidth_metadata_only = -10, // Receive metadata. NDIlib_recv_bandwidth_audio_only = 10, // Receive metadata, audio. NDIlib_recv_bandwidth_lowest = 0, // Receive metadata, audio, video at a lower bandwidth and resolution. NDIlib_recv_bandwidth_highest = 100, // Receive metadata, audio, video at full resolution. - // Ensure this is 32bits in size. + // Make sure this is a 32-bit enumeration. NDIlib_recv_bandwidth_max = 0x7fffffff } NDIlib_recv_bandwidth_e; -typedef enum NDIlib_recv_color_format_e -{ // When there is no alpha channel, this mode delivers BGRX. +typedef enum NDIlib_recv_color_format_e { + // When there is no alpha channel, this mode delivers BGRX. // When there is an alpha channel, this mode delivers BGRA. NDIlib_recv_color_format_BGRX_BGRA = 0, @@ -95,13 +96,13 @@ typedef enum NDIlib_recv_color_format_e NDIlib_recv_color_format_BGRX_BGRA_flipped = 1000 + NDIlib_recv_color_format_BGRX_BGRA, #endif - // Force the size to be 32bits. + // Make sure this is a 32-bit enumeration. NDIlib_recv_color_format_max = 0x7fffffff } NDIlib_recv_color_format_e; // The creation structure that is used when you are creating a receiver. -typedef struct NDIlib_recv_create_v3_t -{ // The source that you wish to connect to. +typedef struct NDIlib_recv_create_v3_t { + // The source that you wish to connect to. NDIlib_source_t source_to_connect_to; // Your preference of color space. See above. 
@@ -125,8 +126,13 @@ typedef struct NDIlib_recv_create_v3_t const char* p_ndi_recv_name; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_recv_create_v3_t(const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(), NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA, - NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest, bool allow_video_fields_ = true, const char* p_ndi_name_ = NULL); + NDIlib_recv_create_v3_t( + const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(), + NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA, + NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest, + bool allow_video_fields_ = true, + const char* p_ndi_name_ = NULL + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS } NDIlib_recv_create_v3_t; @@ -134,8 +140,8 @@ typedef struct NDIlib_recv_create_v3_t // This allows you determine the current performance levels of the receiving to be able to detect whether // frames have been dropped. -typedef struct NDIlib_recv_performance_t -{ // The number of video frames. +typedef struct NDIlib_recv_performance_t { + // The number of video frames. int64_t video_frames; // The number of audio frames. @@ -147,12 +153,11 @@ typedef struct NDIlib_recv_performance_t #if NDILIB_CPP_DEFAULT_CONSTRUCTORS NDIlib_recv_performance_t(void); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_recv_performance_t; // Get the current queue depths. -typedef struct NDIlib_recv_queue_t -{ // The number of video frames. +typedef struct NDIlib_recv_queue_t { + // The number of video frames. int video_frames; // The number of audio frames. @@ -164,7 +169,6 @@ typedef struct NDIlib_recv_queue_t #if NDILIB_CPP_DEFAULT_CONSTRUCTORS NDIlib_recv_queue_t(void); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_recv_queue_t; //************************************************************************************************************************** @@ -194,7 +198,8 @@ NDIlib_frame_type_e NDIlib_recv_capture_v2( NDIlib_video_frame_v2_t* p_video_data, // The video data received (can be NULL). NDIlib_audio_frame_v2_t* p_audio_data, // The audio data received (can be NULL). NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL). - uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data. +); // This will allow you to receive video, audio and metadata frames. Any of the buffers can be NULL, in which // case data of that type will not be captured in this call. This call can be called simultaneously on @@ -208,7 +213,8 @@ NDIlib_frame_type_e NDIlib_recv_capture_v3( NDIlib_video_frame_v2_t* p_video_data, // The video data received (can be NULL). NDIlib_audio_frame_v3_t* p_audio_data, // The audio data received (can be NULL). NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL). - uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data. +); // Free the buffers returned by capture for video. PROCESSINGNDILIB_API @@ -246,7 +252,10 @@ bool NDIlib_recv_set_tally(NDIlib_recv_instance_t p_instance, const NDIlib_tally // structure will give you the total frame counts received, the dropped structure will tell you how many // frames have been dropped. Either of these could be NULL. 
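A minimal sketch of one iteration of a receive loop built on NDIlib_recv_capture_v3 as declared above; the receiver is assumed to come from NDIlib_recv_create_v3, and the exact names of the free functions (NDIlib_recv_free_video_v2, NDIlib_recv_free_audio_v3, NDIlib_recv_free_metadata) are taken from the wider SDK rather than this hunk:

#include "Processing.NDI.Lib.h"

// Poll the receiver once; every buffer returned by capture must be handed back to the SDK.
static void poll_once(NDIlib_recv_instance_t pRecv)
{
    NDIlib_video_frame_v2_t video;
    NDIlib_audio_frame_v3_t audio;
    NDIlib_metadata_frame_t meta;

    switch (NDIlib_recv_capture_v3(pRecv, &video, &audio, &meta, 1000 /* ms */)) {
        case NDIlib_frame_type_video:
            // video.p_data holds video.yres lines of video.line_stride_in_bytes bytes each.
            NDIlib_recv_free_video_v2(pRecv, &video);
            break;

        case NDIlib_frame_type_audio:
            // Planar 32-bit float by default (FourCC FLTP); see the utilities header for conversions.
            NDIlib_recv_free_audio_v3(pRecv, &audio);
            break;

        case NDIlib_frame_type_metadata:
            // meta.p_data is a NULL-terminated UTF-8 XML string.
            NDIlib_recv_free_metadata(pRecv, &meta);
            break;

        case NDIlib_frame_type_status_change:
            // E.g. the source is now known to support PTZ; re-query NDIlib_recv_ptz_is_supported.
            break;

        default:
            // NDIlib_frame_type_none: the timeout expired without data.
            break;
    }
}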
PROCESSINGNDILIB_API -void NDIlib_recv_get_performance(NDIlib_recv_instance_t p_instance, NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped); +void NDIlib_recv_get_performance( + NDIlib_recv_instance_t p_instance, + NDIlib_recv_performance_t* p_total, NDIlib_recv_performance_t* p_dropped +); // This will allow you to determine the current queue depth for all of the frame sources at any time. PROCESSINGNDILIB_API diff --git a/src/modules/newtek/interop/Processing.NDI.Routing.h b/src/modules/newtek/interop/Processing.NDI.Routing.h index 29442efb48..b42a9ea462 100644 --- a/src/modules/newtek/interop/Processing.NDI.Routing.h +++ b/src/modules/newtek/interop/Processing.NDI.Routing.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -29,35 +29,36 @@ // Structures and type definitions required by NDI routing. // The reference to an instance of the router. -typedef void* NDIlib_routing_instance_t; +struct NDIlib_routing_instance_type; +typedef struct NDIlib_routing_instance_type* NDIlib_routing_instance_t; -// The creation structure that is used when you are creating a sender +// The creation structure that is used when you are creating a router. typedef struct NDIlib_routing_create_t -{ // The name of the NDI source to create. This is a NULL terminated UTF8 string. +{ + // The name of the NDI source to create. This is a NULL terminated UTF8 string. const char* p_ndi_name; - // What groups should this source be part of + // What groups should this source be part of. const char* p_groups; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS NDIlib_routing_create_t(const char* p_ndi_name_ = NULL, const char* p_groups_ = NULL); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_routing_create_t; -// Create an NDI routing source +// Create an NDI routing source. PROCESSINGNDILIB_API NDIlib_routing_instance_t NDIlib_routing_create(const NDIlib_routing_create_t* p_create_settings); -// Destroy and NDI routing source +// Destroy an NDI routing source. PROCESSINGNDILIB_API void NDIlib_routing_destroy(NDIlib_routing_instance_t p_instance); -// Change the routing of this source to another destination +// Change the routing of this source to another destination. PROCESSINGNDILIB_API bool NDIlib_routing_change(NDIlib_routing_instance_t p_instance, const NDIlib_source_t* p_source); -// Change the routing of this source to another destination +// Clear the routing of this source.
PROCESSINGNDILIB_API bool NDIlib_routing_clear(NDIlib_routing_instance_t p_instance); diff --git a/src/modules/newtek/interop/Processing.NDI.Send.h b/src/modules/newtek/interop/Processing.NDI.Send.h index a776d90b42..00eb046c47 100644 --- a/src/modules/newtek/interop/Processing.NDI.Send.h +++ b/src/modules/newtek/interop/Processing.NDI.Send.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -27,29 +27,33 @@ // //*********************************************************************************************************** -// Structures and type definitions required by NDI sending -// The reference to an instance of the sender -typedef void* NDIlib_send_instance_t; +// Structures and type definitions required by NDI sending. +// The reference to an instance of the sender. +struct NDIlib_send_instance_type; +typedef struct NDIlib_send_instance_type* NDIlib_send_instance_t; -// The creation structure that is used when you are creating a sender -typedef struct NDIlib_send_create_t -{ // The name of the NDI source to create. This is a NULL terminated UTF8 string. +// The creation structure that is used when you are creating a sender. +typedef struct NDIlib_send_create_t { + // The name of the NDI source to create. This is a NULL terminated UTF8 string. const char* p_ndi_name; // What groups should this source be part of. NULL means default. const char* p_groups; // Do you want audio and video to "clock" themselves. When they are clocked then by adding video frames, - // they will be rate limited to match the current frame-rate that you are submitting at. The same is true + // they will be rate limited to match the current frame rate that you are submitting at. The same is true // for audio. In general if you are submitting video and audio off a single thread then you should only // clock one of them (video is probably the better of the two to clock off). If you are submitting audio // and video of separate threads then having both clocked can be useful. bool clock_video, clock_audio; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_send_create_t(const char* p_ndi_name_ = NULL, const char* p_groups_ = NULL, bool clock_video_ = true, bool clock_audio_ = true); + NDIlib_send_create_t( + const char* p_ndi_name_ = NULL, + const char* p_groups_ = NULL, + bool clock_video_ = true, bool clock_audio_ = true + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_send_create_t; // Create a new sender instance. This will return NULL if it fails. 
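As an aside on the clocking comment above, a minimal sketch of a video-clocked sender submitting a single frame; the source name, resolution, and buffer handling are illustrative only:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include "Processing.NDI.Lib.h"

static void send_one_blank_frame(void)
{
    NDIlib_send_create_t send_desc;
    memset(&send_desc, 0, sizeof(send_desc));
    send_desc.p_ndi_name = "Example output";   // illustrative source name
    send_desc.clock_video = true;              // rate-limit submission to the frame rate below
    send_desc.clock_audio = false;             // only clock one stream per submitting thread

    NDIlib_send_instance_t pSend = NDIlib_send_create(&send_desc);
    if (!pSend)
        return;

    NDIlib_video_frame_v2_t frame;
    memset(&frame, 0, sizeof(frame));
    frame.xres = 1920;
    frame.yres = 1080;
    frame.FourCC = NDIlib_FourCC_video_type_UYVY;
    frame.frame_rate_N = 30000;
    frame.frame_rate_D = 1001;                      // 29.97 fps
    frame.frame_format_type = NDIlib_frame_format_type_progressive;
    frame.timecode = NDIlib_send_timecode_synthesize;
    frame.line_stride_in_bytes = frame.xres * 2;    // UYVY is 2 bytes per pixel
    // A zeroed buffer; a real application fills this with UYVY pixel data.
    frame.p_data = (uint8_t*)calloc((size_t)frame.line_stride_in_bytes * frame.yres, 1);

    NDIlib_send_send_video_v2(pSend, &frame);       // clocked, so this paces submission

    free(frame.p_data);
    NDIlib_send_destroy(pSend);
}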
If you specify leave p_create_settings @@ -61,7 +65,7 @@ NDIlib_send_instance_t NDIlib_send_create(const NDIlib_send_create_t* p_create_s PROCESSINGNDILIB_API void NDIlib_send_destroy(NDIlib_send_instance_t p_instance); -// This will add a video frame +// This will add a video frame. PROCESSINGNDILIB_API void NDIlib_send_send_video_v2(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); @@ -82,26 +86,27 @@ void NDIlib_send_send_video_v2(NDIlib_send_instance_t p_instance, const NDIlib_v PROCESSINGNDILIB_API void NDIlib_send_send_video_async_v2(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_v2_t* p_video_data); -// This will add an audio frame +// This will add an audio frame. PROCESSINGNDILIB_API void NDIlib_send_send_audio_v2(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v2_t* p_audio_data); -// This will add an audio frame +// This will add an audio frame. PROCESSINGNDILIB_API void NDIlib_send_send_audio_v3(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_v3_t* p_audio_data); -// This will add a metadata frame +// This will add a metadata frame. PROCESSINGNDILIB_API void NDIlib_send_send_metadata(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); -// This allows you to receive metadata from the other end of the connection +// This allows you to receive metadata from the other end of the connection. PROCESSINGNDILIB_API NDIlib_frame_type_e NDIlib_send_capture( - NDIlib_send_instance_t p_instance, // The instance data - NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL) - uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + NDIlib_send_instance_t p_instance, // The instance data. + NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL). + uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data. +); -// Free the buffers returned by capture for metadata +// Free the buffers returned by capture for metadata. PROCESSINGNDILIB_API void NDIlib_send_free_metadata(NDIlib_send_instance_t p_instance, const NDIlib_metadata_frame_t* p_metadata); diff --git a/src/modules/newtek/interop/Processing.NDI.compat.h b/src/modules/newtek/interop/Processing.NDI.compat.h index 093076e273..19cd00d95a 100644 --- a/src/modules/newtek/interop/Processing.NDI.compat.h +++ b/src/modules/newtek/interop/Processing.NDI.compat.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including diff --git a/src/modules/newtek/interop/Processing.NDI.deprecated.h b/src/modules/newtek/interop/Processing.NDI.deprecated.h index 1bea5ae647..4ad2137fa9 100644 --- a/src/modules/newtek/interop/Processing.NDI.deprecated.h +++ b/src/modules/newtek/interop/Processing.NDI.deprecated.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -29,67 +29,74 @@ // This describes a video frame PROCESSINGNDILIB_DEPRECATED -typedef struct NDIlib_video_frame_t -{ // The resolution of this frame +typedef struct NDIlib_video_frame_t { + // The resolution of this frame. int xres, yres; - // What FourCC this is with. This can be two values + // What FourCC this is with. This can be two values. NDIlib_FourCC_video_type_e FourCC; - // What is the frame-rate of this frame. - // For instance NTSC is 30000,1001 = 30000/1001 = 29.97fps + // What is the frame rate of this frame. + // For instance NTSC is 30000,1001 = 30000/1001 = 29.97 fps int frame_rate_N, frame_rate_D; // What is the picture aspect ratio of this frame. - // For instance 16.0/9.0 = 1.778 is 16:9 video. If this is zero, then square pixels are assumed (xres/yres) + // For instance 16.0/9.0 = 1.778 is 16:9 video. If this is zero, then square pixels are assumed (xres/yres). float picture_aspect_ratio; - // Is this a fielded frame, or is it progressive + // Is this a fielded frame, or is it progressive. NDIlib_frame_format_type_e frame_format_type; - // The timecode of this frame in 100ns intervals + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; - // The video data itself + // The video data itself. uint8_t* p_data; - // The inter line stride of the video data, in bytes. + // The inter-line stride of the video data, in bytes. 
int line_stride_in_bytes; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_video_frame_t(int xres_ = 0, int yres_ = 0, NDIlib_FourCC_video_type_e FourCC_ = NDIlib_FourCC_type_UYVY, int frame_rate_N_ = 30000, int frame_rate_D_ = 1001, - float picture_aspect_ratio_ = 0.0f, NDIlib_frame_format_type_e frame_format_type_ = NDIlib_frame_format_type_progressive, - int64_t timecode_ = NDIlib_send_timecode_synthesize, uint8_t* p_data_ = NULL, int line_stride_in_bytes_ = 0); + NDIlib_video_frame_t( + int xres_ = 0, int yres_ = 0, + NDIlib_FourCC_video_type_e FourCC_ = NDIlib_FourCC_type_UYVY, + int frame_rate_N_ = 30000, int frame_rate_D_ = 1001, + float picture_aspect_ratio_ = 0.0f, + NDIlib_frame_format_type_e frame_format_type_ = NDIlib_frame_format_type_progressive, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + uint8_t* p_data_ = NULL, int line_stride_in_bytes_ = 0 + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_video_frame_t; // This describes an audio frame PROCESSINGNDILIB_DEPRECATED -typedef struct NDIlib_audio_frame_t -{ // The sample-rate of this buffer +typedef struct NDIlib_audio_frame_t { + // The sample-rate of this buffer. int sample_rate; - // The number of audio channels + // The number of audio channels. int no_channels; - // The number of audio samples per channel + // The number of audio samples per channel. int no_samples; - // The timecode of this frame in 100ns intervals + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; - // The audio data + // The audio data. float* p_data; - // The inter channel stride of the audio channels, in bytes + // The inter channel stride of the audio channels, in bytes. int channel_stride_in_bytes; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_audio_frame_t(int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, - float* p_data_ = NULL, int channel_stride_in_bytes_ = 0); + NDIlib_audio_frame_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + float* p_data_ = NULL, int channel_stride_in_bytes_ = 0 + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_audio_frame_t; // For legacy reasons I called this the wrong thing. For backwards compatibility. @@ -106,10 +113,10 @@ NDIlib_find_instance_t NDIlib_find_create(const NDIlib_find_create_t* p_create_s PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED const NDIlib_source_t* NDIlib_find_get_sources(NDIlib_find_instance_t p_instance, uint32_t* p_no_sources, uint32_t timeout_in_ms); -// The creation structure that is used when you are creating a receiver +// The creation structure that is used when you are creating a receiver. PROCESSINGNDILIB_DEPRECATED -typedef struct NDIlib_recv_create_t -{ // The source that you wish to connect to. +typedef struct NDIlib_recv_create_t { + // The source that you wish to connect to. NDIlib_source_t source_to_connect_to; // Your preference of color space. See above. 
@@ -127,10 +134,13 @@ typedef struct NDIlib_recv_create_t bool allow_video_fields; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_recv_create_t(const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(), NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA, - NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest, bool allow_video_fields_ = true); + NDIlib_recv_create_t( + const NDIlib_source_t source_to_connect_to_ = NDIlib_source_t(), + NDIlib_recv_color_format_e color_format_ = NDIlib_recv_color_format_UYVY_BGRA, + NDIlib_recv_bandwidth_e bandwidth_ = NDIlib_recv_bandwidth_highest, + bool allow_video_fields_ = true + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_recv_create_t; // This function is deprecated, please use NDIlib_recv_create_v3 if you can. Using this function will @@ -159,21 +169,22 @@ NDIlib_recv_instance_t NDIlib_recv_create(const NDIlib_recv_create_t* p_create_s // appropriate free function below. PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED NDIlib_frame_type_e NDIlib_recv_capture( -NDIlib_recv_instance_t p_instance, // The library instance -NDIlib_video_frame_t* p_video_data, // The video data received (can be NULL) -NDIlib_audio_frame_t* p_audio_data, // The audio data received (can be NULL) -NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL) -uint32_t timeout_in_ms); // The amount of time in milliseconds to wait for data. + NDIlib_recv_instance_t p_instance, // The library instance. + NDIlib_video_frame_t* p_video_data, // The video data received (can be NULL). + NDIlib_audio_frame_t* p_audio_data, // The audio data received (can be NULL). + NDIlib_metadata_frame_t* p_metadata, // The metadata received (can be NULL). + uint32_t timeout_in_ms // The amount of time in milliseconds to wait for data. +); -// Free the buffers returned by capture for video +// Free the buffers returned by capture for video. PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED void NDIlib_recv_free_video(NDIlib_recv_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); -// Free the buffers returned by capture for audio +// Free the buffers returned by capture for audio. PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED void NDIlib_recv_free_audio(NDIlib_recv_instance_t p_instance, const NDIlib_audio_frame_t* p_audio_data); -// This will add a video frame +// This will add a video frame. PROCESSINGNDILIB_API PROCESSINGNDILIB_DEPRECATED void NDIlib_send_send_video(NDIlib_send_instance_t p_instance, const NDIlib_video_frame_t* p_video_data); diff --git a/src/modules/newtek/interop/Processing.NDI.structs.h b/src/modules/newtek/interop/Processing.NDI.structs.h index 6d5dcbb7c0..1d69ff2ae6 100644 --- a/src/modules/newtek/interop/Processing.NDI.structs.h +++ b/src/modules/newtek/interop/Processing.NDI.structs.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. 
The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -32,9 +32,9 @@ ((uint32_t)(uint8_t)(ch0) | ((uint32_t)(uint8_t)(ch1) << 8) | ((uint32_t)(uint8_t)(ch2) << 16) | ((uint32_t)(uint8_t)(ch3) << 24)) #endif -// An enumeration to specify the type of a packet returned by the functions -typedef enum NDIlib_frame_type_e -{ // What frame type is this? +// An enumeration to specify the type of a packet returned by the functions. +typedef enum NDIlib_frame_type_e { + // What frame type is this? NDIlib_frame_type_none = 0, NDIlib_frame_type_video = 1, NDIlib_frame_type_audio = 2, @@ -46,13 +46,13 @@ typedef enum NDIlib_frame_type_e // instance the web URL has changed or the device is now known to be a PTZ camera. NDIlib_frame_type_status_change = 100, - // Ensure that the size is 32bits + // Make sure this is a 32-bit enumeration. NDIlib_frame_type_max = 0x7fffffff } NDIlib_frame_type_e; -// FourCC values for video frames -typedef enum NDIlib_FourCC_video_type_e -{ // YCbCr color space using 4:2:2. +// FourCC values for video frames. +typedef enum NDIlib_FourCC_video_type_e { + // YCbCr color space using 4:2:2. NDIlib_FourCC_video_type_UYVY = NDI_LIB_FOURCC('U', 'Y', 'V', 'Y'), NDIlib_FourCC_type_UYVY = NDIlib_FourCC_video_type_UYVY, @@ -63,14 +63,14 @@ typedef enum NDIlib_FourCC_video_type_e NDIlib_FourCC_video_type_UYVA = NDI_LIB_FOURCC('U', 'Y', 'V', 'A'), NDIlib_FourCC_type_UYVA = NDIlib_FourCC_video_type_UYVA, - // YCbCr color space using 4:2:2 in 16bpp + // YCbCr color space using 4:2:2 in 16bpp. // In memory this is a semi-planar format. This is identical to a 16bpp version of the NV16 format. // The first buffer is a 16bpp luminance buffer. // Immediately after this is an interleaved buffer of 16bpp Cb, Cr pairs. NDIlib_FourCC_video_type_P216 = NDI_LIB_FOURCC('P', '2', '1', '6'), NDIlib_FourCC_type_P216 = NDIlib_FourCC_video_type_P216, - // YCbCr color space with an alpha channel, using 4:2:2:4 + // YCbCr color space with an alpha channel, using 4:2:2:4. // In memory this is a semi-planar format. // The first buffer is a 16bpp luminance buffer. // Immediately after this is an interleaved buffer of 16bpp Cb, Cr pairs. @@ -113,41 +113,41 @@ typedef enum NDIlib_FourCC_video_type_e NDIlib_FourCC_type_RGBA = NDIlib_FourCC_video_type_RGBA, // Planar 8bit, 4:4:4 video format, packed into 32bit pixels. - // Color ordering in memory is red, green, blue, 255 + // Color ordering in memory is red, green, blue, 255. NDIlib_FourCC_video_type_RGBX = NDI_LIB_FOURCC('R', 'G', 'B', 'X'), NDIlib_FourCC_type_RGBX = NDIlib_FourCC_video_type_RGBX, - // Ensure that the size is 32bits + // Make sure this is a 32-bit enumeration. NDIlib_FourCC_video_type_max = 0x7fffffff } NDIlib_FourCC_video_type_e; -// Really for backwards compatibility +// Really for backwards compatibility. PROCESSINGNDILIB_DEPRECATED typedef NDIlib_FourCC_video_type_e NDIlib_FourCC_type_e; -// FourCC values for audio frames -typedef enum NDIlib_FourCC_audio_type_e -{ // Planar 32-bit floating point. Be sure to specify the channel stride. +// FourCC values for audio frames. 
+typedef enum NDIlib_FourCC_audio_type_e { + // Planar 32-bit floating point. Be sure to specify the channel stride. NDIlib_FourCC_audio_type_FLTP = NDI_LIB_FOURCC('F', 'L', 'T', 'p'), NDIlib_FourCC_type_FLTP = NDIlib_FourCC_audio_type_FLTP, - // Ensure that the size is 32bits + // Make sure this is a 32-bit enumeration. NDIlib_FourCC_audio_type_max = 0x7fffffff } NDIlib_FourCC_audio_type_e; -typedef enum NDIlib_frame_format_type_e -{ // A progressive frame +typedef enum NDIlib_frame_format_type_e { + // A progressive frame. NDIlib_frame_format_type_progressive = 1, // A fielded frame with the field 0 being on the even lines and field 1 being - // on the odd lines/ + // on the odd lines. NDIlib_frame_format_type_interleaved = 0, - // Individual fields + // Individual fields. NDIlib_frame_format_type_field_0 = 2, NDIlib_frame_format_type_field_1 = 3, - // Ensure that the size is 32bits + // Make sure this is a 32-bit enumeration. NDIlib_frame_format_type_max = 0x7fffffff } NDIlib_frame_format_type_e; @@ -175,12 +175,12 @@ typedef enum NDIlib_frame_format_type_e // since it was sent. static const int64_t NDIlib_send_timecode_synthesize = INT64_MAX; -// If the time-stamp is not available (i.e. a version of a sender before v2.5) +// If the time-stamp is not available (i.e. a version of a sender before v2.5). static const int64_t NDIlib_recv_timestamp_undefined = INT64_MAX; // This is a descriptor of a NDI source available on the network. -typedef struct NDIlib_source_t -{ // A UTF8 string that provides a user readable name for this source. This can be used for serialization, +typedef struct NDIlib_source_t { + // A UTF8 string that provides a user readable name for this source. This can be used for serialization, // etc... and comprises the machine name and the source name on that machine. In the form, // MACHINE_NAME (NDI_SOURCE_NAME) // If you specify this parameter either as NULL, or an EMPTY string then the specific IP address and port @@ -191,8 +191,7 @@ typedef struct NDIlib_source_t // application readable and might well change in the future. This can be NULL if you do not know it and // the API internally will instantiate a finder that is used to discover it even if it is not yet // available on the network. - union - { // The current way of addressing the value + union { // The current way of addressing the value. const char* p_url_address; // We used to use an IP address before we used the more general URL notification this is now @@ -200,41 +199,38 @@ typedef struct NDIlib_source_t PROCESSINGNDILIB_DEPRECATED const char* p_ip_address; }; - // Default constructor in C++ #if NDILIB_CPP_DEFAULT_CONSTRUCTORS NDIlib_source_t(const char* p_ndi_name_ = NULL, const char* p_url_address_ = NULL); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_source_t; -// This describes a video frame -typedef struct NDIlib_video_frame_v2_t -{ // The resolution of this frame +// This describes a video frame. +typedef struct NDIlib_video_frame_v2_t { + // The resolution of this frame. int xres, yres; - // What FourCC describing the type of data for this frame + // What FourCC describing the type of data for this frame. NDIlib_FourCC_video_type_e FourCC; - // What is the frame-rate of this frame. - // For instance NTSC is 30000,1001 = 30000/1001 = 29.97fps + // What is the frame rate of this frame. + // For instance NTSC is 30000,1001 = 30000/1001 = 29.97 fps. int frame_rate_N, frame_rate_D; // What is the picture aspect ratio of this frame. 
// For instance 16.0/9.0 = 1.778 is 16:9 video - // 0 means square pixels + // 0 means square pixels. float picture_aspect_ratio; - // Is this a fielded frame, or is it progressive + // Is this a fielded frame, or is it progressive. NDIlib_frame_format_type_e frame_format_type; - // The timecode of this frame in 100ns intervals + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; - // The video data itself + // The video data itself. uint8_t* p_data; - union - { // If the FourCC is not a compressed type, then this will be the inter-line stride of the video data + union { // If the FourCC is not a compressed type, then this will be the inter-line stride of the video data // in bytes. If the stride is 0, then it will default to sizeof(one pixel)*xres. int line_stride_in_bytes; @@ -246,77 +242,88 @@ typedef struct NDIlib_video_frame_v2_t // If you do not want any metadata then you may specify NULL here. const char* p_metadata; // Present in >= v2.5 - // This is only valid when receiving a frame and is specified as a 100ns time that was the exact moment - // that the frame was submitted by the sending side and is generated by the SDK. If this value is + // This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact + // moment that the frame was submitted by the sending side and is generated by the SDK. If this value is // NDIlib_recv_timestamp_undefined then this value is not available and is NDIlib_recv_timestamp_undefined. int64_t timestamp; // Present in >= v2.5 #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_video_frame_v2_t(int xres_ = 0, int yres_ = 0, NDIlib_FourCC_video_type_e FourCC_ = NDIlib_FourCC_video_type_UYVY, int frame_rate_N_ = 30000, int frame_rate_D_ = 1001, - float picture_aspect_ratio_ = 0.0f, NDIlib_frame_format_type_e frame_format_type_ = NDIlib_frame_format_type_progressive, - int64_t timecode_ = NDIlib_send_timecode_synthesize, uint8_t* p_data_ = NULL, int line_stride_in_bytes_ = 0, const char* p_metadata_ = NULL, int64_t timestamp_ = 0); + NDIlib_video_frame_v2_t( + int xres_ = 0, int yres_ = 0, + NDIlib_FourCC_video_type_e FourCC_ = NDIlib_FourCC_video_type_UYVY, + int frame_rate_N_ = 30000, int frame_rate_D_ = 1001, + float picture_aspect_ratio_ = 0.0f, + NDIlib_frame_format_type_e frame_format_type_ = NDIlib_frame_format_type_progressive, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + uint8_t* p_data_ = NULL, int line_stride_in_bytes_ = 0, + const char* p_metadata_ = NULL, + int64_t timestamp_ = 0 + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_video_frame_v2_t; -// This describes an audio frame -typedef struct NDIlib_audio_frame_v2_t -{ // The sample-rate of this buffer +// This describes an audio frame. +typedef struct NDIlib_audio_frame_v2_t { + // The sample-rate of this buffer. int sample_rate; - // The number of audio channels + // The number of audio channels. int no_channels; - // The number of audio samples per channel + // The number of audio samples per channel. int no_samples; - // The timecode of this frame in 100ns intervals + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; - // The audio data + // The audio data. float* p_data; - // The inter channel stride of the audio channels, in bytes + // The inter channel stride of the audio channels, in bytes. int channel_stride_in_bytes; // Per frame metadata for this frame. This is a NULL terminated UTF8 string that should be in XML format. 
// If you do not want any metadata then you may specify NULL here. const char* p_metadata; // Present in >= v2.5 - // This is only valid when receiving a frame and is specified as a 100ns time that was the exact moment - // that the frame was submitted by the sending side and is generated by the SDK. If this value is + // This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact + // moment that the frame was submitted by the sending side and is generated by the SDK. If this value is // NDIlib_recv_timestamp_undefined then this value is not available and is NDIlib_recv_timestamp_undefined. int64_t timestamp; // Present in >= v2.5 #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_audio_frame_v2_t(int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, - float* p_data_ = NULL, int channel_stride_in_bytes_ = 0, const char* p_metadata_ = NULL, int64_t timestamp_ = 0); + NDIlib_audio_frame_v2_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + float* p_data_ = NULL, int channel_stride_in_bytes_ = 0, + const char* p_metadata_ = NULL, + int64_t timestamp_ = 0 + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_audio_frame_v2_t; -// This describes an audio frame -typedef struct NDIlib_audio_frame_v3_t -{ // The sample-rate of this buffer +// This describes an audio frame. +typedef struct NDIlib_audio_frame_v3_t { + // The sample-rate of this buffer. int sample_rate; - // The number of audio channels + // The number of audio channels. int no_channels; - // The number of audio samples per channel + // The number of audio samples per channel. int no_samples; - // The timecode of this frame in 100ns intervals + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; - // What FourCC describing the type of data for this frame + // What FourCC describing the type of data for this frame. NDIlib_FourCC_audio_type_e FourCC; - // The audio data + // The audio data. uint8_t* p_data; - union - { // If the FourCC is not a compressed type and the audio format is planar, then this will be the + union { + // If the FourCC is not a compressed type and the audio format is planar, then this will be the // stride in bytes for a single channel. int channel_stride_in_bytes; @@ -328,26 +335,30 @@ typedef struct NDIlib_audio_frame_v3_t // If you do not want any metadata then you may specify NULL here. const char* p_metadata; - // This is only valid when receiving a frame and is specified as a 100ns time that was the exact moment - // that the frame was submitted by the sending side and is generated by the SDK. If this value is + // This is only valid when receiving a frame and is specified as a 100-nanosecond time that was the exact + // moment that the frame was submitted by the sending side and is generated by the SDK. If this value is // NDIlib_recv_timestamp_undefined then this value is not available and is NDIlib_recv_timestamp_undefined. 
int64_t timestamp; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_audio_frame_v3_t(int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, - NDIlib_FourCC_audio_type_e FourCC_ = NDIlib_FourCC_audio_type_FLTP, uint8_t* p_data_ = NULL, int channel_stride_in_bytes_ = 0, - const char* p_metadata_ = NULL, int64_t timestamp_ = 0); + NDIlib_audio_frame_v3_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + NDIlib_FourCC_audio_type_e FourCC_ = NDIlib_FourCC_audio_type_FLTP, + uint8_t* p_data_ = NULL, int channel_stride_in_bytes_ = 0, + const char* p_metadata_ = NULL, + int64_t timestamp_ = 0 + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_audio_frame_v3_t; -// The data description for metadata -typedef struct NDIlib_metadata_frame_t -{ // The length of the string in UTF8 characters. This includes the NULL terminating character. If this is +// The data description for metadata. +typedef struct NDIlib_metadata_frame_t { + // The length of the string in UTF8 characters. This includes the NULL terminating character. If this is // 0, then the length is assume to be the length of a NULL terminated string. int length; - // The timecode of this frame in 100ns intervals + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; // The metadata as a UTF8 XML string. This is a NULL terminated string. @@ -356,19 +367,17 @@ typedef struct NDIlib_metadata_frame_t #if NDILIB_CPP_DEFAULT_CONSTRUCTORS NDIlib_metadata_frame_t(int length_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, char* p_data_ = NULL); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_metadata_frame_t; // Tally structures -typedef struct NDIlib_tally_t -{ // Is this currently on program output +typedef struct NDIlib_tally_t { + // Is this currently on program output. bool on_program; - // Is this currently on preview output + // Is this currently on preview output. bool on_preview; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS NDIlib_tally_t(bool on_program_ = false, bool on_preview_ = false); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_tally_t; diff --git a/src/modules/newtek/interop/Processing.NDI.utilities.h b/src/modules/newtek/interop/Processing.NDI.utilities.h index 67c9a802fc..6bc4b92a8d 100644 --- a/src/modules/newtek/interop/Processing.NDI.utilities.h +++ b/src/modules/newtek/interop/Processing.NDI.utilities.h @@ -2,13 +2,13 @@ // NOTE : The following MIT license applies to this file ONLY and not to the SDK as a whole. Please review // the SDK documentation for the description of the full license terms, which are also provided in the file -// "NDI License Agreement.pdf" within the SDK or online at http://new.tk/ndisdk_license/. Your use of any +// "NDI License Agreement.pdf" within the SDK or online at http://ndi.link/ndisdk_license. Your use of any // part of this SDK is acknowledgment that you agree to the SDK license terms. The full NDI SDK may be -// downloaded at http://ndi.tv/ +// downloaded at http://ndi.video/ // //*********************************************************************************************************** // -// Copyright (C)2014-2021, NewTek, inc. +// Copyright (C) 2023 Vizrt NDI AB. All rights reserved. 
// // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and // associated documentation files(the "Software"), to deal in the Software without restriction, including @@ -27,18 +27,18 @@ // //*********************************************************************************************************** -// Because many applications like submitting 16bit interleaved audio, these functions will convert in and out -// of that format. It is important to note that the NDI SDK does define fully audio levels, something that -// most applications that you use do not. Specifically, the floating point -1.0 to +1.0 range is defined as a -// professional audio reference level of +4dBU. If we take 16bit audio and scale it into this range it is -// almost always correct for sending and will cause no problems. For receiving however it is not at all -// uncommon that the user has audio that exceeds reference level and in this case it is likely that audio -// exceeds the reference level and so if you are not careful you will end up having audio clipping when you -// use the 16 bit range. - -// This describes an audio frame -typedef struct NDIlib_audio_frame_interleaved_16s_t -{ // The sample-rate of this buffer. +// Because many applications like submitting 16-bit interleaved audio, these functions will convert in and +// out of that format. It is important to note that the NDI SDK does define fully audio levels, something +// that most applications that you use do not. Specifically, the floating-point range, -1.0 to +1.0, is +// defined as a professional audio reference level of +4 dBU. If we take 16-bit audio and scale it into this +// range it is almost always correct for sending and will cause no problems. For receiving however it is not +// at all uncommon that the user has audio that exceeds reference level and in this case it is likely that +// audio exceeds the reference level and so if you are not careful you will end up having audio clipping when +// you use the 16-bit range. + +// This describes an audio frame. +typedef struct NDIlib_audio_frame_interleaved_16s_t { + // The sample-rate of this buffer. int sample_rate; // The number of audio channels. @@ -47,30 +47,33 @@ typedef struct NDIlib_audio_frame_interleaved_16s_t // The number of audio samples per channel. int no_samples; - // The timecode of this frame in 100ns intervals. + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; - // The audio reference level in dB. This specifies how many dB above the reference level (+4dBU) is the - // full range of 16 bit audio. If you do not understand this and want to just use numbers: - // - If you are sending audio, specify +0dB. Most common applications produce audio at reference level. - // - If receiving audio, specify +20dB. This means that the full 16 bit range corresponds to professional - // level audio with 20dB of headroom. Note that if you are writing it into a file it might sound soft - // because you have 20dB of headroom before clipping. + // The audio reference level in dB. This specifies how many dB above the reference level (+4 dBU) is the + // full range of 16-bit audio. If you do not understand this and want to just use numbers: + // - If you are sending audio, specify +0 dB. Most common applications produce audio at reference level. + // - If receiving audio, specify +20 dB. This means that the full 16-bit range corresponds to + // professional level audio with 20 dB of headroom. 
Note that if you are writing it into a file it + // might sound soft because you have 20 dB of headroom before clipping. int reference_level; - // The audio data, interleaved 16bpp. + // The audio data, interleaved 16-bit samples. int16_t* p_data; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_audio_frame_interleaved_16s_t(int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, - int reference_level_ = 0, int16_t* p_data_ = NULL); + NDIlib_audio_frame_interleaved_16s_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + int reference_level_ = 0, + int16_t* p_data_ = NULL + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_audio_frame_interleaved_16s_t; // This describes an audio frame. -typedef struct NDIlib_audio_frame_interleaved_32s_t -{ // The sample-rate of this buffer. +typedef struct NDIlib_audio_frame_interleaved_32s_t { + // The sample-rate of this buffer. int sample_rate; // The number of audio channels. @@ -79,30 +82,33 @@ typedef struct NDIlib_audio_frame_interleaved_32s_t // The number of audio samples per channel. int no_samples; - // The timecode of this frame in 100ns intervals. + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; - // The audio reference level in dB. This specifies how many dB above the reference level (+4dBU) is the - // full range of 16 bit audio. If you do not understand this and want to just use numbers: - // - If you are sending audio, specify +0dB. Most common applications produce audio at reference level. - // - If receiving audio, specify +20dB. This means that the full 16 bit range corresponds to professional - // level audio with 20dB of headroom. Note that if you are writing it into a file it might sound soft - // because you have 20dB of headroom before clipping. + // The audio reference level in dB. This specifies how many dB above the reference level (+4 dBU) is the + // full range of 32-bit audio. If you do not understand this and want to just use numbers: + // - If you are sending audio, specify +0 dB. Most common applications produce audio at reference level. + // - If receiving audio, specify +20 dB. This means that the full 32-bit range corresponds to + // professional level audio with 20 dB of headroom. Note that if you are writing it into a file it + // might sound soft because you have 20 dB of headroom before clipping. int reference_level; - // The audio data, interleaved 32bpp. + // The audio data, interleaved 32-bit samples. int32_t* p_data; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_audio_frame_interleaved_32s_t(int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, - int reference_level_ = 0, int32_t* p_data_ = NULL); + NDIlib_audio_frame_interleaved_32s_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + int reference_level_ = 0, + int32_t* p_data_ = NULL + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_audio_frame_interleaved_32s_t; // This describes an audio frame. -typedef struct NDIlib_audio_frame_interleaved_32f_t -{ // The sample-rate of this buffer. +typedef struct NDIlib_audio_frame_interleaved_32f_t { + // The sample-rate of this buffer. int sample_rate; // The number of audio channels. 
@@ -111,62 +117,91 @@ typedef struct NDIlib_audio_frame_interleaved_32f_t // The number of audio samples per channel. int no_samples; - // The timecode of this frame in 100ns intervals. + // The timecode of this frame in 100-nanosecond intervals. int64_t timecode; - // The audio data, interleaved 32bpp. + // The audio data, interleaved 32-bit floating-point samples. float* p_data; #if NDILIB_CPP_DEFAULT_CONSTRUCTORS - NDIlib_audio_frame_interleaved_32f_t(int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, int64_t timecode_ = NDIlib_send_timecode_synthesize, - float* p_data_ = NULL); + NDIlib_audio_frame_interleaved_32f_t( + int sample_rate_ = 48000, int no_channels_ = 2, int no_samples_ = 0, + int64_t timecode_ = NDIlib_send_timecode_synthesize, + float* p_data_ = NULL + ); #endif // NDILIB_CPP_DEFAULT_CONSTRUCTORS - } NDIlib_audio_frame_interleaved_32f_t; -// This will add an audio frame in interleaved 16bpp. +// This will add an audio frame in interleaved 16-bit. PROCESSINGNDILIB_API -void NDIlib_util_send_send_audio_interleaved_16s(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_16s_t* p_audio_data); +void NDIlib_util_send_send_audio_interleaved_16s( + NDIlib_send_instance_t p_instance, + const NDIlib_audio_frame_interleaved_16s_t* p_audio_data +); -// This will add an audio frame in interleaved 32bpp. +// This will add an audio frame in interleaved 32-bit. PROCESSINGNDILIB_API -void NDIlib_util_send_send_audio_interleaved_32s(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32s_t* p_audio_data); +void NDIlib_util_send_send_audio_interleaved_32s( + NDIlib_send_instance_t p_instance, + const NDIlib_audio_frame_interleaved_32s_t* p_audio_data +); // This will add an audio frame in interleaved floating point. PROCESSINGNDILIB_API -void NDIlib_util_send_send_audio_interleaved_32f(NDIlib_send_instance_t p_instance, const NDIlib_audio_frame_interleaved_32f_t* p_audio_data); +void NDIlib_util_send_send_audio_interleaved_32f( + NDIlib_send_instance_t p_instance, + const NDIlib_audio_frame_interleaved_32f_t* p_audio_data +); -// Convert to interleaved 16bpp. +// Convert to interleaved 16-bit. PROCESSINGNDILIB_API -void NDIlib_util_audio_to_interleaved_16s_v2(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_16s_t* p_dst); +void NDIlib_util_audio_to_interleaved_16s_v2( + const NDIlib_audio_frame_v2_t* p_src, + NDIlib_audio_frame_interleaved_16s_t* p_dst +); -// Convert from interleaved 16bpp. +// Convert from interleaved 16-bit. PROCESSINGNDILIB_API -void NDIlib_util_audio_from_interleaved_16s_v2(const NDIlib_audio_frame_interleaved_16s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); +void NDIlib_util_audio_from_interleaved_16s_v2( + const NDIlib_audio_frame_interleaved_16s_t* p_src, + NDIlib_audio_frame_v2_t* p_dst +); -// Convert to interleaved 32bpp. +// Convert to interleaved 32-bit. PROCESSINGNDILIB_API -void NDIlib_util_audio_to_interleaved_32s_v2(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32s_t* p_dst); +void NDIlib_util_audio_to_interleaved_32s_v2( + const NDIlib_audio_frame_v2_t* p_src, + NDIlib_audio_frame_interleaved_32s_t* p_dst +); -// Convert from interleaved 32bpp. +// Convert from interleaved 32-bit. 
PROCESSINGNDILIB_API -void NDIlib_util_audio_from_interleaved_32s_v2(const NDIlib_audio_frame_interleaved_32s_t* p_src, NDIlib_audio_frame_v2_t* p_dst); +void NDIlib_util_audio_from_interleaved_32s_v2( + const NDIlib_audio_frame_interleaved_32s_t* p_src, + NDIlib_audio_frame_v2_t* p_dst +); // Convert to interleaved floating point. PROCESSINGNDILIB_API -void NDIlib_util_audio_to_interleaved_32f_v2(const NDIlib_audio_frame_v2_t* p_src, NDIlib_audio_frame_interleaved_32f_t* p_dst); +void NDIlib_util_audio_to_interleaved_32f_v2( + const NDIlib_audio_frame_v2_t* p_src, + NDIlib_audio_frame_interleaved_32f_t* p_dst +); // Convert from interleaved floating point. PROCESSINGNDILIB_API -void NDIlib_util_audio_from_interleaved_32f_v2(const NDIlib_audio_frame_interleaved_32f_t* p_src, NDIlib_audio_frame_v2_t* p_dst); +void NDIlib_util_audio_from_interleaved_32f_v2( + const NDIlib_audio_frame_interleaved_32f_t* p_src, + NDIlib_audio_frame_v2_t* p_dst +); -// This is a helper function that you may use to convert from 10bit packed UYVY into 16bit semi-planar. The +// This is a helper function that you may use to convert from 10-bit packed UYVY into 16-bit semi-planar. The // FourCC on the source is ignored in this function since we do not define a V210 format in NDI. You must // make sure that there is memory and a stride allocated in p_dst. PROCESSINGNDILIB_API void NDIlib_util_V210_to_P216(const NDIlib_video_frame_v2_t* p_src_v210, NDIlib_video_frame_v2_t* p_dst_p216); -// This converts from 16bit semi-planar to 10bit. You must make sure that there is memory and a stride +// This converts from 16-bit semi-planar to 10-bit. You must make sure that there is memory and a stride // allocated in p_dst. PROCESSINGNDILIB_API void NDIlib_util_P216_to_V210(const NDIlib_video_frame_v2_t* p_src_p216, NDIlib_video_frame_v2_t* p_dst_v210);
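A minimal sketch of the conversion path described above: turning a received floating-point audio frame into interleaved 16-bit samples with the recommended +20 dB of receive headroom. The caller-allocated destination buffer and the NDIlib_recv_free_audio_v2 call reflect the wider SDK rather than this hunk:

#include <stdlib.h>
#include <string.h>
#include "Processing.NDI.Lib.h"

// Convert a just-captured floating-point audio frame to interleaved 16-bit samples.
// p_audio is assumed to come from NDIlib_recv_capture_v2 on pRecv.
static void to_interleaved_16s(NDIlib_recv_instance_t pRecv, const NDIlib_audio_frame_v2_t* p_audio)
{
    NDIlib_audio_frame_interleaved_16s_t dst;
    memset(&dst, 0, sizeof(dst));
    dst.reference_level = 20; // +20 dB of headroom when receiving, as recommended above
    dst.p_data = (int16_t*)malloc(sizeof(int16_t) * (size_t)p_audio->no_samples * (size_t)p_audio->no_channels);

    NDIlib_util_audio_to_interleaved_16s_v2(p_audio, &dst);
    // ... hand dst.p_data (no_samples * no_channels interleaved samples) to a 16-bit consumer ...

    free(dst.p_data);
    NDIlib_recv_free_audio_v2(pRecv, p_audio);
}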