author    fiaxh <git@lightrise.org> 2021-05-11 12:57:02 +0200
committer fiaxh <git@lightrise.org> 2021-05-11 12:57:02 +0200
commit    d71604913dd5b3372a823320db83c37c845fac5c (patch)
tree      2ffbff97a02c81d48d8aef4a4b7ee870507236e9 /plugins/rtp
parent    e92ed27317ae398c867c946cf7206b1f0b32f3b4 (diff)
parent    90f9ecf62b2ebfef14de2874e7942552409632bf (diff)
Merge remote-tracking branch 'origin/feature/calls'
Diffstat (limited to 'plugins/rtp')
-rw-r--r--  plugins/rtp/CMakeLists.txt                 |  61
-rw-r--r--  plugins/rtp/src/codec_util.vala            | 307
-rw-r--r--  plugins/rtp/src/device.vala                | 272
-rw-r--r--  plugins/rtp/src/module.vala                | 237
-rw-r--r--  plugins/rtp/src/participant.vala           |  39
-rw-r--r--  plugins/rtp/src/plugin.vala                | 449
-rw-r--r--  plugins/rtp/src/register_plugin.vala       |   3
-rw-r--r--  plugins/rtp/src/stream.vala                | 681
-rw-r--r--  plugins/rtp/src/video_widget.vala          | 110
-rw-r--r--  plugins/rtp/src/voice_processor.vala       | 176
-rw-r--r--  plugins/rtp/src/voice_processor_native.cpp | 148
-rw-r--r--  plugins/rtp/vapi/gstreamer-rtp-1.0.vapi    | 625
12 files changed, 3108 insertions(+), 0 deletions(-)
diff --git a/plugins/rtp/CMakeLists.txt b/plugins/rtp/CMakeLists.txt
new file mode 100644
index 00000000..52419425
--- /dev/null
+++ b/plugins/rtp/CMakeLists.txt
@@ -0,0 +1,61 @@
+find_package(GstRtp REQUIRED)
+find_package(WebRTCAudioProcessing 0.2)
+find_packages(RTP_PACKAGES REQUIRED
+ Gee
+ GLib
+ GModule
+ GnuTLS
+ GObject
+ GTK3
+ Gst
+ GstApp
+ GstAudio
+)
+
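+# GST_1_16 gates GStreamer 1.16+ API, e.g. the DEVICE_CHANGED handling in src/plugin.vala.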
+if(Gst_VERSION VERSION_GREATER "1.16")
+ set(RTP_DEFINITIONS GST_1_16)
+endif()
+
+if(WebRTCAudioProcessing_VERSION VERSION_GREATER "0.4")
+ message(STATUS "Ignoring WebRTCAudioProcessing, only versions < 0.4 supported so far")
+ unset(WebRTCAudioProcessing_FOUND)
+endif()
+
+if(WebRTCAudioProcessing_FOUND)
+ set(RTP_DEFINITIONS ${RTP_DEFINITIONS} WITH_VOICE_PROCESSOR)
+ set(RTP_VOICE_PROCESSOR_VALA src/voice_processor.vala)
+ set(RTP_VOICE_PROCESSOR_CXX src/voice_processor_native.cpp)
+ set(RTP_VOICE_PROCESSOR_LIB webrtc-audio-processing)
+else()
+ message(STATUS "WebRTCAudioProcessing not found, build without voice pre-processing!")
+endif()
+
+vala_precompile(RTP_VALA_C
+SOURCES
+ src/codec_util.vala
+ src/device.vala
+ src/module.vala
+ src/plugin.vala
+ src/stream.vala
+ src/video_widget.vala
+ src/register_plugin.vala
+ ${RTP_VOICE_PROCESSOR_VALA}
+CUSTOM_VAPIS
+ ${CMAKE_BINARY_DIR}/exports/crypto-vala.vapi
+ ${CMAKE_BINARY_DIR}/exports/xmpp-vala.vapi
+ ${CMAKE_BINARY_DIR}/exports/dino.vapi
+ ${CMAKE_BINARY_DIR}/exports/qlite.vapi
+ ${CMAKE_CURRENT_SOURCE_DIR}/vapi/gstreamer-rtp-1.0.vapi
+PACKAGES
+ ${RTP_PACKAGES}
+DEFINITIONS
+ ${RTP_DEFINITIONS}
+)
+
+add_definitions(${VALA_CFLAGS} -DG_LOG_DOMAIN="rtp" -I${CMAKE_CURRENT_SOURCE_DIR}/src)
+add_library(rtp SHARED ${RTP_VALA_C} ${RTP_VOICE_PROCESSOR_CXX})
+target_link_libraries(rtp libdino crypto-vala ${RTP_PACKAGES} gstreamer-rtp-1.0 ${RTP_VOICE_PROCESSOR_LIB})
+set_target_properties(rtp PROPERTIES PREFIX "")
+set_target_properties(rtp PROPERTIES LIBRARY_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/plugins/)
+
+install(TARGETS rtp ${PLUGIN_INSTALL})
diff --git a/plugins/rtp/src/codec_util.vala b/plugins/rtp/src/codec_util.vala
new file mode 100644
index 00000000..6a2438f1
--- /dev/null
+++ b/plugins/rtp/src/codec_util.vala
@@ -0,0 +1,307 @@
+using Gee;
+using Xmpp;
+using Xmpp.Xep;
+
+public class Dino.Plugins.Rtp.CodecUtil {
+ private Set<string> supported_elements = new HashSet<string>();
+ private Set<string> unsupported_elements = new HashSet<string>();
+
+ public static Gst.Caps get_caps(string media, JingleRtp.PayloadType payload_type, bool incoming) {
+ Gst.Caps caps = new Gst.Caps.simple("application/x-rtp",
+ "media", typeof(string), media,
+ "payload", typeof(int), payload_type.id);
+ //"channels", typeof(int), payloadType.channels,
+ //"max-ptime", typeof(int), payloadType.maxptime);
+ unowned Gst.Structure s = caps.get_structure(0);
+ if (payload_type.clockrate != 0) {
+ s.set("clock-rate", typeof(int), payload_type.clockrate);
+ }
+ if (payload_type.name != null) {
+ s.set("encoding-name", typeof(string), payload_type.name.up());
+ }
+ if (incoming) {
+ foreach (JingleRtp.RtcpFeedback rtcp_fb in payload_type.rtcp_fbs) {
+ if (rtcp_fb.subtype == null) {
+ s.set(@"rtcp-fb-$(rtcp_fb.type_)", typeof(bool), true);
+ } else {
+ s.set(@"rtcp-fb-$(rtcp_fb.type_)-$(rtcp_fb.subtype)", typeof(bool), true);
+ }
+ }
+ }
+ return caps;
+ }
+
+ public static string? get_codec_from_payload(string media, JingleRtp.PayloadType payload_type) {
+ if (payload_type.name != null) return payload_type.name.down();
+ if (media == "audio") {
+ switch (payload_type.id) {
+ case 0:
+ return "pcmu";
+ case 8:
+ return "pcma";
+ }
+ }
+ return null;
+ }
+
+ public static string? get_media_type_from_payload(string media, JingleRtp.PayloadType payload_type) {
+ return get_media_type(media, get_codec_from_payload(media, payload_type));
+ }
+
+ public static string? get_media_type(string media, string? codec) {
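+ // Maps (media, codec) to a GStreamer caps name, e.g. ("video", "vp8") -> "video/x-vp8";
+ // a-law/u-law are special-cased below since their caps names don't follow that pattern.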
+ if (codec == null) return null;
+ if (media == "audio") {
+ switch (codec) {
+ case "pcma":
+ return "audio/x-alaw";
+ case "pcmu":
+ return "audio/x-mulaw";
+ }
+ }
+ return @"$media/x-$codec";
+ }
+
+ public static string? get_rtp_pay_element_name_from_payload(string media, JingleRtp.PayloadType payload_type) {
+ return get_pay_candidate(media, get_codec_from_payload(media, payload_type));
+ }
+
+ public static string? get_pay_candidate(string media, string? codec) {
+ if (codec == null) return null;
+ return @"rtp$(codec)pay";
+ }
+
+ public static string? get_rtp_depay_element_name_from_payload(string media, JingleRtp.PayloadType payload_type) {
+ return get_depay_candidate(media, get_codec_from_payload(media, payload_type));
+ }
+
+ public static string? get_depay_candidate(string media, string? codec) {
+ if (codec == null) return null;
+ return @"rtp$(codec)depay";
+ }
+
+ public static string[] get_encode_candidates(string media, string? codec) {
+ if (codec == null) return new string[0];
+ if (media == "audio") {
+ switch (codec) {
+ case "opus":
+ return new string[] {"opusenc"};
+ case "speex":
+ return new string[] {"speexenc"};
+ case "pcma":
+ return new string[] {"alawenc"};
+ case "pcmu":
+ return new string[] {"mulawenc"};
+ }
+ } else if (media == "video") {
+ switch (codec) {
+ case "h264":
+ return new string[] {/*"msdkh264enc", */"vaapih264enc", "x264enc"};
+ case "vp9":
+ return new string[] {/*"msdkvp9enc", */"vaapivp9enc" /*, "vp9enc" */};
+ case "vp8":
+ return new string[] {/*"msdkvp8enc", */"vaapivp8enc", "vp8enc"};
+ }
+ }
+ return new string[0];
+ }
+
+ public static string[] get_decode_candidates(string media, string? codec) {
+ if (codec == null) return new string[0];
+ if (media == "audio") {
+ switch (codec) {
+ case "opus":
+ return new string[] {"opusdec"};
+ case "speex":
+ return new string[] {"speexdec"};
+ case "pcma":
+ return new string[] {"alawdec"};
+ case "pcmu":
+ return new string[] {"mulawdec"};
+ }
+ } else if (media == "video") {
+ switch (codec) {
+ case "h264":
+ return new string[] {/*"msdkh264dec", */"vaapih264dec"};
+ case "vp9":
+ return new string[] {/*"msdkvp9dec", */"vaapivp9dec", "vp9dec"};
+ case "vp8":
+ return new string[] {/*"msdkvp8dec", */"vaapivp8dec", "vp8dec"};
+ }
+ }
+ return new string[0];
+ }
+
+ public static string? get_encode_prefix(string media, string codec, string encode, JingleRtp.PayloadType? payload_type) {
+ if (encode == "msdkh264enc") return "video/x-raw,format=NV12 ! ";
+ if (encode == "vaapih264enc") return "video/x-raw,format=NV12 ! ";
+ return null;
+ }
+
+ public static string? get_encode_args(string media, string codec, string encode, JingleRtp.PayloadType? payload_type) {
+ // H264
+ if (encode == "msdkh264enc") return @" rate-control=vbr";
+ if (encode == "vaapih264enc") return @" tune=low-power";
+ if (encode == "x264enc") return @" byte-stream=1 profile=baseline speed-preset=ultrafast tune=zerolatency";
+
+ // VP8
+ if (encode == "msdkvp8enc") return " rate-control=vbr";
+ if (encode == "vaapivp8enc") return " rate-control=vbr";
+ if (encode == "vp8enc") return " deadline=1 error-resilient=1";
+
+ // OPUS
+ if (encode == "opusenc") {
+ if (payload_type != null && payload_type.parameters.has("useinbandfec", "1")) return " audio-type=voice inband-fec=true";
+ return " audio-type=voice";
+ }
+
+ return null;
+ }
+
+ public static string? get_encode_suffix(string media, string codec, string encode, JingleRtp.PayloadType? payload_type) {
+ // H264
+ if (media == "video" && codec == "h264") return " ! video/x-h264,profile=constrained-baseline ! h264parse";
+ return null;
+ }
+
+ public uint update_bitrate(string media, JingleRtp.PayloadType payload_type, Gst.Element encode_element, uint bitrate) {
+ Gst.Bin? encode_bin = encode_element as Gst.Bin;
+ if (encode_bin == null) return 0;
+ string? codec = get_codec_from_payload(media, payload_type);
+ string? encode_name = get_encode_element_name(media, codec);
+ if (encode_name == null) return 0;
+ Gst.Element encode = encode_bin.get_by_name(@"$(encode_bin.name)_encode");
+
+ bitrate = uint.min(2048000, bitrate);
+
+ switch (encode_name) {
+ case "msdkh264enc":
+ case "vaapih264enc":
+ case "x264enc":
+ case "msdkvp8enc":
+ case "vaapivp8enc":
+ bitrate = uint.min(2048000, bitrate);
+ encode.set("bitrate", bitrate);
+ return bitrate;
+ case "vp8enc":
+ bitrate = uint.min(2147483, bitrate);
+ encode.set("target-bitrate", bitrate * 1000);
+ return bitrate;
+ }
+
+ return 0;
+ }
+
+ public static string? get_decode_prefix(string media, string codec, string decode, JingleRtp.PayloadType? payload_type) {
+ return null;
+ }
+
+ public static string? get_decode_args(string media, string codec, string decode, JingleRtp.PayloadType? payload_type) {
+ if (decode == "opusdec" && payload_type != null && payload_type.parameters.has("useinbandfec", "1")) return " use-inband-fec=true";
+ if (decode == "vaapivp9dec" || decode == "vaapivp8dec" || decode == "vaapih264dec") return " max-errors=100";
+ return null;
+ }
+
+ public static string? get_decode_suffix(string media, string codec, string encode, JingleRtp.PayloadType? payload_type) {
+ return null;
+ }
+
+ public static string? get_depay_args(string media, string codec, string encode, JingleRtp.PayloadType? payload_type) {
+ if (codec == "vp8") return " wait-for-keyframe=true";
+ return null;
+ }
+
+ public bool is_element_supported(string element_name) {
+ if (unsupported_elements.contains(element_name)) return false;
+ if (supported_elements.contains(element_name)) return true;
+ var test_element = Gst.ElementFactory.make(element_name, @"test-$element_name");
+ if (test_element != null) {
+ supported_elements.add(element_name);
+ return true;
+ } else {
+ debug("%s is not supported on this platform", element_name);
+ unsupported_elements.add(element_name);
+ return false;
+ }
+ }
+
+ public string? get_encode_element_name(string media, string? codec) {
+ if (!is_element_supported(get_pay_element_name(media, codec))) return null;
+ foreach (string candidate in get_encode_candidates(media, codec)) {
+ if (is_element_supported(candidate)) return candidate;
+ }
+ return null;
+ }
+
+ public string? get_pay_element_name(string media, string? codec) {
+ string candidate = get_pay_candidate(media, codec);
+ if (is_element_supported(candidate)) return candidate;
+ return null;
+ }
+
+ public string? get_decode_element_name(string media, string? codec) {
+ foreach (string candidate in get_decode_candidates(media, codec)) {
+ if (is_element_supported(candidate)) return candidate;
+ }
+ return null;
+ }
+
+ public string? get_depay_element_name(string media, string? codec) {
+ string candidate = get_depay_candidate(media, codec);
+ if (is_element_supported(candidate)) return candidate;
+ return null;
+ }
+
+ public void mark_element_unsupported(string element_name) {
+ unsupported_elements.add(element_name);
+ }
+
+ public string? get_decode_bin_description(string media, string? codec, JingleRtp.PayloadType? payload_type, string? element_name = null, string? name = null) {
+ if (codec == null) return null;
+ string base_name = name ?? @"encode-$codec-$(Random.next_int())";
+ string depay = get_depay_element_name(media, codec);
+ string decode = element_name ?? get_decode_element_name(media, codec);
+ if (depay == null || decode == null) return null;
+ string decode_prefix = get_decode_prefix(media, codec, decode, payload_type) ?? "";
+ string decode_args = get_decode_args(media, codec, decode, payload_type) ?? "";
+ string decode_suffix = get_decode_suffix(media, codec, decode, payload_type) ?? "";
+ string depay_args = get_depay_args(media, codec, decode, payload_type) ?? "";
+ string resample = media == "audio" ? @" ! audioresample name=$(base_name)_resample" : "";
+ return @"$depay$depay_args name=$(base_name)_rtp_depay ! $decode_prefix$decode$decode_args name=$(base_name)_$(codec)_decode$decode_suffix ! $(media)convert name=$(base_name)_convert$resample";
+ }
+
+ public Gst.Element? get_decode_bin(string media, JingleRtp.PayloadType payload_type, string? name = null) {
+ string? codec = get_codec_from_payload(media, payload_type);
+ string base_name = name ?? @"encode-$codec-$(Random.next_int())";
+ string? desc = get_decode_bin_description(media, codec, payload_type, null, base_name);
+ if (desc == null) return null;
+ debug("Pipeline to decode %s %s: %s", media, codec, desc);
+ Gst.Element bin = Gst.parse_bin_from_description(desc, true);
+ bin.name = name;
+ return bin;
+ }
+
+ public string? get_encode_bin_description(string media, string? codec, JingleRtp.PayloadType? payload_type, string? element_name = null, string? name = null) {
+ if (codec == null) return null;
+ string base_name = name ?? @"encode_$(codec)_$(Random.next_int())";
+ string pay = get_pay_element_name(media, codec);
+ string encode = element_name ?? get_encode_element_name(media, codec);
+ if (pay == null || encode == null) return null;
+ string encode_prefix = get_encode_prefix(media, codec, encode, payload_type) ?? "";
+ string encode_args = get_encode_args(media, codec, encode, payload_type) ?? "";
+ string encode_suffix = get_encode_suffix(media, codec, encode, payload_type) ?? "";
+ string resample = media == "audio" ? @" ! audioresample name=$(base_name)_resample" : "";
+ return @"$(media)convert name=$(base_name)_convert$resample ! $encode_prefix$encode$encode_args name=$(base_name)_encode$encode_suffix ! $pay pt=$(payload_type != null ? payload_type.id : 96) name=$(base_name)_rtp_pay";
+ }
+
+ public Gst.Element? get_encode_bin(string media, JingleRtp.PayloadType payload_type, string? name = null) {
+ string? codec = get_codec_from_payload(media, payload_type);
+ string base_name = name ?? @"encode_$(codec)_$(Random.next_int())";
+ string? desc = get_encode_bin_description(media, codec, payload_type, null, base_name);
+ if (desc == null) return null;
+ debug("Pipeline to encode %s %s: %s", media, codec, desc);
+ Gst.Element bin = Gst.parse_bin_from_description(desc, true);
+ bin.name = name;
+ return bin;
+ }
+
+}
\ No newline at end of file
diff --git a/plugins/rtp/src/device.vala b/plugins/rtp/src/device.vala
new file mode 100644
index 00000000..e25271b1
--- /dev/null
+++ b/plugins/rtp/src/device.vala
@@ -0,0 +1,272 @@
+public class Dino.Plugins.Rtp.Device : MediaDevice, Object {
+ public Plugin plugin { get; private set; }
+ public Gst.Device device { get; private set; }
+
+ private string device_name;
+ public string id { get {
+ return device_name;
+ }}
+ private string device_display_name;
+ public string display_name { get {
+ return device_display_name;
+ }}
+ public string detail_name { get {
+ return device.properties.get_string("alsa.card_name") ?? device.properties.get_string("alsa.id") ?? id;
+ }}
+ public Gst.Pipeline pipe { get {
+ return plugin.pipe;
+ }}
+ public string? media { get {
+ if (device.device_class.has_prefix("Audio/")) {
+ return "audio";
+ } else if (device.device_class.has_prefix("Video/")) {
+ return "video";
+ } else {
+ return null;
+ }
+ }}
+ public bool is_source { get {
+ return device.device_class.has_suffix("/Source");
+ }}
+ public bool is_sink { get {
+ return device.device_class.has_suffix("/Sink");
+ }}
+
+ private Gst.Element element;
+ private Gst.Element tee;
+ private Gst.Element dsp;
+ private Gst.Element mixer;
+ private Gst.Element filter;
+ private Gst.Element rate;
+ private int links = 0;
+
+ public Device(Plugin plugin, Gst.Device device) {
+ this.plugin = plugin;
+ update(device);
+ }
+
+ public bool matches(Gst.Device device) {
+ if (this.device.name == device.name) return true;
+ return false;
+ }
+
+ public void update(Gst.Device device) {
+ this.device = device;
+ this.device_name = device.name;
+ this.device_display_name = device.display_name;
+ }
+
+ public Gst.Element? link_sink() {
+ if (element == null) create();
+ links++;
+ if (mixer != null) return mixer;
+ if (is_sink && media == "audio") return filter;
+ return element;
+ }
+
+ public Gst.Element? link_source() {
+ if (element == null) create();
+ links++;
+ if (tee != null) return tee;
+ return element;
+ }
+
+ public void unlink() {
+ if (links <= 0) {
+ critical("Link count below zero.");
+ return;
+ }
+ links--;
+ if (links == 0) {
+ destroy();
+ }
+ }
+
+ private Gst.Caps get_best_caps() {
+ if (media == "audio") {
+ return Gst.Caps.from_string("audio/x-raw,rate=48000,channels=1");
+ } else if (media == "video" && device.caps.get_size() > 0) {
+ int best_index = 0;
+ Value? best_fraction = null;
+ int best_fps = 0;
+ int best_width = 0;
+ int best_height = 0;
+ for (int i = 0; i < device.caps.get_size(); i++) {
+ unowned Gst.Structure? that = device.caps.get_structure(i);
+ if (!that.has_name("video/x-raw")) continue;
+ int num = 0, den = 0, width = 0, height = 0;
+ if (!that.has_field("framerate")) continue;
+ Value framerate = that.get_value("framerate");
+ if (framerate.type() == typeof(Gst.Fraction)) {
+ num = Gst.Value.get_fraction_numerator(framerate);
+ den = Gst.Value.get_fraction_denominator(framerate);
+ } else if (framerate.type() == typeof(Gst.ValueList)) {
+ for(uint j = 0; j < Gst.ValueList.get_size(framerate); j++) {
+ Value fraction = Gst.ValueList.get_value(framerate, j);
+ int in_num = Gst.Value.get_fraction_numerator(fraction);
+ int in_den = Gst.Value.get_fraction_denominator(fraction);
+ int fps = den > 0 ? (num/den) : 0;
+ int in_fps = in_den > 0 ? (in_num/in_den) : 0;
+ if (in_fps > fps) {
+ best_fraction = fraction;
+ num = in_num;
+ den = in_den;
+ }
+ }
+ } else {
+ debug("Unknown type for framerate: %s", framerate.type_name());
+ }
+ if (den == 0) continue;
+ if (!that.has_field("width") || !that.get_int("width", out width)) continue;
+ if (!that.has_field("height") || !that.get_int("height", out height)) continue;
+ int fps = num/den;
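+ // Prefer higher frame rate, then higher width, then higher height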
+ if (best_fps < fps || best_fps == fps && best_width < width || best_fps == fps && best_width == width && best_height < height) {
+ best_fps = fps;
+ best_width = width;
+ best_height = height;
+ best_index = i;
+ }
+ }
+ Gst.Caps res = caps_copy_nth(device.caps, best_index);
+ unowned Gst.Structure? that = res.get_structure(0);
+ Value framerate = that.get_value("framerate");
+ if (framerate.type() == typeof(Gst.ValueList)) {
+ that.set_value("framerate", best_fraction);
+ }
+ debug("Selected caps %s", res.to_string());
+ return res;
+ } else if (device.caps.get_size() > 0) {
+ return caps_copy_nth(device.caps, 0);
+ } else {
+ return new Gst.Caps.any();
+ }
+ }
+
+ // Backport from gst_caps_copy_nth added in GStreamer 1.16
+ private static Gst.Caps caps_copy_nth(Gst.Caps source, uint index) {
+ Gst.Caps target = new Gst.Caps.empty();
+ target.flags = source.flags;
+ target.append_structure_full(source.get_structure(index).copy(), source.get_features(index).copy());
+ return target;
+ }
+
+ private void create() {
+ debug("Creating device %s", id);
+ plugin.pause();
+ element = device.create_element(id);
+ pipe.add(element);
+ if (is_source) {
+ element.@set("do-timestamp", true);
+ filter = Gst.ElementFactory.make("capsfilter", @"caps_filter_$id");
+ filter.@set("caps", get_best_caps());
+ pipe.add(filter);
+ element.link(filter);
+#if WITH_VOICE_PROCESSOR
+ if (media == "audio" && plugin.echoprobe != null) {
+ dsp = new VoiceProcessor(plugin.echoprobe as EchoProbe, element as Gst.Audio.StreamVolume);
+ dsp.name = @"dsp_$id";
+ pipe.add(dsp);
+ filter.link(dsp);
+ }
+#endif
+ tee = Gst.ElementFactory.make("tee", @"tee_$id");
+ tee.@set("allow-not-linked", true);
+ pipe.add(tee);
+ (dsp ?? filter).link(tee);
+ }
+ if (is_sink) {
+ element.@set("async", false);
+ element.@set("sync", false);
+ }
+ if (is_sink && media == "audio") {
+ filter = Gst.ElementFactory.make("capsfilter", @"caps_filter_$id");
+ filter.@set("caps", get_best_caps());
+ pipe.add(filter);
+ if (plugin.echoprobe != null) {
+ rate = Gst.ElementFactory.make("audiorate", @"rate_$id");
+ rate.@set("tolerance", 100000000);
+ pipe.add(rate);
+ filter.link(rate);
+ rate.link(plugin.echoprobe);
+ plugin.echoprobe.link(element);
+ } else {
+ filter.link(element);
+ }
+ }
+ plugin.unpause();
+ }
+
+ private void destroy() {
+ if (mixer != null) {
+ if (is_sink && media == "audio" && plugin.echoprobe != null) {
+ plugin.echoprobe.unlink(mixer);
+ }
+ int linked_sink_pads = 0;
+ mixer.foreach_sink_pad((_, pad) => {
+ if (pad.is_linked()) linked_sink_pads++;
+ return true;
+ });
+ if (linked_sink_pads > 0) {
+ warning("%s-mixer still has %i sink pads while being destroyed", id, linked_sink_pads);
+ }
+ mixer.set_locked_state(true);
+ mixer.set_state(Gst.State.NULL);
+ mixer.unlink(element);
+ pipe.remove(mixer);
+ mixer = null;
+ } else if (is_sink && media == "audio") {
+ if (filter != null) {
+ filter.set_locked_state(true);
+ filter.set_state(Gst.State.NULL);
+ filter.unlink(rate ?? ((Gst.Element)plugin.echoprobe) ?? element);
+ pipe.remove(filter);
+ filter = null;
+ }
+ if (rate != null) {
+ rate.set_locked_state(true);
+ rate.set_state(Gst.State.NULL);
+ rate.unlink(plugin.echoprobe);
+ pipe.remove(rate);
+ rate = null;
+ }
+ if (plugin.echoprobe != null) {
+ plugin.echoprobe.unlink(element);
+ }
+ }
+ element.set_locked_state(true);
+ element.set_state(Gst.State.NULL);
+ if (filter != null) element.unlink(filter);
+ else if (is_source) element.unlink(tee);
+ pipe.remove(element);
+ element = null;
+ if (filter != null) {
+ filter.set_locked_state(true);
+ filter.set_state(Gst.State.NULL);
+ filter.unlink(dsp ?? tee);
+ pipe.remove(filter);
+ filter = null;
+ }
+ if (dsp != null) {
+ dsp.set_locked_state(true);
+ dsp.set_state(Gst.State.NULL);
+ dsp.unlink(tee);
+ pipe.remove(dsp);
+ dsp = null;
+ }
+ if (tee != null) {
+ int linked_src_pads = 0;
+ tee.foreach_src_pad((_, pad) => {
+ if (pad.is_linked()) linked_src_pads++;
+ return true;
+ });
+ if (linked_src_pads != 0) {
+ warning("%s-tee still has %d src pads while being destroyed", id, linked_src_pads);
+ }
+ tee.set_locked_state(true);
+ tee.set_state(Gst.State.NULL);
+ pipe.remove(tee);
+ tee = null;
+ }
+ debug("Destroyed device %s", id);
+ }
+}
\ No newline at end of file
diff --git a/plugins/rtp/src/module.vala b/plugins/rtp/src/module.vala
new file mode 100644
index 00000000..19a7501d
--- /dev/null
+++ b/plugins/rtp/src/module.vala
@@ -0,0 +1,237 @@
+using Gee;
+using Xmpp;
+using Xmpp.Xep;
+
+public class Dino.Plugins.Rtp.Module : JingleRtp.Module {
+ private Set<string> supported_codecs = new HashSet<string>();
+ private Set<string> unsupported_codecs = new HashSet<string>();
+ public Plugin plugin { get; private set; }
+ public CodecUtil codec_util { get {
+ return plugin.codec_util;
+ }}
+
+ public Module(Plugin plugin) {
+ base();
+ this.plugin = plugin;
+ }
+
+ private async bool pipeline_works(string media, string element_desc) {
+ var supported = false;
+ string pipeline_desc = @"$(media)testsrc is-live=true ! $element_desc ! appsink name=output";
+ try {
+ var pipeline = Gst.parse_launch(pipeline_desc);
+ var output = (pipeline as Gst.Bin).get_by_name("output") as Gst.App.Sink;
+ SourceFunc callback = pipeline_works.callback;
+ var finished = false;
+ output.emit_signals = true;
+ output.new_sample.connect(() => {
+ if (!finished) {
+ finished = true;
+ supported = true;
+ Idle.add(() => {
+ callback();
+ return Source.REMOVE;
+ });
+ }
+ return Gst.FlowReturn.EOS;
+ });
+ pipeline.bus.add_watch(Priority.DEFAULT, (_, message) => {
+ if (message.type == Gst.MessageType.ERROR && !finished) {
+ Error e;
+ string d;
+ message.parse_error(out e, out d);
+ debug("pipeline [%s] failed: %s", pipeline_desc, e.message);
+ debug(d);
+ finished = true;
+ callback();
+ }
+ return true;
+ });
+ Timeout.add(2000, () => {
+ if (!finished) {
+ finished = true;
+ callback();
+ }
+ return Source.REMOVE;
+ });
+ pipeline.set_state(Gst.State.PLAYING);
+ yield;
+ pipeline.set_state(Gst.State.NULL);
+ } catch (Error e) {
+ debug("pipeline [%s] failed: %s", pipeline_desc, e.message);
+ }
+ return supported;
+ }
+
+ private async bool is_payload_supported(string media, JingleRtp.PayloadType payload_type) {
+ string? codec = CodecUtil.get_codec_from_payload(media, payload_type);
+ if (codec == null) return false;
+ if (unsupported_codecs.contains(codec)) return false;
+ if (supported_codecs.contains(codec)) return true;
+
+ string? encode_element = codec_util.get_encode_element_name(media, codec);
+ string? decode_element = codec_util.get_decode_element_name(media, codec);
+ if (encode_element == null || decode_element == null) {
+ debug("No suitable encoder or decoder found for %s", codec);
+ unsupported_codecs.add(codec);
+ return false;
+ }
+
+ string encode_bin = codec_util.get_encode_bin_description(media, codec, null, encode_element);
+ while (!(yield pipeline_works(media, encode_bin))) {
+ debug("%s not suited for encoding %s", encode_element, codec);
+ codec_util.mark_element_unsupported(encode_element);
+ encode_element = codec_util.get_encode_element_name(media, codec);
+ if (encode_element == null) {
+ debug("No suitable encoder found for %s", codec);
+ unsupported_codecs.add(codec);
+ return false;
+ }
+ encode_bin = codec_util.get_encode_bin_description(media, codec, null, encode_element);
+ }
+ debug("using %s to encode %s", encode_element, codec);
+
+ string decode_bin = codec_util.get_decode_bin_description(media, codec, null, decode_element);
+ while (!(yield pipeline_works(media, @"$encode_bin ! $decode_bin"))) {
+ debug("%s not suited for decoding %s", decode_element, codec);
+ codec_util.mark_element_unsupported(decode_element);
+ decode_element = codec_util.get_decode_element_name(media, codec);
+ if (decode_element == null) {
+ debug("No suitable decoder found for %s", codec);
+ unsupported_codecs.add(codec);
+ return false;
+ }
+ decode_bin = codec_util.get_decode_bin_description(media, codec, null, decode_element);
+ }
+ debug("using %s to decode %s", decode_element, codec);
+
+ supported_codecs.add(codec);
+ return true;
+ }
+
+ public override bool is_header_extension_supported(string media, JingleRtp.HeaderExtension ext) {
+ if (media == "video" && ext.uri == "urn:3gpp:video-orientation") return true;
+ return false;
+ }
+
+ public override Gee.List<JingleRtp.HeaderExtension> get_suggested_header_extensions(string media) {
+ Gee.List<JingleRtp.HeaderExtension> exts = new ArrayList<JingleRtp.HeaderExtension>();
+ if (media == "video") {
+ exts.add(new JingleRtp.HeaderExtension(1, "urn:3gpp:video-orientation"));
+ }
+ return exts;
+ }
+
+ public async void add_if_supported(Gee.List<JingleRtp.PayloadType> list, string media, JingleRtp.PayloadType payload_type) {
+ if (yield is_payload_supported(media, payload_type)) {
+ list.add(payload_type);
+ }
+ }
+
+ public override async Gee.List<JingleRtp.PayloadType> get_supported_payloads(string media) {
+ Gee.List<JingleRtp.PayloadType> list = new ArrayList<JingleRtp.PayloadType>(JingleRtp.PayloadType.equals_func);
+ if (media == "audio") {
+ var opus = new JingleRtp.PayloadType() { channels = 2, clockrate = 48000, name = "opus", id = 99 };
+ opus.parameters["useinbandfec"] = "1";
+ var speex32 = new JingleRtp.PayloadType() { channels = 1, clockrate = 32000, name = "speex", id = 100 };
+ var speex16 = new JingleRtp.PayloadType() { channels = 1, clockrate = 16000, name = "speex", id = 101 };
+ var speex8 = new JingleRtp.PayloadType() { channels = 1, clockrate = 8000, name = "speex", id = 102 };
+ var pcmu = new JingleRtp.PayloadType() { channels = 1, clockrate = 8000, name = "PCMU", id = 0 };
+ var pcma = new JingleRtp.PayloadType() { channels = 1, clockrate = 8000, name = "PCMA", id = 8 };
+ yield add_if_supported(list, media, opus);
+ yield add_if_supported(list, media, speex32);
+ yield add_if_supported(list, media, speex16);
+ yield add_if_supported(list, media, speex8);
+ yield add_if_supported(list, media, pcmu);
+ yield add_if_supported(list, media, pcma);
+ } else if (media == "video") {
+ var h264 = new JingleRtp.PayloadType() { clockrate = 90000, name = "H264", id = 96 };
+ var vp9 = new JingleRtp.PayloadType() { clockrate = 90000, name = "VP9", id = 97 };
+ var vp8 = new JingleRtp.PayloadType() { clockrate = 90000, name = "VP8", id = 98 };
+ var rtcp_fbs = new ArrayList<JingleRtp.RtcpFeedback>();
+ rtcp_fbs.add(new JingleRtp.RtcpFeedback("goog-remb"));
+ rtcp_fbs.add(new JingleRtp.RtcpFeedback("ccm", "fir"));
+ rtcp_fbs.add(new JingleRtp.RtcpFeedback("nack"));
+ rtcp_fbs.add(new JingleRtp.RtcpFeedback("nack", "pli"));
+ h264.rtcp_fbs.add_all(rtcp_fbs);
+ vp9.rtcp_fbs.add_all(rtcp_fbs);
+ vp8.rtcp_fbs.add_all(rtcp_fbs);
+ yield add_if_supported(list, media, h264);
+ yield add_if_supported(list, media, vp9);
+ yield add_if_supported(list, media, vp8);
+ } else {
+ warning("Unsupported media type: %s", media);
+ }
+ return list;
+ }
+
+ public override async JingleRtp.PayloadType? pick_payload_type(string media, Gee.List<JingleRtp.PayloadType> payloads) {
+ if (media == "audio") {
+ foreach (JingleRtp.PayloadType type in payloads) {
+ if (yield is_payload_supported(media, type)) return adjust_payload_type(media, type.clone());
+ }
+ } else if (media == "video") {
+ // We prefer H.264 (best support for hardware acceleration and good overall codec quality)
+ JingleRtp.PayloadType? h264 = payloads.first_match((it) => it.name.up() == "H264");
+ if (h264 != null && yield is_payload_supported(media, h264)) return adjust_payload_type(media, h264.clone());
+ // Take first of the list that we do support otherwise
+ foreach (JingleRtp.PayloadType type in payloads) {
+ if (yield is_payload_supported(media, type)) return adjust_payload_type(media, type.clone());
+ }
+ } else {
+ warning("Unsupported media type: %s", media);
+ }
+ return null;
+ }
+
+ public JingleRtp.PayloadType adjust_payload_type(string media, JingleRtp.PayloadType type) {
+ var iter = type.rtcp_fbs.iterator();
+ while (iter.next()) {
+ var fb = iter.@get();
+ switch (fb.type_) {
+ case "goog-remb":
+ if (fb.subtype != null) iter.remove();
+ break;
+ case "ccm":
+ if (fb.subtype != "fir") iter.remove();
+ break;
+ case "nack":
+ if (fb.subtype != null && fb.subtype != "pli") iter.remove();
+ break;
+ default:
+ iter.remove();
+ break;
+ }
+ }
+ return type;
+ }
+
+ public override JingleRtp.Stream create_stream(Jingle.Content content) {
+ return plugin.open_stream(content);
+ }
+
+ public override void close_stream(JingleRtp.Stream stream) {
+ var rtp_stream = stream as Rtp.Stream;
+ plugin.close_stream(rtp_stream);
+ }
+
+ public override JingleRtp.Crypto? generate_local_crypto() {
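+ // 30 bytes = 16 byte AES-128 key + 14 byte salt, as AES_CM_128_HMAC_SHA1_80 requires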
+ uint8[] key_and_salt = new uint8[30];
+ Crypto.randomize(key_and_salt);
+ return JingleRtp.Crypto.create(JingleRtp.Crypto.AES_CM_128_HMAC_SHA1_80, key_and_salt);
+ }
+
+ public override JingleRtp.Crypto? pick_remote_crypto(Gee.List<JingleRtp.Crypto> cryptos) {
+ foreach (JingleRtp.Crypto crypto in cryptos) {
+ if (crypto.is_valid) return crypto;
+ }
+ return null;
+ }
+
+ public override JingleRtp.Crypto? pick_local_crypto(JingleRtp.Crypto? remote) {
+ if (remote == null || !remote.is_valid) return null;
+ uint8[] key_and_salt = new uint8[30];
+ Crypto.randomize(key_and_salt);
+ return remote.rekey(key_and_salt);
+ }
+}
\ No newline at end of file
diff --git a/plugins/rtp/src/participant.vala b/plugins/rtp/src/participant.vala
new file mode 100644
index 00000000..1ca13191
--- /dev/null
+++ b/plugins/rtp/src/participant.vala
@@ -0,0 +1,39 @@
+using Gee;
+using Xmpp;
+
+public class Dino.Plugins.Rtp.Participant {
+ public Jid full_jid { get; private set; }
+
+ protected Gst.Pipeline pipe;
+ private Map<Stream, uint32> ssrcs = new HashMap<Stream, uint32>();
+
+ public Participant(Gst.Pipeline pipe, Jid full_jid) {
+ this.pipe = pipe;
+ this.full_jid = full_jid;
+ }
+
+ public uint32 get_ssrc(Stream stream) {
+ if (ssrcs.has_key(stream)) {
+ return ssrcs[stream];
+ }
+ return 0;
+ }
+
+ public void set_ssrc(Stream stream, uint32 ssrc) {
+ if (ssrcs.has_key(stream)) {
+ warning("Learning ssrc %ul for %s in %s when it is already known as %ul", ssrc, full_jid.to_string(), stream.to_string(), ssrcs[stream]);
+ } else {
+ stream.on_destroy.connect(unset_ssrc);
+ }
+ ssrcs[stream] = ssrc;
+ }
+
+ public void unset_ssrc(Stream stream) {
+ ssrcs.unset(stream);
+ stream.on_destroy.disconnect(unset_ssrc);
+ }
+
+ public string to_string() {
+ return @"participant $full_jid";
+ }
+}
\ No newline at end of file
diff --git a/plugins/rtp/src/plugin.vala b/plugins/rtp/src/plugin.vala
new file mode 100644
index 00000000..19a266b1
--- /dev/null
+++ b/plugins/rtp/src/plugin.vala
@@ -0,0 +1,449 @@
+using Gee;
+using Xmpp;
+using Xmpp.Xep;
+
+public class Dino.Plugins.Rtp.Plugin : RootInterface, VideoCallPlugin, Object {
+ public Dino.Application app { get; private set; }
+ public CodecUtil codec_util { get; private set; }
+ public Gst.DeviceMonitor device_monitor { get; private set; }
+ public Gst.Pipeline pipe { get; private set; }
+ public Gst.Bin rtpbin { get; private set; }
+ public Gst.Element echoprobe { get; private set; }
+
+ private Gee.List<Stream> streams = new ArrayList<Stream>();
+ private Gee.List<Device> devices = new ArrayList<Device>();
+ // private Gee.List<Participant> participants = new ArrayList<Participant>();
+
+ public void registered(Dino.Application app) {
+ this.app = app;
+ this.codec_util = new CodecUtil();
+ app.startup.connect(startup);
+ app.add_option_group(Gst.init_get_option_group());
+ app.stream_interactor.module_manager.initialize_account_modules.connect((account, list) => {
+ list.add(new Module(this));
+ });
+ app.plugin_registry.video_call_plugin = this;
+ }
+
+ private int pause_count = 0;
+ public void pause() {
+// if (pause_count == 0) {
+// debug("Pausing pipe for modifications");
+// pipe.set_state(Gst.State.PAUSED);
+// }
+ pause_count++;
+ }
+ public void unpause() {
+ pause_count--;
+ if (pause_count == 0) {
+ debug("Continue pipe after modifications");
+ pipe.set_state(Gst.State.PLAYING);
+ }
+ if (pause_count < 0) warning("Pause count below zero!");
+ }
+
+ public void startup() {
+ device_monitor = new Gst.DeviceMonitor();
+ device_monitor.show_all = true;
+ device_monitor.get_bus().add_watch(Priority.DEFAULT, on_device_monitor_message);
+ device_monitor.start();
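+ // Skip PipeWire audio devices and monitor sources, mirroring the checks in
+ // on_device_monitor_message (likely to avoid duplicates alongside PulseAudio).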
+ foreach (Gst.Device device in device_monitor.get_devices()) {
+ if (device.properties.has_name("pipewire-proplist") && device.device_class.has_prefix("Audio/")) continue;
+ if (device.properties.get_string("device.class") == "monitor") continue;
+ if (devices.any_match((it) => it.matches(device))) continue;
+ devices.add(new Device(this, device));
+ }
+
+ pipe = new Gst.Pipeline(null);
+
+ // RTP
+ rtpbin = Gst.ElementFactory.make("rtpbin", null) as Gst.Bin;
+ if (rtpbin == null) {
+ warning("RTP not supported");
+ pipe = null;
+ return;
+ }
+ rtpbin.pad_added.connect(on_rtp_pad_added);
+ rtpbin.@set("latency", 100);
+ rtpbin.@set("do-lost", true);
+ rtpbin.@set("do-sync-event", true);
+ rtpbin.@set("drop-on-latency", true);
+ rtpbin.connect("signal::request-pt-map", request_pt_map, this);
+ pipe.add(rtpbin);
+
+#if WITH_VOICE_PROCESSOR
+ // Audio echo probe
+ echoprobe = new EchoProbe();
+ if (echoprobe != null) pipe.add(echoprobe);
+#endif
+
+ // Pipeline
+ pipe.auto_flush_bus = true;
+ pipe.bus.add_watch(GLib.Priority.DEFAULT, (_, message) => {
+ on_pipe_bus_message(message);
+ return true;
+ });
+ pipe.set_state(Gst.State.PLAYING);
+ }
+
+ private static Gst.Caps? request_pt_map(Gst.Element rtpbin, uint session, uint pt, Plugin plugin) {
+ debug("request-pt-map");
+ return null;
+ }
+
+ private void on_rtp_pad_added(Gst.Pad pad) {
+ debug("pad added: %s", pad.name);
+ if (pad.name.has_prefix("recv_rtp_src_")) {
+ string[] split = pad.name.split("_");
+ uint8 rtpid = (uint8)int.parse(split[3]);
+ foreach (Stream stream in streams) {
+ if (stream.rtpid == rtpid) {
+ stream.on_ssrc_pad_added(split[4], pad);
+ }
+ }
+ }
+ if (pad.name.has_prefix("send_rtp_src_")) {
+ string[] split = pad.name.split("_");
+ uint8 rtpid = (uint8)int.parse(split[3]);
+ debug("pad %s for stream %hhu", pad.name, rtpid);
+ foreach (Stream stream in streams) {
+ if (stream.rtpid == rtpid) {
+ stream.on_send_rtp_src_added(pad);
+ }
+ }
+ }
+ }
+
+ private void on_pipe_bus_message(Gst.Message message) {
+ switch (message.type) {
+ case Gst.MessageType.ERROR:
+ Error error;
+ string str;
+ message.parse_error(out error, out str);
+ warning("Error in pipeline: %s", error.message);
+ debug(str);
+ break;
+ case Gst.MessageType.WARNING:
+ Error error;
+ string str;
+ message.parse_warning(out error, out str);
+ warning("Warning in pipeline: %s", error.message);
+ debug(str);
+ break;
+ case Gst.MessageType.CLOCK_LOST:
+ debug("Clock lost. Restarting");
+ pipe.set_state(Gst.State.READY);
+ pipe.set_state(Gst.State.PLAYING);
+ break;
+ case Gst.MessageType.STATE_CHANGED:
+ // Ignore
+ break;
+ case Gst.MessageType.STREAM_STATUS:
+ Gst.StreamStatusType status;
+ Gst.Element owner;
+ message.parse_stream_status(out status, out owner);
+ if (owner != null) {
+ debug("%s stream changed status to %s", owner.name, status.to_string());
+ }
+ break;
+ case Gst.MessageType.ELEMENT:
+ unowned Gst.Structure struc = message.get_structure();
+ if (struc != null && message.src is Gst.Element) {
+ debug("Message from %s in pipeline: %s", ((Gst.Element)message.src).name, struc.to_string());
+ }
+ break;
+ case Gst.MessageType.NEW_CLOCK:
+ debug("New clock.");
+ break;
+ case Gst.MessageType.TAG:
+ // Ignore
+ break;
+ case Gst.MessageType.QOS:
+ // Ignore
+ break;
+ case Gst.MessageType.LATENCY:
+ if (message.src != null && message.src.name != null && message.src is Gst.Element) {
+ Gst.Query latency_query = new Gst.Query.latency();
+ if (((Gst.Element)message.src).query(latency_query)) {
+ bool live;
+ Gst.ClockTime min_latency, max_latency;
+ latency_query.parse_latency(out live, out min_latency, out max_latency);
+ debug("Latency message from %s: live=%s, min_latency=%s, max_latency=%s", message.src.name, live.to_string(), min_latency.to_string(), max_latency.to_string());
+ }
+ }
+ break;
+ default:
+ debug("Pipe bus message: %s", message.type.to_string());
+ break;
+ }
+ }
+
+ private bool on_device_monitor_message(Gst.Bus bus, Gst.Message message) {
+ Gst.Device old_device = null;
+ Gst.Device device = null;
+ Device old = null;
+ switch (message.type) {
+ case Gst.MessageType.DEVICE_ADDED:
+ message.parse_device_added(out device);
+ if (device.properties.has_name("pipewire-proplist") && device.device_class.has_prefix("Audio/")) return Source.CONTINUE;
+ if (device.properties.get_string("device.class") == "monitor") return Source.CONTINUE;
+ if (devices.any_match((it) => it.matches(device))) return Source.CONTINUE;
+ devices.add(new Device(this, device));
+ break;
+#if GST_1_16
+ case Gst.MessageType.DEVICE_CHANGED:
+ message.parse_device_changed(out device, out old_device);
+ if (device.properties.has_name("pipewire-proplist") && device.device_class.has_prefix("Audio/")) return Source.CONTINUE;
+ if (device.properties.get_string("device.class") == "monitor") return Source.CONTINUE;
+ old = devices.first_match((it) => it.matches(old_device));
+ if (old != null) old.update(device);
+ break;
+#endif
+ case Gst.MessageType.DEVICE_REMOVED:
+ message.parse_device_removed(out device);
+ if (device.properties.has_name("pipewire-proplist") && device.device_class.has_prefix("Audio/")) return Source.CONTINUE;
+ if (device.properties.get_string("device.class") == "monitor") return Source.CONTINUE;
+ old = devices.first_match((it) => it.matches(device));
+ if (old != null) devices.remove(old);
+ break;
+ }
+ if (device != null) {
+ switch (device.device_class) {
+ case "Audio/Source":
+ devices_changed("audio", false);
+ break;
+ case "Audio/Sink":
+ devices_changed("audio", true);
+ break;
+ case "Video/Source":
+ devices_changed("video", false);
+ break;
+ case "Video/Sink":
+ devices_changed("video", true);
+ break;
+ }
+ }
+ return Source.CONTINUE;
+ }
+
+ public uint8 next_free_id() {
+ uint8 rtpid = 0;
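+ // Linear scan for the lowest unused id; with fewer than 100 streams a free uint8 id always exists.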
+ while (streams.size < 100 && streams.any_match((stream) => stream.rtpid == rtpid)) {
+ rtpid++;
+ }
+ return rtpid;
+ }
+
+ // public Participant get_participant(Jid full_jid, bool self) {
+// foreach (Participant participant in participants) {
+// if (participant.full_jid.equals(full_jid)) {
+// return participant;
+// }
+// }
+// Participant participant;
+// if (self) {
+// participant = new SelfParticipant(pipe, full_jid);
+// } else {
+// participant = new Participant(pipe, full_jid);
+// }
+// participants.add(participant);
+// return participant;
+// }
+
+ public Stream open_stream(Xmpp.Xep.Jingle.Content content) {
+ var content_params = content.content_params as Xmpp.Xep.JingleRtp.Parameters;
+ if (content_params == null) return null;
+ Stream stream;
+ if (content_params.media == "video") {
+ stream = new VideoStream(this, content);
+ } else {
+ stream = new Stream(this, content);
+ }
+ streams.add(stream);
+ return stream;
+ }
+
+ public void close_stream(Stream stream) {
+ streams.remove(stream);
+ stream.destroy();
+ }
+
+ public void shutdown() {
+ device_monitor.stop();
+ pipe.set_state(Gst.State.NULL);
+ rtpbin = null;
+ pipe = null;
+ Gst.deinit();
+ }
+
+ public bool supports(string media) {
+ if (rtpbin == null) return false;
+
+ if (media == "audio") {
+ if (get_devices("audio", false).is_empty) return false;
+ if (get_devices("audio", true).is_empty) return false;
+ }
+
+ if (media == "video") {
+ if (Gst.ElementFactory.make("gtksink", null) == null) return false;
+ if (get_devices("video", false).is_empty) return false;
+ }
+
+ return true;
+ }
+
+ public VideoCallWidget? create_widget(WidgetType type) {
+ if (type == WidgetType.GTK) {
+ return new VideoWidget(this);
+ }
+ return null;
+ }
+
+ public Gee.List<MediaDevice> get_devices(string media, bool incoming) {
+ if (media == "video" && !incoming) {
+ return get_video_sources();
+ }
+
+ ArrayList<MediaDevice> result = new ArrayList<MediaDevice>();
+ foreach (Device device in devices) {
+ if (device.media == media && (incoming && device.is_sink || !incoming && device.is_source)) {
+ result.add(device);
+ }
+ }
+ if (media == "audio") {
+ // Reorder sources
+ result.sort((media_left, media_right) => {
+ Device left = media_left as Device;
+ Device right = media_right as Device;
+ if (left == null) return 1;
+ if (right == null) return -1;
+
+ bool left_is_pipewire = left.device.properties.has_name("pipewire-proplist");
+ bool right_is_pipewire = right.device.properties.has_name("pipewire-proplist");
+
+ bool left_is_default = false;
+ left.device.properties.get_boolean("is-default", out left_is_default);
+ bool right_is_default = false;
+ right.device.properties.get_boolean("is-default", out right_is_default);
+
+ // Prefer pipewire
+ if (left_is_pipewire && !right_is_pipewire) return -1;
+ if (right_is_pipewire && !left_is_pipewire) return 1;
+
+ // Prefer pulse audio default device
+ if (left_is_default && !right_is_default) return -1;
+ if (right_is_default && !left_is_default) return 1;
+
+
+ return 0;
+ });
+ }
+ return result;
+ }
+
+ public Gee.List<MediaDevice> get_video_sources() {
+ ArrayList<MediaDevice> pipewire_devices = new ArrayList<MediaDevice>();
+ ArrayList<MediaDevice> other_devices = new ArrayList<MediaDevice>();
+
+ foreach (Device device in devices) {
+ if (device.media != "video") continue;
+ if (device.is_sink) continue;
+
+ bool is_color = false;
+ for (int i = 0; i < device.device.caps.get_size(); i++) {
+ unowned Gst.Structure structure = device.device.caps.get_structure(i);
+ if (structure.has_field("format") && !structure.get_string("format").has_prefix("GRAY")) {
+ is_color = true;
+ }
+ }
+
+ // Don't allow grey-scale devices
+ if (!is_color) continue;
+
+ if (device.device.properties.has_name("pipewire-proplist")) {
+ pipewire_devices.add(device);
+ } else {
+ other_devices.add(device);
+ }
+ }
+
+ // If we have any pipewire devices, present only those. We don't want the same device listed twice via PipeWire and Video4Linux.
+ ArrayList<MediaDevice> devices = pipewire_devices.size > 0 ? pipewire_devices : other_devices;
+
+ // Reorder sources
+ devices.sort((media_left, media_right) => {
+ Device left = media_left as Device;
+ Device right = media_right as Device;
+ if (left == null) return 1;
+ if (right == null) return -1;
+
+ int left_fps = 0;
+ for (int i = 0; i < left.device.caps.get_size(); i++) {
+ unowned Gst.Structure structure = left.device.caps.get_structure(i);
+ int num = 0, den = 0;
+ if (structure.has_field("framerate") && structure.get_fraction("framerate", out num, out den)) left_fps = int.max(left_fps, num / den);
+ }
+
+ int right_fps = 0;
+ for (int i = 0; i < right.device.caps.get_size(); i++) {
+ unowned Gst.Structure structure = right.device.caps.get_structure(i);
+ int num = 0, den = 0;
+ if (structure.has_field("framerate") && structure.get_fraction("framerate", out num, out den)) right_fps = int.max(right_fps, num / den);
+ }
+
+ // More FPS is better
+ if (left_fps > right_fps) return -1;
+ if (right_fps > left_fps) return 1;
+
+ return 0;
+ });
+
+ return devices;
+ }
+
+ public Device? get_preferred_device(string media, bool incoming) {
+ foreach (MediaDevice media_device in get_devices(media, incoming)) {
+ Device? device = media_device as Device;
+ if (device != null) return device;
+ }
+ warning("No preferred device for %s %s. Media will not be processed.", incoming ? "incoming" : "outgoing", media);
+ return null;
+ }
+
+ public MediaDevice? get_device(Xmpp.Xep.JingleRtp.Stream stream, bool incoming) {
+ Stream plugin_stream = stream as Stream;
+ if (plugin_stream == null) return null;
+ if (incoming) {
+ return plugin_stream.output_device ?? get_preferred_device(stream.media, incoming);
+ } else {
+ return plugin_stream.input_device ?? get_preferred_device(stream.media, incoming);
+ }
+ }
+
+ private void dump_dot() {
+ string name = @"pipe-$(pipe.clock.get_time())-$(pipe.current_state)";
+ Gst.Debug.bin_to_dot_file(pipe, Gst.DebugGraphDetails.ALL, name);
+ debug("Stored pipe details as %s", name);
+ }
+
+ public void set_pause(Xmpp.Xep.JingleRtp.Stream stream, bool pause) {
+ Stream plugin_stream = stream as Stream;
+ if (plugin_stream == null) return;
+ if (pause) {
+ plugin_stream.pause();
+ } else {
+ plugin_stream.unpause();
+ }
+ }
+
+ public void set_device(Xmpp.Xep.JingleRtp.Stream stream, MediaDevice? device) {
+ Device real_device = device as Device;
+ Stream plugin_stream = stream as Stream;
+ if (real_device == null || plugin_stream == null) return;
+ if (real_device.is_source) {
+ plugin_stream.input_device = real_device;
+ } else if (real_device.is_sink) {
+ plugin_stream.output_device = real_device;
+ }
+ }
+}
diff --git a/plugins/rtp/src/register_plugin.vala b/plugins/rtp/src/register_plugin.vala
new file mode 100644
index 00000000..a80137ea
--- /dev/null
+++ b/plugins/rtp/src/register_plugin.vala
@@ -0,0 +1,3 @@
+public Type register_plugin(Module module) {
+ return typeof (Dino.Plugins.Rtp.Plugin);
+}
diff --git a/plugins/rtp/src/stream.vala b/plugins/rtp/src/stream.vala
new file mode 100644
index 00000000..bd8a279f
--- /dev/null
+++ b/plugins/rtp/src/stream.vala
@@ -0,0 +1,681 @@
+using Gee;
+using Xmpp;
+
+public class Dino.Plugins.Rtp.Stream : Xmpp.Xep.JingleRtp.Stream {
+ public uint8 rtpid { get; private set; }
+
+ public Plugin plugin { get; private set; }
+ public Gst.Pipeline pipe { get {
+ return plugin.pipe;
+ }}
+ public Gst.Element rtpbin { get {
+ return plugin.rtpbin;
+ }}
+ public CodecUtil codec_util { get {
+ return plugin.codec_util;
+ }}
+ private Gst.App.Sink send_rtp;
+ private Gst.App.Sink send_rtcp;
+ private Gst.App.Src recv_rtp;
+ private Gst.App.Src recv_rtcp;
+ private Gst.Element encode;
+ private Gst.RTP.BasePayload encode_pay;
+ private Gst.Element decode;
+ private Gst.RTP.BaseDepayload decode_depay;
+ private Gst.Element input;
+ private Gst.Element output;
+ private Gst.Element session;
+
+ private Device _input_device;
+ public Device input_device { get { return _input_device; } set {
+ if (!paused) {
+ if (this._input_device != null) {
+ this._input_device.unlink();
+ this._input_device = null;
+ }
+ set_input(value != null ? value.link_source() : null);
+ }
+ this._input_device = value;
+ }}
+ private Device _output_device;
+ public Device output_device { get { return _output_device; } set {
+ if (output != null) remove_output(output);
+ if (value != null) add_output(value.link_sink());
+ this._output_device = value;
+ }}
+
+ public bool created { get; private set; default = false; }
+ public bool paused { get; private set; default = false; }
+ private bool push_recv_data = false;
+ private string participant_ssrc = null;
+
+ private Gst.Pad recv_rtcp_sink_pad;
+ private Gst.Pad recv_rtp_sink_pad;
+ private Gst.Pad recv_rtp_src_pad;
+ private Gst.Pad send_rtcp_src_pad;
+ private Gst.Pad send_rtp_sink_pad;
+ private Gst.Pad send_rtp_src_pad;
+
+ private Crypto.Srtp.Session? crypto_session = new Crypto.Srtp.Session();
+
+ public Stream(Plugin plugin, Xmpp.Xep.Jingle.Content content) {
+ base(content);
+ this.plugin = plugin;
+ this.rtpid = plugin.next_free_id();
+
+ content.notify["senders"].connect_after(on_senders_changed);
+ }
+
+ public void on_senders_changed() {
+ if (sending && input == null) {
+ input_device = plugin.get_preferred_device(media, false);
+ }
+ if (receiving && output == null) {
+ output_device = plugin.get_preferred_device(media, true);
+ }
+ }
+
+ public override void create() {
+ plugin.pause();
+
+ // Create i/o if needed
+
+ if (input == null && input_device == null && sending) {
+ input_device = plugin.get_preferred_device(media, false);
+ }
+ if (output == null && output_device == null && receiving && media == "audio") {
+ output_device = plugin.get_preferred_device(media, true);
+ }
+
+ // Create app elements
+ send_rtp = Gst.ElementFactory.make("appsink", @"rtp_sink_$rtpid") as Gst.App.Sink;
+ send_rtp.async = false;
+ send_rtp.caps = CodecUtil.get_caps(media, payload_type, false);
+ send_rtp.emit_signals = true;
+ send_rtp.sync = false;
+ send_rtp.new_sample.connect(on_new_sample);
+ pipe.add(send_rtp);
+
+ send_rtcp = Gst.ElementFactory.make("appsink", @"rtcp_sink_$rtpid") as Gst.App.Sink;
+ send_rtcp.async = false;
+ send_rtcp.caps = new Gst.Caps.empty_simple("application/x-rtcp");
+ send_rtcp.emit_signals = true;
+ send_rtcp.sync = false;
+ send_rtcp.new_sample.connect(on_new_sample);
+ pipe.add(send_rtcp);
+
+ recv_rtp = Gst.ElementFactory.make("appsrc", @"rtp_src_$rtpid") as Gst.App.Src;
+ recv_rtp.caps = CodecUtil.get_caps(media, payload_type, true);
+ recv_rtp.do_timestamp = true;
+ recv_rtp.format = Gst.Format.TIME;
+ recv_rtp.is_live = true;
+ pipe.add(recv_rtp);
+
+ recv_rtcp = Gst.ElementFactory.make("appsrc", @"rtcp_src_$rtpid") as Gst.App.Src;
+ recv_rtcp.caps = new Gst.Caps.empty_simple("application/x-rtcp");
+ recv_rtcp.do_timestamp = true;
+ recv_rtcp.format = Gst.Format.TIME;
+ recv_rtcp.is_live = true;
+ pipe.add(recv_rtcp);
+
+ // Connect RTCP
+ send_rtcp_src_pad = rtpbin.get_request_pad(@"send_rtcp_src_$rtpid");
+ send_rtcp_src_pad.link(send_rtcp.get_static_pad("sink"));
+ recv_rtcp_sink_pad = rtpbin.get_request_pad(@"recv_rtcp_sink_$rtpid");
+ recv_rtcp.get_static_pad("src").link(recv_rtcp_sink_pad);
+
+ // Connect input
+ encode = codec_util.get_encode_bin(media, payload_type, @"encode_$rtpid");
+ encode_pay = (Gst.RTP.BasePayload)((Gst.Bin)encode).get_by_name(@"encode_$(rtpid)_rtp_pay");
+ pipe.add(encode);
+ send_rtp_sink_pad = rtpbin.get_request_pad(@"send_rtp_sink_$rtpid");
+ encode.get_static_pad("src").link(send_rtp_sink_pad);
+ if (input != null) {
+ input.link(encode);
+ }
+
+ // Connect output
+ decode = codec_util.get_decode_bin(media, payload_type, @"decode_$rtpid");
+ decode_depay = (Gst.RTP.BaseDepayload)((Gst.Bin)encode).get_by_name(@"decode_$(rtpid)_rtp_depay");
+ pipe.add(decode);
+ if (output != null) {
+ decode.link(output);
+ }
+
+ // Connect RTP
+ recv_rtp_sink_pad = rtpbin.get_request_pad(@"recv_rtp_sink_$rtpid");
+ recv_rtp.get_static_pad("src").link(recv_rtp_sink_pad);
+
+ created = true;
+ push_recv_data = true;
+ plugin.unpause();
+
+ GLib.Signal.emit_by_name(rtpbin, "get-session", rtpid, out session);
+ if (session != null && payload_type.rtcp_fbs.any_match((it) => it.type_ == "goog-remb")) {
+ Object internal_session;
+ session.@get("internal-session", out internal_session);
+ if (internal_session != null) {
+ internal_session.connect("signal::on-feedback-rtcp", on_feedback_rtcp, this);
+ }
+ Timeout.add(1000, () => remb_adjust());
+ }
+ if (media == "video") {
+ codec_util.update_bitrate(media, payload_type, encode, 256);
+ }
+ }
+
+ private uint remb = 256;
+ private int last_packets_lost = -1;
+ private uint64 last_packets_received;
+ private uint64 last_octets_received;
+ private bool remb_adjust() {
+ unowned Gst.Structure? stats;
+ if (session == null) {
+ debug("Session for %u finished, turning off remb adjustment", rtpid);
+ return Source.REMOVE;
+ }
+ session.get("stats", out stats);
+ if (stats == null) {
+ warning("No stats for session %u", rtpid);
+ return Source.REMOVE;
+ }
+ unowned ValueArray? source_stats;
+ stats.get("source-stats", typeof(ValueArray), out source_stats);
+ if (source_stats == null) {
+ warning("No source-stats for session %u", rtpid);
+ return Source.REMOVE;
+ }
+ foreach (Value value in source_stats.values) {
+ unowned Gst.Structure source_stat = (Gst.Structure) value.get_boxed();
+ uint ssrc;
+ if (!source_stat.get_uint("ssrc", out ssrc)) continue;
+ if (ssrc.to_string() == participant_ssrc) {
+ int packets_lost;
+ uint64 packets_received, octets_received;
+ source_stat.get_int("packets-lost", out packets_lost);
+ source_stat.get_uint64("packets-received", out packets_received);
+ source_stat.get_uint64("octets-received", out octets_received);
+ int new_lost = packets_lost - last_packets_lost;
+ uint64 new_received = packets_received - last_packets_received;
+ uint64 new_octets = octets_received - last_octets_received;
+ if (new_received == 0) continue;
+ last_packets_lost = packets_lost;
+ last_packets_received = packets_received;
+ last_octets_received = octets_received;
+ double loss_rate = (double)new_lost / (double)(new_lost + new_received);
+ if (new_lost <= 0 || loss_rate < 0.02) {
+ remb = (uint)(1.08 * (double)remb);
+ } else if (loss_rate > 0.1) {
+ remb = (uint)((1.0 - 0.5 * loss_rate) * (double)remb);
+ }
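+ // e.g. below 2% loss remb grows by 8% per tick; at 20% loss it shrinks to 0.9x.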
+ remb = uint.max(remb, (uint)((new_octets * 8) / 1000));
+ remb = uint.max(16, remb); // Never go below 16
+ uint8[] data = new uint8[] {
+ 143, 206, 0, 5,
+ 0, 0, 0, 0,
+ 0, 0, 0, 0,
+ 'R', 'E', 'M', 'B',
+ 1, 0, 0, 0,
+ 0, 0, 0, 0
+ };
+ data[4] = (uint8)((encode_pay.ssrc >> 24) & 0xff);
+ data[5] = (uint8)((encode_pay.ssrc >> 16) & 0xff);
+ data[6] = (uint8)((encode_pay.ssrc >> 8) & 0xff);
+ data[7] = (uint8)(encode_pay.ssrc & 0xff);
+ uint8 br_exp = 0;
+ uint32 br_mant = remb * 1000;
+ uint8 bits = (uint8)Math.log2(br_mant);
+ if (bits > 16) {
+ br_exp = (uint8)bits - 16;
+ br_mant = br_mant >> br_exp;
+ }
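+ // e.g. remb = 256 (kbit/s): br_mant = 256000, bits = 17, so br_exp = 1 and
+ // br_mant = 128000; the receiver reconstructs 128000 << 1 = 256000 bit/s.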
+ data[17] = (uint8)((br_exp << 2) | ((br_mant >> 16) & 0x3));
+ data[18] = (uint8)((br_mant >> 8) & 0xff);
+ data[19] = (uint8)(br_mant & 0xff);
+ data[20] = (uint8)((ssrc >> 24) & 0xff);
+ data[21] = (uint8)((ssrc >> 16) & 0xff);
+ data[22] = (uint8)((ssrc >> 8) & 0xff);
+ data[23] = (uint8)(ssrc & 0xff);
+ encrypt_and_send_rtcp(data);
+ }
+ }
+ return Source.CONTINUE;
+ }
+
+ private static void on_feedback_rtcp(Gst.Element session, uint type, uint fbtype, uint sender_ssrc, uint media_ssrc, Gst.Buffer? fci, Stream self) {
+ if (type == 206 && fbtype == 15 && fci != null && sender_ssrc.to_string() == self.participant_ssrc) {
+ // https://tools.ietf.org/html/draft-alvestrand-rmcat-remb-03
+ uint8[] data;
+ fci.extract_dup(0, fci.get_size(), out data);
+ if (data[0] != 'R' || data[1] != 'E' || data[2] != 'M' || data[3] != 'B') return;
+ uint8 br_exp = data[5] >> 2;
+ uint32 br_mant = (((uint32)data[5] & 0x3) << 16) + ((uint32)data[6] << 8) + (uint32)data[7];
+ uint bitrate = (br_mant << br_exp) / 1000;
+ self.codec_util.update_bitrate(self.media, self.payload_type, self.encode, bitrate * 8);
+ }
+ }
+
+ private void prepare_local_crypto() {
+ if (local_crypto != null && local_crypto.is_valid && !crypto_session.has_encrypt) {
+ crypto_session.set_encryption_key(local_crypto.crypto_suite, local_crypto.key, local_crypto.salt);
+ debug("Setting up encryption with key params %s", local_crypto.key_params);
+ }
+ }
+
+ private Gst.FlowReturn on_new_sample(Gst.App.Sink sink) {
+ if (sink == null) {
+ debug("Sink is null");
+ return Gst.FlowReturn.EOS;
+ }
+ Gst.Sample sample = sink.pull_sample();
+ Gst.Buffer buffer = sample.get_buffer();
+ uint8[] data;
+ buffer.extract_dup(0, buffer.get_size(), out data);
+ prepare_local_crypto();
+ if (sink == send_rtp) {
+ if (crypto_session.has_encrypt) {
+ data = crypto_session.encrypt_rtp(data);
+ }
+ on_send_rtp_data(new Bytes.take((owned) data));
+ } else if (sink == send_rtcp) {
+ encrypt_and_send_rtcp((owned) data);
+ } else {
+ warning("unknown sample");
+ }
+ return Gst.FlowReturn.OK;
+ }
+
+ private void encrypt_and_send_rtcp(owned uint8[] data) {
+ if (crypto_session.has_encrypt) {
+ data = crypto_session.encrypt_rtcp(data);
+ }
+ if (rtcp_mux) {
+ on_send_rtp_data(new Bytes.take((owned) data));
+ } else {
+ on_send_rtcp_data(new Bytes.take((owned) data));
+ }
+ }
+
+ private static Gst.PadProbeReturn drop_probe() {
+ return Gst.PadProbeReturn.DROP;
+ }
+
+ public override void destroy() {
+ // Stop network communication
+ push_recv_data = false;
+ recv_rtp.end_of_stream();
+ recv_rtcp.end_of_stream();
+ send_rtp.new_sample.disconnect(on_new_sample);
+ send_rtcp.new_sample.disconnect(on_new_sample);
+
+ // Disconnect input device
+ if (input != null) {
+ input.unlink(encode);
+ input = null;
+ }
+ if (this._input_device != null) {
+ if (!paused) this._input_device.unlink();
+ this._input_device = null;
+ }
+
+ // Disconnect encode
+ encode.set_locked_state(true);
+ encode.set_state(Gst.State.NULL);
+ encode.get_static_pad("src").unlink(send_rtp_sink_pad);
+ pipe.remove(encode);
+ encode = null;
+ encode_pay = null;
+
+ // Disconnect RTP sending
+ if (send_rtp_src_pad != null) {
+ send_rtp_src_pad.add_probe(Gst.PadProbeType.BLOCK, drop_probe);
+ send_rtp_src_pad.unlink(send_rtp.get_static_pad("sink"));
+ }
+ send_rtp.set_locked_state(true);
+ send_rtp.set_state(Gst.State.NULL);
+ pipe.remove(send_rtp);
+ send_rtp = null;
+
+ // Disconnect decode
+ if (recv_rtp_src_pad != null) {
+ recv_rtp_src_pad.add_probe(Gst.PadProbeType.BLOCK, drop_probe);
+ recv_rtp_src_pad.unlink(decode.get_static_pad("sink"));
+ }
+
+ // Disconnect RTP receiving
+ recv_rtp.set_locked_state(true);
+ recv_rtp.set_state(Gst.State.NULL);
+ recv_rtp.get_static_pad("src").unlink(recv_rtp_sink_pad);
+ pipe.remove(recv_rtp);
+ recv_rtp = null;
+
+ // Disconnect output
+ if (output != null) {
+ decode.unlink(output);
+ }
+ decode.set_locked_state(true);
+ decode.set_state(Gst.State.NULL);
+ pipe.remove(decode);
+ decode = null;
+ decode_depay = null;
+ output = null;
+
+ // Disconnect output device
+ if (this._output_device != null) {
+ this._output_device.unlink();
+ this._output_device = null;
+ }
+
+ // Disconnect RTCP receiving
+ recv_rtcp.get_static_pad("src").unlink(recv_rtcp_sink_pad);
+ recv_rtcp.set_locked_state(true);
+ recv_rtcp.set_state(Gst.State.NULL);
+ pipe.remove(recv_rtcp);
+ recv_rtcp = null;
+
+ // Disconnect RTCP sending
+ send_rtcp_src_pad.unlink(send_rtcp.get_static_pad("sink"));
+ send_rtcp.set_locked_state(true);
+ send_rtcp.set_state(Gst.State.NULL);
+ pipe.remove(send_rtcp);
+ send_rtcp = null;
+
+ // Release rtp pads
+ rtpbin.release_request_pad(send_rtp_sink_pad);
+ send_rtp_sink_pad = null;
+ rtpbin.release_request_pad(recv_rtp_sink_pad);
+ recv_rtp_sink_pad = null;
+ rtpbin.release_request_pad(recv_rtcp_sink_pad);
+ recv_rtcp_sink_pad = null;
+ rtpbin.release_request_pad(send_rtcp_src_pad);
+ send_rtcp_src_pad = null;
+ send_rtp_src_pad = null;
+ recv_rtp_src_pad = null;
+
+ session = null;
+ }
+
+ private void prepare_remote_crypto() {
+ if (remote_crypto != null && remote_crypto.is_valid && !crypto_session.has_decrypt) {
+ crypto_session.set_decryption_key(remote_crypto.crypto_suite, remote_crypto.key, remote_crypto.salt);
+ debug("Setting up decryption with key params %s", remote_crypto.key_params);
+ }
+ }
+
+ private uint16 previous_video_orientation_degree = uint16.MAX;
+ public signal void video_orientation_changed(uint16 degree);
+
+ public override void on_recv_rtp_data(Bytes bytes) {
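+        // With rtcp-mux (RFC 5761) RTP and RTCP arrive on the same transport; a
+        // second byte in the range 192-223 identifies an RTCP packet type.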
+ if (rtcp_mux && bytes.length >= 2 && bytes.get(1) >= 192 && bytes.get(1) < 224) {
+ on_recv_rtcp_data(bytes);
+ return;
+ }
+ prepare_remote_crypto();
+ uint8[] data = bytes.get_data();
+ if (crypto_session.has_decrypt) {
+ try {
+ data = crypto_session.decrypt_rtp(data);
+ } catch (Error e) {
+ warning("%s (%d)", e.message, e.code);
+ }
+ }
+ if (push_recv_data) {
+ Gst.Buffer buffer = new Gst.Buffer.wrapped((owned) data);
+ Gst.RTP.Buffer rtp_buffer;
+ if (Gst.RTP.Buffer.map(buffer, Gst.MapFlags.READ, out rtp_buffer)) {
+ if (rtp_buffer.get_extension()) {
+ Xmpp.Xep.JingleRtp.HeaderExtension? ext = header_extensions.first_match((it) => it.uri == "urn:3gpp:video-orientation");
+ if (ext != null) {
+ unowned uint8[] extension_data;
+ if (rtp_buffer.get_extension_onebyte_header(ext.id, 0, out extension_data) && extension_data.length == 1) {
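+                            // 3GPP CVO byte: bit 3 = camera, bit 2 = horizontal
+                            // flip, bits 1-0 = rotation in steps of 90 degrees.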
+ bool camera = (extension_data[0] & 0x8) > 0;
+ bool flip = (extension_data[0] & 0x4) > 0;
+ uint8 rotation = extension_data[0] & 0x3;
+ uint16 rotation_degree = uint16.MAX;
+ switch(rotation) {
+ case 0: rotation_degree = 0; break;
+ case 1: rotation_degree = 90; break;
+ case 2: rotation_degree = 180; break;
+ case 3: rotation_degree = 270; break;
+ }
+ if (rotation_degree != previous_video_orientation_degree) {
+ video_orientation_changed(rotation_degree);
+ previous_video_orientation_degree = rotation_degree;
+ }
+ }
+ }
+ }
+ rtp_buffer.unmap();
+ }
+
+            // FIXME: The VAPI file in Vala < 0.49.1 has a bug that results in broken ownership of the buffer in push_buffer().
+            // We work around it by using the plain signal. The signal unfortunately causes an unnecessary copy of
+            // the underlying buffer, so at some point we should move over to push_buffer() (once we require
+            // Vala >= 0.50).
+#if FIXED_APPSRC_PUSH_BUFFER_IN_VAPI
+ recv_rtp.push_buffer((owned) buffer);
+#else
+ Gst.FlowReturn ret;
+ GLib.Signal.emit_by_name(recv_rtp, "push-buffer", buffer, out ret);
+#endif
+ }
+ }
+
+ public override void on_recv_rtcp_data(Bytes bytes) {
+ prepare_remote_crypto();
+ uint8[] data = bytes.get_data();
+ if (crypto_session.has_decrypt) {
+ try {
+ data = crypto_session.decrypt_rtcp(data);
+ } catch (Error e) {
+ warning("%s (%d)", e.message, e.code);
+ }
+ }
+ if (push_recv_data) {
+ Gst.Buffer buffer = new Gst.Buffer.wrapped((owned) data);
+ // See above
+#if FIXED_APPSRC_PUSH_BUFFER_IN_VAPI
+ recv_rtcp.push_buffer((owned) buffer);
+#else
+ Gst.FlowReturn ret;
+ GLib.Signal.emit_by_name(recv_rtcp, "push-buffer", buffer, out ret);
+#endif
+ }
+ }
+
+ public override void on_rtp_ready() {
+        // If a full frame was sent before the connection was ready, the counterpart would only display our video after the next full frame.
+        // Send a full frame now so the counterpart can display our video as soon as possible.
+ rtpbin.send_event(new Gst.Event.custom(
+ Gst.EventType.CUSTOM_UPSTREAM,
+ new Gst.Structure("GstForceKeyUnit", "all-headers", typeof(bool), true, null))
+ );
+ }
+
+ public override void on_rtcp_ready() {
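+        // Ask the internal RTP session to send out a full RTCP packet right away
+        // now that the RTCP transport is usable.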
+ int rtp_session_id = (int) rtpid;
+ uint64 max_delay = int.MAX;
+ Object rtp_session;
+ bool rtp_sent;
+ GLib.Signal.emit_by_name(rtpbin, "get-internal-session", rtp_session_id, out rtp_session);
+ GLib.Signal.emit_by_name(rtp_session, "send-rtcp-full", max_delay, out rtp_sent);
+ debug("RTCP is ready, resending rtcp: %s", rtp_sent.to_string());
+ }
+
+ public void on_ssrc_pad_added(string ssrc, Gst.Pad pad) {
+ debug("New ssrc %s with pad %s", ssrc, pad.name);
+ if (participant_ssrc != null && participant_ssrc != ssrc) {
+ warning("Got second ssrc on stream (old: %s, new: %s), ignoring", participant_ssrc, ssrc);
+ return;
+ }
+ participant_ssrc = ssrc;
+ recv_rtp_src_pad = pad;
+ if (decode != null) {
+ plugin.pause();
+ debug("Link %s to %s decode for %s", recv_rtp_src_pad.name, media, name);
+ recv_rtp_src_pad.link(decode.get_static_pad("sink"));
+ plugin.unpause();
+ }
+ }
+
+ public void on_send_rtp_src_added(Gst.Pad pad) {
+ send_rtp_src_pad = pad;
+ if (send_rtp != null) {
+ plugin.pause();
+ debug("Link %s to %s send_rtp for %s", send_rtp_src_pad.name, media, name);
+ send_rtp_src_pad.link(send_rtp.get_static_pad("sink"));
+ plugin.unpause();
+ }
+ }
+
+ public void set_input(Gst.Element? input) {
+ set_input_and_pause(input, paused);
+ }
+
+ private void set_input_and_pause(Gst.Element? input, bool paused) {
+ if (created && this.input != null) {
+ this.input.unlink(encode);
+ this.input = null;
+ }
+
+ this.input = input;
+ this.paused = paused;
+
+ if (created && sending && !paused && input != null) {
+ plugin.pause();
+ input.link(encode);
+ plugin.unpause();
+ }
+ }
+
+ public void pause() {
+ if (paused) return;
+ set_input_and_pause(null, true);
+ if (input_device != null) input_device.unlink();
+ }
+
+ public void unpause() {
+ if (!paused) return;
+ set_input_and_pause(input_device != null ? input_device.link_source() : null, false);
+ }
+
+ ulong block_probe_handler_id = 0;
+ public virtual void add_output(Gst.Element element) {
+ if (output != null) {
+ critical("add_output() invoked more than once");
+ return;
+ }
+ this.output = element;
+ if (created) {
+ plugin.pause();
+ decode.link(element);
+ if (block_probe_handler_id != 0) {
+ decode.get_static_pad("src").remove_probe(block_probe_handler_id);
+ }
+ plugin.unpause();
+ }
+ }
+
+ public virtual void remove_output(Gst.Element element) {
+ if (output != element) {
+ critical("remove_output() invoked without prior add_output()");
+ return;
+ }
+ if (created) {
+ block_probe_handler_id = decode.get_static_pad("src").add_probe(Gst.PadProbeType.BLOCK, drop_probe);
+ decode.unlink(element);
+ }
+ if (this._output_device != null) {
+ this._output_device.unlink();
+ this._output_device = null;
+ }
+ this.output = null;
+ }
+}
+
+public class Dino.Plugins.Rtp.VideoStream : Stream {
+ private Gee.List<Gst.Element> outputs = new ArrayList<Gst.Element>();
+ private Gst.Element output_tee;
+ private Gst.Element rotate;
+ private ulong video_orientation_changed_handler;
+
+ public VideoStream(Plugin plugin, Xmpp.Xep.Jingle.Content content) {
+ base(plugin, content);
+ if (media != "video") critical("VideoStream created for non-video media");
+ }
+
+ public override void create() {
+ video_orientation_changed_handler = video_orientation_changed.connect(on_video_orientation_changed);
+ plugin.pause();
+ rotate = Gst.ElementFactory.make("videoflip", @"video_rotate_$rtpid");
+ pipe.add(rotate);
+ output_tee = Gst.ElementFactory.make("tee", @"video_tee_$rtpid");
+ output_tee.@set("allow-not-linked", true);
+ pipe.add(output_tee);
+ rotate.link(output_tee);
+ add_output(rotate);
+ base.create();
+ foreach (Gst.Element output in outputs) {
+ output_tee.link(output);
+ }
+ plugin.unpause();
+ }
+
+ private void on_video_orientation_changed(uint16 degree) {
+ if (rotate != null) {
+ switch (degree) {
+ case 0:
+ rotate.@set("method", 0);
+ break;
+ case 90:
+ rotate.@set("method", 1);
+ break;
+ case 180:
+ rotate.@set("method", 2);
+ break;
+ case 270:
+ rotate.@set("method", 3);
+ break;
+ }
+ }
+ }
+
+ public override void destroy() {
+ foreach (Gst.Element output in outputs) {
+ output_tee.unlink(output);
+ }
+ base.destroy();
+ rotate.set_locked_state(true);
+ rotate.set_state(Gst.State.NULL);
+ rotate.unlink(output_tee);
+ pipe.remove(rotate);
+ rotate = null;
+ output_tee.set_locked_state(true);
+ output_tee.set_state(Gst.State.NULL);
+ pipe.remove(output_tee);
+ output_tee = null;
+ disconnect(video_orientation_changed_handler);
+ }
+
+ public override void add_output(Gst.Element element) {
+ if (element == output_tee || element == rotate) {
+ base.add_output(element);
+ return;
+ }
+ outputs.add(element);
+ if (output_tee != null) {
+ output_tee.link(element);
+ }
+ }
+
+ public override void remove_output(Gst.Element element) {
+ if (element == output_tee || element == rotate) {
+ base.remove_output(element);
+ return;
+ }
+ outputs.remove(element);
+ if (output_tee != null) {
+ output_tee.unlink(element);
+ }
+ }
+} \ No newline at end of file
diff --git a/plugins/rtp/src/video_widget.vala b/plugins/rtp/src/video_widget.vala
new file mode 100644
index 00000000..351069a7
--- /dev/null
+++ b/plugins/rtp/src/video_widget.vala
@@ -0,0 +1,110 @@
+public class Dino.Plugins.Rtp.VideoWidget : Gtk.Bin, Dino.Plugins.VideoCallWidget {
+ private static uint last_id = 0;
+
+ public uint id { get; private set; }
+ public Gst.Element element { get; private set; }
+ public Gtk.Widget widget { get; private set; }
+
+ public Plugin plugin { get; private set; }
+ public Gst.Pipeline pipe { get {
+ return plugin.pipe;
+ }}
+
+ private bool attached;
+ private Device? connected_device;
+ private Stream? connected_stream;
+ private Gst.Element convert;
+
+ public VideoWidget(Plugin plugin) {
+ this.plugin = plugin;
+
+ id = last_id++;
+ element = Gst.ElementFactory.make("gtksink", @"video_widget_$id");
+ if (element != null) {
+ Gtk.Widget widget;
+ element.@get("widget", out widget);
+ element.@set("async", false);
+ element.@set("sync", false);
+ this.widget = widget;
+ add(widget);
+ widget.visible = true;
+
+ // Listen for resolution changes
+ element.get_static_pad("sink").notify["caps"].connect(() => {
+ if (element.get_static_pad("sink").caps == null) return;
+
+ int width, height;
+ element.get_static_pad("sink").caps.get_structure(0).get_int("width", out width);
+ element.get_static_pad("sink").caps.get_structure(0).get_int("height", out height);
+ resolution_changed(width, height);
+ });
+ } else {
+ warning("Could not create GTK video sink. Won't display videos.");
+ }
+ }
+
+ public void display_stream(Xmpp.Xep.JingleRtp.Stream stream) {
+ if (element == null) return;
+ detach();
+ if (stream.media != "video") return;
+ connected_stream = stream as Stream;
+ if (connected_stream == null) return;
+ plugin.pause();
+ pipe.add(element);
+ convert = Gst.parse_bin_from_description(@"videoconvert name=video_widget_$(id)_convert", true);
+ convert.name = @"video_widget_$(id)_prepare";
+ pipe.add(convert);
+ convert.link(element);
+ connected_stream.add_output(convert);
+ element.set_locked_state(false);
+ plugin.unpause();
+ attached = true;
+ }
+
+ public void display_device(MediaDevice media_device) {
+ if (element == null) return;
+ detach();
+ connected_device = media_device as Device;
+ if (connected_device == null) return;
+ plugin.pause();
+ pipe.add(element);
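+        // Flip the local camera preview horizontally so it behaves like a mirror.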
+ convert = Gst.parse_bin_from_description(@"videoflip method=horizontal-flip name=video_widget_$(id)_flip ! videoconvert name=video_widget_$(id)_convert", true);
+ convert.name = @"video_widget_$(id)_prepare";
+ pipe.add(convert);
+ convert.link(element);
+ connected_device.link_source().link(convert);
+ element.set_locked_state(false);
+ plugin.unpause();
+ attached = true;
+ }
+
+ public void detach() {
+ if (element == null) return;
+ if (attached) {
+ if (connected_stream != null) {
+ connected_stream.remove_output(convert);
+ connected_stream = null;
+ }
+ if (connected_device != null) {
+ connected_device.link_source().unlink(element);
+                connected_device.unlink(); // link_source() above took an additional link just to recover the element, so unlink twice
+ connected_device.unlink();
+ connected_device = null;
+ }
+ convert.set_locked_state(true);
+ convert.set_state(Gst.State.NULL);
+ pipe.remove(convert);
+ convert = null;
+ element.set_locked_state(true);
+ element.set_state(Gst.State.NULL);
+ pipe.remove(element);
+ attached = false;
+ }
+ }
+
+ public override void dispose() {
+ detach();
+ widget = null;
+ element = null;
+ }
+} \ No newline at end of file
diff --git a/plugins/rtp/src/voice_processor.vala b/plugins/rtp/src/voice_processor.vala
new file mode 100644
index 00000000..66e95d72
--- /dev/null
+++ b/plugins/rtp/src/voice_processor.vala
@@ -0,0 +1,176 @@
+using Gst;
+
+namespace Dino.Plugins.Rtp {
+public static extern Buffer adjust_to_running_time(Base.Transform transform, Buffer buf);
+}
+
+public class Dino.Plugins.Rtp.EchoProbe : Audio.Filter {
+ private static StaticPadTemplate sink_template = {"sink", PadDirection.SINK, PadPresence.ALWAYS, {null, "audio/x-raw,rate=48000,channels=1,layout=interleaved,format=S16LE"}};
+ private static StaticPadTemplate src_template = {"src", PadDirection.SRC, PadPresence.ALWAYS, {null, "audio/x-raw,rate=48000,channels=1,layout=interleaved,format=S16LE"}};
+ public Audio.Info audio_info { get; private set; }
+ public signal void on_new_buffer(Buffer buffer);
+ private uint period_samples;
+ private uint period_size;
+ private Base.Adapter adapter = new Base.Adapter();
+
+ static construct {
+ add_static_pad_template(sink_template);
+ add_static_pad_template(src_template);
+ set_static_metadata("Acoustic Echo Canceller probe", "Generic/Audio", "Gathers playback buffers for echo cancellation", "Dino Team <contact@dino.im>");
+ }
+
+ construct {
+ set_passthrough(true);
+ }
+
+ public override bool setup(Audio.Info info) {
+ audio_info = info;
+ period_samples = info.rate / 100; // 10ms buffers
+ period_size = period_samples * info.bpf;
+ return true;
+ }
+
+
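+    // Accumulate the playback stream and hand it out in 10ms chunks; the
+    // VoiceProcessor feeds these to echo cancellation as the reverse stream.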
+ public override FlowReturn transform_ip(Buffer buf) {
+ lock (adapter) {
+ adapter.push(adjust_to_running_time(this, buf));
+ while (adapter.available() > period_size) {
+ on_new_buffer(adapter.take_buffer(period_size));
+ }
+ }
+ return FlowReturn.OK;
+ }
+
+ public override bool stop() {
+ adapter.clear();
+ return true;
+ }
+}
+
+public class Dino.Plugins.Rtp.VoiceProcessor : Audio.Filter {
+ private static StaticPadTemplate sink_template = {"sink", PadDirection.SINK, PadPresence.ALWAYS, {null, "audio/x-raw,rate=48000,channels=1,layout=interleaved,format=S16LE"}};
+ private static StaticPadTemplate src_template = {"src", PadDirection.SRC, PadPresence.ALWAYS, {null, "audio/x-raw,rate=48000,channels=1,layout=interleaved,format=S16LE"}};
+ public Audio.Info audio_info { get; private set; }
+ private ulong process_outgoing_buffer_handler_id;
+ private uint adjust_delay_timeout_id;
+ private uint period_samples;
+ private uint period_size;
+ private Base.Adapter adapter = new Base.Adapter();
+ private EchoProbe? echo_probe;
+ private Audio.StreamVolume? stream_volume;
+ private ClockTime last_reverse;
+ private void* native;
+
+ static construct {
+ add_static_pad_template(sink_template);
+ add_static_pad_template(src_template);
+ set_static_metadata("Voice Processor (AGC, AEC, filters, etc.)", "Generic/Audio", "Pre-processes voice with WebRTC Audio Processing Library", "Dino Team <contact@dino.im>");
+ }
+
+ construct {
+ set_passthrough(false);
+ }
+
+ public VoiceProcessor(EchoProbe? echo_probe = null, Audio.StreamVolume? stream_volume = null) {
+ this.echo_probe = echo_probe;
+ this.stream_volume = stream_volume;
+ }
+
+ private static extern void* init_native(int stream_delay);
+ private static extern void setup_native(void* native);
+ private static extern void destroy_native(void* native);
+ private static extern void analyze_reverse_stream(void* native, Audio.Info info, Buffer buffer);
+ private static extern void process_stream(void* native, Audio.Info info, Buffer buffer);
+ private static extern void adjust_stream_delay(void* native);
+ private static extern void notify_gain_level(void* native, int gain_level);
+ private static extern int get_suggested_gain_level(void* native);
+ private static extern bool get_stream_has_voice(void* native);
+
+ public override bool setup(Audio.Info info) {
+ debug("VoiceProcessor.setup(%s)", info.to_caps().to_string());
+ audio_info = info;
+ period_samples = info.rate / 100; // 10ms buffers
+ period_size = period_samples * info.bpf;
+ adapter.clear();
+ setup_native(native);
+ return true;
+ }
+
+ public override bool start() {
+ native = init_native(150);
+ if (process_outgoing_buffer_handler_id == 0 && echo_probe != null) {
+ process_outgoing_buffer_handler_id = echo_probe.on_new_buffer.connect(process_outgoing_buffer);
+ }
+ if (stream_volume == null && sinkpad.get_peer() != null && sinkpad.get_peer().get_parent_element() is Audio.StreamVolume) {
+ stream_volume = sinkpad.get_peer().get_parent_element() as Audio.StreamVolume;
+ }
+ return true;
+ }
+
+ private bool adjust_delay() {
+ if (native != null) {
+ adjust_stream_delay(native);
+ return Source.CONTINUE;
+ } else {
+ adjust_delay_timeout_id = 0;
+ return Source.REMOVE;
+ }
+ }
+
+ private void process_outgoing_buffer(Buffer buffer) {
+ if (buffer.pts != uint64.MAX) {
+ last_reverse = buffer.pts;
+ }
+ analyze_reverse_stream(native, echo_probe.audio_info, buffer);
+ if (adjust_delay_timeout_id == 0 && echo_probe != null) {
+ adjust_delay_timeout_id = Timeout.add(1000, adjust_delay);
+ }
+ }
+
+ public override FlowReturn submit_input_buffer(bool is_discont, Buffer input) {
+ lock (adapter) {
+ if (is_discont) {
+ adapter.clear();
+ }
+ adapter.push(adjust_to_running_time(this, input));
+ }
+ return FlowReturn.OK;
+ }
+
+ public override FlowReturn generate_output(out Buffer output_buffer) {
+ lock (adapter) {
+ if (adapter.available() >= period_size) {
+ output_buffer = (Gst.Buffer) adapter.take_buffer(period_size).make_writable();
+ int old_gain_level = 0;
+ if (stream_volume != null) {
+ old_gain_level = (int) (stream_volume.get_volume(Audio.StreamVolumeFormat.LINEAR) * 255.0);
+ notify_gain_level(native, old_gain_level);
+ }
+ process_stream(native, audio_info, output_buffer);
+ if (stream_volume != null) {
+ int new_gain_level = get_suggested_gain_level(native);
+ if (old_gain_level != new_gain_level) {
+ debug("Gain: %i -> %i", old_gain_level, new_gain_level);
+ stream_volume.set_volume(Audio.StreamVolumeFormat.LINEAR, ((double)new_gain_level) / 255.0);
+ }
+ }
+ }
+ }
+ return FlowReturn.OK;
+ }
+
+ public override bool stop() {
+ if (process_outgoing_buffer_handler_id != 0) {
+ echo_probe.disconnect(process_outgoing_buffer_handler_id);
+ process_outgoing_buffer_handler_id = 0;
+ }
+ if (adjust_delay_timeout_id != 0) {
+ Source.remove(adjust_delay_timeout_id);
+ adjust_delay_timeout_id = 0;
+ }
+ adapter.clear();
+ destroy_native(native);
+ native = null;
+ return true;
+ }
+} \ No newline at end of file
diff --git a/plugins/rtp/src/voice_processor_native.cpp b/plugins/rtp/src/voice_processor_native.cpp
new file mode 100644
index 00000000..8a052cf8
--- /dev/null
+++ b/plugins/rtp/src/voice_processor_native.cpp
@@ -0,0 +1,148 @@
+#include <algorithm>
+#include <gst/gst.h>
+#include <gst/audio/audio.h>
+#include <webrtc/modules/audio_processing/include/audio_processing.h>
+#include <webrtc/modules/interface/module_common_types.h>
+#include <webrtc/system_wrappers/include/trace.h>
+
+#define SAMPLE_RATE 48000
+#define SAMPLE_CHANNELS 1
+
+struct _DinoPluginsRtpVoiceProcessorNative {
+ webrtc::AudioProcessing *apm;
+ gint stream_delay;
+ gint last_median;
+ gint last_poor_delays;
+};
+
+extern "C" void *dino_plugins_rtp_adjust_to_running_time(GstBaseTransform *transform, GstBuffer *buffer) {
+ GstBuffer *copy = gst_buffer_copy(buffer);
+ GST_BUFFER_PTS(copy) = gst_segment_to_running_time(&transform->segment, GST_FORMAT_TIME, GST_BUFFER_PTS(buffer));
+ return copy;
+}
+
+extern "C" void *dino_plugins_rtp_voice_processor_init_native(gint stream_delay) {
+ _DinoPluginsRtpVoiceProcessorNative *native = new _DinoPluginsRtpVoiceProcessorNative();
+ webrtc::Config config;
+ config.Set<webrtc::ExtendedFilter>(new webrtc::ExtendedFilter(true));
+ config.Set<webrtc::ExperimentalAgc>(new webrtc::ExperimentalAgc(true, 85));
+ native->apm = webrtc::AudioProcessing::Create(config);
+ native->stream_delay = stream_delay;
+ native->last_median = 0;
+ native->last_poor_delays = 0;
+ return native;
+}
+
+extern "C" void dino_plugins_rtp_voice_processor_setup_native(void *native_ptr) {
+ _DinoPluginsRtpVoiceProcessorNative *native = (_DinoPluginsRtpVoiceProcessorNative *) native_ptr;
+ webrtc::AudioProcessing *apm = native->apm;
+ webrtc::ProcessingConfig pconfig;
+ pconfig.streams[webrtc::ProcessingConfig::kInputStream] =
+ webrtc::StreamConfig(SAMPLE_RATE, SAMPLE_CHANNELS, false);
+ pconfig.streams[webrtc::ProcessingConfig::kOutputStream] =
+ webrtc::StreamConfig(SAMPLE_RATE, SAMPLE_CHANNELS, false);
+ pconfig.streams[webrtc::ProcessingConfig::kReverseInputStream] =
+ webrtc::StreamConfig(SAMPLE_RATE, SAMPLE_CHANNELS, false);
+ pconfig.streams[webrtc::ProcessingConfig::kReverseOutputStream] =
+ webrtc::StreamConfig(SAMPLE_RATE, SAMPLE_CHANNELS, false);
+ apm->Initialize(pconfig);
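+    // Enable the processing chain: high-pass filter, echo cancellation (moderate
+    // suppression, delay logging), moderate noise suppression, adaptive analog
+    // gain control with limiter, and voice activity detection.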
+ apm->high_pass_filter()->Enable(true);
+ apm->echo_cancellation()->enable_drift_compensation(false);
+ apm->echo_cancellation()->set_suppression_level(webrtc::EchoCancellation::kModerateSuppression);
+ apm->echo_cancellation()->enable_delay_logging(true);
+ apm->echo_cancellation()->Enable(true);
+ apm->noise_suppression()->set_level(webrtc::NoiseSuppression::kModerate);
+ apm->noise_suppression()->Enable(true);
+ apm->gain_control()->set_analog_level_limits(0, 255);
+ apm->gain_control()->set_mode(webrtc::GainControl::kAdaptiveAnalog);
+ apm->gain_control()->set_target_level_dbfs(3);
+ apm->gain_control()->set_compression_gain_db(9);
+ apm->gain_control()->enable_limiter(true);
+ apm->gain_control()->Enable(true);
+ apm->voice_detection()->set_likelihood(webrtc::VoiceDetection::Likelihood::kLowLikelihood);
+ apm->voice_detection()->Enable(true);
+}
+
+extern "C" void
+dino_plugins_rtp_voice_processor_analyze_reverse_stream(void *native_ptr, GstAudioInfo *info, GstBuffer *buffer) {
+ _DinoPluginsRtpVoiceProcessorNative *native = (_DinoPluginsRtpVoiceProcessorNative *) native_ptr;
+ webrtc::StreamConfig config(SAMPLE_RATE, SAMPLE_CHANNELS, false);
+ webrtc::AudioProcessing *apm = native->apm;
+
+ GstMapInfo map;
+ gst_buffer_map(buffer, &map, GST_MAP_READ);
+
+ webrtc::AudioFrame frame;
+ frame.num_channels_ = info->channels;
+ frame.sample_rate_hz_ = info->rate;
+ frame.samples_per_channel_ = gst_buffer_get_size(buffer) / info->bpf;
+ memcpy(frame.data_, map.data, frame.samples_per_channel_ * info->bpf);
+
+ int err = apm->AnalyzeReverseStream(&frame);
+    if (err < 0) g_warning("voice_processor_native.cpp: AnalyzeReverseStream %i", err);
+
+ gst_buffer_unmap(buffer, &map);
+}
+
+extern "C" void dino_plugins_rtp_voice_processor_notify_gain_level(void *native_ptr, gint gain_level) {
+ _DinoPluginsRtpVoiceProcessorNative *native = (_DinoPluginsRtpVoiceProcessorNative *) native_ptr;
+ webrtc::AudioProcessing *apm = native->apm;
+ apm->gain_control()->set_stream_analog_level(gain_level);
+}
+
+extern "C" gint dino_plugins_rtp_voice_processor_get_suggested_gain_level(void *native_ptr) {
+ _DinoPluginsRtpVoiceProcessorNative *native = (_DinoPluginsRtpVoiceProcessorNative *) native_ptr;
+ webrtc::AudioProcessing *apm = native->apm;
+ return apm->gain_control()->stream_analog_level();
+}
+
+extern "C" bool dino_plugins_rtp_voice_processor_get_stream_has_voice(void *native_ptr) {
+ _DinoPluginsRtpVoiceProcessorNative *native = (_DinoPluginsRtpVoiceProcessorNative *) native_ptr;
+ webrtc::AudioProcessing *apm = native->apm;
+ return apm->voice_detection()->stream_has_voice();
+}
+
+extern "C" void dino_plugins_rtp_voice_processor_adjust_stream_delay(void *native_ptr) {
+ _DinoPluginsRtpVoiceProcessorNative *native = (_DinoPluginsRtpVoiceProcessorNative *) native_ptr;
+ webrtc::AudioProcessing *apm = native->apm;
+ int median, std, poor_delays;
+ float fraction_poor_delays;
+ apm->echo_cancellation()->GetDelayMetrics(&median, &std, &fraction_poor_delays);
+ poor_delays = (int)(fraction_poor_delays * 100.0);
+ if (fraction_poor_delays < 0 || (native->last_median == median && native->last_poor_delays == poor_delays)) return;
+ g_debug("voice_processor_native.cpp: Stream delay metrics: median=%i std=%i poor_delays=%i%%", median, std, poor_delays);
+ native->last_median = median;
+ native->last_poor_delays = poor_delays;
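+    // If more than 90% of the measured delays are poor, nudge the configured
+    // stream delay by the median (at most 48ms per step), clamped to [0, 384]ms.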
+ if (poor_delays > 90) {
+ native->stream_delay = std::min(std::max(0, native->stream_delay + std::min(48, std::max(median, -48))), 384);
+ g_debug("voice_processor_native.cpp: set stream_delay=%i", native->stream_delay);
+ }
+}
+
+extern "C" void
+dino_plugins_rtp_voice_processor_process_stream(void *native_ptr, GstAudioInfo *info, GstBuffer *buffer) {
+ _DinoPluginsRtpVoiceProcessorNative *native = (_DinoPluginsRtpVoiceProcessorNative *) native_ptr;
+ webrtc::StreamConfig config(SAMPLE_RATE, SAMPLE_CHANNELS, false);
+ webrtc::AudioProcessing *apm = native->apm;
+
+ GstMapInfo map;
+ gst_buffer_map(buffer, &map, GST_MAP_READWRITE);
+
+ webrtc::AudioFrame frame;
+ frame.num_channels_ = info->channels;
+ frame.sample_rate_hz_ = info->rate;
+ frame.samples_per_channel_ = info->rate / 100;
+ memcpy(frame.data_, map.data, frame.samples_per_channel_ * info->bpf);
+
+ apm->set_stream_delay_ms(native->stream_delay);
+ int err = apm->ProcessStream(&frame);
+ if (err >= 0) memcpy(map.data, frame.data_, frame.samples_per_channel_ * info->bpf);
+ if (err < 0) g_warning("voice_processor_native.cpp: ProcessStream %i", err);
+
+ gst_buffer_unmap(buffer, &map);
+}
+
+extern "C" void dino_plugins_rtp_voice_processor_destroy_native(void *native_ptr) {
+ _DinoPluginsRtpVoiceProcessorNative *native = (_DinoPluginsRtpVoiceProcessorNative *) native_ptr;
+ delete native;
+} \ No newline at end of file
diff --git a/plugins/rtp/vapi/gstreamer-rtp-1.0.vapi b/plugins/rtp/vapi/gstreamer-rtp-1.0.vapi
new file mode 100644
index 00000000..30490896
--- /dev/null
+++ b/plugins/rtp/vapi/gstreamer-rtp-1.0.vapi
@@ -0,0 +1,625 @@
+// FIXME: This is fetched from Vala upstream development code, which fixes a few bugs.
+/* gstreamer-rtp-1.0.vapi generated by vapigen, do not modify. */
+
+[CCode (cprefix = "Gst", gir_namespace = "GstRtp", gir_version = "1.0", lower_case_cprefix = "gst_")]
+namespace Gst {
+ namespace RTCP {
+ [CCode (cheader_filename = "gst/rtp/rtp.h", has_type_id = false)]
+ [GIR (name = "RTCPBuffer")]
+ public struct Buffer {
+ public weak Gst.Buffer buffer;
+ public bool add_packet (Gst.RTCP.Type type, Gst.RTCP.Packet packet);
+ public bool get_first_packet (Gst.RTCP.Packet packet);
+ public uint get_packet_count ();
+ public static bool map (Gst.Buffer buffer, Gst.MapFlags flags, out Gst.RTCP.Buffer rtcp);
+ public static Gst.Buffer @new (uint mtu);
+ public static Gst.Buffer new_copy_data ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "guint")] uint8[] data);
+ public static Gst.Buffer new_take_data ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "guint")] owned uint8[] data);
+ public bool unmap ();
+ public static bool validate (Gst.Buffer buffer);
+ public static bool validate_data ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "guint")] uint8[] data);
+ [Version (since = "1.6")]
+ public static bool validate_data_reduced ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "guint")] uint8[] data);
+ [Version (since = "1.6")]
+ public static bool validate_reduced (Gst.Buffer buffer);
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", has_type_id = false)]
+ [GIR (name = "RTCPPacket")]
+ public struct Packet {
+ public weak Gst.RTCP.Buffer? rtcp;
+ public uint offset;
+ [Version (since = "1.10")]
+ public bool add_profile_specific_ext ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "guint")] uint8[] data);
+ public bool add_rb (uint32 ssrc, uint8 fractionlost, int32 packetslost, uint32 exthighestseq, uint32 jitter, uint32 lsr, uint32 dlsr);
+ [Version (since = "1.10")]
+ public uint8 app_get_data ();
+ [Version (since = "1.10")]
+ public uint16 app_get_data_length ();
+ [Version (since = "1.10")]
+ public unowned string app_get_name ();
+ [Version (since = "1.10")]
+ public uint32 app_get_ssrc ();
+ [Version (since = "1.10")]
+ public uint8 app_get_subtype ();
+ [Version (since = "1.10")]
+ public bool app_set_data_length (uint16 wordlen);
+ [Version (since = "1.10")]
+ public void app_set_name (string name);
+ [Version (since = "1.10")]
+ public void app_set_ssrc (uint32 ssrc);
+ [Version (since = "1.10")]
+ public void app_set_subtype (uint8 subtype);
+ public bool bye_add_ssrc (uint32 ssrc);
+ public bool bye_add_ssrcs ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "guint")] uint32[] ssrc);
+ public uint32 bye_get_nth_ssrc (uint nth);
+ public string bye_get_reason ();
+ public uint8 bye_get_reason_len ();
+ public uint bye_get_ssrc_count ();
+ public bool bye_set_reason (string reason);
+ [Version (since = "1.10")]
+ public bool copy_profile_specific_ext ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "guint")] out uint8[] data);
+ public uint8 fb_get_fci ();
+ public uint16 fb_get_fci_length ();
+ public uint32 fb_get_media_ssrc ();
+ public uint32 fb_get_sender_ssrc ();
+ public Gst.RTCP.FBType fb_get_type ();
+ public bool fb_set_fci_length (uint16 wordlen);
+ public void fb_set_media_ssrc (uint32 ssrc);
+ public void fb_set_sender_ssrc (uint32 ssrc);
+ public void fb_set_type (Gst.RTCP.FBType type);
+ public uint8 get_count ();
+ public uint16 get_length ();
+ public bool get_padding ();
+ [Version (since = "1.10")]
+ public bool get_profile_specific_ext ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "guint")] out unowned uint8[] data);
+ [Version (since = "1.10")]
+ public uint16 get_profile_specific_ext_length ();
+ public void get_rb (uint nth, out uint32 ssrc, out uint8 fractionlost, out int32 packetslost, out uint32 exthighestseq, out uint32 jitter, out uint32 lsr, out uint32 dlsr);
+ public uint get_rb_count ();
+ public Gst.RTCP.Type get_type ();
+ public bool move_to_next ();
+ public bool remove ();
+ public uint32 rr_get_ssrc ();
+ public void rr_set_ssrc (uint32 ssrc);
+ public bool sdes_add_entry (Gst.RTCP.SDESType type, [CCode (array_length_cname = "len", array_length_pos = 1.5, array_length_type = "guint8")] uint8[] data);
+ public bool sdes_add_item (uint32 ssrc);
+ public bool sdes_copy_entry (out Gst.RTCP.SDESType type, [CCode (array_length_cname = "len", array_length_pos = 1.5, array_length_type = "guint8")] out uint8[] data);
+ public bool sdes_first_entry ();
+ public bool sdes_first_item ();
+ public bool sdes_get_entry (out Gst.RTCP.SDESType type, [CCode (array_length_cname = "len", array_length_pos = 1.5, array_length_type = "guint8")] out unowned uint8[] data);
+ public uint sdes_get_item_count ();
+ public uint32 sdes_get_ssrc ();
+ public bool sdes_next_entry ();
+ public bool sdes_next_item ();
+ public void set_rb (uint nth, uint32 ssrc, uint8 fractionlost, int32 packetslost, uint32 exthighestseq, uint32 jitter, uint32 lsr, uint32 dlsr);
+ public void sr_get_sender_info (out uint32 ssrc, out uint64 ntptime, out uint32 rtptime, out uint32 packet_count, out uint32 octet_count);
+ public void sr_set_sender_info (uint32 ssrc, uint64 ntptime, uint32 rtptime, uint32 packet_count, uint32 octet_count);
+ [Version (since = "1.16")]
+ public bool xr_first_rb ();
+ [Version (since = "1.16")]
+ public uint16 xr_get_block_length ();
+ [Version (since = "1.16")]
+ public Gst.RTCP.XRType xr_get_block_type ();
+ [Version (since = "1.16")]
+ public bool xr_get_dlrr_block (uint nth, out uint32 ssrc, out uint32 last_rr, out uint32 delay);
+ [Version (since = "1.16")]
+ public bool xr_get_prt_by_seq (uint16 seq, out uint32 receipt_time);
+ [Version (since = "1.16")]
+ public bool xr_get_prt_info (out uint32 ssrc, out uint8 thinning, out uint16 begin_seq, out uint16 end_seq);
+ [Version (since = "1.16")]
+ public bool xr_get_rle_info (out uint32 ssrc, out uint8 thinning, out uint16 begin_seq, out uint16 end_seq, out uint32 chunk_count);
+ [Version (since = "1.16")]
+ public bool xr_get_rle_nth_chunk (uint nth, out uint16 chunk);
+ [Version (since = "1.16")]
+ public bool xr_get_rrt (out uint64 timestamp);
+ [Version (since = "1.16")]
+ public uint32 xr_get_ssrc ();
+ [Version (since = "1.16")]
+ public bool xr_get_summary_info (out uint32 ssrc, out uint16 begin_seq, out uint16 end_seq);
+ [Version (since = "1.16")]
+ public bool xr_get_summary_jitter (out uint32 min_jitter, out uint32 max_jitter, out uint32 mean_jitter, out uint32 dev_jitter);
+ [Version (since = "1.16")]
+ public bool xr_get_summary_pkt (out uint32 lost_packets, out uint32 dup_packets);
+ [Version (since = "1.16")]
+ public bool xr_get_summary_ttl (out bool is_ipv4, out uint8 min_ttl, out uint8 max_ttl, out uint8 mean_ttl, out uint8 dev_ttl);
+ [Version (since = "1.16")]
+ public bool xr_get_voip_burst_metrics (out uint8 burst_density, out uint8 gap_density, out uint16 burst_duration, out uint16 gap_duration);
+ [Version (since = "1.16")]
+ public bool xr_get_voip_configuration_params (out uint8 gmin, out uint8 rx_config);
+ [Version (since = "1.16")]
+ public bool xr_get_voip_delay_metrics (out uint16 roundtrip_delay, out uint16 end_system_delay);
+ [Version (since = "1.16")]
+ public bool xr_get_voip_jitter_buffer_params (out uint16 jb_nominal, out uint16 jb_maximum, out uint16 jb_abs_max);
+ [Version (since = "1.16")]
+ public bool xr_get_voip_metrics_ssrc (out uint32 ssrc);
+ [Version (since = "1.16")]
+ public bool xr_get_voip_packet_metrics (out uint8 loss_rate, out uint8 discard_rate);
+ [Version (since = "1.16")]
+ public bool xr_get_voip_quality_metrics (out uint8 r_factor, out uint8 ext_r_factor, out uint8 mos_lq, out uint8 mos_cq);
+ [Version (since = "1.16")]
+ public bool xr_get_voip_signal_metrics (out uint8 signal_level, out uint8 noise_level, out uint8 rerl, out uint8 gmin);
+ [Version (since = "1.16")]
+ public bool xr_next_rb ();
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTCP_", type_id = "gst_rtcpfb_type_get_type ()")]
+ [GIR (name = "RTCPFBType")]
+ public enum FBType {
+ FB_TYPE_INVALID,
+ RTPFB_TYPE_NACK,
+ RTPFB_TYPE_TMMBR,
+ RTPFB_TYPE_TMMBN,
+ RTPFB_TYPE_RTCP_SR_REQ,
+ RTPFB_TYPE_TWCC,
+ PSFB_TYPE_PLI,
+ PSFB_TYPE_SLI,
+ PSFB_TYPE_RPSI,
+ PSFB_TYPE_AFB,
+ PSFB_TYPE_FIR,
+ PSFB_TYPE_TSTR,
+ PSFB_TYPE_TSTN,
+ PSFB_TYPE_VBCN
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTCP_SDES_", type_id = "gst_rtcpsdes_type_get_type ()")]
+ [GIR (name = "RTCPSDESType")]
+ public enum SDESType {
+ INVALID,
+ END,
+ CNAME,
+ NAME,
+ EMAIL,
+ PHONE,
+ LOC,
+ TOOL,
+ NOTE,
+ PRIV;
+ [CCode (cname = "gst_rtcp_sdes_name_to_type")]
+ public static Gst.RTCP.SDESType from_string (string name);
+ [CCode (cname = "gst_rtcp_sdes_type_to_name")]
+ public unowned string to_string ();
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTCP_TYPE_", type_id = "gst_rtcp_type_get_type ()")]
+ [GIR (name = "RTCPType")]
+ public enum Type {
+ INVALID,
+ SR,
+ RR,
+ SDES,
+ BYE,
+ APP,
+ RTPFB,
+ PSFB,
+ XR
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTCP_XR_TYPE_", type_id = "gst_rtcpxr_type_get_type ()")]
+ [GIR (name = "RTCPXRType")]
+ [Version (since = "1.16")]
+ public enum XRType {
+ INVALID,
+ LRLE,
+ DRLE,
+ PRT,
+ RRT,
+ DLRR,
+ SSUMM,
+ VOIP_METRICS
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTCP_MAX_BYE_SSRC_COUNT")]
+ public const int MAX_BYE_SSRC_COUNT;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTCP_MAX_RB_COUNT")]
+ public const int MAX_RB_COUNT;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTCP_MAX_SDES")]
+ public const int MAX_SDES;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTCP_MAX_SDES_ITEM_COUNT")]
+ public const int MAX_SDES_ITEM_COUNT;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTCP_REDUCED_SIZE_VALID_MASK")]
+ public const int REDUCED_SIZE_VALID_MASK;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTCP_VALID_MASK")]
+ public const int VALID_MASK;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTCP_VALID_VALUE")]
+ public const int VALID_VALUE;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTCP_VERSION")]
+ public const int VERSION;
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static uint64 ntp_to_unix (uint64 ntptime);
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static uint64 unix_to_ntp (uint64 unixtime);
+ }
+ namespace RTP {
+ [CCode (cheader_filename = "gst/rtp/rtp.h", type_id = "gst_rtp_base_audio_payload_get_type ()")]
+ [GIR (name = "RTPBaseAudioPayload")]
+ public class BaseAudioPayload : Gst.RTP.BasePayload {
+ public Gst.ClockTime base_ts;
+ public int frame_duration;
+ public int frame_size;
+ public int sample_size;
+ [CCode (has_construct_function = false)]
+ protected BaseAudioPayload ();
+ public Gst.FlowReturn flush (uint payload_len, Gst.ClockTime timestamp);
+ public Gst.Base.Adapter get_adapter ();
+ public Gst.FlowReturn push ([CCode (array_length_cname = "payload_len", array_length_pos = 1.5, array_length_type = "guint")] uint8[] data, Gst.ClockTime timestamp);
+ public void set_frame_based ();
+ public void set_frame_options (int frame_duration, int frame_size);
+ public void set_sample_based ();
+ public void set_sample_options (int sample_size);
+ public void set_samplebits_options (int sample_size);
+ [NoAccessorMethod]
+ public bool buffer_list { get; set; }
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", type_id = "gst_rtp_base_depayload_get_type ()")]
+ [GIR (name = "RTPBaseDepayload")]
+ public abstract class BaseDepayload : Gst.Element {
+ public uint clock_rate;
+ public bool need_newsegment;
+ public weak Gst.Segment segment;
+ public weak Gst.Pad sinkpad;
+ public weak Gst.Pad srcpad;
+ [CCode (has_construct_function = false)]
+ protected BaseDepayload ();
+ [NoWrapper]
+ public virtual bool handle_event (Gst.Event event);
+ [Version (since = "1.16")]
+ public bool is_source_info_enabled ();
+ [NoWrapper]
+ public virtual bool packet_lost (Gst.Event event);
+ [NoWrapper]
+ public virtual Gst.Buffer process (Gst.Buffer @in);
+ [NoWrapper]
+ public virtual Gst.Buffer process_rtp_packet (Gst.RTP.Buffer rtp_buffer);
+ public Gst.FlowReturn push (Gst.Buffer out_buf);
+ public Gst.FlowReturn push_list (Gst.BufferList out_list);
+ [NoWrapper]
+ public virtual bool set_caps (Gst.Caps caps);
+ [Version (since = "1.16")]
+ public void set_source_info_enabled (bool enable);
+ [NoAccessorMethod]
+ [Version (since = "1.20")]
+ public bool auto_header_extension { get; set; }
+ [NoAccessorMethod]
+ [Version (since = "1.18")]
+ public int max_reorder { get; set; }
+ [NoAccessorMethod]
+ [Version (since = "1.16")]
+ public bool source_info { get; set; }
+ [NoAccessorMethod]
+ public Gst.Structure stats { owned get; }
+ [Version (since = "1.20")]
+ public signal void add_extension (owned Gst.RTP.HeaderExtension ext);
+ [Version (since = "1.20")]
+ public signal void clear_extensions ();
+ [Version (since = "1.20")]
+ public signal Gst.RTP.HeaderExtension request_extension (uint ext_id, string? ext_uri);
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", type_id = "gst_rtp_base_payload_get_type ()")]
+ [GIR (name = "RTPBasePayload")]
+ public abstract class BasePayload : Gst.Element {
+ [CCode (has_construct_function = false)]
+ protected BasePayload ();
+ [Version (since = "1.16")]
+ public Gst.Buffer allocate_output_buffer (uint payload_len, uint8 pad_len, uint8 csrc_count);
+ [NoWrapper]
+ public virtual Gst.Caps get_caps (Gst.Pad pad, Gst.Caps filter);
+ [Version (since = "1.16")]
+ public uint get_source_count (Gst.Buffer buffer);
+ [NoWrapper]
+ public virtual Gst.FlowReturn handle_buffer (Gst.Buffer buffer);
+ public bool is_filled (uint size, Gst.ClockTime duration);
+ [Version (since = "1.16")]
+ public bool is_source_info_enabled ();
+ public Gst.FlowReturn push (Gst.Buffer buffer);
+ public Gst.FlowReturn push_list (Gst.BufferList list);
+ [NoWrapper]
+ public virtual bool query (Gst.Pad pad, Gst.Query query);
+ [NoWrapper]
+ public virtual bool set_caps (Gst.Caps caps);
+ public void set_options (string media, bool @dynamic, string encoding_name, uint32 clock_rate);
+ [Version (since = "1.20")]
+ public bool set_outcaps_structure (Gst.Structure? s);
+ [Version (since = "1.16")]
+ public void set_source_info_enabled (bool enable);
+ [NoWrapper]
+ public virtual bool sink_event (Gst.Event event);
+ [NoWrapper]
+ public virtual bool src_event (Gst.Event event);
+ [NoAccessorMethod]
+ [Version (since = "1.20")]
+ public bool auto_header_extension { get; set; }
+ [NoAccessorMethod]
+ public int64 max_ptime { get; set; }
+ [NoAccessorMethod]
+ public int64 min_ptime { get; set; }
+ [NoAccessorMethod]
+ public uint mtu { get; set; }
+ [NoAccessorMethod]
+ [Version (since = "1.16")]
+ public bool onvif_no_rate_control { get; set; }
+ [NoAccessorMethod]
+ public bool perfect_rtptime { get; set; }
+ [NoAccessorMethod]
+ public uint pt { get; set; }
+ [NoAccessorMethod]
+ public int64 ptime_multiple { get; set; }
+ [NoAccessorMethod]
+ [Version (since = "1.18")]
+ public bool scale_rtptime { get; set; }
+ [NoAccessorMethod]
+ public uint seqnum { get; }
+ [NoAccessorMethod]
+ public int seqnum_offset { get; set; }
+ [NoAccessorMethod]
+ [Version (since = "1.16")]
+ public bool source_info { get; set; }
+ [NoAccessorMethod]
+ public uint ssrc { get; set; }
+ [NoAccessorMethod]
+ public Gst.Structure stats { owned get; }
+ [NoAccessorMethod]
+ public uint timestamp { get; }
+ [NoAccessorMethod]
+ public uint timestamp_offset { get; set; }
+ [Version (since = "1.20")]
+ public signal void add_extension (owned Gst.RTP.HeaderExtension ext);
+ [Version (since = "1.20")]
+ public signal void clear_extensions ();
+ [Version (since = "1.20")]
+ public signal Gst.RTP.HeaderExtension request_extension (uint ext_id, string ext_uri);
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", type_id = "gst_rtp_header_extension_get_type ()")]
+ [GIR (name = "RTPHeaderExtension")]
+ [Version (since = "1.20")]
+ public abstract class HeaderExtension : Gst.Element {
+ public uint ext_id;
+ [CCode (has_construct_function = false)]
+ protected HeaderExtension ();
+ public static Gst.RTP.HeaderExtension? create_from_uri (string uri);
+ public uint get_id ();
+ public virtual size_t get_max_size (Gst.Buffer input_meta);
+ public string get_sdp_caps_field_name ();
+ public virtual Gst.RTP.HeaderExtensionFlags get_supported_flags ();
+ public unowned string get_uri ();
+ public virtual bool read (Gst.RTP.HeaderExtensionFlags read_flags, [CCode (array_length_cname = "size", array_length_pos = 2.5, array_length_type = "gsize", type = "const guint8*")] uint8[] data, Gst.Buffer buffer);
+ public virtual bool set_attributes_from_caps (Gst.Caps caps);
+ public bool set_attributes_from_caps_simple_sdp (Gst.Caps caps);
+ public virtual bool set_caps_from_attributes (Gst.Caps caps);
+ public bool set_caps_from_attributes_simple_sdp (Gst.Caps caps);
+ public void set_id (uint ext_id);
+ public virtual bool set_non_rtp_sink_caps (Gst.Caps caps);
+ [CCode (cname = "gst_rtp_header_extension_class_set_uri")]
+ public class void set_uri (string uri);
+ public void set_wants_update_non_rtp_src_caps (bool state);
+ public virtual bool update_non_rtp_src_caps (Gst.Caps caps);
+ public virtual size_t write (Gst.Buffer input_meta, Gst.RTP.HeaderExtensionFlags write_flags, Gst.Buffer output, [CCode (array_length_cname = "size", array_length_pos = 4.1, array_length_type = "gsize", type = "guint8*")] uint8[] data);
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", has_type_id = false)]
+ [GIR (name = "RTPBuffer")]
+ public struct Buffer {
+ public weak Gst.Buffer buffer;
+ public uint state;
+ [CCode (array_length = false)]
+ public weak void* data[4];
+ [CCode (array_length = false)]
+ public weak size_t size[4];
+ public bool add_extension_onebyte_header (uint8 id, [CCode (array_length_cname = "size", array_length_pos = 2.1, array_length_type = "guint")] uint8[] data);
+ public bool add_extension_twobytes_header (uint8 appbits, uint8 id, [CCode (array_length_cname = "size", array_length_pos = 3.1, array_length_type = "guint")] uint8[] data);
+ [CCode (cname = "gst_buffer_add_rtp_source_meta")]
+ [Version (since = "1.16")]
+ public static unowned Gst.RTP.SourceMeta? add_rtp_source_meta (Gst.Buffer buffer, uint32? ssrc, uint32? csrc, uint csrc_count);
+ public static void allocate_data (Gst.Buffer buffer, uint payload_len, uint8 pad_len, uint8 csrc_count);
+ public static uint calc_header_len (uint8 csrc_count);
+ public static uint calc_packet_len (uint payload_len, uint8 pad_len, uint8 csrc_count);
+ public static uint calc_payload_len (uint packet_len, uint8 pad_len, uint8 csrc_count);
+ public static int compare_seqnum (uint16 seqnum1, uint16 seqnum2);
+ public static uint32 default_clock_rate (uint8 payload_type);
+ public static uint64 ext_timestamp (ref uint64 exttimestamp, uint32 timestamp);
+ public uint32 get_csrc (uint8 idx);
+ public uint8 get_csrc_count ();
+ public bool get_extension ();
+ [Version (since = "1.2")]
+ public GLib.Bytes get_extension_bytes (out uint16 bits);
+ public bool get_extension_data (out uint16 bits, [CCode (array_length = false)] out unowned uint8[] data, out uint wordlen);
+ public bool get_extension_onebyte_header (uint8 id, uint nth, [CCode (array_length_cname = "size", array_length_pos = 3.1, array_length_type = "guint")] out unowned uint8[] data);
+ [Version (since = "1.18")]
+ public static bool get_extension_onebyte_header_from_bytes (GLib.Bytes bytes, uint16 bit_pattern, uint8 id, uint nth, [CCode (array_length_cname = "size", array_length_pos = 5.1, array_length_type = "guint")] out unowned uint8[] data);
+ public bool get_extension_twobytes_header (out uint8 appbits, uint8 id, uint nth, [CCode (array_length_cname = "size", array_length_pos = 4.1, array_length_type = "guint")] out unowned uint8[] data);
+ public uint get_header_len ();
+ public bool get_marker ();
+ public uint get_packet_len ();
+ public bool get_padding ();
+ [CCode (array_length = false)]
+ public unowned uint8[] get_payload ();
+ public Gst.Buffer get_payload_buffer ();
+ [Version (since = "1.2")]
+ public GLib.Bytes get_payload_bytes ();
+ public uint get_payload_len ();
+ public Gst.Buffer get_payload_subbuffer (uint offset, uint len);
+ public uint8 get_payload_type ();
+ [CCode (cname = "gst_buffer_get_rtp_source_meta")]
+ [Version (since = "1.16")]
+ public static unowned Gst.RTP.SourceMeta? get_rtp_source_meta (Gst.Buffer buffer);
+ public uint16 get_seq ();
+ public uint32 get_ssrc ();
+ public uint32 get_timestamp ();
+ public uint8 get_version ();
+ public static bool map (Gst.Buffer buffer, Gst.MapFlags flags, out Gst.RTP.Buffer rtp);
+ public static Gst.Buffer new_allocate (uint payload_len, uint8 pad_len, uint8 csrc_count);
+ public static Gst.Buffer new_allocate_len (uint packet_len, uint8 pad_len, uint8 csrc_count);
+ public static Gst.Buffer new_copy_data ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "gsize")] uint8[] data);
+ public static Gst.Buffer new_take_data ([CCode (array_length_cname = "len", array_length_pos = 1.1, array_length_type = "gsize")] owned uint8[] data);
+ public void pad_to (uint len);
+ public void set_csrc (uint8 idx, uint32 csrc);
+ public void set_extension (bool extension);
+ public bool set_extension_data (uint16 bits, uint16 length);
+ public void set_marker (bool marker);
+ public void set_packet_len (uint len);
+ public void set_padding (bool padding);
+ public void set_payload_type (uint8 payload_type);
+ public void set_seq (uint16 seq);
+ public void set_ssrc (uint32 ssrc);
+ public void set_timestamp (uint32 timestamp);
+ public void set_version (uint8 version);
+ public void unmap ();
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", has_type_id = false)]
+ [GIR (name = "RTPPayloadInfo")]
+ public struct PayloadInfo {
+ public uint8 payload_type;
+ public weak string media;
+ public weak string encoding_name;
+ public uint clock_rate;
+ public weak string encoding_parameters;
+ public uint bitrate;
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", has_type_id = false)]
+ [GIR (name = "RTPSourceMeta")]
+ [Version (since = "1.16")]
+ public struct SourceMeta {
+ public Gst.Meta meta;
+ public uint32 ssrc;
+ public bool ssrc_valid;
+ [CCode (array_length = false)]
+ public weak uint32 csrc[15];
+ public uint csrc_count;
+ public bool append_csrc ([CCode (array_length_cname = "csrc_count", array_length_pos = 1.1, array_length_type = "guint", type = "const guint32*")] uint32[] csrc);
+ public uint get_source_count ();
+ public bool set_ssrc (uint32? ssrc);
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTP_BUFFER_FLAG_", type_id = "gst_rtp_buffer_flags_get_type ()")]
+ [Flags]
+ [GIR (name = "RTPBufferFlags")]
+ [Version (since = "1.10")]
+ public enum BufferFlags {
+ RETRANSMISSION,
+ REDUNDANT,
+ LAST
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTP_BUFFER_MAP_FLAG_", type_id = "gst_rtp_buffer_map_flags_get_type ()")]
+ [Flags]
+ [GIR (name = "RTPBufferMapFlags")]
+ [Version (since = "1.6.1")]
+ public enum BufferMapFlags {
+ SKIP_PADDING,
+ LAST
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTP_HEADER_EXTENSION_", type_id = "gst_rtp_header_extension_flags_get_type ()")]
+ [Flags]
+ [GIR (name = "RTPHeaderExtensionFlags")]
+ [Version (since = "1.20")]
+ public enum HeaderExtensionFlags {
+ ONE_BYTE,
+ TWO_BYTE
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTP_PAYLOAD_", type_id = "gst_rtp_payload_get_type ()")]
+ [GIR (name = "RTPPayload")]
+ public enum Payload {
+ PCMU,
+ @1016,
+ G721,
+ GSM,
+ G723,
+ DVI4_8000,
+ DVI4_16000,
+ LPC,
+ PCMA,
+ G722,
+ L16_STEREO,
+ L16_MONO,
+ QCELP,
+ CN,
+ MPA,
+ G728,
+ DVI4_11025,
+ DVI4_22050,
+ G729,
+ CELLB,
+ JPEG,
+ NV,
+ H261,
+ MPV,
+ MP2T,
+ H263;
+ public const string @1016_STRING;
+ public const string CELLB_STRING;
+ public const string CN_STRING;
+ public const string DVI4_11025_STRING;
+ public const string DVI4_16000_STRING;
+ public const string DVI4_22050_STRING;
+ public const string DVI4_8000_STRING;
+ public const string DYNAMIC_STRING;
+ public const string G721_STRING;
+ public const string G722_STRING;
+ public const int G723_53;
+ public const string G723_53_STRING;
+ public const int G723_63;
+ public const string G723_63_STRING;
+ public const string G723_STRING;
+ public const string G728_STRING;
+ public const string G729_STRING;
+ public const string GSM_STRING;
+ public const string H261_STRING;
+ public const string H263_STRING;
+ public const string JPEG_STRING;
+ public const string L16_MONO_STRING;
+ public const string L16_STEREO_STRING;
+ public const string LPC_STRING;
+ public const string MP2T_STRING;
+ public const string MPA_STRING;
+ public const string MPV_STRING;
+ public const string NV_STRING;
+ public const string PCMA_STRING;
+ public const string PCMU_STRING;
+ public const string QCELP_STRING;
+ public const int TS41;
+ public const string TS41_STRING;
+ public const int TS48;
+ public const string TS48_STRING;
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cprefix = "GST_RTP_PROFILE_", type_id = "gst_rtp_profile_get_type ()")]
+ [GIR (name = "RTPProfile")]
+ [Version (since = "1.6")]
+ public enum Profile {
+ UNKNOWN,
+ AVP,
+ SAVP,
+ AVPF,
+ SAVPF
+ }
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_HDREXT_BASE")]
+ public const string HDREXT_BASE;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_HDREXT_ELEMENT_CLASS")]
+ [Version (since = "1.20")]
+ public const string HDREXT_ELEMENT_CLASS;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_HDREXT_NTP_56")]
+ public const string HDREXT_NTP_56;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_HDREXT_NTP_56_SIZE")]
+ public const int HDREXT_NTP_56_SIZE;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_HDREXT_NTP_64")]
+ public const string HDREXT_NTP_64;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_HDREXT_NTP_64_SIZE")]
+ public const int HDREXT_NTP_64_SIZE;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_HEADER_EXTENSION_URI_METADATA_KEY")]
+ [Version (since = "1.20")]
+ public const string HEADER_EXTENSION_URI_METADATA_KEY;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_SOURCE_META_MAX_CSRC_COUNT")]
+ public const int SOURCE_META_MAX_CSRC_COUNT;
+ [CCode (cheader_filename = "gst/rtp/rtp.h", cname = "GST_RTP_VERSION")]
+ public const int VERSION;
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ [Version (since = "1.20")]
+ public static GLib.List<Gst.RTP.HeaderExtension> get_header_extension_list ();
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static bool hdrext_get_ntp_56 ([CCode (array_length_cname = "size", array_length_pos = 1.5, array_length_type = "guint")] uint8[] data, out uint64 ntptime);
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static bool hdrext_get_ntp_64 ([CCode (array_length_cname = "size", array_length_pos = 1.5, array_length_type = "guint")] uint8[] data, out uint64 ntptime);
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static bool hdrext_set_ntp_56 (void* data, uint size, uint64 ntptime);
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static bool hdrext_set_ntp_64 (void* data, uint size, uint64 ntptime);
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static unowned Gst.RTP.PayloadInfo? payload_info_for_name (string media, string encoding_name);
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static unowned Gst.RTP.PayloadInfo? payload_info_for_pt (uint8 payload_type);
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static GLib.Type source_meta_api_get_type ();
+ [CCode (cheader_filename = "gst/rtp/rtp.h")]
+ public static unowned Gst.MetaInfo? source_meta_get_info ();
+ }
+}