- Adds support for ZeroArtifacts (perfect video quality); a short usage sketch follows this list

- Better interop with WebRTC endpoints (improved video quality)
- Lock-free MediaSessionMgr for better performance on both audio and video
- Redesign the video jitter buffer for better CPU usage and video quality. Request lost frames (RTCP NACK) as many times as required to deal with RTCP losses. The FPS guesser is smarter.
- Fix issues with RTP timestamps on video packets
- Update libsrtp binaries on Android and Windows (use latest CVS)
- Better interop with other H.264 RTP implementations (e.g. GStreamer, Bria, Cisco, Polycom, Lync...)
- Fix issue 233 (tinyNET does not compile on Mac + fix/patch)
- Fix issue 234 (tinyDAV does not compile on Mac)
- Fix issue 238 (iOS: bad audio quality when an audio/video call uses a CPU-intensive audio codec (e.g. G.729 or Speex))
- Fix issue 239 (Adds support for thread priority setting). Timers and audio/video threads now use high priority.
- Fix issue 242 (Hold/Resume fails when audio driver is opensl-es (Android))
- Fix issue 243 (PictureID in VP8 is not correct (only happens when there is overflow on the first 4 bytes))
- Fix issue 244 (Adds callbacks from codecs to session to signal IDR frames decoding)
- Fix issue 245 (Fail to decode h264 buffer)
- Fix issue 246 (GNU Autotools: detect support for monotonic timers in configure.ac)
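
The new ZeroArtifacts default is exposed through the tinyWRAP MediaSessionMgr static setters/getters shown in the diffs below. A minimal C++ sketch of how an application might enable it together with the video jitter buffer (the header path is an assumption; the method names and signatures come from this commit):

    // Sketch only: enable the reworked video jitter buffer and the new
    // "ZeroArtifacts" mode via the MediaSessionMgr defaults from this commit.
    #include "MediaSessionMgr.h" // assumed tinyWRAP header exposing the class

    static bool configureVideoDefaults()
    {
        // Setters return true when the underlying tmedia_defaults_set_* call returned 0.
        bool ok = MediaSessionMgr::defaultsSetVideoJbEnabled(true);
        ok &= MediaSessionMgr::defaultsSetVideoZeroArtifactsEnabled(true);

        // Note: the Get* variants no longer take a dummy bool argument (API change in this commit).
        return ok
            && MediaSessionMgr::defaultsGetVideoJbEnabled()
            && MediaSessionMgr::defaultsGetVideoZeroArtifactsEnabled();
    }
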
This commit is contained in:
bossiel 2013-04-09 22:22:16 +00:00
parent 1bbef7e2c6
commit 181bc7b13d
65 changed files with 5658 additions and 5013 deletions

View File

@ -479,9 +479,17 @@ bool MediaSessionMgr::defaultsGetByPassDecoding(){
bool MediaSessionMgr::defaultsSetVideoJbEnabled(bool enabled){
return (tmedia_defaults_set_videojb_enabled(enabled ? tsk_true : tsk_false) == 0);
}
bool MediaSessionMgr::defaultsGetVideoJbEnabled(bool enabled){
bool MediaSessionMgr::defaultsGetVideoJbEnabled(){
return (tmedia_defaults_get_videojb_enabled() == tsk_true);
}
bool MediaSessionMgr::defaultsSetVideoZeroArtifactsEnabled(bool enabled){
return (tmedia_defaults_set_video_zeroartifacts_enabled(enabled ? tsk_true : tsk_false) == 0);
}
bool MediaSessionMgr::defaultsGetVideoZeroArtifactsEnabled(){
return (tmedia_defaults_get_video_zeroartifacts_enabled() == tsk_true);
}
bool MediaSessionMgr::defaultsSetRtpBuffSize(unsigned buffSize){
return (tmedia_defaults_set_rtpbuff_size(buffSize) == 0);
}

View File

@ -139,7 +139,9 @@ public:
static bool defaultsSetByPassDecoding(bool enabled);
static bool defaultsGetByPassDecoding();
static bool defaultsSetVideoJbEnabled(bool enabled);
static bool defaultsGetVideoJbEnabled(bool enabled);
static bool defaultsGetVideoJbEnabled();
static bool defaultsSetVideoZeroArtifactsEnabled(bool enabled);
static bool defaultsGetVideoZeroArtifactsEnabled();
static bool defaultsSetRtpBuffSize(unsigned buffSize);
static unsigned defaultsGetRtpBuffSize();
static bool defaultsSetAvpfTail(unsigned tail_min, unsigned tail_max);

View File

@ -46,6 +46,7 @@ typedef struct twrap_consumer_proxy_audio_s
uint64_t id;
tsk_bool_t started;
const ProxyAudioConsumer* pcConsumer; // thread-safe and will be destroyed at the same time as the "struct"
}
twrap_consumer_proxy_audio_t;
#define TWRAP_CONSUMER_PROXY_AUDIO(self) ((twrap_consumer_proxy_audio_t*)(self))
@ -61,15 +62,15 @@ int twrap_consumer_proxy_audio_set(tmedia_consumer_t* _self, const tmedia_param_
int twrap_consumer_proxy_audio_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
twrap_consumer_proxy_audio_t* audio = TWRAP_CONSUMER_PROXY_AUDIO(self);
ProxyPluginMgr* manager;
int ret = -1;
if(codec && (manager = ProxyPluginMgr::getInstance())){
const ProxyAudioConsumer* audioConsumer;
if((audioConsumer = manager->findAudioConsumer(TWRAP_CONSUMER_PROXY_AUDIO(self)->id)) && audioConsumer->getCallback()){
if((audio->pcConsumer = manager->findAudioConsumer(audio->id)) && audio->pcConsumer->getCallback()){
self->audio.ptime = codec->plugin->audio.ptime;
self->audio.in.channels = codec->plugin->audio.channels;
self->audio.in.rate = codec->plugin->rate;
ret = audioConsumer->getCallback()->prepare((int)codec->plugin->audio.ptime, codec->plugin->rate, codec->plugin->audio.channels);
ret = audio->pcConsumer->getCallback()->prepare((int)codec->plugin->audio.ptime, codec->plugin->rate, codec->plugin->audio.channels);
}
}
@ -93,17 +94,23 @@ int twrap_consumer_proxy_audio_start(tmedia_consumer_t* self)
int twrap_consumer_proxy_audio_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
ProxyPluginMgr* manager;
twrap_consumer_proxy_audio_t* audio = TWRAP_CONSUMER_PROXY_AUDIO(self);
if(!audio->pcConsumer){
ProxyPluginMgr* manager;
if((manager = ProxyPluginMgr::getInstance())){
audio->pcConsumer = manager->findAudioConsumer(audio->id);
}
}
ProxyAudioConsumerCallback* callback;
int ret = -1;
if((manager = ProxyPluginMgr::getInstance())){
const ProxyAudioConsumer* audioConsumer;
if((audioConsumer = manager->findAudioConsumer(TWRAP_CONSUMER_PROXY_AUDIO(self)->id)) && audioConsumer->getCallback()){
if(audioConsumer->getCallback()->putInJitterBuffer()){
ret = tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
}
else{
ret = audioConsumer->getCallback()->consume(buffer, size, proto_hdr);
}
if(audio->pcConsumer && (callback = audio->pcConsumer->getCallback())){
if(callback->putInJitterBuffer()){
ret = tdav_consumer_audio_put(TDAV_CONSUMER_AUDIO(self), buffer, size, proto_hdr);
}
else{
ret = callback->consume(buffer, size, proto_hdr);
}
}
@ -381,6 +388,7 @@ typedef struct twrap_consumer_proxy_video_s
uint64_t id;
tsk_bool_t started;
const ProxyVideoConsumer* pcConsumer; // thread-safe and will be destroyed at the same time as the "struct"
}
twrap_consumer_proxy_video_t;
#define TWRAP_CONSUMER_PROXY_VIDEO(self) ((twrap_consumer_proxy_video_t*)(self))
@ -393,25 +401,25 @@ int twrap_consumer_proxy_video_set(tmedia_consumer_t* self, const tmedia_param_t
int twrap_consumer_proxy_video_prepare(tmedia_consumer_t* self, const tmedia_codec_t* codec)
{
ProxyPluginMgr* manager;
twrap_consumer_proxy_video_t* video = TWRAP_CONSUMER_PROXY_VIDEO(self);
int ret = -1;
if(codec && (manager = ProxyPluginMgr::getInstance())){
const ProxyVideoConsumer* videoConsumer;
if((videoConsumer = manager->findVideoConsumer(TWRAP_CONSUMER_PROXY_VIDEO(self)->id)) && videoConsumer->getCallback()){
if((video->pcConsumer = manager->findVideoConsumer(video->id)) && video->pcConsumer->getCallback()){
self->video.fps = TMEDIA_CODEC_VIDEO(codec)->in.fps;
// in
self->video.in.chroma = tmedia_chroma_yuv420p;
self->video.in.width = TMEDIA_CODEC_VIDEO(codec)->in.width;
self->video.in.height = TMEDIA_CODEC_VIDEO(codec)->in.height;
// display (out)
self->video.display.chroma = videoConsumer->getChroma();
self->video.display.auto_resize = videoConsumer->getAutoResizeDisplay();
self->video.display.chroma = video->pcConsumer->getChroma();
self->video.display.auto_resize = video->pcConsumer->getAutoResizeDisplay();
if(!self->video.display.width){
self->video.display.width = self->video.in.width;
}
if(!self->video.display.height){
self->video.display.height = self->video.in.height;
}
ret = videoConsumer->getCallback()->prepare(TMEDIA_CODEC_VIDEO(codec)->in.width, TMEDIA_CODEC_VIDEO(codec)->in.height, TMEDIA_CODEC_VIDEO(codec)->in.fps);
ret = video->pcConsumer->getCallback()->prepare(TMEDIA_CODEC_VIDEO(codec)->in.width, TMEDIA_CODEC_VIDEO(codec)->in.height, TMEDIA_CODEC_VIDEO(codec)->in.fps);
}
}
@ -435,37 +443,44 @@ int twrap_consumer_proxy_video_start(tmedia_consumer_t* self)
int twrap_consumer_proxy_video_consume(tmedia_consumer_t* self, const void* buffer, tsk_size_t size, const tsk_object_t* proto_hdr)
{
ProxyPluginMgr* manager;
int ret = -1;
if(!self || !buffer || !size){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if((manager = ProxyPluginMgr::getInstance())){
const ProxyVideoConsumer* videoConsumer;
if((videoConsumer = manager->findVideoConsumer(TWRAP_CONSUMER_PROXY_VIDEO(self)->id)) && videoConsumer->getCallback()){
if(tdav_consumer_video_has_jb(TDAV_CONSUMER_VIDEO(self))){
ret = tdav_consumer_video_put(TDAV_CONSUMER_VIDEO(self), buffer, size, proto_hdr);
}
else{
if(videoConsumer->hasConsumeBuffer()){
unsigned nCopiedSize = videoConsumer->copyBuffer(buffer, size);
ret = videoConsumer->getCallback()->bufferCopied(nCopiedSize, size);
}
else{
ProxyVideoFrame* frame = new ProxyVideoFrame(buffer, size, const_cast<ProxyVideoConsumer*>(videoConsumer)->getDecodedWidth(), const_cast<ProxyVideoConsumer*>(videoConsumer)->getDecodedHeight(), proto_hdr);
ret = videoConsumer->getCallback()->consume(frame);
delete frame, frame = tsk_null;
}
}
}
else{
TSK_DEBUG_ERROR("Cannot find consumer with id=%lld", TWRAP_CONSUMER_PROXY_VIDEO(self)->id);
twrap_consumer_proxy_video_t* video = TWRAP_CONSUMER_PROXY_VIDEO(self);
if(!video->pcConsumer){
ProxyPluginMgr* manager;
if((manager = ProxyPluginMgr::getInstance())){
video->pcConsumer = manager->findVideoConsumer(video->id);
}
}
int ret = -1;
ProxyVideoConsumerCallback* callback;
if(video->pcConsumer && (callback = video->pcConsumer->getCallback())){
if(tdav_consumer_video_has_jb(TDAV_CONSUMER_VIDEO(self))){
ret = tdav_consumer_video_put(TDAV_CONSUMER_VIDEO(self), buffer, size, proto_hdr);
}
else{
if(video->pcConsumer->hasConsumeBuffer()){
unsigned nCopiedSize = video->pcConsumer->copyBuffer(buffer, size);
ret = callback->bufferCopied(nCopiedSize, size);
}
else{
ProxyVideoFrame* frame = new ProxyVideoFrame(buffer, size, const_cast<ProxyVideoConsumer*>(video->pcConsumer)->getDecodedWidth(), const_cast<ProxyVideoConsumer*>(video->pcConsumer)->getDecodedHeight(), proto_hdr);
ret = callback->consume(frame);
delete frame, frame = tsk_null;
}
}
}
else if(!video->pcConsumer){
TSK_DEBUG_ERROR("Cannot find consumer with id=%lld", TWRAP_CONSUMER_PROXY_VIDEO(self)->id);
}
return ret;
}

View File

@ -451,7 +451,7 @@ static tsk_object_t* twrap_producer_proxy_video_dtor(tsk_object_t * self)
{
twrap_producer_proxy_video_t *producer = (twrap_producer_proxy_video_t *)self;
if(producer){
TSK_DEBUG_INFO("twrap_producer_proxy_video_dtor()");
/* stop */
if(producer->started){
twrap_producer_proxy_video_stop(TMEDIA_PRODUCER(producer));
@ -512,6 +512,7 @@ ProxyVideoProducer::ProxyVideoProducer(tmedia_chroma_t eChroma, struct twrap_pro
ProxyVideoProducer::~ProxyVideoProducer()
{
TSK_DEBUG_INFO("~ProxyVideoProducer");
}
int ProxyVideoProducer::getRotation()const

View File

@ -329,8 +329,18 @@ public class MediaSessionMgr : IDisposable {
return ret;
}
public static bool defaultsGetVideoJbEnabled(bool enabled) {
bool ret = tinyWRAPPINVOKE.MediaSessionMgr_defaultsGetVideoJbEnabled(enabled);
public static bool defaultsGetVideoJbEnabled() {
bool ret = tinyWRAPPINVOKE.MediaSessionMgr_defaultsGetVideoJbEnabled();
return ret;
}
public static bool defaultsSetVideoZeroArtifactsEnabled(bool enabled) {
bool ret = tinyWRAPPINVOKE.MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(enabled);
return ret;
}
public static bool defaultsGetVideoZeroArtifactsEnabled() {
bool ret = tinyWRAPPINVOKE.MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled();
return ret;
}

View File

@ -463,7 +463,13 @@ class tinyWRAPPINVOKE {
public static extern bool MediaSessionMgr_defaultsSetVideoJbEnabled(bool jarg1);
[DllImport("tinyWRAP", EntryPoint="CSharp_MediaSessionMgr_defaultsGetVideoJbEnabled")]
public static extern bool MediaSessionMgr_defaultsGetVideoJbEnabled(bool jarg1);
public static extern bool MediaSessionMgr_defaultsGetVideoJbEnabled();
[DllImport("tinyWRAP", EntryPoint="CSharp_MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled")]
public static extern bool MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(bool jarg1);
[DllImport("tinyWRAP", EntryPoint="CSharp_MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled")]
public static extern bool MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled();
[DllImport("tinyWRAP", EntryPoint="CSharp_MediaSessionMgr_defaultsSetRtpBuffSize")]
public static extern bool MediaSessionMgr_defaultsSetRtpBuffSize(uint jarg1);

View File

@ -2288,13 +2288,33 @@ SWIGEXPORT unsigned int SWIGSTDCALL CSharp_MediaSessionMgr_defaultsSetVideoJbEna
}
SWIGEXPORT unsigned int SWIGSTDCALL CSharp_MediaSessionMgr_defaultsGetVideoJbEnabled(unsigned int jarg1) {
SWIGEXPORT unsigned int SWIGSTDCALL CSharp_MediaSessionMgr_defaultsGetVideoJbEnabled() {
unsigned int jresult ;
bool result;
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled();
jresult = result;
return jresult;
}
SWIGEXPORT unsigned int SWIGSTDCALL CSharp_MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(unsigned int jarg1) {
unsigned int jresult ;
bool arg1 ;
bool result;
arg1 = jarg1 ? true : false;
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled(arg1);
result = (bool)MediaSessionMgr::defaultsSetVideoZeroArtifactsEnabled(arg1);
jresult = result;
return jresult;
}
SWIGEXPORT unsigned int SWIGSTDCALL CSharp_MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled() {
unsigned int jresult ;
bool result;
result = (bool)MediaSessionMgr::defaultsGetVideoZeroArtifactsEnabled();
jresult = result;
return jresult;
}

View File

@ -266,8 +266,16 @@ public class MediaSessionMgr {
return tinyWRAPJNI.MediaSessionMgr_defaultsSetVideoJbEnabled(enabled);
}
public static boolean defaultsGetVideoJbEnabled(boolean enabled) {
return tinyWRAPJNI.MediaSessionMgr_defaultsGetVideoJbEnabled(enabled);
public static boolean defaultsGetVideoJbEnabled() {
return tinyWRAPJNI.MediaSessionMgr_defaultsGetVideoJbEnabled();
}
public static boolean defaultsSetVideoZeroArtifactsEnabled(boolean enabled) {
return tinyWRAPJNI.MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(enabled);
}
public static boolean defaultsGetVideoZeroArtifactsEnabled() {
return tinyWRAPJNI.MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled();
}
public static boolean defaultsSetRtpBuffSize(long buffSize) {

View File

@ -266,8 +266,16 @@ public class MediaSessionMgr {
return tinyWRAPJNI.MediaSessionMgr_defaultsSetVideoJbEnabled(enabled);
}
public static boolean defaultsGetVideoJbEnabled(boolean enabled) {
return tinyWRAPJNI.MediaSessionMgr_defaultsGetVideoJbEnabled(enabled);
public static boolean defaultsGetVideoJbEnabled() {
return tinyWRAPJNI.MediaSessionMgr_defaultsGetVideoJbEnabled();
}
public static boolean defaultsSetVideoZeroArtifactsEnabled(boolean enabled) {
return tinyWRAPJNI.MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(enabled);
}
public static boolean defaultsGetVideoZeroArtifactsEnabled() {
return tinyWRAPJNI.MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled();
}
public static boolean defaultsSetRtpBuffSize(long buffSize) {

View File

@ -101,7 +101,9 @@ public class tinyWRAPJNI {
public final static native boolean MediaSessionMgr_defaultsSetByPassDecoding(boolean jarg1);
public final static native boolean MediaSessionMgr_defaultsGetByPassDecoding();
public final static native boolean MediaSessionMgr_defaultsSetVideoJbEnabled(boolean jarg1);
public final static native boolean MediaSessionMgr_defaultsGetVideoJbEnabled(boolean jarg1);
public final static native boolean MediaSessionMgr_defaultsGetVideoJbEnabled();
public final static native boolean MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(boolean jarg1);
public final static native boolean MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled();
public final static native boolean MediaSessionMgr_defaultsSetRtpBuffSize(long jarg1);
public final static native long MediaSessionMgr_defaultsGetRtpBuffSize();
public final static native boolean MediaSessionMgr_defaultsSetAvpfTail(long jarg1, long jarg2);

View File

@ -3412,7 +3412,19 @@ SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionM
}
SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionMgr_1defaultsGetVideoJbEnabled(JNIEnv *jenv, jclass jcls, jboolean jarg1) {
SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionMgr_1defaultsGetVideoJbEnabled(JNIEnv *jenv, jclass jcls) {
jboolean jresult = 0 ;
bool result;
(void)jenv;
(void)jcls;
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled();
jresult = (jboolean)result;
return jresult;
}
SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionMgr_1defaultsSetVideoZeroArtifactsEnabled(JNIEnv *jenv, jclass jcls, jboolean jarg1) {
jboolean jresult = 0 ;
bool arg1 ;
bool result;
@ -3420,7 +3432,19 @@ SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionM
(void)jenv;
(void)jcls;
arg1 = jarg1 ? true : false;
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled(arg1);
result = (bool)MediaSessionMgr::defaultsSetVideoZeroArtifactsEnabled(arg1);
jresult = (jboolean)result;
return jresult;
}
SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionMgr_1defaultsGetVideoZeroArtifactsEnabled(JNIEnv *jenv, jclass jcls) {
jboolean jresult = 0 ;
bool result;
(void)jenv;
(void)jcls;
result = (bool)MediaSessionMgr::defaultsGetVideoZeroArtifactsEnabled();
jresult = (jboolean)result;
return jresult;
}

View File

@ -101,7 +101,9 @@ public class tinyWRAPJNI {
public final static native boolean MediaSessionMgr_defaultsSetByPassDecoding(boolean jarg1);
public final static native boolean MediaSessionMgr_defaultsGetByPassDecoding();
public final static native boolean MediaSessionMgr_defaultsSetVideoJbEnabled(boolean jarg1);
public final static native boolean MediaSessionMgr_defaultsGetVideoJbEnabled(boolean jarg1);
public final static native boolean MediaSessionMgr_defaultsGetVideoJbEnabled();
public final static native boolean MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(boolean jarg1);
public final static native boolean MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled();
public final static native boolean MediaSessionMgr_defaultsSetRtpBuffSize(long jarg1);
public final static native long MediaSessionMgr_defaultsGetRtpBuffSize();
public final static native boolean MediaSessionMgr_defaultsSetAvpfTail(long jarg1, long jarg2);

View File

@ -3412,7 +3412,19 @@ SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionM
}
SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionMgr_1defaultsGetVideoJbEnabled(JNIEnv *jenv, jclass jcls, jboolean jarg1) {
SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionMgr_1defaultsGetVideoJbEnabled(JNIEnv *jenv, jclass jcls) {
jboolean jresult = 0 ;
bool result;
(void)jenv;
(void)jcls;
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled();
jresult = (jboolean)result;
return jresult;
}
SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionMgr_1defaultsSetVideoZeroArtifactsEnabled(JNIEnv *jenv, jclass jcls, jboolean jarg1) {
jboolean jresult = 0 ;
bool arg1 ;
bool result;
@ -3420,7 +3432,19 @@ SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionM
(void)jenv;
(void)jcls;
arg1 = jarg1 ? true : false;
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled(arg1);
result = (bool)MediaSessionMgr::defaultsSetVideoZeroArtifactsEnabled(arg1);
jresult = (jboolean)result;
return jresult;
}
SWIGEXPORT jboolean JNICALL Java_org_doubango_tinyWRAP_tinyWRAPJNI_MediaSessionMgr_1defaultsGetVideoZeroArtifactsEnabled(JNIEnv *jenv, jclass jcls) {
jboolean jresult = 0 ;
bool result;
(void)jenv;
(void)jcls;
result = (bool)MediaSessionMgr::defaultsGetVideoZeroArtifactsEnabled();
jresult = (jboolean)result;
return jresult;
}

View File

@ -289,6 +289,8 @@ sub DESTROY {
*defaultsGetByPassDecoding = *tinyWRAPc::MediaSessionMgr_defaultsGetByPassDecoding;
*defaultsSetVideoJbEnabled = *tinyWRAPc::MediaSessionMgr_defaultsSetVideoJbEnabled;
*defaultsGetVideoJbEnabled = *tinyWRAPc::MediaSessionMgr_defaultsGetVideoJbEnabled;
*defaultsSetVideoZeroArtifactsEnabled = *tinyWRAPc::MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled;
*defaultsGetVideoZeroArtifactsEnabled = *tinyWRAPc::MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled;
*defaultsSetRtpBuffSize = *tinyWRAPc::MediaSessionMgr_defaultsSetRtpBuffSize;
*defaultsGetRtpBuffSize = *tinyWRAPc::MediaSessionMgr_defaultsGetRtpBuffSize;
*defaultsSetAvpfTail = *tinyWRAPc::MediaSessionMgr_defaultsSetAvpfTail;

View File

@ -5044,6 +5044,24 @@ XS(_wrap_MediaSessionMgr_defaultsSetVideoJbEnabled) {
XS(_wrap_MediaSessionMgr_defaultsGetVideoJbEnabled) {
{
int argvi = 0;
bool result;
dXSARGS;
if ((items < 0) || (items > 0)) {
SWIG_croak("Usage: MediaSessionMgr_defaultsGetVideoJbEnabled();");
}
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled();
ST(argvi) = SWIG_From_bool SWIG_PERL_CALL_ARGS_1(static_cast< bool >(result)); argvi++ ;
XSRETURN(argvi);
fail:
SWIG_croak_null();
}
}
XS(_wrap_MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled) {
{
bool arg1 ;
bool val1 ;
@ -5053,14 +5071,14 @@ XS(_wrap_MediaSessionMgr_defaultsGetVideoJbEnabled) {
dXSARGS;
if ((items < 1) || (items > 1)) {
SWIG_croak("Usage: MediaSessionMgr_defaultsGetVideoJbEnabled(enabled);");
SWIG_croak("Usage: MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(enabled);");
}
ecode1 = SWIG_AsVal_bool SWIG_PERL_CALL_ARGS_2(ST(0), &val1);
if (!SWIG_IsOK(ecode1)) {
SWIG_exception_fail(SWIG_ArgError(ecode1), "in method '" "MediaSessionMgr_defaultsGetVideoJbEnabled" "', argument " "1"" of type '" "bool""'");
SWIG_exception_fail(SWIG_ArgError(ecode1), "in method '" "MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled" "', argument " "1"" of type '" "bool""'");
}
arg1 = static_cast< bool >(val1);
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled(arg1);
result = (bool)MediaSessionMgr::defaultsSetVideoZeroArtifactsEnabled(arg1);
ST(argvi) = SWIG_From_bool SWIG_PERL_CALL_ARGS_1(static_cast< bool >(result)); argvi++ ;
XSRETURN(argvi);
@ -5071,6 +5089,24 @@ XS(_wrap_MediaSessionMgr_defaultsGetVideoJbEnabled) {
}
XS(_wrap_MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled) {
{
int argvi = 0;
bool result;
dXSARGS;
if ((items < 0) || (items > 0)) {
SWIG_croak("Usage: MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled();");
}
result = (bool)MediaSessionMgr::defaultsGetVideoZeroArtifactsEnabled();
ST(argvi) = SWIG_From_bool SWIG_PERL_CALL_ARGS_1(static_cast< bool >(result)); argvi++ ;
XSRETURN(argvi);
fail:
SWIG_croak_null();
}
}
XS(_wrap_MediaSessionMgr_defaultsSetRtpBuffSize) {
{
unsigned int arg1 ;
@ -26745,6 +26781,8 @@ static swig_command_info swig_commands[] = {
{"tinyWRAPc::MediaSessionMgr_defaultsGetByPassDecoding", _wrap_MediaSessionMgr_defaultsGetByPassDecoding},
{"tinyWRAPc::MediaSessionMgr_defaultsSetVideoJbEnabled", _wrap_MediaSessionMgr_defaultsSetVideoJbEnabled},
{"tinyWRAPc::MediaSessionMgr_defaultsGetVideoJbEnabled", _wrap_MediaSessionMgr_defaultsGetVideoJbEnabled},
{"tinyWRAPc::MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled", _wrap_MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled},
{"tinyWRAPc::MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled", _wrap_MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled},
{"tinyWRAPc::MediaSessionMgr_defaultsSetRtpBuffSize", _wrap_MediaSessionMgr_defaultsSetRtpBuffSize},
{"tinyWRAPc::MediaSessionMgr_defaultsGetRtpBuffSize", _wrap_MediaSessionMgr_defaultsGetRtpBuffSize},
{"tinyWRAPc::MediaSessionMgr_defaultsSetAvpfTail", _wrap_MediaSessionMgr_defaultsSetAvpfTail},

View File

@ -285,6 +285,10 @@ class MediaSessionMgr(_object):
if _newclass:defaultsSetVideoJbEnabled = staticmethod(_tinyWRAP.MediaSessionMgr_defaultsSetVideoJbEnabled)
__swig_getmethods__["defaultsGetVideoJbEnabled"] = lambda x: _tinyWRAP.MediaSessionMgr_defaultsGetVideoJbEnabled
if _newclass:defaultsGetVideoJbEnabled = staticmethod(_tinyWRAP.MediaSessionMgr_defaultsGetVideoJbEnabled)
__swig_getmethods__["defaultsSetVideoZeroArtifactsEnabled"] = lambda x: _tinyWRAP.MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled
if _newclass:defaultsSetVideoZeroArtifactsEnabled = staticmethod(_tinyWRAP.MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled)
__swig_getmethods__["defaultsGetVideoZeroArtifactsEnabled"] = lambda x: _tinyWRAP.MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled
if _newclass:defaultsGetVideoZeroArtifactsEnabled = staticmethod(_tinyWRAP.MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled)
__swig_getmethods__["defaultsSetRtpBuffSize"] = lambda x: _tinyWRAP.MediaSessionMgr_defaultsSetRtpBuffSize
if _newclass:defaultsSetRtpBuffSize = staticmethod(_tinyWRAP.MediaSessionMgr_defaultsSetRtpBuffSize)
__swig_getmethods__["defaultsGetRtpBuffSize"] = lambda x: _tinyWRAP.MediaSessionMgr_defaultsGetRtpBuffSize
@ -482,10 +486,18 @@ def MediaSessionMgr_defaultsSetVideoJbEnabled(*args):
return _tinyWRAP.MediaSessionMgr_defaultsSetVideoJbEnabled(*args)
MediaSessionMgr_defaultsSetVideoJbEnabled = _tinyWRAP.MediaSessionMgr_defaultsSetVideoJbEnabled
def MediaSessionMgr_defaultsGetVideoJbEnabled(*args):
return _tinyWRAP.MediaSessionMgr_defaultsGetVideoJbEnabled(*args)
def MediaSessionMgr_defaultsGetVideoJbEnabled():
return _tinyWRAP.MediaSessionMgr_defaultsGetVideoJbEnabled()
MediaSessionMgr_defaultsGetVideoJbEnabled = _tinyWRAP.MediaSessionMgr_defaultsGetVideoJbEnabled
def MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(*args):
return _tinyWRAP.MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(*args)
MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled = _tinyWRAP.MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled
def MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled():
return _tinyWRAP.MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled()
MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled = _tinyWRAP.MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled
def MediaSessionMgr_defaultsSetRtpBuffSize(*args):
return _tinyWRAP.MediaSessionMgr_defaultsSetRtpBuffSize(*args)
MediaSessionMgr_defaultsSetRtpBuffSize = _tinyWRAP.MediaSessionMgr_defaultsSetRtpBuffSize

View File

@ -7822,6 +7822,19 @@ fail:
SWIGINTERN PyObject *_wrap_MediaSessionMgr_defaultsGetVideoJbEnabled(PyObject *SWIGUNUSEDPARM(self), PyObject *args) {
PyObject *resultobj = 0;
bool result;
if (!PyArg_ParseTuple(args,(char *)":MediaSessionMgr_defaultsGetVideoJbEnabled")) SWIG_fail;
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled();
resultobj = SWIG_From_bool(static_cast< bool >(result));
return resultobj;
fail:
return NULL;
}
SWIGINTERN PyObject *_wrap_MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled(PyObject *SWIGUNUSEDPARM(self), PyObject *args) {
PyObject *resultobj = 0;
bool arg1 ;
bool val1 ;
@ -7829,13 +7842,26 @@ SWIGINTERN PyObject *_wrap_MediaSessionMgr_defaultsGetVideoJbEnabled(PyObject *S
PyObject * obj0 = 0 ;
bool result;
if (!PyArg_ParseTuple(args,(char *)"O:MediaSessionMgr_defaultsGetVideoJbEnabled",&obj0)) SWIG_fail;
if (!PyArg_ParseTuple(args,(char *)"O:MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled",&obj0)) SWIG_fail;
ecode1 = SWIG_AsVal_bool(obj0, &val1);
if (!SWIG_IsOK(ecode1)) {
SWIG_exception_fail(SWIG_ArgError(ecode1), "in method '" "MediaSessionMgr_defaultsGetVideoJbEnabled" "', argument " "1"" of type '" "bool""'");
SWIG_exception_fail(SWIG_ArgError(ecode1), "in method '" "MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled" "', argument " "1"" of type '" "bool""'");
}
arg1 = static_cast< bool >(val1);
result = (bool)MediaSessionMgr::defaultsGetVideoJbEnabled(arg1);
result = (bool)MediaSessionMgr::defaultsSetVideoZeroArtifactsEnabled(arg1);
resultobj = SWIG_From_bool(static_cast< bool >(result));
return resultobj;
fail:
return NULL;
}
SWIGINTERN PyObject *_wrap_MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled(PyObject *SWIGUNUSEDPARM(self), PyObject *args) {
PyObject *resultobj = 0;
bool result;
if (!PyArg_ParseTuple(args,(char *)":MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled")) SWIG_fail;
result = (bool)MediaSessionMgr::defaultsGetVideoZeroArtifactsEnabled();
resultobj = SWIG_From_bool(static_cast< bool >(result));
return resultobj;
fail:
@ -25331,6 +25357,8 @@ static PyMethodDef SwigMethods[] = {
{ (char *)"MediaSessionMgr_defaultsGetByPassDecoding", _wrap_MediaSessionMgr_defaultsGetByPassDecoding, METH_VARARGS, NULL},
{ (char *)"MediaSessionMgr_defaultsSetVideoJbEnabled", _wrap_MediaSessionMgr_defaultsSetVideoJbEnabled, METH_VARARGS, NULL},
{ (char *)"MediaSessionMgr_defaultsGetVideoJbEnabled", _wrap_MediaSessionMgr_defaultsGetVideoJbEnabled, METH_VARARGS, NULL},
{ (char *)"MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled", _wrap_MediaSessionMgr_defaultsSetVideoZeroArtifactsEnabled, METH_VARARGS, NULL},
{ (char *)"MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled", _wrap_MediaSessionMgr_defaultsGetVideoZeroArtifactsEnabled, METH_VARARGS, NULL},
{ (char *)"MediaSessionMgr_defaultsSetRtpBuffSize", _wrap_MediaSessionMgr_defaultsSetRtpBuffSize, METH_VARARGS, NULL},
{ (char *)"MediaSessionMgr_defaultsGetRtpBuffSize", _wrap_MediaSessionMgr_defaultsGetRtpBuffSize, METH_VARARGS, NULL},
{ (char *)"MediaSessionMgr_defaultsSetAvpfTail", _wrap_MediaSessionMgr_defaultsSetAvpfTail, METH_VARARGS, NULL},

View File

@ -123,6 +123,14 @@ AC_CHECK_LIB([resolv], [dns_search],
AC_SUBST(LIBRESOLV_LIBADD, "")
)
AM_CONDITIONAL(USE_RT, false)
AC_CHECK_LIB([rt], [clock_gettime],
AC_SUBST(LIBRT_LIBADD, "-lrt")
AC_DEFINE(HAVE_CLOCK_GETTIME, 1, [Define to 1 if we have the `clock_gettime' function.])
AM_CONDITIONAL(USE_RT, true),
AC_SUBST(LIBRT_LIBADD, "")
)
AH_TEMPLATE([TNET_HAVE_SS_LEN], [Define if sockaddr_storage.ss_len exists])
AC_CHECK_MEMBER([struct sockaddr_storage.ss_len], AC_DEFINE(TNET_HAVE_SS_LEN, 1), AC_DEFINE(TNET_HAVE_SS_LEN,0), [#include <sys/socket.h>])

View File

@ -498,6 +498,7 @@ int audio_opensles_instance_stop_consumer(audio_opensles_instance_handle_t* _sel
}
else{
self->isConsumerStarted = self->device->Playing();
self->isConsumerPrepared = false;
}
done:
@ -525,6 +526,7 @@ int audio_opensles_instance_stop_producer(audio_opensles_instance_handle_t* _sel
}
else{
self->isProducerStarted = self->device->Recording();
self->isProducerPrepared = false;
}
done:

View File

@ -15,12 +15,12 @@
#define CHECK_TRUE(_bool, _text) { if(!_bool){ AUDIO_OPENSLES_DEBUG_ERROR(_text); return -1; } }
#define CHECK_FALSE(_bool, _text) { if(_bool){ AUDIO_OPENSLES_DEBUG_ERROR(_text); return -1; } }
#define CHECK_PLAYOUT_INITIALIZED() CHECK_TRUE(m_bPlayoutInitialized, "Playout already initialized")
#define CHECK_PLAYOUT_NOT_INITIALIZED() CHECK_FALSE(m_bPlayoutInitialized, "Playout not initialized")
#define CHECK_RECORDING_INITIALIZED() CHECK_TRUE(m_bRecordingInitialized, "Recording already initialized")
#define CHECK_RECORDING_NOT_INITIALIZED() CHECK_FALSE(m_bRecordingInitialized, "Recording not initialized")
#define CHECK_MICROPHONE_INITIALIZED() CHECK_TRUE(m_bMicrophoneInitialized, "Microphone already initialized")
#define CHECK_MICROPHONE_NOT_INITIALIZED() CHECK_FALSE(m_bMicrophoneInitialized, "Microphone not initialized")
#define CHECK_PLAYOUT_INITIALIZED() CHECK_TRUE(m_bPlayoutInitialized, "Playout not initialized")
#define CHECK_PLAYOUT_NOT_INITIALIZED() CHECK_FALSE(m_bPlayoutInitialized, "Playout initialized")
#define CHECK_RECORDING_INITIALIZED() CHECK_TRUE(m_bRecordingInitialized, "Recording not initialized")
#define CHECK_RECORDING_NOT_INITIALIZED() CHECK_FALSE(m_bRecordingInitialized, "Recording initialized")
#define CHECK_MICROPHONE_INITIALIZED() CHECK_TRUE(m_bMicrophoneInitialized, "Microphone not initialized")
#define CHECK_MICROPHONE_NOT_INITIALIZED() CHECK_FALSE(m_bMicrophoneInitialized, "Microphone initialized")
#if AUDIO_OPENSLES_UNDER_ANDROID
static inline SLuint32 SL_SAMPLING_RATE(int RATE_INT){
@ -427,6 +427,8 @@ int SLAudioDevice::PlayoutSampleRate(int *pPlayoutSampleRate)
int SLAudioDevice::StartPlayout()
{
AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StartPlayout()");
CHECK_TRUE(m_bInitialized, "Not initialized");
CHECK_PLAYOUT_INITIALIZED();
@ -493,6 +495,8 @@ bool SLAudioDevice::Playing()
int SLAudioDevice::StopPlayout()
{
AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StopPlayout()");
if(!m_bPlaying){
return 0;
}
@ -557,9 +561,8 @@ int SLAudioDevice::MicrophoneIsAvailable(bool *pAvailable)
int SLAudioDevice::InitMicrophone()
{
CHECK_TRUE(m_bInitialized, "Device not initialized");
AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::InitMicrophone()");
CHECK_TRUE(m_bInitialized, "Device not initialized");
if(m_bMicrophoneInitialized){
return 0;
@ -776,6 +779,8 @@ int SLAudioDevice::RecordingSampleRate(int *pRecordingSampleRate)
int SLAudioDevice::StartRecording()
{
AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StartRecording()");
CHECK_TRUE(m_bInitialized, "Not initialized");
CHECK_RECORDING_INITIALIZED();
@ -840,6 +845,7 @@ bool SLAudioDevice::Recording()
int SLAudioDevice::StopRecording()
{
AUDIO_OPENSLES_DEBUG_INFO("SLAudioDevice::StopRecording()");
if (!m_bRecording) {
return 0;
}
@ -866,6 +872,7 @@ int SLAudioDevice::StopRecording()
AUDIO_OPENSLES_DEBUG_INFO("Recording stopped");
m_bRecording = false;
m_bRecordingInitialized = false;
return 0;
}

View File

@ -47,7 +47,9 @@
/* #undef HAVE_LIBSOCKET */
/* Define to 1 if you have the <machine/types.h> header file. */
/* #undef HAVE_MACHINE_TYPES_H */
#if !defined(ANDROID)
#define HAVE_MACHINE_TYPES_H 1
#endif
/* Define to 1 if you have the <memory.h> header file. */
#define HAVE_MEMORY_H 1
@ -110,10 +112,10 @@
/* #undef HAVE_USLEEP */
/* Define to 1 if you have the <windows.h> header file. */
/* #undef HAVE_WINDOWS_H */
#define HAVE_WINDOWS_H 1
/* Define to 1 if you have the <winsock2.h> header file. */
/* #undef HAVE_WINSOCK2_H */
#define HAVE_WINSOCK2_H 1
/* Define to use X86 inlined assembly code */
/* #undef HAVE_X86 */
@ -149,7 +151,7 @@
/* #undef SRTP_KERNEL_LINUX */
/* Define to 1 if you have the ANSI C header files. */
/* #undef STDC_HEADERS */
#define STDC_HEADERS 1
/* Write errors to this file */
/* #undef USE_ERR_REPORTING_FILE */

View File

@ -11,7 +11,7 @@
/* #undef DEV_URANDOM */
/* Define to compile in dynamic debugging system. */
/* #undef ENABLE_DEBUGGING */
#define ENABLE_DEBUGGING 1
/* Report errors to this file. */
/* #undef ERR_REPORTING_FILE */

View File

@ -43,8 +43,6 @@ typedef struct tdav_producer_audiounit_s
unsigned started:1;
unsigned paused:1;
unsigned muted;
void* senderThreadId[1];
tsk_condwait_handle_t* senderCondWait;
struct {
struct {
@ -53,7 +51,6 @@ typedef struct tdav_producer_audiounit_s
} chunck;
SpeexBuffer* buffer;
tsk_size_t size;
tsk_mutex_handle_t* mutex;
} ring;
}
tdav_producer_audiounit_t;

View File

@ -52,7 +52,7 @@ typedef struct tdav_speakup_jitterBuffer_s
}
tdav_speakup_jitterbuffer_t;
const tmedia_jitterbuffer_plugin_def_t *tdav_speakup_jitterbuffer_plugin_def_t;
TINYDAV_GEXTERN const tmedia_jitterbuffer_plugin_def_t *tdav_speakup_jitterbuffer_plugin_def_t;
TDAV_END_DECLS

View File

@ -49,7 +49,7 @@ typedef struct tdav_speex_jitterBuffer_s
}
tdav_speex_jitterbuffer_t;
const tmedia_jitterbuffer_plugin_def_t *tdav_speex_jitterbuffer_plugin_def_t;
TINYDAV_GEXTERN const tmedia_jitterbuffer_plugin_def_t *tdav_speex_jitterbuffer_plugin_def_t;
TDAV_END_DECLS

View File

@ -49,6 +49,7 @@ typedef struct tdav_video_frame_s
uint8_t payload_type;
uint32_t timestamp;
uint16_t highest_seq_num;
uint32_t ssrc;
trtp_rtp_packets_L_t* pkts;
TSK_DECLARE_SAFEOBJ;
@ -59,6 +60,7 @@ struct tdav_video_frame_s* tdav_video_frame_create(struct trtp_rtp_packet_s* rtp
int tdav_video_frame_put(struct tdav_video_frame_s* self, struct trtp_rtp_packet_s* rtp_pkt);
const struct trtp_rtp_packet_s* tdav_video_frame_find_by_seq_num(const struct tdav_video_frame_s* self, uint16_t seq_num);
tsk_size_t tdav_video_frame_write(struct tdav_video_frame_s* self, void** buffer_ptr, tsk_size_t* buffer_size);
tsk_bool_t tdav_video_frame_is_complete(const struct tdav_video_frame_s* self, int32_t last_seq_num_with_mark, uint16_t* missing_seq_num);
TDAV_END_DECLS

View File

@ -49,6 +49,7 @@ typedef struct tdav_session_video_s
struct tdav_video_jb_s* jb;
tsk_bool_t jb_enabled;
tsk_bool_t zero_artifacts;
struct{
const void* context;
@ -70,6 +71,8 @@ typedef struct tdav_session_video_s
int32_t pkt_loss_prob_good;
int32_t pkt_loss_prob_bad;
uint64_t last_frame_time;
uint8_t payload_type;
struct tmedia_codec_s* codec;
tsk_mutex_handle_t* h_mutex;
@ -82,6 +85,13 @@ typedef struct tdav_session_video_s
void* conv_buffer;
tsk_size_t conv_buffer_size;
// latest decoded RTP seqnum
uint16_t last_seqnum;
// stream is corrupted if packets are lost
tsk_bool_t stream_corrupted;
uint64_t stream_corrupted_since;
uint32_t last_corrupted_timestamp;
uint8_t payload_type;
struct tmedia_codec_s* codec;
} decoder;

View File

@ -41,6 +41,7 @@
// OS X or iOS
#if defined(__APPLE__)
# define TDAV_UNDER_APPLE 1
# include <TargetConditionals.h>
#endif
#if TARGET_OS_MAC
# define TDAV_UNDER_MAC 1
@ -58,7 +59,7 @@
#endif
// Mobile
#if defined(_WIN32_WCE) || defined(ANDROID) // iOS (not true)=> || defined(IOS)
#if defined(_WIN32_WCE) || defined(ANDROID) || TDAV_UNDER_IPHONE || TDAV_UNDER_IPHONE_SIMULATOR
# define TDAV_UNDER_MOBILE 1
#endif

View File

@ -33,15 +33,7 @@
#include "tsk_thread.h"
#include "tsk_debug.h"
#define kRingPacketCount +10
// If the "ptime" value is less than "kMaxPtimeBeforeUsingCondVars", then we can use nonosleep() function instead of conditional
// variables for better performance.
// When the prodcuer's stop() function is called we will wait until the sender thread exist (using join()) this is
// why "kMaxPtimeBeforeUsingCondVars" should be small. This problem will not happen when using conditional variables: thanks to braodcast().
#define kMaxPtimeBeforeUsingCondVars +500 /* milliseconds */
static void *__sender_thread(void *param);
static int __sender_thread_set_realtime(uint32_t ptime);
#define kRingPacketCount 10
static OSStatus __handle_input_buffer(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
@ -71,119 +63,19 @@ static OSStatus __handle_input_buffer(void *inRefCon,
inNumberFrames,
&buffers);
if(status == 0){
tsk_mutex_lock(producer->ring.mutex);
// must not be done on async thread: doing it gives bad audio quality when audio+video call is done with CPU consuming codec (e.g. speex or g729)
speex_buffer_write(producer->ring.buffer, buffers.mBuffers[0].mData, buffers.mBuffers[0].mDataByteSize);
tsk_mutex_unlock(producer->ring.mutex);
int avail = speex_buffer_get_available(producer->ring.buffer);
while (producer->started && avail >= producer->ring.chunck.size) {
avail -= speex_buffer_read(producer->ring.buffer, producer->ring.chunck.buffer, producer->ring.chunck.size);
TMEDIA_PRODUCER(producer)->enc_cb.callback(TMEDIA_PRODUCER(producer)->enc_cb.callback_data,
producer->ring.chunck.buffer, producer->ring.chunck.size);
}
}
return status;
}
static int __sender_thread_set_realtime(uint32_t ptime) {
struct thread_time_constraint_policy policy;
int params [2] = {CTL_HW, HW_BUS_FREQ};
int ret;
// get bus frequence
int freq_ns, freq_ms;
size_t size = sizeof (freq_ns);
if((ret = sysctl (params, 2, &freq_ns, &size, NULL, 0))){
// check errno for more information
TSK_DEBUG_INFO("sysctl() failed with error code=%d", ret);
return ret;
}
freq_ms = freq_ns/1000;
/*
* THREAD_TIME_CONSTRAINT_POLICY:
*
* This scheduling mode is for threads which have real time
* constraints on their execution.
*
* Parameters:
*
* period: This is the nominal amount of time between separate
* processing arrivals, specified in absolute time units. A
* value of 0 indicates that there is no inherent periodicity in
* the computation.
*
* computation: This is the nominal amount of computation
* time needed during a separate processing arrival, specified
* in absolute time units.
*
* constraint: This is the maximum amount of real time that
* may elapse from the start of a separate processing arrival
* to the end of computation for logically correct functioning,
* specified in absolute time units. Must be (>= computation).
* Note that latency = (constraint - computation).
*
* preemptible: This indicates that the computation may be
* interrupted, subject to the constraint specified above.
*/
policy.period = (ptime/2) * freq_ms; // Half of the ptime
policy.computation = 2 * freq_ms;
policy.constraint = 3 * freq_ms;
policy.preemptible = true;
if ((ret = thread_policy_set(mach_thread_self(),
THREAD_TIME_CONSTRAINT_POLICY, (int *)&policy,
THREAD_TIME_CONSTRAINT_POLICY_COUNT)) != KERN_SUCCESS) {
TSK_DEBUG_ERROR("thread_policy_set failed(period=%u,computation=%u,constraint=%u) failed with error code= %d",
policy.period, policy.computation, policy.constraint,
ret);
return ret;
}
return 0;
}
static void *__sender_thread(void *param)
{
TSK_DEBUG_INFO("__sender_thread::ENTER");
tdav_producer_audiounit_t* producer = (tdav_producer_audiounit_t*)param;
uint32_t ptime = TMEDIA_PRODUCER(producer)->audio.ptime;
tsk_ssize_t avail;
// interval to sleep when using nonosleep() instead of conditional variable
struct timespec interval;
interval.tv_sec = (long)(ptime/1000);
interval.tv_nsec = (long)(ptime%1000) * 1000000;
// change thread priority
//#if TARGET_OS_IPHONE
__sender_thread_set_realtime(TMEDIA_PRODUCER(producer)->audio.ptime);
//#endif
// starts looping
for (;;) {
// wait for "ptime" milliseconds
if(ptime <= kMaxPtimeBeforeUsingCondVars){
nanosleep(&interval, 0);
}
else {
tsk_condwait_timedwait(producer->senderCondWait, (uint64_t)ptime);
}
// check state
if(!producer->started){
break;
}
// read data and send them
if(TMEDIA_PRODUCER(producer)->enc_cb.callback) {
tsk_mutex_lock(producer->ring.mutex);
avail = speex_buffer_get_available(producer->ring.buffer);
while (producer->started && avail >= producer->ring.chunck.size) {
avail -= speex_buffer_read(producer->ring.buffer, producer->ring.chunck.buffer, producer->ring.chunck.size);
TMEDIA_PRODUCER(producer)->enc_cb.callback(TMEDIA_PRODUCER(producer)->enc_cb.callback_data,
producer->ring.chunck.buffer, producer->ring.chunck.size);
}
tsk_mutex_unlock(producer->ring.mutex);
}
else;
}
TSK_DEBUG_INFO("__sender_thread::EXIT");
return tsk_null;
}
/* ============ Media Producer Interface ================= */
int tdav_producer_audiounit_set(tmedia_producer_t* self, const tmedia_param_t* param)
{
@ -352,11 +244,6 @@ static int tdav_producer_audiounit_prepare(tmedia_producer_t* self, const tmedia
TSK_DEBUG_ERROR("Failed to allocate new buffer");
return -7;
}
// create mutex for ring buffer
if(!producer->ring.mutex && !(producer->ring.mutex = tsk_mutex_create_2(tsk_false))){
TSK_DEBUG_ERROR("Failed to create new mutex");
return -8;
}
// create ringbuffer
producer->ring.size = kRingPacketCount * producer->ring.chunck.size;
if(!producer->ring.buffer){
@ -412,18 +299,6 @@ static int tdav_producer_audiounit_start(tmedia_producer_t* self)
// apply parameters (because could be lost when the producer is restarted -handle recreated-)
ret = tdav_audiounit_handle_mute(producer->audioUnitHandle, producer->muted);
// create conditional variable
if(!(producer->senderCondWait = tsk_condwait_create())){
TSK_DEBUG_ERROR("Failed to create conditional variable");
return -2;
}
// start the reader thread
ret = tsk_thread_create(&producer->senderThreadId[0], __sender_thread, producer);
if(ret){
TSK_DEBUG_ERROR("Failed to start the sender thread. error code=%d", ret);
return ret;
}
TSK_DEBUG_INFO("AudioUnit producer started");
return 0;
}
@ -466,14 +341,6 @@ static int tdav_producer_audiounit_stop(tmedia_producer_t* self)
#endif
}
producer->started = tsk_false;
// signal
if(producer->senderCondWait){
tsk_condwait_broadcast(producer->senderCondWait);
}
// stop thread
if(producer->senderThreadId[0]){
tsk_thread_join(&(producer->senderThreadId[0]));
}
TSK_DEBUG_INFO("AudioUnit producer stoppped");
return 0;
}
@ -507,16 +374,10 @@ static tsk_object_t* tdav_producer_audiounit_dtor(tsk_object_t * self)
if (producer->audioUnitHandle) {
tdav_audiounit_handle_destroy(&producer->audioUnitHandle);
}
if(producer->ring.mutex){
tsk_mutex_destroy(&producer->ring.mutex);
}
TSK_FREE(producer->ring.chunck.buffer);
if(producer->ring.buffer){
speex_buffer_destroy(producer->ring.buffer);
}
if(producer->senderCondWait){
tsk_condwait_destroy(&producer->senderCondWait);
}
/* deinit base */
tdav_producer_audio_deinit(TDAV_PRODUCER_AUDIO(producer));
}

View File

@ -330,6 +330,7 @@ static tsk_size_t tdav_codec_h263_decode(tmedia_codec_t* self, const void* in_da
tdav_codec_h263_t* h263 = (tdav_codec_h263_t*)self;
const trtp_rtp_header_t* rtp_hdr = proto_hdr;
tsk_bool_t is_idr = tsk_false;
if(!self || !in_data || !in_size || !out_data || !h263->decoder.context){
TSK_DEBUG_ERROR("Invalid parameter");
@ -345,6 +346,10 @@ static tsk_size_t tdav_codec_h263_decode(tmedia_codec_t* self, const void* in_da
Optional PB-frames mode as defined by the H.263 [4]. "0" implies
normal I or P frame, "1" PB-frames. When F=1, P also indicates modes:
mode B if P=0, mode C if P=1.
I: 1 bit.
Picture coding type, bit 9 in PTYPE defined by H.263[4], "0" is
intra-coded, "1" is inter-coded.
*/
F = *pdata >> 7;
P = (*pdata >> 6) & 0x01;
@ -362,6 +367,7 @@ static tsk_size_t tdav_codec_h263_decode(tmedia_codec_t* self, const void* in_da
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
hdr_size = H263_HEADER_MODE_A_SIZE;
is_idr = (in_size >= 2) && !(pdata[1] & 0x10) /* I==1 */;
}
else if(P == 0){ // F=1 and P=0
/* MODE B
@ -374,6 +380,7 @@ static tsk_size_t tdav_codec_h263_decode(tmedia_codec_t* self, const void* in_da
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
hdr_size = H263_HEADER_MODE_B_SIZE;
is_idr = (in_size >= 5) && !(pdata[4] & 0x80) /* I==1 */;
}
else{ // F=1 and P=1
/* MODE C
@ -388,6 +395,7 @@ static tsk_size_t tdav_codec_h263_decode(tmedia_codec_t* self, const void* in_da
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
hdr_size = H263_HEADER_MODE_C_SIZE;
is_idr = (in_size >= 5) && !(pdata[4] & 0x80) /* I==1 */;
}
/* Check size */
@ -458,6 +466,13 @@ static tsk_size_t tdav_codec_h263_decode(tmedia_codec_t* self, const void* in_da
}
else if(got_picture_ptr){
retsize = xsize;
// Is it IDR frame?
if(is_idr && TMEDIA_CODEC_VIDEO(self)->in.callback){
TSK_DEBUG_INFO("Decoded H.263 IDR");
TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
}
TMEDIA_CODEC_VIDEO(h263)->in.width = h263->decoder.context->width;
TMEDIA_CODEC_VIDEO(h263)->in.height = h263->decoder.context->height;
/* copy picture into a linear buffer */
@ -628,7 +643,7 @@ static tsk_size_t tdav_codec_h263p_decode(tmedia_codec_t* self, const void* in_d
}
/*
5.1. General H.263+ Payload Header
rfc4629 - 5.1. General H.263+ Payload Header
0 1
0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
@ -1175,7 +1190,7 @@ static void tdav_codec_h263_rtp_callback(tdav_codec_h263_t *self, const void *da
if(TMEDIA_CODEC_VIDEO(self)->out.callback){
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.ptr = self->rtp.ptr;
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.size = (size + H263_HEADER_MODE_A_SIZE);
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (3003* (30/TMEDIA_CODEC_VIDEO(self)->out.fps));
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (1./(double)TMEDIA_CODEC_VIDEO(self)->out.fps) * TMEDIA_CODEC(self)->plugin->rate;
TMEDIA_CODEC_VIDEO(self)->out.result.last_chunck = marker;
TMEDIA_CODEC_VIDEO(self)->out.callback(&TMEDIA_CODEC_VIDEO(self)->out.result);
}
@ -1320,7 +1335,7 @@ static void tdav_codec_h263p_rtp_callback(tdav_codec_h263_t *self, const void *d
if(TMEDIA_CODEC_VIDEO(self)->out.callback){
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.ptr = _ptr;
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.size = _size;
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (3003* (30/TMEDIA_CODEC_VIDEO(self)->out.fps));
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (1./(double)TMEDIA_CODEC_VIDEO(self)->out.fps) * TMEDIA_CODEC(self)->plugin->rate;
TMEDIA_CODEC_VIDEO(self)->out.result.last_chunck = marker;
TMEDIA_CODEC_VIDEO(self)->out.callback(&TMEDIA_CODEC_VIDEO(self)->out.result);
}

View File

@ -218,7 +218,7 @@ static tsk_size_t tdav_codec_h264_encode(tmedia_codec_t* self, const void* in_da
tdav_codec_h264_t* h264 = (tdav_codec_h264_t*)self;
if(!self || !in_data || !in_size || !out_data){
if(!self || !in_data || !in_size){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
@ -265,7 +265,7 @@ static tsk_size_t tdav_codec_h264_encode(tmedia_codec_t* self, const void* in_da
#else
h264->encoder.picture->pict_type = send_idr ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_NONE;
#endif
h264->encoder.picture->pts = h264->encoder.frame_count;
h264->encoder.picture->pts = AV_NOPTS_VALUE;
h264->encoder.picture->quality = h264->encoder.context->global_quality;
// h264->encoder.picture->pts = h264->encoder.frame_count; MUST NOT
ret = avcodec_encode_video(h264->encoder.context, h264->encoder.buffer, size, h264->encoder.picture);
@ -292,6 +292,7 @@ static tsk_size_t tdav_codec_h264_decode(tmedia_codec_t* self, const void* in_da
tsk_size_t pay_size = 0;
int ret;
tsk_bool_t append_scp;
tsk_bool_t sps_or_pps;
tsk_size_t retsize = 0, size_to_copy = 0;
static tsk_size_t xmax_size = (1920 * 1080 * 3) >> 3;
static tsk_size_t start_code_prefix_size = sizeof(H264_START_CODE_PREFIX);
@ -325,7 +326,7 @@ static tsk_size_t tdav_codec_h264_decode(tmedia_codec_t* self, const void* in_da
|F|NRI| Type |
+---------------+
*/
if(*((uint8_t*)in_data) >> 7){
if(*((uint8_t*)in_data) & 0x80){
TSK_DEBUG_WARN("F=1");
/* reset accumulator */
h264->decoder.accumulator = 0;
@ -339,6 +340,8 @@ static tsk_size_t tdav_codec_h264_decode(tmedia_codec_t* self, const void* in_da
}
//append_scp = tsk_true;
size_to_copy = pay_size + (append_scp ? start_code_prefix_size : 0);
// whether it's SPS or PPS
sps_or_pps = pay_ptr && ((pay_ptr[0] & 0x1F) == 7 || (pay_ptr[0] & 0x1F) == 8);
// start-accumulator
if(!h264->decoder.accumulator){
@ -375,19 +378,24 @@ static tsk_size_t tdav_codec_h264_decode(tmedia_codec_t* self, const void* in_da
h264->decoder.accumulator_pos += pay_size;
// end-accumulator
if(rtp_hdr->marker){
if(sps_or_pps){
// http://libav-users.943685.n4.nabble.com/Decode-H264-streams-how-to-fill-AVCodecContext-from-SPS-PPS-td2484472.html
// SPS and PPS should be bundled with IDR
TSK_DEBUG_INFO("Receiving SPS or PPS ...to be tied to an IDR");
}
else if(rtp_hdr->marker){
#if HAVE_FFMPEG
AVPacket packet;
/* decode the picture */
av_init_packet(&packet);
packet.dts = packet.pts = AV_NOPTS_VALUE;
packet.size = h264->decoder.accumulator_pos;
packet.data = h264->decoder.accumulator;
ret = avcodec_decode_video2(h264->decoder.context, h264->decoder.picture, &got_picture_ptr, &packet);
if(ret <0){
TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d", ret);
TSK_DEBUG_INFO("Failed to decode the buffer with error code =%d, size=%u, append=%s", ret, h264->decoder.accumulator_pos, append_scp ? "yes" : "no");
if(TMEDIA_CODEC_VIDEO(self)->in.callback){
TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_error;
TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
@ -397,6 +405,13 @@ static tsk_size_t tdav_codec_h264_decode(tmedia_codec_t* self, const void* in_da
else if(got_picture_ptr){
tsk_size_t xsize;
/* IDR ? */
if(((((int8_t*)in_data)[0] & 0x1F) == 0x05) && TMEDIA_CODEC_VIDEO(self)->in.callback){
TSK_DEBUG_INFO("Decoded H.264 IDR");
TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
}
/* fill out */
xsize = avpicture_get_size(h264->decoder.context->pix_fmt, h264->decoder.context->width, h264->decoder.context->height);
if(*out_max_size<xsize){
@ -876,7 +891,7 @@ int tdav_codec_h264_open_decoder(tdav_codec_h264_t* self)
self->decoder.context->height = TMEDIA_CODEC_VIDEO(self)->in.height;
#if TDAV_UNDER_WINDOWS
self->decoder.context->dsp_mask = (FF_MM_MMX | FF_MM_MMXEXT | FF_MM_SSE);
// self->decoder.context->dsp_mask = (FF_MM_MMX | FF_MM_MMXEXT | FF_MM_SSE);
#endif
// Picture (YUV 420)
@ -987,23 +1002,35 @@ int tdav_codec_h264_deinit(tdav_codec_h264_t* self)
static void tdav_codec_h264_encap(const tdav_codec_h264_t* h264, const uint8_t* pdata, tsk_size_t size)
{
register int32_t i;
int32_t last_scp, prev_scp;
static int32_t size_of_scp = sizeof(H264_START_CODE_PREFIX); /* we know it's equal to 4 ..but */
static const tsk_size_t size_of_scp = sizeof(H264_START_CODE_PREFIX); /* we know it's equal to 4 .. */
register tsk_size_t i;
tsk_size_t last_scp, prev_scp;
tsk_size_t _size;
if(!pdata || !size){
if(!pdata || size < size_of_scp){
return;
}
if(pdata[0] == 0 && pdata[1] == 0){
if(pdata[2] == 1){
pdata += 3, size -= 3;
}
else if(pdata[2] == 0 && pdata[3] == 1){
pdata += 4, size -= 4;
}
}
_size = (size - size_of_scp);
last_scp = 0, prev_scp = 0;
for(i = size_of_scp; i<(int32_t)(size - size_of_scp); i++){
if(pdata[i] == H264_START_CODE_PREFIX[0] && pdata[i+1] == H264_START_CODE_PREFIX[1] && pdata[i+2] == H264_START_CODE_PREFIX[2] && pdata[i+3] == H264_START_CODE_PREFIX[3]){ /* Found Start Code Prefix */
for(i = size_of_scp; i<_size; i++){
if(pdata[i] == 0 && pdata[i+1] == 0 && (pdata[i+2] == 1 || (pdata[i+2] == 0 && pdata[i+3] == 1))){ /* Find Start Code Prefix */
prev_scp = last_scp;
if((i - last_scp) >= H264_RTP_PAYLOAD_SIZE || 1){
tdav_codec_h264_rtp_callback(TDAV_CODEC_H264_COMMON(h264), pdata + prev_scp,
(i - prev_scp), (prev_scp == size));
}
last_scp = i;
i += (pdata[i+2] == 1) ? 3 : 4;
}
}

View File

@ -162,6 +162,7 @@ int tdav_codec_h264_parse_profile(const char* profile_level_id, profile_idc_t *p
int tdav_codec_h264_get_pay(const void* in_data, tsk_size_t in_size, const void** out_data, tsk_size_t *out_size, tsk_bool_t* append_scp)
{
const uint8_t* pdata = in_data;
uint8_t nal_type;
if(!in_data || !in_size || !out_data || !out_size){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
@ -169,7 +170,6 @@ int tdav_codec_h264_get_pay(const void* in_data, tsk_size_t in_size, const void*
*out_data = tsk_null;
*out_size = 0;
*append_scp = tsk_true;
/* 5.3. NAL Unit Octet Usage
+---------------+
@ -178,7 +178,7 @@ int tdav_codec_h264_get_pay(const void* in_data, tsk_size_t in_size, const void*
|F|NRI| Type |
+---------------+
*/
switch((pdata[0] & 0x1F)){
switch((nal_type = (pdata[0] & 0x1F))){
case undefined_0:
case undefined_30:
case undefined_31:
@ -191,6 +191,7 @@ int tdav_codec_h264_get_pay(const void* in_data, tsk_size_t in_size, const void*
case fu_a:
return tdav_codec_h264_get_fua_pay(pdata, in_size, out_data, out_size, append_scp);
default: /* NAL unit (1-23) */
*append_scp = tsk_true;//(nal_type != 7 && nal_type != 8); // SPS or PPS
return tdav_codec_h264_get_nalunit_pay(pdata, in_size, out_data, out_size);
}
@ -236,40 +237,28 @@ int tdav_codec_h264_get_fua_pay(const uint8_t* in_data, tsk_size_t in_size, cons
+---------------+
*/
if((in_data[1] & 0x80) == 0x80 /*S*/){
if((in_data[1] & 0x80) /*S*/){
/* discard "FU indicator"
S: 1 bit
When set to one, the Start bit indicates the start of a fragmented
NAL unit. When the following FU payload is not the start of a
fragmented NAL unit payload, the Start bit is set to zero.
*/
if(in_size> H264_NAL_UNIT_TYPE_HEADER_SIZE){
uint8_t hdr;
*out_data = (in_data + H264_NAL_UNIT_TYPE_HEADER_SIZE);
*out_size = (in_size - H264_NAL_UNIT_TYPE_HEADER_SIZE);
uint8_t hdr;
*out_data = (in_data + H264_NAL_UNIT_TYPE_HEADER_SIZE);
*out_size = (in_size - H264_NAL_UNIT_TYPE_HEADER_SIZE);
// F, NRI and Type
hdr = (in_data[0] & 0xe0) /* F,NRI from "FU indicator"*/ | (in_data[1] & 0x1f) /* type from "FU header" */;
*((uint8_t*)*out_data) = hdr;
// Need to append Start Code Prefix
*append_scp = tsk_true;
}
else{
TSK_DEBUG_ERROR("Too short");
return -1;
}
}
// F, NRI and Type
hdr = (in_data[0] & 0xe0) /* F,NRI from "FU indicator"*/ | (in_data[1] & 0x1f) /* type from "FU header" */;
*((uint8_t*)*out_data) = hdr;
// Need to append Start Code Prefix
*append_scp = tsk_true;
}
else{
/* "FU indicator" and "FU header" */
if(in_size> H264_FUA_HEADER_SIZE){
*out_data = (in_data + H264_FUA_HEADER_SIZE);
*out_size = (in_size - H264_FUA_HEADER_SIZE);
*append_scp = tsk_false;
}
else{
TSK_DEBUG_ERROR("Too short");
return -1;
}
*out_data = (in_data + H264_FUA_HEADER_SIZE);
*out_size = (in_size - H264_FUA_HEADER_SIZE);
*append_scp = tsk_false;
}
return 0;
@ -322,7 +311,7 @@ void tdav_codec_h264_rtp_callback(struct tdav_codec_h264_common_s *self, const v
if(TMEDIA_CODEC_VIDEO(self)->out.callback){
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.ptr = pdata;
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.size = size;
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (3003* (30/TMEDIA_CODEC_VIDEO(self)->out.fps));
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (1./(double)TMEDIA_CODEC_VIDEO(self)->out.fps) * TMEDIA_CODEC(self)->plugin->rate;
TMEDIA_CODEC_VIDEO(self)->out.result.last_chunck = marker;
TMEDIA_CODEC_VIDEO(self)->out.callback(&TMEDIA_CODEC_VIDEO(self)->out.result);
}
@ -330,7 +319,7 @@ void tdav_codec_h264_rtp_callback(struct tdav_codec_h264_common_s *self, const v
else if(size > H264_NAL_UNIT_TYPE_HEADER_SIZE){
/* Should be Fragmented as FUA */
uint8_t fua_hdr[H264_FUA_HEADER_SIZE]; /* "FU indicator" and "FU header" - 2bytes */
fua_hdr[0] = pdata[0] & 0x60/* F=0 */, fua_hdr[0] |= fu_a;
fua_hdr[0] = pdata[0] & 0x60/* NRI */, fua_hdr[0] |= fu_a;
fua_hdr[1] = 0x80/* S=1,E=0,R=0 */, fua_hdr[1] |= pdata[0] & 0x1f; /* type */
// discard header
pdata += H264_NAL_UNIT_TYPE_HEADER_SIZE;
@ -364,7 +353,7 @@ void tdav_codec_h264_rtp_callback(struct tdav_codec_h264_common_s *self, const v
if(TMEDIA_CODEC_VIDEO(self)->out.callback){
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.ptr = self->rtp.ptr;
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.size = (packet_size + H264_FUA_HEADER_SIZE);
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (3003* (30/TMEDIA_CODEC_VIDEO(self)->out.fps));
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (1./(double)TMEDIA_CODEC_VIDEO(self)->out.fps) * TMEDIA_CODEC(self)->plugin->rate;
TMEDIA_CODEC_VIDEO(self)->out.result.last_chunck = (size == 0);
TMEDIA_CODEC_VIDEO(self)->out.callback(&TMEDIA_CODEC_VIDEO(self)->out.result);
}

View File

@ -724,7 +724,7 @@ static void tdav_codec_mp4ves_rtp_callback(tdav_codec_mp4ves_t *mp4v, const void
if(TMEDIA_CODEC_VIDEO(mp4v)->out.callback){
TMEDIA_CODEC_VIDEO(mp4v)->out.result.buffer.ptr = data;
TMEDIA_CODEC_VIDEO(mp4v)->out.result.buffer.size = size;
TMEDIA_CODEC_VIDEO(mp4v)->out.result.duration = (3003* (30/TMEDIA_CODEC_VIDEO(mp4v)->out.fps));
TMEDIA_CODEC_VIDEO(mp4v)->out.result.duration = (1./(double)TMEDIA_CODEC_VIDEO(mp4v)->out.fps) * TMEDIA_CODEC(mp4v)->plugin->rate;
TMEDIA_CODEC_VIDEO(mp4v)->out.result.last_chunck = marker;
TMEDIA_CODEC_VIDEO(mp4v)->out.callback(&TMEDIA_CODEC_VIDEO(mp4v)->out.result);
}

View File

@ -834,7 +834,7 @@ int tdav_codec_theora_send(tdav_codec_theora_t* self, const uint8_t* data, tsk_s
if(TMEDIA_CODEC_VIDEO(self)->out.callback){
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.ptr = self->rtp.ptr;
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.size = (pay_size + sizeof(pay_hdr));
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (3003* (30/TMEDIA_CODEC_VIDEO(self)->out.fps));
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (1./(double)TMEDIA_CODEC_VIDEO(self)->out.fps) * TMEDIA_CODEC(self)->plugin->rate;
TMEDIA_CODEC_VIDEO(self)->out.result.last_chunck = (size == 0);
TMEDIA_CODEC_VIDEO(self)->out.callback(&TMEDIA_CODEC_VIDEO(self)->out.result);
}

View File

@ -55,7 +55,7 @@
# define TDAV_VP8_PAY_DESC_SIZE 4
#endif
#define TDAV_SYSTEM_CORES_COUNT 0
#define TDAV_VP8_GOP_SIZE_IN_SECONDS 25
#define TDAV_VP8_GOP_SIZE_IN_SECONDS 60
#define TDAV_VP8_RTP_PAYLOAD_MAX_SIZE 1050
#if !defined(TDAV_VP8_MAX_BANDWIDTH_KB)
# define TDAV_VP8_MAX_BANDWIDTH_KB 6000
@ -75,7 +75,7 @@ typedef struct tdav_codec_vp8_s
tsk_bool_t initialized;
vpx_codec_pts_t pts;
vpx_codec_ctx_t context;
uint16_t pic_id;
unsigned pic_id:15;
uint64_t frame_count;
tsk_bool_t force_idr;
uint32_t target_bitrate;
@ -96,9 +96,8 @@ typedef struct tdav_codec_vp8_s
tsk_size_t accumulator_pos;
tsk_size_t accumulator_size;
uint16_t last_seq;
unsigned last_PartID:4;
unsigned last_S:1;
unsigned last_N:1;
uint32_t last_timestamp;
tsk_bool_t idr;
} decoder;
}
tdav_codec_vp8_t;
@ -237,7 +236,7 @@ static tsk_size_t tdav_codec_vp8_encode(tmedia_codec_t* self, const void* in_dat
vpx_codec_iter_t iter = tsk_null;
vpx_image_t image;
if(!vp8 || !in_data || !in_size || !out_data){
if(!vp8 || !in_data || !in_size){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
@ -298,6 +297,7 @@ static tsk_size_t tdav_codec_vp8_decode(tmedia_codec_t* self, const void* in_dat
const uint8_t* pdata_end = (pdata + in_size);
tsk_size_t ret = 0;
static const tsk_size_t xmax_size = (1920 * 1080 * 3) >> 3;
uint8_t S, PartID;
if(!self || !in_data || in_size<1 || !out_data || !vp8->decoder.initialized){
TSK_DEBUG_ERROR("Invalid parameter");
@ -305,7 +305,7 @@ static tsk_size_t tdav_codec_vp8_decode(tmedia_codec_t* self, const void* in_dat
}
{ /* 4.2. VP8 Payload Descriptor */
uint8_t X, R, N, S, I, L, T, K, PartID;//FIXME: store
uint8_t X, R, N, I, L, T, K;//FIXME: store
X = (*pdata & 0x80)>>7;
R = (*pdata & 0x40)>>6;
@ -317,36 +317,84 @@ static tsk_size_t tdav_codec_vp8_decode(tmedia_codec_t* self, const void* in_dat
S = (*pdata & 0x10)>>4;
PartID = (*pdata & 0x0F);
// skip "REQUIRED" header
if(++pdata >= pdata_end){ TSK_DEBUG_ERROR("Too short"); goto bail; }
if(++pdata >= pdata_end){
TSK_DEBUG_ERROR("Too short"); goto bail;
}
// check "OPTIONAL" headers
if(X){
I = (*pdata & 0x80);
L = (*pdata & 0x40);
T = (*pdata & 0x20);
K = (*pdata & 0x10);
if(++pdata >= pdata_end){ TSK_DEBUG_ERROR("Too short"); goto bail; }
if(++pdata >= pdata_end){
TSK_DEBUG_ERROR("Too short"); goto bail;
}
if(I){
if(*pdata & 0x80){ // M
// PictureID on 16bits
if((pdata += 2) >= pdata_end){ TSK_DEBUG_ERROR("Too short"); goto bail; }
if((pdata += 2) >= pdata_end){
TSK_DEBUG_ERROR("Too short"); goto bail;
}
}
else{
// PictureID on 8bits
if(++pdata >= pdata_end){ TSK_DEBUG_ERROR("Too short"); goto bail; }
if(++pdata >= pdata_end){
TSK_DEBUG_ERROR("Too short"); goto bail;
}
}
}
if(L){
if(++pdata >= pdata_end){ TSK_DEBUG_ERROR("Too short"); goto bail; }
if(++pdata >= pdata_end){
TSK_DEBUG_ERROR("Too short"); goto bail;
}
}
if(T || K){
if(++pdata >= pdata_end){ TSK_DEBUG_ERROR("Too short"); goto bail; }
if(++pdata >= pdata_end){
TSK_DEBUG_ERROR("Too short"); goto bail;
}
}
}
}
in_size = (pdata_end - pdata);
// New frame ?
if(vp8->decoder.last_timestamp != rtp_hdr->timestamp){
/* 4.3. VP8 Payload Header
Note that the header is present only in packets
which have the S bit equal to one and the PartID equal to zero in the
payload descriptor. Subsequent packets for the same frame do not
carry the payload header.
0 1 2 3 4 5 6 7
+-+-+-+-+-+-+-+-+
|Size0|H| VER |P|
+-+-+-+-+-+-+-+-+
| Size1 |
+-+-+-+-+-+-+-+-+
| Size2 |
+-+-+-+-+-+-+-+-+
| Bytes 4..N of |
| VP8 payload |
: :
+-+-+-+-+-+-+-+-+
| OPTIONAL RTP |
| padding |
: :
+-+-+-+-+-+-+-+-+
P: Inverse key frame flag. When set to 0 the current frame is a key
frame. When set to 1 the current frame is an interframe. Defined
in [RFC6386]
*/
if(PartID == 0 && S == 1 && in_size > 0){
vp8->decoder.idr = !(*pdata & 0x01);
}
else{
vp8->decoder.idr = tsk_false;
}
vp8->decoder.last_timestamp = rtp_hdr->timestamp;
}
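/* Worked example (not part of the patch): the P bit is the least significant bit of
   the first payload byte; 0x10 (P=0) marks a key frame while 0x11 (P=1) marks an
   interframe, so vp8->decoder.idr is set only for the former. */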
// Packet lost?
if(vp8->decoder.last_seq && (vp8->decoder.last_seq + 1) != rtp_hdr->seq_num){
TSK_DEBUG_INFO("Packet lost, seq_num=%d", (vp8->decoder.last_seq + 1));
@ -418,6 +466,14 @@ static tsk_size_t tdav_codec_vp8_decode(tmedia_codec_t* self, const void* in_dat
}
goto bail;
}
else if(vp8->decoder.idr){
TSK_DEBUG_INFO("Decoded VP8 IDR");
if(TMEDIA_CODEC_VIDEO(self)->in.callback){
TMEDIA_CODEC_VIDEO(self)->in.result.type = tmedia_video_decode_result_type_idr;
TMEDIA_CODEC_VIDEO(self)->in.result.proto_hdr = proto_hdr;
TMEDIA_CODEC_VIDEO(self)->in.callback(&TMEDIA_CODEC_VIDEO(self)->in.result);
}
}
// copy decoded data
ret = 0;
@ -527,6 +583,7 @@ static tsk_object_t* tdav_codec_vp8_ctor(tsk_object_t * self, va_list * app)
static tsk_object_t* tdav_codec_vp8_dtor(tsk_object_t * self)
{
tdav_codec_vp8_t *vp8 = self;
TSK_DEBUG_INFO("*** tdav_codec_vp8_dtor destroyed ***");
if(vp8){
/* deinit base */
tmedia_codec_video_deinit(vp8);
@ -607,12 +664,18 @@ int tdav_codec_vp8_open_encoder(tdav_codec_vp8_t* self)
self->encoder.cfg.g_timebase.num = 1;
self->encoder.cfg.g_timebase.den = TMEDIA_CODEC_VIDEO(self)->out.fps;
self->encoder.cfg.rc_target_bitrate = self->encoder.target_bitrate = (TMEDIA_CODEC_VIDEO(self)->out.width * TMEDIA_CODEC_VIDEO(self)->out.height * 256 / 352 / 288);
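/* Worked example (assumption: rc_target_bitrate is in kbit/s, as in libvpx): for VGA
   (640x480) this scales the 256 kbit/s CIF (352x288) reference up to
   640*480*256/352/288 = ~775 kbit/s. */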
self->encoder.cfg.rc_end_usage = VPX_CBR;
self->encoder.cfg.g_w = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.height : TMEDIA_CODEC_VIDEO(self)->out.width;
self->encoder.cfg.g_h = (self->encoder.rotation == 90 || self->encoder.rotation == 270) ? TMEDIA_CODEC_VIDEO(self)->out.width : TMEDIA_CODEC_VIDEO(self)->out.height;
self->encoder.cfg.kf_mode = VPX_KF_AUTO;
self->encoder.cfg.kf_min_dist = self->encoder.cfg.kf_max_dist = (TDAV_VP8_GOP_SIZE_IN_SECONDS * TMEDIA_CODEC_VIDEO(self)->out.fps);
/*self->encoder.cfg.kf_min_dist =*/ self->encoder.cfg.kf_max_dist = (TDAV_VP8_GOP_SIZE_IN_SECONDS * TMEDIA_CODEC_VIDEO(self)->out.fps);
#if defined(VPX_ERROR_RESILIENT_DEFAULT)
self->encoder.cfg.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT;
#else
self->encoder.cfg.g_error_resilient = 1;
#endif
#if defined(VPX_ERROR_RESILIENT_PARTITIONS)
self->encoder.cfg.g_error_resilient |= VPX_ERROR_RESILIENT_PARTITIONS;
#endif
self->encoder.cfg.g_lag_in_frames = 0;
#if TDAV_UNDER_WINDOWS
{
@ -621,11 +684,17 @@ int tdav_codec_vp8_open_encoder(tdav_codec_vp8_t* self)
self->encoder.cfg.g_threads = SystemInfo.dwNumberOfProcessors;
}
#endif
self->encoder.cfg.rc_dropframe_thresh = 30;
self->encoder.cfg.rc_end_usage = VPX_CBR;
self->encoder.cfg.g_pass = VPX_RC_ONE_PASS;
self->encoder.cfg.rc_min_quantizer = 0;//TSK_CLAMP(self->encoder.cfg.rc_min_quantizer, 10, self->encoder.cfg.rc_max_quantizer);
self->encoder.cfg.rc_max_quantizer = 63;//TSK_CLAMP(self->encoder.cfg.rc_min_quantizer, 51, self->encoder.cfg.rc_max_quantizer);
//self->encoder.cfg.rc_resize_allowed = 0;
self->encoder.cfg.g_profile = 0;
self->encoder.cfg.rc_resize_allowed = 0;
self->encoder.cfg.rc_min_quantizer = 8;
self->encoder.cfg.rc_max_quantizer = 56;
self->encoder.cfg.rc_undershoot_pct = 100;
self->encoder.cfg.rc_overshoot_pct = 15;
self->encoder.cfg.rc_buf_initial_sz = 500;
self->encoder.cfg.rc_buf_optimal_sz = 600;
self->encoder.cfg.rc_buf_sz = 1000;
enc_flags = 0; //VPX_EFLAG_XXX
@ -633,12 +702,14 @@ int tdav_codec_vp8_open_encoder(tdav_codec_vp8_t* self)
TSK_DEBUG_ERROR("vpx_codec_enc_init failed with error =%s", vpx_codec_err_to_string(vpx_ret));
return -3;
}
self->encoder.pic_id = (rand() ^ rand()) % 0x7FFF;
self->encoder.pic_id = /*(rand() ^ rand()) % 0x7FFF*/0/*Use zero: why do you want to make your life harder?*/;
self->encoder.initialized = tsk_true;
//vpx_codec_control(&self->encoder.context, VP8E_SET_CPUUSED, 0);
//vpx_codec_control(&self->encoder.context, VP8E_SET_SHARPNESS, 7);
//vpx_codec_control(&self->encoder.context, VP8E_SET_ENABLEAUTOALTREF, 1);
vpx_codec_control(&self->encoder.context, VP8E_SET_STATIC_THRESHOLD, 800);
#if !TDAV_UNDER_MOBILE /* must not remove: crash on Android for sure and probably on iOS also (all ARM devices ?) */
vpx_codec_control(&self->encoder.context, VP8E_SET_NOISE_SENSITIVITY, 2);
#endif
/* vpx_codec_control(&self->encoder.context, VP8E_SET_CPUUSED, 0); */
return 0;
}
@ -648,7 +719,9 @@ int tdav_codec_vp8_open_decoder(tdav_codec_vp8_t* self)
vpx_codec_err_t vpx_ret;
vpx_codec_caps_t dec_caps;
vpx_codec_flags_t dec_flags = 0;
#if !TDAV_UNDER_MOBILE
static vp8_postproc_cfg_t __pp = { VP8_DEBLOCK | VP8_DEMACROBLOCK, 4, 0};
#endif
if(self->decoder.initialized){
TSK_DEBUG_ERROR("VP8 decoder already initialized");
@ -666,21 +739,24 @@ int tdav_codec_vp8_open_decoder(tdav_codec_vp8_t* self)
#endif
dec_caps = vpx_codec_get_caps(&vpx_codec_vp8_dx_algo);
#if !TDAV_UNDER_MOBILE
if(dec_caps & VPX_CODEC_CAP_POSTPROC){
dec_flags |= VPX_CODEC_USE_POSTPROC;
}
//--if(dec_caps & VPX_CODEC_CAP_ERROR_CONCEALMENT){
//-- dec_flags |= VPX_CODEC_USE_ERROR_CONCEALMENT;
//--}
#endif
if(dec_caps & VPX_CODEC_CAP_ERROR_CONCEALMENT){
dec_flags |= VPX_CODEC_USE_ERROR_CONCEALMENT;
}
if((vpx_ret = vpx_codec_dec_init(&self->decoder.context, vp8_interface_dec, &self->decoder.cfg, dec_flags)) != VPX_CODEC_OK){
TSK_DEBUG_ERROR("vpx_codec_dec_init failed with error =%s", vpx_codec_err_to_string(vpx_ret));
return -4;
}
#if !TDAV_UNDER_MOBILE
if((vpx_ret = vpx_codec_control(&self->decoder.context, VP8_SET_POSTPROC, &__pp))){
TSK_DEBUG_WARN("vpx_codec_dec_init failed with error =%s", vpx_codec_err_to_string(vpx_ret));
}
#endif
self->decoder.initialized = tsk_true;
return 0;
@ -688,19 +764,23 @@ int tdav_codec_vp8_open_decoder(tdav_codec_vp8_t* self)
int tdav_codec_vp8_close_encoder(tdav_codec_vp8_t* self)
{
TSK_DEBUG_INFO("tdav_codec_vp8_close_encoder(begin)");
if(self->encoder.initialized){
vpx_codec_destroy(&self->encoder.context);
self->encoder.initialized = tsk_false;
}
TSK_DEBUG_INFO("tdav_codec_vp8_close_encoder(end)");
return 0;
}
int tdav_codec_vp8_close_decoder(tdav_codec_vp8_t* self)
{
TSK_DEBUG_INFO("tdav_codec_vp8_close_decoder(begin)");
if(self->decoder.initialized){
vpx_codec_destroy(&self->decoder.context);
self->decoder.initialized = tsk_false;
}
TSK_DEBUG_INFO("tdav_codec_vp8_close_decoder(end)");
return 0;
}
@ -726,10 +806,15 @@ static void tdav_codec_vp8_encap(tdav_codec_vp8_t* self, const vpx_codec_cx_pkt_
is_keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY);
// check P bit validity
#if 0
if((is_keyframe && (*frame_ptr & 0x01)) || (!is_keyframe && !(*frame_ptr & 0x01))){// 4.3. VP8 Payload Header
TSK_DEBUG_ERROR("Invalid payload header");
return;
}
if(is_keyframe){
TSK_DEBUG_INFO("Sending VP8 keyframe...");
}
#endif
// first partition (contains modes and motion vectors)
part_ID = 0; // The first VP8 partition(containing modes and motion vectors) MUST be labeled with PartID = 0
@ -855,20 +940,20 @@ static void tdav_codec_vp8_rtp_callback(tdav_codec_vp8_t *self, const void *data
// X: |I|L|T|K| RSV |
self->encoder.rtp.ptr[1] = 0x80; // I = 1, L = 0, T = 0, K = 0, RSV = 0
// I: |M| PictureID |
self->encoder.rtp.ptr[2] = (0x80 | (self->encoder.pic_id >> 9)); // M = 1 (PictureID on 15 bits)
self->encoder.rtp.ptr[2] = (0x80 | ((self->encoder.pic_id >> 8) & 0x7F)); // M = 1 (PictureID on 15 bits)
self->encoder.rtp.ptr[3] = (self->encoder.pic_id & 0xFF);
#endif
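/* Worked example (not part of the patch): for a 15-bit PictureID of 0x7FFF the two
   bytes must be 0xFF 0xFF (M=1 plus the top 7 bits, then the low 8 bits). The old
   ">> 9" dropped the most significant bit (giving 0xBF 0xFF), consistent with the
   PictureID overflow reported in issue 243. */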
/* 4.3. VP8 Payload Header */
if(has_hdr){
//if(has_hdr){
// already part of the encoded stream
}
//}
// Send data over the network
if(TMEDIA_CODEC_VIDEO(self)->out.callback){
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.ptr = self->encoder.rtp.ptr;
TMEDIA_CODEC_VIDEO(self)->out.result.buffer.size = (size + TDAV_VP8_PAY_DESC_SIZE);
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (3003* (30/TMEDIA_CODEC_VIDEO(self)->out.fps));
TMEDIA_CODEC_VIDEO(self)->out.result.duration = (1./(double)TMEDIA_CODEC_VIDEO(self)->out.fps) * TMEDIA_CODEC(self)->plugin->rate;
TMEDIA_CODEC_VIDEO(self)->out.result.last_chunck = last;
TMEDIA_CODEC_VIDEO(self)->out.callback(&TMEDIA_CODEC_VIDEO(self)->out.result);
}

View File

@ -1026,11 +1026,11 @@ const tsdp_header_M_t* tdav_session_av_get_lo(tdav_session_av_t* self, tsk_bool_
// draft-lennox-mmusic-sdp-source-attributes-01
if(self->media_type == tmedia_audio || self->media_type == tmedia_video){
char* str = tsk_null;
tsk_sprintf(&str, "%u cname:%s", self->rtp_manager->rtp.ssrc.local, "ldjWoB60jbyQlR6e");
tsk_sprintf(&str, "%u cname:%s", self->rtp_manager->rtp.ssrc.local, self->rtp_manager->rtcp.cname); // also defined in RTCP session
tsdp_header_M_add_headers(base->M.lo, TSDP_HEADER_A_VA_ARGS("ssrc", str), tsk_null);
tsk_sprintf(&str, "%u mslabel:%s", self->rtp_manager->rtp.ssrc.local, "6994f7d1-6ce9-4fbd-acfd-84e5131ca2e2");
tsdp_header_M_add_headers(base->M.lo, TSDP_HEADER_A_VA_ARGS("ssrc", str), tsk_null);
tsk_sprintf(&str, "%u label:%s", self->rtp_manager->rtp.ssrc.local, (self->media_type == tmedia_audio) ? "Doubango.Audio" : "Doubango.Video"); /* https://groups.google.com/group/discuss-webrtc/browse_thread/thread/6c44106c8ce7d6dc */
tsk_sprintf(&str, "%u label:%s", self->rtp_manager->rtp.ssrc.local, (self->media_type == tmedia_audio) ? "doubango@audio" : "doubango@video"); /* https://groups.google.com/group/discuss-webrtc/browse_thread/thread/6c44106c8ce7d6dc */
tsdp_header_M_add_headers(base->M.lo, TSDP_HEADER_A_VA_ARGS("ssrc", str), tsk_null);
TSK_FREE(str);
}

View File

@ -90,6 +90,7 @@ tdav_video_frame_t* tdav_video_frame_create(trtp_rtp_packet_t* rtp_pkt)
frame->payload_type = rtp_pkt->header->payload_type;
frame->timestamp = rtp_pkt->header->timestamp;
frame->highest_seq_num = rtp_pkt->header->seq_num;
frame->ssrc = rtp_pkt->header->ssrc;
tsk_list_push_ascending_data(frame->pkts, (void**)&rtp_pkt);
}
return frame;
@ -109,6 +110,12 @@ int tdav_video_frame_put(tdav_video_frame_t* self, trtp_rtp_packet_t* rtp_pkt)
TSK_DEBUG_ERROR("Payload Type mismatch");
return -2;
}
#if 0
if(self->ssrc != rtp_pkt->header->ssrc){
TSK_DEBUG_ERROR("SSRC mismatch");
return -2;
}
#endif
rtp_pkt = tsk_object_ref(rtp_pkt);
self->highest_seq_num = TSK_MAX(self->highest_seq_num, rtp_pkt->header->seq_num);
@ -185,3 +192,45 @@ bail:
return ret_size;
}
/**
Checks if the frame is complete (no gap/loss) or not.
IMPORTANT: This function assumes that the RTP packets use the marker bit to signal the end of a frame.
*@param self The frame with all rtp packets to check
*@param last_seq_num_with_mark The last seq num value of the packet with the mark bit set. Use negative value to ignore.
*@param missing_seq_num A missing seq num if any. This value is set only if the function returns False.
*@return True if the frame is complete and False otherwise. If False is returned, missing_seq_num is set.
*/
tsk_bool_t tdav_video_frame_is_complete(const tdav_video_frame_t* self, int32_t last_seq_num_with_mark, uint16_t* missing_seq_num)
{
const trtp_rtp_packet_t* pkt;
const tsk_list_item_t *item;
uint16_t i;
tsk_bool_t is_complete = tsk_false;
if(!self || !missing_seq_num){
TSK_DEBUG_ERROR("Invalid parameter");
return tsk_false;
}
i = 0;
tsk_list_lock(self->pkts);
tsk_list_foreach(item, self->pkts){
if(!(pkt = item->data)){
continue;
}
if(last_seq_num_with_mark >= 0 && pkt->header->seq_num != (last_seq_num_with_mark + ++i)){
*missing_seq_num = (pkt->header->seq_num - 1);
break;
}
if(item == self->pkts->tail){
if(!(is_complete = (pkt->header->marker))){
*missing_seq_num = (pkt->header->seq_num + 1);
}
}
}
tsk_list_unlock(self->pkts);
return is_complete;
}

View File

@ -32,31 +32,48 @@
#include "tsk_time.h"
#include "tsk_memory.h"
#include "tsk_timer.h"
#include "tsk_thread.h"
#include "tsk_condwait.h"
#include "tsk_debug.h"
#if TSK_UNDER_WINDOWS
# include <windows.h>
#endif
// default frame rate
// the correct fps will be computed using the RTP timestamps
#define TDAV_VIDEO_JB_FPS TDAV_VIDEO_JB_FPS_MAX
#define TDAV_VIDEO_JB_FPS_MIN 1
#define TDAV_VIDEO_JB_FPS_MAX 30
#define TDAV_VIDEO_JB_FPS_MIN 15
#define TDAV_VIDEO_JB_FPS_MAX 60
// Number of correct consecutive RTP packets to receive before computing the FPS
#define TDAV_VIDEO_JB_FPS_PROB (TDAV_VIDEO_JB_FPS >> 1)
// Max number of frames to allow in jitter buffer
#define TDAV_VIDEO_JB_TAIL_MAX (TDAV_VIDEO_JB_FPS << 2)
//#define TDAV_VIDEO_JB_TAIL_MAX /*FIXME:(TDAV_VIDEO_JB_FPS << 2)*/100
// Min number of frames required before requesting a full decode
// This is required because of the FEC and NACK functions
// Will be updated using the RTT value from RTCP and probation
#define TDAV_VIDEO_JB_TAIL_MIN_MIN 2
#define TDAV_VIDEO_JB_TAIL_MIN_MAX 4
#define TDAV_VIDEO_JB_TAIL_MIN_MIN 10
#define TDAV_VIDEO_JB_TAIL_MIN_MAX 20
#define TDAV_VIDEO_JB_TAIL_MIN_PROB (TDAV_VIDEO_JB_FPS >> 2)
#define TDAV_VIDEO_JB_MAX_DROPOUT 0xFD9B
#define TDAV_VIDEO_JB_DISABLE 0
#define TDAV_VIDEO_JB_TAIL_MAX_LOG2 1
#if TDAV_UNDER_MOBILE /* to avoid too high memory usage */
# define TDAV_VIDEO_JB_TAIL_MAX (TDAV_VIDEO_JB_FPS_MIN << TDAV_VIDEO_JB_TAIL_MAX_LOG2)
#else
# define TDAV_VIDEO_JB_TAIL_MAX (TDAV_VIDEO_JB_FPS_MAX << TDAV_VIDEO_JB_TAIL_MAX_LOG2)
#endif
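/* Worked example (not part of the patch): with TDAV_VIDEO_JB_TAIL_MAX_LOG2 = 1 the
   tail caps at 15 << 1 = 30 frames on mobile and 60 << 1 = 120 frames elsewhere,
   i.e. roughly two seconds of video at the corresponding frame rate. */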
#define TDAV_VIDEO_JB_RATE 90 /* KHz */
#define TDAV_VIDEO_JB_LATENCY_MIN 2 /* Must be > 0 */
#define TDAV_VIDEO_JB_LATENCY_MAX 10
static const tdav_video_frame_t* _tdav_video_jb_get_frame(struct tdav_video_jb_s* self, uint32_t timestamp, uint8_t pt, tsk_bool_t *pt_matched);
static int _tdav_video_jb_timer_callback(const void* arg, tsk_timer_id_t timer_id);
static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg);
typedef struct tdav_video_jb_s
{
@ -66,6 +83,7 @@ typedef struct tdav_video_jb_s
int32_t fps;
int32_t fps_prob;
int32_t avg_duration;
int32_t rate; // in Khz
uint32_t last_timestamp;
int32_t conseq_frame_drop;
int32_t tail_max;
@ -73,8 +91,16 @@ typedef struct tdav_video_jb_s
int32_t tail_prob;
tdav_video_frames_L_t *frames;
int64_t frames_count;
tsk_timer_manager_handle_t *h_timer;
tsk_timer_id_t timer_decode;
tsk_size_t latency_min;
tsk_size_t latency_max;
uint32_t decode_last_timestamp;
int32_t decode_last_seq_num_with_mark; // -1 = unset
uint64_t decode_last_time;
tsk_thread_handle_t* decode_thread[1];
tsk_condwait_handle_t* decode_thread_cond;
uint16_t seq_nums[0xFF];
tdav_video_jb_cb_f callback;
const void* callback_data;
@ -102,13 +128,22 @@ static tsk_object_t* tdav_video_jb_ctor(tsk_object_t * self, va_list * app)
TSK_DEBUG_ERROR("Failed to create list");
return tsk_null;
}
if(!(jb->h_timer = tsk_timer_manager_create())){
TSK_DEBUG_ERROR("Failed to create timer manager");
if(!(jb->decode_thread_cond = tsk_condwait_create())){
TSK_DEBUG_ERROR("Failed to create condition var");
return tsk_null;
}
jb->cb_data_fdd.type = tdav_video_jb_cb_data_type_fdd;
jb->cb_data_rtp.type = tdav_video_jb_cb_data_type_rtp;
jb->decode_last_seq_num_with_mark = -1;
jb->fps = TDAV_VIDEO_JB_FPS_MAX;
jb->rate = TDAV_VIDEO_JB_RATE;
jb->latency_min = TDAV_VIDEO_JB_LATENCY_MIN;
jb->latency_max = TDAV_VIDEO_JB_LATENCY_MAX;
tsk_safeobj_init(jb);
}
return self;
@ -121,8 +156,8 @@ static tsk_object_t* tdav_video_jb_dtor(tsk_object_t * self)
tdav_video_jb_stop(jb);
}
TSK_OBJECT_SAFE_FREE(jb->frames);
if(jb->h_timer){
tsk_timer_manager_destroy(&jb->h_timer);
if(jb->decode_thread_cond){
tsk_condwait_destroy(&jb->decode_thread_cond);
}
TSK_SAFE_FREE(jb->buffer.ptr);
tsk_safeobj_deinit(jb);
@ -177,7 +212,7 @@ int tdav_video_jb_set_callback(tdav_video_jb_t* self, tdav_video_jb_cb_f callbac
int tdav_video_jb_start(tdav_video_jb_t* self)
{
int ret;
int ret = 0;
if(!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
@ -186,10 +221,16 @@ int tdav_video_jb_start(tdav_video_jb_t* self)
return 0;
}
if((ret = tsk_timer_manager_start(self->h_timer)) == 0){
self->timer_decode = tsk_timer_manager_schedule(self->h_timer, (1000 / self->fps), _tdav_video_jb_timer_callback, self);
self->started = tsk_true;
self->started = tsk_true;
if(!self->decode_thread[0]){
ret = tsk_thread_create(&self->decode_thread[0], _tdav_video_jb_decode_thread_func, self);
if(ret != 0 || !self->decode_thread[0]){
TSK_DEBUG_ERROR("Failed to create new thread");
}
ret = tsk_thread_set_priority(self->decode_thread[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
}
return ret;
}
@ -208,14 +249,29 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
return -1;
}
if(!self->started){
TSK_DEBUG_INFO("Video jitter buffer not started");
return 0;
}
seq_num = &self->seq_nums[rtp_pkt->header->payload_type];
tsk_safeobj_lock(self);
//TSK_DEBUG_INFO("receive seqnum=%u", rtp_pkt->header->seq_num);
if(self->decode_last_timestamp && (self->decode_last_timestamp > rtp_pkt->header->timestamp)){
if((self->decode_last_timestamp - rtp_pkt->header->timestamp) < TDAV_VIDEO_JB_MAX_DROPOUT){
TSK_DEBUG_INFO("--------Frame already Decoded [seqnum=%u]------------", rtp_pkt->header->seq_num);
tsk_safeobj_unlock(self);
return 0;
}
}
old_frame = _tdav_video_jb_get_frame(self, rtp_pkt->header->timestamp, rtp_pkt->header->payload_type, &pt_matched);
if((*seq_num && *seq_num != 0xFFFF) && (*seq_num + 1) != rtp_pkt->header->seq_num){ // FIXME: check if seq_num wrapped
int32_t diff = (rtp_pkt->header->seq_num - *seq_num);
if((*seq_num && *seq_num != 0xFFFF) && (*seq_num + 1) != rtp_pkt->header->seq_num){
int32_t diff = ((int32_t)rtp_pkt->header->seq_num - (int32_t)*seq_num);
tsk_bool_t is_frame_loss = (diff > 0);
is_restarted = (TSK_ABS(diff) > TDAV_VIDEO_JB_MAX_DROPOUT);
is_frame_late_or_dup = !is_frame_loss;
@ -247,7 +303,7 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
if((new_frame = tdav_video_frame_create(rtp_pkt))){
// compute avg frame duration
if(self->last_timestamp && self->last_timestamp < rtp_pkt->header->timestamp){
uint32_t duration = (rtp_pkt->header->timestamp - self->last_timestamp);
uint32_t duration = (rtp_pkt->header->timestamp - self->last_timestamp)/self->rate;
self->avg_duration = self->avg_duration ? ((self->avg_duration + duration) >> 1) : duration;
--self->fps_prob;
}
@ -255,8 +311,8 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
tsk_list_lock(self->frames);
if(self->frames_count >= self->tail_max){
if(++self->conseq_frame_drop >= self->fps){
TSK_DEBUG_INFO("Too many frames dropped and fps=%d", self->fps);
if(++self->conseq_frame_drop >= self->tail_max){
TSK_DEBUG_ERROR("Too many frames dropped and fps=%d", self->fps);
tsk_list_clear_items(self->frames);
self->conseq_frame_drop = 0;
self->frames_count = 1;
@ -268,7 +324,6 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
}
else{
tsk_list_remove_first_item(self->frames);
// self->frames_count += 0;
}
tdav_video_jb_reset_fps_prob(self);
}
@ -279,10 +334,11 @@ int tdav_video_jb_put(tdav_video_jb_t* self, trtp_rtp_packet_t* rtp_pkt)
tsk_list_unlock(self->frames);
}
if(self->fps_prob <= 0 && self->avg_duration){
// compute FPS
self->fps = TSK_CLAMP(TDAV_VIDEO_JB_FPS_MIN, ((3003 * 30) / self->avg_duration), TDAV_VIDEO_JB_FPS_MAX);
//self->fps = ((3003 * 30) / self->avg_duration);
self->tail_max = (self->fps << 1); // maximum delay = 2 seconds
// compute FPS using timestamp values
int32_t fps = (1000 / self->avg_duration);
self->fps = TSK_CLAMP(TDAV_VIDEO_JB_FPS_MIN, fps, TDAV_VIDEO_JB_FPS_MAX);
self->tail_max = (self->fps << TDAV_VIDEO_JB_TAIL_MAX_LOG2); // maximum delay = 2 seconds
TSK_DEBUG_INFO("According to rtp-timestamps ...FPS = %d (clipped to %d) and max jb tail will be = %d", fps, self->fps, self->tail_max);
tdav_video_jb_reset_fps_prob(self);
}
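/* Worked example (not part of the patch): with rate = 90 (kHz), frames whose RTP
   timestamps are 3000 ticks apart give avg_duration = 3000 / 90 = 33 ms, so
   fps = 1000 / 33 = 30, clamped to [TDAV_VIDEO_JB_FPS_MIN, TDAV_VIDEO_JB_FPS_MAX]. */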
}
@ -311,9 +367,16 @@ int tdav_video_jb_stop(tdav_video_jb_t* self)
return 0;
}
if((ret = tsk_timer_manager_stop(self->h_timer)) == 0){
self->started = tsk_false;
TSK_DEBUG_INFO("tdav_video_jb_stop()");
self->started = tsk_false;
ret = tsk_condwait_broadcast(self->decode_thread_cond);
if(self->decode_thread[0]){
ret = tsk_thread_join(&self->decode_thread[0]);
}
return ret;
}
@ -340,57 +403,116 @@ static const tdav_video_frame_t* _tdav_video_jb_get_frame(tdav_video_jb_t* self,
return ret;
}
static int _tdav_video_jb_timer_callback(const void* arg, tsk_timer_id_t timer_id)
static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
{
#if !TDAV_VIDEO_JB_DISABLE
tdav_video_jb_t* jb = (tdav_video_jb_t*)arg;
tdav_video_jb_t* jb = (tdav_video_jb_t*)arg;
uint64_t delay;
uint16_t missing_seq_num;
const tdav_video_frame_t* frame;
tsk_list_item_t* item;
uint64_t next_decode_duration = (1000 / jb->fps), now;
uint64_t x_decode_duration = (1000 / jb->fps); // expected
uint64_t x_decode_time = tsk_time_now();//expected
tsk_bool_t postpone;
static const uint64_t __toomuch_delay_to_be_valid = 10000; // guard against systems with buggy "tsk_time_now()" -Won't say Windows ...but :)-
if(!jb->started){
return 0;
}
jb->decode_last_seq_num_with_mark = -1; // -1 -> unset
jb->decode_last_time = tsk_time_now();
if(jb->timer_decode == timer_id){
uint64_t next_timeout = (1000 / jb->fps) - 15/*time spent for various tasks (mutexes, timer init, ...)*/;
(now);
(delay);
if(jb->frames_count >= jb->tail_min){
tsk_list_item_t* item;
uint64_t decode_start = tsk_time_now(), decode_duration;
TSK_DEBUG_INFO("Video jitter buffer thread - ENTER");
while(jb->started){
tsk_condwait_timedwait(jb->decode_thread_cond, next_decode_duration);
if(!jb->started){
break;
}
// TSK_DEBUG_INFO("Frames count = %d", jb->frames_count);
if(jb->frames_count >= jb->latency_min){
item = tsk_null;
postpone = tsk_false;
tsk_safeobj_lock(jb); // against get_frame()
tsk_list_lock(jb->frames);
item = tsk_list_pop_first_item(jb->frames);
--jb->frames_count;
tsk_list_lock(jb->frames); // against put()
// is it still acceptable to wait for missing packets?
if(jb->frames_count < jb->latency_max){
frame = (const tdav_video_frame_t*)jb->frames->head->data;
if(!tdav_video_frame_is_complete(frame, jb->decode_last_seq_num_with_mark, &missing_seq_num)){
TSK_DEBUG_INFO("Time to decode frame...but some RTP packets are missing (seqnum=%u). Postpone :(", missing_seq_num);
// signal to the session that a sequence number is missing (will send a NACK)
if(jb->callback){
jb->cb_data_any.type = tdav_video_jb_cb_data_type_fl;
jb->cb_data_any.ssrc = frame->ssrc;
jb->cb_data_any.fl.seq_num = missing_seq_num;
jb->cb_data_any.fl.count = 1;
jb->callback(&jb->cb_data_any);
postpone = tsk_true;
}
}
}
else{
jb->decode_last_seq_num_with_mark = -1; // unset()
}
if(!postpone){
item = tsk_list_pop_first_item(jb->frames);
--jb->frames_count;
}
tsk_list_unlock(jb->frames);
tsk_safeobj_unlock(jb);
if(jb->callback){
trtp_rtp_packet_t* pkt;
const tsk_list_item_t* _item = item; // save memory address as "tsk_list_foreach() will change it for each loop"
const tdav_video_frame_t* frame = _item->data;
int32_t last_seq_num = -1; // guard against duplicated packets
tsk_list_foreach(_item, frame->pkts){
if(!(pkt = _item->data) || !pkt->payload.size || !pkt->header || pkt->header->seq_num == last_seq_num){
continue;
if(item){
jb->decode_last_timestamp = ((const tdav_video_frame_t*)item->data)->timestamp;
if(jb->callback){
trtp_rtp_packet_t* pkt;
const tsk_list_item_t* _item = item; // save memory address as "tsk_list_foreach() will change it for each loop"
int32_t last_seq_num = -1; // guard against duplicated packets
frame = _item->data;
tsk_list_foreach(_item, frame->pkts){
if(!(pkt = _item->data) || !pkt->payload.size || !pkt->header || pkt->header->seq_num == last_seq_num || !jb->started){
TSK_DEBUG_ERROR("Skipping invalid rtp packet (do not decode!)");
continue;
}
jb->cb_data_rtp.rtp.pkt = pkt;
jb->callback(&jb->cb_data_rtp);
if(pkt->header->marker){
jb->decode_last_seq_num_with_mark = pkt->header->seq_num;
}
}
// pkt->header->marker = (_item == frame->pkts->tail); // break the accumulator
jb->cb_data_rtp.rtp.pkt = pkt;
jb->callback(&jb->cb_data_rtp);
}
TSK_OBJECT_SAFE_FREE(item);
}
TSK_OBJECT_SAFE_FREE(item);
decode_duration = (tsk_time_now() - decode_start);
next_timeout = (decode_duration > next_timeout) ? 0 : (next_timeout - decode_duration);
//if(!next_timeout)TSK_DEBUG_INFO("next_timeout=%llu", next_timeout);
}
else{
//TSK_DEBUG_INFO("Not enought frames");
next_timeout >>= 1;
}
jb->timer_decode = tsk_timer_manager_schedule(jb->h_timer, next_timeout, _tdav_video_jb_timer_callback, jb);
}
#if 1
now = tsk_time_now();
// comparison used as guard against time wrapping
delay = (now - x_decode_time);//(now > x_decode_time) ? (now - x_decode_time) : x_decode_duration/* do not use zero to avoid endless loop when there is no frame to display */;
if(delay > __toomuch_delay_to_be_valid){
TSK_DEBUG_INFO("Too much delay (%llu) in video jb. Reseting...", delay);
x_decode_time = now;
next_decode_duration = 0;
}
else{
next_decode_duration = (delay > x_decode_duration) ? 0 : (x_decode_duration - delay);
x_decode_duration = (1000 / jb->fps);
x_decode_time += x_decode_duration;
}
//TSK_DEBUG_INFO("next_decode_timeout=%llu, delay = %llu", next_decode_duration, delay);
#else
next_decode_duration = (1000 / jb->fps);
#endif
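/* Worked example (not part of the patch): at fps = 30 the expected decode period is
   1000 / 30 = 33 ms; if the previous iteration (decode + callbacks) ran 10 ms past
   the expected time, the next wait is 33 - 10 = 23 ms, and a delay larger than
   __toomuch_delay_to_be_valid (10 s) simply resets the expected decode time. */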
}
return 0;
TSK_DEBUG_INFO("Video jitter buffer thread - EXIT");
return tsk_null;
}

View File

@ -49,7 +49,12 @@
#include "tsk_memory.h"
#include "tsk_debug.h"
#define TDAV_SESSION_VIDEO_AVPF_FIR_INTERVAL_MIN 800 // millis
// Minimum time between two incoming FIR. If smaller, the request from the remote party will be ignored
// Tell the encoder to send IDR frame if condition is met
#define TDAV_SESSION_VIDEO_AVPF_FIR_HONOR_INTERVAL_MIN 1500 // millis
// Minimum time between two outgoing FIR. If smaller, no new FIR will be sent
// Tell the RTCP session to request IDR if condition is met
#define TDAV_SESSION_VIDEO_AVPF_FIR_REQUEST_INTERVAL_MIN 3000 // millis
#define TDAV_SESSION_VIDEO_PKT_LOSS_PROB_BAD 2
#define TDAV_SESSION_VIDEO_PKT_LOSS_PROB_GOOD 6
@ -59,6 +64,9 @@
#define TDAV_SESSION_VIDEO_PKT_LOSS_MEDIUM 22
#define TDAV_SESSION_VIDEO_PKT_LOSS_HIGH 63
// The maximum number of packet losses allowed
#define TDAV_SESSION_VIDEO_PKT_LOSS_MAX_COUNT_TO_REQUEST_FIR 50
static const tmedia_codec_action_t __action_encode_idr = tmedia_codec_action_encode_idr;
static const tmedia_codec_action_t __action_encode_bw_up = tmedia_codec_action_bw_up;
static const tmedia_codec_action_t __action_encode_bw_down = tmedia_codec_action_bw_down;
@ -85,7 +93,7 @@ static const tmedia_codec_action_t __action_encode_bw_down = tmedia_codec_action
#define _tdav_session_video_remote_requested_idr(__self, __ssrc_media) { \
uint64_t __now = tsk_time_now(); \
if((__now - (__self)->avpf.last_fir_time) > TDAV_SESSION_VIDEO_AVPF_FIR_INTERVAL_MIN){ /* guard to avoid sending too many FIR */ \
if((__now - (__self)->avpf.last_fir_time) > TDAV_SESSION_VIDEO_AVPF_FIR_HONOR_INTERVAL_MIN){ /* guard to avoid sending too many FIR */ \
_tdav_session_video_codec_set((__self), "action", __action_encode_idr); \
} \
if((__self)->cb_rtcpevent.func){ \
@ -157,18 +165,39 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
if(packet ){
tsk_size_t rtp_hdr_size;
if(!video->encoder.last_frame_time){
video->encoder.last_frame_time = tsk_time_now();
}
if(result->last_chunck){
#if 0
#if 0
/* http://www.cs.columbia.edu/~hgs/rtp/faq.html#timestamp-computed
For video, time clock rate is fixed at 90 kHz. The timestamps generated depend on whether the application can determine the frame number or not.
If it can or it can be sure that it is transmitting every frame with a fixed frame rate, the timestamp is governed by the nominal frame rate. Thus, for a 30 f/s video, timestamps would increase by 3,000 for each frame, for a 25 f/s video by 3,600 for each frame.
If a frame is transmitted as several RTP packets, these packets would all bear the same timestamp.
If the frame number cannot be determined or if frames are sampled aperiodically, as is typically the case for software codecs, the timestamp has to be computed from the system clock (e.g., gettimeofday())
*/
uint64_t now = tsk_time_now();
uint32_t duration = (uint32_t)(now - video->encoder.last_frame_time);
base->rtp_manager->rtp.timestamp += (duration * 90/* 90KHz */);
video->encoder.last_frame_time = now;
#else
base->rtp_manager->rtp.timestamp = (uint32_t)(tsk_gettimeofday_ms() * 90/* 90KHz */);
#endif
#else
base->rtp_manager->rtp.timestamp += result->duration;
#endif
}
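/* Worked example (not part of the patch): with the fixed 90 kHz video clock, a frame
   captured 40 ms after the previous one advances the RTP timestamp by 40 * 90 = 3600
   ticks, and every RTP packet of that frame carries the same timestamp (only the last
   packet of the frame sets the marker bit). */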
packet->payload.data_const = result->buffer.ptr;
packet->payload.size = result->buffer.size;
s = trtp_manager_send_rtp_packet(base->rtp_manager, packet, tsk_false); // encrypt and send data
++base->rtp_manager->rtp.seq_num; // seq_num must be incremented here (before the bail) because already used by SRTP context
if(s < TRTP_RTP_HEADER_MIN_SIZE) {
TSK_DEBUG_ERROR("Failed to send packet. %u expected but only %u sent", packet->payload.size, s);
TSK_DEBUG_ERROR("Failed to send packet with seqnum=%u. %u expected but only %u sent", packet->header->seq_num, packet->payload.size, s);
goto bail;
}
++base->rtp_manager->rtp.seq_num;
rtp_hdr_size = TRTP_RTP_HEADER_MIN_SIZE + (packet->header->csrc_count << 2);
// Save packet
// FIXME: only if AVPF is enabled
@ -191,7 +220,13 @@ static int tdav_session_video_raw_cb(const tmedia_video_encode_result_xt* result
++video->avpf.count;
}
tsk_list_push_ascending_data(video->avpf.packets, (void**)&packet_avpf); // filtered per seqnum
// The packet must not be added 'ascending' but 'back' because the sequence number could wrap.
// For example:
// - send(65533, 65534, 65535, 0, 1)
// - would be stored as (if added 'ascending'): 0, 1, 65533, 65534, 65535
// - this means there is no benefit (if added 'ascending') as we cannot do a 'smart search' using seqnums
// tsk_list_push_ascending_data(video->avpf.packets, (void**)&packet_avpf); // filtered per seqnum
tsk_list_push_back_data(video->avpf.packets, (void**)&packet_avpf);
tsk_list_unlock(video->avpf.packets);
}
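/* Sketch (assumption, not part of the patch): if ordered insertion were still wanted,
   an RFC 3550 style wrap-aware comparison could be used instead, e.g.
   "(int16_t)(a - b) > 0", which reports seq num 0 as newer than 65533 despite the wrap. */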
@ -253,8 +288,20 @@ bail:
static int tdav_session_video_decode_cb(const tmedia_video_decode_result_xt* result)
{
tdav_session_av_t* base = (tdav_session_av_t*)result->usr_data;
tdav_session_video_t* video = (tdav_session_video_t*)base;
switch(result->type){
case tmedia_video_decode_result_type_idr:
{
if(video->decoder.last_corrupted_timestamp != ((const trtp_rtp_header_t*)result->proto_hdr)->timestamp){
TSK_DEBUG_INFO("IDR frame decoded");
video->decoder.stream_corrupted = tsk_false;
}
else{
TSK_DEBUG_INFO("IDR frame decoded but corrupted :(");
}
break;
}
case tmedia_video_decode_result_type_error:
{
TSK_DEBUG_INFO("Decoding failed -> send Full Intra Refresh (FIR)");
@ -528,12 +575,12 @@ static int tdav_session_video_rtcp_cb(const void* callback_data, const trtp_rtcp
const tsk_list_item_t* item;
const trtp_rtp_packet_t* pkt_rtp;
for(i = 0; i < rtpfb->nack.count; ++i){
static const int32_t __Pow2[16] = { 0x8000, 0x4000, 0x2000, 0x1000, 0x800, 0x400, 0x200, 0x100, 0x80, 0x40, 0x20, 0x10, 0x8, 0x4, 0x2, 0x1 };
static const int32_t __Pow2[16] = { 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80, 0x100, 0x200, 0x400, 0x800, 0x1000, 0x2000, 0x4000, 0x8000 };
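/* Per RFC 4585 6.2.1 (assumption, not part of the patch): BLP bit k, with the least
   significant bit as k = 0, flags packet PID+k+1 as lost, hence the lookup table now
   starts at 0x1. Example: PID = 100, BLP = 0x0005 -> packets 100, 101 and 103 are lost. */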
int32_t blp_count;
blp = rtpfb->nack.blp[i];
blp_count = blp ? 16 : 0;
for(j = -1; j < blp_count; ++j){
for(j = -1/*Packet ID (PID)*/; j < blp_count; ++j){
if(j == -1 || (blp & __Pow2[j])){
pid = (rtpfb->nack.pid[i] + (j + 1));
tsk_list_lock(video->avpf.packets);
@ -542,7 +589,15 @@ static int tdav_session_video_rtcp_cb(const void* callback_data, const trtp_rtcp
continue;
}
if(pkt_rtp->header->seq_num > pid){
// Very Important: the seq_nums are not consecutive because of wrapping.
// For example, '65533, 65534, 65535, 0, 1' is a valid sequence, which means we have to check all packets (probably needs something smarter)
if(pkt_rtp->header->seq_num == pid){
TSK_DEBUG_INFO("NACK Found, pid=%d, blp=%u", pid, blp);
trtp_manager_send_rtp_packet(base->rtp_manager, pkt_rtp, tsk_true);
break;
}
if(item == video->avpf.packets->tail){
// should never be called unless the tail is too small
int32_t old_max = video->avpf.max;
int32_t len_drop = (pkt_rtp->header->seq_num - pid);
video->avpf.max = TSK_CLAMP((int32_t)tmedia_defaults_get_avpf_tail_min(), (old_max + len_drop), (int32_t)tmedia_defaults_get_avpf_tail_max());
@ -552,32 +607,21 @@ static int tdav_session_video_rtcp_cb(const void* callback_data, const trtp_rtcp
pid,
video->avpf.max,
video->avpf.count);
// FIR not really requested but needed
/*_tdav_session_video_remote_requested_idr(video, ((const trtp_rtcp_report_fb_t*)rtpfb)->ssrc_media);
tsk_list_clear_items(video->avpf.packets);
video->avpf.count = 0;*/
goto done;
}
if(pkt_rtp->header->seq_num == pid){
TSK_DEBUG_INFO("NACK Found=%d", pid);
trtp_manager_send_rtp_packet(base->rtp_manager, pkt_rtp, tsk_true);
break;
}
if(item == video->avpf.packets->tail){
// must never be called
TSK_DEBUG_INFO("**NACK Not Found=%d", pid);
}
}
done:
// FIR not really requested but needed
/*_tdav_session_video_remote_requested_idr(video, ((const trtp_rtcp_report_fb_t*)rtpfb)->ssrc_media);
tsk_list_clear_items(video->avpf.packets);
video->avpf.count = 0;*/
} // if(last_item)
}// foreach(pkt)
tsk_list_unlock(video->avpf.packets);
}
}
}
}
}// if(BLP is set)
}// foreach(BIT in BLP)
}// foreach(nack)
}// if(nack-blp and nack-pid are set)
break;
}
}
}
}// case
}// switch
}// while(rtcp-pkt)
return 0;
}
@ -600,13 +644,20 @@ static int _tdav_session_video_jb_cb(const tdav_video_jb_cb_data_xt* data)
}
case tdav_video_jb_cb_data_type_fl:
{
tsk_size_t i, j, k;
uint16_t seq_nums[16];
for(i = 0; i < data->fl.count; i+=16){
for(j = 0, k = i; j < 16 && k < data->fl.count; ++j, ++k){
seq_nums[j] = (data->fl.seq_num + i + j);
if(data->fl.count > TDAV_SESSION_VIDEO_PKT_LOSS_MAX_COUNT_TO_REQUEST_FIR){
TSK_DEBUG_INFO("Packet loss too high (%u) -> Requesting FIR", data->fl.count);
trtp_manager_signal_frame_corrupted(base->rtp_manager, data->ssrc);
}
else{
tsk_size_t i, j, k;
uint16_t seq_nums[16];
for(i = 0; i < data->fl.count; i+=16){
for(j = 0, k = i; j < 16 && k < data->fl.count; ++j, ++k){
seq_nums[j] = (data->fl.seq_num + i + j);
TSK_DEBUG_INFO("Request re-send(%u)", seq_nums[j]);
}
trtp_manager_signal_pkt_loss(base->rtp_manager, data->ssrc, seq_nums, j);
}
trtp_manager_signal_pkt_loss(base->rtp_manager, data->ssrc, seq_nums, j);
}
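// Note (assumption, not part of the patch): losses are signalled in chunks of 16 seq nums,
// presumably so each call maps cleanly onto RTCP generic NACK entries (PID + 16-bit BLP, RFC 4585 6.2.1).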
break;
}
@ -645,7 +696,8 @@ bail:
static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp_packet_t* packet)
{
tdav_session_av_t* base = (tdav_session_av_t*)self;
static const trtp_rtp_header_t* rtp_header = tsk_null;
static const trtp_rtp_header_t* __rtp_header = tsk_null;
static const tmedia_codec_id_t __codecs_supporting_zero_artifacts = (tmedia_codec_id_vp8 | tmedia_codec_id_h264_bp | tmedia_codec_id_h264_mp | tmedia_codec_id_h263);
int ret = 0;
if(!self || !packet || !packet->header){
@ -658,6 +710,7 @@ static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp
if(base->consumer && base->consumer->is_started){
tsk_size_t out_size, _size;
const void* _buffer;
tdav_session_video_t* video = (tdav_session_video_t*)base;
// Find the codec to use to decode the RTP payload
if(!self->decoder.codec || self->decoder.payload_type != packet->header->payload_type){
@ -675,6 +728,29 @@ static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp
goto bail;
}
// Check if stream is corrupted or not
if(video->decoder.last_seqnum && (video->decoder.last_seqnum + 1) != packet->header->seq_num){
TSK_DEBUG_INFO("/!\\Video stream corrupted because of packet loss [%u - %u]. Pause rendering if 'zero_artifacts' (supported = %s, enabled = %s).",
video->decoder.last_seqnum,
packet->header->seq_num,
(__codecs_supporting_zero_artifacts & self->decoder.codec->id) ? "yes" : "no",
self->zero_artifacts ? "yes" : "no"
);
if(!video->decoder.stream_corrupted){ // do not do the job twice
if(self->zero_artifacts && (__codecs_supporting_zero_artifacts & self->decoder.codec->id)){
// request IDR now and every time 'TDAV_SESSION_VIDEO_AVPF_FIR_REQUEST_INTERVAL_MIN' has elapsed
// if 'zero-artifacts' is not enabled, we will instead request IDR only when decoding fails
TSK_DEBUG_INFO("Sending FIR to request IDR...");
trtp_manager_signal_frame_corrupted(base->rtp_manager, packet->header->ssrc);
}
// value will be updated when we decode an IDR frame
video->decoder.stream_corrupted = tsk_true;
video->decoder.stream_corrupted_since = tsk_time_now();
}
// will be used as a guard to avoid rendering a corrupted IDR
video->decoder.last_corrupted_timestamp = packet->header->timestamp;
}
video->decoder.last_seqnum = packet->header->seq_num; // update last seqnum
// Decode data
out_size = self->decoder.codec->plugin->decode(
@ -687,6 +763,16 @@ static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp
if(!out_size || !self->decoder.buffer){
goto bail;
}
// check if stream is corrupted
// the above decoding process is required in order to reset stream corruption status when IDR frame is decoded
if(self->zero_artifacts && self->decoder.stream_corrupted && (__codecs_supporting_zero_artifacts & self->decoder.codec->id)){
TSK_DEBUG_INFO("Do not render video frame because stream is corrupted and 'zero-artifacts' is enabled. Last seqnum=%u", video->decoder.last_seqnum);
if(video->decoder.stream_corrupted && (tsk_time_now() - video->decoder.stream_corrupted_since) > TDAV_SESSION_VIDEO_AVPF_FIR_REQUEST_INTERVAL_MIN){
TSK_DEBUG_INFO("Sending FIR to request IDR because frame corrupted since %llu...", video->decoder.stream_corrupted_since);
trtp_manager_signal_frame_corrupted(base->rtp_manager, packet->header->ssrc);
}
goto bail;
}
// important: do not override the display size (used by the end-user) unless requested
if(base->consumer->video.display.auto_resize){
@ -742,7 +828,7 @@ static int _tdav_session_video_decode(tdav_session_video_t* self, const trtp_rtp
_size = out_size;
}
ret = tmedia_consumer_consume(base->consumer, _buffer, _size, rtp_header);
ret = tmedia_consumer_consume(base->consumer, _buffer, _size, __rtp_header);
}
else if(!base->consumer->is_started){
TSK_DEBUG_INFO("Consumer not started");
@ -1080,6 +1166,8 @@ static tsk_object_t* tdav_session_video_ctor(tsk_object_t * self, va_list * app)
/* init() self */
video->jb_enabled = tmedia_defaults_get_videojb_enabled();
video->zero_artifacts = tmedia_defaults_get_video_zeroartifacts_enabled();
TSK_DEBUG_INFO("Video 'zero-artifacts' option = %s", video->zero_artifacts ? "yes" : "no");
if(!(video->encoder.h_mutex = tsk_mutex_create())){
TSK_DEBUG_ERROR("Failed to create encode mutex");
return tsk_null;

View File

@ -187,7 +187,7 @@ typedef enum tmedia_video_decode_result_type_e
tmedia_video_decode_result_type_none,
tmedia_video_decode_result_type_error,
tmedia_video_decode_result_type_success,
tmedia_video_decode_result_type_idr,
}
tmedia_video_decode_result_type_t;

View File

@ -90,8 +90,11 @@ TINYMEDIA_API int tmedia_defaults_set_bypass_encoding(tsk_bool_t enabled);
TINYMEDIA_API tsk_bool_t tmedia_defaults_get_bypass_encoding();
TINYMEDIA_API int tmedia_defaults_set_bypass_decoding(tsk_bool_t enabled);
TINYMEDIA_API tsk_bool_t tmedia_defaults_get_bypass_decoding();
TINYMEDIA_API int tmedia_defaults_set_videojb_enabled(tsk_bool_t enabled);
TINYMEDIA_API tsk_bool_t tmedia_defaults_get_videojb_enabled();
TINYMEDIA_API int tmedia_defaults_set_video_zeroartifacts_enabled(tsk_bool_t enabled);
TINYMEDIA_API tsk_bool_t tmedia_defaults_get_video_zeroartifacts_enabled();
TINYMEDIA_API int tmedia_defaults_set_rtpbuff_size(tsk_size_t rtpbuff_size);
TINYMEDIA_API tsk_size_t tmedia_defaults_get_rtpbuff_size();
TINYMEDIA_API int tmedia_defaults_set_avpf_tail(tsk_size_t tail_min, tsk_size_t tail_max);

View File

@ -59,6 +59,7 @@ static tsk_bool_t __ice_enabled = tsk_false;
static tsk_bool_t __bypass_encoding_enabled = tsk_false;
static tsk_bool_t __bypass_decoding_enabled = tsk_false;
static tsk_bool_t __videojb_enabled = tsk_true;
static tsk_bool_t __video_zeroartifacts_enabled = tsk_false; // Requires remote parties to support AVPF (RTCP-FIR/NACK/PLI)
static tsk_size_t __rtpbuff_size = 0x1FFFE; // Network buffer size used for RTP (SO_RCVBUF, SO_SNDBUF)
static tsk_size_t __avpf_tail_min = 20; // Min size for tail used to honor RTCP-NACK requests
static tsk_size_t __avpf_tail_max = 160; // Max size for tail used to honor RTCP-NACK requests
@ -343,6 +344,14 @@ tsk_bool_t tmedia_defaults_get_videojb_enabled(){
return __videojb_enabled;
}
int tmedia_defaults_set_video_zeroartifacts_enabled(tsk_bool_t enabled){
__video_zeroartifacts_enabled = enabled;
return 0;
}
tsk_bool_t tmedia_defaults_get_video_zeroartifacts_enabled(){
return __video_zeroartifacts_enabled;
}
int tmedia_defaults_set_rtpbuff_size(tsk_size_t rtpbuff_size){
__rtpbuff_size = rtpbuff_size;
return 0;

View File

@ -944,6 +944,8 @@ int tmedia_session_mgr_stop(tmedia_session_mgr_t* self)
tsk_list_item_t* item;
tmedia_session_t* session;
TSK_DEBUG_INFO("tmedia_session_mgr_stop()");
if(!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;

View File

@ -46,7 +46,6 @@ static int _tnet_ice_candidate_tostring(
static const char* _tnet_ice_candidate_get_foundation(tnet_ice_cand_type_t type);
static tnet_stun_message_t * _tnet_ice_candidate_stun_create_bind_request(tnet_ice_candidate_t* self, const char* username, const char* password);
static tsk_bool_t _tnet_ice_candidate_stun_transac_id_equals(const tnet_stun_transacid_t id1, const tnet_stun_transacid_t id2);
static int _tnet_ice_candidate_stun_address_tostring(const uint8_t in_ip[16], tnet_stun_addr_family_t family, char** out_ip);
static const char* _tnet_ice_candidate_get_transport_str(tnet_socket_type_t transport_e);
static tnet_socket_type_t _tnet_ice_candidate_get_transport_type(tsk_bool_t ipv6, const char* transport_str);
static const char* _tnet_ice_candidate_get_candtype_str(tnet_ice_cand_type_t candtype_e);
@ -111,7 +110,6 @@ tnet_ice_candidate_t* tnet_ice_candidate_create(tnet_ice_cand_type_t type_e, tne
return tsk_null;
}
candidate->transport_e = socket->type;
candidate->type_e = type_e;
candidate->socket = tsk_object_ref(socket);
candidate->local_pref = 0xFFFF;
@ -129,6 +127,7 @@ tnet_ice_candidate_t* tnet_ice_candidate_create(tnet_ice_cand_type_t type_e, tne
if(candidate->socket){
memcpy(candidate->connection_addr, candidate->socket->ip, sizeof(candidate->socket->ip));
candidate->port = candidate->socket->port;
candidate->transport_e = socket->type;
}
tnet_ice_candidate_set_credential(candidate, ufrag, pwd);
@ -420,12 +419,12 @@ int tnet_ice_candidate_process_stun_response(tnet_ice_candidate_t* self, const
if((attribute = tnet_stun_message_get_attribute(response, stun_xor_mapped_address))){
const tnet_stun_attribute_xmapped_addr_t *xmaddr = (const tnet_stun_attribute_xmapped_addr_t *)attribute;
_tnet_ice_candidate_stun_address_tostring(xmaddr->xaddress, xmaddr->family, &self->stun.srflx_addr);
tnet_ice_utils_stun_address_tostring(xmaddr->xaddress, xmaddr->family, &self->stun.srflx_addr);
self->stun.srflx_port = xmaddr->xport;
}
else if((attribute = tnet_stun_message_get_attribute(response, stun_mapped_address))){
const tnet_stun_attribute_mapped_addr_t *maddr = (const tnet_stun_attribute_mapped_addr_t *)attribute;
ret = _tnet_ice_candidate_stun_address_tostring(maddr->address, maddr->family, &self->stun.srflx_addr);
ret = tnet_ice_utils_stun_address_tostring(maddr->address, maddr->family, &self->stun.srflx_addr);
self->stun.srflx_port = maddr->port;
}
}
@ -526,25 +525,6 @@ static tsk_bool_t _tnet_ice_candidate_stun_transac_id_equals(const tnet_stun_tra
return tsk_true;
}
static int _tnet_ice_candidate_stun_address_tostring(const uint8_t in_ip[16], tnet_stun_addr_family_t family, char** out_ip)
{
if(family == stun_ipv6){
tsk_sprintf(out_ip, "%x:%x:%x:%x:%x:%x:%x:%x",
TSK_TO_UINT16(&in_ip[0]), TSK_TO_UINT16(&in_ip[2]), TSK_TO_UINT16(&in_ip[4]), TSK_TO_UINT16(&in_ip[6]),
TSK_TO_UINT16(&in_ip[8]), TSK_TO_UINT16(&in_ip[10]), TSK_TO_UINT16(&in_ip[12]), TSK_TO_UINT16(&in_ip[14]));
}
else if(family == stun_ipv4){
tsk_sprintf(out_ip, "%u.%u.%u.%u", in_ip[0], in_ip[1], in_ip[2], in_ip[3]);
return 0;
}
else{
TSK_DEBUG_ERROR("Unsupported address family: %u.", family);
}
return -1;
}
static tnet_stun_message_t * _tnet_ice_candidate_stun_create_bind_request(tnet_ice_candidate_t* self, const char* username, const char* password)
{
tnet_stun_message_t *request = tsk_null;

View File

@ -660,8 +660,17 @@ int tnet_ice_ctx_recv_stun_message(tnet_ice_ctx_t* self, const void* data, tsk_s
if((message = tnet_stun_message_deserialize(data, size))){
if(message->type == stun_binding_request){
// check controlling flag
if((pair = tnet_ice_pairs_find_by_fd_and_addr(self->candidates_pairs, local_fd, remote_addr))){
pair = tnet_ice_pairs_find_by_fd_and_addr(self->candidates_pairs, local_fd, remote_addr);
if(!pair && !self->have_nominated_symetric){ // pair not found and we're still negotiating
// rfc 5245 - 7.1.3.2.1. Discovering Peer Reflexive Candidates
tnet_ice_pair_t* pair_peer = tnet_ice_pair_prflx_create(self->candidates_pairs, local_fd, remote_addr);
if(pair_peer){
pair = pair_peer; // copy
tsk_list_push_back_data(self->candidates_pairs, (void**)&pair_peer);
TSK_OBJECT_SAFE_FREE(pair_peer);
}
}
if(pair){
short resp_code = 0;
char* resp_phrase = tsk_null;
// authenticate the request

View File

@ -96,6 +96,68 @@ tnet_ice_pair_t* tnet_ice_pair_create(const tnet_ice_candidate_t* candidate_offe
return pair;
}
// rfc 5245 - 7.1.3.2.1. Discovering Peer Reflexive Candidates
tnet_ice_pair_t* tnet_ice_pair_prflx_create(tnet_ice_pairs_L_t* pairs, uint16_t local_fd, const struct sockaddr_storage *remote_addr)
{
int ret;
const tsk_list_item_t *item;
const tnet_ice_pair_t *pair_local = tsk_null, *pair;
tnet_ip_t remote_ip;
tnet_port_t remote_port;
if(!pairs || !remote_addr){
TSK_DEBUG_ERROR("Invalid parameter");
return tsk_null;
}
if((ret = tnet_get_sockip_n_port((const struct sockaddr*)remote_addr, &remote_ip, &remote_port))){
TNET_PRINT_LAST_ERROR("tnet_get_sockip_n_port() failed");
return tsk_null;
}
tsk_list_foreach(item, pairs){
if(!(pair = item->data) || !pair->candidate_offer || !pair->candidate_answer || !pair->candidate_offer->socket || pair->candidate_offer->socket->fd != local_fd){
continue;
}
pair_local = pair;
break;
}
if(!pair_local){
TSK_DEBUG_ERROR("Cannot create prflx candidate with remote ip = %s and remote port = %u", remote_ip, remote_port);
return tsk_null;
}
else{
tnet_ice_pair_t* pair_peer = tsk_null;
tnet_ice_candidate_t* cand_local = tnet_ice_candidate_create(tnet_ice_cand_type_prflx, pair_local->candidate_offer->socket, pair_local->is_ice_jingle, pair_local->candidate_offer->is_rtp, pair_local->candidate_offer->is_video, pair_local->candidate_offer->ufrag, pair_local->candidate_offer->pwd, pair_local->candidate_offer->foundation);
tnet_ice_candidate_t* cand_remote = tnet_ice_candidate_create(tnet_ice_cand_type_prflx, tsk_null, pair_local->is_ice_jingle, pair_local->candidate_answer->is_rtp, pair_local->candidate_answer->is_video, pair_local->candidate_answer->ufrag, pair_local->candidate_answer->pwd, pair_local->candidate_answer->foundation);
if(cand_local && cand_remote){
tsk_strupdate(&cand_remote->transport_str, pair->candidate_offer->transport_str);
cand_remote->comp_id = pair->candidate_offer->comp_id;
memcpy(cand_remote->connection_addr, remote_ip, sizeof(tnet_ip_t));
cand_remote->port = remote_port;
TSK_DEBUG_INFO("ICE Pair (Peer Reflexive Candidate): [%s %u %s %d] -> [%s %u %s %d]",
cand_local->foundation,
cand_local->comp_id,
cand_local->connection_addr,
cand_local->port,
cand_remote->foundation,
cand_remote->comp_id,
cand_remote->connection_addr,
cand_remote->port);
pair_peer = tnet_ice_pair_create(cand_local, cand_remote, pair_local->is_controlling, pair_local->tie_breaker, pair_local->is_ice_jingle);
}
TSK_OBJECT_SAFE_FREE(cand_local);
TSK_OBJECT_SAFE_FREE(cand_remote);
return pair_peer;
}
return tsk_null;
}
int tnet_ice_pair_send_conncheck(tnet_ice_pair_t *self)
{
char* username = tsk_null;
@ -498,11 +560,55 @@ const tnet_ice_pair_t* tnet_ice_pairs_find_by_response(tnet_ice_pairs_L_t* pairs
if(pairs && response){
const tsk_list_item_t *item;
const tnet_ice_pair_t *pair;
tnet_port_t mapped_port;
char* mapped_addr_str = tsk_null;
tsk_list_foreach(item, pairs){
if(!(pair = item->data)){
if(!(pair = item->data) || !pair->candidate_answer || !pair->candidate_offer){
continue;
}
if(pair->last_request && tnet_stun_message_transac_id_equals(pair->last_request->transaction_id, response->transaction_id)){
// check that mapped/xmapped address match destination
const tnet_stun_attribute_xmapped_addr_t *xmapped_addr;
const tnet_stun_attribute_mapped_addr_t* mapped_addr = tsk_null;
if(!(xmapped_addr = (const tnet_stun_attribute_xmapped_addr_t *)tnet_stun_message_get_attribute(response, stun_xor_mapped_address))){
mapped_addr = (const tnet_stun_attribute_mapped_addr_t *)tnet_stun_message_get_attribute(response, stun_mapped_address);
}
if(!xmapped_addr && !mapped_addr){
return pair; // do nothing if the client doesn't return mapped address STUN attribute
}
/* rfc 5245 7.1.3.2.1. Discovering Peer Reflexive Candidates
The agent checks the mapped address from the STUN response. If the
transport address does not match any of the local candidates that the
agent knows about, the mapped address represents a new candidate -- a
peer reflexive candidate. Like other candidates, it has a type,
base, priority, and foundation. They are computed as follows:
o Its type is equal to peer reflexive.
o Its base is set equal to the local candidate of the candidate pair
from which the STUN check was sent.
o Its priority is set equal to the value of the PRIORITY attribute
in the Binding request.
o Its foundation is selected as described in Section 4.1.1.3.
This peer reflexive candidate is then added to the list of local
candidates for the media stream. Its username fragment and password
are the same as all other local candidates for that media stream.
*/
tnet_ice_utils_stun_address_tostring(xmapped_addr ? xmapped_addr->xaddress : mapped_addr->address, xmapped_addr ? xmapped_addr->family : mapped_addr->family, &mapped_addr_str);
mapped_port = xmapped_addr ? xmapped_addr->xport : mapped_addr->port;
if((mapped_port != pair->candidate_offer->port || !tsk_striequals(mapped_addr_str, pair->candidate_offer->connection_addr))){
TSK_DEBUG_INFO("Mapped address different than local connection address...probably symetric NAT: %s#%s and %u#%u",
pair->candidate_offer->connection_addr, mapped_addr_str,
pair->candidate_offer->port, mapped_port);
// do we really need to add new local candidate?
// continue;
}
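/* Worked example (assumption, not part of the patch): a check sent from local candidate
   192.168.0.10:5004 that comes back with XOR-MAPPED-ADDRESS 41.214.3.7:6001 reveals a
   peer reflexive transport address (typically a NAT mapping); the code currently only
   logs the mismatch instead of adding a new local prflx candidate. */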
return pair;
}
}
@ -539,6 +645,8 @@ const tnet_ice_pair_t* tnet_ice_pairs_find_by_fd_and_addr(tnet_ice_pairs_L_t* pa
return pair;
}
TSK_DEBUG_INFO("No ICE candidate with remote ip = %s and port = %u could be found...probably symetric NAT", remote_ip, remote_port);
return tsk_null;
}

View File

@ -59,6 +59,7 @@ typedef struct tnet_ice_pair_s
tnet_ice_pair_t;
tnet_ice_pair_t* tnet_ice_pair_create(const struct tnet_ice_candidate_s* candidate_offer, const struct tnet_ice_candidate_s* candidate_answer, tsk_bool_t is_controlling, uint64_t tie_breaker, tsk_bool_t is_ice_jingle);
tnet_ice_pair_t* tnet_ice_pair_prflx_create(tnet_ice_pairs_L_t* pairs, uint16_t local_fd, const struct sockaddr_storage *remote_addr);
int tnet_ice_pair_send_conncheck(tnet_ice_pair_t *self);
int tnet_ice_pair_send_response(tnet_ice_pair_t *self, const struct tnet_stun_message_s* request, const short code, const char* phrase, const struct sockaddr_storage *remote_addr);
int tnet_ice_pair_auth_conncheck(const tnet_ice_pair_t *self, const struct tnet_stun_message_s* request, const void* request_buff, tsk_size_t request_buff_size, short* resp_code, char** resp_phrase);

View File

@ -46,6 +46,7 @@
TNET_BEGIN_DECLS
typedef void tnet_tls_socket_handle_t;
struct ssl_ctx_st;
int tnet_tls_socket_connect(tnet_tls_socket_handle_t* self);
int tnet_tls_socket_accept(tnet_tls_socket_handle_t* self);

View File

@ -160,6 +160,8 @@ tnet_transport_t* tnet_transport_create(const char* host, tnet_port_t port, tnet
TSK_DEBUG_ERROR("Failed to initialize TLS and/or DTLS caps");
TSK_OBJECT_SAFE_FREE(transport);
}
// set priority
tsk_runnable_set_priority(TSK_RUNNABLE(transport), TSK_THREAD_PRIORITY_TIME_CRITICAL);
}
return transport;
@ -189,6 +191,9 @@ tnet_transport_t* tnet_transport_create_2(tnet_socket_t *master, const char* des
TSK_DEBUG_ERROR("Failed to initialize TLS and/or DTLS caps");
TSK_OBJECT_SAFE_FREE(transport);
}
// set priority
tsk_runnable_set_priority(TSK_RUNNABLE(transport), TSK_THREAD_PRIORITY_TIME_CRITICAL);
}
return transport;
@ -843,6 +848,8 @@ static void* TSK_STDCALL run(void* self)
TSK_DEBUG_FATAL("Failed to create main thread [%d]", ret);
return tsk_null;
}
/* set thread priority */
ret = tsk_thread_set_priority(transport->mainThreadId[0], TSK_THREAD_PRIORITY_TIME_CRITICAL);
TSK_RUNNABLE_RUN_BEGIN(transport);
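
Both transport constructors now raise the runnable to TSK_THREAD_PRIORITY_TIME_CRITICAL, and the main network thread is bumped right after it is created. An application that preferred a lower priority could presumably use the new setter after creation; a sketch (variable names illustrative, under the assumption that tnet_socket_type_udp_ipv4 is the intended socket type):

// Hedged sketch: lowering a transport's thread priority after creation.
tnet_transport_t* transport = tnet_transport_create("0.0.0.0", 0, tnet_socket_type_udp_ipv4, "demo transport");
if (transport) {
    tsk_runnable_set_priority(TSK_RUNNABLE(transport), TSK_THREAD_PRIORITY_MEDIUM);
}
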

View File

@ -1450,20 +1450,24 @@ int tnet_sockfd_sendto(tnet_fd_t fd, const struct sockaddr *to, const void* buf,
wsaBuffer.len = (size - sent);
try_again:
ret = WSASendTo(fd, &wsaBuffer, 1, &numberOfBytesSent, 0, to, tnet_get_sockaddr_size(to), 0, 0); // returns zero on success
if(ret == 0) ret = numberOfBytesSent;
if(ret == 0){
ret = numberOfBytesSent;
}
#else
try_again:
ret = sendto(fd, (((const uint8_t*)buf)+sent), (size-sent), 0, to, tnet_get_sockaddr_size(to)); // returns the number of bytes sent on success
#endif
if(ret <= 0){
if(tnet_geterrno() == TNET_ERROR_WOULDBLOCK){
TSK_DEBUG_INFO("SendUdp() - WouldBlock. Retrying...");
if(try_guard--){
tsk_thread_sleep(7);
tsk_thread_sleep(10);
goto try_again;
}
}
else{
TNET_PRINT_LAST_ERROR("sendto() failed");
}
goto bail;
}
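
The send path keeps its bounded-retry pattern: on TNET_ERROR_WOULDBLOCK it now sleeps 10 ms instead of 7 ms and retries until try_guard runs out. A reduced sketch of the same pattern, with the guard's initial value assumed (it is set outside this hunk) and a hypothetical helper name:

// Hedged sketch of the bounded retry-on-WOULDBLOCK loop used above.
static tsk_size_t _send_all_udp(tnet_fd_t fd, const struct sockaddr *to, const void* buf, tsk_size_t size)
{
    tsk_size_t sent = 0;
    int try_guard = 10; // assumed value; the real initialization is not shown in the hunk
    while (sent < size) {
        int ret = sendto(fd, (((const uint8_t*)buf) + sent), (size - sent), 0, to, tnet_get_sockaddr_size(to));
        if (ret <= 0) {
            if (tnet_geterrno() == TNET_ERROR_WOULDBLOCK && try_guard-- > 0) {
                tsk_thread_sleep(10); // transient condition: wait a little, then retry
                continue;
            }
            break; // hard error or guard exhausted
        }
        sent += (tsk_size_t)ret;
    }
    return sent;
}
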
@ -1522,14 +1526,13 @@ tsk_size_t tnet_sockfd_send(tnet_fd_t fd, const void* buf, tsk_size_t size, int
while(sent < size){
if((ret = send(fd, (((const char*)buf)+sent), (size-sent), flags)) <= 0){
if(tnet_geterrno() == TNET_ERROR_WOULDBLOCK){
// FIXME: HORRIBLE HACK
if((ret = tnet_sockfd_waitUntilWritable(fd, TNET_CONNECT_TIMEOUT))){
break;
}
else continue;
}
else{
TNET_PRINT_LAST_ERROR("send failed.");
TNET_PRINT_LAST_ERROR("send failed");
// Under Windows XP, if WSAGetLastError()==WSAEINTR then try to disable both the ICS and the Firewall
// More info about how to disable the ICS: http://support.microsoft.com/?scid=kb%3Ben-us%3B230112&x=6&y=11
goto bail;

View File

@ -43,7 +43,7 @@ struct trtp_rtp_packet_s;
typedef int (*trtp_rtcp_cb_f)(const void* callback_data, const struct trtp_rtcp_packet_s* packet);
struct trtp_rtcp_session_s* trtp_rtcp_session_create(uint32_t ssrc);
struct trtp_rtcp_session_s* trtp_rtcp_session_create(uint32_t ssrc, const char* cname);
int trtp_rtcp_session_set_callback(struct trtp_rtcp_session_s* self, trtp_rtcp_cb_f callback, const void* callback_data);
#if HAVE_SRTP
int trtp_rtcp_session_set_srtp_sess(struct trtp_rtcp_session_s* self, const srtp_t* session);

View File

@ -100,6 +100,7 @@ typedef struct trtp_manager_s
} rtp;
struct{
char* cname;
char* remote_ip;
tnet_port_t remote_port;
struct sockaddr_storage remote_addr;

View File

@ -181,7 +181,7 @@ trtp_rtcp_report_rtpfb_t* trtp_rtcp_report_rtpfb_create_nack(uint32_t ssrc_sende
rtpfb->nack.blp[0] = 0;
for(i = 1; i <= 16 && i < count; ++i){
j = seq_nums[i] - rtpfb->nack.pid[0];
rtpfb->nack.blp[0] |= (1 << (16 - j - 1));
rtpfb->nack.blp[0] |= (1 << (j - 1));
}
TRTP_RTCP_PACKET(rtpfb)->header->length_in_bytes += (rtpfb->nack.count << 2);
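
The fix changes the BLP shift from MSB-first to the RFC 4585 6.2.1 layout: pid carries the first lost sequence number, and bit (j - 1) of blp, counting from the least significant bit, flags the loss of packet pid + j. A small worked example under that layout (illustrative values, not doubango code):

// Hedged example of the corrected Generic NACK encoding: lost packets 100, 101 and 105.
uint16_t seq_nums[3] = { 100, 101, 105 };
uint16_t pid = seq_nums[0];           // first lost packet
uint16_t blp = 0;
int i;
for (i = 1; i < 3; ++i) {
    uint16_t j = seq_nums[i] - pid;   // offset from pid (1..16)
    blp |= (uint16_t)(1 << (j - 1));  // least significant bit denotes pid + 1
}
// result: blp == 0x0011 (bit 0 -> seq 101, bit 4 -> seq 105)
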

View File

@ -284,8 +284,7 @@ typedef struct trtp_rtcp_session_s
// </RTCP-FB>
// <sender>
tsk_md5string_t cname;
tsk_bool_t is_cname_defined;
char* cname;
uint32_t packets_count;
uint32_t octets_count;
// </sender>
@ -345,6 +344,7 @@ static tsk_object_t* trtp_rtcp_session_dtor(tsk_object_t * self)
TSK_OBJECT_SAFE_FREE(session->sources);
TSK_OBJECT_SAFE_FREE(session->source_local);
TSK_OBJECT_SAFE_FREE(session->sdes);
TSK_FREE(session->cname);
// release the handle for the global timer manager
tsk_timer_mgr_global_unref(&session->timer.handle_global);
@ -362,7 +362,6 @@ static const tsk_object_def_t trtp_rtcp_session_def_s =
const tsk_object_def_t *trtp_rtcp_session_def_t = &trtp_rtcp_session_def_s;
static void _trtp_rtcp_session_set_cname(trtp_rtcp_session_t* self, const void* random_data, tsk_size_t size);
static tsk_bool_t _trtp_rtcp_session_have_source(trtp_rtcp_session_t* self, uint32_t ssrc);
static trtp_rtcp_source_t* _trtp_rtcp_session_find_source(trtp_rtcp_session_t* self, uint32_t ssrc);
static trtp_rtcp_source_t* _trtp_rtcp_session_find_or_add_source(trtp_rtcp_session_t* self, uint32_t ssrc, uint16_t seq_if_add, uint32_t ts_id_add);
@ -377,7 +376,7 @@ static void OnReceive(trtp_rtcp_session_t* session, const packet_ p, event_ e, t
static void OnExpire(trtp_rtcp_session_t* session, event_ e);
static void SendBYEPacket(trtp_rtcp_session_t* session, event_ e);
trtp_rtcp_session_t* trtp_rtcp_session_create(uint32_t ssrc)
trtp_rtcp_session_t* trtp_rtcp_session_create(uint32_t ssrc, const char* cname)
{
trtp_rtcp_session_t* session;
@ -398,6 +397,7 @@ trtp_rtcp_session_t* trtp_rtcp_session_create(uint32_t ssrc)
session->senders = 1;
session->members = 1;
session->rtcp_bw = RTCP_BW; //FIXME: should be a parameter; also add the possibility to update this value
session->cname = tsk_strdup(cname);
bail:
return session;
@ -512,11 +512,6 @@ int trtp_rtcp_session_process_rtp_out(trtp_rtcp_session_t* self, const trtp_rtp_
tsk_safeobj_lock(self);
// initialize CNAME if not already done
if(!self->is_cname_defined){
_trtp_rtcp_session_set_cname(self, packet_rtp->payload.data, packet_rtp->payload.size);
}
// create local source if not already done
// first destroy it if the ssrc don't match
if(self->source_local && self->source_local->ssrc != packet_rtp->header->ssrc){
@ -800,17 +795,12 @@ static tsk_size_t _trtp_rtcp_session_send_pkt(trtp_rtcp_session_t* self, trtp_rt
if(self->srtp.session) __num_bytes_pad = (SRTP_MAX_TRAILER_LEN + 0x4);
#endif
if(!self->is_cname_defined){ // should not be true
uint64_t now = (tsk_time_now() ^ rand()); // not really random...but we hope it'll never be called
_trtp_rtcp_session_set_cname(self, &now, sizeof(now));
}
// SDES
if(!self->sdes && (self->sdes = trtp_rtcp_report_sdes_create_null())){
trtp_rtcp_sdes_chunck_t* chunck = trtp_rtcp_sdes_chunck_create(self->source_local->ssrc);
if(chunck){
static const char* _name = "test@doubango.org";
trtp_rtcp_sdes_chunck_add_item(chunck, trtp_rtcp_sdes_item_type_cname, self->cname, TSK_MD5_STRING_SIZE);
trtp_rtcp_sdes_chunck_add_item(chunck, trtp_rtcp_sdes_item_type_cname, self->cname, tsk_strlen(self->cname));
trtp_rtcp_sdes_chunck_add_item(chunck, trtp_rtcp_sdes_item_type_name, _name, tsk_strlen(_name));
trtp_rtcp_report_sdes_add_chunck(self->sdes, chunck);
TSK_OBJECT_SAFE_FREE(chunck);
@ -839,24 +829,6 @@ static tsk_size_t _trtp_rtcp_session_send_pkt(trtp_rtcp_session_t* self, trtp_rt
return ret;
}
// sets the cname from the rtp payload (audio or video) xor'ed with some random values
static void _trtp_rtcp_session_set_cname(trtp_rtcp_session_t* self, const void* random_data, tsk_size_t size)
{
tsk_size_t i;
uint8_t _cname[16] = { 'd', 'o', 'u', 'b', 'a', 'n', 'g', 'o', 'd', 'o', 'u', 'b', 'a', 'n', 'g', 'o' };
if(random_data && size){
memcpy(_cname, random_data, TSK_MIN(sizeof(_cname), size));
}
for(i = 0; i < sizeof(_cname); i+= 4){
*((uint32_t*)&_cname[i]) ^= rand();
}
tsk_md5compute((char*)_cname, sizeof(_cname), &self->cname);
self->is_cname_defined = tsk_true;
}
static int _trtp_rtcp_session_timer_callback(const void* arg, tsk_timer_id_t timer_id)
{
trtp_rtcp_session_t* session = (trtp_rtcp_session_t*)arg;

View File

@ -414,7 +414,7 @@ static int _trtp_manager_recv_data(const trtp_manager_t* self, const uint8_t* da
err_status_t status;
if(self->srtp_ctx_neg_remote){
if((status = srtp_unprotect(self->srtp_ctx_neg_remote->rtp.session, (void*)data_ptr, (int*)&data_size)) != err_status_ok){
TSK_DEBUG_ERROR("srtp_unprotect(RTP) failed with error code=%d", (int)status);
TSK_DEBUG_ERROR("srtp_unprotect(RTP) failed with error code=%d, seq_num=%u", (int)status, (data_size > 4 ? tnet_ntohs_2(&data_ptr[2]) : 0x0000));
return -1;
}
}
@ -1280,7 +1280,7 @@ int trtp_manager_start(trtp_manager_t* self)
}
/* create and start RTCP session */
if(!self->rtcp.session && ret == 0){
self->rtcp.session = trtp_rtcp_session_create(self->rtp.ssrc.local);
self->rtcp.session = trtp_rtcp_session_create(self->rtp.ssrc.local, self->rtcp.cname);
}
if(self->rtcp.session){
ret = trtp_rtcp_session_set_callback(self->rtcp.session, self->rtcp.cb.fun, self->rtcp.cb.usrdata);
@ -1566,6 +1566,7 @@ static tsk_object_t* trtp_manager_ctor(tsk_object_t * self, va_list * app)
manager->rtp.dscp = TRTP_DSCP_RTP_DEFAULT;
/* rtcp */
tsk_sprintf(&manager->rtcp.cname, "doubango@%llu", (tsk_time_now() + rand()));
/* timer */
manager->timer_mgr_global = tsk_timer_mgr_global_ref();
@ -1597,6 +1598,7 @@ static tsk_object_t* trtp_manager_dtor(tsk_object_t * self)
TSK_OBJECT_SAFE_FREE(manager->rtcp.session);
TSK_FREE(manager->rtcp.remote_ip);
TSK_FREE(manager->rtcp.public_ip);
TSK_FREE(manager->rtcp.cname);
TSK_OBJECT_SAFE_FREE(manager->rtcp.local_socket);
/* SRTP */
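
Taken together with the RTCP session changes above, the CNAME is no longer derived from the first RTP payload: the manager builds it once in its constructor, hands it to trtp_rtcp_session_create() (which strdup's it), advertises it in the SDES chunk with its real length, and frees it in the destructor. A condensed sketch of that flow, assembled from the hunks above rather than quoted literally:

// ctor: generate a per-manager CNAME
tsk_sprintf(&manager->rtcp.cname, "doubango@%llu", (tsk_time_now() + rand()));
// start: the RTCP session takes its own copy
self->rtcp.session = trtp_rtcp_session_create(self->rtp.ssrc.local, self->rtcp.cname);
// inside the session: the SDES CNAME item now uses the string's real length
trtp_rtcp_sdes_chunck_add_item(chunck, trtp_rtcp_sdes_item_type_cname, self->cname, tsk_strlen(self->cname));
// dtor: the manager releases its copy
TSK_FREE(manager->rtcp.cname);
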

View File

@ -68,6 +68,8 @@ int trtp_srtp_ctx_internal_init(struct trtp_srtp_ctx_internal_xs* ctx, int32_t t
ctx->policy.key = (unsigned char*)ctx->key_bin;
ctx->policy.ssrc.type = ssrc_any_outbound;
ctx->policy.ssrc.value = ssrc;
ctx->policy.window_size = 1024;
ctx->policy.allow_repeat_tx = 0;
if((srtp_err = srtp_create(&ctx->session, &ctx->policy)) != err_status_ok){
TSK_DEBUG_ERROR("srtp_create() failed");
return -3;
@ -236,6 +238,8 @@ int trtp_srtp_set_crypto(struct trtp_manager_s* rtp_mgr, const char* crypto_line
tsk_base64_decode((const uint8_t*)srtp_ctx->rtp.key_str, tsk_strlen(srtp_ctx->rtp.key_str), (char**)&key_bin);
srtp_ctx->rtp.policy.key = key_bin;
srtp_ctx->rtp.policy.ssrc.type = idx == TRTP_SRTP_LINE_IDX_REMOTE ? ssrc_any_inbound : ssrc_any_outbound;
srtp_ctx->rtp.policy.window_size = 1024;
srtp_ctx->rtp.policy.allow_repeat_tx = 0;
if((srtp_err = srtp_create(&srtp_ctx->rtp.session, &srtp_ctx->rtp.policy)) != err_status_ok){
TSK_DEBUG_ERROR("srtp_create() failed: %d", srtp_err);
return -3;
@ -284,6 +288,8 @@ int trtp_srtp_set_key_and_salt(trtp_manager_t* rtp_mgr, trtp_srtp_crypto_type_t
srtp_ctx->policy.key = (unsigned char *)srtp_ctx->key_bin;
srtp_ctx->policy.ssrc.type = idx == TRTP_SRTP_LINE_IDX_REMOTE ? ssrc_any_inbound : ssrc_any_outbound;
srtp_ctx->policy.window_size = 1024;
srtp_ctx->policy.allow_repeat_tx = 0;
if((srtp_err = srtp_create(&srtp_ctx->session, &srtp_ctx->policy)) != err_status_ok){
TSK_DEBUG_ERROR("srtp_create() failed: %d", srtp_err);
return -3;
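
All three SRTP context setups now pin the replay-protection window to 1024 packets and forbid re-protecting an already used sequence number (allow_repeat_tx = 0). A minimal standalone sketch of the same policy fields, assuming the libsrtp 1.x API and an illustrative crypto suite and key (header path may differ per build):

// Hedged sketch of the policy settings added above (libsrtp 1.x assumed).
#include <string.h>
#include <srtp/srtp.h>

static srtp_t _create_outbound_srtp(unsigned char* key_bin /* master key || salt */)
{
    srtp_policy_t policy;
    srtp_t session = NULL;
    memset(&policy, 0, sizeof(policy));
    crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtp);
    crypto_policy_set_aes_cm_128_hmac_sha1_80(&policy.rtcp);
    policy.key = key_bin;
    policy.ssrc.type = ssrc_any_outbound;
    policy.window_size = 1024;   // larger replay window for lossy/reordering links
    policy.allow_repeat_tx = 0;  // never send two packets protected with the same seq num
    policy.next = NULL;
    if (srtp_create(&session, &policy) != err_status_ok) {
        return NULL;
    }
    return session;
}
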

View File

@ -1,5 +1,9 @@
lib_LTLIBRARIES = libtinySAK.la
if USE_RT
libtinySAK_la_LIBADD = ${LIBRT_LIBADD}
endif
libtinySAK_la_SOURCES = \
src/tsk.c\
src/tsk_base64.c\

View File

@ -31,6 +31,10 @@
#include "tsk_thread.h"
#include "tsk_debug.h"
#if TSK_UNDER_WINDOWS
# include <windows.h>
#endif
/**@defgroup tsk_runnable_group Base class for runnable object.
*/
@ -40,7 +44,22 @@
*/
tsk_runnable_t* tsk_runnable_create()
{
return (tsk_runnable_t*)tsk_object_new(tsk_runnable_def_t);
return tsk_runnable_create_2(TSK_THREAD_PRIORITY_MEDIUM);
}
/**@ingroup tsk_runnable_group
* Creates new Runnable object.
* @param priority Thread priority. Possible values: TSK_THREAD_PRIORITY_LOW, TSK_THREAD_PRIORITY_MEDIUM, TSK_THREAD_PRIORITY_HIGH or TSK_THREAD_PRIORITY_TIME_CRITICAL
* @retval @ref tsk_runnable_t.
*/
tsk_runnable_t* tsk_runnable_create_2(int32_t priority)
{
tsk_runnable_t* runnable;
if((runnable = (tsk_runnable_t*)tsk_object_new(tsk_runnable_def_t))){
runnable->priority = priority;
}
return runnable;
}
/**@ingroup tsk_runnable_group
@ -117,6 +136,10 @@ int tsk_runnable_start(tsk_runnable_t *self, const tsk_object_def_t *objdef)
TSK_DEBUG_ERROR("Failed to start new thread.");
return ret;
}
/* set priority now that the thread is created */
if(tsk_runnable_set_priority(self, self->priority)){
TSK_DEBUG_ERROR("Failed to set thread priority value to %d", self->priority);
}
// Do not set "running" to true here
// Problem: when you try to stop the thread before it starts
// Will be done by "TSK_RUNNABLE_RUN_BEGIN" which is called inside the thread
@ -149,6 +172,21 @@ int tsk_runnable_set_important(tsk_runnable_t *self, tsk_bool_t important)
}
}
/**@ingroup tsk_runnable_group
*/
int tsk_runnable_set_priority(tsk_runnable_t *self, int32_t priority)
{
if(!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
self->priority = priority;
if(self->h_thread[0]){
return tsk_thread_set_priority(self->h_thread[0], priority);
}
return 0;
}
/**@ingroup tsk_runnable_group
* Stops a runnable object.
* @param self The runnable object to stop.
@ -217,6 +255,7 @@ static tsk_object_t* tsk_runnable_ctor(tsk_object_t * self, va_list * app)
{
tsk_runnable_t* runnable = (tsk_runnable_t*)self;
if(runnable){
}
return self;
}

View File

@ -70,6 +70,8 @@ typedef struct tsk_runnable_s
*/
tsk_bool_t important;
int32_t priority;
tsk_list_t *objects;
}
tsk_runnable_t;
@ -79,9 +81,11 @@ tsk_runnable_t;
#define TSK_DECLARE_RUNNABLE tsk_runnable_t __runnable__
TINYSAK_API tsk_runnable_t* tsk_runnable_create();
TINYSAK_API tsk_runnable_t* tsk_runnable_create_2(int32_t priority);
TINYSAK_API int tsk_runnable_start(tsk_runnable_t *self, const tsk_object_def_t *objdef);
TINYSAK_API int tsk_runnable_set_important(tsk_runnable_t *self, tsk_bool_t important);
TINYSAK_API int tsk_runnable_set_priority(tsk_runnable_t *self, int32_t priority);
TINYSAK_API int tsk_runnable_enqueue(tsk_runnable_t *self, ...);
TINYSAK_API int tsk_runnable_stop(tsk_runnable_t *self);
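
The new constructor and setter give callers two ways to control a runnable's thread priority, sketched below (the runnable itself is assumed to be started elsewhere via tsk_runnable_start()):

// Hedged usage sketch for the new priority API.
tsk_runnable_t* r = tsk_runnable_create_2(TSK_THREAD_PRIORITY_HIGH); // priority fixed at creation
/* ... later, even after the thread has been spawned, it can still be raised ... */
tsk_runnable_set_priority(r, TSK_THREAD_PRIORITY_TIME_CRITICAL);
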

View File

@ -92,7 +92,7 @@ int tsk_thread_set_priority(tsk_thread_handle_t* handle, int32_t priority)
int ret;
memset(&sp, 0, sizeof(struct sched_param));
sp.sched_priority = priority;
if ((ret = pthread_setschedparam(*((pthread_t*)handle), SCHED_RR, &sp))) {
if ((ret = pthread_setschedparam(*((pthread_t*)handle), SCHED_OTHER, &sp))) {
TSK_DEBUG_ERROR("Failed to change priority to %d with error code=%d", priority, ret);
return ret;
}
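
Switching from SCHED_RR to SCHED_OTHER avoids a common failure mode: real-time policies typically require elevated privileges, so pthread_setschedparam() would fail with EPERM for ordinary processes. A quick way to confirm what was actually applied is to read the parameters back (a sketch, not doubango code):

// Hedged sketch: read back the policy/priority actually in effect for a thread.
#include <pthread.h>
#include <sched.h>
#include <stdio.h>

static void print_thread_sched(pthread_t t)
{
    struct sched_param sp;
    int policy = 0;
    if (pthread_getschedparam(t, &policy, &sp) == 0) {
        printf("policy=%d priority=%d\n", policy, sp.sched_priority);
    }
}
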

View File

@ -35,9 +35,18 @@
typedef void tsk_thread_handle_t;
#if TSK_UNDER_WINDOWS
typedef unsigned long tsk_thread_id_t;
# define TSK_THREAD_PRIORITY_LOW THREAD_PRIORITY_LOWEST
# define TSK_THREAD_PRIORITY_MEDIUM THREAD_PRIORITY_NORMAL
# define TSK_THREAD_PRIORITY_HIGH THREAD_PRIORITY_HIGHEST
# define TSK_THREAD_PRIORITY_TIME_CRITICAL THREAD_PRIORITY_TIME_CRITICAL
#else
# include <pthread.h>
typedef pthread_t tsk_thread_id_t;
# include <sched.h>
typedef pthread_t tsk_thread_id_t;
# define TSK_THREAD_PRIORITY_LOW sched_get_priority_min(SCHED_OTHER)
# define TSK_THREAD_PRIORITY_TIME_CRITICAL sched_get_priority_max(SCHED_OTHER)
# define TSK_THREAD_PRIORITY_MEDIUM ((TSK_THREAD_PRIORITY_TIME_CRITICAL - TSK_THREAD_PRIORITY_LOW) >> 1)
# define TSK_THREAD_PRIORITY_HIGH ((TSK_THREAD_PRIORITY_MEDIUM * 3) >> 1)
#endif
TSK_BEGIN_DECLS
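
On the POSIX side the four levels are now derived from the scheduler's advertised range instead of being hard-coded. Note that for SCHED_OTHER some platforms (Linux in particular) typically report min == max == 0, in which case all four macros collapse to the same value. A tiny check of what they resolve to on the current system (illustrative program, not part of tinySAK):

// Hedged sketch: print the values the priority macros would expand to here.
#include <sched.h>
#include <stdio.h>

int main(void)
{
    int lo   = sched_get_priority_min(SCHED_OTHER);       // TSK_THREAD_PRIORITY_LOW
    int tc   = sched_get_priority_max(SCHED_OTHER);       // TSK_THREAD_PRIORITY_TIME_CRITICAL
    int med  = (tc - lo) >> 1;                             // TSK_THREAD_PRIORITY_MEDIUM
    int high = (med * 3) >> 1;                             // TSK_THREAD_PRIORITY_HIGH
    printf("low=%d medium=%d high=%d time_critical=%d\n", lo, med, high, tc);
    return 0;
}
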

View File

@ -124,6 +124,15 @@ int tsk_gettimeofday(struct timeval *tv, struct timezone *tz)
return gettimeofday(tv, tz);
}
/**@ingroup tsk_time_group
*/
uint64_t tsk_gettimeofday_ms()
{
struct timeval tv;
tsk_gettimeofday(&tv, tsk_null);
return (((uint64_t)tv.tv_sec)*(uint64_t)1000) + (((uint64_t)tv.tv_usec)/(uint64_t)1000);
}
/**@ingroup tsk_time_group
* Gets the number of milliseconds in @a tv
* @retval The number of milliseconds

View File

@ -46,6 +46,7 @@ struct timespec;
#define TSK_TIME_MS_2_S(MS) ((MS)/1000)
TINYSAK_API int tsk_gettimeofday(struct timeval *tv, struct timezone *tz);
TINYSAK_API uint64_t tsk_gettimeofday_ms();
TINYSAK_API uint64_t tsk_time_get_ms(const struct timeval *tv);
TINYSAK_API uint64_t tsk_time_epoch();
TINYSAK_API uint64_t tsk_time_now();
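
The new tsk_gettimeofday_ms() helper returns the wall-clock time in milliseconds (it wraps tsk_gettimeofday(), so it is not monotonic); a typical use is measuring elapsed time:

// Hedged usage sketch for the new helper.
uint64_t t0 = tsk_gettimeofday_ms();
/* ... do some work ... */
uint64_t elapsed_ms = tsk_gettimeofday_ms() - t0;
TSK_DEBUG_INFO("operation took %llu ms", (unsigned long long)elapsed_ms);
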