Add Win64 build support (media libraries used in the Gotham project).

bossiel 2014-12-19 17:09:09 +00:00
parent 9d964daa3f
commit 6dabf5ca17
215 changed files with 9839 additions and 11189 deletions

View File

@ -58,7 +58,12 @@ HRESULT DisplayWatcher::Start()
BOOL ret = SetPropA(m_hWnd, "This", this);
assert(ret);
#if _M_X64
m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)DisplayWatcher::WndProc);
#else
m_pWndProc = (WNDPROC)SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)DisplayWatcher::WndProc);
#endif
UpdatePosition(); // black screen if attached later
}
@ -96,7 +101,12 @@ HRESULT DisplayWatcher::Stop()
if(m_hWnd && m_pWndProc)
{
// Restore
#if _M_X64
SetWindowLongPtr(m_hWnd, GWLP_WNDPROC, (LONG_PTR)m_pWndProc);
#else
SetWindowLongPtr(m_hWnd, GWL_WNDPROC, (LONG)m_pWndProc);
#endif
}
m_hWnd = NULL;
m_pWndProc = NULL;
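
The #if _M_X64 split above works, but SetWindowLongPtr with GWLP_WNDPROC is already portable: on 32-bit builds the Windows headers define SetWindowLongPtr as SetWindowLong and LONG_PTR is 32 bits wide. A minimal sketch of the subclass/restore pattern under that assumption (helper names are hypothetical, not the project's):

// Portable window subclassing: no architecture #if needed.
#include <windows.h>

static WNDPROC SubclassWindowProc(HWND hWnd, WNDPROC newProc)
{
    // Returns the previous procedure so it can be restored later.
    return (WNDPROC)SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)newProc);
}

static void RestoreWindowProc(HWND hWnd, WNDPROC prevProc)
{
    if (hWnd && prevProc) {
        SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)prevProc);
    }
}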

View File

@ -238,7 +238,7 @@ static tsk_size_t mf_codec_h264_encode(tmedia_codec_t* self, const void* in_data
}
// Encode data
CHECK_HR(hr = h264->encoder.pInst->Process(in_data, in_size, &pSampleOut));
CHECK_HR(hr = h264->encoder.pInst->Process(in_data, (UINT32)in_size, &pSampleOut));
if(pSampleOut) {
CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
@ -377,7 +377,7 @@ static tsk_size_t mf_codec_h264_decode(tmedia_codec_t* self, const void* in_data
}
else { // !h264->decoder.passthrough
/* decode the picture */
CHECK_HR(hr = h264->decoder.pInst->Process(h264->decoder.accumulator, h264->decoder.accumulator_pos, &pSampleOut));
CHECK_HR(hr = h264->decoder.pInst->Process(h264->decoder.accumulator, (UINT32)h264->decoder.accumulator_pos, &pSampleOut));
if (pSampleOut) {
CHECK_HR(hr = pSampleOut->GetBufferByIndex(0, &pBufferOut));
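
The (UINT32) casts above silence the size_t-to-UINT32 truncation warnings that appear once tsk_size_t becomes 64-bit on Win64. A hedged alternative is to make the narrowing explicit with a checked helper; NarrowToUInt32 is hypothetical and not part of the commit:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

static uint32_t NarrowToUInt32(size_t n)
{
    // Media buffers handled here stay far below 4 GiB, so the cast is safe;
    // the assert documents that assumption instead of hiding it.
    assert(n <= UINT32_MAX);
    return (uint32_t)n;
}

// Hypothetical usage: hr = pInst->Process(in_data, NarrowToUInt32(in_size), &pSampleOut);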

View File

@ -201,8 +201,8 @@ static int plugin_win_mf_consumer_video_prepare(tmedia_consumer_t* self, const t
}
pSelf->nNegFps = TMEDIA_CONSUMER(pSelf)->video.fps;
pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.display.width;
pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.display.height;
pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.width;
pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.display.height;
TSK_DEBUG_INFO("D3D9 video consumer: fps=%d, width=%d, height=%d",
pSelf->nNegFps,
@ -313,8 +313,8 @@ static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const v
if(hWnd)
{
// means HWND was not set but defined now
pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.in.width;
pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.in.height;
pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
CHECK_HR(hr = CreateDeviceD3D9(hWnd, &pSelf->pDevice, &pSelf->pD3D, pSelf->d3dpp));
CHECK_HR(hr = CreateSwapChain(hWnd, pSelf->nNegWidth, pSelf->nNegHeight, pSelf->pDevice, &pSelf->pSwapChain));
@ -328,10 +328,10 @@ static int plugin_win_mf_consumer_video_consume(tmedia_consumer_t* self, const v
// Update media type
SafeRelease(&pSelf->pSwapChain);
CHECK_HR(hr = CreateSwapChain(hWnd, TMEDIA_CONSUMER(pSelf)->video.in.width, TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
CHECK_HR(hr = CreateSwapChain(hWnd, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width, (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height, pSelf->pDevice, &pSelf->pSwapChain));
pSelf->nNegWidth = TMEDIA_CONSUMER(pSelf)->video.in.width;
pSelf->nNegHeight = TMEDIA_CONSUMER(pSelf)->video.in.height;
pSelf->nNegWidth = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.width;
pSelf->nNegHeight = (UINT32)TMEDIA_CONSUMER(pSelf)->video.in.height;
// Updating the destination will do nothing if the window size hasn't changed.
// Force updating the destination rect if the negotiated size changed
@ -987,7 +987,7 @@ static HRESULT HookWindow(plugin_win_mf_consumer_video_s *pSelf, HWND hWnd)
CHECK_HR(hr = UnhookWindow(pSelf));
if ((pSelf->hWindow = hWnd)) {
pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWL_WNDPROC, (LONG)WndProc);
pSelf->wndProc = (WNDPROC)SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)WndProc);
if (!pSelf->wndProc) {
TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
CHECK_HR(hr = E_FAIL);
@ -1003,7 +1003,7 @@ static HRESULT UnhookWindow(struct plugin_win_mf_consumer_video_s *pSelf)
{
tsk_safeobj_lock(pSelf);
if (pSelf->hWindow && pSelf->wndProc) {
SetWindowLongPtr(pSelf->hWindow, GWL_WNDPROC, (LONG)pSelf->wndProc);
SetWindowLongPtr(pSelf->hWindow, GWLP_WNDPROC, (LONG_PTR)pSelf->wndProc);
pSelf->wndProc = NULL;
}
if(pSelf->hWindow)
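
The consumer hooks the rendering window and later restores the saved WNDPROC, so the hunks above move both sides to the x64-safe GWLP_WNDPROC/LONG_PTR form. A minimal sketch of that hook/forward/restore lifecycle (names are illustrative; the plugin keeps the previous procedure in pSelf->wndProc):

#include <windows.h>

static WNDPROC g_prevProc = NULL;

static LRESULT CALLBACK HookedWndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    // ... react to WM_SIZE / WM_MOVE here, then always forward ...
    return CallWindowProc(g_prevProc, hWnd, uMsg, wParam, lParam);
}

static void HookWnd(HWND hWnd)
{
    g_prevProc = (WNDPROC)SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)HookedWndProc);
}

static void UnhookWnd(HWND hWnd)
{
    if (g_prevProc) {
        SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)g_prevProc);
        g_prevProc = NULL;
    }
}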

View File

@ -1,5 +1,5 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
/* Copyright (C) 2013-2015 Mamadou DIOP
* Copyright (C) 2013-2015 Doubango Telecom <http://www.doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
@ -124,8 +124,8 @@ static int plugin_win_mf_converter_video_ms_init(tmedia_converter_video_t* self,
pSelf->widthDst = dstWidth;
pSelf->heightDst = dstHeight;
pSelf->rotation = 0;
pSelf->xOutputSize = _plugin_win_mf_converter_video_ms_get_size(dstChroma, dstWidth, dstHeight);
pSelf->xInputSize = _plugin_win_mf_converter_video_ms_get_size(srcChroma, srcWidth, srcHeight);
pSelf->xOutputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(dstChroma, dstWidth, dstHeight);
pSelf->xInputSize = (UINT32)_plugin_win_mf_converter_video_ms_get_size(srcChroma, srcWidth, srcHeight);
SafeRelease(&pSelf->pSampleOut);
SafeRelease(&pSelf->pSampleIn);
@ -166,8 +166,8 @@ static int plugin_win_mf_converter_video_ms_init(tmedia_converter_video_t* self,
}
#endif
CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtSrc, &pTypeSrc, pSelf->widthSrc, pSelf->heightSrc));
CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtDst, &pTypeDst, pSelf->widthDst, pSelf->heightDst));
CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtSrc, &pTypeSrc, (UINT32)pSelf->widthSrc, (UINT32)pSelf->heightSrc));
CHECK_HR(hr = MFUtils::CreateVideoType(&pSelf->fmtDst, &pTypeDst, (UINT32)pSelf->widthDst, (UINT32)pSelf->heightDst));
CHECK_HR(hr = pSelf->pMFT->SetInputType(0, pTypeSrc, 0));
CHECK_HR(hr = pSelf->pMFT->SetOutputType(0, pTypeDst, 0));
@ -269,19 +269,19 @@ static tsk_size_t plugin_win_mf_converter_video_ms_process(tmedia_converter_vide
hr = _plugin_win_mf_converter_video_ms_copy_rgb32_down_top(
(BYTE*)*output,
(const BYTE*)pBufferPtr,
pSelf->widthDst,
pSelf->heightDst
(INT)pSelf->widthDst,
(INT)pSelf->heightDst
);
}
else
{
hr = MFCopyImage(
(BYTE*)*output,
(pSelf->widthDst << 2),
(LONG)(pSelf->widthDst << 2),
(BYTE*)pBufferPtr,
(pSelf->widthDst << 2),
(pSelf->widthDst << 2),
pSelf->heightDst
(LONG)(pSelf->widthDst << 2),
(DWORD)(pSelf->widthDst << 2),
(DWORD)pSelf->heightDst
);
}
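
MFCopyImage takes LONG strides and DWORD width-in-bytes/line counts, which is why the hunk above adds explicit casts once the width and height fields are size-typed on Win64. A hedged sketch of the same call for a packed RGB32 frame (the helper name and layout assumptions are mine, not the plugin's):

#include <windows.h>
#include <mfapi.h>   // link with mfplat.lib

static HRESULT CopyRgb32Frame(BYTE* pDst, const BYTE* pSrc, size_t width, size_t height)
{
    const LONG stride = (LONG)(width << 2);     // 4 bytes per RGB32 pixel, no row padding assumed
    return MFCopyImage(pDst, stride,            // destination and its stride
                       pSrc, stride,            // source and its stride
                       (DWORD)(width << 2),     // bytes to copy per row
                       (DWORD)height);          // number of rows
}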

View File

@ -1,13 +1,13 @@
/* Copyright (C) 2013 Mamadou DIOP
* Copyright (C) 2013 Doubango Telecom <http://www.doubango.org>
*
/* Copyright (C) 2013-2015 Mamadou DIOP
* Copyright (C) 2013-2015 Doubango Telecom <http://www.doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
@ -31,7 +31,6 @@
#include "tsk_thread.h"
#include "tsk_debug.h"
#include <KS.h>
#include <Codecapi.h>
#include <assert.h>
@ -51,7 +50,7 @@
#endif /* PLUGIN_MF_GOP_SIZE_IN_SECONDS */
DEFINE_GUID(PLUGIN_MF_LOW_LATENCY,
0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, 0x27, 0xa0, 0x24, 0xee);
extern const tmedia_codec_plugin_def_t *mf_codec_h264_main_plugin_def_t;
extern const tmedia_codec_plugin_def_t *mf_codec_h264_base_plugin_def_t;
@ -65,23 +64,23 @@ static int _plugin_win_mf_producer_video_unprepare(struct plugin_win_mf_producer
typedef struct plugin_win_mf_producer_video_s
{
TMEDIA_DECLARE_PRODUCER;
bool bStarted, bPrepared, bMuted;
tsk_thread_handle_t* ppTread[1];
HWND hWndPreview;
int32_t bitrate_bps; // used when encoder bundled only
DeviceListVideo* pDeviceList;
DeviceListVideo* pDeviceList;
MFCodecVideo *pEncoder;
IMFMediaSession *pSession;
IMFMediaSource *pSource;
SampleGrabberCB *pCallback;
IMFActivate *pSinkGrabber;
IMFMediaSession *pSession;
IMFMediaSource *pSource;
SampleGrabberCB *pCallback;
IMFActivate *pSinkGrabber;
IMFActivate *pSinkActivatePreview;
DisplayWatcher* pWatcherPreview;
IMFTopology *pTopology;
IMFTopology *pTopology;
IMFMediaType *pGrabberInputType;
}
plugin_win_mf_producer_video_t;
@ -93,60 +92,60 @@ static int plugin_win_mf_producer_video_set(tmedia_producer_t *self, const tmedi
HRESULT hr = S_OK;
plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
if(!pSelf || !param){
if (!pSelf || !param){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if(tsk_striequals(param->key, "action")){
if (tsk_striequals(param->key, "action")){
tmedia_codec_action_t action = (tmedia_codec_action_t)TSK_TO_INT32((uint8_t*)param->value);
HRESULT hr = S_OK;
switch(action){
case tmedia_codec_action_encode_idr:
{
if(pSelf->pEncoder)
{
CHECK_HR(hr = pSelf->pEncoder->RequestKeyFrame());
}
break;
}
case tmedia_codec_action_bw_down:
{
pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps << 1) / 3), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
TSK_DEBUG_INFO("New target bitrate = %d kbps", pSelf->bitrate_bps);
if(pSelf->pEncoder)
{
CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
}
break;
}
case tmedia_codec_action_bw_up:
{
pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps * 3) >> 1), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
TSK_DEBUG_INFO("New target bitrate = %d kbps", pSelf->bitrate_bps);
if(pSelf->pEncoder)
{
CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
}
break;
}
HRESULT hr = S_OK;
switch (action){
case tmedia_codec_action_encode_idr:
{
if (pSelf->pEncoder)
{
CHECK_HR(hr = pSelf->pEncoder->RequestKeyFrame());
}
break;
}
case tmedia_codec_action_bw_down:
{
pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps << 1) / 3), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
TSK_DEBUG_INFO("New target bitrate = %d kbps", pSelf->bitrate_bps);
if (pSelf->pEncoder)
{
CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
}
break;
}
case tmedia_codec_action_bw_up:
{
pSelf->bitrate_bps = TSK_CLAMP(0, (int32_t)((pSelf->bitrate_bps * 3) >> 1), TMEDIA_CODEC(pSelf)->bandwidth_max_upload);
TSK_DEBUG_INFO("New target bitrate = %d kbps", pSelf->bitrate_bps);
if (pSelf->pEncoder)
{
CHECK_HR(hr = pSelf->pEncoder->SetBitRate(pSelf->bitrate_bps));
}
break;
}
}
}
else if(param->value_type == tmedia_pvt_int64){
if(tsk_striequals(param->key, "local-hwnd")){
else if (param->value_type == tmedia_pvt_int64){
if (tsk_striequals(param->key, "local-hwnd")){
HWND hWnd = reinterpret_cast<HWND>((INT64)*((int64_t*)param->value));
if(hWnd != pSelf->hWndPreview)
if (hWnd != pSelf->hWndPreview)
{
pSelf->hWndPreview = hWnd;
if(pSelf->pWatcherPreview)
if (pSelf->pWatcherPreview)
{
CHECK_HR(hr = pSelf->pWatcherPreview->SetHwnd(hWnd));
}
}
}
}
else if(param->value_type == tmedia_pvt_int32){
if(tsk_striequals(param->key, "mute")){
else if (param->value_type == tmedia_pvt_int32){
if (tsk_striequals(param->key, "mute")){
pSelf->bMuted = (TSK_TO_INT32((uint8_t*)param->value) != 0);
if (pSelf->pCallback) {
pSelf->pCallback->SetMute(pSelf->bMuted);
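
The bandwidth actions above scale the current target bitrate by 2/3 on bw_down and by 3/2 on bw_up, clamped between 0 and the negotiated maximum upload bandwidth. A small standalone sketch of that arithmetic (AdaptBitrate is hypothetical):

#include <stdint.h>

static int32_t AdaptBitrate(int32_t current_bps, int32_t max_bps, bool down)
{
    // (x << 1) / 3 is ~2/3 of x; (x * 3) >> 1 is 3/2 of x. 64-bit math avoids overflow.
    int64_t next = down ? (((int64_t)current_bps << 1) / 3)
                        : (((int64_t)current_bps * 3) >> 1);
    if (next < 0) next = 0;                 // same effect as TSK_CLAMP(0, next, max_bps)
    if (next > max_bps) next = max_bps;
    return (int32_t)next;
}

// e.g. AdaptBitrate(1200000, 2000000, true) == 800000 bps (two thirds of 1.2 Mbps)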
@ -162,10 +161,10 @@ static int plugin_win_mf_producer_video_set(tmedia_producer_t *self, const tmedi
}
#endif
}
else if(tsk_striequals(param->key, "create-on-current-thead")){
else if (tsk_striequals(param->key, "create-on-current-thead")){
//producer->create_on_ui_thread = *((int32_t*)param->value) ? tsk_false : tsk_true;
}
else if(tsk_striequals(param->key, "plugin-firefox")){
else if (tsk_striequals(param->key, "plugin-firefox")){
//producer->plugin_firefox = (*((int32_t*)param->value) != 0);
//if(producer->grabber){
// producer->grabber->setPluginFirefox((producer->plugin_firefox == tsk_true));
@ -174,18 +173,18 @@ static int plugin_win_mf_producer_video_set(tmedia_producer_t *self, const tmedi
}
bail:
return SUCCEEDED(hr) ? 0 : -1;
return SUCCEEDED(hr) ? 0 : -1;
}
static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const tmedia_codec_t* codec)
{
plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
if(!pSelf || !codec && codec->plugin){
if (!pSelf || !codec && codec->plugin){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if(pSelf->bPrepared){
if (pSelf->bPrepared){
TSK_DEBUG_WARN("MF video producer already prepared");
return -1;
}
@ -199,9 +198,9 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
TMEDIA_PRODUCER(pSelf)->video.height = TMEDIA_CODEC_VIDEO(codec)->out.height;
TMEDIA_PRODUCER(pSelf)->encoder.codec_id = tmedia_codec_id_none; // means RAW frames as input
TSK_DEBUG_INFO("MF video producer: fps=%d, width=%d, height=%d",
TMEDIA_PRODUCER(pSelf)->video.fps,
TMEDIA_PRODUCER(pSelf)->video.width,
TSK_DEBUG_INFO("MF video producer: fps=%d, width=%d, height=%d",
TMEDIA_PRODUCER(pSelf)->video.fps,
TMEDIA_PRODUCER(pSelf)->video.width,
TMEDIA_PRODUCER(pSelf)->video.height);
HRESULT hr = S_OK;
@ -215,19 +214,19 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
const VideoSubTypeGuidPair *pcPreferredSubTypeGuidPair = NULL;
// create device list object
if(!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListVideo())){
if (!pSelf->pDeviceList && !(pSelf->pDeviceList = new DeviceListVideo())){
TSK_DEBUG_ERROR("Failed to create device list");
hr = E_OUTOFMEMORY;
goto bail;
}
// enumerate devices
hr = pSelf->pDeviceList->EnumerateDevices();
if(!SUCCEEDED(hr)){
if (!SUCCEEDED(hr)){
goto bail;
}
// check if we have at least one MF video source connected to the PC
if(pSelf->pDeviceList->Count() == 0){
if (pSelf->pDeviceList->Count() == 0){
TSK_DEBUG_WARN("No MF video source could be found...no video will be sent");
// do not break the negotiation as one-way video connection is a valid use-case
}
@ -235,18 +234,18 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
// Get best MF video source
IMFActivate* pActivate = NULL;
const char* pczSrcFriendlyName = tmedia_producer_get_friendly_name(tmedia_video);
if(!tsk_strnullORempty(pczSrcFriendlyName)) {
if (!tsk_strnullORempty(pczSrcFriendlyName)) {
TSK_DEBUG_INFO("MF pref. video source = %s", pczSrcFriendlyName);
wchar_t pczwSrcFriendlyName[MAX_PATH] = { 0 };
mbstowcs(pczwSrcFriendlyName, pczSrcFriendlyName, sizeof(pczwSrcFriendlyName)/sizeof(pczwSrcFriendlyName[0]));
mbstowcs(pczwSrcFriendlyName, pczSrcFriendlyName, sizeof(pczwSrcFriendlyName) / sizeof(pczwSrcFriendlyName[0]));
hr = pSelf->pDeviceList->GetDeviceBest(&pActivate, pczwSrcFriendlyName);
}
else {
hr = pSelf->pDeviceList->GetDeviceBest(&pActivate);
}
if(!SUCCEEDED(hr) || !pActivate){
if (!SUCCEEDED(hr) || !pActivate){
TSK_DEBUG_ERROR("Failed to get best MF video source");
if(!pActivate){
if (!pActivate){
hr = E_OUTOFMEMORY;
}
goto bail;
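
mbstowcs takes the destination capacity in wide characters, which is what the sizeof(buf)/sizeof(buf[0]) form above passes; note, though, that mbstowcs does not null-terminate when the source fills the buffer. A hedged sketch of a conversion that always terminates (NarrowToWide is hypothetical, dstCount assumed >= 1):

#include <stdlib.h>

static void NarrowToWide(wchar_t* dst, size_t dstCount, const char* src)
{
    size_t written = mbstowcs(dst, src, dstCount - 1);  // leave room for the terminator
    if (written == (size_t)-1) {
        written = 0;                                    // invalid multibyte sequence
    }
    dst[written] = L'\0';
}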
@ -258,7 +257,7 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
(void**)&pSelf->pSource
);
SafeRelease(&pActivate);
if(!SUCCEEDED(hr)){
if (!SUCCEEDED(hr)){
TSK_DEBUG_ERROR("ActivateObject(MF video source) failed");
goto bail;
}
@ -268,19 +267,19 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
// Must not be set because it is not supported by the Frame Rate Converter DSP (http://msdn.microsoft.com/en-us/library/windows/desktop/ff819100(v=vs.85).aspx) because of the color format (neither I420 nor NV12)
// Video Processor (http://msdn.microsoft.com/en-us/library/windows/desktop/hh162913(v=vs.85).aspx) supports both NV12 and I420
if(!bVideoProcessorIsSupported) {
if (!bVideoProcessorIsSupported) {
UINT32 nWidth, nHeight, nFps;
hr = MFUtils::GetBestFormat(
pSelf->pSource,
&MFVideoFormat_I420,
TMEDIA_PRODUCER(pSelf)->video.width,
TMEDIA_PRODUCER(pSelf)->video.height,
TMEDIA_PRODUCER(pSelf)->video.fps,
(UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
(UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
(UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
&nWidth,
&nHeight,
&nFps,
&pcPreferredSubTypeGuidPair
);
);
if (SUCCEEDED(hr))
{
TSK_DEBUG_INFO("Video processor not supported...using source fps=%u, width=%u, height=%u", nFps, nWidth, nHeight);
@ -296,39 +295,39 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
// Before embedding a H.264 encoder we have to be sure that:
// - Low latency is supported
// - The user decided to use MF encoder (Microsoft, Intel Quick Sync or any other)
if((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
BOOL bMFEncoderIsRegistered =
(codec->id == tmedia_codec_id_h264_mp && codec->plugin == mf_codec_h264_main_plugin_def_t)
|| (codec->id == tmedia_codec_id_h264_bp && codec->plugin == mf_codec_h264_base_plugin_def_t);
if(bMFEncoderIsRegistered)
if ((codec->id == tmedia_codec_id_h264_bp || codec->id == tmedia_codec_id_h264_mp) && MFUtils::IsLowLatencyH264Supported()) {
BOOL bMFEncoderIsRegistered =
(codec->id == tmedia_codec_id_h264_mp && codec->plugin == mf_codec_h264_main_plugin_def_t)
|| (codec->id == tmedia_codec_id_h264_bp && codec->plugin == mf_codec_h264_base_plugin_def_t);
if (bMFEncoderIsRegistered)
{
// both Microsoft and Intel encoders support NV12 only as input
// static const BOOL kIsEncoder = TRUE;
// hr = MFUtils::GetBestCodec(kIsEncoder, MFMediaType_Video, MFVideoFormat_NV12, MFVideoFormat_H264, &pSelf->pEncoder);
pSelf->pEncoder = (codec->id == tmedia_codec_id_h264_bp) ? MFCodecVideoH264::CreateCodecH264Base(MFCodecType_Encoder) : MFCodecVideoH264::CreateCodecH264Main(MFCodecType_Encoder);
if(pSelf->pEncoder)
if (pSelf->pEncoder)
{
pSelf->pEncoder->setBundled(TRUE);
int32_t avg_bitrate_kbps = tmedia_get_video_bandwidth_kbps_2(TMEDIA_PRODUCER(pSelf)->video.width, TMEDIA_PRODUCER(pSelf)->video.height, TMEDIA_PRODUCER(pSelf)->video.fps);
int32_t avg_bitrate_kbps = tmedia_get_video_bandwidth_kbps_2((unsigned int)TMEDIA_PRODUCER(pSelf)->video.width, (unsigned int)TMEDIA_PRODUCER(pSelf)->video.height, TMEDIA_PRODUCER(pSelf)->video.fps);
TSK_DEBUG_INFO("MF_MT_AVG_BITRATE defined with value = %d kbps", avg_bitrate_kbps);
pSelf->bitrate_bps = (avg_bitrate_kbps * 1024);
hr = pSelf->pEncoder->Initialize(
TMEDIA_PRODUCER(pSelf)->video.fps,
TMEDIA_PRODUCER(pSelf)->video.width,
TMEDIA_PRODUCER(pSelf)->video.height,
pSelf->bitrate_bps);
if(SUCCEEDED(hr))
(UINT32)TMEDIA_PRODUCER(pSelf)->video.fps,
(UINT32)TMEDIA_PRODUCER(pSelf)->video.width,
(UINT32)TMEDIA_PRODUCER(pSelf)->video.height,
(UINT32)pSelf->bitrate_bps);
if (SUCCEEDED(hr))
{
/*hr =*/ pSelf->pEncoder->SetGOPSize((PLUGIN_MF_GOP_SIZE_IN_SECONDS * TMEDIA_PRODUCER(pSelf)->video.fps));
}
if(FAILED(hr))
if (FAILED(hr))
{
SafeRelease(&pSelf->pEncoder);
hr = S_OK;
}
}
if(SUCCEEDED(hr) && pSelf->pEncoder)
if (SUCCEEDED(hr) && pSelf->pEncoder)
{
TMEDIA_PRODUCER(pSelf)->encoder.codec_id = codec->id; // means encoded frames as input
}
@ -357,26 +356,26 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
CHECK_HR(hr = MFSetAttributeSize(pSelf->pGrabberInputType, MF_MT_FRAME_SIZE, TMEDIA_PRODUCER(pSelf)->video.width, TMEDIA_PRODUCER(pSelf)->video.height));
CHECK_HR(hr = MFSetAttributeSize(pSelf->pGrabberInputType, MF_MT_FRAME_SIZE, (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, (UINT32)TMEDIA_PRODUCER(pSelf)->video.height));
CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_FRAME_RATE, TMEDIA_PRODUCER(pSelf)->video.fps, 1));
CHECK_HR(hr = MFSetAttributeRatio(pSelf->pGrabberInputType, MF_MT_PIXEL_ASPECT_RATIO, 1, 1));
CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, pSelf->pEncoder ? FALSE : TRUE));
CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_FIXED_SIZE_SAMPLES, pSelf->pEncoder ? FALSE : TRUE));
if(pSelf->pEncoder) {
switch(codec->id){
case tmedia_codec_id_h264_bp: case tmedia_codec_id_h264_mp:
{
CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_MPEG2_PROFILE, (codec->id == tmedia_codec_id_h264_bp) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_AVG_BITRATE, pSelf->bitrate_bps));
break;
}
default:
{
TSK_DEBUG_ERROR("HW encoder with id = %d not expected", codec->id);
assert(false);
}
if (pSelf->pEncoder) {
switch (codec->id){
case tmedia_codec_id_h264_bp: case tmedia_codec_id_h264_mp:
{
CHECK_HR(hr = pSelf->pGrabberInputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264));
CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_MPEG2_PROFILE, (codec->id == tmedia_codec_id_h264_bp) ? eAVEncH264VProfile_Base : eAVEncH264VProfile_Main));
CHECK_HR(hr = pSelf->pGrabberInputType->SetUINT32(MF_MT_AVG_BITRATE, pSelf->bitrate_bps));
break;
}
default:
{
TSK_DEBUG_ERROR("HW encoder with id = %d not expected", codec->id);
assert(false);
}
}
TMEDIA_PRODUCER(pSelf)->video.chroma = tmedia_chroma_nv12;
TSK_DEBUG_INFO("MF video producer chroma = NV12 (because of HW encoder)");
@ -388,8 +387,8 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
TMEDIA_PRODUCER(pSelf)->video.chroma = pcPreferredSubTypeGuidPair ? pcPreferredSubTypeGuidPair->chroma : tmedia_chroma_yuv420p;
TSK_DEBUG_INFO("MF video producer chroma = %d", TMEDIA_PRODUCER(pSelf)->video.chroma);
}
if(pSelf->pEncoder) {
if (pSelf->pEncoder) {
// Unlock the encoder
//BOOL bIsAsyncMFT = FALSE;
//CHECK_HR(hr = MFUtils::IsAsyncMFT(pSelf->pEncoder->GetMFT(), &bIsAsyncMFT));
@ -419,11 +418,11 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
// Create the topology.
CHECK_HR(hr = MFUtils::CreateTopology(
pSelf->pSource,
pSelf->pEncoder ? pSelf->pEncoder->GetMFT() : NULL,
pSelf->pSinkGrabber,
pSelf->pSource,
pSelf->pEncoder ? pSelf->pEncoder->GetMFT() : NULL,
pSelf->pSinkGrabber,
pSelf->pSinkActivatePreview,
pSelf->pGrabberInputType,
pSelf->pGrabberInputType,
&pTopology));
// Resolve topology (adds video processors if needed).
CHECK_HR(hr = MFUtils::ResolveTopology(pTopology, &pSelf->pTopology));
@ -432,24 +431,24 @@ static int plugin_win_mf_producer_video_prepare(tmedia_producer_t* self, const t
CHECK_HR(hr = MFUtils::FindNodeObject(pSelf->pTopology, MFUtils::g_ullTopoIdSinkPreview, (void**)&pEvr));
// Find negotiated media and update producer
UINT32 nNegWidth = TMEDIA_PRODUCER(pSelf)->video.width, nNegHeight = TMEDIA_PRODUCER(pSelf)->video.height, nNegNumeratorFps = TMEDIA_PRODUCER(pSelf)->video.fps, nNegDenominatorFps = 1;
UINT32 nNegWidth = (UINT32)TMEDIA_PRODUCER(pSelf)->video.width, nNegHeight = (UINT32)TMEDIA_PRODUCER(pSelf)->video.height, nNegNumeratorFps = (UINT32)TMEDIA_PRODUCER(pSelf)->video.fps, nNegDenominatorFps = 1;
CHECK_HR(hr = pSelf->pTopology->GetNodeByID(MFUtils::g_ullTopoIdSinkMain, &pNodeGrabber));
CHECK_HR(hr = pNodeGrabber->GetInputPrefType(0, &pGrabberNegotiatedInputMedia));
hr = MFGetAttributeSize(pGrabberNegotiatedInputMedia, MF_MT_FRAME_SIZE, &nNegWidth, &nNegHeight);
if(SUCCEEDED(hr))
if (SUCCEEDED(hr))
{
TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: width(%u/%u), height(%u/%u)",
TMEDIA_PRODUCER(pSelf)->video.width, nNegWidth,
TMEDIA_PRODUCER(pSelf)->video.height, nNegHeight
TMEDIA_PRODUCER(pSelf)->video.width, nNegWidth,
TMEDIA_PRODUCER(pSelf)->video.height, nNegHeight
);
TMEDIA_PRODUCER(pSelf)->video.width = nNegWidth;
TMEDIA_PRODUCER(pSelf)->video.height = nNegHeight;
}
hr = MFGetAttributeRatio(pGrabberNegotiatedInputMedia, MF_MT_FRAME_RATE, &nNegNumeratorFps, &nNegDenominatorFps);
if(SUCCEEDED(hr))
if (SUCCEEDED(hr))
{
TSK_DEBUG_INFO("MF video producer topology vs sdp parameters: fps(%u/%u)",
TMEDIA_PRODUCER(pSelf)->video.fps, (nNegNumeratorFps / nNegDenominatorFps)
TMEDIA_PRODUCER(pSelf)->video.fps, (nNegNumeratorFps / nNegDenominatorFps)
);
TMEDIA_PRODUCER(pSelf)->video.fps = (nNegNumeratorFps / nNegDenominatorFps);
}
@ -466,7 +465,7 @@ bail:
SafeRelease(&pEncoderInputType);
SafeRelease(&pNodeGrabber);
SafeRelease(&pGrabberNegotiatedInputMedia);
pSelf->bPrepared = SUCCEEDED(hr);
return pSelf->bPrepared ? 0 : -1;
}
@ -475,16 +474,16 @@ static int plugin_win_mf_producer_video_start(tmedia_producer_t* self)
{
plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
if(!pSelf){
if (!pSelf){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if(pSelf->bStarted){
if (pSelf->bStarted){
TSK_DEBUG_INFO("MF video producer already started");
return 0;
}
if(!pSelf->bPrepared){
if (!pSelf->bPrepared){
TSK_DEBUG_ERROR("MF video producer not prepared");
return -1;
}
@ -492,7 +491,7 @@ static int plugin_win_mf_producer_video_start(tmedia_producer_t* self)
HRESULT hr = S_OK;
// Run preview watcher
if(pSelf->pWatcherPreview) {
if (pSelf->pWatcherPreview) {
CHECK_HR(hr = pSelf->pWatcherPreview->Start());
}
@ -502,11 +501,11 @@ static int plugin_win_mf_producer_video_start(tmedia_producer_t* self)
// Start asynchronous watcher thread
pSelf->bStarted = true;
int ret = tsk_thread_create(&pSelf->ppTread[0], RunSessionThread, pSelf);
if(ret != 0) {
if (ret != 0) {
TSK_DEBUG_ERROR("Failed to create thread");
hr = E_FAIL;
pSelf->bStarted = false;
if(pSelf->ppTread[0]){
if (pSelf->ppTread[0]){
tsk_thread_join(&pSelf->ppTread[0]);
}
MFUtils::ShutdownSession(pSelf->pSession, pSelf->pSource);
@ -521,11 +520,11 @@ static int plugin_win_mf_producer_video_pause(tmedia_producer_t* self)
{
plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
if(!pSelf){
if (!pSelf){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if(!pSelf->bStarted)
if (!pSelf->bStarted)
{
TSK_DEBUG_INFO("MF video producer not started");
return 0;
@ -540,24 +539,24 @@ static int plugin_win_mf_producer_video_stop(tmedia_producer_t* self)
{
plugin_win_mf_producer_video_t* pSelf = (plugin_win_mf_producer_video_t*)self;
if(!pSelf){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if (!pSelf){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
HRESULT hr = S_OK;
HRESULT hr = S_OK;
if(pSelf->pWatcherPreview){
if (pSelf->pWatcherPreview){
hr = pSelf->pWatcherPreview->Stop();
}
// for the thread
pSelf->bStarted = false;
hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
if(pSelf->ppTread[0]){
tsk_thread_join(&pSelf->ppTread[0]);
}
hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
// for the thread
pSelf->bStarted = false;
hr = MFUtils::ShutdownSession(pSelf->pSession, NULL); // stop session to wakeup the asynchronous thread
if (pSelf->ppTread[0]){
tsk_thread_join(&pSelf->ppTread[0]);
}
hr = MFUtils::ShutdownSession(NULL, pSelf->pSource); // stop source to release the camera
// next start() will be called after prepare()
return _plugin_win_mf_producer_video_unprepare(pSelf);
@ -565,38 +564,38 @@ static int plugin_win_mf_producer_video_stop(tmedia_producer_t* self)
static int _plugin_win_mf_producer_video_unprepare(plugin_win_mf_producer_video_t* pSelf)
{
if(!pSelf){
if (!pSelf){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if(pSelf->bStarted) {
if (pSelf->bStarted) {
// plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
TSK_DEBUG_ERROR("Producer must be stopped before calling unprepare");
}
if(pSelf->pDeviceList){
if (pSelf->pDeviceList){
delete pSelf->pDeviceList, pSelf->pDeviceList = NULL;
}
if(pSelf->pWatcherPreview){
}
if (pSelf->pWatcherPreview){
pSelf->pWatcherPreview->Stop();
}
if(pSelf->pSource){
if (pSelf->pSource){
pSelf->pSource->Shutdown();
}
if(pSelf->pSession){
pSelf->pSession->Shutdown();
}
}
if (pSelf->pSession){
pSelf->pSession->Shutdown();
}
SafeRelease(&pSelf->pEncoder);
SafeRelease(&pSelf->pSession);
SafeRelease(&pSelf->pSource);
SafeRelease(&pSelf->pSession);
SafeRelease(&pSelf->pSource);
SafeRelease(&pSelf->pSinkActivatePreview);
SafeRelease(&pSelf->pCallback);
SafeRelease(&pSelf->pSinkGrabber);
SafeRelease(&pSelf->pTopology);
SafeRelease(&pSelf->pCallback);
SafeRelease(&pSelf->pSinkGrabber);
SafeRelease(&pSelf->pTopology);
SafeRelease(&pSelf->pGrabberInputType);
if(pSelf->pWatcherPreview){
if (pSelf->pWatcherPreview){
delete pSelf->pWatcherPreview;
pSelf->pWatcherPreview = NULL;
}
@ -615,7 +614,7 @@ static tsk_object_t* plugin_win_mf_producer_video_ctor(tsk_object_t * self, va_l
MFUtils::Startup();
plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
if(pSelf){
if (pSelf){
/* init base */
tmedia_producer_init(TMEDIA_PRODUCER(pSelf));
@ -632,11 +631,11 @@ static tsk_object_t* plugin_win_mf_producer_video_ctor(tsk_object_t * self, va_l
}
/* destructor */
static tsk_object_t* plugin_win_mf_producer_video_dtor(tsk_object_t * self)
{
{
plugin_win_mf_producer_video_t *pSelf = (plugin_win_mf_producer_video_t *)self;
if(pSelf){
if (pSelf){
/* stop */
if(pSelf->bStarted){
if (pSelf->bStarted){
plugin_win_mf_producer_video_stop(TMEDIA_PRODUCER(pSelf));
}
@ -649,15 +648,15 @@ static tsk_object_t* plugin_win_mf_producer_video_dtor(tsk_object_t * self)
return self;
}
/* object definition */
static const tsk_object_def_t plugin_win_mf_producer_video_def_s =
static const tsk_object_def_t plugin_win_mf_producer_video_def_s =
{
sizeof(plugin_win_mf_producer_video_t),
plugin_win_mf_producer_video_ctor,
plugin_win_mf_producer_video_ctor,
plugin_win_mf_producer_video_dtor,
tsk_null,
tsk_null,
};
/* plugin definition*/
static const tmedia_producer_plugin_def_t plugin_win_mf_producer_video_plugin_def_s =
static const tmedia_producer_plugin_def_t plugin_win_mf_producer_video_plugin_def_s =
{
&plugin_win_mf_producer_video_def_s,
@ -684,11 +683,11 @@ static void* TSK_STDCALL RunSessionThread(void *pArg)
TSK_DEBUG_INFO("RunSessionThread (MF video producer) - ENTER");
while(pSelf->bStarted){
while (pSelf->bStarted){
CHECK_HR(hr = pSelf->pSession->GetEvent(0, &pEvent));
CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
CHECK_HR(hr = pEvent->GetType(&met));
if (FAILED(hrStatus) /*&& hrStatus != MF_E_NO_SAMPLE_TIMESTAMP*/)
{
TSK_DEBUG_ERROR("Session error: 0x%x (event id: %d)\n", hrStatus, met);

View File

@ -0,0 +1,142 @@
/*
* Copyright (c) 2013 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/**
* SvcContext - input parameters and state to encode a multi-layered
* spatial SVC frame
*/
#ifndef VPX_SVC_CONTEXT_H_
#define VPX_SVC_CONTEXT_H_
#include "vpx/vp8cx.h"
#include "vpx/vpx_encoder.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef enum SVC_ENCODING_MODE {
INTER_LAYER_PREDICTION_I,
ALT_INTER_LAYER_PREDICTION_IP,
INTER_LAYER_PREDICTION_IP,
USE_GOLDEN_FRAME
} SVC_ENCODING_MODE;
typedef enum SVC_LOG_LEVEL {
SVC_LOG_ERROR,
SVC_LOG_INFO,
SVC_LOG_DEBUG
} SVC_LOG_LEVEL;
typedef struct {
// public interface to svc_command options
int spatial_layers; // number of layers
int first_frame_full_size; // set to one to force first frame full size
SVC_ENCODING_MODE encoding_mode; // svc encoding strategy
SVC_LOG_LEVEL log_level; // amount of information to display
int log_print; // when set, printf log messages instead of returning the
// message with svc_get_message
// private storage for vpx_svc_encode
void *internal;
} SvcContext;
/**
* Set SVC options
* options are supplied as a single string separated by spaces
* Format: encoding-mode=<i|ip|alt-ip|gf>
* layers=<layer_count>
* scaling-factors=<n1>/<d1>,<n2>/<d2>,...
* quantizers=<q1>,<q2>,...
*/
vpx_codec_err_t vpx_svc_set_options(SvcContext *svc_ctx, const char *options);
/**
* Set SVC quantizer values
* values comma separated, ordered from lowest resolution to highest
* e.g., "60,53,39,33,27"
*/
vpx_codec_err_t vpx_svc_set_quantizers(SvcContext *svc_ctx,
const char *quantizer_values);
/**
* Set SVC scale factors
* values comma separated, ordered from lowest resolution to highest
* e.g., "4/16,5/16,7/16,11/16,16/16"
*/
vpx_codec_err_t vpx_svc_set_scale_factors(SvcContext *svc_ctx,
const char *scale_factors);
/**
* initialize SVC encoding
*/
vpx_codec_err_t vpx_svc_init(SvcContext *svc_ctx, vpx_codec_ctx_t *codec_ctx,
vpx_codec_iface_t *iface,
vpx_codec_enc_cfg_t *cfg);
/**
* encode a frame of video with multiple layers
*/
vpx_codec_err_t vpx_svc_encode(SvcContext *svc_ctx, vpx_codec_ctx_t *codec_ctx,
struct vpx_image *rawimg, vpx_codec_pts_t pts,
int64_t duration, int deadline);
/**
* finished with svc encoding, release allocated resources
*/
void vpx_svc_release(SvcContext *svc_ctx);
/**
* dump accumulated statistics and reset accumulated values
*/
const char *vpx_svc_dump_statistics(SvcContext *svc_ctx);
/**
* get status message from previous encode
*/
const char *vpx_svc_get_message(const SvcContext *svc_ctx);
/**
* return size of encoded data to be returned by vpx_svc_get_buffer
*/
size_t vpx_svc_get_frame_size(const SvcContext *svc_ctx);
/**
* return buffer with encoded data
*/
void *vpx_svc_get_buffer(const SvcContext *svc_ctx);
/**
* return spatial resolution of the specified layer
*/
vpx_codec_err_t vpx_svc_get_layer_resolution(const SvcContext *svc_ctx,
int layer,
unsigned int *width,
unsigned int *height);
/**
* return number of frames that have been encoded
*/
int vpx_svc_get_encode_frame_count(const SvcContext *svc_ctx);
/**
* return 1 if last encoded frame was a keyframe
*/
int vpx_svc_is_keyframe(const SvcContext *svc_ctx);
/**
* force the next frame to be a keyframe
*/
void vpx_svc_set_keyframe(SvcContext *svc_ctx);
#ifdef __cplusplus
} // extern "C"
#endif
#endif /* VPX_SVC_CONTEXT_H_ */
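
A hedged usage sketch of the SVC helper API declared above; the include path, iface, layer count and scaling factors are assumptions, and error handling is elided:

#include "vpx/vpx_encoder.h"
#include "vpx/vp8cx.h"
#include "vpx/svc_context.h"   // assumed path for the header shown above

static void svc_encode_sketch(vpx_image_t *raw, vpx_codec_pts_t pts, int64_t duration)
{
    SvcContext svc = {0};
    vpx_codec_ctx_t codec;
    vpx_codec_enc_cfg_t cfg;

    vpx_codec_enc_config_default(vpx_codec_vp9_cx(), &cfg, 0);
    svc.spatial_layers = 2;
    svc.encoding_mode = INTER_LAYER_PREDICTION_IP;
    vpx_svc_set_options(&svc, "layers=2 scaling-factors=1/2,1/1");  // options go in before init
    vpx_svc_init(&svc, &codec, vpx_codec_vp9_cx(), &cfg);

    vpx_svc_encode(&svc, &codec, raw, pts, duration, VPX_DL_REALTIME);
    // encoded bytes: vpx_svc_get_buffer(&svc), length: vpx_svc_get_frame_size(&svc)

    vpx_svc_release(&svc);
    vpx_codec_destroy(&codec);
}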

View File

@ -8,7 +8,6 @@
* be found in the AUTHORS file in the root of the source tree.
*/
/*!\defgroup vp8 VP8
* \ingroup codecs
* VP8 is vpx's newest video compression algorithm that uses motion
@ -30,40 +29,50 @@
*/
#ifndef VP8_H
#define VP8_H
#include "vpx_codec_impl_top.h"
#include "./vpx_codec.h"
#include "./vpx_image.h"
#ifdef __cplusplus
extern "C" {
#endif
/*!\brief Control functions
*
* The set of macros define the control functions of VP8 interface
*/
enum vp8_com_control_id
{
VP8_SET_REFERENCE = 1, /**< pass in an external frame into decoder to be used as reference frame */
VP8_COPY_REFERENCE = 2, /**< get a copy of reference frame from the decoder */
VP8_SET_POSTPROC = 3, /**< set the decoder's post processing settings */
VP8_SET_DBG_COLOR_REF_FRAME = 4, /**< set the reference frames to color for each macroblock */
VP8_SET_DBG_COLOR_MB_MODES = 5, /**< set which macro block modes to color */
VP8_SET_DBG_COLOR_B_MODES = 6, /**< set which blocks modes to color */
VP8_SET_DBG_DISPLAY_MV = 7, /**< set which motion vector modes to draw */
VP8_COMMON_CTRL_ID_MAX,
VP8_DECODER_CTRL_ID_START = 256
enum vp8_com_control_id {
VP8_SET_REFERENCE = 1, /**< pass in an external frame into decoder to be used as reference frame */
VP8_COPY_REFERENCE = 2, /**< get a copy of reference frame from the decoder */
VP8_SET_POSTPROC = 3, /**< set the decoder's post processing settings */
VP8_SET_DBG_COLOR_REF_FRAME = 4, /**< set the reference frames to color for each macroblock */
VP8_SET_DBG_COLOR_MB_MODES = 5, /**< set which macro block modes to color */
VP8_SET_DBG_COLOR_B_MODES = 6, /**< set which blocks modes to color */
VP8_SET_DBG_DISPLAY_MV = 7, /**< set which motion vector modes to draw */
/* TODO(jkoleszar): The encoder incorrectly reuses some of these values (5+)
* for its control ids. These should be migrated to something like the
* VP8_DECODER_CTRL_ID_START range next time we're ready to break the ABI.
*/
VP9_GET_REFERENCE = 128, /**< get a pointer to a reference frame */
VP8_COMMON_CTRL_ID_MAX,
VP8_DECODER_CTRL_ID_START = 256
};
/*!\brief post process flags
*
* The set of macros define VP8 decoder post processing flags
*/
enum vp8_postproc_level
{
VP8_NOFILTERING = 0,
VP8_DEBLOCK = 1<<0,
VP8_DEMACROBLOCK = 1<<1,
VP8_ADDNOISE = 1<<2,
VP8_DEBUG_TXT_FRAME_INFO = 1<<3, /**< print frame information */
VP8_DEBUG_TXT_MBLK_MODES = 1<<4, /**< print macro block modes over each macro block */
VP8_DEBUG_TXT_DC_DIFF = 1<<5, /**< print dc diff for each macro block */
VP8_DEBUG_TXT_RATE_INFO = 1<<6, /**< print video rate info (encoder only) */
VP8_MFQE = 1<<10
enum vp8_postproc_level {
VP8_NOFILTERING = 0,
VP8_DEBLOCK = 1 << 0,
VP8_DEMACROBLOCK = 1 << 1,
VP8_ADDNOISE = 1 << 2,
VP8_DEBUG_TXT_FRAME_INFO = 1 << 3, /**< print frame information */
VP8_DEBUG_TXT_MBLK_MODES = 1 << 4, /**< print macro block modes over each macro block */
VP8_DEBUG_TXT_DC_DIFF = 1 << 5, /**< print dc diff for each macro block */
VP8_DEBUG_TXT_RATE_INFO = 1 << 6, /**< print video rate info (encoder only) */
VP8_MFQE = 1 << 10
};
/*!\brief post process flags
@ -73,41 +82,44 @@ enum vp8_postproc_level
* to VP8_DEBLOCK and deblocking_level to 1.
*/
typedef struct vp8_postproc_cfg
{
int post_proc_flag; /**< the types of post processing to be done, should be combination of "vp8_postproc_level" */
int deblocking_level; /**< the strength of deblocking, valid range [0, 16] */
int noise_level; /**< the strength of additive noise, valid range [0, 16] */
typedef struct vp8_postproc_cfg {
int post_proc_flag; /**< the types of post processing to be done, should be combination of "vp8_postproc_level" */
int deblocking_level; /**< the strength of deblocking, valid range [0, 16] */
int noise_level; /**< the strength of additive noise, valid range [0, 16] */
} vp8_postproc_cfg_t;
/*!\brief reference frame type
*
* The set of macros define the type of VP8 reference frames
*/
typedef enum vpx_ref_frame_type
{
VP8_LAST_FRAME = 1,
VP8_GOLD_FRAME = 2,
VP8_ALTR_FRAME = 4
typedef enum vpx_ref_frame_type {
VP8_LAST_FRAME = 1,
VP8_GOLD_FRAME = 2,
VP8_ALTR_FRAME = 4
} vpx_ref_frame_type_t;
/*!\brief reference frame data struct
*
* define the data struct to access vp8 reference frames
* Define the data struct to access vp8 reference frames.
*/
typedef struct vpx_ref_frame
{
vpx_ref_frame_type_t frame_type; /**< which reference frame */
vpx_image_t img; /**< reference frame data in image format */
typedef struct vpx_ref_frame {
vpx_ref_frame_type_t frame_type; /**< which reference frame */
vpx_image_t img; /**< reference frame data in image format */
} vpx_ref_frame_t;
/*!\brief VP9 specific reference frame data struct
*
* Define the data struct to access vp9 reference frames.
*/
typedef struct vp9_ref_frame {
int idx; /**< frame index to get (input) */
vpx_image_t img; /**< img structure to populate (output) */
} vp9_ref_frame_t;
/*!\brief vp8 decoder control function parameter type
*
* defines the data type for each of VP8 decoder control function requires
*/
VPX_CTRL_USE_TYPE(VP8_SET_REFERENCE, vpx_ref_frame_t *)
VPX_CTRL_USE_TYPE(VP8_COPY_REFERENCE, vpx_ref_frame_t *)
VPX_CTRL_USE_TYPE(VP8_SET_POSTPROC, vp8_postproc_cfg_t *)
@ -115,9 +127,12 @@ VPX_CTRL_USE_TYPE(VP8_SET_DBG_COLOR_REF_FRAME, int)
VPX_CTRL_USE_TYPE(VP8_SET_DBG_COLOR_MB_MODES, int)
VPX_CTRL_USE_TYPE(VP8_SET_DBG_COLOR_B_MODES, int)
VPX_CTRL_USE_TYPE(VP8_SET_DBG_DISPLAY_MV, int)
VPX_CTRL_USE_TYPE(VP9_GET_REFERENCE, vp9_ref_frame_t *)
/*! @} - end defgroup vp8 */
#include "vpx_codec_impl_bottom.h"
#ifdef __cplusplus
} // extern "C"
#endif
#endif
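
A hedged sketch of driving one of the controls declared above, VP8_SET_POSTPROC, through vpx_codec_control; the flag and level values are illustrative, and the decoder is assumed to have been initialized with VPX_CODEC_USE_POSTPROC:

#include "vpx/vp8dx.h"       // pulls in vp8.h for the common controls
#include "vpx/vpx_decoder.h"

static void enable_postproc(vpx_codec_ctx_t *decoder)
{
    vp8_postproc_cfg_t pp;
    pp.post_proc_flag   = VP8_DEBLOCK | VP8_DEMACROBLOCK;  // vp8_postproc_level flags
    pp.deblocking_level = 4;                               // valid range [0, 16]
    pp.noise_level      = 0;                               // no additive noise
    if (vpx_codec_control(decoder, VP8_SET_POSTPROC, &pp) != VPX_CODEC_OK) {
        // decoder built without post-processing support
    }
}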

View File

@ -7,7 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef VP8CX_H
#define VP8CX_H
/*!\defgroup vp8_encoder WebM VP8 Encoder
* \ingroup vp8
@ -20,9 +21,10 @@
* \brief Provides definitions for using the VP8 encoder algorithm within the
* vpx Codec Interface.
*/
#ifndef VP8CX_H
#define VP8CX_H
#include "vpx_codec_impl_top.h"
#ifdef __cplusplus
extern "C" {
#endif
/*!\name Algorithm interface for VP8
*
@ -31,7 +33,14 @@
* @{
*/
extern vpx_codec_iface_t vpx_codec_vp8_cx_algo;
extern vpx_codec_iface_t* vpx_codec_vp8_cx(void);
extern vpx_codec_iface_t *vpx_codec_vp8_cx(void);
/* TODO(jkoleszar): These move to VP9 in a later patch set. */
extern vpx_codec_iface_t vpx_codec_vp9_cx_algo;
extern vpx_codec_iface_t *vpx_codec_vp9_cx(void);
extern vpx_codec_iface_t vpx_codec_vp9x_cx_algo;
extern vpx_codec_iface_t *vpx_codec_vp9x_cx(void);
/*!@} - end algorithm interface member group*/
@ -121,75 +130,84 @@ extern vpx_codec_iface_t* vpx_codec_vp8_cx(void);
*
* \sa #vpx_codec_control
*/
enum vp8e_enc_control_id
{
VP8E_UPD_ENTROPY = 5, /**< control function to set mode of entropy update in encoder */
VP8E_UPD_REFERENCE, /**< control function to set reference update mode in encoder */
VP8E_USE_REFERENCE, /**< control function to set which reference frame encoder can use */
VP8E_SET_ROI_MAP, /**< control function to pass an ROI map to encoder */
VP8E_SET_ACTIVEMAP, /**< control function to pass an Active map to encoder */
VP8E_SET_SCALEMODE = 11, /**< control function to set encoder scaling mode */
/*!\brief control function to set vp8 encoder cpuused
*
* Changes in this value influences, among others, the encoder's selection
* of motion estimation methods. Values greater than 0 will increase encoder
* speed at the expense of quality.
* The full set of adjustments can be found in
* onyx_if.c:vp8_set_speed_features().
* \todo List highlights of the changes at various levels.
*
* \note Valid range: -16..16
*/
VP8E_SET_CPUUSED = 13,
VP8E_SET_ENABLEAUTOALTREF, /**< control function to enable vp8 to automatic set and use altref frame */
VP8E_SET_NOISE_SENSITIVITY, /**< control function to set noise sensitivity */
VP8E_SET_SHARPNESS, /**< control function to set sharpness */
VP8E_SET_STATIC_THRESHOLD, /**< control function to set the threshold for macroblocks treated static */
VP8E_SET_TOKEN_PARTITIONS, /**< control function to set the number of token partitions */
VP8E_GET_LAST_QUANTIZER, /**< return the quantizer chosen by the
enum vp8e_enc_control_id {
VP8E_UPD_ENTROPY = 5, /**< control function to set mode of entropy update in encoder */
VP8E_UPD_REFERENCE, /**< control function to set reference update mode in encoder */
VP8E_USE_REFERENCE, /**< control function to set which reference frame encoder can use */
VP8E_SET_ROI_MAP, /**< control function to pass an ROI map to encoder */
VP8E_SET_ACTIVEMAP, /**< control function to pass an Active map to encoder */
VP8E_SET_SCALEMODE = 11, /**< control function to set encoder scaling mode */
/*!\brief control function to set vp8 encoder cpuused
*
* Changes in this value influences, among others, the encoder's selection
* of motion estimation methods. Values greater than 0 will increase encoder
* speed at the expense of quality.
* The full set of adjustments can be found in
* onyx_if.c:vp8_set_speed_features().
* \todo List highlights of the changes at various levels.
*
* \note Valid range: -16..16
*/
VP8E_SET_CPUUSED = 13,
VP8E_SET_ENABLEAUTOALTREF, /**< control function to enable vp8 to automatic set and use altref frame */
VP8E_SET_NOISE_SENSITIVITY, /**< control function to set noise sensitivity */
VP8E_SET_SHARPNESS, /**< control function to set sharpness */
VP8E_SET_STATIC_THRESHOLD, /**< control function to set the threshold for macroblocks treated static */
VP8E_SET_TOKEN_PARTITIONS, /**< control function to set the number of token partitions */
VP8E_GET_LAST_QUANTIZER, /**< return the quantizer chosen by the
encoder for the last frame using the internal
scale */
VP8E_GET_LAST_QUANTIZER_64, /**< return the quantizer chosen by the
VP8E_GET_LAST_QUANTIZER_64, /**< return the quantizer chosen by the
encoder for the last frame, using the 0..63
scale as used by the rc_*_quantizer config
parameters */
VP8E_SET_ARNR_MAXFRAMES, /**< control function to set the max number of frames blurred creating arf*/
VP8E_SET_ARNR_STRENGTH , /**< control function to set the filter strength for the arf */
VP8E_SET_ARNR_TYPE , /**< control function to set the type of filter to use for the arf*/
VP8E_SET_TUNING, /**< control function to set visual tuning */
/*!\brief control function to set constrained quality level
*
* \attention For this value to be used vpx_codec_enc_cfg_t::g_usage must be
* set to #VPX_CQ.
* \note Valid range: 0..63
*/
VP8E_SET_CQ_LEVEL,
VP8E_SET_ARNR_MAXFRAMES, /**< control function to set the max number of frames blurred creating arf*/
VP8E_SET_ARNR_STRENGTH, /**< control function to set the filter strength for the arf */
VP8E_SET_ARNR_TYPE, /**< control function to set the type of filter to use for the arf*/
VP8E_SET_TUNING, /**< control function to set visual tuning */
/*!\brief control function to set constrained quality level
*
* \attention For this value to be used vpx_codec_enc_cfg_t::g_usage must be
* set to #VPX_CQ.
* \note Valid range: 0..63
*/
VP8E_SET_CQ_LEVEL,
/*!\brief Max data rate for Intra frames
*
* This value controls additional clamping on the maximum size of a
* keyframe. It is expressed as a percentage of the average
* per-frame bitrate, with the special (and default) value 0 meaning
* unlimited, or no additional clamping beyond the codec's built-in
* algorithm.
*
* For example, to allocate no more than 4.5 frames worth of bitrate
* to a keyframe, set this to 450.
*
*/
VP8E_SET_MAX_INTRA_BITRATE_PCT
/*!\brief Max data rate for Intra frames
*
* This value controls additional clamping on the maximum size of a
* keyframe. It is expressed as a percentage of the average
* per-frame bitrate, with the special (and default) value 0 meaning
* unlimited, or no additional clamping beyond the codec's built-in
* algorithm.
*
* For example, to allocate no more than 4.5 frames worth of bitrate
* to a keyframe, set this to 450.
*
*/
VP8E_SET_MAX_INTRA_BITRATE_PCT,
/* TODO(jkoleszar): Move to vp9cx.h */
VP9E_SET_LOSSLESS,
VP9E_SET_TILE_COLUMNS,
VP9E_SET_TILE_ROWS,
VP9E_SET_FRAME_PARALLEL_DECODING,
VP9E_SET_AQ_MODE,
VP9E_SET_SVC,
VP9E_SET_SVC_PARAMETERS
};
/*!\brief vpx 1-D scaling mode
*
* This set of constants define 1-D vpx scaling modes
*/
typedef enum vpx_scaling_mode_1d
{
VP8E_NORMAL = 0,
VP8E_FOURFIVE = 1,
VP8E_THREEFIVE = 2,
VP8E_ONETWO = 3
typedef enum vpx_scaling_mode_1d {
VP8E_NORMAL = 0,
VP8E_FOURFIVE = 1,
VP8E_THREEFIVE = 2,
VP8E_ONETWO = 3
} VPX_SCALING_MODE;
@ -199,14 +217,18 @@ typedef enum vpx_scaling_mode_1d
*
*/
typedef struct vpx_roi_map
{
unsigned char *roi_map; /**< specify an id between 0 and 3 for each 16x16 region within a frame */
unsigned int rows; /**< number of rows */
unsigned int cols; /**< number of cols */
int delta_q[4]; /**< quantizer delta [-63, 63] off baseline for regions with id between 0 and 3*/
int delta_lf[4]; /**< loop filter strength delta [-63, 63] for regions with id between 0 and 3 */
unsigned int static_threshold[4];/**< threshold for region to be treated as static */
typedef struct vpx_roi_map {
/*! An id between 0 and 3 for each 16x16 region within a frame. */
unsigned char *roi_map;
unsigned int rows; /**< Number of rows. */
unsigned int cols; /**< Number of columns. */
// TODO(paulwilkins): broken for VP9 which has 8 segments
// q and loop filter deltas for each segment
// (see MAX_MB_SEGMENTS)
int delta_q[4]; /**< Quantizer deltas. */
int delta_lf[4]; /**< Loop filter deltas. */
/*! Static breakout threshold for each segment. */
unsigned int static_threshold[4];
} vpx_roi_map_t;
/*!\brief vpx active region map
@ -216,11 +238,10 @@ typedef struct vpx_roi_map
*/
typedef struct vpx_active_map
{
unsigned char *active_map; /**< specify an on (1) or off (0) each 16x16 region within a frame */
unsigned int rows; /**< number of rows */
unsigned int cols; /**< number of cols */
typedef struct vpx_active_map {
unsigned char *active_map; /**< specify an on (1) or off (0) each 16x16 region within a frame */
unsigned int rows; /**< number of rows */
unsigned int cols; /**< number of cols */
} vpx_active_map_t;
/*!\brief vpx image scaling mode
@ -228,10 +249,9 @@ typedef struct vpx_active_map
* This defines the data structure for image scaling mode
*
*/
typedef struct vpx_scaling_mode
{
VPX_SCALING_MODE h_scaling_mode; /**< horizontal scaling mode */
VPX_SCALING_MODE v_scaling_mode; /**< vertical scaling mode */
typedef struct vpx_scaling_mode {
VPX_SCALING_MODE h_scaling_mode; /**< horizontal scaling mode */
VPX_SCALING_MODE v_scaling_mode; /**< vertical scaling mode */
} vpx_scaling_mode_t;
/*!\brief VP8 token partition mode
@ -241,12 +261,11 @@ typedef struct vpx_scaling_mode
*
*/
typedef enum
{
VP8_ONE_TOKENPARTITION = 0,
VP8_TWO_TOKENPARTITION = 1,
VP8_FOUR_TOKENPARTITION = 2,
VP8_EIGHT_TOKENPARTITION = 3
typedef enum {
VP8_ONE_TOKENPARTITION = 0,
VP8_TWO_TOKENPARTITION = 1,
VP8_FOUR_TOKENPARTITION = 2,
VP8_EIGHT_TOKENPARTITION = 3
} vp8e_token_partitions;
@ -255,12 +274,28 @@ typedef enum
* Changes the encoder to tune for certain types of input material.
*
*/
typedef enum
{
VP8_TUNE_PSNR,
VP8_TUNE_SSIM
typedef enum {
VP8_TUNE_PSNR,
VP8_TUNE_SSIM
} vp8e_tuning;
/*!\brief vp9 svc parameters
*
* This defines parameters for svc encoding.
*
*/
typedef struct vpx_svc_parameters {
unsigned int width; /**< width of current spatial layer */
unsigned int height; /**< height of current spatial layer */
int layer; /**< current layer number - 0 = base */
int flags; /**< encode frame flags */
int max_quantizer; /**< max quantizer for current layer */
int min_quantizer; /**< min quantizer for current layer */
int distance_from_i_frame; /**< frame number within current gop */
int lst_fb_idx; /**< last frame frame buffer index */
int gld_fb_idx; /**< golden frame frame buffer index */
int alt_fb_idx; /**< alt reference frame frame buffer index */
} vpx_svc_parameters_t;
/*!\brief VP8 encoder control function parameter type
*
@ -281,6 +316,9 @@ VPX_CTRL_USE_TYPE(VP8E_SET_ROI_MAP, vpx_roi_map_t *)
VPX_CTRL_USE_TYPE(VP8E_SET_ACTIVEMAP, vpx_active_map_t *)
VPX_CTRL_USE_TYPE(VP8E_SET_SCALEMODE, vpx_scaling_mode_t *)
VPX_CTRL_USE_TYPE(VP9E_SET_SVC, int)
VPX_CTRL_USE_TYPE(VP9E_SET_SVC_PARAMETERS, vpx_svc_parameters_t *)
VPX_CTRL_USE_TYPE(VP8E_SET_CPUUSED, int)
VPX_CTRL_USE_TYPE(VP8E_SET_ENABLEAUTOALTREF, unsigned int)
VPX_CTRL_USE_TYPE(VP8E_SET_NOISE_SENSITIVITY, unsigned int)
@ -289,17 +327,28 @@ VPX_CTRL_USE_TYPE(VP8E_SET_STATIC_THRESHOLD, unsigned int)
VPX_CTRL_USE_TYPE(VP8E_SET_TOKEN_PARTITIONS, int) /* vp8e_token_partitions */
VPX_CTRL_USE_TYPE(VP8E_SET_ARNR_MAXFRAMES, unsigned int)
VPX_CTRL_USE_TYPE(VP8E_SET_ARNR_STRENGTH , unsigned int)
VPX_CTRL_USE_TYPE(VP8E_SET_ARNR_TYPE , unsigned int)
VPX_CTRL_USE_TYPE(VP8E_SET_ARNR_STRENGTH, unsigned int)
VPX_CTRL_USE_TYPE(VP8E_SET_ARNR_TYPE, unsigned int)
VPX_CTRL_USE_TYPE(VP8E_SET_TUNING, int) /* vp8e_tuning */
VPX_CTRL_USE_TYPE(VP8E_SET_CQ_LEVEL , unsigned int)
VPX_CTRL_USE_TYPE(VP8E_SET_CQ_LEVEL, unsigned int)
VPX_CTRL_USE_TYPE(VP9E_SET_TILE_COLUMNS, int)
VPX_CTRL_USE_TYPE(VP9E_SET_TILE_ROWS, int)
VPX_CTRL_USE_TYPE(VP8E_GET_LAST_QUANTIZER, int *)
VPX_CTRL_USE_TYPE(VP8E_GET_LAST_QUANTIZER_64, int *)
VPX_CTRL_USE_TYPE(VP8E_SET_MAX_INTRA_BITRATE_PCT, unsigned int)
VPX_CTRL_USE_TYPE(VP9E_SET_LOSSLESS, unsigned int)
VPX_CTRL_USE_TYPE(VP9E_SET_FRAME_PARALLEL_DECODING, unsigned int)
VPX_CTRL_USE_TYPE(VP9E_SET_AQ_MODE, unsigned int)
/*! @} - end defgroup vp8_encoder */
#include "vpx_codec_impl_bottom.h"
#ifdef __cplusplus
} // extern "C"
#endif
#endif
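
A hedged sketch of applying a few of the encoder controls declared above once the encoder context has been initialized; the values are illustrative, not recommendations:

#include "vpx/vpx_encoder.h"
#include "vpx/vp8cx.h"

static void tune_encoder(vpx_codec_ctx_t *encoder)
{
    vpx_codec_control(encoder, VP8E_SET_CPUUSED, 8);                  // speed over quality (-16..16)
    vpx_codec_control(encoder, VP8E_SET_STATIC_THRESHOLD, 800);       // skip near-static macroblocks
    vpx_codec_control(encoder, VP8E_SET_TOKEN_PARTITIONS, VP8_FOUR_TOKENPARTITION);
    vpx_codec_control(encoder, VP8E_SET_MAX_INTRA_BITRATE_PCT, 450);  // keyframes capped at 4.5x avg frame size
}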

View File

@ -22,7 +22,10 @@
*/
#ifndef VP8DX_H
#define VP8DX_H
#include "vpx_codec_impl_top.h"
#ifdef __cplusplus
extern "C" {
#endif
/*!\name Algorithm interface for VP8
*
@ -31,38 +34,64 @@
* @{
*/
extern vpx_codec_iface_t vpx_codec_vp8_dx_algo;
extern vpx_codec_iface_t* vpx_codec_vp8_dx(void);
extern vpx_codec_iface_t *vpx_codec_vp8_dx(void);
/* TODO(jkoleszar): These move to VP9 in a later patch set. */
extern vpx_codec_iface_t vpx_codec_vp9_dx_algo;
extern vpx_codec_iface_t *vpx_codec_vp9_dx(void);
/*!@} - end algorithm interface member group*/
/* Include controls common to both the encoder and decoder */
#include "vp8.h"
/*!\brief VP8 decoder control functions
/*!\enum vp8_dec_control_id
* \brief VP8 decoder control functions
*
* This set of macros define the control functions available for the VP8
* decoder interface.
*
* \sa #vpx_codec_control
*/
enum vp8_dec_control_id
{
/** control function to get info on which reference frames were updated
* by the last decode
enum vp8_dec_control_id {
/** control function to get info on which reference frames were updated
* by the last decode
*/
VP8D_GET_LAST_REF_UPDATES = VP8_DECODER_CTRL_ID_START,
/** check if the indicated frame is corrupted */
VP8D_GET_FRAME_CORRUPTED,
/** control function to get info on which reference frames were used
* by the last decode
*/
VP8D_GET_LAST_REF_USED,
/** decryption function to decrypt encoded buffer data immediately
* before decoding. Takes a vp8_decrypt_init, which contains
* a callback function and opaque context pointer.
*/
VP8D_SET_DECRYPTOR,
/** For testing. */
VP9_INVERT_TILE_DECODE_ORDER,
VP8_DECODER_CTRL_ID_MAX
};
/*!\brief Structure to hold decryption state
*
* Defines a structure to hold the decryption state and access function.
*/
typedef struct vp8_decrypt_init {
/** Decrypt n bytes of data from input -> output, using the decrypt_state
* passed in VP8D_SET_DECRYPTOR.
*/
VP8D_GET_LAST_REF_UPDATES = VP8_DECODER_CTRL_ID_START,
/** check if the indicated frame is corrupted */
VP8D_GET_FRAME_CORRUPTED,
/** control function to get info on which reference frames were used
* by the last decode
*/
VP8D_GET_LAST_REF_USED,
VP8_DECODER_CTRL_ID_MAX
} ;
void (*decrypt_cb)(void *decrypt_state, const unsigned char *input,
unsigned char *output, int count);
/*! Decryption state. */
void *decrypt_state;
} vp8_decrypt_init;
/*!\brief VP8 decoder control function parameter type
*
@ -75,9 +104,13 @@ enum vp8_dec_control_id
VPX_CTRL_USE_TYPE(VP8D_GET_LAST_REF_UPDATES, int *)
VPX_CTRL_USE_TYPE(VP8D_GET_FRAME_CORRUPTED, int *)
VPX_CTRL_USE_TYPE(VP8D_GET_LAST_REF_USED, int *)
VPX_CTRL_USE_TYPE(VP8D_SET_DECRYPTOR, vp8_decrypt_init *)
VPX_CTRL_USE_TYPE(VP9_INVERT_TILE_DECODE_ORDER, int)
/*! @} - end defgroup vp8_decoder */
#include "vpx_codec_impl_bottom.h"
#ifdef __cplusplus
} // extern "C"
#endif
#endif
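
A hedged sketch of wiring the new VP8D_SET_DECRYPTOR control with the vp8_decrypt_init structure declared above; the XOR "cipher" is purely illustrative:

#include "vpx/vp8dx.h"
#include "vpx/vpx_decoder.h"

static void xor_decrypt(void *state, const unsigned char *in, unsigned char *out, int count)
{
    unsigned char key = *(const unsigned char *)state;
    for (int i = 0; i < count; ++i) {
        out[i] = in[i] ^ key;               // runs on buffer data right before decoding
    }
}

static void install_decryptor(vpx_codec_ctx_t *decoder, unsigned char *key)
{
    vp8_decrypt_init di;
    di.decrypt_cb = xor_decrypt;            // callback described in the struct above
    di.decrypt_state = key;                 // opaque pointer handed back to the callback
    vpx_codec_control(decoder, VP8D_SET_DECRYPTOR, &di);
}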

View File

@ -1,63 +0,0 @@
/*
* Copyright (c) 2010 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/* This file contains backwards compatibility stubs for applications using
* the VP8 version 1.0 API.
*/
#ifndef VP8E_H
#define VP8E_H
#include "vpx/vpx_codec_impl_top.h"
#if defined(VPX_CODEC_DISABLE_COMPAT) && VPX_CODEC_DISABLE_COMPAT
#error "Backwards compatibility disabled: don't include vp8e.h"
#endif
#include "vp8cx.h"
DECLSPEC_DEPRECATED extern vpx_codec_iface_t vpx_enc_vp8_algo DEPRECATED;
enum
{
VP8E_SET_REFERENCE = VP8_SET_REFERENCE,
VP8E_COPY_REFERENCE = VP8_COPY_REFERENCE,
VP8E_SET_PREVIEWPP = VP8_SET_POSTPROC,
VP8E_SET_FLUSHFLAG = 4,
VP8E_SET_FRAMETYPE = 10,
VP8E_SET_ENCODING_MODE = 12
};
#define NORMAL_FRAME (0)
#define KEY_FRAME (1)
/* Change VP8E to VP8 to get the undeprecated version of these (defined in
* vp8.h)
*/
VPX_CTRL_USE_TYPE_DEPRECATED(VP8E_SET_REFERENCE, vpx_ref_frame_t *)
VPX_CTRL_USE_TYPE_DEPRECATED(VP8E_COPY_REFERENCE, vpx_ref_frame_t *)
VPX_CTRL_USE_TYPE_DEPRECATED(VP8E_SET_PREVIEWPP, vp8_postproc_cfg_t *)
/* Flush is done by calling vpx_codec_encode with a NULL input image. */
VPX_CTRL_USE_TYPE_DEPRECATED(VP8E_SET_FLUSHFLAG, int)
/* Frame type is set with a flag to vpx_codec_control. See VPX_EFLAG_FORCE_KF
*/
VPX_CTRL_USE_TYPE_DEPRECATED(VP8E_SET_FRAMETYPE, int)
/* This control has been deprecated in favor of the duration parameter to
* vpx_codec_encode(). Use the #VPX_DL_REALTIME, #VPX_DL_GOOD_QUALITY,
* #VPX_DL_BEST_QUALITY constants to that parameter instead.
*/
VPX_CTRL_USE_TYPE_DEPRECATED(VP8E_SET_ENCODING_MODE, vp8e_encoding_mode)
#include "vpx/vpx_codec_impl_bottom.h"
#endif

View File

@ -36,30 +36,38 @@
* Once initialized, the instance is managed using other functions from
* the vpx_codec_* family.
*/
#ifndef VPX_CODEC_H
#define VPX_CODEC_H
#ifdef __cplusplus
extern "C" {
#endif
#ifndef VPX_CODEC_H
#define VPX_CODEC_H
#include "vpx_integer.h"
#include "vpx_image.h"
/*!\brief Decorator indicating a function is deprecated */
#ifndef DEPRECATED
#if defined(__GNUC__) && __GNUC__
#define DEPRECATED __attribute__ ((deprecated))
#elif defined(_MSC_VER)
#define DEPRECATED
#else
#define DEPRECATED
#endif
#endif  /* DEPRECATED */

#ifndef DECLSPEC_DEPRECATED
#if defined(__GNUC__) && __GNUC__
#define DECLSPEC_DEPRECATED /**< \copydoc #DEPRECATED */
#elif defined(_MSC_VER)
#define DECLSPEC_DEPRECATED __declspec(deprecated) /**< \copydoc #DEPRECATED */
#else
#define DECLSPEC_DEPRECATED /**< \copydoc #DEPRECATED */
#endif
#endif  /* DECLSPEC_DEPRECATED */
/*!\brief Decorator indicating a function is potentially unused */
#ifdef UNUSED
#elif __GNUC__
#define UNUSED __attribute__ ((unused))
@ -67,312 +75,310 @@ extern "C" {
#define UNUSED
#endif
/*!\brief Current ABI version number
 *
 * \internal
 * If this file is altered in any way that changes the ABI, this value
 * must be bumped. Examples include, but are not limited to, changing
 * types, removing or reassigning enums, adding/removing/rearranging
 * fields to structures
 */
#define VPX_CODEC_ABI_VERSION (2 + VPX_IMAGE_ABI_VERSION) /**<\hideinitializer*/
/*!\brief Algorithm return codes */
typedef enum {
  /*!\brief Operation completed without error */
  VPX_CODEC_OK,

  /*!\brief Unspecified error */
  VPX_CODEC_ERROR,

  /*!\brief Memory operation failed */
  VPX_CODEC_MEM_ERROR,

  /*!\brief ABI version mismatch */
  VPX_CODEC_ABI_MISMATCH,

  /*!\brief Algorithm does not have required capability */
  VPX_CODEC_INCAPABLE,

  /*!\brief The given bitstream is not supported.
   *
   * The bitstream was unable to be parsed at the highest level. The decoder
   * is unable to proceed. This error \ref SHOULD be treated as fatal to the
   * stream. */
  VPX_CODEC_UNSUP_BITSTREAM,

  /*!\brief Encoded bitstream uses an unsupported feature
   *
   * The decoder does not implement a feature required by the encoder. This
   * return code should only be used for features that prevent future
   * pictures from being properly decoded. This error \ref MAY be treated as
   * fatal to the stream or \ref MAY be treated as fatal to the current GOP.
   */
  VPX_CODEC_UNSUP_FEATURE,

  /*!\brief The coded data for this stream is corrupt or incomplete
   *
   * There was a problem decoding the current frame. This return code
   * should only be used for failures that prevent future pictures from
   * being properly decoded. This error \ref MAY be treated as fatal to the
   * stream or \ref MAY be treated as fatal to the current GOP. If decoding
   * is continued for the current GOP, artifacts may be present.
   */
  VPX_CODEC_CORRUPT_FRAME,

  /*!\brief An application-supplied parameter is not valid.
   *
   */
  VPX_CODEC_INVALID_PARAM,

  /*!\brief An iterator reached the end of list.
   *
   */
  VPX_CODEC_LIST_END
}
vpx_codec_err_t;

/*! \brief Codec capabilities bitfield
 *
 * Each codec advertises the capabilities it supports as part of its
 * ::vpx_codec_iface_t interface structure. Capabilities are extra interfaces
 * or functionality, and are not required to be supported.
 *
 * The available flags are specified by VPX_CODEC_CAP_* defines.
 */
typedef long vpx_codec_caps_t;
#define VPX_CODEC_CAP_DECODER 0x1 /**< Is a decoder */
#define VPX_CODEC_CAP_ENCODER 0x2 /**< Is an encoder */
#define VPX_CODEC_CAP_XMA 0x4 /**< Supports eXternal Memory Allocation */
/*! \brief Initialization-time Feature Enabling
 *
 * Certain codec features must be known at initialization time, to allow for
 * proper memory allocation.
 *
 * The available flags are specified by VPX_CODEC_USE_* defines.
 */
typedef long vpx_codec_flags_t;
#define VPX_CODEC_USE_XMA 0x00000001 /**< Use eXternal Memory Allocation mode */
/*!\brief Codec interface structure.
 *
 * Contains function pointers and other data private to the codec
 * implementation. This structure is opaque to the application.
 */
typedef const struct vpx_codec_iface vpx_codec_iface_t;
/*!\brief Codec private data structure.
 *
 * Contains data private to the codec implementation. This structure is opaque
 * to the application.
 */
typedef struct vpx_codec_priv vpx_codec_priv_t;
/*!\brief Iterator
 *
 * Opaque storage used for iterating over lists.
 */
typedef const void *vpx_codec_iter_t;
/*!\brief Codec context structure
 *
 * All codecs \ref MUST support this context structure fully. In general,
 * this data should be considered private to the codec algorithm, and
 * not be manipulated or examined by the calling application. Applications
 * may reference the 'name' member to get a printable description of the
 * algorithm.
 */
typedef struct vpx_codec_ctx {
  const char          *name;        /**< Printable interface name */
  vpx_codec_iface_t   *iface;       /**< Interface pointers */
  vpx_codec_err_t      err;         /**< Last returned error */
  const char          *err_detail;  /**< Detailed info, if available */
  vpx_codec_flags_t    init_flags;  /**< Flags passed at init time */
  union {
    struct vpx_codec_dec_cfg *dec;  /**< Decoder Configuration Pointer */
    struct vpx_codec_enc_cfg *enc;  /**< Encoder Configuration Pointer */
    void                     *raw;
  } config;                         /**< Configuration pointer aliasing union */
  vpx_codec_priv_t    *priv;        /**< Algorithm private storage */
} vpx_codec_ctx_t;
/*
 * Library Version Number Interface
 *
 * For example, see the following sample return values:
 *     vpx_codec_version()           (1<<16 | 2<<8 | 3)
 *     vpx_codec_version_str()       "v1.2.3-rc1-16-gec6a1ba"
 *     vpx_codec_version_extra_str() "rc1-16-gec6a1ba"
 */
/*!\brief Return the version information (as an integer)
 *
 * Returns a packed encoding of the library version number. This will only include
 * the major.minor.patch component of the version number. Note that this encoded
 * value should be accessed through the macros provided, as the encoding may change
 * in the future.
 *
 */
int vpx_codec_version(void);
#define VPX_VERSION_MAJOR(v) ((v>>16)&0xff) /**< extract major from packed version */
#define VPX_VERSION_MINOR(v) ((v>>8)&0xff) /**< extract minor from packed version */
#define VPX_VERSION_PATCH(v) ((v>>0)&0xff) /**< extract patch from packed version */
/*!\brief Return the version major number */
#define vpx_codec_version_major() ((vpx_codec_version()>>16)&0xff)

/*!\brief Return the version minor number */
#define vpx_codec_version_minor() ((vpx_codec_version()>>8)&0xff)

/*!\brief Return the version patch number */
#define vpx_codec_version_patch() ((vpx_codec_version()>>0)&0xff)
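/* Illustrative sketch (not part of this header): unpacking the packed
 * version number with the macros above.
 */
#include <stdio.h>
#include "vpx/vpx_codec.h"

static void print_vpx_version(void) {
  /* vpx_codec_version() packs major/minor/patch as (maj<<16 | min<<8 | patch) */
  printf("libvpx %d.%d.%d (%s)\n",
         vpx_codec_version_major(),
         vpx_codec_version_minor(),
         vpx_codec_version_patch(),
         vpx_codec_version_str());
}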
/*!\brief Return the version information (as a string)
 *
 * Returns a printable string containing the full library version number. This may
 * contain additional text following the three digit version number, as to indicate
 * release candidates, prerelease versions, etc.
 *
 */
const char *vpx_codec_version_str(void);
/*!\brief Return the version information (as a string)
 *
 * Returns a printable "extra string". This is the component of the string returned
 * by vpx_codec_version_str() following the three digit version number.
 *
 */
const char *vpx_codec_version_extra_str(void);
/*!\brief Return the build configuration
 *
 * Returns a printable string containing an encoded version of the build
 * configuration. This may be useful to vpx support.
 *
 */
const char *vpx_codec_build_config(void);
/*!\brief Return the name for a given interface
 *
 * Returns a human readable string for name of the given codec interface.
 *
 * \param[in]    iface     Interface pointer
 *
 */
const char *vpx_codec_iface_name(vpx_codec_iface_t *iface);
/*!\brief Convert error number to printable string
 *
 * Returns a human readable string for the last error returned by the
 * algorithm. The returned error will be one line and will not contain
 * any newline characters.
 *
 *
 * \param[in]    err     Error number.
 *
 */
const char *vpx_codec_err_to_string(vpx_codec_err_t err);
/*!\brief Retrieve error synopsis for codec context
 *
 * Returns a human readable string for the last error returned by the
 * algorithm. The returned error will be one line and will not contain
 * any newline characters.
 *
 *
 * \param[in]    ctx     Pointer to this instance's context.
 *
 */
const char *vpx_codec_error(vpx_codec_ctx_t *ctx);
/*!\brief Retrieve detailed error information for codec context
 *
 * Returns a human readable string providing detailed information about
 * the last error.
 *
 * \param[in]    ctx     Pointer to this instance's context.
 *
 * \retval NULL
 *     No detailed information is available.
 */
const char *vpx_codec_error_detail(vpx_codec_ctx_t *ctx);
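/* Illustrative sketch (not part of this header): a small error-reporting
 * helper built on the query functions above.
 */
#include <stdio.h>
#include "vpx/vpx_codec.h"

static void report_codec_error(vpx_codec_ctx_t *ctx, const char *what) {
  const char *detail = vpx_codec_error_detail(ctx);   /* may be NULL */
  fprintf(stderr, "%s failed: %s\n", what, vpx_codec_error(ctx));
  if (detail)
    fprintf(stderr, "  detail: %s\n", detail);
}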
/* REQUIRED FUNCTIONS
 *
 * The following functions are required to be implemented for all codecs.
 * They represent the base case functionality expected of all codecs.
 */
/*!\brief Destroy a codec instance
 *
 * Destroys a codec context, freeing any associated memory buffers.
 *
 * \param[in] ctx   Pointer to this instance's context
 *
 * \retval #VPX_CODEC_OK
 *     The codec algorithm initialized.
 * \retval #VPX_CODEC_MEM_ERROR
 *     Memory allocation failed.
 */
vpx_codec_err_t vpx_codec_destroy(vpx_codec_ctx_t *ctx);
/*!\brief Get the capabilities of an algorithm.
 *
 * Retrieves the capabilities bitfield from the algorithm's interface.
 *
 * \param[in] iface   Pointer to the algorithm interface
 *
 */
vpx_codec_caps_t vpx_codec_get_caps(vpx_codec_iface_t *iface);
/*!\brief Control algorithm
 *
 * This function is used to exchange algorithm specific data with the codec
 * instance. This can be used to implement features specific to a particular
 * algorithm.
 *
 * This wrapper function dispatches the request to the helper function
 * associated with the given ctrl_id. It tries to call this function
 * transparently, but will return #VPX_CODEC_ERROR if the request could not
 * be dispatched.
 *
 * Note that this function should not be used directly. Call the
 * #vpx_codec_control wrapper macro instead.
 *
 * \param[in]     ctx              Pointer to this instance's context
 * \param[in]     ctrl_id          Algorithm specific control identifier
 *
 * \retval #VPX_CODEC_OK
 *     The control request was processed.
 * \retval #VPX_CODEC_ERROR
 *     The control request was not processed.
 * \retval #VPX_CODEC_INVALID_PARAM
 *     The data was not valid.
 */
vpx_codec_err_t vpx_codec_control_(vpx_codec_ctx_t *ctx,
                                   int ctrl_id,
                                   ...);
#if defined(VPX_DISABLE_CTRL_TYPECHECKS) && VPX_DISABLE_CTRL_TYPECHECKS
# define vpx_codec_control(ctx,id,data) vpx_codec_control_(ctx,id,data)
# define VPX_CTRL_USE_TYPE(id, typ)
@ -380,175 +386,173 @@ extern "C" {
# define VPX_CTRL_VOID(id, typ)
#else
/*!\brief vpx_codec_control wrapper macro
 *
 * This macro allows for type safe conversions across the variadic parameter
 * to vpx_codec_control_().
 *
 * \internal
 * It works by dispatching the call to the control function through a wrapper
 * function named with the id parameter.
 */
# define vpx_codec_control(ctx,id,data) vpx_codec_control_##id(ctx,id,data)\
  /**<\hideinitializer*/
/*!\brief vpx_codec_control type definition macro
 *
 * This macro allows for type safe conversions across the variadic parameter
 * to vpx_codec_control_(). It defines the type of the argument for a given
 * control identifier.
 *
 * \internal
 * It defines a static function with
 * the correctly typed arguments as a wrapper to the type-unsafe internal
 * function.
 */
# define VPX_CTRL_USE_TYPE(id, typ) \
  static vpx_codec_err_t \
  vpx_codec_control_##id(vpx_codec_ctx_t*, int, typ) UNUSED;\
  \
  static vpx_codec_err_t \
  vpx_codec_control_##id(vpx_codec_ctx_t *ctx, int ctrl_id, typ data) {\
    return vpx_codec_control_(ctx, ctrl_id, data);\
  } /**<\hideinitializer*/
/*!\brief vpx_codec_control deprecated type definition macro
 *
 * Like #VPX_CTRL_USE_TYPE, but indicates that the specified control is
 * deprecated and should not be used. Consult the documentation for your
 * codec for more information.
 *
 * \internal
 * It defines a static function with the correctly typed arguments as a
 * wrapper to the type-unsafe internal function.
 */
# define VPX_CTRL_USE_TYPE_DEPRECATED(id, typ) \
  DECLSPEC_DEPRECATED static vpx_codec_err_t \
  vpx_codec_control_##id(vpx_codec_ctx_t*, int, typ) DEPRECATED UNUSED;\
  \
  DECLSPEC_DEPRECATED static vpx_codec_err_t \
  vpx_codec_control_##id(vpx_codec_ctx_t *ctx, int ctrl_id, typ data) {\
    return vpx_codec_control_(ctx, ctrl_id, data);\
  } /**<\hideinitializer*/
/*!\brief vpx_codec_control void type definition macro
 *
 * This macro allows for type safe conversions across the variadic parameter
 * to vpx_codec_control_(). It indicates that a given control identifier takes
 * no argument.
 *
 * \internal
 * It defines a static function without a data argument as a wrapper to the
 * type-unsafe internal function.
 */
# define VPX_CTRL_VOID(id) \
  static vpx_codec_err_t \
  vpx_codec_control_##id(vpx_codec_ctx_t*, int) UNUSED;\
  \
  static vpx_codec_err_t \
  vpx_codec_control_##id(vpx_codec_ctx_t *ctx, int ctrl_id) {\
    return vpx_codec_control_(ctx, ctrl_id);\
  } /**<\hideinitializer*/
#endif
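/* Illustrative sketch (not part of this header): with the typed wrappers
 * generated above, vpx_codec_control(ctx, VP8D_GET_FRAME_CORRUPTED, &flag)
 * expands to vpx_codec_control_VP8D_GET_FRAME_CORRUPTED(ctx, id, flag), so
 * the data argument is checked against the int * type declared for this id.
 */
#include "vpx/vp8dx.h"

static int last_frame_corrupted(vpx_codec_ctx_t *decoder) {
  int corrupted = 0;
  if (vpx_codec_control(decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted))
    return -1;   /* control not supported or failed */
  return corrupted;
}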
/*!\defgroup cap_xma External Memory Allocation Functions
 *
 * The following functions are required to be implemented for all codecs
 * that advertise the VPX_CODEC_CAP_XMA capability. Calling these functions
 * for codecs that don't advertise this capability will result in an error
 * code being returned, usually VPX_CODEC_INCAPABLE
 * @{
 */
/*!\brief Memory Map Entry
 *
 * This structure is used to contain the properties of a memory segment. It
 * is populated by the codec in the request phase, and by the calling
 * application once the requested allocation has been performed.
 */
typedef struct vpx_codec_mmap {
  /*
   * The following members are set by the codec when requesting a segment
   */
  unsigned int   id;     /**< identifier for the segment's contents */
  unsigned long  sz;     /**< size of the segment, in bytes */
  unsigned int   align;  /**< required alignment of the segment, in bytes */
  unsigned int   flags;  /**< bitfield containing segment properties */
#define VPX_CODEC_MEM_ZERO   0x1  /**< Segment must be zeroed by allocation */
#define VPX_CODEC_MEM_WRONLY 0x2  /**< Segment need not be readable */
#define VPX_CODEC_MEM_FAST   0x4  /**< Place in fast memory, if available */

  /* The following members are to be filled in by the allocation function */
  void          *base;   /**< pointer to the allocated segment */
  void (*dtor)(struct vpx_codec_mmap *map);   /**< destructor to call */
  void          *priv;   /**< allocator private storage */
} vpx_codec_mmap_t; /**< alias for struct vpx_codec_mmap */
/*!\brief Iterate over the list of segments to allocate.
 *
 * Iterates over a list of the segments to allocate. The iterator storage
 * should be initialized to NULL to start the iteration. Iteration is complete
 * when this function returns VPX_CODEC_LIST_END. The amount of memory needed to
 * allocate is dependent upon the size of the encoded stream. In cases where the
 * stream is not available at allocation time, a fixed size must be requested.
 * The codec will not be able to operate on streams larger than the size used at
 * allocation time.
 *
 * \param[in]      ctx     Pointer to this instance's context.
 * \param[out]     mmap    Pointer to the memory map entry to populate.
 * \param[in,out]  iter    Iterator storage, initialized to NULL
 *
 * \retval #VPX_CODEC_OK
 *     The memory map entry was populated.
 * \retval #VPX_CODEC_ERROR
 *     Codec does not support XMA mode.
 * \retval #VPX_CODEC_MEM_ERROR
 *     Unable to determine segment size from stream info.
 */
vpx_codec_err_t vpx_codec_get_mem_map(vpx_codec_ctx_t *ctx,
                                      vpx_codec_mmap_t *mmap,
                                      vpx_codec_iter_t *iter);
/*!\brief Identify allocated segments to codec instance
 *
 * Stores a list of allocated segments in the codec. Segments \ref MUST be
 * passed in the order they are read from vpx_codec_get_mem_map(), but may be
 * passed in groups of any size. Segments \ref MUST be set only once. The
 * allocation function \ref MUST ensure that the vpx_codec_mmap_t::base member
 * is non-NULL. If the segment requires cleanup handling (e.g., calling free()
 * or close()) then the vpx_codec_mmap_t::dtor member \ref MUST be populated.
 *
 * \param[in]      ctx       Pointer to this instance's context.
 * \param[in]      mmaps     Pointer to the first memory map entry in the list.
 * \param[in]      num_maps  Number of entries being set at this time
 *
 * \retval #VPX_CODEC_OK
 *     The segment was stored in the codec context.
 * \retval #VPX_CODEC_INCAPABLE
 *     Codec does not support XMA mode.
 * \retval #VPX_CODEC_MEM_ERROR
 *     Segment base address was not set, or segment was already stored.
 */
vpx_codec_err_t vpx_codec_set_mem_map(vpx_codec_ctx_t *ctx,
                                      vpx_codec_mmap_t *mmaps,
                                      unsigned int num_maps);
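/* Illustrative sketch (not part of this header): the allocation handshake for
 * a codec initialized with VPX_CODEC_USE_XMA. Alignment handling is omitted
 * for brevity; a real allocator must honor seg.align. The helper names are
 * hypothetical.
 */
#include <stdlib.h>
#include "vpx/vpx_codec.h"

static void xma_segment_dtor(struct vpx_codec_mmap *map) {
  free(map->base);                      /* release what we allocated below */
}

static vpx_codec_err_t allocate_xma_segments(vpx_codec_ctx_t *ctx) {
  vpx_codec_mmap_t seg;
  vpx_codec_iter_t iter = NULL;
  vpx_codec_err_t res;

  while ((res = vpx_codec_get_mem_map(ctx, &seg, &iter)) == VPX_CODEC_OK) {
    /* Honor the "must be zeroed" flag; real code should also honor seg.align */
    seg.base = (seg.flags & VPX_CODEC_MEM_ZERO) ? calloc(1, seg.sz)
                                                : malloc(seg.sz);
    seg.dtor = xma_segment_dtor;        /* codec calls this on destroy */
    if (!seg.base)
      return VPX_CODEC_MEM_ERROR;
    if ((res = vpx_codec_set_mem_map(ctx, &seg, 1)) != VPX_CODEC_OK)
      return res;
  }
  return (res == VPX_CODEC_LIST_END) ? VPX_CODEC_OK : res;
}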
/*!@} - end defgroup cap_xma*/
/*!@} - end defgroup codec*/
#ifdef __cplusplus
}
#endif
#endif

View File

@ -1,19 +0,0 @@
/*
* Copyright (c) 2010 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file is to be included at the bottom of the header files defining the
* interface to individual codecs and contains matching blocks to those defined
* in vpx_codec_impl_top.h
*/
#ifdef __cplusplus
}
#endif

View File

@ -1,19 +0,0 @@
/*
* Copyright (c) 2010 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*
* This file is to be included at the top of the header files defining the
* interface to individual codecs and contains various workarounds common
* to all codec implementations.
*/
#ifdef __cplusplus
extern "C" {
#endif

View File

@ -7,7 +7,8 @@
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#ifndef VPX_DECODER_H
#define VPX_DECODER_H
/*!\defgroup decoder Decoder Algorithm Interface
* \ingroup codec
@ -28,306 +29,306 @@
extern "C" {
#endif
#include "vpx_codec.h"
/*!\brief Current ABI version number
 *
 * \internal
 * If this file is altered in any way that changes the ABI, this value
 * must be bumped. Examples include, but are not limited to, changing
 * types, removing or reassigning enums, adding/removing/rearranging
 * fields to structures
 */
#define VPX_DECODER_ABI_VERSION (2 + VPX_CODEC_ABI_VERSION) /**<\hideinitializer*/
/*! \brief Decoder capabilities bitfield
 *
 * Each decoder advertises the capabilities it supports as part of its
 * ::vpx_codec_iface_t interface structure. Capabilities are extra interfaces
 * or functionality, and are not required to be supported by a decoder.
 *
 * The available flags are specified by VPX_CODEC_CAP_* defines.
 */
#define VPX_CODEC_CAP_PUT_SLICE 0x10000 /**< Will issue put_slice callbacks */
#define VPX_CODEC_CAP_PUT_FRAME 0x20000 /**< Will issue put_frame callbacks */
#define VPX_CODEC_CAP_POSTPROC 0x40000 /**< Can postprocess decoded frame */
#define VPX_CODEC_CAP_ERROR_CONCEALMENT 0x80000 /**< Can conceal errors due to
                                                     packet loss */
#define VPX_CODEC_CAP_INPUT_FRAGMENTS 0x100000 /**< Can receive encoded frames
                                                    one fragment at a time */
#define VPX_CODEC_CAP_FRAME_THREADING 0x200000 /**< Can support frame-based
                                                    multi-threading */

/*! \brief Initialization-time Feature Enabling
 *
 * Certain codec features must be known at initialization time, to allow for
 * proper memory allocation.
 *
 * The available flags are specified by VPX_CODEC_USE_* defines.
 */
#define VPX_CODEC_USE_POSTPROC 0x10000 /**< Postprocess decoded frame */
#define VPX_CODEC_USE_ERROR_CONCEALMENT 0x20000 /**< Conceal errors in decoded
                                                     frames */
#define VPX_CODEC_USE_INPUT_FRAGMENTS 0x40000 /**< The input frame should be
                                                   passed to the decoder one
                                                   fragment at a time */
#define VPX_CODEC_USE_FRAME_THREADING 0x80000 /**< Enable frame-based
multi-threading */
/*!\brief Stream properties
 *
 * This structure is used to query or set properties of the decoded
 * stream. Algorithms may extend this structure with data specific
 * to their bitstream by setting the sz member appropriately.
 */
typedef struct vpx_codec_stream_info {
  unsigned int sz;     /**< Size of this structure */
  unsigned int w;      /**< Width (or 0 for unknown/default) */
  unsigned int h;      /**< Height (or 0 for unknown/default) */
  unsigned int is_kf;  /**< Current frame is a keyframe */
} vpx_codec_stream_info_t;
/* REQUIRED FUNCTIONS
 *
 * The following functions are required to be implemented for all decoders.
 * They represent the base case functionality expected of all decoders.
 */
/*!\brief Initialization Configurations
 *
 * This structure is used to pass init time configuration options to the
 * decoder.
 */
typedef struct vpx_codec_dec_cfg {
  unsigned int threads; /**< Maximum number of threads to use, default 1 */
  unsigned int w;       /**< Width */
  unsigned int h;       /**< Height */
} vpx_codec_dec_cfg_t;  /**< alias for struct vpx_codec_dec_cfg */
/*!\brief Initialize a decoder instance
 *
 * Initializes a decoder context using the given interface. Applications
 * should call the vpx_codec_dec_init convenience macro instead of this
 * function directly, to ensure that the ABI version number parameter
 * is properly initialized.
 *
 * If the library was configured with --disable-multithread, this call
 * is not thread safe and should be guarded with a lock if being used
 * in a multithreaded context.
 *
 * In XMA mode (activated by setting VPX_CODEC_USE_XMA in the flags
 * parameter), the storage pointed to by the cfg parameter must be
 * kept readable and stable until all memory maps have been set.
 *
 * \param[in]    ctx     Pointer to this instance's context.
 * \param[in]    iface   Pointer to the algorithm interface to use.
 * \param[in]    cfg     Configuration to use, if known. May be NULL.
 * \param[in]    flags   Bitfield of VPX_CODEC_USE_* flags
 * \param[in]    ver     ABI version number. Must be set to
 *                       VPX_DECODER_ABI_VERSION
 * \retval #VPX_CODEC_OK
 *     The decoder algorithm initialized.
 * \retval #VPX_CODEC_MEM_ERROR
 *     Memory allocation failed.
 */
vpx_codec_err_t vpx_codec_dec_init_ver(vpx_codec_ctx_t *ctx,
                                       vpx_codec_iface_t *iface,
                                       vpx_codec_dec_cfg_t *cfg,
                                       vpx_codec_flags_t flags,
                                       int ver);
/*!\brief Convenience macro for vpx_codec_dec_init_ver()
 *
 * Ensures the ABI version parameter is properly set.
 */
#define vpx_codec_dec_init(ctx, iface, cfg, flags) \
  vpx_codec_dec_init_ver(ctx, iface, cfg, flags, VPX_DECODER_ABI_VERSION)
/*!\brief Parse stream info from a buffer
 *
 * Performs high level parsing of the bitstream. Construction of a decoder
 * context is not necessary. Can be used to determine if the bitstream is
 * of the proper format, and to extract information from the stream.
 *
 * \param[in]      iface   Pointer to the algorithm interface
 * \param[in]      data    Pointer to a block of data to parse
 * \param[in]      data_sz Size of the data buffer
 * \param[in,out]  si      Pointer to stream info to update. The size member
 *                         \ref MUST be properly initialized, but \ref MAY be
 *                         clobbered by the algorithm. This parameter \ref MAY
 *                         be NULL.
 *
 * \retval #VPX_CODEC_OK
 *     Bitstream is parsable and stream information updated
 */
vpx_codec_err_t vpx_codec_peek_stream_info(vpx_codec_iface_t *iface,
                                           const uint8_t *data,
                                           unsigned int data_sz,
                                           vpx_codec_stream_info_t *si);
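/* Illustrative sketch (not part of this header): probing a buffer before
 * creating a decoder. The VP8 interface vpx_codec_vp8_dx() from vp8dx.h is
 * used here only as an example.
 */
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"

static int probe_keyframe(const uint8_t *data, unsigned int data_sz,
                          unsigned int *w, unsigned int *h) {
  vpx_codec_stream_info_t si;
  si.sz = sizeof(si);                       /* size member MUST be set */
  if (vpx_codec_peek_stream_info(vpx_codec_vp8_dx(), data, data_sz, &si))
    return 0;                               /* buffer is not parsable */
  *w = si.w;
  *h = si.h;
  return si.is_kf;                          /* non-zero on a keyframe */
}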
/*!\brief Return information about the current stream.
 *
 * Returns information about the stream that has been parsed during decoding.
 *
 * \param[in]      ctx     Pointer to this instance's context
 * \param[in,out]  si      Pointer to stream info to update. The size member
 *                         \ref MUST be properly initialized, but \ref MAY be
 *                         clobbered by the algorithm. This parameter \ref MAY
 *                         be NULL.
 *
 * \retval #VPX_CODEC_OK
 *     Bitstream is parsable and stream information updated
 */
vpx_codec_err_t vpx_codec_get_stream_info(vpx_codec_ctx_t *ctx,
                                          vpx_codec_stream_info_t *si);
/*!\brief Decode data
 *
 * Processes a buffer of coded data. If the processing results in a new
 * decoded frame becoming available, PUT_SLICE and PUT_FRAME events may be
 * generated, as appropriate. Encoded data \ref MUST be passed in DTS (decode
 * time stamp) order. Frames produced will always be in PTS (presentation
 * time stamp) order.
 * If the decoder is configured with VPX_CODEC_USE_INPUT_FRAGMENTS enabled,
 * data and data_sz can contain a fragment of the encoded frame. Fragment
 * \#n must contain at least partition \#n, but can also contain subsequent
 * partitions (\#n+1 - \#n+i), and if so, fragments \#n+1, .., \#n+i must
 * be empty. When no more data is available, this function should be called
 * with NULL as data and 0 as data_sz. The memory passed to this function
 * must be available until the frame has been decoded.
 *
 * \param[in] ctx          Pointer to this instance's context
 * \param[in] data         Pointer to this block of new coded data. If
 *                         NULL, a VPX_CODEC_CB_PUT_FRAME event is posted
 *                         for the previously decoded frame.
 * \param[in] data_sz      Size of the coded data, in bytes.
 * \param[in] user_priv    Application specific data to associate with
 *                         this frame.
 * \param[in] deadline     Soft deadline the decoder should attempt to meet,
 *                         in us. Set to zero for unlimited.
 *
 * \return Returns #VPX_CODEC_OK if the coded data was processed completely
 *         and future pictures can be decoded without error. Otherwise,
 *         see the descriptions of the other error codes in ::vpx_codec_err_t
 *         for recoverability capabilities.
 */
vpx_codec_err_t vpx_codec_decode(vpx_codec_ctx_t *ctx,
                                 const uint8_t *data,
                                 unsigned int data_sz,
                                 void *user_priv,
                                 long deadline);
/*!\brief Decoded frames iterator
 *
 * Iterates over a list of the frames available for display. The iterator
 * storage should be initialized to NULL to start the iteration. Iteration is
 * complete when this function returns NULL.
 *
 * The list of available frames becomes valid upon completion of the
 * vpx_codec_decode call, and remains valid until the next call to vpx_codec_decode.
 *
 * \param[in]     ctx      Pointer to this instance's context
 * \param[in,out] iter     Iterator storage, initialized to NULL
 *
 * \return Returns a pointer to an image, if one is ready for display. Frames
 *         produced will always be in PTS (presentation time stamp) order.
 */
vpx_image_t *vpx_codec_get_frame(vpx_codec_ctx_t *ctx,
                                 vpx_codec_iter_t *iter);
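/* Illustrative sketch (not part of this header): the canonical decode loop
 * built on vpx_codec_decode() and vpx_codec_get_frame(). The VP8 interface
 * is used only as an example and error handling is minimal.
 */
#include <stddef.h>
#include "vpx/vpx_decoder.h"
#include "vpx/vp8dx.h"

static int decode_one_frame(vpx_codec_ctx_t *decoder,
                            const uint8_t *data, unsigned int data_sz) {
  vpx_codec_iter_t iter = NULL;
  vpx_image_t *img;
  int frames = 0;

  /* Pass one encoded frame (DTS order); a zero deadline means "no time limit". */
  if (vpx_codec_decode(decoder, data, data_sz, NULL, 0))
    return -1;

  /* Drain every image that became ready for display (PTS order). */
  while ((img = vpx_codec_get_frame(decoder, &iter)) != NULL)
    ++frames;                       /* consume img->planes[] here */

  return frames;
}

/* Typical setup/teardown around the loop above:
 *   vpx_codec_ctx_t decoder;
 *   vpx_codec_dec_init(&decoder, vpx_codec_vp8_dx(), NULL, 0);
 *   ... call decode_one_frame(&decoder, buf, buf_sz) per encoded frame ...
 *   vpx_codec_destroy(&decoder);
 */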
/*!\defgroup cap_put_frame Frame-Based Decoding Functions
 *
 * The following functions are required to be implemented for all decoders
 * that advertise the VPX_CODEC_CAP_PUT_FRAME capability. Calling these functions
 * for codecs that don't advertise this capability will result in an error
 * code being returned, usually VPX_CODEC_ERROR
 * @{
 */
/*!\brief put frame callback prototype
 *
 * This callback is invoked by the decoder to notify the application of
 * the availability of decoded image data.
 */
typedef void (*vpx_codec_put_frame_cb_fn_t)(void *user_priv,
                                            const vpx_image_t *img);
/*!\brief Register for notification of frame completion.
 *
 * Registers a given function to be called when a decoded frame is
 * available.
 *
 * \param[in] ctx          Pointer to this instance's context
 * \param[in] cb           Pointer to the callback function
 * \param[in] user_priv    User's private data
 *
 * \retval #VPX_CODEC_OK
 *     Callback successfully registered.
 * \retval #VPX_CODEC_ERROR
 *     Decoder context not initialized, or algorithm not capable of
 *     posting slice completion.
 */
vpx_codec_err_t vpx_codec_register_put_frame_cb(vpx_codec_ctx_t *ctx,
                                                vpx_codec_put_frame_cb_fn_t cb,
                                                void *user_priv);
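/* Illustrative sketch (not part of this header): push-style delivery for
 * decoders advertising VPX_CODEC_CAP_PUT_FRAME. Registration fails with
 * VPX_CODEC_ERROR when the capability is absent; the callback and counter
 * names are hypothetical.
 */
#include <stdio.h>
#include "vpx/vpx_decoder.h"

static void on_decoded_frame(void *user_priv, const vpx_image_t *img) {
  unsigned int *counter = (unsigned int *)user_priv;
  ++*counter;
  printf("frame %u: %ux%u\n", *counter, img->d_w, img->d_h);
}

static vpx_codec_err_t enable_frame_callback(vpx_codec_ctx_t *decoder,
                                             unsigned int *counter) {
  return vpx_codec_register_put_frame_cb(decoder, on_decoded_frame, counter);
}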
/*!@} - end defgroup cap_put_frame */
/*!\defgroup cap_put_slice Slice-Based Decoding Functions
 *
 * The following functions are required to be implemented for all decoders
 * that advertise the VPX_CODEC_CAP_PUT_SLICE capability. Calling these functions
 * for codecs that don't advertise this capability will result in an error
 * code being returned, usually VPX_CODEC_ERROR
 * @{
 */
/*!\brief put slice callback prototype
 *
 * This callback is invoked by the decoder to notify the application of
 * the availability of partially decoded image data. The
 */
typedef void (*vpx_codec_put_slice_cb_fn_t)(void *user_priv,
                                            const vpx_image_t *img,
                                            const vpx_image_rect_t *valid,
                                            const vpx_image_rect_t *update);
/*!\brief Register for notification of slice completion.
 *
 * Registers a given function to be called when a decoded slice is
 * available.
 *
 * \param[in] ctx          Pointer to this instance's context
 * \param[in] cb           Pointer to the callback function
 * \param[in] user_priv    User's private data
 *
 * \retval #VPX_CODEC_OK
 *     Callback successfully registered.
 * \retval #VPX_CODEC_ERROR
 *     Decoder context not initialized, or algorithm not capable of
 *     posting slice completion.
 */
vpx_codec_err_t vpx_codec_register_put_slice_cb(vpx_codec_ctx_t *ctx,
                                                vpx_codec_put_slice_cb_fn_t cb,
                                                void *user_priv);
/*!@} - end defgroup cap_put_slice*/
/*!@} - end defgroup decoder*/
#ifdef __cplusplus
}
#endif
#endif

View File

@ -1,587 +0,0 @@
/*
* Copyright (c) 2010 The WebM project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
/*!\defgroup decoder Common Decoder Algorithm Interface
* This abstraction allows applications using this decoder to easily support
* multiple video formats with minimal code duplication. This section describes
* the interface common to all codecs.
* @{
*/
/*!\file
* \brief Provides a compatibility layer between version 1 and 2 of this API.
*
* This interface has been deprecated. Only existing code should make use
* of this interface, and therefore, it is only thinly documented. Existing
* code should be ported to the vpx_codec_* API.
*/
#ifdef __cplusplus
extern "C" {
#endif
#ifndef VPX_DECODER_COMPAT_H
#define VPX_DECODER_COMPAT_H
/*!\brief Decoder algorithm return codes */
typedef enum {
/*!\brief Operation completed without error */
VPX_DEC_OK = VPX_CODEC_OK,
/*!\brief Unspecified error */
VPX_DEC_ERROR = VPX_CODEC_ERROR,
/*!\brief Memory operation failed */
VPX_DEC_MEM_ERROR = VPX_CODEC_MEM_ERROR,
/*!\brief ABI version mismatch */
VPX_DEC_ABI_MISMATCH = VPX_CODEC_ABI_MISMATCH,
/*!\brief The given bitstream is not supported.
*
* The bitstream was unable to be parsed at the highest level. The decoder
* is unable to proceed. This error \ref SHOULD be treated as fatal to the
* stream. */
VPX_DEC_UNSUP_BITSTREAM = VPX_CODEC_UNSUP_BITSTREAM,
/*!\brief Encoded bitstream uses an unsupported feature
*
* The decoder does not implement a feature required by the encoder. This
* return code should only be used for features that prevent future
* pictures from being properly decoded. This error \ref MAY be treated as
* fatal to the stream or \ref MAY be treated as fatal to the current GOP.
*/
VPX_DEC_UNSUP_FEATURE = VPX_CODEC_UNSUP_FEATURE,
/*!\brief The coded data for this stream is corrupt or incomplete
*
* There was a problem decoding the current frame. This return code
* should only be used for failures that prevent future pictures from
* being properly decoded. This error \ref MAY be treated as fatal to the
* stream or \ref MAY be treated as fatal to the current GOP. If decoding
* is continued for the current GOP, artifacts may be present.
*/
VPX_DEC_CORRUPT_FRAME = VPX_CODEC_CORRUPT_FRAME,
/*!\brief An application-supplied parameter is not valid.
*
*/
VPX_DEC_INVALID_PARAM = VPX_CODEC_INVALID_PARAM,
/*!\brief An iterator reached the end of list.
*
*/
VPX_DEC_LIST_END = VPX_CODEC_LIST_END
}
vpx_dec_err_t;
/*! \brief Decoder capabilities bitfield
*
* Each decoder advertises the capabilities it supports as part of its
* ::vpx_dec_iface_t interface structure. Capabilities are extra interfaces
* or functionality, and are not required to be supported by a decoder.
*
* The available flags are specified by VPX_DEC_CAP_* defines.
*/
typedef int vpx_dec_caps_t;
#define VPX_DEC_CAP_PUT_SLICE 0x0001 /**< Will issue put_slice callbacks */
#define VPX_DEC_CAP_PUT_FRAME 0x0002 /**< Will issue put_frame callbacks */
#define VPX_DEC_CAP_XMA 0x0004 /**< Supports eXternal Memory Allocation */
/*!\brief Stream properties
*
* This structure is used to query or set properties of the decoded
* stream. Algorithms may extend this structure with data specific
* to their bitstream by setting the sz member appropriately.
*/
#if 1
typedef vpx_codec_stream_info_t vpx_dec_stream_info_t;
#else
typedef struct
{
unsigned int sz; /**< Size of this structure */
unsigned int w; /**< Width (or 0 for unknown/default) */
unsigned int h; /**< Height (or 0 for unknown/default) */
unsigned int is_kf; /**< Current frame is a keyframe */
} vpx_dec_stream_info_t;
#endif
/*!\brief Decoder interface structure.
*
* Contains function pointers and other data private to the decoder
* implementation. This structure is opaque to the application.
*/
typedef const struct vpx_codec_iface vpx_dec_iface_t;
typedef struct vpx_codec_priv vpx_dec_priv_t;
/*!\brief Iterator
*
* Opaque storage used for iterating over lists.
*/
typedef vpx_codec_iter_t vpx_dec_iter_t;
/*!\brief Decoder context structure
*
* All decoders \ref MUST support this context structure fully. In general,
* this data should be considered private to the decoder algorithm, and
* not be manipulated or examined by the calling application. Applications
* may reference the 'name' member to get a printable description of the
* algorithm.
*/
#if 1
typedef vpx_codec_ctx_t vpx_dec_ctx_t;
#else
typedef struct
{
const char *name; /**< Printable interface name */
vpx_dec_iface_t *iface; /**< Interface pointers */
vpx_dec_err_t err; /**< Last returned error */
vpx_dec_priv_t *priv; /**< Algorithm private storage */
} vpx_dec_ctx_t;
#endif
/*!\brief Return the build configuration
*
* Returns a printable string containing an encoded version of the build
* configuration. This may be useful to vpx support.
*
*/
const char *vpx_dec_build_config(void) DEPRECATED;
/*!\brief Return the name for a given interface
*
 * Returns a human readable string for the name of the given decoder interface.
*
* \param[in] iface Interface pointer
*
*/
const char *vpx_dec_iface_name(vpx_dec_iface_t *iface) DEPRECATED;
/*!\brief Convert error number to printable string
*
* Returns a human readable string for the last error returned by the
* algorithm. The returned error will be one line and will not contain
* any newline characters.
*
*
* \param[in] err Error number.
*
*/
const char *vpx_dec_err_to_string(vpx_dec_err_t err) DEPRECATED;
/*!\brief Retrieve error synopsis for decoder context
*
* Returns a human readable string for the last error returned by the
* algorithm. The returned error will be one line and will not contain
* any newline characters.
*
*
* \param[in] ctx Pointer to this instance's context.
*
*/
const char *vpx_dec_error(vpx_dec_ctx_t *ctx) DEPRECATED;
/*!\brief Retrieve detailed error information for decoder context
*
* Returns a human readable string providing detailed information about
* the last error.
*
* \param[in] ctx Pointer to this instance's context.
*
* \retval NULL
* No detailed information is available.
*/
const char *vpx_dec_error_detail(vpx_dec_ctx_t *ctx) DEPRECATED;
/* REQUIRED FUNCTIONS
*
* The following functions are required to be implemented for all decoders.
* They represent the base case functionality expected of all decoders.
*/
/*!\brief Initialize a decoder instance
*
* Initializes a decoder context using the given interface. Applications
* should call the vpx_dec_init convenience macro instead of this
* function directly, to ensure that the ABI version number parameter
* is properly initialized.
*
* \param[in] ctx Pointer to this instance's context.
* \param[in] iface Pointer to the algorithm interface to use.
* \param[in] ver ABI version number. Must be set to
* VPX_DECODER_ABI_VERSION
* \retval #VPX_DEC_OK
* The decoder algorithm initialized.
* \retval #VPX_DEC_MEM_ERROR
* Memory allocation failed.
*/
vpx_dec_err_t vpx_dec_init_ver(vpx_dec_ctx_t *ctx,
vpx_dec_iface_t *iface,
int ver) DEPRECATED;
#define vpx_dec_init(ctx, iface) \
vpx_dec_init_ver(ctx, iface, VPX_DECODER_ABI_VERSION)
/*!\brief Destroy a decoder instance
*
* Destroys a decoder context, freeing any associated memory buffers.
*
* \param[in] ctx Pointer to this instance's context
*
* \retval #VPX_DEC_OK
 * The decoder instance was destroyed and any associated memory freed.
* \retval #VPX_DEC_MEM_ERROR
* Memory allocation failed.
*/
vpx_dec_err_t vpx_dec_destroy(vpx_dec_ctx_t *ctx) DEPRECATED;
/*!\brief Get the capabilities of an algorithm.
*
* Retrieves the capabilities bitfield from the algorithm's interface.
*
* \param[in] iface Pointer to the algorithm interface
*
*/
vpx_dec_caps_t vpx_dec_get_caps(vpx_dec_iface_t *iface) DEPRECATED;
/*!\brief Parse stream info from a buffer
*
* Performs high level parsing of the bitstream. Construction of a decoder
* context is not necessary. Can be used to determine if the bitstream is
* of the proper format, and to extract information from the stream.
*
* \param[in] iface Pointer to the algorithm interface
* \param[in] data Pointer to a block of data to parse
* \param[in] data_sz Size of the data buffer
* \param[in,out] si Pointer to stream info to update. The size member
* \ref MUST be properly initialized, but \ref MAY be
* clobbered by the algorithm. This parameter \ref MAY
* be NULL.
*
* \retval #VPX_DEC_OK
* Bitstream is parsable and stream information updated
*/
vpx_dec_err_t vpx_dec_peek_stream_info(vpx_dec_iface_t *iface,
const uint8_t *data,
unsigned int data_sz,
vpx_dec_stream_info_t *si) DEPRECATED;
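/* Illustrative sketch (not part of the original header): probing a coded
 * buffer with vpx_dec_peek_stream_info() before constructing a decoder.
 * The include paths and the VP8 interface symbol vpx_codec_vp8_dx_algo are
 * assumptions about the build, not something this header guarantees. */
#include <stdio.h>
#include <stdint.h>
#include "vpx/vpx_decoder_compat.h"  /* assumed location of the vpx_dec_* compat layer */
#include "vpx/vp8dx.h"               /* assumed provider of vpx_codec_vp8_dx_algo */

static void example_peek_stream(const uint8_t *buf, unsigned int buf_sz)
{
    vpx_dec_stream_info_t si;
    si.sz = sizeof(si); /* the size member MUST be initialized, see above */
    if (vpx_dec_peek_stream_info(&vpx_codec_vp8_dx_algo, buf, buf_sz, &si) == VPX_DEC_OK) {
        /* w/h may be 0 when the bitstream does not carry them */
        printf("keyframe=%u, %ux%u\n", si.is_kf, si.w, si.h);
    }
}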
/*!\brief Return information about the current stream.
*
* Returns information about the stream that has been parsed during decoding.
*
* \param[in] ctx Pointer to this instance's context
* \param[in,out] si Pointer to stream info to update. The size member
* \ref MUST be properly initialized, but \ref MAY be
* clobbered by the algorithm. This parameter \ref MAY
* be NULL.
*
* \retval #VPX_DEC_OK
* Bitstream is parsable and stream information updated
*/
vpx_dec_err_t vpx_dec_get_stream_info(vpx_dec_ctx_t *ctx,
vpx_dec_stream_info_t *si) DEPRECATED;
/*!\brief Control algorithm
*
* This function is used to exchange algorithm specific data with the decoder
* instance. This can be used to implement features specific to a particular
* algorithm.
*
* This wrapper function dispatches the request to the helper function
* associated with the given ctrl_id. It tries to call this function
* transparently, but will return #VPX_DEC_ERROR if the request could not
* be dispatched.
*
* \param[in] ctx Pointer to this instance's context
* \param[in] ctrl_id Algorithm specific control identifier
* \param[in,out] data Data to exchange with algorithm instance.
*
* \retval #VPX_DEC_OK
* The control request was processed.
* \retval #VPX_DEC_ERROR
* The control request was not processed.
* \retval #VPX_DEC_INVALID_PARAM
* The data was not valid.
*/
vpx_dec_err_t vpx_dec_control(vpx_dec_ctx_t *ctx,
int ctrl_id,
void *data) DEPRECATED;
/*!\brief Decode data
*
* Processes a buffer of coded data. If the processing results in a new
* decoded frame becoming available, #VPX_DEC_CB_PUT_SLICE and
* #VPX_DEC_CB_PUT_FRAME events may be generated, as appropriate. Encoded data
* \ref MUST be passed in DTS (decode time stamp) order. Frames produced will
* always be in PTS (presentation time stamp) order.
*
* \param[in] ctx Pointer to this instance's context
* \param[in] data Pointer to this block of new coded data. If
* NULL, a VPX_DEC_CB_PUT_FRAME event is posted
* for the previously decoded frame.
* \param[in] data_sz Size of the coded data, in bytes.
* \param[in] user_priv Application specific data to associate with
* this frame.
* \param[in] rel_pts PTS relative to the previous frame, in us. If
* unknown or unavailable, set to zero.
*
* \return Returns #VPX_DEC_OK if the coded data was processed completely
* and future pictures can be decoded without error. Otherwise,
* see the descriptions of the other error codes in ::vpx_dec_err_t
* for recoverability capabilities.
*/
vpx_dec_err_t vpx_dec_decode(vpx_dec_ctx_t *ctx,
uint8_t *data,
unsigned int data_sz,
void *user_priv,
int rel_pts) DEPRECATED;
/*!\brief Decoded frames iterator
*
* Iterates over a list of the frames available for display. The iterator
* storage should be initialized to NULL to start the iteration. Iteration is
* complete when this function returns NULL.
*
* The list of available frames becomes valid upon completion of the
* vpx_dec_decode call, and remains valid until the next call to vpx_dec_decode.
*
* \param[in] ctx Pointer to this instance's context
* \param[in out] iter Iterator storage, initialized to NULL
*
* \return Returns a pointer to an image, if one is ready for display. Frames
* produced will always be in PTS (presentation time stamp) order.
*/
vpx_image_t *vpx_dec_get_frame(vpx_dec_ctx_t *ctx,
vpx_dec_iter_t *iter) DEPRECATED;
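/* Illustrative sketch (not from the original header): the minimal lifecycle
 * implied by the declarations above -- init, feed one DTS-ordered buffer,
 * drain the PTS-ordered frames, destroy. Same assumed includes and interface
 * symbol as the peek sketch earlier in this file. */
static int example_decode_buffer(uint8_t *buf, unsigned int buf_sz)
{
    vpx_dec_ctx_t ctx;
    vpx_dec_iter_t iter = NULL;  /* must start at NULL, see vpx_dec_get_frame() */
    vpx_image_t *img;

    if (vpx_dec_init(&ctx, &vpx_codec_vp8_dx_algo) != VPX_DEC_OK) {
        return -1; /* ABI mismatch or allocation failure */
    }
    /* user_priv is unused and the relative PTS is unknown here, so NULL/0 are passed */
    if (vpx_dec_decode(&ctx, buf, buf_sz, NULL, 0) != VPX_DEC_OK) {
        fprintf(stderr, "decode failed: %s\n", vpx_dec_error(&ctx));
    }
    /* the frame list stays valid until the next vpx_dec_decode() call */
    while ((img = vpx_dec_get_frame(&ctx, &iter)) != NULL) {
        printf("got %ux%u frame\n", img->d_w, img->d_h);
    }
    return (vpx_dec_destroy(&ctx) == VPX_DEC_OK) ? 0 : -2;
}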
/*!\defgroup cap_put_frame Frame-Based Decoding Functions
*
* The following functions are required to be implemented for all decoders
* that advertise the VPX_DEC_CAP_PUT_FRAME capability. Calling these functions
* for codecs that don't advertise this capability will result in an error
* code being returned, usually VPX_DEC_ERROR
* @{
*/
/*!\brief put frame callback prototype
*
* This callback is invoked by the decoder to notify the application of
* the availability of decoded image data.
*/
typedef void (*vpx_dec_put_frame_cb_fn_t)(void *user_priv,
const vpx_image_t *img);
/*!\brief Register for notification of frame completion.
*
* Registers a given function to be called when a decoded frame is
* available.
*
* \param[in] ctx Pointer to this instance's context
* \param[in] cb Pointer to the callback function
* \param[in] user_priv User's private data
*
* \retval #VPX_DEC_OK
* Callback successfully registered.
* \retval #VPX_DEC_ERROR
* Decoder context not initialized, or algorithm not capable of
* posting slice completion.
*/
vpx_dec_err_t vpx_dec_register_put_frame_cb(vpx_dec_ctx_t *ctx,
vpx_dec_put_frame_cb_fn_t cb,
void *user_priv) DEPRECATED;
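/* Illustrative sketch (not from the original header): registering a put_frame
 * callback only when the interface advertises VPX_DEC_CAP_PUT_FRAME. Only the
 * declarations above are used; includes are the same as the earlier sketches. */
static void example_on_decoded_frame(void *user_priv, const vpx_image_t *img)
{
    (void)user_priv;
    /* invoked by the decoder as soon as a complete picture is available */
    printf("frame ready: %ux%u\n", img->d_w, img->d_h);
}

static void example_register_frame_cb(vpx_dec_ctx_t *ctx, vpx_dec_iface_t *iface)
{
    if (vpx_dec_get_caps(iface) & VPX_DEC_CAP_PUT_FRAME) {
        vpx_dec_register_put_frame_cb(ctx, example_on_decoded_frame, NULL);
    }
    /* codecs without the capability would return VPX_DEC_ERROR here, as noted above */
}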
/*!@} - end defgroup cap_put_frame */
/*!\defgroup cap_put_slice Slice-Based Decoding Functions
*
* The following functions are required to be implemented for all decoders
* that advertise the VPX_DEC_CAP_PUT_SLICE capability. Calling these functions
* for codecs that don't advertise this capability will result in an error
* code being returned, usually VPX_DEC_ERROR
* @{
*/
/*!\brief put slice callback prototype
*
* This callback is invoked by the decoder to notify the application of
* the availability of partially decoded image data. The
*/
typedef void (*vpx_dec_put_slice_cb_fn_t)(void *user_priv,
const vpx_image_t *img,
const vpx_image_rect_t *valid,
const vpx_image_rect_t *update);
/*!\brief Register for notification of slice completion.
*
* Registers a given function to be called when a decoded slice is
* available.
*
* \param[in] ctx Pointer to this instance's context
* \param[in] cb Pointer to the callback function
* \param[in] user_priv User's private data
*
* \retval #VPX_DEC_OK
* Callback successfully registered.
* \retval #VPX_DEC_ERROR
* Decoder context not initialized, or algorithm not capable of
* posting slice completion.
*/
vpx_dec_err_t vpx_dec_register_put_slice_cb(vpx_dec_ctx_t *ctx,
vpx_dec_put_slice_cb_fn_t cb,
void *user_priv) DEPRECATED;
/*!@} - end defgroup cap_put_slice*/
/*!\defgroup cap_xma External Memory Allocation Functions
*
* The following functions are required to be implemented for all decoders
* that advertise the VPX_DEC_CAP_XMA capability. Calling these functions
* for codecs that don't advertise this capability will result in an error
* code being returned, usually VPX_DEC_ERROR
* @{
*/
/*!\brief Memory Map Entry
*
* This structure is used to contain the properties of a memory segment. It
* is populated by the decoder in the request phase, and by the calling
* application once the requested allocation has been performed.
*/
#if 1
#define VPX_DEC_MEM_ZERO 0x1 /**< Segment must be zeroed by allocation */
#define VPX_DEC_MEM_WRONLY 0x2 /**< Segment need not be readable */
#define VPX_DEC_MEM_FAST 0x4 /**< Place in fast memory, if available */
typedef struct vpx_codec_mmap vpx_dec_mmap_t;
#else
typedef struct vpx_dec_mmap
{
/*
* The following members are set by the codec when requesting a segment
*/
unsigned int id; /**< identifier for the segment's contents */
unsigned long sz; /**< size of the segment, in bytes */
unsigned int align; /**< required alignment of the segment, in bytes */
unsigned int flags; /**< bitfield containing segment properties */
#define VPX_DEC_MEM_ZERO 0x1 /**< Segment must be zeroed by allocation */
#define VPX_DEC_MEM_WRONLY 0x2 /**< Segment need not be readable */
#define VPX_DEC_MEM_FAST 0x4 /**< Place in fast memory, if available */
/* The following members are to be filled in by the allocation function */
void *base; /**< pointer to the allocated segment */
void (*dtor)(struct vpx_dec_mmap *map); /**< destructor to call */
void *priv; /**< allocator private storage */
} vpx_dec_mmap_t;
#endif
/*!\brief Initialize a decoder instance in external allocation mode
*
* Initializes a decoder context using the given interface. Applications
* should call the vpx_dec_xma_init convenience macro instead of this
* function directly, to ensure that the ABI version number parameter
* is properly initialized.
*
* \param[in] ctx Pointer to this instance's context.
* \param[in] iface Pointer to the algorithm interface to use.
* \param[in] ver ABI version number. Must be set to
* VPX_DECODER_ABI_VERSION
* \retval #VPX_DEC_OK
* The decoder algorithm initialized.
* \retval #VPX_DEC_ERROR
* Decoder does not support XMA mode.
*/
vpx_dec_err_t vpx_dec_xma_init_ver(vpx_dec_ctx_t *ctx,
vpx_dec_iface_t *iface,
int ver) DEPRECATED;
#define vpx_dec_xma_init(ctx, iface) \
vpx_dec_xma_init_ver(ctx, iface, VPX_DECODER_ABI_VERSION)
/*!\brief Iterate over the list of segments to allocate.
*
* Iterates over a list of the segments to allocate. The iterator storage
* should be initialized to NULL to start the iteration. Iteration is complete
* when this function returns VPX_DEC_LIST_END. The amount of memory needed to
 * allocate is dependent upon the size of the encoded stream. This means that
* the stream info structure must be known at allocation time. It can be
* populated with the vpx_dec_peek_stream_info() function. In cases where the
* stream to be decoded is not available at allocation time, a fixed size must
* be requested. The decoder will not be able to decode streams larger than
* the size used at allocation time.
*
* \param[in] ctx Pointer to this instance's context.
* \param[out] mmap Pointer to the memory map entry to populate.
* \param[in] si Pointer to the stream info.
* \param[in out] iter Iterator storage, initialized to NULL
*
* \retval #VPX_DEC_OK
* The memory map entry was populated.
* \retval #VPX_DEC_ERROR
* Decoder does not support XMA mode.
* \retval #VPX_DEC_MEM_ERROR
* Unable to determine segment size from stream info.
*/
vpx_dec_err_t vpx_dec_get_mem_map(vpx_dec_ctx_t *ctx,
vpx_dec_mmap_t *mmap,
const vpx_dec_stream_info_t *si,
vpx_dec_iter_t *iter) DEPRECATED;
/*!\brief Identify allocated segments to decoder instance
*
* Stores a list of allocated segments in the decoder. Segments \ref MUST be
* passed in the order they are read from vpx_dec_get_mem_map(), but may be
* passed in groups of any size. Segments \ref MUST be set only once. The
* allocation function \ref MUST ensure that the vpx_dec_mmap_t::base member
* is non-NULL. If the segment requires cleanup handling (e.g., calling free()
* or close()) then the vpx_dec_mmap_t::dtor member \ref MUST be populated.
*
* \param[in] ctx Pointer to this instance's context.
* \param[in] mmaps Pointer to the first memory map entry in the list.
* \param[in] num_maps Number of entries being set at this time
*
* \retval #VPX_DEC_OK
* The segment was stored in the decoder context.
* \retval #VPX_DEC_ERROR
* Decoder does not support XMA mode.
* \retval #VPX_DEC_MEM_ERROR
* Segment base address was not set, or segment was already stored.
*/
vpx_dec_err_t vpx_dec_set_mem_map(vpx_dec_ctx_t *ctx,
vpx_dec_mmap_t *mmaps,
unsigned int num_maps) DEPRECATED;
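/* Illustrative sketch (not from the original header): the external memory
 * allocation handshake described above -- peek the stream info, init the
 * context in XMA mode, then one vpx_dec_get_mem_map()/vpx_dec_set_mem_map()
 * round-trip per segment until VPX_DEC_LIST_END. malloc()/calloc() stand in
 * for the application's allocator; segment alignment and the dtor cleanup
 * hook are deliberately ignored here for brevity, and error paths leak any
 * segments already handed to the decoder. */
#include <stdlib.h>
#include <string.h>

static int example_xma_open(vpx_dec_ctx_t *ctx, vpx_dec_iface_t *iface,
                            const uint8_t *buf, unsigned int buf_sz)
{
    vpx_dec_stream_info_t si;
    vpx_dec_iter_t iter = NULL;
    vpx_dec_mmap_t seg;
    vpx_dec_err_t err;

    si.sz = sizeof(si);
    if (vpx_dec_peek_stream_info(iface, buf, buf_sz, &si) != VPX_DEC_OK) {
        return -1; /* segment sizes depend on the stream dimensions */
    }
    if (vpx_dec_xma_init(ctx, iface) != VPX_DEC_OK) {
        return -2; /* interface does not advertise VPX_DEC_CAP_XMA */
    }
    for (;;) {
        memset(&seg, 0, sizeof(seg));
        err = vpx_dec_get_mem_map(ctx, &seg, &si, &iter);
        if (err != VPX_DEC_OK) {
            break;
        }
        seg.base = (seg.flags & VPX_DEC_MEM_ZERO) ? calloc(1, seg.sz) : malloc(seg.sz);
        if (!seg.base) {
            return -3;
        }
        if (vpx_dec_set_mem_map(ctx, &seg, 1) != VPX_DEC_OK) {
            return -4;
        }
    }
    return (err == VPX_DEC_LIST_END) ? 0 : -5; /* LIST_END terminates a successful walk */
}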
/*!@} - end defgroup cap_xma*/
/*!@} - end defgroup decoder*/
#endif
#ifdef __cplusplus
}
#endif

View File

@ -20,14 +20,14 @@ extern "C" {
#ifndef VPX_IMAGE_H
#define VPX_IMAGE_H
/*!\brief Current ABI version number
*
* \internal
* If this file is altered in any way that changes the ABI, this value
* must be bumped. Examples include, but are not limited to, changing
* types, removing or reassigning enums, adding/removing/rearranging
* fields to structures
*/
#define VPX_IMAGE_ABI_VERSION (1) /**<\hideinitializer*/
@ -36,41 +36,43 @@ extern "C" {
#define VPX_IMG_FMT_HAS_ALPHA 0x400 /**< Image has an alpha channel component */
/*!\brief List of supported image formats */
typedef enum vpx_img_fmt {
VPX_IMG_FMT_NONE,
VPX_IMG_FMT_RGB24, /**< 24 bit per pixel packed RGB */
VPX_IMG_FMT_RGB32, /**< 32 bit per pixel packed 0RGB */
VPX_IMG_FMT_RGB565, /**< 16 bit per pixel, 565 */
VPX_IMG_FMT_RGB555, /**< 16 bit per pixel, 555 */
VPX_IMG_FMT_UYVY, /**< UYVY packed YUV */
VPX_IMG_FMT_YUY2, /**< YUYV packed YUV */
VPX_IMG_FMT_YVYU, /**< YVYU packed YUV */
VPX_IMG_FMT_BGR24, /**< 24 bit per pixel packed BGR */
VPX_IMG_FMT_RGB32_LE, /**< 32 bit packed BGR0 */
VPX_IMG_FMT_ARGB, /**< 32 bit packed ARGB, alpha=255 */
VPX_IMG_FMT_ARGB_LE, /**< 32 bit packed BGRA, alpha=255 */
VPX_IMG_FMT_RGB565_LE, /**< 16 bit per pixel, gggbbbbb rrrrrggg */
VPX_IMG_FMT_RGB555_LE, /**< 16 bit per pixel, gggbbbbb 0rrrrrgg */
VPX_IMG_FMT_YV12 = VPX_IMG_FMT_PLANAR | VPX_IMG_FMT_UV_FLIP | 1, /**< planar YVU */
VPX_IMG_FMT_I420 = VPX_IMG_FMT_PLANAR | 2,
VPX_IMG_FMT_VPXYV12 = VPX_IMG_FMT_PLANAR | VPX_IMG_FMT_UV_FLIP | 3, /** < planar 4:2:0 format with vpx color space */
VPX_IMG_FMT_VPXI420 = VPX_IMG_FMT_PLANAR | 4 /** < planar 4:2:0 format with vpx color space */
}
vpx_img_fmt_t; /**< alias for enum vpx_img_fmt */
/*!\brief List of supported image formats */
typedef enum vpx_img_fmt {
VPX_IMG_FMT_NONE,
VPX_IMG_FMT_RGB24, /**< 24 bit per pixel packed RGB */
VPX_IMG_FMT_RGB32, /**< 32 bit per pixel packed 0RGB */
VPX_IMG_FMT_RGB565, /**< 16 bit per pixel, 565 */
VPX_IMG_FMT_RGB555, /**< 16 bit per pixel, 555 */
VPX_IMG_FMT_UYVY, /**< UYVY packed YUV */
VPX_IMG_FMT_YUY2, /**< YUYV packed YUV */
VPX_IMG_FMT_YVYU, /**< YVYU packed YUV */
VPX_IMG_FMT_BGR24, /**< 24 bit per pixel packed BGR */
VPX_IMG_FMT_RGB32_LE, /**< 32 bit packed BGR0 */
VPX_IMG_FMT_ARGB, /**< 32 bit packed ARGB, alpha=255 */
VPX_IMG_FMT_ARGB_LE, /**< 32 bit packed BGRA, alpha=255 */
VPX_IMG_FMT_RGB565_LE, /**< 16 bit per pixel, gggbbbbb rrrrrggg */
VPX_IMG_FMT_RGB555_LE, /**< 16 bit per pixel, gggbbbbb 0rrrrrgg */
VPX_IMG_FMT_YV12 = VPX_IMG_FMT_PLANAR | VPX_IMG_FMT_UV_FLIP | 1, /**< planar YVU */
VPX_IMG_FMT_I420 = VPX_IMG_FMT_PLANAR | 2,
VPX_IMG_FMT_VPXYV12 = VPX_IMG_FMT_PLANAR | VPX_IMG_FMT_UV_FLIP | 3, /** < planar 4:2:0 format with vpx color space */
VPX_IMG_FMT_VPXI420 = VPX_IMG_FMT_PLANAR | 4,
VPX_IMG_FMT_I422 = VPX_IMG_FMT_PLANAR | 5,
VPX_IMG_FMT_I444 = VPX_IMG_FMT_PLANAR | 6,
VPX_IMG_FMT_444A = VPX_IMG_FMT_PLANAR | VPX_IMG_FMT_HAS_ALPHA | 7
} vpx_img_fmt_t; /**< alias for enum vpx_img_fmt */
#if !defined(VPX_CODEC_DISABLE_COMPAT) || !VPX_CODEC_DISABLE_COMPAT
#define IMG_FMT_PLANAR VPX_IMG_FMT_PLANAR /**< \deprecated Use #VPX_IMG_FMT_PLANAR */
#define IMG_FMT_UV_FLIP VPX_IMG_FMT_UV_FLIP /**< \deprecated Use #VPX_IMG_FMT_UV_FLIP */
#define IMG_FMT_HAS_ALPHA VPX_IMG_FMT_HAS_ALPHA /**< \deprecated Use #VPX_IMG_FMT_HAS_ALPHA */
/*!\brief Deprecated list of supported image formats
* \deprecated New code should use #vpx_img_fmt
*/
#define img_fmt vpx_img_fmt
/*!\brief alias for enum img_fmt.
* \deprecated New code should use #vpx_img_fmt_t
*/
#define img_fmt_t vpx_img_fmt_t
#define IMG_FMT_NONE VPX_IMG_FMT_NONE /**< \deprecated Use #VPX_IMG_FMT_NONE */
@ -93,24 +95,23 @@ extern "C" {
#define IMG_FMT_VPXI420 VPX_IMG_FMT_VPXI420 /**< \deprecated Use #VPX_IMG_FMT_VPXI420 */
#endif /* VPX_CODEC_DISABLE_COMPAT */
/**\brief Image Descriptor */
typedef struct vpx_image
{
vpx_img_fmt_t fmt; /**< Image Format */
/**\brief Image Descriptor */
typedef struct vpx_image {
vpx_img_fmt_t fmt; /**< Image Format */
/* Image storage dimensions */
unsigned int w; /**< Stored image width */
unsigned int h; /**< Stored image height */
/* Image storage dimensions */
unsigned int w; /**< Stored image width */
unsigned int h; /**< Stored image height */
/* Image display dimensions */
unsigned int d_w; /**< Displayed image width */
unsigned int d_h; /**< Displayed image height */
/* Image display dimensions */
unsigned int d_w; /**< Displayed image width */
unsigned int d_h; /**< Displayed image height */
/* Chroma subsampling info */
unsigned int x_chroma_shift; /**< subsampling order, X */
unsigned int y_chroma_shift; /**< subsampling order, Y */
/* Chroma subsampling info */
unsigned int x_chroma_shift; /**< subsampling order, X */
unsigned int y_chroma_shift; /**< subsampling order, Y */
/* Image data pointers. */
/* Image data pointers. */
#define VPX_PLANE_PACKED 0 /**< To be used for all packed formats */
#define VPX_PLANE_Y 0 /**< Y (Luminance) plane */
#define VPX_PLANE_U 1 /**< U (Chroma) plane */
@ -123,119 +124,118 @@ extern "C" {
#define PLANE_V VPX_PLANE_V
#define PLANE_ALPHA VPX_PLANE_ALPHA
#endif
unsigned char *planes[4]; /**< pointer to the top left pixel for each plane */
int stride[4]; /**< stride between rows for each plane */
unsigned char *planes[4]; /**< pointer to the top left pixel for each plane */
int stride[4]; /**< stride between rows for each plane */
int bps; /**< bits per sample (for packed formats) */
int bps; /**< bits per sample (for packed formats) */
/* The following member may be set by the application to associate data
* with this image.
*/
void *user_priv; /**< may be set by the application to associate data
* with this image. */
/* The following members should be treated as private. */
unsigned char *img_data; /**< private */
int img_data_owner; /**< private */
int self_allocd; /**< private */
} vpx_image_t; /**< alias for struct vpx_image */
/**\brief Representation of a rectangle on a surface */
typedef struct vpx_image_rect
{
unsigned int x; /**< leftmost column */
unsigned int y; /**< topmost row */
unsigned int w; /**< width */
unsigned int h; /**< height */
} vpx_image_rect_t; /**< alias for struct vpx_image_rect */
/**\brief Representation of a rectangle on a surface */
typedef struct vpx_image_rect {
unsigned int x; /**< leftmost column */
unsigned int y; /**< topmost row */
unsigned int w; /**< width */
unsigned int h; /**< height */
} vpx_image_rect_t; /**< alias for struct vpx_image_rect */
/*!\brief Open a descriptor, allocating storage for the underlying image
*
* Returns a descriptor for storing an image of the given format. The
* storage for the descriptor is allocated on the heap.
*
* \param[in] img Pointer to storage for descriptor. If this parameter
* is NULL, the storage for the descriptor will be
* allocated on the heap.
* \param[in] fmt Format for the image
* \param[in] d_w Width of the image
* \param[in] d_h Height of the image
* \param[in] align Alignment, in bytes, of the image buffer and
* each row in the image(stride).
*
* \return Returns a pointer to the initialized image descriptor. If the img
* parameter is non-null, the value of the img parameter will be
* returned.
*/
vpx_image_t *vpx_img_alloc(vpx_image_t *img,
vpx_img_fmt_t fmt,
unsigned int d_w,
unsigned int d_h,
unsigned int align);
/*!\brief Open a descriptor, using existing storage for the underlying image
*
* Returns a descriptor for storing an image of the given format. The
* storage for descriptor has been allocated elsewhere, and a descriptor is
* desired to "wrap" that storage.
*
* \param[in] img Pointer to storage for descriptor. If this parameter
* is NULL, the storage for the descriptor will be
* allocated on the heap.
* \param[in] fmt Format for the image
* \param[in] d_w Width of the image
* \param[in] d_h Height of the image
* \param[in] align Alignment, in bytes, of each row in the image.
* \param[in] img_data Storage to use for the image
*
* \return Returns a pointer to the initialized image descriptor. If the img
* parameter is non-null, the value of the img parameter will be
* returned.
*/
vpx_image_t *vpx_img_wrap(vpx_image_t *img,
vpx_img_fmt_t fmt,
unsigned int d_w,
unsigned int d_h,
unsigned int align,
unsigned char *img_data);
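/* Illustrative sketch (not from the original header): wrapping a caller-owned,
 * tightly packed I420 buffer so it can be addressed through a vpx_image_t.
 * The only assumption beyond the declarations above is the usual 4:2:0 size
 * requirement (at least w*h*3/2 bytes, even dimensions). */
static vpx_image_t *example_wrap_i420(unsigned char *pixels,
                                      unsigned int w, unsigned int h)
{
    /* align=1 keeps the three planes contiguous inside `pixels`; the pixel
       buffer stays owned by the caller and must outlive the descriptor */
    return vpx_img_wrap(NULL, VPX_IMG_FMT_I420, w, h, 1, pixels);
}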
/*!\brief Set the rectangle identifying the displayed portion of the image
*
* Updates the displayed rectangle (aka viewport) on the image surface to
* match the specified coordinates and size.
*
* \param[in] img Image descriptor
* \param[in] x leftmost column
* \param[in] y topmost row
* \param[in] w width
* \param[in] h height
*
* \return 0 if the requested rectangle is valid, nonzero otherwise.
*/
int vpx_img_set_rect(vpx_image_t *img,
unsigned int x,
unsigned int y,
unsigned int w,
unsigned int h);
/*!\brief Flip the image vertically (top for bottom)
*
* Adjusts the image descriptor's pointers and strides to make the image
* be referenced upside-down.
*
* \param[in] img Image descriptor
*/
void vpx_img_flip(vpx_image_t *img);
/*!\brief Close an image descriptor
*
* Frees all allocated storage associated with an image descriptor.
*
* \param[in] img Image descriptor
*/
void vpx_img_free(vpx_image_t *img);
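/* Illustrative sketch (not from the original header): allocate a 640x480 I420
 * image with 16-byte row alignment, restrict the displayed region to its left
 * half, then release it. Only the declarations above are used. */
static void example_image_roundtrip(void)
{
    vpx_image_t img;
    if (!vpx_img_alloc(&img, VPX_IMG_FMT_I420, 640, 480, 16)) {
        return; /* allocation failed */
    }
    /* storage stays 640x480; only the displayed dimensions and plane pointers
       change. A nonzero return would mean the rectangle does not fit. */
    (void)vpx_img_set_rect(&img, 0, 0, 320, 480);
    vpx_img_free(&img);
}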
#endif
#ifdef __cplusplus

View File

@ -27,6 +27,9 @@ typedef unsigned int uint32_t;
#if (defined(_MSC_VER) && (_MSC_VER < 1600))
typedef signed __int64 int64_t;
typedef unsigned __int64 uint64_t;
#define INT64_MAX _I64_MAX
#define INT16_MAX _I16_MAX
#define INT16_MIN _I16_MIN
#endif
#ifndef _UINTPTR_T_DEFINED

View File

@ -152,7 +152,7 @@ static int _tbfcp_attr_write(const tbfcp_attr_t* pc_self, uint8_t* p_buff_ptr, t
case tbfcp_attribute_format_Unsigned16:
case tbfcp_attribute_format_OctetString16: {
*p_written = TBFCP_ATTR_HDR_SIZE_IN_OCTETS + 2;
p_buff_ptr[1] = *p_written;
p_buff_ptr[1] = (uint8_t)*p_written;
if (pc_self->format == tbfcp_attribute_format_Unsigned16) {
*((uint16_t*)&p_buff_ptr[2]) = tnet_htons(((const tbfcp_attr_unsigned16_t*)pc_self)->Unsigned16);
}
@ -168,7 +168,7 @@ static int _tbfcp_attr_write(const tbfcp_attr_t* pc_self, uint8_t* p_buff_ptr, t
case tbfcp_attribute_format_OctetString: {
const tbfcp_attr_octetstring_t* _pc_self = (const tbfcp_attr_octetstring_t*)pc_self;
*p_written = TBFCP_ATTR_HDR_SIZE_IN_OCTETS + ((_pc_self->OctetStringLength && _pc_self->OctetString) ? _pc_self->OctetStringLength : 0);
p_buff_ptr[1] = *p_written;
p_buff_ptr[1] = (uint8_t)*p_written;
if (_pc_self->OctetStringLength && _pc_self->OctetString) {
memcpy(&p_buff_ptr[2], _pc_self->OctetString, _pc_self->OctetStringLength);
}
@ -190,7 +190,7 @@ static int _tbfcp_attr_write(const tbfcp_attr_t* pc_self, uint8_t* p_buff_ptr, t
if ((ret = tbfcp_attr_get_size_in_octetunits_without_padding(pc_self, p_written))) {
return ret;
}
p_buff_ptr[1] = *p_written;
p_buff_ptr[1] = (uint8_t)*p_written;
p_buff_ptr += 2;
n_buff_size -= 2;
if (_pc_self->extra_hdr_size_in_octets) {

View File

@ -208,8 +208,8 @@ int tbfcp_pkt_write_with_padding(const tbfcp_pkt_t* pc_self, uint8_t* p_buff_ptr
}
p_buff_ptr[0] = (((uint8_t)pc_self->hdr.ver) << 5) | (pc_self->hdr.reserved & 0x7F);
p_buff_ptr[1] = pc_self->hdr.primitive;
*((uint32_t*)&p_buff_ptr[2]) = tnet_htons(((*p_written - TBFCP_PKT_HDR_SIZE_IN_OCTETS) >> 2));
p_buff_ptr[1] = (uint8_t)pc_self->hdr.primitive;
*((uint32_t*)&p_buff_ptr[2]) = tnet_htons((unsigned short)((*p_written - TBFCP_PKT_HDR_SIZE_IN_OCTETS) >> 2));
*((uint32_t*)&p_buff_ptr[4]) = (uint32_t)tnet_htonl(pc_self->hdr.conf_id);
*((uint16_t*)&p_buff_ptr[8]) = tnet_htons(pc_self->hdr.transac_id);
*((uint16_t*)&p_buff_ptr[10]) = tnet_htons(pc_self->hdr.user_id);
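/* Illustrative sketch (not part of the commit): the reason for the narrowing
 * casts added in this file. On Win64 tsk_size_t/size_t is 64-bit while the
 * BFCP attribute-length octet is 8-bit and the COMMON-HEADER payload length
 * is a 16-bit count of 4-octet units, so computed sizes must be narrowed
 * explicitly before being stored or byte-swapped. The helper below is
 * hypothetical; TBFCP_PKT_HDR_SIZE_IN_OCTETS is the fixed-header size used above. */
static uint16_t example_bfcp_payload_length(size_t written_in_octets)
{
    /* payload length excludes the fixed common header and is expressed in
       4-octet units, exactly as in tbfcp_pkt_write_with_padding() above */
    return (uint16_t)((written_in_octets - TBFCP_PKT_HDR_SIZE_IN_OCTETS) >> 2);
}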
@ -464,7 +464,7 @@ int tbfcp_pkt_create_HelloAck_2(uint32_t conf_id, uint16_t transac_id, uint16_t
return ret;
}
/* SUPPORTED-ATTRIBUTES */
if ((ret = tbfcp_attr_octetstring_create(tbfcp_attribute_type_SUPPORTED_ATTRIBUTES, kBfcpFieldMNo, kNullOctetStringPtr, __supp_attrs_count, &p_supp_attr))) {
if ((ret = tbfcp_attr_octetstring_create(tbfcp_attribute_type_SUPPORTED_ATTRIBUTES, kBfcpFieldMNo, kNullOctetStringPtr, (uint8_t)__supp_attrs_count, &p_supp_attr))) {
return ret;
}
for (u = 0; u < p_supp_attr->OctetStringLength; ++u) {
@ -475,7 +475,7 @@ int tbfcp_pkt_create_HelloAck_2(uint32_t conf_id, uint16_t transac_id, uint16_t
return ret;
}
/* SUPPORTED-PRIMITIVES */
if ((ret = tbfcp_attr_octetstring_create(tbfcp_attribute_type_SUPPORTED_PRIMITIVES, kBfcpFieldMNo, kNullOctetStringPtr, __supp_prims_count, &p_supp_prim))) {
if ((ret = tbfcp_attr_octetstring_create(tbfcp_attribute_type_SUPPORTED_PRIMITIVES, kBfcpFieldMNo, kNullOctetStringPtr, (uint8_t)__supp_prims_count, &p_supp_prim))) {
return ret;
}
for (u = 0; u < p_supp_prim->OctetStringLength; ++u) {

View File

@ -239,8 +239,8 @@ static int tdav_codec_h264_common_get_profile_and_level(const char* fmtp, profil
if((start = tsk_strindexOf(fmtp, size, "profile-level-id")) !=-1){
tsk_param_t* param;
if((end = tsk_strindexOf((fmtp+start), (size-start), ";")) == -1){
end = size;
if((end = (int)tsk_strindexOf((fmtp+start), (tsk_size_t)(size-start), ";")) == -1){
end = (int)size;
}
if((param = tsk_params_parse_param((fmtp+start), (end-start)))){

View File

@ -94,7 +94,7 @@ static void* TSK_STDCALL _tdav_consumer_dsound_playback_thread(void *param)
hr = IDirectSoundBuffer_Lock(
dsound->secondaryBuffer,
dwWriteCursor/* Ignored because of DSBLOCK_FROMWRITECURSOR */,
dsound->bytes_per_notif_size,
(DWORD)dsound->bytes_per_notif_size,
&lpvAudio1, &dwBytesAudio1,
&lpvAudio2, &dwBytesAudio2,
DSBLOCK_FROMWRITECURSOR);
@ -255,7 +255,7 @@ static int tdav_consumer_dsound_prepare(tmedia_consumer_t* self, const tmedia_co
/* Creates the secondary buffer and apply format */
dsbd.dwFlags = (DSBCAPS_CTRLPOSITIONNOTIFY | DSBCAPS_GLOBALFOCUS | DSBCAPS_CTRLVOLUME);
dsbd.dwBufferBytes = (TDAV_DSOUND_CONSUMER_NOTIF_POS_COUNT * dsound->bytes_per_notif_size);
dsbd.dwBufferBytes = (DWORD)(TDAV_DSOUND_CONSUMER_NOTIF_POS_COUNT * dsound->bytes_per_notif_size);
dsbd.lpwfxFormat = &wfx;
if((hr = IDirectSound_CreateSoundBuffer(dsound->device, &dsbd, &dsound->secondaryBuffer, NULL)) != DS_OK){
@ -314,7 +314,7 @@ static int tdav_consumer_dsound_start(tmedia_consumer_t* self)
for(i = 0; i<TDAV_DSOUND_CONSUMER_NOTIF_POS_COUNT; i++){
dsound->notifEvents[i] = CreateEvent(NULL, FALSE, FALSE, NULL);
// set notification point offset at the start of the buffer for Windows Vista and later and at the half of the buffer of XP and before
pPosNotify[i].dwOffset = (dsound->bytes_per_notif_size * i) + (dwMajorVersion > 5 ? (dsound->bytes_per_notif_size >> 1) : 1);
pPosNotify[i].dwOffset = (DWORD)((dsound->bytes_per_notif_size * i) + (dwMajorVersion > 5 ? (dsound->bytes_per_notif_size >> 1) : 1));
pPosNotify[i].hEventNotify = dsound->notifEvents[i];
}
if((hr = IDirectSoundNotify_SetNotificationPositions(lpDSBNotify, TDAV_DSOUND_CONSUMER_NOTIF_POS_COUNT, pPosNotify)) != DS_OK){

View File

@ -1,20 +1,18 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
* Copyright (C) 2010-2015 Mamadou DIOP.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@ -23,9 +21,6 @@
/**@file tdav_producer_dsound.c
* @brief Microsoft DirectSound producer.
*
* @author Mamadou Diop <diopmamadou(at)doubango.org>
*
*/
#include "tinydav/audio/directsound/tdav_producer_dsound.h"
@ -75,7 +70,7 @@ tdav_producer_dsound_t;
static void* TSK_STDCALL _tdav_producer_dsound_record_thread(void *param)
{
tdav_producer_dsound_t* dsound = (tdav_producer_dsound_t*)param;
tdav_producer_dsound_t* dsound = (tdav_producer_dsound_t*)param;
HRESULT hr;
LPVOID lpvAudio1, lpvAudio2;
@ -90,14 +85,14 @@ static void* TSK_STDCALL _tdav_producer_dsound_record_thread(void *param)
if (!dsound->started) {
break;
}
if (dwEvent < WAIT_OBJECT_0 || dwEvent > (WAIT_OBJECT_0 + TDAV_DSOUND_PRODUCER_NOTIF_POS_COUNT)) {
if (dwEvent < WAIT_OBJECT_0 || dwEvent >(WAIT_OBJECT_0 + TDAV_DSOUND_PRODUCER_NOTIF_POS_COUNT)) {
TSK_DEBUG_ERROR("Invalid dwEvent(%d)", dwEvent);
break;
}
dwIndex = (dwEvent - WAIT_OBJECT_0);
// lock
if ((hr = IDirectSoundCaptureBuffer_Lock(dsound->captureBuffer, (dwIndex * dsound->bytes_per_notif_size), dsound->bytes_per_notif_size, &lpvAudio1, &dwBytesAudio1, &lpvAudio2, &dwBytesAudio2, 0)) != DS_OK) {
if ((hr = IDirectSoundCaptureBuffer_Lock(dsound->captureBuffer, (DWORD)(dwIndex * dsound->bytes_per_notif_size), (DWORD)dsound->bytes_per_notif_size, &lpvAudio1, &dwBytesAudio1, &lpvAudio2, &dwBytesAudio2, 0)) != DS_OK) {
tdav_win32_print_error("IDirectSoundCaptureBuffer_Lock", hr);
continue;
}
@ -116,16 +111,16 @@ static void* TSK_STDCALL _tdav_producer_dsound_record_thread(void *param)
TMEDIA_PRODUCER(dsound)->enc_cb.callback(TMEDIA_PRODUCER(dsound)->enc_cb.callback_data, lpvAudio2, dwBytesAudio2);
}
}
// unlock
if ((hr = IDirectSoundCaptureBuffer_Unlock(dsound->captureBuffer, lpvAudio1, dwBytesAudio1, lpvAudio2, dwBytesAudio2)) != DS_OK) {
tdav_win32_print_error("IDirectSoundCaptureBuffer_Unlock", hr);
continue;
}
}
}
TSK_DEBUG_INFO("_tdav_producer_dsound_record_thread -- STOP");
return tsk_null;
}
@ -142,8 +137,8 @@ static int _tdav_producer_dsound_unprepare(tdav_producer_dsound_t* dsound)
IDirectSoundCapture_Release(dsound->device);
dsound->device = NULL;
}
for (i = 0; i<(sizeof(dsound->notifEvents)/sizeof(dsound->notifEvents[0])); i++){
if(dsound->notifEvents[i]) {
for (i = 0; i < (sizeof(dsound->notifEvents) / sizeof(dsound->notifEvents[0])); i++){
if (dsound->notifEvents[i]) {
CloseHandle(dsound->notifEvents[i]);
dsound->notifEvents[i] = NULL;
}
@ -157,7 +152,7 @@ static int _tdav_producer_dsound_unprepare(tdav_producer_dsound_t* dsound)
/* ============ Media Producer Interface ================= */
static int tdav_producer_dsound_set(tmedia_producer_t* self, const tmedia_param_t* param)
{
{
tdav_producer_dsound_t* dsound = (tdav_producer_dsound_t*)self;
if (param->plugin_type == tmedia_ppt_producer) {
if (param->value_type == tmedia_pvt_int32) {
@ -186,8 +181,8 @@ static int tdav_producer_dsound_prepare(tmedia_producer_t* self, const tmedia_co
{
HRESULT hr;
WAVEFORMATEX wfx = {0};
DSCBUFFERDESC dsbd = {0};
WAVEFORMATEX wfx = { 0 };
DSCBUFFERDESC dsbd = { 0 };
tdav_producer_dsound_t* dsound = (tdav_producer_dsound_t*)self;
@ -221,20 +216,20 @@ static int tdav_producer_dsound_prepare(tmedia_producer_t* self, const tmedia_co
wfx.nChannels = TMEDIA_PRODUCER(dsound)->audio.channels;
wfx.nSamplesPerSec = TMEDIA_PRODUCER(dsound)->audio.rate;
wfx.wBitsPerSample = TMEDIA_PRODUCER(dsound)->audio.bits_per_sample;
wfx.nBlockAlign = (wfx.nChannels * wfx.wBitsPerSample/8);
wfx.nBlockAlign = (wfx.nChannels * wfx.wBitsPerSample / 8);
wfx.nAvgBytesPerSec = (wfx.nSamplesPerSec * wfx.nBlockAlign);
/* Average bytes (count) for each notification */
dsound->bytes_per_notif_size = ((wfx.nAvgBytesPerSec * TMEDIA_PRODUCER(dsound)->audio.ptime)/1000);
dsound->bytes_per_notif_size = ((wfx.nAvgBytesPerSec * TMEDIA_PRODUCER(dsound)->audio.ptime) / 1000);
dsbd.dwSize = sizeof(DSCBUFFERDESC);
dsbd.dwBufferBytes = (TDAV_DSOUND_PRODUCER_NOTIF_POS_COUNT * dsound->bytes_per_notif_size);
dsbd.dwBufferBytes = (DWORD)(TDAV_DSOUND_PRODUCER_NOTIF_POS_COUNT * dsound->bytes_per_notif_size);
dsbd.lpwfxFormat = &wfx;
if ((hr = IDirectSoundCapture_CreateCaptureBuffer(dsound->device, &dsbd, &dsound->captureBuffer, NULL)) != DS_OK) {
tdav_win32_print_error("IDirectSoundCapture_CreateCaptureBuffer", hr);
return -4;
}
}
return 0;
}
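/* Illustrative sketch (not part of the commit): the arithmetic behind
 * bytes_per_notif_size computed above, with a worked number -- for 16 kHz,
 * 16-bit mono audio nAvgBytesPerSec is 32000, so a 20 ms ptime gives
 * 32000 * 20 / 1000 = 640 bytes per notification period. The (DWORD) casts
 * added throughout this file exist because bytes_per_notif_size may be held
 * in a type wider than 32 bits on Win64. Helper name is hypothetical;
 * <windows.h> and <dsound.h> are assumed for DWORD/DSBPOSITIONNOTIFY. */
static void example_notif_offsets(DSBPOSITIONNOTIFY *notifs, HANDLE *events,
                                  tsk_size_t bytes_per_notif, unsigned count)
{
    unsigned i;
    for (i = 0; i < count; ++i) {
        /* bytes_per_notif is 64-bit on Win64, dwOffset is a 32-bit DWORD:
           the explicit cast silences the MSVC narrowing warnings */
        notifs[i].dwOffset = (DWORD)(bytes_per_notif * i);
        notifs[i].hEventNotify = events[i];
    }
}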
@ -247,51 +242,51 @@ static int tdav_producer_dsound_start(tmedia_producer_t* self)
DWORD dwOffset;
HRESULT hr;
LPDIRECTSOUNDNOTIFY lpDSBNotify;
DSBPOSITIONNOTIFY pPosNotify[TDAV_DSOUND_PRODUCER_NOTIF_POS_COUNT] = {0};
DSBPOSITIONNOTIFY pPosNotify[TDAV_DSOUND_PRODUCER_NOTIF_POS_COUNT] = { 0 };
if (!dsound) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if (!dsound->device || !dsound->captureBuffer) {
TSK_DEBUG_ERROR("Producer not prepared");
return -2;
}
if (dsound->started) {
return 0;
}
if ((hr = IDirectSoundCaptureBuffer_QueryInterface(dsound->captureBuffer, &IID_IDirectSoundNotify, (LPVOID*)&lpDSBNotify)) != DS_OK) {
tdav_win32_print_error("IDirectSoundCaptureBuffer_QueryInterface", hr);
return -3;
}
/* Events associated to notification points */
dwOffset = dsound->bytes_per_notif_size - 1;
for (i = 0; i<(sizeof(dsound->notifEvents)/sizeof(dsound->notifEvents[0])); i++){
dwOffset = (DWORD)(dsound->bytes_per_notif_size - 1);
for (i = 0; i < (sizeof(dsound->notifEvents) / sizeof(dsound->notifEvents[0])); i++){
dsound->notifEvents[i] = CreateEvent(NULL, FALSE, FALSE, NULL);
pPosNotify[i].dwOffset = dwOffset;
pPosNotify[i].hEventNotify = dsound->notifEvents[i];
dwOffset += dsound->bytes_per_notif_size;
dwOffset += (DWORD)dsound->bytes_per_notif_size;
}
if ((hr = IDirectSoundNotify_SetNotificationPositions(lpDSBNotify, TDAV_DSOUND_PRODUCER_NOTIF_POS_COUNT, pPosNotify)) != DS_OK) {
IDirectSoundNotify_Release(lpDSBNotify);
tdav_win32_print_error("IDirectSoundBuffer_QueryInterface", hr);
return -4;
}
if ((hr = IDirectSoundNotify_Release(lpDSBNotify))) {
tdav_win32_print_error("IDirectSoundNotify_Release", hr);
}
/* Start the buffer */
if ((hr = IDirectSoundCaptureBuffer_Start(dsound->captureBuffer, DSBPLAY_LOOPING)) != DS_OK) {
tdav_win32_print_error("IDirectSoundCaptureBuffer_Start", hr);
return -5;
}
/* start the reader thread */
dsound->started = tsk_true;
tsk_thread_create(&dsound->tid[0], _tdav_producer_dsound_record_thread, dsound);
@ -326,7 +321,7 @@ static int tdav_producer_dsound_stop(tmedia_producer_t* self)
if (dsound->mute && dsound->notifEvents[0]) {
// thread is paused -> raise event now that "started" is equal to false
SetEvent(dsound->notifEvents[0]);
}
}
#endif
// stop thread
@ -357,13 +352,13 @@ static tsk_object_t* tdav_producer_dsound_ctor(tsk_object_t * self, va_list * ap
/* init base */
tdav_producer_audio_init(TDAV_PRODUCER_AUDIO(producer));
/* init self */
}
return self;
}
/* destructor */
static tsk_object_t* tdav_producer_dsound_dtor(tsk_object_t * self)
{
{
tdav_producer_dsound_t *dsound = self;
if (dsound) {
/* stop */
@ -380,21 +375,21 @@ static tsk_object_t* tdav_producer_dsound_dtor(tsk_object_t * self)
return self;
}
/* object definition */
static const tsk_object_def_t tdav_producer_dsound_def_s =
static const tsk_object_def_t tdav_producer_dsound_def_s =
{
sizeof(tdav_producer_dsound_t),
tdav_producer_dsound_ctor,
tdav_producer_dsound_ctor,
tdav_producer_dsound_dtor,
tdav_producer_audio_cmp,
tdav_producer_audio_cmp,
};
/* plugin definition*/
static const tmedia_producer_plugin_def_t tdav_producer_dsound_plugin_def_s =
static const tmedia_producer_plugin_def_t tdav_producer_dsound_plugin_def_s =
{
&tdav_producer_dsound_def_s,
tmedia_audio,
"Microsoft DirectSound producer",
tdav_producer_dsound_set,
tdav_producer_dsound_prepare,
tdav_producer_dsound_start,

View File

@ -1,29 +1,24 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
* Copyright (C) 2010-2015 Mamadou DIOP.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
*/
/**@file tdav_consumer_audio.c
* @brief Base class for all Audio consumers.
*
* @author Mamadou Diop <diopmamadou(at)doubango.org>
*/
#include "tinydav/audio/tdav_consumer_audio.h"
@ -60,12 +55,12 @@ int tdav_consumer_audio_init(tdav_consumer_audio_t* self)
TSK_DEBUG_INFO("tdav_consumer_audio_init()");
if(!self){
if (!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
/* base */
if((ret = tmedia_consumer_init(TMEDIA_CONSUMER(self)))){
if ((ret = tmedia_consumer_init(TMEDIA_CONSUMER(self)))){
return ret;
}
@ -91,22 +86,24 @@ int tdav_consumer_audio_init(tdav_consumer_audio_t* self)
* >0 : @a consumer1 greater than @a consumer2.<br>
*/
int tdav_consumer_audio_cmp(const tsk_object_t* consumer1, const tsk_object_t* consumer2)
{
return (TDAV_CONSUMER_AUDIO(consumer1) - TDAV_CONSUMER_AUDIO(consumer2));
{
int ret;
tsk_subsat_int32_ptr(consumer1, consumer2, &ret);
return ret;
}
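/* Illustrative sketch (an assumption about tinySAK, not its actual source):
 * what a saturating 32-bit pointer comparison such as tsk_subsat_int32_ptr()
 * can look like, and why the old `ptr1 - ptr2` return no longer works -- on
 * Win64 the pointer difference is 64-bit, so squeezing it into the comparator's
 * int return value could truncate and even flip its sign. Clamping preserves
 * the ordering/equality semantics the object comparators need. */
#include <stddef.h>
#include <stdint.h>

static void example_subsat_int32_ptr(const void *p1, const void *p2, int *ret)
{
    /* casting through char* mirrors how the old comparators subtracted object pointers */
    const ptrdiff_t diff = (const char *)p1 - (const char *)p2;
    if (diff > INT32_MAX) {
        *ret = INT32_MAX;
    }
    else if (diff < INT32_MIN) {
        *ret = INT32_MIN;
    }
    else {
        *ret = (int)diff;
    }
}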
int tdav_consumer_audio_set(tdav_consumer_audio_t* self, const tmedia_param_t* param)
{
if(!self){
if (!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if(param->plugin_type == tmedia_ppt_consumer){
if(param->value_type == tmedia_pvt_int32){
if(tsk_striequals(param->key, "gain")){
if (param->plugin_type == tmedia_ppt_consumer){
if (param->value_type == tmedia_pvt_int32){
if (tsk_striequals(param->key, "gain")){
int32_t gain = *((int32_t*)param->value);
if(gain<TDAV_AUDIO_GAIN_MAX && gain>=0){
if (gain < TDAV_AUDIO_GAIN_MAX && gain >= 0){
TMEDIA_CONSUMER(self)->audio.gain = (uint8_t)gain;
TSK_DEBUG_INFO("audio consumer gain=%u", gain);
}
@ -115,7 +112,7 @@ int tdav_consumer_audio_set(tdav_consumer_audio_t* self, const tmedia_param_t* p
return -2;
}
}
else if(tsk_striequals(param->key, "volume")){
else if (tsk_striequals(param->key, "volume")){
TMEDIA_CONSUMER(self)->audio.volume = TSK_TO_INT32((uint8_t*)param->value);
TMEDIA_CONSUMER(self)->audio.volume = TSK_CLAMP(0, TMEDIA_CONSUMER(self)->audio.volume, 100);
}
@ -130,23 +127,23 @@ int tdav_consumer_audio_put(tdav_consumer_audio_t* self, const void* data, tsk_s
{
int ret;
if(!self || !data || !self->jitterbuffer){
if (!self || !data || !self->jitterbuffer){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
tsk_safeobj_lock(self);
if(!TMEDIA_JITTER_BUFFER(self->jitterbuffer)->opened){
if (!TMEDIA_JITTER_BUFFER(self->jitterbuffer)->opened){
uint32_t rate = TMEDIA_CONSUMER(self)->audio.out.rate ? TMEDIA_CONSUMER(self)->audio.out.rate : TMEDIA_CONSUMER(self)->audio.in.rate;
uint32_t channels = TMEDIA_CONSUMER(self)->audio.out.channels ? TMEDIA_CONSUMER(self)->audio.out.channels : tmedia_defaults_get_audio_channels_playback();
if((ret = tmedia_jitterbuffer_open(self->jitterbuffer, TMEDIA_CONSUMER(self)->audio.ptime, rate, channels))){
if ((ret = tmedia_jitterbuffer_open(self->jitterbuffer, TMEDIA_CONSUMER(self)->audio.ptime, rate, channels))){
TSK_DEBUG_ERROR("Failed to open jitterbuffer (%d)", ret);
tsk_safeobj_unlock(self);
return ret;
}
}
ret = tmedia_jitterbuffer_put(self->jitterbuffer, (void*)data, data_size, proto_hdr);
tsk_safeobj_unlock(self);
@ -158,19 +155,19 @@ int tdav_consumer_audio_put(tdav_consumer_audio_t* self, const void* data, tsk_s
tsk_size_t tdav_consumer_audio_get(tdav_consumer_audio_t* self, void* out_data, tsk_size_t out_size)
{
tsk_size_t ret_size = 0;
if(!self || !self->jitterbuffer){
if (!self || !self->jitterbuffer){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
tsk_safeobj_lock(self);
if(!TMEDIA_JITTER_BUFFER(self->jitterbuffer)->opened){
if (!TMEDIA_JITTER_BUFFER(self->jitterbuffer)->opened){
int ret;
uint32_t frame_duration = TMEDIA_CONSUMER(self)->audio.ptime;
uint32_t rate = TMEDIA_CONSUMER(self)->audio.out.rate ? TMEDIA_CONSUMER(self)->audio.out.rate : TMEDIA_CONSUMER(self)->audio.in.rate;
uint32_t channels = TMEDIA_CONSUMER(self)->audio.out.channels ? TMEDIA_CONSUMER(self)->audio.out.channels : tmedia_defaults_get_audio_channels_playback();
if((ret = tmedia_jitterbuffer_open(TMEDIA_JITTER_BUFFER(self->jitterbuffer), frame_duration, rate, channels))){
if ((ret = tmedia_jitterbuffer_open(TMEDIA_JITTER_BUFFER(self->jitterbuffer), frame_duration, rate, channels))){
TSK_DEBUG_ERROR("Failed to open jitterbuffer (%d)", ret);
tsk_safeobj_unlock(self);
return 0;
@ -178,14 +175,14 @@ tsk_size_t tdav_consumer_audio_get(tdav_consumer_audio_t* self, void* out_data,
}
ret_size = tmedia_jitterbuffer_get(TMEDIA_JITTER_BUFFER(self->jitterbuffer), out_data, out_size);
tsk_safeobj_unlock(self);
tsk_safeobj_unlock(self);
// denoiser
if (self->denoise && self->denoise->opened && (self->denoise->echo_supp_enabled || self->denoise->noise_supp_enabled)) {
if (self->denoise->echo_supp_enabled) {
// Echo process last frame
if (self->denoise->playback_frame && self->denoise->playback_frame->size) {
tmedia_denoise_echo_playback(self->denoise, self->denoise->playback_frame->data, self->denoise->playback_frame->size);
tmedia_denoise_echo_playback(self->denoise, self->denoise->playback_frame->data, (uint32_t)self->denoise->playback_frame->size);
}
if (ret_size){
// save
@ -196,7 +193,7 @@ tsk_size_t tdav_consumer_audio_get(tdav_consumer_audio_t* self, void* out_data,
#if 1 // suppress noise if not supported by remote party's encoder
// suppress noise
if (self->denoise->noise_supp_enabled && ret_size) {
tmedia_denoise_process_playback(self->denoise, out_data, ret_size);
tmedia_denoise_process_playback(self->denoise, out_data, (uint32_t)ret_size);
}
#endif
}
@ -206,7 +203,7 @@ tsk_size_t tdav_consumer_audio_get(tdav_consumer_audio_t* self, void* out_data,
int tdav_consumer_audio_tick(tdav_consumer_audio_t* self)
{
if(!self || !self->jitterbuffer){
if (!self || !self->jitterbuffer){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
@ -253,13 +250,13 @@ int tdav_consumer_audio_deinit(tdav_consumer_audio_t* self)
{
int ret;
if(!self){
if (!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
/* base */
if((ret = tmedia_consumer_deinit(TMEDIA_CONSUMER(self)))){
if ((ret = tmedia_consumer_deinit(TMEDIA_CONSUMER(self)))){
/* return ret; */
}

View File

@ -77,7 +77,9 @@ int tdav_producer_audio_init(tdav_producer_audio_t* self)
*/
int tdav_producer_audio_cmp(const tsk_object_t* producer1, const tsk_object_t* producer2)
{
return (TDAV_PRODUCER_AUDIO(producer1) - TDAV_PRODUCER_AUDIO(producer2));
int ret;
tsk_subsat_int32_ptr(producer1, producer2, &ret);
return ret;
}
int tdav_producer_audio_set(tdav_producer_audio_t* self, const tmedia_param_t* param)

View File

@ -1,20 +1,18 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
* Copyright (C) 2010-2015 Mamadou DIOP.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@ -98,7 +96,7 @@ static int tdav_session_audio_rtp_cb(const void* callback_data, const struct trt
TSK_DEBUG_ERROR("Failed to get decoder codec");
goto bail;
}
// Open codec if not already done
if (!TMEDIA_CODEC(codec)->opened) {
tsk_safeobj_lock(base);
@ -122,18 +120,18 @@ static int tdav_session_audio_rtp_cb(const void* callback_data, const struct trt
int bytesPerSample = (base->consumer->audio.bits_per_sample >> 3);
if (!audio->decoder.resampler.instance) {
TSK_DEBUG_INFO("Create audio resampler(%s) for consumer: rate=%d->%d, channels=%d->%d, bytesPerSample=%d",
codec->plugin->desc,
TSK_DEBUG_INFO("Create audio resampler(%s) for consumer: rate=%d->%d, channels=%d->%d, bytesPerSample=%d",
codec->plugin->desc,
codec->in.rate, base->consumer->audio.out.rate,
TMEDIA_CODEC_AUDIO(codec)->in.channels, base->consumer->audio.out.channels,
bytesPerSample);
audio->decoder.resampler.instance = _tdav_session_audio_resampler_create(
bytesPerSample,
codec->in.rate, base->consumer->audio.out.rate,
base->consumer->audio.ptime,
TMEDIA_CODEC_AUDIO(codec)->in.channels, base->consumer->audio.out.channels,
TDAV_AUDIO_RESAMPLER_DEFAULT_QUALITY,
&audio->decoder.resampler.buffer, &audio->decoder.resampler.buffer_size
bytesPerSample,
codec->in.rate, base->consumer->audio.out.rate,
base->consumer->audio.ptime,
TMEDIA_CODEC_AUDIO(codec)->in.channels, base->consumer->audio.out.channels,
TDAV_AUDIO_RESAMPLER_DEFAULT_QUALITY,
&audio->decoder.resampler.buffer, &audio->decoder.resampler.buffer_size
);
}
if (!audio->decoder.resampler.instance) {
@ -141,19 +139,19 @@ static int tdav_session_audio_rtp_cb(const void* callback_data, const struct trt
ret = -5;
goto bail;
}
if(!(resampler_result_size = tmedia_resampler_process(audio->decoder.resampler.instance, buffer, size/bytesPerSample, audio->decoder.resampler.buffer, audio->decoder.resampler.buffer_size/bytesPerSample))){
if (!(resampler_result_size = tmedia_resampler_process(audio->decoder.resampler.instance, buffer, size / bytesPerSample, audio->decoder.resampler.buffer, audio->decoder.resampler.buffer_size / bytesPerSample))){
TSK_DEBUG_ERROR("Failed to process audio resampler input buffer");
ret = -6;
goto bail;
}
buffer = audio->decoder.resampler.buffer;
size = audio->decoder.resampler.buffer_size;
}
// adjust the gain
if (base->consumer->audio.gain) {
_tdav_session_audio_apply_gain(buffer, size, base->consumer->audio.bits_per_sample, base->consumer->audio.gain);
_tdav_session_audio_apply_gain(buffer, (int)size, base->consumer->audio.bits_per_sample, base->consumer->audio.gain);
}
// consume the frame
tmedia_consumer_consume(base->consumer, buffer, size, packet->header);
@ -189,7 +187,7 @@ static int tdav_session_audio_producer_enc_cb(const void* callback_data, const v
if (TMEDIA_SESSION(audio)->lo_held) {
return 0;
}
// get best negotiated codec if not already done
// the encoder codec could be null when session is renegotiated without re-starting (e.g. hold/resume)
if (!audio->encoder.codec) {
@ -227,60 +225,60 @@ static int tdav_session_audio_producer_enc_cb(const void* callback_data, const v
TSK_DEBUG_INFO("Skiping audio frame as we're sending DTMF...");
return 0;
}
// resample if needed
if(base->producer->audio.rate != audio->encoder.codec->out.rate || base->producer->audio.channels != TMEDIA_CODEC_AUDIO(audio->encoder.codec)->out.channels){
if (base->producer->audio.rate != audio->encoder.codec->out.rate || base->producer->audio.channels != TMEDIA_CODEC_AUDIO(audio->encoder.codec)->out.channels){
tsk_size_t resampler_result_size = 0;
int bytesPerSample = (base->producer->audio.bits_per_sample >> 3);
if(!audio->encoder.resampler.instance){
TSK_DEBUG_INFO("Create audio resampler(%s) for producer: rate=%d->%d, channels=%d->%d, bytesPerSample=%d",
audio->encoder.codec->plugin->desc,
if (!audio->encoder.resampler.instance){
TSK_DEBUG_INFO("Create audio resampler(%s) for producer: rate=%d->%d, channels=%d->%d, bytesPerSample=%d",
audio->encoder.codec->plugin->desc,
base->producer->audio.rate, audio->encoder.codec->out.rate,
base->producer->audio.channels, TMEDIA_CODEC_AUDIO(audio->encoder.codec)->out.channels,
bytesPerSample);
audio->encoder.resampler.instance = _tdav_session_audio_resampler_create(
bytesPerSample,
base->producer->audio.rate, audio->encoder.codec->out.rate,
base->producer->audio.ptime,
base->producer->audio.channels, TMEDIA_CODEC_AUDIO(audio->encoder.codec)->out.channels,
TDAV_AUDIO_RESAMPLER_DEFAULT_QUALITY,
&audio->encoder.resampler.buffer, &audio->encoder.resampler.buffer_size
bytesPerSample,
base->producer->audio.rate, audio->encoder.codec->out.rate,
base->producer->audio.ptime,
base->producer->audio.channels, TMEDIA_CODEC_AUDIO(audio->encoder.codec)->out.channels,
TDAV_AUDIO_RESAMPLER_DEFAULT_QUALITY,
&audio->encoder.resampler.buffer, &audio->encoder.resampler.buffer_size
);
}
if(!audio->encoder.resampler.instance){
if (!audio->encoder.resampler.instance){
TSK_DEBUG_ERROR("No resampler to handle data");
ret = -1;
goto done;
}
if(!(resampler_result_size = tmedia_resampler_process(audio->encoder.resampler.instance, buffer, size/bytesPerSample, audio->encoder.resampler.buffer, audio->encoder.resampler.buffer_size/bytesPerSample))){
if (!(resampler_result_size = tmedia_resampler_process(audio->encoder.resampler.instance, buffer, size / bytesPerSample, audio->encoder.resampler.buffer, audio->encoder.resampler.buffer_size / bytesPerSample))){
TSK_DEBUG_ERROR("Failed to process audio resampler input buffer");
ret = -1;
goto done;
}
buffer = audio->encoder.resampler.buffer;
size = audio->encoder.resampler.buffer_size;
}
// Denoise (VAD, AGC, Noise suppression, ...)
// Must be done after resampling
if(audio->denoise){
if (audio->denoise){
tsk_bool_t silence_or_noise = tsk_false;
if(audio->denoise->echo_supp_enabled){
ret = tmedia_denoise_process_record(TMEDIA_DENOISE(audio->denoise), (void*)buffer, size, &silence_or_noise);
if (audio->denoise->echo_supp_enabled){
ret = tmedia_denoise_process_record(TMEDIA_DENOISE(audio->denoise), (void*)buffer, (uint32_t)size, &silence_or_noise);
}
}
// adjust the gain
// Must be done after resampling
if(base->producer->audio.gain){
_tdav_session_audio_apply_gain((void*)buffer, size, base->producer->audio.bits_per_sample, base->producer->audio.gain);
if (base->producer->audio.gain){
_tdav_session_audio_apply_gain((void*)buffer, (int)size, base->producer->audio.bits_per_sample, base->producer->audio.gain);
}
// Encode data
if((audio->encoder.codec = tsk_object_ref(audio->encoder.codec))){ /* Thread safeness (SIP reINVITE or UPDATE could update the encoder) */
if ((audio->encoder.codec = tsk_object_ref(audio->encoder.codec))){ /* Thread safeness (SIP reINVITE or UPDATE could update the encoder) */
out_size = audio->encoder.codec->plugin->encode(audio->encoder.codec, buffer, size, &audio->encoder.buffer, &audio->encoder.buffer_size);
if(out_size){
if (out_size){
trtp_manager_send_rtp(base->rtp_manager, audio->encoder.buffer, out_size, TMEDIA_CODEC_FRAME_DURATION_AUDIO_ENCODING(audio->encoder.codec), tsk_false/*Marker*/, tsk_true/*lastPacket*/);
}
tsk_object_unref(audio->encoder.codec);
@ -302,32 +300,32 @@ static int tdav_session_audio_set(tmedia_session_t* self, const tmedia_param_t*
int ret = 0;
tdav_session_audio_t* audio;
if(!self){
if (!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if(tdav_session_av_set(TDAV_SESSION_AV(self), param) == tsk_true){
if (tdav_session_av_set(TDAV_SESSION_AV(self), param) == tsk_true){
return 0;
}
audio = (tdav_session_audio_t*)self;
if(param->plugin_type == tmedia_ppt_consumer){
if (param->plugin_type == tmedia_ppt_consumer){
TSK_DEBUG_ERROR("Not expected consumer_set(%s)", param->key);
}
else if(param->plugin_type == tmedia_ppt_producer){
else if (param->plugin_type == tmedia_ppt_producer){
TSK_DEBUG_ERROR("Not expected producer_set(%s)", param->key);
}
else{
if(param->value_type == tmedia_pvt_int32){
if(tsk_striequals(param->key, "echo-supp")){
if(audio->denoise){
if (param->value_type == tmedia_pvt_int32){
if (tsk_striequals(param->key, "echo-supp")){
if (audio->denoise){
audio->denoise->echo_supp_enabled = (TSK_TO_INT32((uint8_t*)param->value) != 0);
}
}
else if(tsk_striequals(param->key, "echo-tail")){
if(audio->denoise){
else if (tsk_striequals(param->key, "echo-tail")){
if (audio->denoise){
return tmedia_denoise_set(audio->denoise, param);
}
}
@ -339,24 +337,24 @@ static int tdav_session_audio_set(tmedia_session_t* self, const tmedia_param_t*
static int tdav_session_audio_get(tmedia_session_t* self, tmedia_param_t* param)
{
if(!self || !param){
if (!self || !param){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if(tdav_session_av_get(TDAV_SESSION_AV(self), param) == tsk_true){
if (tdav_session_av_get(TDAV_SESSION_AV(self), param) == tsk_true){
return 0;
}
// the codec information is held by the session even if the user is authorized to request it for the consumer/producer
if(tsk_striequals("codec", param->key) && param->value_type == tmedia_pvt_pobject){
if(param->plugin_type == tmedia_ppt_consumer){
if (tsk_striequals("codec", param->key) && param->value_type == tmedia_pvt_pobject){
if (param->plugin_type == tmedia_ppt_consumer){
TSK_DEBUG_ERROR("Not implemented");
return -4;
}
else if(param->plugin_type == tmedia_ppt_producer){
else if (param->plugin_type == tmedia_ppt_producer){
const tmedia_codec_t* codec;
if(!(codec = TDAV_SESSION_AUDIO(self)->encoder.codec)){
if (!(codec = TDAV_SESSION_AUDIO(self)->encoder.codec)){
codec = tdav_session_av_get_best_neg_codec((const tdav_session_av_t*)self);
}
*((tsk_object_t**)param->value) = tsk_object_ref(TSK_OBJECT(codec));
@ -373,12 +371,12 @@ static int tdav_session_audio_prepare(tmedia_session_t* self)
tdav_session_av_t* base = (tdav_session_av_t*)(self);
int ret;
if((ret = tdav_session_av_prepare(base))){
if ((ret = tdav_session_av_prepare(base))){
TSK_DEBUG_ERROR("tdav_session_av_prepare(audio) failed");
return ret;
}
if(base->rtp_manager){
if (base->rtp_manager){
ret = trtp_manager_set_rtp_callback(base->rtp_manager, tdav_session_audio_rtp_cb, base);
}
@ -392,7 +390,7 @@ static int tdav_session_audio_start(tmedia_session_t* self)
const tmedia_codec_t* codec;
tdav_session_av_t* base;
if(!self){
if (!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
@ -400,25 +398,25 @@ static int tdav_session_audio_start(tmedia_session_t* self)
audio = (tdav_session_audio_t*)self;
base = (tdav_session_av_t*)self;
if(!(codec = tdav_session_av_get_best_neg_codec(base))){
if (!(codec = tdav_session_av_get_best_neg_codec(base))){
TSK_DEBUG_ERROR("No codec matched");
return -2;
}
TSK_OBJECT_SAFE_FREE(audio->encoder.codec);
audio->encoder.codec = tsk_object_ref((tsk_object_t*)codec);
if((ret = tdav_session_av_start(base, codec))){
if ((ret = tdav_session_av_start(base, codec))){
TSK_DEBUG_ERROR("tdav_session_av_start(audio) failed");
return ret;
}
if(base->rtp_manager){
if (base->rtp_manager){
/* Denoise (AEC, Noise Suppression, AGC)
* tmedia_denoise_process_record() is called after resampling and before encoding which means sampling rate is equal to codec's rate
* tmedia_denoise_echo_playback() is called before playback which means sampling rate is equal to consumer's rate
*/
if(audio->denoise){
if (audio->denoise){
uint32_t record_frame_size_samples = TMEDIA_CODEC_PCM_FRAME_SIZE_AUDIO_ENCODING(audio->encoder.codec);
uint32_t record_sampling_rate = TMEDIA_CODEC_RATE_ENCODING(audio->encoder.codec);
uint32_t record_channels = TMEDIA_CODEC_CHANNELS_AUDIO_ENCODING(audio->encoder.codec);
@ -433,13 +431,13 @@ static int tdav_session_audio_start(tmedia_session_t* self)
? base->consumer->audio.out.channels
: TMEDIA_CODEC_CHANNELS_AUDIO_DECODING(audio->encoder.codec);
TSK_DEBUG_INFO("Audio denoiser to be opened(record_frame_size_samples=%u, record_sampling_rate=%u, record_channels=%u, playback_frame_size_samples=%u, playback_sampling_rate=%u, playback_channels=%u)",
TSK_DEBUG_INFO("Audio denoiser to be opened(record_frame_size_samples=%u, record_sampling_rate=%u, record_channels=%u, playback_frame_size_samples=%u, playback_sampling_rate=%u, playback_channels=%u)",
record_frame_size_samples, record_sampling_rate, record_channels, playback_frame_size_samples, playback_sampling_rate, playback_channels);
// close()
tmedia_denoise_close(audio->denoise);
// open() with new values
tmedia_denoise_open(audio->denoise,
tmedia_denoise_open(audio->denoise,
record_frame_size_samples, record_sampling_rate, TSK_CLAMP(1, record_channels, 2),
playback_frame_size_samples, playback_sampling_rate, TSK_CLAMP(1, playback_channels, 2));
}
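/* Illustrative sketch (not part of this commit): how the record/playback frame
 * sizes passed to tmedia_denoise_open() above typically relate to rate, ptime
 * and channel count (frame = rate * ptime / 1000 * channels). The 48 kHz,
 * 44.1 kHz and 20 ms values below are assumptions for the example only. */
#include <stdio.h>

int main(void)
{
    const unsigned ptime_ms = 20;
    const unsigned record_rate = 48000, record_channels = 1;     /* codec (encoding) side */
    const unsigned playback_rate = 44100, playback_channels = 2; /* consumer (playback) side */

    unsigned record_frame   = (record_rate   * ptime_ms / 1000) * record_channels;   /* 960 samples  */
    unsigned playback_frame = (playback_rate * ptime_ms / 1000) * playback_channels; /* 1764 samples */

    printf("record=%u samples/frame, playback=%u samples/frame\n", record_frame, playback_frame);
    return 0;
}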
@ -461,10 +459,10 @@ static int tdav_session_audio_stop(tmedia_session_t* self)
// close the jitter buffer and denoiser to be sure it will be reopened and reinitialized if reINVITE or UPDATE
// this is a "must" when the initial and updated sessions use codecs with different rate
if(audio->jitterbuffer && audio->jitterbuffer->opened) {
if (audio->jitterbuffer && audio->jitterbuffer->opened) {
ret = tmedia_jitterbuffer_close(audio->jitterbuffer);
}
if(audio->denoise && audio->denoise->opened) {
if (audio->denoise && audio->denoise->opened) {
ret = tmedia_denoise_close(audio->denoise);
}
return ret;
@ -480,7 +478,7 @@ static int tdav_session_audio_send_dtmf(tmedia_session_t* self, uint8_t event)
tdav_session_audio_dtmfe_t *dtmfe, *copy;
int format = 101;
if(!self){
if (!self){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
@ -489,34 +487,34 @@ static int tdav_session_audio_send_dtmf(tmedia_session_t* self, uint8_t event)
base = (tdav_session_av_t*)self;
// Find the DTMF codec to use to use the RTP payload
if((codec = tmedia_codec_find_by_format(TMEDIA_SESSION(audio)->codecs, TMEDIA_CODEC_FORMAT_DTMF))){
if ((codec = tmedia_codec_find_by_format(TMEDIA_SESSION(audio)->codecs, TMEDIA_CODEC_FORMAT_DTMF))){
rate = (int)codec->out.rate;
format = atoi(codec->neg_format ? codec->neg_format : codec->format);
TSK_OBJECT_SAFE_FREE(codec);
}
/* do we have an RTP manager? */
if(!base->rtp_manager){
if (!base->rtp_manager){
TSK_DEBUG_ERROR("No RTP manager associated to this session");
return -2;
}
/* Create Events list */
if(!audio->dtmf_events){
if (!audio->dtmf_events){
audio->dtmf_events = tsk_list_create();
}
/* Create global reference to the timer manager */
if(!audio->timer.handle_mgr_global){
if(!(audio->timer.handle_mgr_global = tsk_timer_mgr_global_ref())){
if (!audio->timer.handle_mgr_global){
if (!(audio->timer.handle_mgr_global = tsk_timer_mgr_global_ref())){
TSK_DEBUG_ERROR("Failed to create Global Timer Manager");
return -3;
}
}
/* Start the timer manager */
if(!audio->timer.started){
if((ret = tsk_timer_manager_start(audio->timer.handle_mgr_global))){
if (!audio->timer.started){
if ((ret = tsk_timer_manager_start(audio->timer.handle_mgr_global))){
TSK_DEBUG_ERROR("Failed to start Global Timer Manager");
return ret;
}
@ -575,25 +573,25 @@ static int tdav_session_audio_send_dtmf(tmedia_session_t* self, uint8_t event)
// lock() list
tsk_list_lock(audio->dtmf_events);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration*1, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_true, tsk_false);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration * 1, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_true, tsk_false);
tsk_list_push_back_data(audio->dtmf_events, (void**)&dtmfe);
tsk_timer_mgr_global_schedule(ptime*0, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration*2, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_false);
tsk_timer_mgr_global_schedule(ptime * 0, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration * 2, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_false);
tsk_list_push_back_data(audio->dtmf_events, (void**)&dtmfe);
tsk_timer_mgr_global_schedule(ptime*1, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration*3, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_false);
tsk_timer_mgr_global_schedule(ptime * 1, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration * 3, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_false);
tsk_list_push_back_data(audio->dtmf_events, (void**)&dtmfe);
tsk_timer_mgr_global_schedule(ptime*2, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration*4, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_false);
tsk_timer_mgr_global_schedule(ptime * 2, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration * 4, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_false);
tsk_list_push_back_data(audio->dtmf_events, (void**)&dtmfe);
tsk_timer_mgr_global_schedule(ptime*3, _tdav_session_audio_dtmfe_timercb, copy);
tsk_timer_mgr_global_schedule(ptime * 3, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration*4, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_true);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration * 4, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_true);
tsk_list_push_back_data(audio->dtmf_events, (void**)&dtmfe);
tsk_timer_mgr_global_schedule(ptime*4, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration*4, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_true);
tsk_timer_mgr_global_schedule(ptime * 4, _tdav_session_audio_dtmfe_timercb, copy);
copy = dtmfe = _tdav_session_audio_dtmfe_create(audio, event, duration * 4, ++base->rtp_manager->rtp.seq_num, base->rtp_manager->rtp.timestamp, (uint8_t)format, tsk_false, tsk_true);
tsk_list_push_back_data(audio->dtmf_events, (void**)&dtmfe);
tsk_timer_mgr_global_schedule(ptime*5, _tdav_session_audio_dtmfe_timercb, copy);
tsk_timer_mgr_global_schedule(ptime * 5, _tdav_session_audio_dtmfe_timercb, copy);
// unlock() list
tsk_list_unlock(audio->dtmf_events);
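/* Illustrative sketch (not part of this commit): the RFC 4733 event train built
 * above, assuming ptime = 20 ms and duration = rate * ptime / 1000 timestamp
 * units. All six packets reuse the same RTP timestamp; the marker bit is set on
 * the first packet only and the end (E) bit on the last two, the second of which
 * is a repeat for reliability. */
#include <stdio.h>

int main(void)
{
    const unsigned rate = 8000, ptime = 20;
    const unsigned duration = rate * ptime / 1000; /* 160 units per 20 ms */
    const struct { unsigned at_ms, dur, marker, end; } train[] = {
        { 0 * ptime, 1 * duration, 1, 0 },
        { 1 * ptime, 2 * duration, 0, 0 },
        { 2 * ptime, 3 * duration, 0, 0 },
        { 3 * ptime, 4 * duration, 0, 0 },
        { 4 * ptime, 4 * duration, 0, 1 }, /* end packet */
        { 5 * ptime, 4 * duration, 0, 1 }, /* end packet repeated */
    };
    unsigned i;
    for (i = 0; i < sizeof(train) / sizeof(train[0]); ++i) {
        printf("t=%ums duration=%u M=%u E=%u\n", train[i].at_ms, train[i].dur, train[i].marker, train[i].end);
    }
    return 0;
}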
@ -603,7 +601,7 @@ static int tdav_session_audio_send_dtmf(tmedia_session_t* self, uint8_t event)
// unref()(thread safeness)
audio = tsk_object_unref(audio);
return 0;
}
@ -619,17 +617,17 @@ static const tsdp_header_M_t* tdav_session_audio_get_lo(tmedia_session_t* self)
tdav_session_av_t* base = TDAV_SESSION_AV(self);
if(!(ret = tdav_session_av_get_lo(base, &updated))){
if (!(ret = tdav_session_av_get_lo(base, &updated))){
TSK_DEBUG_ERROR("tdav_session_av_get_lo(audio) failed");
return tsk_null;
}
if(updated){
if (updated){
tsk_safeobj_lock(base);
TSK_OBJECT_SAFE_FREE(TDAV_SESSION_AUDIO(self)->encoder.codec);
tsk_safeobj_unlock(base);
}
return ret;
}
@ -653,7 +651,7 @@ static int tdav_session_audio_set_ro(tmedia_session_t* self, const tsdp_header_M
// destroy encoder to force requesting new one
TSK_OBJECT_SAFE_FREE(TDAV_SESSION_AUDIO(self)->encoder.codec);
tsk_safeobj_unlock(base);
}
}
return ret;
}
@ -691,7 +689,7 @@ static tdav_session_audio_dtmfe_t* _tdav_session_audio_dtmfe_create(const tdav_s
static uint8_t volume = 10;
static uint32_t ssrc = 0x5234A8;
uint8_t pay[4] = {0};
uint8_t pay[4] = { 0 };
/* RFC 4733 - 2.3. Payload Format
0 1 2 3
@ -701,13 +699,13 @@ static tdav_session_audio_dtmfe_t* _tdav_session_audio_dtmfe_create(const tdav_s
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
if(!(dtmfe = tsk_object_new(tdav_session_audio_dtmfe_def_t))){
if (!(dtmfe = tsk_object_new(tdav_session_audio_dtmfe_def_t))){
TSK_DEBUG_ERROR("Failed to create new DTMF event");
return tsk_null;
}
dtmfe->session = session;
if(!(dtmfe->packet = trtp_rtp_packet_create((session && base->rtp_manager) ? base->rtp_manager->rtp.ssrc.local : ssrc, seq, timestamp, format, M))){
if (!(dtmfe->packet = trtp_rtp_packet_create((session && base->rtp_manager) ? base->rtp_manager->rtp.ssrc.local : ssrc, seq, timestamp, format, M))){
TSK_DEBUG_ERROR("Failed to create DTMF RTP packet");
TSK_OBJECT_SAFE_FREE(dtmfe);
return tsk_null;
@ -719,7 +717,7 @@ static tdav_session_audio_dtmfe_t* _tdav_session_audio_dtmfe_create(const tdav_s
pay[3] = (duration & 0xFF);
/* set data */
if((dtmfe->packet->payload.data = tsk_calloc(sizeof(pay), sizeof(uint8_t)))){
if ((dtmfe->packet->payload.data = tsk_calloc(sizeof(pay), sizeof(uint8_t)))){
memcpy(dtmfe->packet->payload.data, pay, sizeof(pay));
dtmfe->packet->payload.size = sizeof(pay);
}
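/* Illustrative sketch (not part of this commit): packing the 4-byte RFC 4733
 * telephone-event payload laid out in the diagram above (event code, E bit,
 * 6-bit volume, 16-bit duration in network byte order). */
#include <stdint.h>
#include <stdio.h>

static void pack_rfc4733(uint8_t pay[4], uint8_t event, int end, uint8_t volume, uint16_t duration)
{
    pay[0] = event;                                              /* e.g. 0..11 for DTMF digits */
    pay[1] = (uint8_t)(((end ? 1 : 0) << 7) | (volume & 0x3F));  /* E bit, R = 0, volume */
    pay[2] = (uint8_t)(duration >> 8);                           /* duration, big endian */
    pay[3] = (uint8_t)(duration & 0xFF);
}

int main(void)
{
    uint8_t pay[4];
    pack_rfc4733(pay, 5 /* digit '5' */, 1, 10, 640);
    printf("%02x %02x %02x %02x\n", pay[0], pay[1], pay[2], pay[3]); /* 05 8a 02 80 */
    return 0;
}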
@ -732,7 +730,7 @@ static int _tdav_session_audio_dtmfe_timercb(const void* arg, tsk_timer_id_t tim
tdav_session_audio_dtmfe_t* dtmfe = (tdav_session_audio_dtmfe_t*)arg;
tdav_session_audio_t *audio;
if(!dtmfe || !dtmfe->session || !dtmfe->session->dtmf_events){
if (!dtmfe || !dtmfe->session || !dtmfe->session->dtmf_events){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
@ -741,7 +739,7 @@ static int _tdav_session_audio_dtmfe_timercb(const void* arg, tsk_timer_id_t tim
TSK_DEBUG_INFO("Sending DTMF event...");
trtp_manager_send_rtp_packet(TDAV_SESSION_AV(dtmfe->session)->rtp_manager, dtmfe->packet, tsk_false);
audio = tsk_object_ref(TSK_OBJECT(dtmfe->session));
tsk_list_lock(audio->dtmf_events);
/* Remove and delete the event from the queue */
@ -760,12 +758,12 @@ static tmedia_resampler_t* _tdav_session_audio_resampler_create(int32_t bytes_pe
tmedia_resampler_t* resampler;
int ret;
if(out_channels > 2 || in_channels > 2) {
if (out_channels > 2 || in_channels > 2) {
TSK_DEBUG_ERROR("Invalid parameter: out_channels=%u, in_channels=%u", out_channels, in_channels);
return tsk_null;
}
resampler_buff_size = (((out_freq * frame_duration)/1000) * bytes_per_sample) << (out_channels == 2 ? 1 : 0);
resampler_buff_size = (((out_freq * frame_duration) / 1000) * bytes_per_sample) << (out_channels == 2 ? 1 : 0);
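/* Illustrative worked example (not part of this commit) of the sizing above,
 * assuming out_freq = 48000 Hz, frame_duration = 20 ms, bytes_per_sample = 2,
 * out_channels = 2: ((48000 * 20) / 1000) * 2 = 1920 bytes for mono, shifted
 * left once for stereo = 3840 bytes per resampled frame. */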
if (!(resampler = tmedia_resampler_create())) {
TSK_DEBUG_ERROR("Failed to create audio resampler");
@ -799,31 +797,31 @@ done:
static tsk_object_t* tdav_session_audio_ctor(tsk_object_t * self, va_list * app)
{
tdav_session_audio_t *audio = self;
if(audio){
if (audio){
int ret;
tdav_session_av_t *base = TDAV_SESSION_AV(self);
/* init() base */
if((ret = tdav_session_av_init(base, tmedia_audio)) != 0){
if ((ret = tdav_session_av_init(base, tmedia_audio)) != 0){
TSK_DEBUG_ERROR("tdav_session_av_init(audio) failed");
return tsk_null;
}
/* init() self */
if(base->producer){
if (base->producer){
tmedia_producer_set_enc_callback(base->producer, tdav_session_audio_producer_enc_cb, audio);
}
if(base->consumer){
if (base->consumer){
// It's important to create the denoiser and jitter buffer here as dynamic plugins (from shared libs) don't have access to the registry
if(!(audio->denoise = tmedia_denoise_create())){
if (!(audio->denoise = tmedia_denoise_create())){
TSK_DEBUG_WARN("No Audio denoiser found");
}
else{
// IMPORTANT: This means that the consumer must be child of "tdav_consumer_audio_t" object
tdav_consumer_audio_set_denoise(TDAV_CONSUMER_AUDIO(base->consumer), audio->denoise);
}
if(!(audio->jitterbuffer = tmedia_jitterbuffer_create(tmedia_audio))){
if (!(audio->jitterbuffer = tmedia_jitterbuffer_create(tmedia_audio))){
TSK_DEBUG_ERROR("Failed to create jitter buffer");
}
else{
@ -836,16 +834,16 @@ static tsk_object_t* tdav_session_audio_ctor(tsk_object_t * self, va_list * app)
}
/* destructor */
static tsk_object_t* tdav_session_audio_dtor(tsk_object_t * self)
{
{
tdav_session_audio_t *audio = self;
TSK_DEBUG_INFO("*** tdav_session_audio_t destroyed ***");
if(audio){
if (audio){
tdav_session_audio_stop((tmedia_session_t*)audio);
// Do it in this order (deinit self first)
/* Timer manager */
if(audio->timer.started){
if(audio->dtmf_events){
if (audio->timer.started){
if (audio->dtmf_events){
/* Cancel all events */
tsk_list_item_t* item;
tsk_list_foreach(item, audio->dtmf_events){
@ -853,12 +851,12 @@ static tsk_object_t* tdav_session_audio_dtor(tsk_object_t * self)
}
}
}
tsk_timer_mgr_global_unref(&audio->timer.handle_mgr_global);
/* CleanUp the DTMF events */
TSK_OBJECT_SAFE_FREE(audio->dtmf_events);
TSK_OBJECT_SAFE_FREE(audio->denoise);
TSK_OBJECT_SAFE_FREE(audio->jitterbuffer);
@ -866,7 +864,7 @@ static tsk_object_t* tdav_session_audio_dtor(tsk_object_t * self)
TSK_FREE(audio->encoder.buffer);
TSK_OBJECT_SAFE_FREE(audio->decoder.codec);
TSK_FREE(audio->decoder.buffer);
// free resamplers
TSK_FREE(audio->encoder.resampler.buffer);
TSK_OBJECT_SAFE_FREE(audio->encoder.resampler.instance);
@ -882,15 +880,15 @@ static tsk_object_t* tdav_session_audio_dtor(tsk_object_t * self)
return self;
}
/* object definition */
static const tsk_object_def_t tdav_session_audio_def_s =
static const tsk_object_def_t tdav_session_audio_def_s =
{
sizeof(tdav_session_audio_t),
tdav_session_audio_ctor,
tdav_session_audio_ctor,
tdav_session_audio_dtor,
tmedia_session_cmp,
tmedia_session_cmp,
};
/* plugin definition*/
static const tmedia_session_plugin_def_t tdav_session_audio_plugin_def_s =
static const tmedia_session_plugin_def_t tdav_session_audio_plugin_def_s =
{
&tdav_session_audio_def_s,
@ -913,7 +911,7 @@ static const tmedia_session_plugin_def_t tdav_session_audio_plugin_def_s =
tdav_session_audio_set_ro
};
const tmedia_session_plugin_def_t *tdav_session_audio_plugin_def_t = &tdav_session_audio_plugin_def_s;
static const tmedia_session_plugin_def_t tdav_session_bfcpaudio_plugin_def_s =
static const tmedia_session_plugin_def_t tdav_session_bfcpaudio_plugin_def_s =
{
&tdav_session_audio_def_s,
@ -945,16 +943,16 @@ const tmedia_session_plugin_def_t *tdav_session_bfcpaudio_plugin_def_t = &tdav_s
static tsk_object_t* tdav_session_audio_dtmfe_ctor(tsk_object_t * self, va_list * app)
{
tdav_session_audio_dtmfe_t *event = self;
if(event){
if (event){
event->timer_id = TSK_INVALID_TIMER_ID;
}
return self;
}
static tsk_object_t* tdav_session_audio_dtmfe_dtor(tsk_object_t * self)
{
{
tdav_session_audio_dtmfe_t *event = self;
if(event){
if (event){
TSK_OBJECT_SAFE_FREE(event->packet);
}
@ -963,17 +961,16 @@ static tsk_object_t* tdav_session_audio_dtmfe_dtor(tsk_object_t * self)
static int tdav_session_audio_dtmfe_cmp(const tsk_object_t *_e1, const tsk_object_t *_e2)
{
const tdav_session_audio_dtmfe_t *e1 = _e1;
const tdav_session_audio_dtmfe_t *e2 = _e2;
return (e1 - e2);
int ret;
tsk_subsat_int32_ptr(_e1, _e2, &ret);
return ret;
}
static const tsk_object_def_t tdav_session_audio_dtmfe_def_s =
static const tsk_object_def_t tdav_session_audio_dtmfe_def_s =
{
sizeof(tdav_session_audio_dtmfe_t),
tdav_session_audio_dtmfe_ctor,
tdav_session_audio_dtmfe_ctor,
tdav_session_audio_dtmfe_dtor,
tdav_session_audio_dtmfe_cmp,
tdav_session_audio_dtmfe_cmp,
};
const tsk_object_def_t *tdav_session_audio_dtmfe_def_t = &tdav_session_audio_dtmfe_def_s;

View File

@ -1,20 +1,18 @@
/*
* Copyright (C) 2011 Mamadou Diop.
* Copyright (C) 2011-2015 Mamadou DIOP.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@ -22,9 +20,6 @@
/**@file tdav_speex_jitterbuffer.c
* @brief Speex Audio jitterbuffer Plugin
*
* @author Mamadou Diop <diopmamadou(at)doubango.org>
*/
#include "tinydav/audio/tdav_speex_jitterbuffer.h"
#include "tinyrtp/rtp/trtp_rtp_header.h"
@ -93,7 +88,7 @@ static int tdav_speex_jitterbuffer_open(tmedia_jitterbuffer_t* self, uint32_t fr
TSK_DEBUG_INFO("Default Jitter buffer margin=%d", tmp);
jitter_buffer_ctl(jitterbuffer->state, JITTER_BUFFER_GET_MAX_LATE_RATE, &tmp);
TSK_DEBUG_INFO("Default Jitter max late rate=%d", tmp);
if ((tmp = tmedia_defaults_get_jb_margin()) >= 0) {
jitter_buffer_ctl(jitterbuffer->state, JITTER_BUFFER_SET_MARGIN, &tmp);
TSK_DEBUG_INFO("New Jitter buffer margin=%d", tmp);
@ -120,26 +115,26 @@ static int tdav_speex_jitterbuffer_tick(tmedia_jitterbuffer_t* self)
static int tdav_speex_jitterbuffer_put(tmedia_jitterbuffer_t* self, void* data, tsk_size_t data_size, const tsk_object_t* proto_hdr)
{
tdav_speex_jitterbuffer_t *jb = (tdav_speex_jitterbuffer_t *)self;
const trtp_rtp_header_t* rtp_hdr;
JitterBufferPacket jb_packet;
const trtp_rtp_header_t* rtp_hdr;
JitterBufferPacket jb_packet;
static uint16_t seq_num = 0;
if (!data || !data_size || !proto_hdr) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if (!jb->state) {
TSK_DEBUG_ERROR("Invalid state");
return -2;
}
rtp_hdr = TRTP_RTP_HEADER(proto_hdr);
if (!data || !data_size || !proto_hdr) {
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if (!jb->state) {
TSK_DEBUG_ERROR("Invalid state");
return -2;
}
rtp_hdr = TRTP_RTP_HEADER(proto_hdr);
jb_packet.user_data = 0;
jb_packet.span = jb->frame_duration;
jb_packet.len = jb->x_data_size;
if (jb->x_data_size == data_size) { /* ptime match */
jb_packet.data = data;
jb_packet.sequence = rtp_hdr->seq_num;
@ -180,61 +175,61 @@ static int tdav_speex_jitterbuffer_put(tmedia_jitterbuffer_t* self, void* data,
}
}
++jb->num_pkt_in;
return 0;
return 0;
}
static tsk_size_t tdav_speex_jitterbuffer_get(tmedia_jitterbuffer_t* self, void* out_data, tsk_size_t out_size)
{
tdav_speex_jitterbuffer_t *jb = (tdav_speex_jitterbuffer_t *)self;
JitterBufferPacket jb_packet;
int ret, miss = 0;
JitterBufferPacket jb_packet;
int ret, miss = 0;
tsk_size_t ret_size = 0;
if (!out_data || !out_size) {
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
if (!jb->state) {
TSK_DEBUG_ERROR("Invalid state");
return 0;
}
if (!out_data || !out_size) {
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
if (!jb->state) {
TSK_DEBUG_ERROR("Invalid state");
return 0;
}
if (jb->x_data_size != out_size) { // consumer must request PTIME data
TSK_DEBUG_WARN("%d not expected as frame size. %u<>%u", out_size, jb->frame_duration, (out_size * 500)/jb->rate);
TSK_DEBUG_WARN("%d not expected as frame size. %u<>%u", out_size, jb->frame_duration, (out_size * 500) / jb->rate);
return 0;
}
jb_packet.data = out_data;
jb_packet.len = out_size;
jb_packet.data = out_data;
jb_packet.len = (spx_uint32_t)out_size;
if ((ret = jitter_buffer_get(jb->state, &jb_packet, jb->frame_duration/*(out_size * 500)/jb->rate*/, tsk_null)) != JITTER_BUFFER_OK) {
++jb->num_pkt_miss;
switch(ret) {
case JITTER_BUFFER_MISSING:
/*TSK_DEBUG_INFO("JITTER_BUFFER_MISSING - %d", ret);*/
if (jb->num_pkt_miss > jb->num_pkt_miss_max /*too much missing pkts*/ && jb->num_pkt_in > jb->num_pkt_miss_max/*we're really receiving pkts*/) {
jb->num_pkt_miss = 0;
self->plugin->reset(self);
TSK_DEBUG_WARN("Too much missing audio pkts");
}
break;
case JITTER_BUFFER_INSERTION:
/*TSK_DEBUG_INFO("JITTER_BUFFER_INSERTION - %d", ret);*/
break;
default:
TSK_DEBUG_INFO("jitter_buffer_get() failed - %d", ret);
break;
}
// jitter_buffer_update_delay(jb->state, &jb_packet, NULL);
//return 0;
}
switch (ret) {
case JITTER_BUFFER_MISSING:
/*TSK_DEBUG_INFO("JITTER_BUFFER_MISSING - %d", ret);*/
if (jb->num_pkt_miss > jb->num_pkt_miss_max /*too much missing pkts*/ && jb->num_pkt_in > jb->num_pkt_miss_max/*we're really receiving pkts*/) {
jb->num_pkt_miss = 0;
self->plugin->reset(self);
TSK_DEBUG_WARN("Too much missing audio pkts");
}
break;
case JITTER_BUFFER_INSERTION:
/*TSK_DEBUG_INFO("JITTER_BUFFER_INSERTION - %d", ret);*/
break;
default:
TSK_DEBUG_INFO("jitter_buffer_get() failed - %d", ret);
break;
}
// jitter_buffer_update_delay(jb->state, &jb_packet, NULL);
//return 0;
}
else {
jb->num_pkt_miss = 0; // reset
ret_size = jb_packet.len;
}
//jitter_buffer_update_delay(jb->state, &jb_packet, NULL);
return ret_size;
//jitter_buffer_update_delay(jb->state, &jb_packet, NULL);
return ret_size;
}
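/* Illustrative sketch (not part of this commit): minimal SpeexDSP jitter buffer
 * usage mirroring the put()/get() logic above. Timestamps and spans are in
 * samples; the 20 ms @ 8 kHz frame size is an assumption for the example. */
#include <string.h>
#include <speex/speex_jitter.h>

#define FRAME_SAMPLES 160 /* 20 ms @ 8 kHz */

int main(void)
{
    JitterBuffer* jb = jitter_buffer_init(FRAME_SAMPLES); /* step = one frame */
    char frame_in[FRAME_SAMPLES * 2] = { 0 };             /* 16-bit PCM from RTP */
    char frame_out[FRAME_SAMPLES * 2];
    JitterBufferPacket p;

    /* put(): one packet per received RTP frame */
    p.data = frame_in;
    p.len = sizeof(frame_in);
    p.timestamp = 0;            /* e.g. derived from the RTP sequence number, as above */
    p.span = FRAME_SAMPLES;
    p.sequence = 0;
    p.user_data = 0;
    jitter_buffer_put(jb, &p);

    /* get(): one frame per playout tick; a miss is concealed with silence here */
    p.data = frame_out;
    p.len = sizeof(frame_out);
    if (jitter_buffer_get(jb, &p, FRAME_SAMPLES, NULL) != JITTER_BUFFER_OK) {
        memset(frame_out, 0, sizeof(frame_out));
    }
    jitter_buffer_tick(jb);     /* advance the playout clock */
    jitter_buffer_destroy(jb);
    return 0;
}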
static int tdav_speex_jitterbuffer_reset(tmedia_jitterbuffer_t* self)
@ -269,7 +264,7 @@ static tsk_object_t* tdav_speex_jitterbuffer_ctor(tsk_object_t * self, va_list *
{
tdav_speex_jitterbuffer_t *jitterbuffer = self;
TSK_DEBUG_INFO("Create SpeexDSP jitter buffer");
if(jitterbuffer){
if (jitterbuffer){
/* init base */
tmedia_jitterbuffer_init(TMEDIA_JITTER_BUFFER(jitterbuffer));
/* init self */
@ -278,13 +273,13 @@ static tsk_object_t* tdav_speex_jitterbuffer_ctor(tsk_object_t * self, va_list *
}
/* destructor */
static tsk_object_t* tdav_speex_jitterbuffer_dtor(tsk_object_t * self)
{
{
tdav_speex_jitterbuffer_t *jb = self;
if(jb){
if (jb){
/* deinit base */
tmedia_jitterbuffer_deinit(TMEDIA_JITTER_BUFFER(jb));
/* deinit self */
if(jb->state){
if (jb->state){
jitter_buffer_destroy(jb->state);
jb->state = tsk_null;
}
@ -296,20 +291,20 @@ static tsk_object_t* tdav_speex_jitterbuffer_dtor(tsk_object_t * self)
return self;
}
/* object definition */
static const tsk_object_def_t tdav_speex_jitterbuffer_def_s =
static const tsk_object_def_t tdav_speex_jitterbuffer_def_s =
{
sizeof(tdav_speex_jitterbuffer_t),
tdav_speex_jitterbuffer_ctor,
tdav_speex_jitterbuffer_ctor,
tdav_speex_jitterbuffer_dtor,
tsk_null,
tsk_null,
};
/* plugin definition*/
static const tmedia_jitterbuffer_plugin_def_t tdav_speex_jitterbuffer_plugin_def_s =
static const tmedia_jitterbuffer_plugin_def_t tdav_speex_jitterbuffer_plugin_def_s =
{
&tdav_speex_jitterbuffer_def_s,
tmedia_audio,
"Audio JitterBuffer based on Speex",
tdav_speex_jitterbuffer_set,
tdav_speex_jitterbuffer_open,
tdav_speex_jitterbuffer_tick,

View File

@ -1,18 +1,18 @@
/*
* Copyright (C) 2011-2014 Mamadou DIOP.
*
* Copyright (C) 2011-2015 Mamadou DIOP.
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@ -53,7 +53,7 @@ static int tdav_speex_resampler_open(tmedia_resampler_t* self, uint32_t in_freq,
tdav_speex_resampler_t *resampler = (tdav_speex_resampler_t *)self;
int ret = 0;
uint32_t bytes_per_sample = (bits_per_sample >> 3);
if (in_channels != 1 && in_channels != 2) {
TSK_DEBUG_ERROR("%d not valid as input channel", in_channels);
return -1;
@ -73,7 +73,7 @@ static int tdav_speex_resampler_open(tmedia_resampler_t* self, uint32_t in_freq,
}
resampler->bytes_per_sample = bytes_per_sample;
resampler->in_size = ((in_freq * frame_duration) / 1000) << (in_channels == 2 ? 1 : 0);
resampler->in_size = ((in_freq * frame_duration) / 1000) << (in_channels == 2 ? 1 : 0);
resampler->out_size = ((out_freq * frame_duration) / 1000) << (out_channels == 2 ? 1 : 0);
resampler->in_channels = in_channels;
resampler->out_channels = out_channels;
@ -113,13 +113,13 @@ static tsk_size_t tdav_speex_resampler_process(tmedia_resampler_t* self, const v
if (resampler->in_channels == resampler->out_channels) {
if (resampler->bytes_per_sample == sizeof(spx_int16_t)) {
err = speex_resampler_process_int(resampler->state, 0,
(const spx_int16_t *)in_data, (spx_uint32_t *)&in_size_in_sample,
(spx_int16_t *)out_data, &_out_size_in_sample);
(const spx_int16_t *)in_data, (spx_uint32_t *)&in_size_in_sample,
(spx_int16_t *)out_data, &_out_size_in_sample);
}
else {
err = speex_resampler_process_float(resampler->state, 0,
(const float *)in_data, (spx_uint32_t *)&in_size_in_sample,
(float *)out_data, &_out_size_in_sample);
(const float *)in_data, (spx_uint32_t *)&in_size_in_sample,
(float *)out_data, &_out_size_in_sample);
}
}
else {
@ -144,37 +144,37 @@ static tsk_size_t tdav_speex_resampler_process(tmedia_resampler_t* self, const v
}
}
}
}
else {
// in_channels = 2, out_channels = 1
spx_uint32_t _out_size2_in_sample = (_out_size_in_sample << 1);
if (resampler->bytes_per_sample == sizeof(spx_int16_t)) {
err = speex_resampler_process_int(resampler->state, 0,
(const spx_int16_t *)in_data, (spx_uint32_t *)&in_size_in_sample,
(spx_int16_t *)resampler->tmp_buffer.ptr, &_out_size2_in_sample);
(const spx_int16_t *)in_data, (spx_uint32_t *)&in_size_in_sample,
(spx_int16_t *)resampler->tmp_buffer.ptr, &_out_size2_in_sample);
if (err == RESAMPLER_ERR_SUCCESS) {
spx_int16_t* pout_data = (spx_int16_t*)(out_data);
_out_size_in_sample = resampler->out_size;
for (i = 0, j = 0; j < _out_size2_in_sample; ++i, j+=2) {
_out_size_in_sample = (spx_uint32_t)resampler->out_size;
for (i = 0, j = 0; j < _out_size2_in_sample; ++i, j += 2) {
pout_data[i] = *(((const spx_int16_t*)resampler->tmp_buffer.ptr) + j);
}
}
}
else {
err = speex_resampler_process_float(resampler->state, 0,
(const float *)in_data, (spx_uint32_t *)&in_size_in_sample,
(float *)resampler->tmp_buffer.ptr, &_out_size2_in_sample);
(const float *)in_data, (spx_uint32_t *)&in_size_in_sample,
(float *)resampler->tmp_buffer.ptr, &_out_size2_in_sample);
if (err == RESAMPLER_ERR_SUCCESS) {
float* pout_data = (float*)(out_data);
for (i = 0, j = 0; j < _out_size2_in_sample; ++i, j+=2) {
for (i = 0, j = 0; j < _out_size2_in_sample; ++i, j += 2) {
pout_data[i] = *(((const float*)resampler->tmp_buffer.ptr) + j);
}
}
}
}
}
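/* Illustrative sketch (not part of this commit): the stereo -> mono branch above
 * resamples both interleaved channels into the temporary buffer, then keeps only
 * the left sample of each L/R pair. A standalone version of that last step: */
#include <stddef.h>
#include <stdint.h>

static void take_left_channel(const int16_t* interleaved, size_t n_pairs, int16_t* mono)
{
    size_t i, j;
    for (i = 0, j = 0; i < n_pairs; ++i, j += 2) {
        mono[i] = interleaved[j]; /* j = left sample, j + 1 = right sample (dropped) */
    }
}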
if (err != RESAMPLER_ERR_SUCCESS) {
TSK_DEBUG_ERROR("speex_resampler_process_int() failed with error code %d", err);
return 0;
@ -203,7 +203,7 @@ static int tdav_speex_resampler_close(tmedia_resampler_t* self)
static tsk_object_t* tdav_speex_resampler_ctor(tsk_object_t * self, va_list * app)
{
tdav_speex_resampler_t *resampler = (tdav_speex_resampler_t *)self;
if(resampler){
if (resampler){
/* init base */
tmedia_resampler_init(TMEDIA_RESAMPLER(resampler));
/* init self */
@ -212,9 +212,9 @@ static tsk_object_t* tdav_speex_resampler_ctor(tsk_object_t * self, va_list * ap
}
/* destructor */
static tsk_object_t* tdav_speex_resampler_dtor(tsk_object_t * self)
{
{
tdav_speex_resampler_t *resampler = (tdav_speex_resampler_t *)self;
if(resampler){
if (resampler){
/* deinit base */
tmedia_resampler_deinit(TMEDIA_RESAMPLER(resampler));
/* deinit self */
@ -230,20 +230,20 @@ static tsk_object_t* tdav_speex_resampler_dtor(tsk_object_t * self)
return self;
}
/* object definition */
static const tsk_object_def_t tdav_speex_resampler_def_s =
static const tsk_object_def_t tdav_speex_resampler_def_s =
{
sizeof(tdav_speex_resampler_t),
tdav_speex_resampler_ctor,
tdav_speex_resampler_ctor,
tdav_speex_resampler_dtor,
tsk_null,
tsk_null,
};
/* plugin definition*/
static const tmedia_resampler_plugin_def_t tdav_speex_resampler_plugin_def_s =
static const tmedia_resampler_plugin_def_t tdav_speex_resampler_plugin_def_s =
{
&tdav_speex_resampler_def_s,
"Audio Resampler based on Speex",
tdav_speex_resampler_open,
tdav_speex_resampler_process,
tdav_speex_resampler_close,

View File

@ -1,19 +1,19 @@
/*
* Copyright (C) 2011-204 Mamadou DIOP
* Copyright (C) 2011-204 Doubango Telecom <http://www.doubango.org>
*
* Copyright (C) 2011-2015 Mamadou DIOP
* Copyright (C) 2011-2015 Doubango Telecom <http://www.doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@ -63,7 +63,7 @@ tdav_webrtc_pin_xt;
typedef struct tdav_webrtc_resampler_s
{
TSK_DECLARE_OBJECT;
tmedia_resampler_t* p_resampler;
void* p_bufftmp_ptr; // used to convert float <->int16
tsk_size_t n_bufftmp_size_in_bytes;
@ -78,7 +78,7 @@ typedef struct tdav_webrtc_resampler_s
void* p_buff_ptr;
tsk_size_t n_buff_size_in_bytes;
tsk_size_t n_buff_size_in_samples;
} out;
} out;
}
tdav_webrtc_resampler_t;
@ -96,7 +96,7 @@ typedef struct tdav_webrtc_denoise_s
#else
TDAV_NsHandle *NS_inst;
#endif
uint32_t echo_tail;
uint32_t echo_skew;
@ -126,7 +126,7 @@ static int tdav_webrtc_denoise_set(tmedia_denoise_t* _self, const tmedia_param_t
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
if (param->value_type == tmedia_pvt_int32) {
if (tsk_striequals(param->key, "echo-tail")) {
int32_t echo_tail = *((int32_t*)param->value);
@ -149,7 +149,7 @@ static int tdav_webrtc_denoise_open(tmedia_denoise_t* self, uint32_t record_fram
return -1;
}
if (denoiser->AEC_inst ||
if (denoiser->AEC_inst ||
#if HAVE_SPEEX_DSP && PREFER_SPEEX_DENOISER
denoiser->SpeexDenoiser_proc
#else
@ -159,7 +159,7 @@ static int tdav_webrtc_denoise_open(tmedia_denoise_t* self, uint32_t record_fram
TSK_DEBUG_ERROR("Denoiser already initialized");
return -2;
}
denoiser->echo_tail = TSK_CLAMP(WEBRTC_MIN_ECHO_TAIL, TMEDIA_DENOISE(denoiser)->echo_tail, WEBRTC_MAX_ECHO_TAIL);
denoiser->echo_skew = TMEDIA_DENOISE(denoiser)->echo_skew;
TSK_DEBUG_INFO("echo_tail=%d, echo_skew=%d, echo_supp_enabled=%d, noise_supp_enabled=%d", denoiser->echo_tail, denoiser->echo_skew, self->echo_supp_enabled, self->noise_supp_enabled);
@ -186,7 +186,7 @@ static int tdav_webrtc_denoise_open(tmedia_denoise_t* self, uint32_t record_fram
pin_record_in.n_duration = (((record_frame_size_samples * 1000) / record_sampling_rate)) / record_channels;
pin_record_den.n_sample_size = sizeof(sample_t);
pin_record_den.n_rate = denoiser->neg.sampling_rate;
pin_record_den.n_channels = 1;
pin_record_den.n_duration = pin_record_in.n_duration;
if (pin_record_in.n_sample_size != pin_record_den.n_sample_size || pin_record_in.n_rate != pin_record_den.n_rate || pin_record_in.n_channels != pin_record_den.n_channels) {
@ -233,7 +233,7 @@ static int tdav_webrtc_denoise_open(tmedia_denoise_t* self, uint32_t record_fram
#if TDAV_UNDER_MOBILE
#else
{
{
AecConfig aecConfig;
#if WEBRTC_AEC_AGGRESSIVE
aecConfig.nlpMode = kAecNlpAggressive;
@ -249,7 +249,7 @@ static int tdav_webrtc_denoise_open(tmedia_denoise_t* self, uint32_t record_fram
}
#endif
//
// Noise Suppression instance
//
@ -272,12 +272,12 @@ static int tdav_webrtc_denoise_open(tmedia_denoise_t* self, uint32_t record_fram
}
#endif
}
TSK_DEBUG_INFO("WebRTC denoiser opened: record:%uHz,%uchannels // playback:%uHz,%uchannels // neg:%uHz,%uchannels",
record_sampling_rate, record_channels,
playback_sampling_rate, playback_channels,
denoiser->neg.sampling_rate, denoiser->neg.channels);
return ret;
}
@ -289,8 +289,8 @@ static int tdav_webrtc_denoise_echo_playback(tmedia_denoise_t* self, const void*
tsk_safeobj_lock(p_self);
if (p_self->AEC_inst && echo_frame && echo_frame_size_bytes) {
const sample_t* _echo_frame = (const sample_t*)echo_frame;
uint32_t _echo_frame_size_bytes = echo_frame_size_bytes;
uint32_t _echo_frame_size_samples = (_echo_frame_size_bytes / sizeof(int16_t));
tsk_size_t _echo_frame_size_bytes = echo_frame_size_bytes;
tsk_size_t _echo_frame_size_samples = (_echo_frame_size_bytes / sizeof(int16_t));
// IN -> DEN
if (p_self->playback.p_rpl_in2den) {
if ((ret = _tdav_webrtc_resampler_process(p_self->playback.p_rpl_in2den, _echo_frame, _echo_frame_size_bytes))) {
@ -303,7 +303,7 @@ static int tdav_webrtc_denoise_echo_playback(tmedia_denoise_t* self, const void*
// PROCESS
if (_echo_frame_size_samples && _echo_frame) {
uint32_t _samples;
for (_samples = 0; _samples < _echo_frame_size_samples; _samples+= p_self->neg.nb_samples_per_process) {
for (_samples = 0; _samples < _echo_frame_size_samples; _samples += p_self->neg.nb_samples_per_process) {
if ((ret = TDAV_WebRtcAec_BufferFarend(p_self->AEC_inst, &_echo_frame[_samples], p_self->neg.nb_samples_per_process))){
TSK_DEBUG_ERROR("WebRtcAec_BufferFarend failed with error code = %d, nb_samples_per_process=%u", ret, p_self->neg.nb_samples_per_process);
goto bail;
@ -320,16 +320,16 @@ static int tdav_webrtc_denoise_process_record(tmedia_denoise_t* self, void* audi
{
tdav_webrtc_denoise_t *p_self = (tdav_webrtc_denoise_t *)self;
int ret = 0;
*silence_or_noise = tsk_false;
tsk_safeobj_lock(p_self);
if (p_self->AEC_inst && audio_frame && audio_frame_size_bytes) {
uint32_t _samples;
tsk_size_t _samples;
const sample_t* _audio_frame = (const sample_t*)audio_frame;
uint32_t _audio_frame_size_bytes = audio_frame_size_bytes;
uint32_t _audio_frame_size_samples = (_audio_frame_size_bytes / sizeof(int16_t));
tsk_size_t _audio_frame_size_bytes = audio_frame_size_bytes;
tsk_size_t _audio_frame_size_samples = (_audio_frame_size_bytes / sizeof(int16_t));
// IN -> DEN
if (p_self->record.p_rpl_in2den) {
if ((ret = _tdav_webrtc_resampler_process(p_self->record.p_rpl_in2den, _audio_frame, _audio_frame_size_bytes))) {
@ -358,7 +358,7 @@ static int tdav_webrtc_denoise_process_record(tmedia_denoise_t* self, void* audi
#endif
// PROCESS
if (_audio_frame_size_samples && _audio_frame) {
for (_samples = 0; _samples < _audio_frame_size_samples; _samples+= p_self->neg.nb_samples_per_process) {
for (_samples = 0; _samples < _audio_frame_size_samples; _samples += p_self->neg.nb_samples_per_process) {
if ((ret = TDAV_WebRtcAec_Process(p_self->AEC_inst, &_audio_frame[_samples], tsk_null, (sample_t*)&_audio_frame[_samples], tsk_null, p_self->neg.nb_samples_per_process, p_self->echo_tail, p_self->echo_skew))){
TSK_DEBUG_ERROR("WebRtcAec_Process with error code = %d, nb_samples_per_process=%u", ret, p_self->neg.nb_samples_per_process);
goto bail;
@ -393,7 +393,7 @@ bail:
static int tdav_webrtc_denoise_process_playback(tmedia_denoise_t* self, void* audio_frame, uint32_t audio_frame_size_bytes)
{
tdav_webrtc_denoise_t *denoiser = (tdav_webrtc_denoise_t *)self;
(void)(denoiser);
// Not mandatory to denoise audio before playback.
@ -466,7 +466,7 @@ static int _tdav_webrtc_resampler_create(const tdav_webrtc_pin_xt* p_pin_in, con
(*pp_resampler)->out.n_buff_size_in_samples = (*pp_resampler)->out.n_buff_size_in_bytes / p_pin_out->n_sample_size;
(*pp_resampler)->in.n_buff_size_in_bytes = ((((p_pin_in->n_rate * p_pin_in->n_duration) / 1000)) * p_pin_in->n_channels) * p_pin_in->n_sample_size;
(*pp_resampler)->in.n_buff_size_in_samples = (*pp_resampler)->in.n_buff_size_in_bytes / p_pin_in->n_sample_size;
(*pp_resampler)->n_bufftmp_size_in_bytes = (((48000 * TSK_MAX(p_pin_in->n_duration, p_pin_out->n_duration)) / 1000) * 2/*channels*/) * sizeof(float); // Max
(*pp_resampler)->p_bufftmp_ptr = tsk_malloc((*pp_resampler)->n_bufftmp_size_in_bytes);
if (!(*pp_resampler)->p_bufftmp_ptr) {
@ -474,7 +474,7 @@ static int _tdav_webrtc_resampler_create(const tdav_webrtc_pin_xt* p_pin_in, con
ret = -3;
goto bail;
}
memcpy(&(*pp_resampler)->in.x_pin, p_pin_in, sizeof(tdav_webrtc_pin_xt));
memcpy(&(*pp_resampler)->out.x_pin, p_pin_out, sizeof(tdav_webrtc_pin_xt));
bail:
@ -505,7 +505,7 @@ static int _tdav_webrtc_resampler_process(tdav_webrtc_resampler_t *p_self, const
if (p_self->in.x_pin.n_sample_size == sizeof(int16_t)) {
// int16_t -> float
const int16_t* p_src = (const int16_t*)p_buff_ptr;
float* p_dst = (float*) p_self->p_bufftmp_ptr;
float* p_dst = (float*)p_self->p_bufftmp_ptr;
for (index = 0; index < _n_buff_size_in_samples; ++index) {
p_dst[index] = (float)p_src[index];
}
@ -513,7 +513,7 @@ static int _tdav_webrtc_resampler_process(tdav_webrtc_resampler_t *p_self, const
else {
// float -> int16_t
const float* p_src = (const float*)p_buff_ptr;
int16_t* p_dst = (int16_t*) p_self->p_bufftmp_ptr;
int16_t* p_dst = (int16_t*)p_self->p_bufftmp_ptr;
for (index = 0; index < _n_buff_size_in_samples; ++index) {
p_dst[index] = (int16_t)p_src[index];
}
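/* Illustrative sketch (not part of this commit): the sample-format bridge used
 * above. When the input and output pins disagree on sample size, the wrapper
 * widens 16-bit PCM to float (or narrows float to 16-bit PCM) through the
 * temporary buffer with plain casts, i.e. without normalizing to [-1, 1]. */
#include <stddef.h>
#include <stdint.h>

static void s16_to_float(const int16_t* in, float* out, size_t n)
{
    size_t i;
    for (i = 0; i < n; ++i) {
        out[i] = (float)in[i];
    }
}

static void float_to_s16(const float* in, int16_t* out, size_t n)
{
    size_t i;
    for (i = 0; i < n; ++i) {
        out[i] = (int16_t)in[i];
    }
}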
@ -536,12 +536,12 @@ static tsk_object_t* tdav_webrtc_resampler_ctor(tsk_object_t * self, va_list * a
{
tdav_webrtc_resampler_t *p_resampler = (tdav_webrtc_resampler_t*)self;
if (p_resampler) {
}
return self;
}
static tsk_object_t* tdav_webrtc_resampler_dtor(tsk_object_t * self)
{
{
tdav_webrtc_resampler_t *p_resampler = (tdav_webrtc_resampler_t*)self;
if (p_resampler) {
TSK_OBJECT_SAFE_FREE(p_resampler->p_resampler);
@ -550,12 +550,12 @@ static tsk_object_t* tdav_webrtc_resampler_dtor(tsk_object_t * self)
}
return self;
}
static const tsk_object_def_t tdav_webrtc_resampler_def_s =
static const tsk_object_def_t tdav_webrtc_resampler_def_s =
{
sizeof(tdav_webrtc_resampler_t),
tdav_webrtc_resampler_ctor,
tdav_webrtc_resampler_ctor,
tdav_webrtc_resampler_dtor,
tsk_object_cmp,
tsk_object_cmp,
};
const tsk_object_def_t *tdav_webrtc_resampler_def_t = &tdav_webrtc_resampler_def_s;
@ -568,7 +568,7 @@ const tsk_object_def_t *tdav_webrtc_resampler_def_t = &tdav_webrtc_resampler_def
static tsk_object_t* tdav_webrtc_denoise_ctor(tsk_object_t * _self, va_list * app)
{
tdav_webrtc_denoise_t *self = _self;
if(self){
if (self){
/* init base */
tmedia_denoise_init(TMEDIA_DENOISE(self));
/* init self */
@ -581,9 +581,9 @@ static tsk_object_t* tdav_webrtc_denoise_ctor(tsk_object_t * _self, va_list * ap
}
/* destructor */
static tsk_object_t* tdav_webrtc_denoise_dtor(tsk_object_t * _self)
{
{
tdav_webrtc_denoise_t *self = _self;
if(self){
if (self){
/* deinit base (will close the denoise if not done yet) */
tmedia_denoise_deinit(TMEDIA_DENOISE(self));
/* deinit self */
@ -600,15 +600,15 @@ static tsk_object_t* tdav_webrtc_denoise_dtor(tsk_object_t * _self)
return self;
}
/* object definition */
static const tsk_object_def_t tdav_webrtc_denoise_def_s =
static const tsk_object_def_t tdav_webrtc_denoise_def_s =
{
sizeof(tdav_webrtc_denoise_t),
tdav_webrtc_denoise_ctor,
tdav_webrtc_denoise_ctor,
tdav_webrtc_denoise_dtor,
tsk_null,
tsk_null,
};
/* plugin definition*/
static const tmedia_denoise_plugin_def_t tdav_webrtc_denoise_plugin_def_s =
static const tmedia_denoise_plugin_def_t tdav_webrtc_denoise_plugin_def_s =
{
&tdav_webrtc_denoise_def_s,

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
* Copyright (C) 2010-2015 Mamadou DIOP
*
* This file is part of Open Source Doubango Framework.
*
@ -23,9 +21,6 @@
/**@file tdav_consumer_waveapi.c
* @brief Audio Consumer for Win32 and WinCE platforms.
*
* @author Mamadou Diop <diopmamadou(at)doubango.org>
*
*/
#include "tinydav/audio/waveapi/tdav_consumer_waveapi.h"
@ -73,7 +68,7 @@ static int create_wavehdr(tdav_consumer_waveapi_t* consumer, tsk_size_t index)
consumer->hWaveHeaders[index] = tsk_calloc(1, sizeof(WAVEHDR));
consumer->hWaveHeaders[index]->lpData = tsk_calloc(1, consumer->bytes_per_notif);
consumer->hWaveHeaders[index]->dwBufferLength = consumer->bytes_per_notif;
consumer->hWaveHeaders[index]->dwBufferLength = (DWORD)consumer->bytes_per_notif;
consumer->hWaveHeaders[index]->dwFlags = WHDR_BEGINLOOP | WHDR_ENDLOOP;
consumer->hWaveHeaders[index]->dwLoops = 0x01;
consumer->hWaveHeaders[index]->dwUser = index;

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
* Copyright (C) 2010-2015 Mamadou DIOP.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,10 +20,6 @@
/**@file tdav_producer_waveapi.c
* @brief Audio Producer for Win32 and WinCE platforms.
*
* @author Mamadou Diop <diopmamadou(at)doubango.org>
*
*/
#include "tinydav/audio/waveapi/tdav_producer_waveapi.h"
@ -73,7 +67,7 @@ static int create_wavehdr(tdav_producer_waveapi_t* producer, tsk_size_t index)
producer->hWaveHeaders[index] = tsk_calloc(1, sizeof(WAVEHDR));
producer->hWaveHeaders[index]->lpData = tsk_calloc(1, producer->bytes_per_notif);
producer->hWaveHeaders[index]->dwBufferLength = producer->bytes_per_notif;
producer->hWaveHeaders[index]->dwBufferLength = (DWORD)producer->bytes_per_notif;
producer->hWaveHeaders[index]->dwFlags = WHDR_BEGINLOOP | WHDR_ENDLOOP;
producer->hWaveHeaders[index]->dwLoops = 0x01;
producer->hWaveHeaders[index]->dwUser = index;

View File

@ -151,7 +151,7 @@ static tsk_size_t tdav_codec_red_decode(tmedia_codec_t* self, const void* in_dat
|0| Block PT |
+-+-+-+-+-+-+-+-+
*/
block_length = in_size;
block_length = (uint16_t)in_size;
}
else{
/*

View File

@ -1,20 +1,18 @@
/*
* Copyright (C) 2012 Doubango Telecom <http://www.doubango.org>
* Copyright (C) 2012-2015 Doubango Telecom <http://www.doubango.org>
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@ -36,7 +34,7 @@
typedef struct tdav_codec_ulpfec_s
{
TMEDIA_DECLARE_CODEC_VIDEO;
struct{
struct tdav_fec_pkt_s* pkt;
} encoder;
@ -64,26 +62,26 @@ typedef tsk_list_t tdav_fec_levels_L_t;
static tsk_object_t* tdav_fec_level_ctor(tsk_object_t * self, va_list * app)
{
tdav_fec_level_t *level = self;
if(level){
if (level){
level->hdr.mask_size = 16; // L=0
}
return self;
}
static tsk_object_t* tdav_fec_level_dtor(tsk_object_t * self)
{
{
tdav_fec_level_t *level = self;
if(level){
if (level){
TSK_FREE(level->payload.ptr);
}
return self;
}
static const tsk_object_def_t tdav_fec_level_def_s =
static const tsk_object_def_t tdav_fec_level_def_s =
{
sizeof(tdav_fec_level_t),
tdav_fec_level_ctor,
tdav_fec_level_ctor,
tdav_fec_level_dtor,
tsk_null,
tsk_null,
};
const tsk_object_def_t *tdav_fec_level_def_t = &tdav_fec_level_def_s;
@ -96,29 +94,29 @@ typedef struct tdav_fec_pkt_s
TSK_DECLARE_OBJECT;
struct{ // RFC 5109 - 7.3. FEC Header for FEC Packets
unsigned E:1;
unsigned L:1;
unsigned P:1;
unsigned X:1;
unsigned CC:4;
unsigned M:1;
unsigned PT:7;
unsigned E : 1;
unsigned L : 1;
unsigned P : 1;
unsigned X : 1;
unsigned CC : 4;
unsigned M : 1;
unsigned PT : 7;
struct{
uint16_t value;
unsigned set:1;
unsigned set : 1;
}SN_base;
uint32_t TS;
uint16_t length;
}hdr;
tdav_fec_levels_L_t* levels;
}
tdav_fec_pkt_t;
static tsk_object_t* tdav_fec_pkt_ctor(tsk_object_t * self, va_list * app)
{
tdav_fec_pkt_t *pkt = self;
if(pkt){
if(!(pkt->levels = tsk_list_create())){
if (pkt){
if (!(pkt->levels = tsk_list_create())){
TSK_DEBUG_ERROR("Failed to create levels");
return tsk_null;
}
@ -126,9 +124,9 @@ static tsk_object_t* tdav_fec_pkt_ctor(tsk_object_t * self, va_list * app)
return self;
}
static tsk_object_t* tdav_fec_pkt_dtor(tsk_object_t * self)
{
{
tdav_fec_pkt_t *pkt = self;
if(pkt){
if (pkt){
TSK_OBJECT_SAFE_FREE(pkt->levels);
}
@ -139,18 +137,18 @@ static int tdav_fec_pkt_cmp(const tsk_object_t *_p1, const tsk_object_t *_p2)
const tdav_fec_pkt_t *p1 = _p1;
const tdav_fec_pkt_t *p2 = _p2;
if(p1 && p2){
if (p1 && p2){
return (int)(p1->hdr.SN_base.value - p2->hdr.SN_base.value);
}
else if(!p1 && !p2) return 0;
else if (!p1 && !p2) return 0;
else return -1;
}
static const tsk_object_def_t tdav_fec_pkt_def_s =
static const tsk_object_def_t tdav_fec_pkt_def_s =
{
sizeof(tdav_fec_pkt_t),
tdav_fec_pkt_ctor,
tdav_fec_pkt_ctor,
tdav_fec_pkt_dtor,
tdav_fec_pkt_cmp,
tdav_fec_pkt_cmp,
};
const tsk_object_def_t *tdav_fec_pkt_def_t = &tdav_fec_pkt_def_s;
@ -161,13 +159,13 @@ tsk_size_t tdav_codec_ulpfec_guess_serialbuff_size(const tdav_codec_ulpfec_t* se
tsk_list_item_t *item;
tdav_fec_level_t* level;
if(!self){
if (!self){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
tsk_list_foreach(item, self->encoder.pkt->levels){
if(!(level = item->data)){
if (!(level = item->data)){
continue;
}
size += 2 /* Protection length */ + (level->hdr.mask_size >> 3) + level->hdr.length;
@ -181,7 +179,7 @@ int tdav_codec_ulpfec_enc_reset(tdav_codec_ulpfec_t* self)
tsk_list_item_t *item;
tdav_fec_level_t* level;
if(!self || !self->encoder.pkt){
if (!self || !self->encoder.pkt){
TSK_DEBUG_ERROR("invalid parameter");
return -1;
}
@ -191,9 +189,9 @@ int tdav_codec_ulpfec_enc_reset(tdav_codec_ulpfec_t* self)
// reset levels
tsk_list_foreach(item, self->encoder.pkt->levels){
if((level = item->data)){
if ((level = item->data)){
memset(&level->hdr, 0, sizeof(level->hdr));
if(level->payload.ptr){
if (level->payload.ptr){
memset(level->payload.ptr, 0, level->payload.size);
}
}
@ -203,7 +201,7 @@ int tdav_codec_ulpfec_enc_reset(tdav_codec_ulpfec_t* self)
int tdav_codec_ulpfec_enc_protect(tdav_codec_ulpfec_t* self, const trtp_rtp_packet_t* rtp_packet)
{
if(!self || !self->encoder.pkt || !rtp_packet || !rtp_packet->header){
if (!self || !self->encoder.pkt || !rtp_packet || !rtp_packet->header){
TSK_DEBUG_ERROR("invalid parameter");
return -1;
}
@ -214,7 +212,7 @@ int tdav_codec_ulpfec_enc_protect(tdav_codec_ulpfec_t* self, const trtp_rtp_pack
self->encoder.pkt->hdr.CC ^= rtp_packet->header->csrc_count;
self->encoder.pkt->hdr.M ^= rtp_packet->header->marker;
self->encoder.pkt->hdr.PT ^= rtp_packet->header->payload_type;
if(!self->encoder.pkt->hdr.SN_base.set){
if (!self->encoder.pkt->hdr.SN_base.set){
self->encoder.pkt->hdr.SN_base.value = rtp_packet->header->seq_num;
self->encoder.pkt->hdr.SN_base.set = 1;
}
@ -230,29 +228,29 @@ int tdav_codec_ulpfec_enc_protect(tdav_codec_ulpfec_t* self, const trtp_rtp_pack
tdav_fec_level_t* level0 = TSK_LIST_FIRST_DATA(self->encoder.pkt->levels);
const uint8_t* rtp_payload = (const uint8_t*)(rtp_packet->payload.data_const ? rtp_packet->payload.data_const : rtp_packet->payload.data);
tsk_size_t i;
if(!level0){
if (!level0){
tdav_fec_level_t* _level0;
if(!(_level0 = tsk_object_new(tdav_fec_level_def_t))){
if (!(_level0 = tsk_object_new(tdav_fec_level_def_t))){
TSK_DEBUG_ERROR("Failed to create level");
return -2;
}
level0 = _level0;
tsk_list_push_back_data(self->encoder.pkt->levels, (void**)&_level0);
}
if(level0->payload.size < rtp_packet->payload.size){
if(!(level0->payload.ptr = tsk_realloc(level0->payload.ptr, rtp_packet->payload.size))){
if (level0->payload.size < rtp_packet->payload.size){
if (!(level0->payload.ptr = tsk_realloc(level0->payload.ptr, rtp_packet->payload.size))){
TSK_DEBUG_ERROR("Failed to realloc size %d", rtp_packet->payload.size);
level0->payload.size = 0;
return -3;
}
level0->payload.size = rtp_packet->payload.size;
}
for(i = 0; i < rtp_packet->payload.size; ++i){
for (i = 0; i < rtp_packet->payload.size; ++i){
level0->payload.ptr[i] ^= rtp_payload[i];
}
level0->hdr.mask_size = self->encoder.pkt->hdr.L ? 48 : 16;
level0->hdr.mask |= ((uint64_t)1 << (level0->hdr.mask_size - (rtp_packet->header->seq_num - self->encoder.pkt->hdr.SN_base.value)));
level0->hdr.length = TSK_MAX(level0->hdr.length, rtp_packet->payload.size);
level0->hdr.mask |= (uint64_t)((uint64_t)1 << (level0->hdr.mask_size - (rtp_packet->header->seq_num - self->encoder.pkt->hdr.SN_base.value)));
level0->hdr.length = (uint16_t)(TSK_MAX(level0->hdr.length, rtp_packet->payload.size));
}
return 0;
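/* Illustrative sketch (not part of this commit): the XOR parity idea behind the
 * protection loop above (RFC 5109). XOR-ing the payloads of N media packets
 * yields a parity block; if exactly one of those packets is lost, XOR-ing the
 * parity with the N-1 received payloads reconstructs the missing one. */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

static void xor_into(uint8_t* acc, const uint8_t* pkt, size_t len)
{
    size_t i;
    for (i = 0; i < len; ++i) {
        acc[i] ^= pkt[i]; /* same operation as level0->payload.ptr[i] ^= rtp_payload[i] above */
    }
}

int main(void)
{
    uint8_t a[4] = { 1, 2, 3, 4 }, b[4] = { 5, 6, 7, 8 };
    uint8_t parity[4] = { 0 }, recovered[4];

    xor_into(parity, a, 4); /* protect packet a */
    xor_into(parity, b, 4); /* protect packet b */

    memcpy(recovered, parity, 4); /* suppose b was lost: parity ^ a == b */
    xor_into(recovered, a, 4);
    assert(memcmp(recovered, b, 4) == 0);
    return 0;
}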
@ -266,14 +264,14 @@ tsk_size_t tdav_codec_ulpfec_enc_serialize(const tdav_codec_ulpfec_t* self, void
tsk_list_item_t* item;
tdav_fec_level_t* level;
if(!self || !self->encoder.pkt || !out_data){
if (!self || !self->encoder.pkt || !out_data){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
xsize = tdav_codec_ulpfec_guess_serialbuff_size(self);
if(*out_max_size < xsize){
if(!(*out_data = tsk_realloc(*out_data, xsize))){
if (*out_max_size < xsize){
if (!(*out_data = tsk_realloc(*out_data, xsize))){
TSK_DEBUG_ERROR("Failed to reallocate buffer with size =%d", xsize);
*out_max_size = 0;
return 0;
@ -283,7 +281,7 @@ tsk_size_t tdav_codec_ulpfec_enc_serialize(const tdav_codec_ulpfec_t* self, void
pdata = (uint8_t*)*out_data;
// E(1), L(1), P(1), X(1), CC(4)
pdata[0] =
pdata[0] =
(self->encoder.pkt->hdr.E << 7) |
(self->encoder.pkt->hdr.L << 6) |
(self->encoder.pkt->hdr.P << 5) |
@ -303,18 +301,18 @@ tsk_size_t tdav_codec_ulpfec_enc_serialize(const tdav_codec_ulpfec_t* self, void
pdata[8] = (self->encoder.pkt->hdr.length >> 8);
pdata[9] = (self->encoder.pkt->hdr.length & 0xFF);
pdata+= 10;
pdata += 10;
tsk_list_foreach(item, self->encoder.pkt->levels){
if(!(level = item->data)){
if (!(level = item->data)){
continue;
}
// Protection length (16)
pdata[0] = (level->hdr.length >> 8);
pdata[1] = (level->hdr.length & 0xFF);
pdata+=2;
pdata += 2;
// mask (16 or 48)
for(i = (level->hdr.mask_size - 8); i >= 0; i-=8){
for (i = (int32_t)(level->hdr.mask_size - 8); i >= 0; i -= 8){
*pdata = ((level->hdr.mask >> i) & 0xFF); ++pdata;
}
// payload
@ -365,10 +363,10 @@ static char* tdav_codec_ulpfec_sdp_att_get(const tmedia_codec_t* self, const cha
static tsk_object_t* tdav_codec_ulpfec_ctor(tsk_object_t * self, va_list * app)
{
tdav_codec_ulpfec_t *ulpfec = self;
if(ulpfec){
if (ulpfec){
/* init base: called by tmedia_codec_create() */
/* init self */
if(!(ulpfec->encoder.pkt = tsk_object_new(tdav_fec_pkt_def_t))){
if (!(ulpfec->encoder.pkt = tsk_object_new(tdav_fec_pkt_def_t))){
TSK_DEBUG_ERROR("Failed to create FEC packet");
return tsk_null;
}
@ -377,9 +375,9 @@ static tsk_object_t* tdav_codec_ulpfec_ctor(tsk_object_t * self, va_list * app)
}
/* destructor */
static tsk_object_t* tdav_codec_ulpfec_dtor(tsk_object_t * self)
{
{
tdav_codec_ulpfec_t *ulpfec = self;
if(ulpfec){
if (ulpfec){
/* deinit base */
tmedia_codec_video_deinit(ulpfec);
/* deinit self */
@ -389,15 +387,15 @@ static tsk_object_t* tdav_codec_ulpfec_dtor(tsk_object_t * self)
return self;
}
/* object definition */
static const tsk_object_def_t tdav_codec_ulpfec_def_s =
static const tsk_object_def_t tdav_codec_ulpfec_def_s =
{
sizeof(tdav_codec_ulpfec_t),
tdav_codec_ulpfec_ctor,
tdav_codec_ulpfec_ctor,
tdav_codec_ulpfec_dtor,
tmedia_codec_cmp,
tmedia_codec_cmp,
};
/* plugin definition*/
static const tmedia_codec_plugin_def_t tdav_codec_ulpfec_plugin_def_s =
static const tmedia_codec_plugin_def_t tdav_codec_ulpfec_plugin_def_s =
{
&tdav_codec_ulpfec_def_s,
@ -408,12 +406,12 @@ static const tmedia_codec_plugin_def_t tdav_codec_ulpfec_plugin_def_s =
TMEDIA_CODEC_FORMAT_ULPFEC,
tsk_true,
90000, // rate
/* audio */
{ 0 },
/* video (defaul width,height,fps) */
{176, 144, 15},
{ 176, 144, 15 },
tsk_null, // set()
tdav_codec_ulpfec_open,

View File

@ -1,20 +1,18 @@
/*
* Copyright (C) 2011 Doubango Telecom <http://www.doubango.org>
* Copyright (C) 2011-2015 Doubango Telecom <http://www.doubango.org>
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@ -22,9 +20,6 @@
/**@file tdav_codec_g722.c
* @brief G.722 codec plugins.
*
* @author Mamadou Diop <diopmamadou(at)doubango.org>
*
*/
#include "tinydav/codecs/g722/tdav_codec_g722.h"
@ -44,34 +39,34 @@ static int tdav_codec_g722_open(tmedia_codec_t* self)
{
tdav_codec_g722_t* g722 = (tdav_codec_g722_t*)self;
if(!g722){
if (!g722){
TSK_DEBUG_ERROR("Invalid parameter");
return -1;
}
// Initialize the decoder
if(!g722->dec_state){
if(!(g722->dec_state = tsk_calloc(1, sizeof(g722_decode_state_t)))){
if (!g722->dec_state){
if (!(g722->dec_state = tsk_calloc(1, sizeof(g722_decode_state_t)))){
TSK_DEBUG_ERROR("Failed to create G.722 decoder state");
return -2;
}
// Create and/or reset the G.722 decoder
// Bitrate 64 kbps and wideband mode (2)
if(!(g722->dec_state = g722_decode_init(g722->dec_state, 64000, 2))){
if (!(g722->dec_state = g722_decode_init(g722->dec_state, 64000, 2))){
TSK_DEBUG_ERROR("g722_decode_init failed");
return -3;
}
}
// Initialize the encoder
if(!g722->enc_state){
if(!(g722->enc_state = tsk_calloc(1, sizeof(g722_encode_state_t)))){
if (!g722->enc_state){
if (!(g722->enc_state = tsk_calloc(1, sizeof(g722_encode_state_t)))){
TSK_DEBUG_ERROR("Failed to create G.722 encoder state");
return -4;
}
// Create and/or reset the G.722 encoder
// Bitrate 64 kbps and wideband mode (2)
if(!(g722->enc_state = g722_encode_init(g722->enc_state, 64000, 2))){
if (!(g722->enc_state = g722_encode_init(g722->enc_state, 64000, 2))){
TSK_DEBUG_ERROR("g722_encode_init failed");
return -5;
}
@ -82,63 +77,63 @@ static int tdav_codec_g722_open(tmedia_codec_t* self)
static int tdav_codec_g722_close(tmedia_codec_t* self)
{
tdav_codec_g722_t* g722 = (tdav_codec_g722_t*)self;
tdav_codec_g722_t* g722 = (tdav_codec_g722_t*)self;
(void)(g722);
/* resources will be freed by the dctor() */
return 0;
}
static tsk_size_t tdav_codec_g722_encode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size)
{
tsk_size_t out_g722_size;
tdav_codec_g722_t* g722 = (tdav_codec_g722_t*)self;
if(!self || !in_data || !in_size || !out_data){
tdav_codec_g722_t* g722 = (tdav_codec_g722_t*)self;
if (!self || !in_data || !in_size || !out_data){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
out_g722_size = in_size>>2;
if(*out_max_size <out_g722_size){
if(!(*out_data = tsk_realloc(*out_data, out_g722_size))){
out_g722_size = in_size >> 2;
if (*out_max_size < out_g722_size){
if (!(*out_data = tsk_realloc(*out_data, out_g722_size))){
TSK_DEBUG_ERROR("Failed to allocate new buffer");
*out_max_size = 0;
return 0;
}
*out_max_size = out_g722_size;
}
g722_encode(g722->enc_state, (uint8_t*)*out_data, (int16_t*)in_data, in_size/sizeof(int16_t));
g722_encode(g722->enc_state, (uint8_t*)*out_data, (int16_t*)in_data, (int)in_size / sizeof(int16_t));
return out_g722_size;
}
static tsk_size_t tdav_codec_g722_decode(tmedia_codec_t* self, const void* in_data, tsk_size_t in_size, void** out_data, tsk_size_t* out_max_size, const tsk_object_t* proto_hdr)
{
tdav_codec_g722_t* g722 = (tdav_codec_g722_t*)self;
tdav_codec_g722_t* g722 = (tdav_codec_g722_t*)self;
if(!self || !in_data || !in_size || !out_data){
if (!self || !in_data || !in_size || !out_data){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
/* allocate new buffer */
if(*out_max_size<(in_size<<2)){
if(!(*out_data = tsk_realloc(*out_data, in_size<<2))){
if (*out_max_size < (in_size << 2)){
if (!(*out_data = tsk_realloc(*out_data, in_size << 2))){
TSK_DEBUG_ERROR("Failed to allocate new buffer");
*out_max_size = 0;
return 0;
}
*out_max_size = in_size<<2;
*out_max_size = in_size << 2;
}
g722_decode(g722->dec_state, (int16_t*)*out_data, (uint8_t*)in_data, in_size);
g722_decode(g722->dec_state, (int16_t*)*out_data, (uint8_t*)in_data, (int)in_size);
return (in_size<<2);
return (in_size << 2);
}
static tsk_bool_t tdav_codec_g722_sdp_att_match(const tmedia_codec_t* codec, const char* att_name, const char* att_value)
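The ">> 2" / "<< 2" buffer math above follows from the codec's fixed rates: G.722 produces 64 kbit/s from 16 kHz, 16-bit PCM, so every encoded byte stands for two samples (four PCM bytes). A tiny illustration of that sizing, kept separate from the library code:

#include <stddef.h>

/* 16 kHz x 16-bit PCM (32000 bytes/s) maps to 64 kbit/s G.722 (8000 bytes/s),
   a fixed 4:1 byte ratio in both directions. */
static size_t g722_encoded_size(size_t pcm_bytes)  { return pcm_bytes >> 2; }
static size_t g722_decoded_size(size_t g722_bytes) { return g722_bytes << 2; }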
@ -159,25 +154,25 @@ static char* tdav_codec_g722_sdp_att_get(const tmedia_codec_t* codec, const char
static tsk_object_t* tdav_codec_g722_ctor(tsk_object_t * self, va_list * app)
{
tdav_codec_g722_t *g722 = self;
if(g722){
if (g722){
/* init base: called by tmedia_codec_create() */
/* init self */
}
return self;
}
/* destructor */
static tsk_object_t* tdav_codec_g722_dtor(tsk_object_t * self)
{
{
tdav_codec_g722_t *g722 = self;
if(g722){
if (g722){
/* deinit base */
tmedia_codec_audio_deinit(g722);
/* deinit self */
if(g722->enc_state){
if (g722->enc_state){
g722_encode_release(g722->enc_state), g722->enc_state = tsk_null;
}
if(g722->dec_state){
if (g722->dec_state){
g722_decode_release(g722->dec_state), g722->dec_state = tsk_null;
}
}
@ -185,15 +180,15 @@ static tsk_object_t* tdav_codec_g722_dtor(tsk_object_t * self)
return self;
}
/* object definition */
static const tsk_object_def_t tdav_codec_g722_def_s =
static const tsk_object_def_t tdav_codec_g722_def_s =
{
sizeof(tdav_codec_g722_t),
tdav_codec_g722_ctor,
tdav_codec_g722_ctor,
tdav_codec_g722_dtor,
tmedia_codec_cmp,
tmedia_codec_cmp,
};
/* plugin definition*/
static const tmedia_codec_plugin_def_t tdav_codec_g722_plugin_def_s =
static const tmedia_codec_plugin_def_t tdav_codec_g722_plugin_def_s =
{
&tdav_codec_g722_def_s,
@ -204,14 +199,14 @@ static const tmedia_codec_plugin_def_t tdav_codec_g722_plugin_def_s =
TMEDIA_CODEC_FORMAT_G722,
tsk_false,
16000,
{ /* audio */
1, // channels
0 // ptime @deprecated
},
/* video */
{0},
{ 0 },
tsk_null, // set()
tdav_codec_g722_open,

View File

@ -316,7 +316,7 @@ tsk_size_t tdav_codec_h261_decode(tmedia_codec_t* self, const void* in_data, tsk
/* decode the picture */
av_init_packet(&packet);
packet.size = h261->decoder.accumulator_pos;
packet.size = (int)h261->decoder.accumulator_pos;
packet.data = h261->decoder.accumulator;
ret = avcodec_decode_video2(h261->decoder.context, h261->decoder.picture, &got_picture_ptr, &packet);
@ -328,8 +328,8 @@ tsk_size_t tdav_codec_h261_decode(tmedia_codec_t* self, const void* in_data, tsk
TMEDIA_CODEC_VIDEO(h261)->in.width = h261->decoder.context->width;
TMEDIA_CODEC_VIDEO(h261)->in.height = h261->decoder.context->height;
/* copy picture into a linear buffer */
avpicture_layout((AVPicture *)h261->decoder.picture, h261->decoder.context->pix_fmt, h261->decoder.context->width, h261->decoder.context->height,
*out_data, retsize);
avpicture_layout((AVPicture *)h261->decoder.picture, h261->decoder.context->pix_fmt, (int)h261->decoder.context->width, (int)h261->decoder.context->height,
*out_data, (int)retsize);
}
/* in all cases: reset accumulator */
h261->decoder.accumulator_pos = 0;
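The hunk above and the H.263, H.264, MP4V-ES and Theora hunks that follow all patch the same accumulate-then-decode shape, so one hedged sketch covers them (it uses the same legacy FFmpeg calls as the source; every other name is a placeholder). The casts are the Win64 part: the RTP accumulator position is a tsk_size_t, while AVPacket.size and avpicture_layout() take int.

#include <stdint.h>
#include <stddef.h>
#include <libavcodec/avcodec.h>

/* Decode one access unit reassembled from RTP into "accumulator" and copy the
   decoded picture into a caller-provided linear buffer. Returns the number of
   bytes written, 0 on failure. */
static size_t decode_accumulated(AVCodecContext* ctx, AVFrame* picture,
                                 uint8_t* accumulator, size_t accumulator_pos,
                                 uint8_t* out, size_t out_size)
{
    AVPacket packet;
    int got_picture = 0, ret;

    av_init_packet(&packet);
    packet.size = (int)accumulator_pos; /* 64-bit size -> int */
    packet.data = accumulator;

    ret = avcodec_decode_video2(ctx, picture, &got_picture, &packet);
    if (ret < 0 || !got_picture) {
        return 0;
    }
    /* copy the picture into a linear buffer */
    ret = avpicture_layout((AVPicture*)picture, ctx->pix_fmt,
                           ctx->width, ctx->height, out, (int)out_size);
    return (ret > 0) ? (size_t)ret : 0;
}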

View File

@ -456,7 +456,7 @@ static tsk_size_t tdav_codec_h263_decode(tmedia_codec_t* self, const void* in_da
}
av_init_packet(&packet);
packet.size = h263->decoder.accumulator_pos;
packet.size = (int)h263->decoder.accumulator_pos;
packet.data = h263->decoder.accumulator;
ret = avcodec_decode_video2(h263->decoder.context, h263->decoder.picture, &got_picture_ptr, &packet);
@ -480,8 +480,8 @@ static tsk_size_t tdav_codec_h263_decode(tmedia_codec_t* self, const void* in_da
TMEDIA_CODEC_VIDEO(h263)->in.width = h263->decoder.context->width;
TMEDIA_CODEC_VIDEO(h263)->in.height = h263->decoder.context->height;
/* copy picture into a linear buffer */
avpicture_layout((AVPicture *)h263->decoder.picture, h263->decoder.context->pix_fmt, h263->decoder.context->width, h263->decoder.context->height,
*out_data, retsize);
avpicture_layout((AVPicture *)h263->decoder.picture, h263->decoder.context->pix_fmt, (int)h263->decoder.context->width, (int)h263->decoder.context->height,
*out_data, (int)retsize);
}
/* in all cases: reset accumulator */
h263->decoder.accumulator_pos = 0;
@ -741,7 +741,7 @@ static tsk_size_t tdav_codec_h263p_decode(tmedia_codec_t* self, const void* in_d
/* decode the picture */
av_init_packet(&packet);
packet.size = h263->decoder.accumulator_pos;
packet.size = (int)h263->decoder.accumulator_pos;
packet.data = h263->decoder.accumulator;
ret = avcodec_decode_video2(h263->decoder.context, h263->decoder.picture, &got_picture_ptr, &packet);
@ -753,8 +753,8 @@ static tsk_size_t tdav_codec_h263p_decode(tmedia_codec_t* self, const void* in_d
TMEDIA_CODEC_VIDEO(h263)->in.width = h263->decoder.context->width;
TMEDIA_CODEC_VIDEO(h263)->in.height = h263->decoder.context->height;
/* copy picture into a linear buffer */
avpicture_layout((AVPicture *)h263->decoder.picture, h263->decoder.context->pix_fmt, h263->decoder.context->width, h263->decoder.context->height,
*out_data, retsize);
avpicture_layout((AVPicture *)h263->decoder.picture, h263->decoder.context->pix_fmt, (int)h263->decoder.context->width, (int)h263->decoder.context->height,
*out_data, (int)retsize);
}
/* in all cases: reset accumulator */
h263->decoder.accumulator_pos = 0;

View File

@ -429,7 +429,7 @@ static tsk_size_t tdav_codec_h264_decode(tmedia_codec_t* self, const void* in_da
/* decode the picture */
av_init_packet(&packet);
packet.dts = packet.pts = AV_NOPTS_VALUE;
packet.size = h264->decoder.accumulator_pos;
packet.size = (int)h264->decoder.accumulator_pos;
packet.data = h264->decoder.accumulator;
ret = avcodec_decode_video2(h264->decoder.context, h264->decoder.picture, &got_picture_ptr, &packet);
@ -465,8 +465,8 @@ static tsk_size_t tdav_codec_h264_decode(tmedia_codec_t* self, const void* in_da
retsize = xsize;
TMEDIA_CODEC_VIDEO(h264)->in.width = h264->decoder.context->width;
TMEDIA_CODEC_VIDEO(h264)->in.height = h264->decoder.context->height;
avpicture_layout((AVPicture *)h264->decoder.picture, h264->decoder.context->pix_fmt, h264->decoder.context->width, h264->decoder.context->height,
*out_data, retsize);
avpicture_layout((AVPicture *)h264->decoder.picture, h264->decoder.context->pix_fmt, (int)h264->decoder.context->width, (int)h264->decoder.context->height,
*out_data, (int)retsize);
}
#endif /* HAVE_FFMPEG */
} // else(h264->decoder.passthrough)

View File

@ -354,7 +354,7 @@ tsk_size_t tdav_codec_mp4ves_decode(tmedia_codec_t* _self, const void* in_data,
}
av_init_packet(&packet);
packet.size = self->decoder.accumulator_pos;
packet.size = (int)self->decoder.accumulator_pos;
packet.data = self->decoder.accumulator;
ret = avcodec_decode_video2(self->decoder.context, self->decoder.picture, &got_picture_ptr, &packet);
@ -372,8 +372,8 @@ tsk_size_t tdav_codec_mp4ves_decode(tmedia_codec_t* _self, const void* in_data,
TMEDIA_CODEC_VIDEO(self)->in.height = self->decoder.context->height;
/* copy picture into a linear buffer */
avpicture_layout((AVPicture *)self->decoder.picture, self->decoder.context->pix_fmt, self->decoder.context->width, self->decoder.context->height,
*out_data, retsize);
avpicture_layout((AVPicture *)self->decoder.picture, self->decoder.context->pix_fmt, (int)self->decoder.context->width, (int)self->decoder.context->height,
*out_data, (int)retsize);
}
/* in all cases: reset accumulator */
self->decoder.accumulator_pos = 0;

View File

@ -341,7 +341,7 @@ tsk_size_t tdav_codec_theora_decode(tmedia_codec_t* self, const void* in_data, t
AVPacket packet;
/* Perform decoding */
av_init_packet(&packet);
packet.size = theora->decoder.accumulator_pos;
packet.size = (int)theora->decoder.accumulator_pos;
packet.data = theora->decoder.accumulator;
ret = avcodec_decode_video2(theora->decoder.context, theora->decoder.picture, &got_picture_ptr, &packet);
@ -370,8 +370,8 @@ tsk_size_t tdav_codec_theora_decode(tmedia_codec_t* self, const void* in_data, t
}
}
/* copy picture into a linear buffer */
avpicture_layout((AVPicture *)theora->decoder.picture, theora->decoder.context->pix_fmt, theora->decoder.context->width, theora->decoder.context->height,
*out_data, retsize);
avpicture_layout((AVPicture *)theora->decoder.picture, theora->decoder.context->pix_fmt, (int)theora->decoder.context->width, (int)theora->decoder.context->height,
*out_data, (int)retsize);
}
/* in all cases: reset accumulator */
theora->decoder.accumulator_pos = 0;
@ -411,7 +411,7 @@ tsk_size_t tdav_codec_theora_decode(tmedia_codec_t* self, const void* in_data, t
if((pdata[3]>>6) == Not_Fragmented || (pdata[3]>>6) == End_Fragment || rtp_hdr->marker){
if(theora->decoder.conf_pkt->size > THEORA_IDENT_HEADER_SIZE){
const uint8_t* conf_ptr = theora->decoder.conf_pkt->data;
int setup_size = theora->decoder.conf_pkt->size - THEORA_IDENT_HEADER_SIZE;
int setup_size = (int)theora->decoder.conf_pkt->size - THEORA_IDENT_HEADER_SIZE;
int extradata_size = (2 + THEORA_IDENT_HEADER_SIZE) + (2 + setup_size) + (2 + sizeof(__theora_comment_hdr));
if(conf_ptr[0] == 0x80 && conf_ptr[THEORA_IDENT_HEADER_SIZE] == 0x82){ /* Do not check for 't'h'e'o'r'a' */
/* save configration identification */
@ -805,7 +805,7 @@ int tdav_codec_theora_send(tdav_codec_theora_t* self, const uint8_t* data, tsk_s
while(size){
pay_size = TSK_MIN(THEORA_RTP_PAYLOAD_SIZE, size);
pay_hdr[4] = pay_size>>8, pay_hdr[5] = pay_size & 0xFF;
pay_hdr[4] = (uint8_t)(pay_size >> 8), pay_hdr[5] = (uint8_t)(pay_size & 0xFF);
if(frag){
if(first){
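The two rewritten header bytes above are simply a 16-bit big-endian length field. A small sketch with the uint8_t casts that keep the Win64 build free of truncation warnings; in the source's terms it would be invoked as write_len_be16(&pay_hdr[4], pay_size).

#include <stdint.h>
#include <stddef.h>

/* Write a payload length as two big-endian bytes. */
static void write_len_be16(uint8_t* len_field, size_t pay_size)
{
    len_field[0] = (uint8_t)(pay_size >> 8);
    len_field[1] = (uint8_t)(pay_size & 0xFF);
}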

View File

@ -465,7 +465,7 @@ static tsk_size_t tdav_codec_vp8_decode(tmedia_codec_t* self, const void* in_dat
}
#endif
vpx_ret = vpx_codec_decode(&vp8->decoder.context, pay_ptr, pay_size, tsk_null, 0);
vpx_ret = vpx_codec_decode(&vp8->decoder.context, pay_ptr, (int)pay_size, tsk_null, 0);
if(vpx_ret != VPX_CODEC_OK){
TSK_DEBUG_INFO("vpx_codec_decode failed with error =%s", vpx_codec_err_to_string(vpx_ret));
@ -821,7 +821,7 @@ static void tdav_codec_vp8_encap(tdav_codec_vp8_t* self, const vpx_codec_cx_pkt_
index = 0;
frame_ptr = pkt->data.frame.buf ;
pkt_size = pkt->data.frame.sz;
pkt_size = (uint32_t)pkt->data.frame.sz;
non_ref = (pkt->data.frame.flags & VPX_FRAME_IS_DROPPABLE);
is_keyframe = (pkt->data.frame.flags & VPX_FRAME_IS_KEY);

View File

@ -52,7 +52,7 @@ static int tdav_consumer_t140_consume(tmedia_consumer_t* self, const void* buffe
return TDAV_CONSUMER_T140(self)->cb_ondata.func(TDAV_CONSUMER_T140(self)->cb_ondata.context,
(enum tmedia_t140_data_type_e)*((int32_t*)buffer),
&((uint8_t*)buffer)[4],
(size - 4));
(unsigned int)(size - 4));
}
return 0;

View File

@ -648,7 +648,7 @@ static int _tdav_session_t140_recv_red(tdav_session_t140_t* self, const struct t
|0| Block PT |
+-+-+-+-+-+-+-+-+
*/
block_length = in_size;
block_length = (uint16_t)in_size;
seq_num = packet->header->seq_num;
timestamp_offset = 0;
block_add = tsk_true;
@ -659,7 +659,7 @@ static int _tdav_session_t140_recv_red(tdav_session_t140_t* self, const struct t
|1| block PT=7 | timestamp offset | block length |
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
*/
seq_num = packet->header->seq_num - (red_hdrs_count - 1 - i); // inferred by counting backwards
seq_num = (int32_t)(packet->header->seq_num - (red_hdrs_count - 1 - i)); // inferred by counting backwards
block_add = (pkt_loss_start != -1 && (seq_num <= pkt_loss_start && pkt_loss_start >= seq_num));
timestamp_offset = ((red_hdr[1] << 8) | red_hdr[2]) >> 2;
block_length = ((red_hdr[2] & 0x03) << 8) | red_hdr[3];
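For reference, the bit layout being unpacked here is the RFC 2198 redundancy block header; a standalone sketch of the same extraction follows. The one-byte final header (F=0, block PT only) is the earlier branch where block_length is taken from the remaining payload size.

#include <stdint.h>

/* Parse a 4-byte RFC 2198 RED block header:
   | F(1) | block PT(7) | timestamp offset(14) | block length(10) | */
static void red_parse_block_hdr(const uint8_t hdr[4], uint8_t* block_pt,
                                uint16_t* ts_offset, uint16_t* block_length)
{
    *block_pt     = (uint8_t)(hdr[0] & 0x7F);
    *ts_offset    = (uint16_t)(((hdr[1] << 8) | hdr[2]) >> 2);
    *block_length = (uint16_t)(((hdr[2] & 0x03) << 8) | hdr[3]);
}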

View File

@ -473,7 +473,7 @@ int tdav_set_codecs(tdav_codec_id_t codecs)
for(i=0,prio=0; i<__codec_plugins_all_count && __codec_plugins_all[i]; ++i){
if((codecs & __codec_plugins_all[i]->codec_id) || __codec_plugins_all[i]->codec_id == tmedia_codec_id_none){
if(_tdav_codec_is_supported((tdav_codec_id_t)__codec_plugins_all[i]->codec_id, __codec_plugins_all[i])){
tmedia_codec_plugin_register_2(__codec_plugins_all[i], prio++);
tmedia_codec_plugin_register_2(__codec_plugins_all[i], (int)prio++);
}
}
}

View File

@ -1413,7 +1413,7 @@ int tdav_session_av_set_ro(tdav_session_av_t* self, const struct tsdp_header_M_s
#if HAVE_SRTP
// this is SRTP negotiation -> do not trust the remote profile
if(is_srtp_dtls_local_enabled || is_srtp_sdes_local_enabled){
tsk_size_t i, j;
int32_t i, j;
const sdp_acap_xt *acap;
int ret;
if(is_srtp_sdes_local_enabled){
@ -1907,7 +1907,7 @@ static const sdp_acap_xt* _sdp_acaps_find_by_field(const sdp_acap_xt (*acaps)[SD
}
i = 0, j = 0;
size = tsk_strlen(field);
size = (int32_t)tsk_strlen(field);
while((*acaps)[j].tag && j < SDP_CAPS_COUNT_MAX){
k = _sdp_str_index_of((*acaps)[j].value, field);
if(k == 0 && (*acaps)[j].value[size] == ':'){
@ -1946,7 +1946,7 @@ static int _sdp_acaps_from_sdp(const sdp_headerM_Or_Message* sdp, sdp_acap_xt (*
acaps_idx = 0;
while((A = _sdp_findA_at(sdp, "acap", acaps_idx++))){
if(!(size = tsk_strlen(A->value))){
if (!(size = (int32_t)tsk_strlen(A->value))){
goto next;
}
if(sscanf(A->value, "%d", &tag) == EOF){
@ -1979,7 +1979,7 @@ _SDP_DECLARE_INDEX_OF(tcap);
static int _sdp_tcaps_from_sdp(const sdp_headerM_Or_Message* sdp, sdp_tcap_xt (*tcaps)[SDP_CAPS_COUNT_MAX], tsk_bool_t reset)
{
tsk_size_t tcaps_count, tcaps_idx, profiles_count;
int32_t tcaps_count, tcaps_idx, profiles_count;
const tsdp_header_A_t* A;
int32_t tag, index, size, tag_fake;
char tcap[256];
@ -2004,7 +2004,7 @@ static int _sdp_tcaps_from_sdp(const sdp_headerM_Or_Message* sdp, sdp_tcap_xt (*
index = 0;
tcaps_idx = 0;
while((A = _sdp_findA_at(sdp, "tcap", tcaps_idx++))){
if(!(size = tsk_strlen(A->value))){
if (!(size = (int32_t)tsk_strlen(A->value))){
goto next;
}
if(sscanf(&A->value[index], "%d", &tag) == EOF || (_sdp_integer_length(tag) + 1 >= size)){
@ -2025,7 +2025,7 @@ static int _sdp_tcaps_from_sdp(const sdp_headerM_Or_Message* sdp, sdp_tcap_xt (*
(*tcaps)[tcaps_count + profiles_count].tag = tag_fake;
(*tcaps)[tcaps_count + profiles_count].profile = _sdp_profile_from_string(tcap); // split profiles
}
if((index += tsk_strlen(tcap) + 1/*SPACE*/) >= size){
if ((index += (int32_t)tsk_strlen(tcap) + 1/*SPACE*/) >= size){
break;
}
++tag_fake;
@ -2120,7 +2120,7 @@ static int _sdp_pcfgs_from_sdp(const sdp_headerM_Or_Message* sdp, sdp_acap_xt (*
pcfgs_idx = 0;
tcap_curr = tsk_null;
while((A = _sdp_findA_at(sdp, "pcfg", pcfgs_idx++))){
if(!(size = tsk_strlen(A->value))){
if (!(size = (int32_t)tsk_strlen(A->value))){
goto next_A;
}
if(sscanf(A->value, "%d", &tag) == EOF || (_sdp_integer_length(tag) + 1 >= size)){
@ -2180,7 +2180,7 @@ next_a:
tcap_curr = tsk_null;
}
next_pcfg:
if((index += tsk_strlen(pcfg) + 1/*SPACE*/) >= size){
if ((index += (int32_t)tsk_strlen(pcfg) + 1/*SPACE*/) >= size){
break;
}
}
@ -2291,7 +2291,7 @@ static int _sdp_pcfg_ensure(sdp_headerM_Or_Message* sdp, const sdp_pcfg_xt* pcfg
}
for(i = 0; i < SDP_CAPS_COUNT_MAX && pcfg->acaps[i].tag > 0; ++i){
if(sscanf(pcfg->acaps[i].value, "%255s%*s", field) != EOF && (n = tsk_strlen(field)) > 2){
if (sscanf(pcfg->acaps[i].value, "%255s%*s", field) != EOF && (n = (int32_t)tsk_strlen(field)) > 2){
field[n - 2] = '\0';
_sdp_add_headerA(sdp, field, &pcfg->acaps[i].value[n + 1/*SPACE*/]);
}

View File

@ -115,10 +115,10 @@ static int tdav_consumer_video_gdi_prepare(tmedia_consumer_t* self, const tmedia
p_gdi->bitmapInfo.bmiHeader.biPlanes = 1;
p_gdi->bitmapInfo.bmiHeader.biBitCount = 24; // RGB24
p_gdi->bitmapInfo.bmiHeader.biCompression = BI_RGB;
p_gdi->bitmapInfo.bmiHeader.biWidth = TMEDIA_CONSUMER(p_gdi)->video.in.width;
p_gdi->bitmapInfo.bmiHeader.biHeight = TMEDIA_CONSUMER(p_gdi)->video.in.height * -1;
p_gdi->bitmapInfo.bmiHeader.biSizeImage = TMEDIA_CONSUMER(p_gdi)->video.in.width * abs(TMEDIA_CONSUMER(p_gdi)->video.in.height) *
(p_gdi->bitmapInfo.bmiHeader.biBitCount >> 3);
p_gdi->bitmapInfo.bmiHeader.biWidth = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.width;
p_gdi->bitmapInfo.bmiHeader.biHeight = (LONG)(TMEDIA_CONSUMER(p_gdi)->video.in.height * -1);
p_gdi->bitmapInfo.bmiHeader.biSizeImage = (DWORD)(TMEDIA_CONSUMER(p_gdi)->video.in.width * abs((int)TMEDIA_CONSUMER(p_gdi)->video.in.height) *
(p_gdi->bitmapInfo.bmiHeader.biBitCount >> 3));
return 0;
}
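A hedged sketch of the header setup above, separated from the consumer plumbing: the negative biHeight requests a top-down DIB, and the LONG/DWORD casts are what the commit adds because the consumer stores its dimensions as tsk_size_t.

#include <windows.h>

/* Fill a BITMAPINFO for a top-down RGB24 frame of the given size. */
static void fill_rgb24_top_down(BITMAPINFO* bi, size_t width, size_t height)
{
    ZeroMemory(bi, sizeof(*bi));
    bi->bmiHeader.biSize        = sizeof(BITMAPINFOHEADER);
    bi->bmiHeader.biPlanes      = 1;
    bi->bmiHeader.biBitCount    = 24;      /* RGB24 */
    bi->bmiHeader.biCompression = BI_RGB;
    bi->bmiHeader.biWidth       = (LONG)width;
    bi->bmiHeader.biHeight      = (LONG)height * -1; /* negative => top-down */
    bi->bmiHeader.biSizeImage   = (DWORD)(width * height * (24 >> 3));
}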
@ -169,9 +169,9 @@ static int tdav_consumer_video_gdi_consume(tmedia_consumer_t* self, const void*
ret = -1;
goto bail;
}
p_gdi->bitmapInfo.bmiHeader.biWidth = TMEDIA_CONSUMER(p_gdi)->video.in.width;
p_gdi->bitmapInfo.bmiHeader.biHeight = TMEDIA_CONSUMER(p_gdi)->video.in.height * -1;
p_gdi->bitmapInfo.bmiHeader.biSizeImage = xNewSize;
p_gdi->bitmapInfo.bmiHeader.biWidth = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.width;
p_gdi->bitmapInfo.bmiHeader.biHeight = (LONG)TMEDIA_CONSUMER(p_gdi)->video.in.height * -1;
p_gdi->bitmapInfo.bmiHeader.biSizeImage = (DWORD)xNewSize;
p_gdi->pBuffer = tsk_realloc(p_gdi->pBuffer, p_gdi->bitmapInfo.bmiHeader.biSizeImage);
}
@ -365,7 +365,7 @@ static HRESULT HookWindow(struct tdav_consumer_video_gdi_s *p_gdi, HWND hWnd, BO
#if TDAV_UNDER_WINDOWS_CE
*p_wndProc = (WNDPROC)SetWindowLong(hWnd, GWL_WNDPROC, (LONG)WndProc);
#else
*p_wndProc = (WNDPROC)SetWindowLongPtr(hWnd, GWL_WNDPROC, (LONG)WndProc);
*p_wndProc = (WNDPROC)SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)WndProc);
#endif
if (!*p_wndProc) {
TSK_DEBUG_ERROR("HookWindowLongPtr() failed with errcode=%d", GetLastError());
@ -390,7 +390,7 @@ static HRESULT UnhookWindow(struct tdav_consumer_video_gdi_s *p_gdi, BOOL bFullS
#if TDAV_UNDER_WINDOWS_CE
SetWindowLong(*p_Window, GWL_WNDPROC, (LONG)*p_wndProc);
#else
SetWindowLongPtr(*p_Window, GWL_WNDPROC, (LONG)*p_wndProc);
SetWindowLongPtr(*p_Window, GWLP_WNDPROC, (LONG_PTR)*p_wndProc);
#endif
*p_wndProc = NULL;
}
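The hook/unhook change above is the canonical Win64 subclassing fix: a WNDPROC no longer fits in a LONG, so GWL_WNDPROC/LONG must become GWLP_WNDPROC/LONG_PTR with SetWindowLongPtr(). A minimal sketch; on 32-bit Windows the same call resolves to SetWindowLong, so this is the portable spelling.

#include <windows.h>

/* Subclass a window, returning the previous procedure so it can be restored. */
static WNDPROC hook_wndproc(HWND hWnd, WNDPROC newProc)
{
    return (WNDPROC)SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)newProc);
}

static void unhook_wndproc(HWND hWnd, WNDPROC savedProc)
{
    if (savedProc) {
        SetWindowLongPtr(hWnd, GWLP_WNDPROC, (LONG_PTR)savedProc);
    }
}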

View File

@ -127,9 +127,9 @@ static int _tdav_producer_screencast_gdi_prepare(tmedia_producer_t* p_self, cons
TMEDIA_PRODUCER(p_gdi)->video.width = TMEDIA_CODEC_VIDEO(pc_codec)->out.width;
TMEDIA_PRODUCER(p_gdi)->video.height = TMEDIA_CODEC_VIDEO(pc_codec)->out.height;
p_gdi->bitmapInfoNeg.bmiHeader.biSize = p_gdi->bitmapInfoSrc.bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
p_gdi->bitmapInfoNeg.bmiHeader.biWidth = p_gdi->bitmapInfoSrc.bmiHeader.biWidth = TMEDIA_PRODUCER(p_gdi)->video.width;
p_gdi->bitmapInfoNeg.bmiHeader.biHeight = p_gdi->bitmapInfoSrc.bmiHeader.biHeight = TMEDIA_PRODUCER(p_gdi)->video.height;
p_gdi->bitmapInfoNeg.bmiHeader.biSize = p_gdi->bitmapInfoSrc.bmiHeader.biSize = (DWORD)sizeof(BITMAPINFOHEADER);
p_gdi->bitmapInfoNeg.bmiHeader.biWidth = p_gdi->bitmapInfoSrc.bmiHeader.biWidth = (LONG)TMEDIA_PRODUCER(p_gdi)->video.width;
p_gdi->bitmapInfoNeg.bmiHeader.biHeight = p_gdi->bitmapInfoSrc.bmiHeader.biHeight = (LONG)TMEDIA_PRODUCER(p_gdi)->video.height;
p_gdi->bitmapInfoNeg.bmiHeader.biPlanes = p_gdi->bitmapInfoSrc.bmiHeader.biPlanes = 1;
p_gdi->bitmapInfoNeg.bmiHeader.biBitCount = p_gdi->bitmapInfoSrc.bmiHeader.biBitCount = 24;
p_gdi->bitmapInfoNeg.bmiHeader.biCompression = p_gdi->bitmapInfoSrc.bmiHeader.biCompression = BI_RGB;
@ -290,13 +290,13 @@ static int _tdav_producer_screencast_grab(tdav_producer_screencast_gdi_t* p_self
hSrcDC = GetDC(p_self->hwnd_src);
if (!hSrcDC) {
TSK_DEBUG_ERROR("GetDC(%x) failed", p_self->hwnd_src);
TSK_DEBUG_ERROR("GetDC(%x) failed", (int64_t)p_self->hwnd_src);
ret = -5;
goto bail;
}
hMemDC = CreateCompatibleDC(hSrcDC);
if (!hMemDC) {
TSK_DEBUG_ERROR("CreateCompatibleDC(%x) failed", hSrcDC);
TSK_DEBUG_ERROR("CreateCompatibleDC(%x) failed", (int64_t)hSrcDC);
ret = -6;
goto bail;
}

View File

@ -1,7 +1,6 @@
/*
* Copyright (C) 2011 Doubango Telecom <http://www.doubango.org>
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(DOT)org>
* Copyright (C) 2011-2015 Mamadou DIOP
* Copyright (C) 2011-2015 Doubango Telecom <http://www.doubango.org>
*
* This file is part of Open Source Doubango Framework.
*
@ -22,8 +21,6 @@
/**@file tdav_video_jb.c
* @brief Video Jitter Buffer
*
* @author Mamadou Diop <diopmamadou(at)doubango(DOT)org>
*/
#include "tinydav/video/jb/tdav_video_jb.h"
#include "tinydav/video/jb/tdav_video_frame.h"
@ -427,7 +424,7 @@ static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
// TSK_DEBUG_INFO("Frames count = %d", jb->frames_count);
if(jb->frames_count >= jb->latency_min){
if(jb->frames_count >= (int64_t)jb->latency_min){
item = tsk_null;
postpone = tsk_false;
@ -435,7 +432,7 @@ static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
tsk_list_lock(jb->frames); // against put()
// is it still acceptable to wait for missing packets?
if(jb->frames_count < jb->latency_max){
if (jb->frames_count < (int64_t)jb->latency_max){
frame = (const tdav_video_frame_t*)jb->frames->head->data;
if(!tdav_video_frame_is_complete(frame, jb->decode_last_seq_num_with_mark, &missing_seq_num_start, &missing_seq_num_count)){
TSK_DEBUG_INFO("Time to decode frame...but some RTP packets are missing (missing_seq_num_start=%hu, missing_seq_num_count=%u, last_seq_num_with_mark=%d). Postpone :(", missing_seq_num_start, missing_seq_num_count, jb->decode_last_seq_num_with_mark);
@ -490,7 +487,7 @@ static void* TSK_STDCALL _tdav_video_jb_decode_thread_func(void *arg)
#if 1
now = tsk_time_now();
if(jb->frames_count > jb->latency_max){
if (jb->frames_count > (int64_t)jb->latency_max){
x_decode_time = now;
next_decode_duration = 0;
}

View File

@ -80,7 +80,9 @@ int tdav_consumer_video_init(tdav_consumer_video_t* self)
*/
int tdav_consumer_video_cmp(const tsk_object_t* consumer1, const tsk_object_t* consumer2)
{
return (TDAV_CONSUMER_VIDEO(consumer1) - TDAV_CONSUMER_VIDEO(consumer2));
int ret;
tsk_subsat_int32_ptr(consumer1, consumer2, &ret);
return ret;
}
int tdav_consumer_video_set(tdav_consumer_video_t* self, const tmedia_param_t* param)
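The comparator above stops returning a raw pointer subtraction because on a 64-bit build the difference no longer fits in an int. The replacement relies on the library's tsk_subsat_int32_ptr() helper; a generic alternative that keeps the same contract (zero when equal) could look like this sketch:

#include <stdint.h>

/* Order two pointers without overflowing: returns -1, 0 or +1. */
static int ptr_cmp(const void* a, const void* b)
{
    uintptr_t ua = (uintptr_t)a, ub = (uintptr_t)b;
    return (ua > ub) - (ua < ub);
}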

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango.org>
* Copyright (C) 2010-2015 Mamadou DIOP.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,8 +22,6 @@
*
* @author Mamadou Diop <diopmamadou(at)doubango.org>
* @author Alex Vishnev (Added support for rotation)
*
* @date Created: Sat Nov 8 16:54:58 2009 mdiop
*/
#include "tinydav/video/tdav_converter_video.h"
@ -190,7 +186,7 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
scale |= (_self->dstWidth != _self->dstHeight) && (rotation == kRotate90 || rotation == kRotate270);
}
src_w = _self->srcWidth , src_h = _self->srcHeight;
src_w = (int)_self->srcWidth , src_h = (int)_self->srcHeight;
if(self->toI420){
tsk_size_t x_in_size;
@ -219,25 +215,25 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
// convert to I420 without scaling or rotation
ret = ConvertToI420(
(const uint8*)buffer, x_in_size,
(const uint8*)buffer, (int)x_in_size,
dst_y, dst_y_stride,
dst_u, dst_u_stride,
dst_v, dst_v_stride,
crop_x, crop_y,
_self->srcWidth, (_self->flip ? (_self->srcHeight * -1) : _self->srcHeight),
_self->srcWidth, _self->srcHeight,
(int)_self->srcWidth, (int)(_self->flip ? (_self->srcHeight * -1) : _self->srcHeight),
(int)_self->srcWidth, (int)_self->srcHeight,
kRotate0,
(uint32) self->srcFormat);
if(ret){
if (ret){
TSK_DEBUG_ERROR("ConvertToI420 failed with error code = %d, in_size:%u", ret, x_in_size);
return 0;
}
// rotate
if(rotation != kRotate0){
dst_w = (rotation == kRotate90 || rotation == kRotate270) ? _self->srcHeight : _self->srcWidth;
dst_h = (rotation == kRotate90 || rotation == kRotate270) ? _self->srcWidth : _self->srcHeight;
dst_w = (int)((rotation == kRotate90 || rotation == kRotate270) ? _self->srcHeight : _self->srcWidth);
dst_h = (int)((rotation == kRotate90 || rotation == kRotate270) ? _self->srcWidth : _self->srcHeight);
src_y = dst_y, src_u = dst_u, src_v = dst_v;
src_y_stride = src_y_stride, src_u_stride = src_u_stride, src_v_stride = src_v_stride;
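A hedged wrapper around the ConvertToI420 call shown above (assumption: the caller has already allocated the Y/U/V planes; every name except the libyuv API is a placeholder). libyuv takes plain int dimensions and uses a negative source height to request a vertical flip, which is why the tsk_size_t fields are narrowed explicitly for the 64-bit build.

#include <stdint.h>
#include <stddef.h>
#include "libyuv.h" /* ConvertToI420(), kRotate0 */

static int to_i420(const uint8_t* src, size_t src_size, uint32_t src_fourcc,
                   size_t width, size_t height, int flip,
                   uint8_t* y, int y_stride,
                   uint8_t* u, int u_stride,
                   uint8_t* v, int v_stride)
{
    return ConvertToI420(src, src_size,
                         y, y_stride, u, u_stride, v, v_stride,
                         0, 0,                                  /* no crop */
                         (int)width, flip ? -(int)height : (int)height,
                         (int)width, (int)height,
                         kRotate0, src_fourcc);
}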
@ -262,16 +258,16 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
dst_y, dst_y_stride,
dst_u, dst_u_stride,
dst_v, dst_v_stride,
_self->srcWidth, _self->srcHeight, rotation);
if(ret){
(int)_self->srcWidth, (int)_self->srcHeight, rotation);
if (ret){
TSK_DEBUG_ERROR("I420Rotate failed with error code = %d", ret);
return 0;
}
// scale to fit ratio, pad, crop then copy
if((rotation == kRotate90 || rotation == kRotate270) && _self->scale_rotated_frames){
int iwidth = _self->srcHeight;
int iheight = _self->srcWidth;
int iwidth = (int)_self->srcHeight;
int iheight = (int)_self->srcWidth;
src_y = dst_y, src_u = dst_u, src_v = dst_v;
src_w = dst_w, src_h = dst_h;
@ -279,14 +275,14 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
if (_self->dstWidth != _self->dstHeight) {
if (iwidth * _self->srcHeight > iheight * _self->srcWidth) {
iwidth = (iheight * _self->srcWidth / _self->srcHeight) & ~1;
int iwidth_offset = (_self->srcHeight - iwidth) >> 1;
iwidth = (int)((iheight * _self->srcWidth / _self->srcHeight) & ~1);
int iwidth_offset = (int)((_self->srcHeight - iwidth) >> 1);
src_y += iwidth_offset;
src_u += iwidth_offset >> 1;
src_v += iwidth_offset >> 1;
} else if (iwidth * _self->srcHeight < iheight * _self->srcWidth) {
iheight = iwidth * _self->srcHeight / _self->srcWidth;
int iheight_offset = (_self->srcWidth - iheight) >> 2;
iheight = (int)(iwidth * _self->srcHeight / _self->srcWidth);
int iheight_offset = (int)((_self->srcWidth - iheight) >> 2);
iheight_offset <<= 1;
src_y += iheight_offset * src_y_stride;
src_u += (iheight_offset >> 1) * src_u_stride;
@ -297,8 +293,8 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
src_y_stride = src_w;
src_u_stride = src_v_stride = ((src_y_stride + 1) >> 1);
dst_w = _self->dstWidth;
dst_h = _self->dstHeight;
dst_w = (int)_self->dstWidth;
dst_h = (int)_self->dstHeight;
ls = dst_w * dst_h;
s = ((ls * 3) >> 1);
RESIZE_BUFFER((*output), (*output_max_size), s);
@ -330,8 +326,8 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
// scale
if(scale){
src_w = dst_w, src_h = dst_h;
dst_w = ((rotation == kRotate90 || rotation == kRotate270) && !_self->scale_rotated_frames) ? _self->dstHeight : _self->dstWidth;
dst_h = ((rotation == kRotate90 || rotation == kRotate270) && !_self->scale_rotated_frames) ? _self->dstWidth : _self->dstHeight;
dst_w = (int)(((rotation == kRotate90 || rotation == kRotate270) && !_self->scale_rotated_frames) ? _self->dstHeight : _self->dstWidth);
dst_h = (int)(((rotation == kRotate90 || rotation == kRotate270) && !_self->scale_rotated_frames) ? _self->dstWidth : _self->dstHeight);
src_y = dst_y, src_u = dst_u, src_v = dst_v;
src_y_stride = dst_y_stride, src_u_stride = dst_u_stride, src_v_stride = dst_v_stride;
dst_y_stride = dst_w;
@ -365,7 +361,7 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
else if(self->fromI420){
static const int dst_sample_stride = 0;
dst_w = _self->dstWidth, dst_h = _self->dstHeight;
dst_w = (int)_self->dstWidth, dst_h = (int)_self->dstHeight;
src_y = (uint8*)buffer;
src_u = (src_y + (src_w * src_h));
src_v = (src_u + ((src_w * src_h) >> 2));
@ -405,7 +401,7 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
src_u_stride = src_v_stride = ((src_y_stride + 1) >> 1);
}
s = _tdav_converter_video_libyuv_get_size(_self->dstChroma, _self->srcWidth, _self->srcHeight);
s = (int)_tdav_converter_video_libyuv_get_size(_self->dstChroma, _self->srcWidth, _self->srcHeight);
RESIZE_BUFFER((*output), (*output_max_size), s);
ret = ConvertFromI420(
@ -413,7 +409,7 @@ static tsk_size_t tdav_converter_video_libyuv_process(tmedia_converter_video_t*
src_u, src_u_stride,
src_v, src_v_stride,
(uint8*)*output, dst_sample_stride,
_self->dstWidth, (_self->flip ? (_self->dstHeight * -1) : _self->dstHeight),
(int)_self->dstWidth, (_self->flip ? ((int)_self->dstHeight * -1) : (int)_self->dstHeight),
(uint32) self->dstFormat);
if(ret){
TSK_DEBUG_ERROR("ConvertFromI420 failed with error code = %d", ret);
@ -511,7 +507,7 @@ tdav_converter_video_ffmpeg_t;
register int i,j; \
register int newx = 0; \
for (i = 0; i < (int)(srcw); i ++ ){ \
for( j = srch-1; j >=0; j -- ){ \
for( j = (int)srch-1; j >=0; j -- ){ \
(dstdata)[newx++] = (srcdata)[j * (srcw) + i]; \
} \
} \
@ -581,28 +577,28 @@ static tsk_size_t tdav_converter_video_ffmpeg_process(tmedia_converter_video_t*
tsk_bool_t _rotate = tsk_false;
tdav_converter_video_ffmpeg_t* self = TDAV_CONVERTER_VIDEO_FFMPEG(_self);
if(!self || !buffer || !output){
if (!self || !buffer || !output){
TSK_DEBUG_ERROR("Invalid parameter");
return 0;
}
/* Pictures */
if(!self->srcFrame){
if(!(self->srcFrame = avcodec_alloc_frame())){
if (!self->srcFrame){
if (!(self->srcFrame = avcodec_alloc_frame())){
TSK_DEBUG_ERROR("Failed to create picture");
return 0;
}
}
if(!self->dstFrame){
if(!(self->dstFrame = avcodec_alloc_frame())){
if (!self->dstFrame){
if (!(self->dstFrame = avcodec_alloc_frame())){
TSK_DEBUG_ERROR("Failed to create picture");
return 0;
}
}
size = avpicture_get_size(self->dstFormat, _self->dstWidth, _self->dstHeight);
if((int)*output_max_size <size){
if(!(*output = tsk_realloc(*output, (size + FF_INPUT_BUFFER_PADDING_SIZE)))){
size = avpicture_get_size(self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight);
if ((int)*output_max_size < size){
if (!(*output = tsk_realloc(*output, (size + FF_INPUT_BUFFER_PADDING_SIZE)))){
*output_max_size = 0;
TSK_DEBUG_ERROR("Failed to allocate buffer");
return 0;
@ -611,48 +607,48 @@ static tsk_size_t tdav_converter_video_ffmpeg_process(tmedia_converter_video_t*
}
/* Wrap the source buffer */
ret = avpicture_fill((AVPicture *)self->srcFrame, (uint8_t*)buffer, self->srcFormat, _self->srcWidth, _self->srcHeight);
ret = avpicture_fill((AVPicture *)self->srcFrame, (uint8_t*)buffer, self->srcFormat, (int)_self->srcWidth, (int)_self->srcHeight);
/* Wrap the destination buffer */
ret = avpicture_fill((AVPicture *)self->dstFrame, (uint8_t*)*output, self->dstFormat, _self->dstWidth, _self->dstHeight);
ret = avpicture_fill((AVPicture *)self->dstFrame, (uint8_t*)*output, self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight);
/* === performs conversion === */
/* Context */
if(!self->context){
if (!self->context) {
self->context = sws_getContext(
_self->srcWidth, _self->srcHeight, self->srcFormat,
_self->dstWidth, _self->dstHeight, self->dstFormat,
(int)_self->srcWidth, (int)_self->srcHeight, self->srcFormat,
(int)_self->dstWidth, (int)_self->dstHeight, self->dstFormat,
SWS_FAST_BILINEAR, NULL, NULL, NULL);
if(!self->context){
if (!self->context) {
TSK_DEBUG_ERROR("Failed to create context");
return 0;
}
}
/*FIXME: For now only 90° rotation is supported this is why we always use libyuv on mobile devices */
_rotate = (PIX_FMT_YUV420P == self->dstFormat) && _self->rotation==90;
_rotate = (PIX_FMT_YUV420P == self->dstFormat) && _self->rotation == 90;
// if no rotation then, flip while scaling othersize do it after rotation
if(!_rotate && _self->flip){
if (!_rotate && _self->flip) {
_tdav_converter_video_ffmpeg_flip(self->dstFrame, _self->dstHeight);
}
// chroma conversion and scaling
ret = sws_scale(self->context, (const uint8_t* const*)self->srcFrame->data, self->srcFrame->linesize, 0, _self->srcHeight,
ret = sws_scale(self->context, (const uint8_t* const*)self->srcFrame->data, self->srcFrame->linesize, 0, (int)_self->srcHeight,
self->dstFrame->data, self->dstFrame->linesize);
if(ret < 0){
if (ret < 0){
TSK_FREE(*output);
return 0;
}
// Rotation
if(_rotate){
if (_rotate){
// because we rotated 90 width = original height, height = original width
int w = _self->dstHeight;
int h = _self->dstWidth;
int w = (int)_self->dstHeight;
int h = (int)_self->dstWidth;
// allocation rotation frame if not already done
if(!(self->rot.frame) && !(self->rot.frame = avcodec_alloc_frame())){
if (!(self->rot.frame) && !(self->rot.frame = avcodec_alloc_frame())){
TSK_DEBUG_ERROR("failed to allocate rotation frame");
TSK_FREE(*output);
return(0);
@ -660,7 +656,7 @@ static tsk_size_t tdav_converter_video_ffmpeg_process(tmedia_converter_video_t*
// allocate rotation temporary buffer
size = avpicture_get_size(self->dstFormat, w, h);
if(self->rot.buffer_size != size){
if (self->rot.buffer_size != size){
if (!(self->rot.buffer = (uint8_t *)av_realloc(self->rot.buffer, size))){
TSK_DEBUG_ERROR("failed to allocate new buffer for the frame");
self->rot.buffer_size = 0;
@ -668,65 +664,65 @@ static tsk_size_t tdav_converter_video_ffmpeg_process(tmedia_converter_video_t*
}
self->rot.buffer_size = size;
}
//wrap
avpicture_fill((AVPicture *)self->rot.frame, self->rot.buffer, self->dstFormat, w, h);
// rotate
_tdav_converter_video_ffmpeg_rotate90(_self->dstWidth, _self->dstHeight, self->dstFrame->data[0], self->rot.frame->data[0]);
_tdav_converter_video_ffmpeg_rotate90((_self->dstWidth>>1), (_self->dstHeight>>1), self->dstFrame->data[1], self->rot.frame->data[1]);
_tdav_converter_video_ffmpeg_rotate90((_self->dstWidth>>1), (_self->dstHeight>>1), self->dstFrame->data[2], self->rot.frame->data[2]);
_tdav_converter_video_ffmpeg_rotate90((_self->dstWidth >> 1), (_self->dstHeight >> 1), self->dstFrame->data[1], self->rot.frame->data[1]);
_tdav_converter_video_ffmpeg_rotate90((_self->dstWidth >> 1), (_self->dstHeight >> 1), self->dstFrame->data[2], self->rot.frame->data[2]);
// flip
if(_self->flip){
if (_self->flip){
_tdav_converter_video_ffmpeg_flip(self->rot.frame, h);
}
{
static const int y_shift = 1;
static const int x_shift = 1;
int r_size, r_w, r_h, left_band, top_band;
int pad = ((int)_self->dstWidth-w)>((int)_self->dstHeight-h)?((int)_self->dstWidth-w):((int)_self->dstHeight-h);
if(pad<0){
pad=0;
int pad = ((int)_self->dstWidth - w) > ((int)_self->dstHeight - h) ? ((int)_self->dstWidth - w) : ((int)_self->dstHeight - h);
if (pad < 0){
pad = 0;
}
r_size;
r_w = w+pad;
r_h = h+pad;
left_band = (r_w-_self->dstWidth)/2;
top_band = (r_h-_self->dstHeight)/3;
if(!self->rot.context){
if(!(self->rot.context = sws_getContext(w, h, self->dstFormat, r_w, r_h, self->dstFormat, SWS_FAST_BILINEAR, NULL, NULL, NULL))){
r_w = w + pad;
r_h = h + pad;
left_band = (int)((r_w - _self->dstWidth) / 2);
top_band = (int)((r_h - _self->dstHeight) / 3);
if (!self->rot.context){
if (!(self->rot.context = sws_getContext(w, h, self->dstFormat, r_w, r_h, self->dstFormat, SWS_FAST_BILINEAR, NULL, NULL, NULL))){
TSK_DEBUG_ERROR("Failed to create context");
TSK_FREE(*output);
return 0;
}
}
r_size = avpicture_get_size(self->dstFormat, r_w, r_h);
if((int)*output_max_size < r_size){
if(!(*output = tsk_realloc(*output, (r_size + FF_INPUT_BUFFER_PADDING_SIZE)))){
if ((int)*output_max_size < r_size){
if (!(*output = tsk_realloc(*output, (r_size + FF_INPUT_BUFFER_PADDING_SIZE)))){
*output_max_size = 0;
TSK_DEBUG_ERROR("Failed to allocate buffer");
return 0;
}
*output_max_size = r_size;
}
// re-wrap
avpicture_fill((AVPicture *)self->dstFrame, (uint8_t*)*output, self->dstFormat, r_w, r_h);
// pad
sws_scale(self->rot.context, (const uint8_t* const*)self->rot.frame->data, self->rot.frame->linesize,
0, h, self->dstFrame->data, self->dstFrame->linesize);
sws_scale(self->rot.context, (const uint8_t* const*)self->rot.frame->data, self->rot.frame->linesize,
0, h, self->dstFrame->data, self->dstFrame->linesize);
// crop
self->dstFrame->data[0] = self->dstFrame->data[0] + (top_band * self->dstFrame->linesize[0]) + left_band;
self->dstFrame->data[1] = self->dstFrame->data[1] + ((top_band >> y_shift) * self->dstFrame->linesize[1]) + (left_band >> x_shift);
self->dstFrame->data[2] = self->dstFrame->data[2] + ((top_band >> y_shift) * self->dstFrame->linesize[2]) + (left_band >> x_shift);
avpicture_layout((const AVPicture*)self->dstFrame, self->dstFormat, _self->dstWidth, _self->dstHeight, (unsigned char *)*output, *output_max_size);
avpicture_layout((const AVPicture*)self->dstFrame, self->dstFormat, (int)_self->dstWidth, (int)_self->dstHeight, (unsigned char *)*output, (int)*output_max_size);
}
}//end of rotation
return size;
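A condensed sketch of the chroma-conversion path above, using the same legacy FFmpeg/libswscale API level as the plugin (avcodec_alloc_frame, avpicture_fill, sws_scale); rotation, flipping and padding are left out, and error handling is reduced to the minimum.

#include <stddef.h>
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>

static int convert_frame(struct SwsContext** ctx,
                         const uint8_t* src, enum PixelFormat src_fmt,
                         size_t src_w, size_t src_h,
                         uint8_t* dst, enum PixelFormat dst_fmt,
                         size_t dst_w, size_t dst_h)
{
    AVFrame* in  = avcodec_alloc_frame();
    AVFrame* out = avcodec_alloc_frame();
    int ret = -1;

    if (in && out) {
        /* wrap the source and destination buffers */
        avpicture_fill((AVPicture*)in, (uint8_t*)src, src_fmt, (int)src_w, (int)src_h);
        avpicture_fill((AVPicture*)out, dst, dst_fmt, (int)dst_w, (int)dst_h);
        if (!*ctx) {
            *ctx = sws_getContext((int)src_w, (int)src_h, src_fmt,
                                  (int)dst_w, (int)dst_h, dst_fmt,
                                  SWS_FAST_BILINEAR, NULL, NULL, NULL);
        }
        if (*ctx) {
            ret = sws_scale(*ctx, (const uint8_t* const*)in->data, in->linesize,
                            0, (int)src_h, out->data, out->linesize);
        }
    }
    av_free(in);
    av_free(out);
    return ret;
}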

View File

@ -644,7 +644,7 @@ static int tdav_session_video_rtcp_cb(const void* callback_data, const trtp_rtcp
}
if(item == video->avpf.packets->tail){
// should never be called unless the tail is too small
int32_t old_max = video->avpf.max;
int32_t old_max = (int32_t)video->avpf.max;
int32_t len_drop = (pkt_rtp->header->seq_num - pid);
video->avpf.max = TSK_CLAMP((int32_t)tmedia_defaults_get_avpf_tail_min(), (old_max + len_drop), (int32_t)tmedia_defaults_get_avpf_tail_max());
TSK_DEBUG_INFO("**NACK requesting dropped frames. List=[%d-%d], requested=%d, List.Max=%d, List.Count=%d. RTT is probably too high.",
@ -701,7 +701,7 @@ static int _tdav_session_video_jb_cb(const tdav_video_jb_cb_data_xt* data)
uint16_t seq_nums[16];
for(i = 0; i < data->fl.count; i+=16){
for(j = 0, k = i; j < 16 && k < data->fl.count; ++j, ++k){
seq_nums[j] = (data->fl.seq_num + i + j);
seq_nums[j] = (uint16_t)(data->fl.seq_num + i + j);
TSK_DEBUG_INFO("Request re-send(%u)", seq_nums[j]);
}
trtp_manager_signal_pkt_loss(base->rtp_manager, data->ssrc, seq_nums, j);
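The retransmission loop above batches lost packets into groups of at most 16 sequence numbers per signalling call; a sketch of the same logic with the call abstracted away. The (uint16_t) cast is the Win64-relevant part, keeping the sequence numbers wrapping mod 2^16 as RTP expects.

#include <stdint.h>
#include <stddef.h>

typedef void (*signal_loss_f)(void* ctx, const uint16_t* seq_nums, size_t count);

/* Ask for retransmission of lost_count packets starting at first_seq_num. */
static void request_resend(void* ctx, signal_loss_f signal_loss,
                           uint16_t first_seq_num, size_t lost_count)
{
    uint16_t seq_nums[16];
    size_t i, j, k;
    for (i = 0; i < lost_count; i += 16) {
        for (j = 0, k = i; j < 16 && k < lost_count; ++j, ++k) {
            seq_nums[j] = (uint16_t)(first_seq_num + k);
        }
        signal_loss(ctx, seq_nums, j);
    }
}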

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -21,10 +19,6 @@
*/
/**@file thttp_machine_headers.rl.
* @brief Ragel file.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
/*== Header parsing machine. Supports both full-length and compact mode. */

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -21,10 +19,6 @@
*/
/**@file thttp_machine_message.rl
* @brief Ragel file.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
%%{
machine thttp_machine_message;

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,9 +20,6 @@
/**@file thttp_machine_utils.rl
* @brief Ragel file.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
%%{
machine thttp_machine_utils;

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -19,6 +17,7 @@
* along with DOUBANGO.
*
*/
*/
%%{
machine thttp_machine_ws;

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,10 +20,6 @@
/**@file thttp_parser_header.c
* @brief HTTP headers parser.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/parsers/thttp_parser_header.h"
@ -514,6 +508,7 @@ int thttp_header_parse(tsk_ragel_state_t *state, thttp_message_t *message)
const char *pe = state->tag_end;
const char *eof = pe;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_headers_first_final);
@ -521,6 +516,7 @@ int thttp_header_parse(tsk_ragel_state_t *state, thttp_message_t *message)
(void)(thttp_machine_parser_headers_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
return ( cs >= %%{ write first_final; }%% ) ? 0 : -1;
}
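The TSK_RAGEL_DISABLE_WARNINGS_BEGIN()/END() pair wrapped around the generated %%write blocks here (and in every Ragel parser below) is defined elsewhere in tinySAK and is not part of this hunk. An assumed, MSVC-oriented shape of such a wrapper is sketched below purely to show what the brackets are for: the generated state tables otherwise trip size-truncation warnings on the x64 build.

/* Assumption: a plausible definition, not the project's actual macros. */
#if defined(_MSC_VER)
#   define TSK_RAGEL_DISABLE_WARNINGS_BEGIN() \
        __pragma(warning(push)) \
        __pragma(warning(disable:4267 4244)) /* size_t / integer narrowing */
#   define TSK_RAGEL_DISABLE_WARNINGS_END() \
        __pragma(warning(pop))
#else
#   define TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
#   define TSK_RAGEL_DISABLE_WARNINGS_END()
#endif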

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,10 +20,6 @@
/**@file thttp_header_Authorization.c
* @brief HTTP Authorization header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Authorization.h"
@ -222,6 +216,7 @@ thttp_header_Authorization_t *thttp_header_Authorization_parse(const char *data,
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Authorization_first_final);
@ -230,6 +225,7 @@ thttp_header_Authorization_t *thttp_header_Authorization_parse(const char *data,
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse Authorization header.");

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -19,13 +17,9 @@
* along with DOUBANGO.
*
*/
/**@file thttp_header_Content_Length.c
* @brief HTTP Content-Length header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Content_Length.h"
@ -86,6 +80,7 @@ thttp_header_Content_Length_t *thttp_header_Content_Length_parse(const char *dat
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Content_Length_first_final);
@ -93,6 +88,7 @@ thttp_header_Content_Length_t *thttp_header_Content_Length_parse(const char *dat
(void)(thttp_machine_parser_header_Content_Length_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_OBJECT_SAFE_FREE(hdr_clength);

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,10 +20,6 @@
/**@file thttp_header_Content_Type.c
* @brief HTTP Content-Type header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Content_Type.h"
@ -110,6 +104,7 @@ thttp_header_Content_Type_t *thttp_header_Content_Type_parse(const char *data, t
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Content_Type_first_final);
@ -117,6 +112,7 @@ thttp_header_Content_Type_t *thttp_header_Content_Type_parse(const char *data, t
(void)(thttp_machine_parser_header_Content_Type_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse Content-Type header.");

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -23,9 +21,6 @@
/**@file thttp_header_Dummy.c
* @brief HTTP 'Dummy' header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Dummy.h"
@ -103,6 +98,7 @@ thttp_header_Dummy_t *thttp_header_Dummy_parse(const char *data, tsk_size_t size
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Dummy_first_final);
@ -110,6 +106,7 @@ thttp_header_Dummy_t *thttp_header_Dummy_parse(const char *data, tsk_size_t size
(void)(thttp_machine_parser_header_Dummy_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_OBJECT_SAFE_FREE(hdr_Dummy);

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,10 +20,6 @@
/**@file thttp_header_ETag.c
* @brief HTTP 'Etag' header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_ETag.h"
@ -110,6 +104,7 @@ thttp_header_ETag_t *thttp_header_ETag_parse(const char *data, tsk_size_t size)
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_ETag_first_final);
@ -117,6 +112,7 @@ thttp_header_ETag_t *thttp_header_ETag_parse(const char *data, tsk_size_t size)
(void)(thttp_machine_parser_header_ETag_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse ETag header.");

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,9 +20,6 @@
/**@file thttp_header_Sec_WebSocket_Accept.c
* @brief WebSocket "Sec-WebSocket-Accept" header.
*
* @author Mamadou Diop <diopmamadou(at)doubango(dor)org>
*
*/
#include "tinyhttp/headers/thttp_header_Sec_WebSocket_Accept.h"
@ -84,6 +79,7 @@ thttp_header_Sec_WebSocket_Accept_t *thttp_header_Sec_WebSocket_Accept_parse(con
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Sec_WebSocket_Accept_first_final);
@ -91,6 +87,7 @@ thttp_header_Sec_WebSocket_Accept_t *thttp_header_Sec_WebSocket_Accept_parse(con
(void)(thttp_machine_parser_header_Sec_WebSocket_Accept_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse Sec-WebSocket-Accept header.");

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,9 +20,6 @@
/**@file thttp_header_Sec_WebSocket_Key.c
* @brief WebSocket "Sec-WebSocket-Key" header.
*
* @author Mamadou Diop <diopmamadou(at)doubango(dor)org>
*
*/
#include "tinyhttp/headers/thttp_header_Sec_WebSocket_Key.h"
@ -84,6 +79,7 @@ thttp_header_Sec_WebSocket_Key_t *thttp_header_Sec_WebSocket_Key_parse(const cha
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Sec_WebSocket_Key_first_final);
@ -91,6 +87,7 @@ thttp_header_Sec_WebSocket_Key_t *thttp_header_Sec_WebSocket_Key_parse(const cha
(void)(thttp_machine_parser_header_Sec_WebSocket_Key_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse Sec-WebSocket-Key header.");

View File

@ -1,14 +1,12 @@
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either protocol 3 of the License, or
* (at your option) any later protocol.
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
@ -23,8 +21,6 @@
/**@file thttp_header_Sec_WebSocket_Protocol.c
* @brief WebSocket "Sec-WebSocket-Protocol" header.
*
* @author Mamadou Diop <diopmamadou(at)doubango(dor)org>
*
*/
#include "tinyhttp/headers/thttp_header_Sec_WebSocket_Protocol.h"
@ -92,6 +88,7 @@ thttp_header_Sec_WebSocket_Protocol_t *thttp_header_Sec_WebSocket_Protocol_parse
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Sec_WebSocket_Protocol_first_final);
@ -99,6 +96,7 @@ thttp_header_Sec_WebSocket_Protocol_t *thttp_header_Sec_WebSocket_Protocol_parse
(void)(thttp_machine_parser_header_Sec_WebSocket_Protocol_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse Sec-WebSocket-Protocol header.");

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,9 +20,6 @@
/**@file thttp_header_Sec_WebSocket_Version.c
* @brief WebSocket "Sec-WebSocket-Key" header.
*
* @author Mamadou Diop <diopmamadou(at)doubango(dor)org>
*
*/
#include "tinyhttp/headers/thttp_header_Sec_WebSocket_Version.h"
@ -92,6 +87,7 @@ thttp_header_Sec_WebSocket_Version_t *thttp_header_Sec_WebSocket_Version_parse(c
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Sec_WebSocket_Version_first_final);
@ -99,6 +95,7 @@ thttp_header_Sec_WebSocket_Version_t *thttp_header_Sec_WebSocket_Version_parse(c
(void)(thttp_machine_parser_header_Sec_WebSocket_Version_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse Sec-WebSocket-Version header.");

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,10 +20,6 @@
/**@file thttp_header_Transfer_Encoding.c
* @brief HTTP Transfer-Encoding header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Transfer_Encoding.h"
@ -101,6 +95,7 @@ thttp_header_Transfer_Encoding_t *thttp_header_Transfer_Encoding_parse(const cha
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_Transfer_Encoding_first_final);
@ -108,6 +103,7 @@ thttp_header_Transfer_Encoding_t *thttp_header_Transfer_Encoding_parse(const cha
(void)(thttp_machine_parser_header_Transfer_Encoding_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse Tansfer-Encoding header.");

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -22,10 +20,6 @@
/**@file thttp_header_WWW_Authenticate.c
* @brief HTTP WWW-Authenticate header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_WWW_Authenticate.h"
@ -190,6 +184,7 @@ thttp_header_WWW_Authenticate_t *thttp_header_WWW_Authenticate_parse(const char
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(eof);
(void)(thttp_machine_parser_header_WWW_Authenticate_first_final);
@ -197,6 +192,7 @@ thttp_header_WWW_Authenticate_t *thttp_header_WWW_Authenticate_parse(const char
(void)(thttp_machine_parser_header_WWW_Authenticate_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse WWW-Authenticate header.");

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -19,13 +17,9 @@
* along with DOUBANGO.
*
*/
/**@file thttp_parser_message.c
* @brief HTTP parser.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/parsers/thttp_parser_message.h"
#include "tinyhttp/parsers/thttp_parser_header.h"
@ -165,8 +159,10 @@ static void thttp_message_parser_eoh(tsk_ragel_state_t *state, thttp_message_t *
}%%
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* Ragel data */
%%write data;
TSK_RAGEL_DISABLE_WARNINGS_END()
/** Parses raw HTTP buffer.
*
@ -223,7 +219,9 @@ static void thttp_message_parser_execute(tsk_ragel_state_t *state, thttp_message
const char *pe = state->pe;
const char *eof = state->eof;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%% write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
state->cs = cs;
state->p = p;

View File

@ -1,7 +1,5 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -19,13 +17,8 @@
* along with DOUBANGO.
*
*/
/**@file thttp_parser_url.c
* @brief HTTP/HTTPS URL parser.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/parsers/thttp_parser_url.h"
@ -116,6 +109,7 @@ thttp_url_t *thttp_url_parse(const char *urlstring, tsk_size_t length)
const char *tag_start = 0;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
%%write data;
(void)(ts);
(void)(te);
@ -126,6 +120,7 @@ thttp_url_t *thttp_url_parse(const char *urlstring, tsk_size_t length)
(void)(thttp_machine_parser_url_en_main);
%%write init;
%%write exec;
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs < %%{ write first_final; }%% ){
TSK_DEBUG_ERROR("Failed to parse HTTP/HTTPS URL: '%.*s'", length, urlstring);

View File

@ -1,30 +1,24 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
* Copyright (C) 2010-2015 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
*/
/**@file thttp_auth.c
* @brief HTTP basic/digest authentication (RFC 2617) implementations.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/auth/thttp_auth.h"
@ -44,23 +38,23 @@
*
* Generates HTTP-basic response as per RFC 2617.
*
* @param [in,out] userid The user-id.
* @param [in,out] userid The user-id.
* @param [in,out] password The user-password.
* @param [in,out] response A pointer to the response. It will be up to the caller to free the newly allocated buffer.
* @param [in,out] response A pointer to the response. It will be up to the caller to free the newly allocated buffer.
*
* @return The size of the response.
**/
* @return The size of the response.
**/
tsk_size_t thttp_auth_basic_response(const char* userid, const char* password, char** response)
{
tsk_size_t ret;
/* RFC 2617 - 2 Basic Authentication Scheme
To receive authorization, the client sends the userid and password,
separated by a single colon (":") character, within a base64 [7]
encoded string in the credentials.
*/
char *res = 0;
tsk_sprintf(&res, "%s:%s", userid, password);
ret = tsk_base64_encode((const uint8_t*)res, tsk_strlen(res), response);
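For reference, with the RFC 2617 example credentials ("Aladdin" / "open sesame") the function above should produce base64("Aladdin:open sesame") = "QWxhZGRpbjpvcGVuIHNlc2FtZQ==". A usage sketch, assuming the tinySAK/tinyHTTP include paths used elsewhere in this tree:

#include "tinyhttp/auth/thttp_auth.h"   /* thttp_auth_basic_response() */
#include "tsk_memory.h"                 /* TSK_FREE() */
#include <stdio.h>

static void basic_auth_example(void)
{
    char* response = 0;
    /* RFC 2617 example credentials; the expected value is
       base64("Aladdin:open sesame") == "QWxhZGRpbjpvcGVuIHNlc2FtZQ==" */
    tsk_size_t len = thttp_auth_basic_response("Aladdin", "open sesame", &response);
    if (len) {
        printf("Authorization: Basic %s\n", response);
    }
    TSK_FREE(response); /* the caller owns the returned buffer */
}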
@ -71,28 +65,28 @@ tsk_size_t thttp_auth_basic_response(const char* userid, const char* password, c
/**@ingroup thttp_auth_group
* Generates digest HA1 value as per RFC 2617 subclause 3.2.2.2.
* Generates digest HA1 value as per RFC 2617 subclause 3.2.2.2.
*
*
* @param [in,out] username The user's name (unquoted) in the specified @a realm.
* @param [in,out] realm The realm. (unquoted)
* @param [in,out] password The user's password.
* @param [in,out] ha1 A pointer to the result.
* @param [in,out] username The user's name (unquoted) in the specified @a realm.
* @param [in,out] realm The realm. (unquoted)
* @param [in,out] password The user's password.
* @param [in,out] ha1 A pointer to the result.
*
* @return Zero if succeed and non-zero error code otherwise.
**/
* @return Zero if succeed and non-zero error code otherwise.
**/
int thttp_auth_digest_HA1(const char* username, const char* realm, const char* password, tsk_md5string_t* ha1)
{
int ret;
/* RFC 2617 - 3.2.2.2 A1
A1 = unq(username-value) ":" unq(realm-value) ":" passwd
*/
*/
char *a1 = tsk_null;
tsk_sprintf(&a1, "%s:%s:%s", username, realm, password);
tsk_sprintf(&a1, "%s:%s:%s", username, realm, password);
ret = tsk_md5compute(a1, tsk_strlen(a1), ha1);
TSK_FREE(a1);
return ret;
}
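With the well-known RFC 2617 section 3.5 example (username "Mufasa", realm "testrealm@host.com", password "Circle Of Life"), the expected HA1 is 939e7578ed9e3c518a452acee763bce9, which makes a convenient sanity check for this routine. A sketch, under the same include-path assumption as above:

#include "tinyhttp/auth/thttp_auth.h"
#include <stdio.h>

static void digest_ha1_example(void)
{
    tsk_md5string_t ha1; /* 32 hex digits + NUL */
    /* RFC 2617 section 3.5 example; expected HA1 is
       "939e7578ed9e3c518a452acee763bce9" */
    if (thttp_auth_digest_HA1("Mufasa", "testrealm@host.com", "Circle Of Life", &ha1) == 0) {
        printf("HA1=%s\n", ha1);
    }
}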
@ -102,26 +96,26 @@ int thttp_auth_digest_HA1(const char* username, const char* realm, const char* p
*
*
* @param [in,out] username The user's name (unquoted) in the specified @a realm.
* @param [in,out] realm The realm (unquoted).
* @param [in,out] password The user's password.
* @param [in,out] realm The realm (unquoted).
* @param [in,out] password The user's password.
* @param [in,out] nonce The nonce (unquoted).
* @param [in,out] cnonce The client nonce (unquoted).
* @param [in,out] ha1sess A pointer to the result.
* @param [in,out] ha1sess A pointer to the result.
*
* @return Zero if succeed and non-zero error code otherwise.
**/
* @return Zero if succeed and non-zero error code otherwise.
**/
int thttp_auth_digest_HA1sess(const char* username, const char* realm, const char* password, const char* nonce, const char* cnonce, tsk_md5string_t* ha1sess)
{
int ret;
/* RFC 2617 - 3.2.2.2 A1
A1 = H( unq(username-value) ":" unq(realm-value)
":" passwd )
":" unq(nonce-value) ":" unq(cnonce-value)
*/
":" passwd )
":" unq(nonce-value) ":" unq(cnonce-value)
*/
char *a1sess = tsk_null;
tsk_sprintf(&a1sess, "%s:%s:%s:%s:%s", username, realm, password, nonce, cnonce);
tsk_sprintf(&a1sess, "%s:%s:%s:%s:%s", username, realm, password, nonce, cnonce);
ret = tsk_md5compute(a1sess, tsk_strlen(a1sess), ha1sess);
TSK_FREE(a1sess);
@ -129,17 +123,17 @@ int thttp_auth_digest_HA1sess(const char* username, const char* realm, const cha
}
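A usage sketch for the MD5-sess variant above; the nonce and cnonce strings below are illustrative placeholders rather than values from a real exchange, so no reference digest is given:

#include "tinyhttp/auth/thttp_auth.h"
#include <stdio.h>

static void digest_ha1sess_example(void)
{
    tsk_md5string_t ha1sess;
    /* the nonce/cnonce values are illustrative placeholders only */
    if (thttp_auth_digest_HA1sess("Mufasa", "testrealm@host.com", "Circle Of Life",
                                  "dcd98b7102dd2f0e8b11d0f600bfb0c093", "0a4f113b",
                                  &ha1sess) == 0) {
        printf("HA1sess=%s\n", ha1sess);
    }
}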
/**@ingroup thttp_auth_group
* Generates digest HA2 value as per RFC 2617 subclause 3.2.2.3.
* Generates digest HA2 value as per RFC 2617 subclause 3.2.2.3.
*
*
* @param [in,out] method The HTTP/SIP method name.
* @param [in,out] url The HTTP URL or SIP URI of the request.
* @param [in,out] entity_body The entity body.
* @param [in,out] method The HTTP/SIP method name.
* @param [in,out] url The HTTP URL or SIP URI of the request.
* @param [in,out] entity_body The entity body.
* @param [in,out] qop The Quality Of Protection.
* @param [in,out] ha2 A pointer to the response.
* @param [in,out] ha2 A pointer to the response.
*
* @return Zero if succeed and non-zero error code otherwise.
**/
* @return Zero if succeed and non-zero error code otherwise.
**/
int thttp_auth_digest_HA2(const char* method, const char* url, const tsk_buffer_t* entity_body, const char* qop, tsk_md5string_t* ha2)
{
int ret;
@ -155,14 +149,14 @@ int thttp_auth_digest_HA2(const char* method, const char* url, const tsk_buffer_
char *a2 = tsk_null;
if(!qop || tsk_strempty(qop) || tsk_striequals(qop, "auth")){
if (!qop || tsk_strempty(qop) || tsk_striequals(qop, "auth")){
tsk_sprintf(&a2, "%s:%s", method, url);
}
else if(tsk_striequals(qop, "auth-int"))
else if (tsk_striequals(qop, "auth-int"))
{
if(entity_body && entity_body->data){
if (entity_body && entity_body->data){
tsk_md5string_t hEntity;
if((ret = tsk_md5compute(entity_body->data, entity_body->size, &hEntity))){
if ((ret = tsk_md5compute(entity_body->data, entity_body->size, &hEntity))){
goto bail;
}
tsk_sprintf(&a2, "%s:%s:%s", method, url, hEntity);
@ -175,9 +169,9 @@ int thttp_auth_digest_HA2(const char* method, const char* url, const tsk_buffer_
ret = tsk_md5compute(a2, tsk_strlen(a2), ha2);
bail:
TSK_FREE(a2);
TSK_FREE(a2);
return ret;
return ret;
}
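Continuing the RFC 2617 section 3.5 example, a GET of /dir/index.html with qop=auth gives A2 = "GET:/dir/index.html" and HA2 = 39aff3a2bab6126f332b942af96d3366. A sketch against the signature above:

#include "tinyhttp/auth/thttp_auth.h"
#include <stdio.h>

static void digest_ha2_example(void)
{
    tsk_md5string_t ha2;
    /* qop="auth": A2 = "GET:/dir/index.html"; expected HA2 is
       "39aff3a2bab6126f332b942af96d3366" (RFC 2617 section 3.5 request) */
    if (thttp_auth_digest_HA2("GET", "/dir/index.html", tsk_null, "auth", &ha2) == 0) {
        printf("HA2=%s\n", ha2);
    }
}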
@ -186,17 +180,17 @@ bail:
* Generates HTTP digest response as per RFC 2617 subclause 3.2.2.1.
*
* @param [in,out] ha1 HA1 string generated using @ref thttp_auth_digest_HA1 or @ref thttp_auth_digest_HA1sess.
* @param [in,out] nonce The nonce value.
* @param [in,out] nonce The nonce value.
* @param [in,out] noncecount The nonce count.
* @param [in,out] cnonce The client nounce (unquoted).
* @param [in,out] qop The Quality Of Protection (unquoted).
* @param [in,out] ha2 HA2 string generated using @ref thttp_auth_digest_HA2.
* @param [in,out] response A pointer to the response.
*
* @return Zero if succeed and non-zero error code otherwise.
**/
int thttp_auth_digest_response(const tsk_md5string_t *ha1, const char* nonce, const nonce_count_t noncecount, const char* cnonce,
const char* qop, const tsk_md5string_t* ha2, tsk_md5string_t* response)
* @return Zero if succeed and non-zero error code otherwise.
**/
int thttp_auth_digest_response(const tsk_md5string_t *ha1, const char* nonce, const nonce_count_t noncecount, const char* cnonce,
const char* qop, const tsk_md5string_t* ha2, tsk_md5string_t* response)
{
int ret;
@ -220,7 +214,7 @@ int thttp_auth_digest_response(const tsk_md5string_t *ha1, const char* nonce, co
char *res = tsk_null;
if(tsk_striequals(qop, "auth") || tsk_striequals(qop, "auth-int")){
if (tsk_striequals(qop, "auth") || tsk_striequals(qop, "auth-int")){
/* CASE 1 */
tsk_sprintf(&res, "%s:%s:%s:%s:%s:%s", *ha1, nonce, noncecount, cnonce, qop, *ha2);
}
@ -232,7 +226,7 @@ int thttp_auth_digest_response(const tsk_md5string_t *ha1, const char* nonce, co
ret = tsk_md5compute(res, tsk_strlen(res), response);
TSK_FREE(res);
return ret;
return ret;
}
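Putting the three helpers together reproduces the full RFC 2617 section 3.5 request digest, 6629fae49393a05397450978507c4ef1, which is a useful end-to-end check. A sketch, reusing the example values quoted above:

#include "tinyhttp/auth/thttp_auth.h"
#include <stdio.h>

static void digest_response_example(void)
{
    tsk_md5string_t ha1, ha2, response;
    thttp_auth_digest_HA1("Mufasa", "testrealm@host.com", "Circle Of Life", &ha1);
    thttp_auth_digest_HA2("GET", "/dir/index.html", tsk_null, "auth", &ha2);
    /* nonce/nc/cnonce taken from the RFC 2617 section 3.5 request; the
       expected response is "6629fae49393a05397450978507c4ef1" */
    thttp_auth_digest_response((const tsk_md5string_t*)&ha1,
                               "dcd98b7102dd2f0e8b11d0f600bfb0c093", /* nonce  */
                               "00000001",                           /* nc     */
                               "0a4f113b",                           /* cnonce */
                               "auth",                               /* qop    */
                               (const tsk_md5string_t*)&ha2,
                               &response);
    printf("response=%s\n", response);
}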
/**@ingroup thttp_auth_group
@ -241,11 +235,11 @@ int thttp_auth_digest_response(const tsk_md5string_t *ha1, const char* nonce, co
* @param [in] key The value of the key received from the client ("Sec-WebSocket-Key" header). Must be null-terminated.
* @param [in,out] response The response ("Sec-WebSocket-Value" header).
*
* @return The size of the response. Zero if error.
* @return The size of the response. Zero if error.
*/
tsk_size_t thttp_auth_ws_response(const char* key, thttp_auth_ws_keystring_t* response)
{
if(!key || !response){
if (!key || !response){
TSK_DEBUG_ERROR("invalid parameter");
return 0;
}
@ -260,8 +254,9 @@ tsk_size_t thttp_auth_ws_response(const char* key, thttp_auth_ws_keystring_t* re
tsk_sha1compute(tmp, tsk_strlen(tmp), &sha1result);
size = tsk_strlen((char*)sha1result);
for(i = 0; i<size; i+=2){
if(sscanf((const char*)&sha1result[i], "%2x", (unsigned int*)&ret) != EOF){;
for (i = 0; i < size; i += 2){
if (sscanf((const char*)&sha1result[i], "%2x", (unsigned int*)&ret) != EOF){
;
result[i >> 1] = (char)ret;
}
}
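The WebSocket routine above can be checked against the RFC 6455 section 1.3 handshake example: the key "dGhlIHNhbXBsZSBub25jZQ==" must yield "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=". A sketch, assuming thttp_auth_ws_keystring_t is the fixed-size character buffer declared in thttp_auth.h:

#include "tinyhttp/auth/thttp_auth.h"
#include <stdio.h>

static void ws_accept_example(void)
{
    thttp_auth_ws_keystring_t accept_value = { 0 };
    /* RFC 6455 section 1.3 sample key; the expected accept value is
       "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=" */
    if (thttp_auth_ws_response("dGhlIHNhbXBsZSBub25jZQ==", &accept_value)) {
        printf("Sec-WebSocket-Accept: %s\n", accept_value);
    }
}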

View File

@ -1,20 +1,18 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
* Copyright (C) 2010-2015 Mamadou DIOP.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*
* You should have received a copy of the GNU General Public License
* along with DOUBANGO.
*
@ -22,10 +20,6 @@
/**@file thttp_challenge.c
* @brief HTTP authentication challenge.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/auth/thttp_challenge.h"
@ -46,22 +40,22 @@
#define THTTP_CHALLENGE_IS_AKAv1(self) ((self) ? tsk_striequals((self)->algorithm, "AKAv1-MD5") : tsk_false)
#define THTTP_CHALLENGE_IS_AKAv2(self) ((self) ? tsk_striequals((self)->algorithm, "AKAv2-MD5") : tsk_false)
thttp_challenge_t* thttp_challenge_create(tsk_bool_t isproxy,const char* scheme, const char* realm, const char* nonce, const char* opaque, const char* algorithm, const char* qop)
thttp_challenge_t* thttp_challenge_create(tsk_bool_t isproxy, const char* scheme, const char* realm, const char* nonce, const char* opaque, const char* algorithm, const char* qop)
{
return tsk_object_new(thttp_challenge_def_t, isproxy, scheme, realm, nonce, opaque, algorithm, qop);
}
int thttp_challenge_reset_cnonce(thttp_challenge_t *self)
{
if(self)
if (self)
{
if(self->qop) /* client nonce is only used if qop=auth, auth-int or both */
if (self->qop) /* client nonce is only used if qop=auth, auth-int or both */
{
#if 0
memcpy(self->cnonce, "f221681c1e42fb5f8f9957bf7e72eb2b", 32);
#else
tsk_istr_t istr;
tsk_strrandom(&istr);
tsk_md5compute(istr, tsk_strlen(istr), &self->cnonce);
#endif
@ -73,18 +67,18 @@ int thttp_challenge_reset_cnonce(thttp_challenge_t *self)
int thttp_challenge_get_digest_response(thttp_challenge_t *self, const char* username, const char* password, const char* method, const char* uristring, const tsk_buffer_t* entity_body, char** response)
{
if(THTTP_CHALLENGE_IS_DIGEST(self)){
if (THTTP_CHALLENGE_IS_DIGEST(self)){
tsk_md5string_t ha1, ha2, md5_response;
nonce_count_t nc;
/* ===
Calculate HA1 = MD5(A1) = MD5(username:realm:secret)
*/
*/
thttp_auth_digest_HA1(username, self->realm, password, &ha1);
/* ===
HA2
*/
HA2
*/
thttp_auth_digest_HA2(method,
uristring,
entity_body,
@ -92,21 +86,21 @@ int thttp_challenge_get_digest_response(thttp_challenge_t *self, const char* use
&ha2);
/* RESPONSE */
if(self->nc){
if (self->nc){
THTTP_NCOUNT_2_STRING(self->nc, nc);
}
thttp_auth_digest_response((const tsk_md5string_t *)&ha1,
thttp_auth_digest_response((const tsk_md5string_t *)&ha1,
self->nonce,
nc,
self->cnonce,
self->qop,
(const tsk_md5string_t *)&ha2,
&md5_response);
if(self->qop){
if (self->qop){
self->nc++;
}
if(response && !*response){
if (response && !*response){
*response = tsk_strdup(md5_response);
}
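At the call-site level the challenge object wraps all of the digest bookkeeping (cnonce, nonce count, qop selection) shown above. A usage sketch built only from the signatures visible in this file, with server parameters borrowed from the RFC 2617 example rather than a live 401 response:

#include "tinyhttp/auth/thttp_challenge.h"
#include "tsk_memory.h"   /* TSK_FREE() */
#include "tsk_object.h"   /* TSK_OBJECT_SAFE_FREE() */
#include <stdio.h>

static void challenge_example(void)
{
    char* response = 0;
    /* Server-side values are placeholders borrowed from the RFC 2617 example,
       not from a live 401 response. */
    thttp_challenge_t* challenge = thttp_challenge_create(
        tsk_false,                            /* WWW-Authenticate, not proxy */
        "Digest",
        "testrealm@host.com",                 /* realm  */
        "dcd98b7102dd2f0e8b11d0f600bfb0c093", /* nonce  */
        "5ccc069c403ebaf9f0171e9517f40e41",   /* opaque */
        tsk_null,                             /* algorithm */
        "auth");                              /* qop    */
    if (challenge && thttp_challenge_get_digest_response(challenge,
            "Mufasa", "Circle Of Life", "GET", "/dir/index.html",
            tsk_null /* no entity body */, &response) == 0) {
        /* the cnonce is generated randomly, so the digest differs per run */
        printf("response=\"%s\"\n", response);
    }
    TSK_FREE(response);
    TSK_OBJECT_SAFE_FREE(challenge);
}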
@ -117,7 +111,7 @@ int thttp_challenge_get_digest_response(thttp_challenge_t *self, const char* use
int thttp_challenge_update(thttp_challenge_t *self, const char* scheme, const char* realm, const char* nonce, const char* opaque, const char* algorithm, const char* qop)
{
if(self)
if (self)
{
int noncechanged = !tsk_striequals(self->nonce, nonce);
@ -126,12 +120,12 @@ int thttp_challenge_update(thttp_challenge_t *self, const char* scheme, const ch
tsk_strupdate(&self->nonce, nonce);
tsk_strupdate(&self->opaque, opaque);
tsk_strupdate(&self->algorithm, algorithm);
if(qop){
self->qop = tsk_strcontains(qop, tsk_strlen(qop), "auth-int") ? "auth-int" :
(tsk_strcontains(qop, tsk_strlen(qop), "auth") ? "auth" : tsk_null);
if (qop){
self->qop = tsk_strcontains(qop, tsk_strlen(qop), "auth-int") ? "auth-int" :
(tsk_strcontains(qop, tsk_strlen(qop), "auth") ? "auth" : tsk_null);
}
if(noncechanged && self->qop){
if (noncechanged && self->qop){
thttp_challenge_reset_cnonce(self);
}
return 0;
@ -147,7 +141,7 @@ thttp_header_t *thttp_challenge_create_header_authorization(thttp_challenge_t *s
char *uristring = tsk_null;
thttp_header_t *header = 0;
if(!self || !request || !request->line.request.url){
if (!self || !request || !request->line.request.url){
goto bail;
}
@ -155,18 +149,18 @@ thttp_header_t *thttp_challenge_create_header_authorization(thttp_challenge_t *s
tsk_sprintf(&uristring, "/%s", request->line.request.url->hpath ? request->line.request.url->hpath : "");
/* We compute the nc here because @ref thttp_challenge_get_response function will increment it's value. */
if(self->nc){
if (self->nc){
THTTP_NCOUNT_2_STRING(self->nc, nc);
}
/* Computes the response (Basic and Digest)*/
if(THTTP_CHALLENGE_IS_DIGEST(self)){
if(thttp_challenge_get_digest_response(self, username, password, request->line.request.method, uristring, request->Content, &response)){
if (THTTP_CHALLENGE_IS_DIGEST(self)){
if (thttp_challenge_get_digest_response(self, username, password, request->line.request.method, uristring, request->Content, &response)){
goto bail;
}
response_size = (TSK_MD5_DIGEST_SIZE*2);
response_size = (TSK_MD5_DIGEST_SIZE * 2);
}
else if(THTTP_CHALLENGE_IS_BASIC(self)){
else if (THTTP_CHALLENGE_IS_BASIC(self)){
response_size = thttp_auth_basic_response(username, password, &response);
}
else{
@ -188,7 +182,7 @@ thttp_header_t *thttp_challenge_create_header_authorization(thttp_challenge_t *s
hdr->nc = self->nc? tsk_strdup(nc) : 0; \
hdr->response = tsk_strndup(response, response_size); \
if(self->isproxy){
if (self->isproxy){
thttp_header_Proxy_Authorization_t *proxy_auth = thttp_header_authorization_create(); // Very bad way to create Proxy_auth header.
THTTP_HEADER(proxy_auth)->type = thttp_htype_Proxy_Authorization;
@ -242,7 +236,7 @@ bail:
static tsk_object_t* thttp_challenge_ctor(tsk_object_t *self, va_list * app)
{
thttp_challenge_t *challenge = self;
if(challenge){
if (challenge){
const char* qop;
challenge->isproxy = va_arg(*app, tsk_bool_t);
@ -252,17 +246,17 @@ static tsk_object_t* thttp_challenge_ctor(tsk_object_t *self, va_list * app)
challenge->opaque = tsk_strdup(va_arg(*app, const char*));
challenge->algorithm = tsk_strdup(va_arg(*app, const char*));
qop = va_arg(*app, const char*);
if(qop){
challenge->qop = tsk_strcontains(qop, tsk_strlen(qop), "auth-int") ? "auth-int" :
(tsk_strcontains(qop, tsk_strlen(qop), "auth") ? "auth" : tsk_null);
if (qop){
challenge->qop = tsk_strcontains(qop, tsk_strlen(qop), "auth-int") ? "auth-int" :
(tsk_strcontains(qop, tsk_strlen(qop), "auth") ? "auth" : tsk_null);
}
if(challenge->qop){
if (challenge->qop){
thttp_challenge_reset_cnonce(challenge);
}
}
else TSK_DEBUG_ERROR("Failed to create new http challenge object.");
return self;
}
@ -271,13 +265,13 @@ static tsk_object_t* thttp_challenge_ctor(tsk_object_t *self, va_list * app)
static tsk_object_t* thttp_challenge_dtor(tsk_object_t *self)
{
thttp_challenge_t *challenge = self;
if(challenge){
if (challenge){
TSK_FREE(challenge->scheme);
TSK_FREE(challenge->realm);
TSK_FREE(challenge->nonce);
TSK_FREE(challenge->opaque);
TSK_FREE(challenge->algorithm);
//TSK_FREE(challenge->qop);
}
else{
@ -287,7 +281,7 @@ static tsk_object_t* thttp_challenge_dtor(tsk_object_t *self)
return self;
}
static const tsk_object_def_t thttp_challenge_def_s =
static const tsk_object_def_t thttp_challenge_def_s =
{
sizeof(thttp_challenge_t),
thttp_challenge_ctor,

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_Authorization.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,10 +22,6 @@
/**@file thttp_header_Authorization.c
* @brief HTTP Authorization header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Authorization.h"
@ -43,7 +37,7 @@
* Ragel state machine.
*/
/* #line 148 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 142 "./ragel/thttp_parser_header_Authorization.rl" */
thttp_header_Authorization_t* thttp_header_authorization_create()
@ -121,8 +115,9 @@ thttp_header_Authorization_t *thttp_header_Authorization_parse(const char *data,
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 126 "./src/headers/thttp_header_Authorization.c" */
/* #line 121 "./src/headers/thttp_header_Authorization.c" */
static const char _thttp_machine_parser_header_Authorization_actions[] = {
0, 1, 0, 1, 1, 1, 2, 1,
3, 1, 4, 1, 5, 1, 6, 1,
@ -6700,21 +6695,21 @@ static const int thttp_machine_parser_header_Authorization_error = 0;
static const int thttp_machine_parser_header_Authorization_en_main = 1;
/* #line 226 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 221 "./ragel/thttp_parser_header_Authorization.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Authorization_first_final);
(void)(thttp_machine_parser_header_Authorization_error);
(void)(thttp_machine_parser_header_Authorization_en_main);
/* #line 6711 "./src/headers/thttp_header_Authorization.c" */
/* #line 6706 "./src/headers/thttp_header_Authorization.c" */
{
cs = thttp_machine_parser_header_Authorization_start;
}
/* #line 232 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 227 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 6718 "./src/headers/thttp_header_Authorization.c" */
/* #line 6713 "./src/headers/thttp_header_Authorization.c" */
{
int _klen;
unsigned int _trans;
@ -6789,114 +6784,114 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 49 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 43 "./ragel/thttp_parser_header_Authorization.rl" */
{
tag_start = p;
}
break;
case 1:
/* #line 53 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 47 "./ragel/thttp_parser_header_Authorization.rl" */
{
hdr_Authorization->scheme = tsk_strdup("Digest");
}
break;
case 2:
/* #line 57 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 51 "./ragel/thttp_parser_header_Authorization.rl" */
{
hdr_Authorization->scheme = tsk_strdup("Basic");
}
break;
case 3:
/* #line 61 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 55 "./ragel/thttp_parser_header_Authorization.rl" */
{
THTTP_HEADER(hdr_Authorization)->type = thttp_htype_Authorization;
}
break;
case 4:
/* #line 65 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 59 "./ragel/thttp_parser_header_Authorization.rl" */
{
THTTP_HEADER(hdr_Authorization)->type = thttp_htype_Proxy_Authorization;
}
break;
case 5:
/* #line 69 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 63 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->username);
tsk_strunquote(&hdr_Authorization->username);
}
break;
case 6:
/* #line 74 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 68 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->realm);
tsk_strunquote(&hdr_Authorization->realm);
}
break;
case 7:
/* #line 79 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 73 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->nonce);
tsk_strunquote(&hdr_Authorization->nonce);
}
break;
case 8:
/* #line 84 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 78 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->uri);
}
break;
case 9:
/* #line 88 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 82 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->response);
tsk_strunquote(&hdr_Authorization->response);
}
break;
case 10:
/* #line 93 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 87 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->algorithm);
}
break;
case 11:
/* #line 97 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 91 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->cnonce);
tsk_strunquote(&hdr_Authorization->cnonce);
}
break;
case 12:
/* #line 102 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 96 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->opaque);
tsk_strunquote(&hdr_Authorization->opaque);
}
break;
case 13:
/* #line 107 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 101 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->qop);
//tsk_strunquote(&hdr_Authorization->qop);
}
break;
case 14:
/* #line 112 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 106 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_SET_STRING(hdr_Authorization->nc);
}
break;
case 15:
/* #line 116 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 110 "./ragel/thttp_parser_header_Authorization.rl" */
{
TSK_PARSER_ADD_PARAM(THTTP_HEADER_PARAMS(hdr_Authorization));
}
break;
case 16:
/* #line 120 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 114 "./ragel/thttp_parser_header_Authorization.rl" */
{
}
break;
/* #line 6900 "./src/headers/thttp_header_Authorization.c" */
/* #line 6895 "./src/headers/thttp_header_Authorization.c" */
}
}
@ -6909,12 +6904,13 @@ _again:
_out: {}
}
/* #line 233 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 228 "./ragel/thttp_parser_header_Authorization.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 6916 "./src/headers/thttp_header_Authorization.c" */
/* #line 6912 "./src/headers/thttp_header_Authorization.c" */
1351
/* #line 234 "./ragel/thttp_parser_header_Authorization.rl" */
/* #line 230 "./ragel/thttp_parser_header_Authorization.rl" */
){
TSK_DEBUG_ERROR("Failed to parse Authorization header.");
TSK_OBJECT_SAFE_FREE(hdr_Authorization);

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_Content_Length.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -21,13 +19,9 @@
* along with DOUBANGO.
*
*/
/**@file thttp_header_Content_Length.c
* @brief HTTP Content-Length header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Content_Length.h"
@ -38,7 +32,7 @@
* Ragel state machine.
*/
/* #line 60 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 54 "./ragel/thttp_parser_header_Content_Length.rl" */
thttp_header_Content_Length_t* thttp_header_content_length_create(uint32_t length)
@ -68,8 +62,9 @@ thttp_header_Content_Length_t *thttp_header_Content_Length_parse(const char *dat
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 73 "./src/headers/thttp_header_Content_Length.c" */
/* #line 68 "./src/headers/thttp_header_Content_Length.c" */
static const char _thttp_machine_parser_header_Content_Length_actions[] = {
0, 1, 0, 1, 1, 1, 2
};
@ -138,20 +133,20 @@ static const int thttp_machine_parser_header_Content_Length_error = 0;
static const int thttp_machine_parser_header_Content_Length_en_main = 1;
/* #line 90 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 85 "./ragel/thttp_parser_header_Content_Length.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Content_Length_first_final);
(void)(thttp_machine_parser_header_Content_Length_error);
(void)(thttp_machine_parser_header_Content_Length_en_main);
/* #line 148 "./src/headers/thttp_header_Content_Length.c" */
/* #line 143 "./src/headers/thttp_header_Content_Length.c" */
{
cs = thttp_machine_parser_header_Content_Length_start;
}
/* #line 95 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 90 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 155 "./src/headers/thttp_header_Content_Length.c" */
/* #line 150 "./src/headers/thttp_header_Content_Length.c" */
{
int _klen;
unsigned int _trans;
@ -226,23 +221,23 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 44 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 38 "./ragel/thttp_parser_header_Content_Length.rl" */
{
tag_start = p;
}
break;
case 1:
/* #line 48 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 42 "./ragel/thttp_parser_header_Content_Length.rl" */
{
TSK_PARSER_SET_INTEGER(hdr_clength->length);
}
break;
case 2:
/* #line 52 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 46 "./ragel/thttp_parser_header_Content_Length.rl" */
{
}
break;
/* #line 246 "./src/headers/thttp_header_Content_Length.c" */
/* #line 241 "./src/headers/thttp_header_Content_Length.c" */
}
}
@ -255,12 +250,13 @@ _again:
_out: {}
}
/* #line 96 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 91 "./ragel/thttp_parser_header_Content_Length.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 262 "./src/headers/thttp_header_Content_Length.c" */
/* #line 258 "./src/headers/thttp_header_Content_Length.c" */
22
/* #line 97 "./ragel/thttp_parser_header_Content_Length.rl" */
/* #line 93 "./ragel/thttp_parser_header_Content_Length.rl" */
){
TSK_OBJECT_SAFE_FREE(hdr_clength);
}

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_Content_Type.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,10 +22,6 @@
/**@file thttp_header_Content_Type.c
* @brief HTTP Content-Type header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Content_Type.h"
@ -40,7 +34,7 @@
* Ragel state machine.
*/
/* #line 79 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 73 "./ragel/thttp_parser_header_Content_Type.rl" */
thttp_header_Content_Type_t* thttp_header_content_type_create(const char* type)
@ -75,8 +69,9 @@ thttp_header_Content_Type_t *thttp_header_Content_Type_parse(const char *data, t
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 80 "./src/headers/thttp_header_Content_Type.c" */
/* #line 75 "./src/headers/thttp_header_Content_Type.c" */
static const char _thttp_machine_parser_header_Content_Type_actions[] = {
0, 1, 0, 1, 1, 1, 2, 1,
3
@ -232,20 +227,20 @@ static const int thttp_machine_parser_header_Content_Type_error = 0;
static const int thttp_machine_parser_header_Content_Type_en_main = 1;
/* #line 114 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 109 "./ragel/thttp_parser_header_Content_Type.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Content_Type_first_final);
(void)(thttp_machine_parser_header_Content_Type_error);
(void)(thttp_machine_parser_header_Content_Type_en_main);
/* #line 242 "./src/headers/thttp_header_Content_Type.c" */
/* #line 237 "./src/headers/thttp_header_Content_Type.c" */
{
cs = thttp_machine_parser_header_Content_Type_start;
}
/* #line 119 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 114 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 249 "./src/headers/thttp_header_Content_Type.c" */
/* #line 244 "./src/headers/thttp_header_Content_Type.c" */
{
int _klen;
unsigned int _trans;
@ -320,29 +315,29 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 46 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 40 "./ragel/thttp_parser_header_Content_Type.rl" */
{
tag_start = p;
}
break;
case 1:
/* #line 50 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 44 "./ragel/thttp_parser_header_Content_Type.rl" */
{
TSK_PARSER_SET_STRING(hdr_ctype->type);
}
break;
case 2:
/* #line 54 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 48 "./ragel/thttp_parser_header_Content_Type.rl" */
{
TSK_PARSER_ADD_PARAM(THTTP_HEADER_PARAMS(hdr_ctype));
}
break;
case 3:
/* #line 58 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 52 "./ragel/thttp_parser_header_Content_Type.rl" */
{
}
break;
/* #line 346 "./src/headers/thttp_header_Content_Type.c" */
/* #line 341 "./src/headers/thttp_header_Content_Type.c" */
}
}
@ -355,12 +350,13 @@ _again:
_out: {}
}
/* #line 120 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 115 "./ragel/thttp_parser_header_Content_Type.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 362 "./src/headers/thttp_header_Content_Type.c" */
/* #line 358 "./src/headers/thttp_header_Content_Type.c" */
55
/* #line 121 "./ragel/thttp_parser_header_Content_Type.rl" */
/* #line 117 "./ragel/thttp_parser_header_Content_Type.rl" */
){
TSK_DEBUG_ERROR("Failed to parse Content-Type header.");
TSK_OBJECT_SAFE_FREE(hdr_ctype);

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_Dummy.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -25,9 +23,6 @@
/**@file thttp_header_Dummy.c
* @brief HTTP 'Dummy' header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Dummy.h"
@ -42,7 +37,7 @@
* Ragel state machine.
*/
/* #line 68 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 63 "./ragel/thttp_parser_header_Dummy.rl" */
thttp_header_Dummy_t* thttp_header_dummy_create(const char* name, const char* value)
@ -81,8 +76,9 @@ thttp_header_Dummy_t *thttp_header_Dummy_parse(const char *data, tsk_size_t size
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 86 "./src/headers/thttp_header_Dummy.c" */
/* #line 82 "./src/headers/thttp_header_Dummy.c" */
static const char _thttp_machine_parser_header_Dummy_actions[] = {
0, 1, 0, 1, 1, 1, 2, 1,
3, 2, 0, 2
@ -143,20 +139,20 @@ static const int thttp_machine_parser_header_Dummy_error = 0;
static const int thttp_machine_parser_header_Dummy_en_main = 1;
/* #line 107 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 103 "./ragel/thttp_parser_header_Dummy.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Dummy_first_final);
(void)(thttp_machine_parser_header_Dummy_error);
(void)(thttp_machine_parser_header_Dummy_en_main);
/* #line 153 "./src/headers/thttp_header_Dummy.c" */
/* #line 149 "./src/headers/thttp_header_Dummy.c" */
{
cs = thttp_machine_parser_header_Dummy_start;
}
/* #line 112 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 108 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 160 "./src/headers/thttp_header_Dummy.c" */
/* #line 156 "./src/headers/thttp_header_Dummy.c" */
{
int _klen;
unsigned int _trans;
@ -231,29 +227,29 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 48 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 43 "./ragel/thttp_parser_header_Dummy.rl" */
{
tag_start = p;
}
break;
case 1:
/* #line 52 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 47 "./ragel/thttp_parser_header_Dummy.rl" */
{
TSK_PARSER_SET_STRING(hdr_Dummy->name);
}
break;
case 2:
/* #line 56 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 51 "./ragel/thttp_parser_header_Dummy.rl" */
{
TSK_PARSER_SET_STRING(hdr_Dummy->value);
}
break;
case 3:
/* #line 60 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 55 "./ragel/thttp_parser_header_Dummy.rl" */
{
}
break;
/* #line 257 "./src/headers/thttp_header_Dummy.c" */
/* #line 253 "./src/headers/thttp_header_Dummy.c" */
}
}
@ -266,12 +262,13 @@ _again:
_out: {}
}
/* #line 113 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 109 "./ragel/thttp_parser_header_Dummy.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 273 "./src/headers/thttp_header_Dummy.c" */
/* #line 270 "./src/headers/thttp_header_Dummy.c" */
10
/* #line 114 "./ragel/thttp_parser_header_Dummy.rl" */
/* #line 111 "./ragel/thttp_parser_header_Dummy.rl" */
){
TSK_OBJECT_SAFE_FREE(hdr_Dummy);
}

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_ETag.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,10 +22,6 @@
/**@file thttp_header_ETag.c
* @brief HTTP 'Etag' header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_ETag.h"
@ -42,7 +36,7 @@
* Ragel state machine.
*/
/* #line 73 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 67 "./ragel/thttp_parser_header_ETag.rl" */
thttp_header_ETag_t* thttp_header_etag_create(const char* value)
@ -83,8 +77,9 @@ thttp_header_ETag_t *thttp_header_ETag_parse(const char *data, tsk_size_t size)
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 88 "./src/headers/thttp_header_ETag.c" */
/* #line 83 "./src/headers/thttp_header_ETag.c" */
static const char _thttp_machine_parser_header_ETag_actions[] = {
0, 1, 0, 1, 2, 1, 3, 2,
1, 0
@ -159,20 +154,20 @@ static const int thttp_machine_parser_header_ETag_error = 0;
static const int thttp_machine_parser_header_ETag_en_main = 1;
/* #line 114 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 109 "./ragel/thttp_parser_header_ETag.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_ETag_first_final);
(void)(thttp_machine_parser_header_ETag_error);
(void)(thttp_machine_parser_header_ETag_en_main);
/* #line 169 "./src/headers/thttp_header_ETag.c" */
/* #line 164 "./src/headers/thttp_header_ETag.c" */
{
cs = thttp_machine_parser_header_ETag_start;
}
/* #line 119 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 114 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 176 "./src/headers/thttp_header_ETag.c" */
/* #line 171 "./src/headers/thttp_header_ETag.c" */
{
int _klen;
unsigned int _trans;
@ -247,30 +242,30 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 49 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 43 "./ragel/thttp_parser_header_ETag.rl" */
{
tag_start = p;
}
break;
case 1:
/* #line 53 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 47 "./ragel/thttp_parser_header_ETag.rl" */
{
hdr_ETag->isWeak = tsk_true;
}
break;
case 2:
/* #line 57 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 51 "./ragel/thttp_parser_header_ETag.rl" */
{
TSK_PARSER_SET_STRING(hdr_ETag->value);
tsk_strunquote(&hdr_ETag->value);
}
break;
case 3:
/* #line 62 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 56 "./ragel/thttp_parser_header_ETag.rl" */
{
}
break;
/* #line 274 "./src/headers/thttp_header_ETag.c" */
/* #line 269 "./src/headers/thttp_header_ETag.c" */
}
}
@ -283,12 +278,13 @@ _again:
_out: {}
}
/* #line 120 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 115 "./ragel/thttp_parser_header_ETag.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 290 "./src/headers/thttp_header_ETag.c" */
/* #line 286 "./src/headers/thttp_header_ETag.c" */
22
/* #line 121 "./ragel/thttp_parser_header_ETag.rl" */
/* #line 117 "./ragel/thttp_parser_header_ETag.rl" */
){
TSK_DEBUG_ERROR("Failed to parse ETag header.");
TSK_OBJECT_SAFE_FREE(hdr_ETag);

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,9 +22,6 @@
/**@file thttp_header_Sec_WebSocket_Accept.c
* @brief WebSocket "Sec-WebSocket-Accept" header.
*
* @author Mamadou Diop <diopmamadou(at)doubango(dor)org>
*
*/
#include "tinyhttp/headers/thttp_header_Sec_WebSocket_Accept.h"
@ -35,7 +30,7 @@
#include <string.h>
/* #line 51 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 46 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
thttp_header_Sec_WebSocket_Accept_t* thttp_header_Sec_WebSocket_Accept_create(const char* value)
@ -72,8 +67,9 @@ thttp_header_Sec_WebSocket_Accept_t *thttp_header_Sec_WebSocket_Accept_parse(con
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 77 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
/* #line 73 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
static const char _thttp_machine_parser_header_Sec_WebSocket_Accept_actions[] = {
0, 1, 0, 1, 1, 1, 2
};
@ -167,20 +163,20 @@ static const int thttp_machine_parser_header_Sec_WebSocket_Accept_error = 0;
static const int thttp_machine_parser_header_Sec_WebSocket_Accept_en_main = 1;
/* #line 88 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 84 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Sec_WebSocket_Accept_first_final);
(void)(thttp_machine_parser_header_Sec_WebSocket_Accept_error);
(void)(thttp_machine_parser_header_Sec_WebSocket_Accept_en_main);
/* #line 177 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
/* #line 173 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
{
cs = thttp_machine_parser_header_Sec_WebSocket_Accept_start;
}
/* #line 93 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 89 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 184 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
/* #line 180 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
{
int _klen;
unsigned int _trans;
@ -255,20 +251,20 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 42 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 37 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
{ tag_start = p; }
break;
case 1:
/* #line 43 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 38 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
{ }
break;
case 2:
/* #line 45 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 40 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
{
TSK_PARSER_SET_STRING(hdr_Sec_WebSocket_Accept->value);
}
break;
/* #line 272 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
/* #line 268 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
}
}
@ -281,12 +277,13 @@ _again:
_out: {}
}
/* #line 94 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 90 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 288 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
/* #line 285 "./src/headers/thttp_header_Sec_WebSocket_Accept.c" */
33
/* #line 95 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
/* #line 92 "./ragel/thttp_parser_header_Sec_WebSocket_Accept.rl" */
){
TSK_DEBUG_ERROR("Failed to parse Sec-WebSocket-Accept header.");
TSK_OBJECT_SAFE_FREE(hdr_Sec_WebSocket_Accept);

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,9 +22,6 @@
/**@file thttp_header_Sec_WebSocket_Key.c
* @brief WebSocket "Sec-WebSocket-Key" header.
*
* @author Mamadou Diop <diopmamadou(at)doubango(dor)org>
*
*/
#include "tinyhttp/headers/thttp_header_Sec_WebSocket_Key.h"
@ -35,7 +30,7 @@
#include <string.h>
/* #line 51 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 46 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
thttp_header_Sec_WebSocket_Key_t* thttp_header_Sec_WebSocket_Key_create(const char* value)
@ -72,8 +67,9 @@ thttp_header_Sec_WebSocket_Key_t *thttp_header_Sec_WebSocket_Key_parse(const cha
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 77 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
/* #line 73 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
static const char _thttp_machine_parser_header_Sec_WebSocket_Key_actions[] = {
0, 1, 0, 1, 1, 1, 2
};
@ -158,20 +154,20 @@ static const int thttp_machine_parser_header_Sec_WebSocket_Key_error = 0;
static const int thttp_machine_parser_header_Sec_WebSocket_Key_en_main = 1;
/* #line 88 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 84 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Sec_WebSocket_Key_first_final);
(void)(thttp_machine_parser_header_Sec_WebSocket_Key_error);
(void)(thttp_machine_parser_header_Sec_WebSocket_Key_en_main);
/* #line 168 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
/* #line 164 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
{
cs = thttp_machine_parser_header_Sec_WebSocket_Key_start;
}
/* #line 93 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 89 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 175 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
/* #line 171 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
{
int _klen;
unsigned int _trans;
@ -246,20 +242,20 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 42 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 37 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
{ tag_start = p; }
break;
case 1:
/* #line 43 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 38 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
{ }
break;
case 2:
/* #line 45 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 40 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
{
TSK_PARSER_SET_STRING(hdr_Sec_WebSocket_Key->value);
}
break;
/* #line 263 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
/* #line 259 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
}
}
@ -272,12 +268,13 @@ _again:
_out: {}
}
/* #line 94 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 90 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 279 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
/* #line 276 "./src/headers/thttp_header_Sec_WebSocket_Key.c" */
30
/* #line 95 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
/* #line 92 "./ragel/thttp_parser_header_Sec_WebSocket_Key.rl" */
){
TSK_DEBUG_ERROR("Failed to parse Sec-WebSocket-Key header.");
TSK_OBJECT_SAFE_FREE(hdr_Sec_WebSocket_Key);

View File

@ -1,16 +1,14 @@
/* #line 1 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either protocol 3 of the License, or
* (at your option) any later protocol.
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* DOUBANGO is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
@ -25,8 +23,6 @@
/**@file thttp_header_Sec_WebSocket_Protocol.c
* @brief WebSocket "Sec-WebSocket-Protocol" header.
*
* @author Mamadou Diop <diopmamadou(at)doubango(dor)org>
*
*/
#include "tinyhttp/headers/thttp_header_Sec_WebSocket_Protocol.h"
@ -35,7 +31,7 @@
#include <string.h>
/* #line 53 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 49 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
thttp_header_Sec_WebSocket_Protocol_t* thttp_header_Sec_WebSocket_Protocol_create(const char* protocol)
@ -78,8 +74,9 @@ thttp_header_Sec_WebSocket_Protocol_t *thttp_header_Sec_WebSocket_Protocol_parse
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 83 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
/* #line 80 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
static const char _thttp_machine_parser_header_Sec_WebSocket_Protocol_actions[] = {
0, 1, 0, 1, 1, 1, 2
};
@ -176,20 +173,20 @@ static const int thttp_machine_parser_header_Sec_WebSocket_Protocol_error = 0;
static const int thttp_machine_parser_header_Sec_WebSocket_Protocol_en_main = 1;
/* #line 96 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 93 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Sec_WebSocket_Protocol_first_final);
(void)(thttp_machine_parser_header_Sec_WebSocket_Protocol_error);
(void)(thttp_machine_parser_header_Sec_WebSocket_Protocol_en_main);
/* #line 186 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
/* #line 183 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
{
cs = thttp_machine_parser_header_Sec_WebSocket_Protocol_start;
}
/* #line 101 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 98 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 193 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
/* #line 190 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
{
int _klen;
unsigned int _trans;
@ -264,20 +261,20 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 42 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 38 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
{ tag_start = p; }
break;
case 1:
/* #line 43 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 39 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
{ }
break;
case 2:
/* #line 45 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 41 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
{
TSK_PARSER_ADD_STRING(hdr_Sec_WebSocket_Protocol->values);
}
break;
/* #line 281 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
/* #line 278 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
}
}
@ -290,12 +287,13 @@ _again:
_out: {}
}
/* #line 102 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 99 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 297 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
/* #line 295 "./src/headers/thttp_header_Sec_WebSocket_Protocol.c" */
34
/* #line 103 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
/* #line 101 "./ragel/thttp_parser_header_Sec_WebSocket_Protocol.rl" */
){
TSK_DEBUG_ERROR("Failed to parse Sec-WebSocket-Protocol header.");
TSK_OBJECT_SAFE_FREE(hdr_Sec_WebSocket_Protocol);

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/*
* Copyright (C) 2012 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango(dot)org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,9 +22,6 @@
/**@file thttp_header_Sec_WebSocket_Version.c
* @brief WebSocket "Sec-WebSocket-Key" header.
*
* @author Mamadou Diop <diopmamadou(at)doubango(dor)org>
*
*/
#include "tinyhttp/headers/thttp_header_Sec_WebSocket_Version.h"
@ -35,7 +30,7 @@
#include <string.h>
/* #line 53 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 48 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
thttp_header_Sec_WebSocket_Version_t* thttp_header_Sec_WebSocket_Version_create(const char* version)
@ -78,8 +73,9 @@ thttp_header_Sec_WebSocket_Version_t *thttp_header_Sec_WebSocket_Version_parse(c
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 83 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
/* #line 79 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
static const char _thttp_machine_parser_header_Sec_WebSocket_Version_actions[] = {
0, 1, 0, 1, 1, 1, 2
};
@ -172,20 +168,20 @@ static const int thttp_machine_parser_header_Sec_WebSocket_Version_error = 0;
static const int thttp_machine_parser_header_Sec_WebSocket_Version_en_main = 1;
/* #line 96 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 92 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Sec_WebSocket_Version_first_final);
(void)(thttp_machine_parser_header_Sec_WebSocket_Version_error);
(void)(thttp_machine_parser_header_Sec_WebSocket_Version_en_main);
/* #line 182 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
/* #line 178 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
{
cs = thttp_machine_parser_header_Sec_WebSocket_Version_start;
}
/* #line 101 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 97 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 189 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
/* #line 185 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
{
int _klen;
unsigned int _trans;
@ -260,20 +256,20 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 42 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 37 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
{ tag_start = p; }
break;
case 1:
/* #line 43 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 38 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
{ }
break;
case 2:
/* #line 45 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 40 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
{
TSK_PARSER_ADD_STRING(hdr_Sec_WebSocket_Version->values);
}
break;
/* #line 277 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
/* #line 273 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
}
}
@ -286,12 +282,13 @@ _again:
_out: {}
}
/* #line 102 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 98 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 293 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
/* #line 290 "./src/headers/thttp_header_Sec_WebSocket_Version.c" */
35
/* #line 103 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
/* #line 100 "./ragel/thttp_parser_header_Sec_WebSocket_Version.rl" */
){
TSK_DEBUG_ERROR("Failed to parse Sec-WebSocket-Version header.");
TSK_OBJECT_SAFE_FREE(hdr_Sec_WebSocket_Version);

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,10 +22,6 @@
/**@file thttp_header_Transfer_Encoding.c
* @brief HTTP Transfer-Encoding header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_Transfer_Encoding.h"
@ -40,7 +34,7 @@
* Ragel state machine.
*/
/* #line 66 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 60 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
thttp_header_Transfer_Encoding_t* thttp_header_transfer_encoding_create(const char* encoding)
@ -79,8 +73,9 @@ thttp_header_Transfer_Encoding_t *thttp_header_Transfer_Encoding_parse(const cha
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 84 "./src/headers/thttp_header_Transfer_Encoding.c" */
/* #line 79 "./src/headers/thttp_header_Transfer_Encoding.c" */
static const char _thttp_machine_parser_header_Transfer_Encoding_actions[] = {
0, 1, 0, 1, 1, 1, 2, 1,
3
@ -190,20 +185,20 @@ static const int thttp_machine_parser_header_Transfer_Encoding_error = 0;
static const int thttp_machine_parser_header_Transfer_Encoding_en_main = 1;
/* #line 105 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 100 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_Transfer_Encoding_first_final);
(void)(thttp_machine_parser_header_Transfer_Encoding_error);
(void)(thttp_machine_parser_header_Transfer_Encoding_en_main);
/* #line 200 "./src/headers/thttp_header_Transfer_Encoding.c" */
/* #line 195 "./src/headers/thttp_header_Transfer_Encoding.c" */
{
cs = thttp_machine_parser_header_Transfer_Encoding_start;
}
/* #line 110 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 105 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 207 "./src/headers/thttp_header_Transfer_Encoding.c" */
/* #line 202 "./src/headers/thttp_header_Transfer_Encoding.c" */
{
int _klen;
unsigned int _trans;
@ -278,29 +273,29 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 46 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 40 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
{
tag_start = p;
}
break;
case 1:
/* #line 50 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 44 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
{
TSK_PARSER_SET_STRING(hdr_tencoding->encoding);
}
break;
case 2:
/* #line 54 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 48 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
{
TSK_PARSER_ADD_PARAM(THTTP_HEADER_PARAMS(hdr_tencoding));
}
break;
case 3:
/* #line 58 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 52 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
{
}
break;
/* #line 304 "./src/headers/thttp_header_Transfer_Encoding.c" */
/* #line 299 "./src/headers/thttp_header_Transfer_Encoding.c" */
}
}
@ -313,12 +308,13 @@ _again:
_out: {}
}
/* #line 111 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 106 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 320 "./src/headers/thttp_header_Transfer_Encoding.c" */
/* #line 316 "./src/headers/thttp_header_Transfer_Encoding.c" */
33
/* #line 112 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
/* #line 108 "./ragel/thttp_parser_header_Transfer_Encoding.rl" */
){
TSK_DEBUG_ERROR("Failed to parse Tansfer-Encoding header.");
TSK_OBJECT_SAFE_FREE(hdr_tencoding);

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,10 +22,6 @@
/**@file thttp_header_WWW_Authenticate.c
* @brief HTTP WWW-Authenticate header.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/headers/thttp_header_WWW_Authenticate.h"
@ -48,7 +42,7 @@
* Ragel state machine.
*/
/* #line 137 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 131 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
thttp_header_WWW_Authenticate_t* thttp_header_www_authenticate_create()
@ -105,8 +99,9 @@ thttp_header_WWW_Authenticate_t *thttp_header_WWW_Authenticate_parse(const char
const char *tag_start = tsk_null;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 110 "./src/headers/thttp_header_WWW_Authenticate.c" */
/* #line 105 "./src/headers/thttp_header_WWW_Authenticate.c" */
static const char _thttp_machine_parser_header_WWW_Authenticate_actions[] = {
0, 1, 0, 1, 1, 1, 2, 1,
3, 1, 4, 1, 5, 1, 6, 1,
@ -8063,20 +8058,20 @@ static const int thttp_machine_parser_header_WWW_Authenticate_error = 0;
static const int thttp_machine_parser_header_WWW_Authenticate_en_main = 1;
/* #line 194 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 189 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
(void)(eof);
(void)(thttp_machine_parser_header_WWW_Authenticate_first_final);
(void)(thttp_machine_parser_header_WWW_Authenticate_error);
(void)(thttp_machine_parser_header_WWW_Authenticate_en_main);
/* #line 8073 "./src/headers/thttp_header_WWW_Authenticate.c" */
/* #line 8068 "./src/headers/thttp_header_WWW_Authenticate.c" */
{
cs = thttp_machine_parser_header_WWW_Authenticate_start;
}
/* #line 199 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 194 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 8080 "./src/headers/thttp_header_WWW_Authenticate.c" */
/* #line 8075 "./src/headers/thttp_header_WWW_Authenticate.c" */
{
int _klen;
unsigned int _trans;
@ -8151,94 +8146,94 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 54 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 48 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
tag_start = p;
}
break;
case 1:
/* #line 58 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 52 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
hdr_WWW_Authenticate->scheme = tsk_strdup("Digest");
}
break;
case 2:
/* #line 62 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 56 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
hdr_WWW_Authenticate->scheme = tsk_strdup("Basic");
}
break;
case 3:
/* #line 66 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 60 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
THTTP_HEADER(hdr_WWW_Authenticate)->type = thttp_htype_WWW_Authenticate;
}
break;
case 4:
/* #line 70 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 64 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
THTTP_HEADER(hdr_WWW_Authenticate)->type = thttp_htype_Proxy_Authenticate;
}
break;
case 5:
/* #line 74 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 68 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
TSK_PARSER_SET_STRING(hdr_WWW_Authenticate->realm);
tsk_strunquote(&hdr_WWW_Authenticate->realm);
}
break;
case 6:
/* #line 79 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 73 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
TSK_PARSER_SET_STRING(hdr_WWW_Authenticate->domain);
//tsk_strunquote(&hdr_WWW_Authenticate->domain);
}
break;
case 7:
/* #line 84 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 78 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
TSK_PARSER_SET_STRING(hdr_WWW_Authenticate->nonce);
tsk_strunquote(&hdr_WWW_Authenticate->nonce);
}
break;
case 8:
/* #line 89 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 83 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
TSK_PARSER_SET_STRING(hdr_WWW_Authenticate->opaque);
tsk_strunquote(&hdr_WWW_Authenticate->opaque);
}
break;
case 9:
/* #line 94 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 88 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
hdr_WWW_Authenticate->stale = tsk_strniequals(tag_start, "true", 4);
}
break;
case 10:
/* #line 98 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 92 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
TSK_PARSER_SET_STRING(hdr_WWW_Authenticate->algorithm);
}
break;
case 11:
/* #line 102 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 96 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
TSK_PARSER_SET_STRING(hdr_WWW_Authenticate->qop);
//tsk_strunquote(&hdr_WWW_Authenticate->qop);
}
break;
case 12:
/* #line 107 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 101 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
TSK_PARSER_ADD_PARAM(THTTP_HEADER_PARAMS(hdr_WWW_Authenticate));
}
break;
case 13:
/* #line 115 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 109 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
{
}
break;
/* #line 8242 "./src/headers/thttp_header_WWW_Authenticate.c" */
/* #line 8237 "./src/headers/thttp_header_WWW_Authenticate.c" */
}
}
@ -8251,12 +8246,13 @@ _again:
_out: {}
}
/* #line 200 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 195 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 8258 "./src/headers/thttp_header_WWW_Authenticate.c" */
/* #line 8254 "./src/headers/thttp_header_WWW_Authenticate.c" */
1738
/* #line 201 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
/* #line 197 "./ragel/thttp_parser_header_WWW_Authenticate.rl" */
){
TSK_DEBUG_ERROR("Failed to parse WWW-Authenticate header.");
TSK_OBJECT_SAFE_FREE(hdr_WWW_Authenticate);
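For quick reference, here is a minimal usage sketch of the parser exercised by the actions above (a sketch only: the size argument type and the set of populated fields are inferred from the handlers visible in this diff, not from the full header file).

    #include "tinyhttp/headers/thttp_header_WWW_Authenticate.h"

    static void example_parse_www_authenticate(void)
    {
        /* Sample challenge; realm and nonce get unquoted by the actions above. */
        const char line[] = "Digest realm=\"doubango.org\", nonce=\"abc123\", qop=\"auth\", stale=false, algorithm=MD5";
        thttp_header_WWW_Authenticate_t* hdr = thttp_header_WWW_Authenticate_parse(line, sizeof(line) - 1);
        if (hdr) {
            /* scheme, realm, nonce, qop, algorithm and stale are filled by the Ragel actions */
            TSK_OBJECT_SAFE_FREE(hdr);
        }
    }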

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_header.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -24,10 +22,6 @@
/**@file thttp_parser_header.c
* @brief HTTP headers parser.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/parsers/thttp_parser_header.h"
@ -90,7 +84,7 @@
* Ragel state machine.
*/
/* #line 508 "./ragel/thttp_parser_header.rl" */
/* #line 502 "./ragel/thttp_parser_header.rl" */
int thttp_header_parse(tsk_ragel_state_t *state, thttp_message_t *message)
@ -100,8 +94,9 @@ int thttp_header_parse(tsk_ragel_state_t *state, thttp_message_t *message)
const char *pe = state->tag_end;
const char *eof = pe;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 105 "./src/parsers/thttp_parser_header.c" */
/* #line 100 "./src/parsers/thttp_parser_header.c" */
static const char _thttp_machine_parser_headers_actions[] = {
0, 1, 0, 1, 1, 1, 2, 1,
3, 1, 4, 1, 5, 1, 6, 1,
@ -2334,20 +2329,20 @@ static const int thttp_machine_parser_headers_error = 0;
static const int thttp_machine_parser_headers_en_main = 1;
/* #line 518 "./ragel/thttp_parser_header.rl" */
/* #line 513 "./ragel/thttp_parser_header.rl" */
(void)(eof);
(void)(thttp_machine_parser_headers_first_final);
(void)(thttp_machine_parser_headers_error);
(void)(thttp_machine_parser_headers_en_main);
/* #line 2344 "./src/parsers/thttp_parser_header.c" */
/* #line 2339 "./src/parsers/thttp_parser_header.c" */
{
cs = thttp_machine_parser_headers_start;
}
/* #line 523 "./ragel/thttp_parser_header.rl" */
/* #line 518 "./ragel/thttp_parser_header.rl" */
/* #line 2351 "./src/parsers/thttp_parser_header.c" */
/* #line 2346 "./src/parsers/thttp_parser_header.c" */
{
int _klen;
unsigned int _trans;
@ -2422,7 +2417,7 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 96 "./ragel/thttp_parser_header.rl" */
/* #line 90 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2431,7 +2426,7 @@ _match:
}
break;
case 1:
/* #line 105 "./ragel/thttp_parser_header.rl" */
/* #line 99 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2440,7 +2435,7 @@ _match:
}
break;
case 2:
/* #line 114 "./ragel/thttp_parser_header.rl" */
/* #line 108 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2449,7 +2444,7 @@ _match:
}
break;
case 3:
/* #line 123 "./ragel/thttp_parser_header.rl" */
/* #line 117 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2458,7 +2453,7 @@ _match:
}
break;
case 4:
/* #line 132 "./ragel/thttp_parser_header.rl" */
/* #line 126 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2467,14 +2462,14 @@ _match:
}
break;
case 5:
/* #line 141 "./ragel/thttp_parser_header.rl" */
/* #line 135 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Authorization_t *header = thttp_header_Authorization_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
}
break;
case 6:
/* #line 148 "./ragel/thttp_parser_header.rl" */
/* #line 142 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2483,7 +2478,7 @@ _match:
}
break;
case 7:
/* #line 157 "./ragel/thttp_parser_header.rl" */
/* #line 151 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2492,7 +2487,7 @@ _match:
}
break;
case 8:
/* #line 166 "./ragel/thttp_parser_header.rl" */
/* #line 160 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2501,7 +2496,7 @@ _match:
}
break;
case 9:
/* #line 175 "./ragel/thttp_parser_header.rl" */
/* #line 169 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2510,7 +2505,7 @@ _match:
}
break;
case 10:
/* #line 184 "./ragel/thttp_parser_header.rl" */
/* #line 178 "./ragel/thttp_parser_header.rl" */
{
if(!message->Content_Length){
message->Content_Length = thttp_header_Content_Length_parse(state->tag_start, (state->tag_end-state->tag_start));
@ -2523,7 +2518,7 @@ _match:
}
break;
case 11:
/* #line 197 "./ragel/thttp_parser_header.rl" */
/* #line 191 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2532,7 +2527,7 @@ _match:
}
break;
case 12:
/* #line 206 "./ragel/thttp_parser_header.rl" */
/* #line 200 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2541,7 +2536,7 @@ _match:
}
break;
case 13:
/* #line 215 "./ragel/thttp_parser_header.rl" */
/* #line 209 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2550,7 +2545,7 @@ _match:
}
break;
case 14:
/* #line 224 "./ragel/thttp_parser_header.rl" */
/* #line 218 "./ragel/thttp_parser_header.rl" */
{
if(!message->Content_Type){
message->Content_Type = thttp_header_Content_Type_parse(state->tag_start, (state->tag_end-state->tag_start));
@ -2563,7 +2558,7 @@ _match:
}
break;
case 15:
/* #line 237 "./ragel/thttp_parser_header.rl" */
/* #line 231 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2572,7 +2567,7 @@ _match:
}
break;
case 16:
/* #line 246 "./ragel/thttp_parser_header.rl" */
/* #line 240 "./ragel/thttp_parser_header.rl" */
{
thttp_header_ETag_t *header = thttp_header_ETag_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2581,7 +2576,7 @@ _match:
}
break;
case 17:
/* #line 255 "./ragel/thttp_parser_header.rl" */
/* #line 249 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2590,7 +2585,7 @@ _match:
}
break;
case 18:
/* #line 264 "./ragel/thttp_parser_header.rl" */
/* #line 258 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2599,7 +2594,7 @@ _match:
}
break;
case 19:
/* #line 273 "./ragel/thttp_parser_header.rl" */
/* #line 267 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2608,7 +2603,7 @@ _match:
}
break;
case 20:
/* #line 282 "./ragel/thttp_parser_header.rl" */
/* #line 276 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2617,7 +2612,7 @@ _match:
}
break;
case 21:
/* #line 291 "./ragel/thttp_parser_header.rl" */
/* #line 285 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2626,7 +2621,7 @@ _match:
}
break;
case 22:
/* #line 300 "./ragel/thttp_parser_header.rl" */
/* #line 294 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2635,7 +2630,7 @@ _match:
}
break;
case 23:
/* #line 309 "./ragel/thttp_parser_header.rl" */
/* #line 303 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2644,7 +2639,7 @@ _match:
}
break;
case 24:
/* #line 318 "./ragel/thttp_parser_header.rl" */
/* #line 312 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2653,7 +2648,7 @@ _match:
}
break;
case 25:
/* #line 327 "./ragel/thttp_parser_header.rl" */
/* #line 321 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2662,7 +2657,7 @@ _match:
}
break;
case 26:
/* #line 336 "./ragel/thttp_parser_header.rl" */
/* #line 330 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2671,7 +2666,7 @@ _match:
}
break;
case 27:
/* #line 345 "./ragel/thttp_parser_header.rl" */
/* #line 339 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2680,7 +2675,7 @@ _match:
}
break;
case 28:
/* #line 354 "./ragel/thttp_parser_header.rl" */
/* #line 348 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2689,21 +2684,21 @@ _match:
}
break;
case 29:
/* #line 363 "./ragel/thttp_parser_header.rl" */
/* #line 357 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Proxy_Authenticate_t *header = thttp_header_Proxy_Authenticate_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
}
break;
case 30:
/* #line 370 "./ragel/thttp_parser_header.rl" */
/* #line 364 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Proxy_Authorization_t *header = thttp_header_Proxy_Authorization_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
}
break;
case 31:
/* #line 377 "./ragel/thttp_parser_header.rl" */
/* #line 371 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2712,7 +2707,7 @@ _match:
}
break;
case 32:
/* #line 386 "./ragel/thttp_parser_header.rl" */
/* #line 380 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2721,35 +2716,35 @@ _match:
}
break;
case 33:
/* #line 395 "./ragel/thttp_parser_header.rl" */
/* #line 389 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Sec_WebSocket_Accept_t* header = thttp_header_Sec_WebSocket_Accept_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
}
break;
case 34:
/* #line 402 "./ragel/thttp_parser_header.rl" */
/* #line 396 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Sec_WebSocket_Key_t* header = thttp_header_Sec_WebSocket_Key_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
}
break;
case 35:
/* #line 409 "./ragel/thttp_parser_header.rl" */
/* #line 403 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Sec_WebSocket_Protocol_t* header = thttp_header_Sec_WebSocket_Protocol_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
}
break;
case 36:
/* #line 417 "./ragel/thttp_parser_header.rl" */
/* #line 411 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Sec_WebSocket_Version_t* header = thttp_header_Sec_WebSocket_Version_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
}
break;
case 37:
/* #line 424 "./ragel/thttp_parser_header.rl" */
/* #line 418 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2758,7 +2753,7 @@ _match:
}
break;
case 38:
/* #line 433 "./ragel/thttp_parser_header.rl" */
/* #line 427 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2767,7 +2762,7 @@ _match:
}
break;
case 39:
/* #line 442 "./ragel/thttp_parser_header.rl" */
/* #line 436 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Transfer_Encoding_t *header = thttp_header_Transfer_Encoding_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2776,7 +2771,7 @@ _match:
}
break;
case 40:
/* #line 451 "./ragel/thttp_parser_header.rl" */
/* #line 445 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2785,7 +2780,7 @@ _match:
}
break;
case 41:
/* #line 460 "./ragel/thttp_parser_header.rl" */
/* #line 454 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2794,7 +2789,7 @@ _match:
}
break;
case 42:
/* #line 469 "./ragel/thttp_parser_header.rl" */
/* #line 463 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2803,7 +2798,7 @@ _match:
}
break;
case 43:
/* #line 478 "./ragel/thttp_parser_header.rl" */
/* #line 472 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2812,14 +2807,14 @@ _match:
}
break;
case 44:
/* #line 487 "./ragel/thttp_parser_header.rl" */
/* #line 481 "./ragel/thttp_parser_header.rl" */
{
thttp_header_WWW_Authenticate_t *header = thttp_header_WWW_Authenticate_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
}
break;
case 45:
/* #line 494 "./ragel/thttp_parser_header.rl" */
/* #line 488 "./ragel/thttp_parser_header.rl" */
{
thttp_header_Dummy_t *header = thttp_header_Dummy_parse(state->tag_start, (state->tag_end-state->tag_start));
ADD_HEADER(header);
@ -2827,7 +2822,7 @@ _match:
//TSK_DEBUG_WARN("parse_header_extension_header NOT IMPLEMENTED. Will be added as Dummy header.");
}
break;
/* #line 2831 "./src/parsers/thttp_parser_header.c" */
/* #line 2826 "./src/parsers/thttp_parser_header.c" */
}
}
@ -2840,11 +2835,12 @@ _again:
_out: {}
}
/* #line 524 "./ragel/thttp_parser_header.rl" */
/* #line 519 "./ragel/thttp_parser_header.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
return ( cs >=
/* #line 2847 "./src/parsers/thttp_parser_header.c" */
/* #line 2843 "./src/parsers/thttp_parser_header.c" */
639
/* #line 525 "./ragel/thttp_parser_header.rl" */
/* #line 521 "./ragel/thttp_parser_header.rl" */
) ? 0 : -1;
}

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_message.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -21,13 +19,9 @@
* along with DOUBANGO.
*
*/
/**@file thttp_parser_message.c
* @brief HTTP parser.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/parsers/thttp_parser_message.h"
#include "tinyhttp/parsers/thttp_parser_header.h"
@ -45,13 +39,14 @@ static void thttp_message_parser_eoh(tsk_ragel_state_t *state, thttp_message_t *
* Ragel state machine.
*/
/* #line 165 "./ragel/thttp_parser_message.rl" */
/* #line 159 "./ragel/thttp_parser_message.rl" */
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* Ragel data */
/* #line 55 "./src/parsers/thttp_parser_message.c" */
/* #line 50 "./src/parsers/thttp_parser_message.c" */
static const char _thttp_machine_parser_message_actions[] = {
0, 1, 0, 1, 1, 1, 2, 1,
3, 1, 4, 1, 5, 1, 6, 1,
@ -165,7 +160,8 @@ static const int thttp_machine_parser_message_error = 0;
static const int thttp_machine_parser_message_en_main = 1;
/* #line 170 "./ragel/thttp_parser_message.rl" */
/* #line 165 "./ragel/thttp_parser_message.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
/** Parses raw HTTP buffer.
*
@ -197,9 +193,9 @@ int thttp_message_parse(tsk_ragel_state_t *state, thttp_message_t **result, tsk_
/* Check result */
if( state->cs <
/* #line 201 "./src/parsers/thttp_parser_message.c" */
/* #line 197 "./src/parsers/thttp_parser_message.c" */
36
/* #line 200 "./ragel/thttp_parser_message.rl" */
/* #line 196 "./ragel/thttp_parser_message.rl" */
){
TSK_DEBUG_ERROR("Failed to parse HTTP message.");
TSK_OBJECT_SAFE_FREE(*result);
@ -215,12 +211,12 @@ static void thttp_message_parser_init(tsk_ragel_state_t *state)
/* Ragel machine initialization. */
/* #line 219 "./src/parsers/thttp_parser_message.c" */
/* #line 215 "./src/parsers/thttp_parser_message.c" */
{
cs = thttp_machine_parser_message_start;
}
/* #line 215 "./ragel/thttp_parser_message.rl" */
/* #line 211 "./ragel/thttp_parser_message.rl" */
state->cs = cs;
}
@ -232,8 +228,9 @@ static void thttp_message_parser_execute(tsk_ragel_state_t *state, thttp_message
const char *pe = state->pe;
const char *eof = state->eof;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 237 "./src/parsers/thttp_parser_message.c" */
/* #line 234 "./src/parsers/thttp_parser_message.c" */
{
int _klen;
unsigned int _trans;
@ -308,13 +305,13 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 49 "./ragel/thttp_parser_message.rl" */
/* #line 43 "./ragel/thttp_parser_message.rl" */
{
state->tag_start = p;
}
break;
case 1:
/* #line 54 "./ragel/thttp_parser_message.rl" */
/* #line 48 "./ragel/thttp_parser_message.rl" */
{
int len;
state->tag_end = p;
@ -333,7 +330,7 @@ _match:
}
break;
case 2:
/* #line 72 "./ragel/thttp_parser_message.rl" */
/* #line 66 "./ragel/thttp_parser_message.rl" */
{
int len;
state->tag_end = p;
@ -345,7 +342,7 @@ _match:
}
break;
case 3:
/* #line 83 "./ragel/thttp_parser_message.rl" */
/* #line 77 "./ragel/thttp_parser_message.rl" */
{
int len;
state->tag_end = p;
@ -358,7 +355,7 @@ _match:
}
break;
case 4:
/* #line 95 "./ragel/thttp_parser_message.rl" */
/* #line 89 "./ragel/thttp_parser_message.rl" */
{
int len;
state->tag_end = p;
@ -374,7 +371,7 @@ _match:
}
break;
case 5:
/* #line 110 "./ragel/thttp_parser_message.rl" */
/* #line 104 "./ragel/thttp_parser_message.rl" */
{
int len;
state->tag_end = p;
@ -387,7 +384,7 @@ _match:
}
break;
case 6:
/* #line 122 "./ragel/thttp_parser_message.rl" */
/* #line 116 "./ragel/thttp_parser_message.rl" */
{
int len;
state->tag_end = p;
@ -402,7 +399,7 @@ _match:
}
break;
case 7:
/* #line 145 "./ragel/thttp_parser_message.rl" */
/* #line 139 "./ragel/thttp_parser_message.rl" */
{
state->cs = cs;
state->p = p;
@ -417,7 +414,7 @@ _match:
eof = state->eof;
}
break;
/* #line 421 "./src/parsers/thttp_parser_message.c" */
/* #line 418 "./src/parsers/thttp_parser_message.c" */
}
}
@ -430,7 +427,8 @@ _again:
_out: {}
}
/* #line 227 "./ragel/thttp_parser_message.rl" */
/* #line 224 "./ragel/thttp_parser_message.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
state->cs = cs;
state->p = p;

View File

@ -1,9 +1,7 @@
/* #line 1 "./ragel/thttp_parser_url.rl" */
/*
* Copyright (C) 2010-2011 Mamadou Diop.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
* Copyright (C) 2010-2015 Mamadou Diop.
*
* This file is part of Open Source Doubango Framework.
*
@ -21,13 +19,8 @@
* along with DOUBANGO.
*
*/
/**@file thttp_parser_url.c
* @brief HTTP/HTTPS URL parser.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "tinyhttp/parsers/thttp_parser_url.h"
@ -39,7 +32,7 @@
* Ragel state machine.
*/
/* #line 89 "./ragel/thttp_parser_url.rl" */
/* #line 82 "./ragel/thttp_parser_url.rl" */
/**@ingroup thttp_url_group
@ -70,8 +63,9 @@ thttp_url_t *thttp_url_parse(const char *urlstring, tsk_size_t length)
const char *tag_start = 0;
TSK_RAGEL_DISABLE_WARNINGS_BEGIN()
/* #line 75 "./src/parsers/thttp_parser_url.c" */
/* #line 69 "./src/parsers/thttp_parser_url.c" */
static const char _thttp_machine_parser_url_actions[] = {
0, 1, 1, 1, 2, 1, 6, 1,
7, 1, 8, 1, 9, 1, 10, 2,
@ -287,7 +281,7 @@ static const int thttp_machine_parser_url_error = 0;
static const int thttp_machine_parser_url_en_main = 56;
/* #line 120 "./ragel/thttp_parser_url.rl" */
/* #line 114 "./ragel/thttp_parser_url.rl" */
(void)(ts);
(void)(te);
(void)(act);
@ -296,14 +290,14 @@ static const int thttp_machine_parser_url_en_main = 56;
(void)(thttp_machine_parser_url_error);
(void)(thttp_machine_parser_url_en_main);
/* #line 300 "./src/parsers/thttp_parser_url.c" */
/* #line 294 "./src/parsers/thttp_parser_url.c" */
{
cs = thttp_machine_parser_url_start;
}
/* #line 128 "./ragel/thttp_parser_url.rl" */
/* #line 122 "./ragel/thttp_parser_url.rl" */
/* #line 307 "./src/parsers/thttp_parser_url.c" */
/* #line 301 "./src/parsers/thttp_parser_url.c" */
{
int _klen;
unsigned int _trans;
@ -378,56 +372,56 @@ _match:
switch ( *_acts++ )
{
case 0:
/* #line 45 "./ragel/thttp_parser_url.rl" */
/* #line 38 "./ragel/thttp_parser_url.rl" */
{
tag_start = p;
}
break;
case 1:
/* #line 50 "./ragel/thttp_parser_url.rl" */
/* #line 43 "./ragel/thttp_parser_url.rl" */
{ url->scheme = tsk_strdup("http"), url->type = thttp_url_http; }
break;
case 2:
/* #line 51 "./ragel/thttp_parser_url.rl" */
/* #line 44 "./ragel/thttp_parser_url.rl" */
{ url->scheme = tsk_strdup("https"), url->type = thttp_url_https; }
break;
case 3:
/* #line 54 "./ragel/thttp_parser_url.rl" */
/* #line 47 "./ragel/thttp_parser_url.rl" */
{ url->host_type = url->host_type = thttp_host_ipv4; }
break;
case 4:
/* #line 55 "./ragel/thttp_parser_url.rl" */
/* #line 48 "./ragel/thttp_parser_url.rl" */
{ url->host_type = url->host_type = thttp_host_ipv6; }
break;
case 5:
/* #line 56 "./ragel/thttp_parser_url.rl" */
/* #line 49 "./ragel/thttp_parser_url.rl" */
{ url->host_type = url->host_type = thttp_host_hostname; }
break;
case 6:
/* #line 58 "./ragel/thttp_parser_url.rl" */
/* #line 51 "./ragel/thttp_parser_url.rl" */
{
TSK_PARSER_SET_STRING(url->host);
}
break;
case 7:
/* #line 62 "./ragel/thttp_parser_url.rl" */
/* #line 55 "./ragel/thttp_parser_url.rl" */
{
have_port = 1;
TSK_PARSER_SET_INT(url->port);
}
break;
case 8:
/* #line 67 "./ragel/thttp_parser_url.rl" */
/* #line 60 "./ragel/thttp_parser_url.rl" */
{
TSK_PARSER_SET_STRING(url->hpath);
}
break;
case 10:
/* #line 75 "./ragel/thttp_parser_url.rl" */
/* #line 68 "./ragel/thttp_parser_url.rl" */
{
}
break;
/* #line 431 "./src/parsers/thttp_parser_url.c" */
/* #line 425 "./src/parsers/thttp_parser_url.c" */
}
}
@ -444,37 +438,37 @@ _again:
while ( __nacts-- > 0 ) {
switch ( *__acts++ ) {
case 0:
/* #line 45 "./ragel/thttp_parser_url.rl" */
/* #line 38 "./ragel/thttp_parser_url.rl" */
{
tag_start = p;
}
break;
case 6:
/* #line 58 "./ragel/thttp_parser_url.rl" */
/* #line 51 "./ragel/thttp_parser_url.rl" */
{
TSK_PARSER_SET_STRING(url->host);
}
break;
case 7:
/* #line 62 "./ragel/thttp_parser_url.rl" */
/* #line 55 "./ragel/thttp_parser_url.rl" */
{
have_port = 1;
TSK_PARSER_SET_INT(url->port);
}
break;
case 8:
/* #line 67 "./ragel/thttp_parser_url.rl" */
/* #line 60 "./ragel/thttp_parser_url.rl" */
{
TSK_PARSER_SET_STRING(url->hpath);
}
break;
case 9:
/* #line 71 "./ragel/thttp_parser_url.rl" */
/* #line 64 "./ragel/thttp_parser_url.rl" */
{
TSK_PARSER_SET_STRING(url->search);
}
break;
/* #line 478 "./src/parsers/thttp_parser_url.c" */
/* #line 472 "./src/parsers/thttp_parser_url.c" */
}
}
}
@ -482,12 +476,13 @@ _again:
_out: {}
}
/* #line 129 "./ragel/thttp_parser_url.rl" */
/* #line 123 "./ragel/thttp_parser_url.rl" */
TSK_RAGEL_DISABLE_WARNINGS_END()
if( cs <
/* #line 489 "./src/parsers/thttp_parser_url.c" */
/* #line 484 "./src/parsers/thttp_parser_url.c" */
56
/* #line 130 "./ragel/thttp_parser_url.rl" */
/* #line 125 "./ragel/thttp_parser_url.rl" */
){
TSK_DEBUG_ERROR("Failed to parse HTTP/HTTPS URL: '%.*s'", length, urlstring);
TSK_OBJECT_SAFE_FREE(url);
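As an illustration of the entry point whose signature appears above, here is a hedged usage sketch of thttp_url_parse; the field names follow the Ragel actions in this diff and the error handling is deliberately minimal.

    #include "tinyhttp/parsers/thttp_parser_url.h"

    static void example_parse_url(void)
    {
        const char urlstring[] = "https://doubango.org:8080/index.html?q=1";
        thttp_url_t* url = thttp_url_parse(urlstring, sizeof(urlstring) - 1);
        if (url) {
            /* url->scheme, url->host, url->port, url->hpath and url->search are set by the actions above */
            TSK_OBJECT_SAFE_FREE(url);
        }
    }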

View File

@ -1,8 +1,6 @@
/*
* Copyright (C) 2010-2011 Mamadou Diop.
* Copyright (C) 2010-2015 Mamadou DIOP.
*
* Contact: Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
* This file is part of Open Source Doubango Framework.
*
* DOUBANGO is free software: you can redistribute it and/or modify
@ -21,10 +19,6 @@
*/
/**@file thttp.c
* @brief HTTP (RFC 2616) and HTTP basic/digest authentication (RFC 2617) implementations.
*
* @author Mamadou Diop <diopmamadou(at)doubango[dot]org>
*
*/
#include "thttp.h"
@ -374,7 +368,7 @@ parse_buffer:
/* RFC 2616 - 19.4.6 Introduction of Transfer-Encoding */
// read chunk-size, chunk-extension (if any) and CRLF
tsk_size_t chunk_size = (tsk_size_t)tsk_atox(start);
if((index = tsk_strindexOf(start, (end-start), "\r\n")) >=0){
if((index = tsk_strindexOf(start, (tsk_size_t)(end-start), "\r\n")) >=0){
start += index + 2/*CRLF*/;
}
else{
@ -383,7 +377,7 @@ parse_buffer:
}
if(chunk_size == 0 && ((start + 2) <= end) && *start == '\r' && *(start+ 1) == '\n'){
int parsed_len = (start - (const char*)(TSK_BUFFER_TO_U8(dialog->buf))) + 2/*CRLF*/;
int parsed_len = (int)(start - (const char*)(TSK_BUFFER_TO_U8(dialog->buf))) + 2/*CRLF*/;
tsk_buffer_remove(dialog->buf, 0, parsed_len);
have_all_content = tsk_true;
break;
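The two casts above are the core of the Win64 change: under the LLP64 model a pointer difference is a 64-bit ptrdiff_t while int and long stay 32-bit, so the conversions to int and tsk_size_t are spelled out instead of left implicit. A minimal sketch of the pattern (names are illustrative only):

    #include <stddef.h>

    /* On Win64 (LLP64) a pointer difference is 64-bit, but int is still 32-bit,
     * so the narrowing is made explicit, matching the casts added above. */
    static int consumed_bytes(const char* start, const char* current)
    {
        ptrdiff_t diff = current - start;   /* 64-bit on Win64 */
        return (int)diff;                   /* explicit, intentional narrowing */
    }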

View File

@ -237,7 +237,7 @@ int thttp_message_append_content(thttp_message_t *self, const void* content, tsk
THTTP_MESSAGE_ADD_HEADER(self, THTTP_HEADER_CONTENT_LENGTH_VA_ARGS(size));
}
else{
self->Content_Length->length += size;
self->Content_Length->length += (uint32_t)size;
}
return 0;
}
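The (uint32_t) cast follows the same rule: tsk_size_t widens to 64 bits on Win64 while the Content-Length value it feeds is stored as a 32-bit integer (as the cast suggests), so the truncation is explicit rather than silent. A short sketch of the idea (size_t stands in for tsk_size_t; names are illustrative):

    #include <stdint.h>
    #include <stddef.h>

    static uint32_t append_to_content_length(uint32_t current, size_t appended)
    {
        /* Explicit narrowing mirrors the cast above; bodies handled here are
         * expected to stay well below 4 GB. */
        return current + (uint32_t)appended;
    }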

Some files were not shown because too many files have changed in this diff.