uhd: increase initial transmit latency and disable adaptive control
The underrun behaviour of the USRP2 is different from the USRP1, and the adaptive latency mechanism is not directly transferable. Instead, fix the latency at a higher starting value, which effectively buffers more samples on the host in front of the Ethernet interface. An alternative may be to use the adaptive approach with USRP2-specific upper and lower bounds. For now, just use preprocessor directives until a better solution comes along.

Signed-off-by: Thomas Tsou <ttsou@vt.edu>
commit 589dd9091e
parent af2ded3b01
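As a rough sketch only (not part of this commit), the adaptive alternative mentioned above would clamp the latency adjustment between device-specific bounds instead of compiling the adjustment out; the bound constants and the helper below are hypothetical and only illustrate the idea:

// Hypothetical clamped latency update; UHD_LATENCY_MIN/MAX and this helper
// do not exist in the tree.
static const GSM::Time UHD_LATENCY_MIN(1,0);   // assumed lower bound
static const GSM::Time UHD_LATENCY_MAX(8,0);   // assumed upper bound

void adjustTransmitLatency(RadioInterface *radio, GSM::Time &latency)
{
  if (radio->isUnderrun()) {
    // Underrun: the host is not feeding samples fast enough, so buffer
    // one more GSM frame ahead of the Ethernet interface.
    if (latency < UHD_LATENCY_MAX)
      latency = latency + GSM::Time(1,0);
  } else if (latency > UHD_LATENCY_MIN) {
    // No underrun: creep back down by one timeslot.
    latency.decTN();
  }
}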
@@ -29,12 +29,10 @@
 */
 
-#include <stdio.h>
-#include <cstdio>
-
+#include "config.h"
 #include "Transceiver.h"
 #include <Logger.h>
 
 #include <cstdio>
 
 
 Transceiver::Transceiver(int wBasePort,
@@ -696,6 +694,7 @@ void Transceiver::driveTransmitFIFO()
   radioClock->wait(); // wait until clock updates
   while (radioClock->get() + mTransmitLatency >
                              mTransmitDeadlineClock) {
+#ifndef USE_UHD
     // if underrun, then we're not providing bursts to radio/USRP fast
     // enough. Need to increase latency by one GSM frame.
     if (mRadioInterface->isUnderrun()) {
@@ -717,6 +716,7 @@ void Transceiver::driveTransmitFIFO()
         }
       }
     }
+#endif
     // time to push burst to transmit FIFO
     pushRadioVector(mTransmitDeadlineClock);
     mTransmitDeadlineClock.incTN();
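With USE_UHD defined, the guarded block above compiles out and the transmit loop in driveTransmitFIFO() reduces to the paraphrase below, leaving mTransmitLatency at whatever initial value the constructor received:

// Paraphrased effective behaviour under USE_UHD, not the literal source.
while (radioClock->get() + mTransmitLatency > mTransmitDeadlineClock) {
  pushRadioVector(mTransmitDeadlineClock);  // push the next burst to the FIFO
  mTransmitDeadlineClock.incTN();           // advance the deadline by one timeslot
}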
@@ -56,7 +56,7 @@ int main(int argc, char *argv[]) {
   }
 
   RadioInterface* radio = new RadioInterface(usrp,3);
-  Transceiver *trx = new Transceiver(5700,"127.0.0.1",SAMPSPERSYM,GSM::Time(2,0),radio);
+  Transceiver *trx = new Transceiver(5700,"127.0.0.1",SAMPSPERSYM,GSM::Time(3,0),radio);
   trx->transmitFIFO(radio->transmitFIFO());
   trx->receiveFIFO(radio->receiveFIFO());
 
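Assuming the GSM::Time argument is read as (frame number, timeslot), as elsewhere in the transceiver, the hunk above raises the fixed starting latency from two to three GSM frames, i.e. roughly an extra frame of samples buffered ahead of the device:

// Rough arithmetic behind the change; millisecond figures are approximate.
// GSM::Time(2,0): 2 frames * 4.615 ms/frame  ~=  9.2 ms of host-side buffering
// GSM::Time(3,0): 3 frames * 4.615 ms/frame  ~= 13.8 ms of host-side buffering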