1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
#include "modules/audio_coding/test/Channel.h"

#include <assert.h>
#include <stdio.h>
#include <string.h>

#include <iostream>

#include "rtc_base/format_macros.h"
#include "rtc_base/timeutils.h"
18
19 namespace webrtc {
20
SendData(FrameType frameType,uint8_t payloadType,uint32_t timeStamp,const uint8_t * payloadData,size_t payloadSize,const RTPFragmentationHeader * fragmentation)21 int32_t Channel::SendData(FrameType frameType,
22 uint8_t payloadType,
23 uint32_t timeStamp,
24 const uint8_t* payloadData,
25 size_t payloadSize,
26 const RTPFragmentationHeader* fragmentation) {
27 WebRtcRTPHeader rtpInfo;
28 int32_t status;
29 size_t payloadDataSize = payloadSize;
30
31 rtpInfo.header.markerBit = false;
32 rtpInfo.header.ssrc = 0;
33 rtpInfo.header.sequenceNumber = (external_sequence_number_ < 0) ?
34 _seqNo++ : static_cast<uint16_t>(external_sequence_number_);
35 rtpInfo.header.payloadType = payloadType;
36 rtpInfo.header.timestamp = (external_send_timestamp_ < 0) ? timeStamp :
37 static_cast<uint32_t>(external_send_timestamp_);
38
39 if (frameType == kAudioFrameCN) {
40 rtpInfo.type.Audio.isCNG = true;
41 } else {
42 rtpInfo.type.Audio.isCNG = false;
43 }
44 if (frameType == kEmptyFrame) {
45 // When frame is empty, we should not transmit it. The frame size of the
46 // next non-empty frame will be based on the previous frame size.
47 _useLastFrameSize = _lastFrameSizeSample > 0;
48 return 0;
49 }
50
51 rtpInfo.type.Audio.channel = 1;
52 // Treat fragmentation separately
53 if (fragmentation != NULL) {
54 // If silence for too long, send only new data.
55 if ((fragmentation->fragmentationVectorSize == 2) &&
56 (fragmentation->fragmentationTimeDiff[1] <= 0x3fff)) {
57 // only 0x80 if we have multiple blocks
58 _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
59 size_t REDheader = (fragmentation->fragmentationTimeDiff[1] << 10) +
60 fragmentation->fragmentationLength[1];
61 _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
62 _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
63 _payloadData[3] = uint8_t(REDheader & 0x000000FF);
64
65 _payloadData[4] = fragmentation->fragmentationPlType[0];
66 // copy the RED data
67 memcpy(_payloadData + 5,
68 payloadData + fragmentation->fragmentationOffset[1],
69 fragmentation->fragmentationLength[1]);
70 // copy the normal data
71 memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
72 payloadData + fragmentation->fragmentationOffset[0],
73 fragmentation->fragmentationLength[0]);
74 payloadDataSize += 5;
75 } else {
76 // single block (newest one)
77 memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
78 fragmentation->fragmentationLength[0]);
79 payloadDataSize = fragmentation->fragmentationLength[0];
80 rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
81 }
82 } else {
83 memcpy(_payloadData, payloadData, payloadDataSize);
84 if (_isStereo) {
85 if (_leftChannel) {
86 memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
87 _leftChannel = false;
88 rtpInfo.type.Audio.channel = 1;
89 } else {
90 memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
91 _leftChannel = true;
92 rtpInfo.type.Audio.channel = 2;
93 }
94 }
95 }
96
97 _channelCritSect.Enter();
98 if (_saveBitStream) {
99 //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
100 }
101
102 if (!_isStereo) {
103 CalcStatistics(rtpInfo, payloadSize);
104 }
105 _useLastFrameSize = false;
106 _lastInTimestamp = timeStamp;
107 _totalBytes += payloadDataSize;
108 _channelCritSect.Leave();
109
110 if (_useFECTestWithPacketLoss) {
111 _packetLoss += 1;
112 if (_packetLoss == 3) {
113 _packetLoss = 0;
114 return 0;
115 }
116 }
117
118 if (num_packets_to_drop_ > 0) {
119 num_packets_to_drop_--;
120 return 0;
121 }
122
123 status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize, rtpInfo);
124
125 return status;
126 }
127
128 // TODO(turajs): rewite this method.
CalcStatistics(WebRtcRTPHeader & rtpInfo,size_t payloadSize)129 void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, size_t payloadSize) {
130 int n;
131 if ((rtpInfo.header.payloadType != _lastPayloadType)
132 && (_lastPayloadType != -1)) {
133 // payload-type is changed.
134 // we have to terminate the calculations on the previous payload type
135 // we ignore the last packet in that payload type just to make things
136 // easier.
137 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
138 if (_lastPayloadType == _payloadStats[n].payloadType) {
139 _payloadStats[n].newPacket = true;
140 break;
141 }
142 }
143 }
144 _lastPayloadType = rtpInfo.header.payloadType;
145
146 bool newPayload = true;
147 ACMTestPayloadStats* currentPayloadStr = NULL;
148 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
149 if (rtpInfo.header.payloadType == _payloadStats[n].payloadType) {
150 newPayload = false;
151 currentPayloadStr = &_payloadStats[n];
152 break;
153 }
154 }
155
156 if (!newPayload) {
157 if (!currentPayloadStr->newPacket) {
158 if (!_useLastFrameSize) {
159 _lastFrameSizeSample = (uint32_t) ((uint32_t) rtpInfo.header.timestamp -
160 (uint32_t) currentPayloadStr->lastTimestamp);
161 }
162 assert(_lastFrameSizeSample > 0);
163 int k = 0;
164 for (; k < MAX_NUM_FRAMESIZES; ++k) {
165 if ((currentPayloadStr->frameSizeStats[k].frameSizeSample ==
166 _lastFrameSizeSample) ||
167 (currentPayloadStr->frameSizeStats[k].frameSizeSample == 0)) {
168 break;
169 }
170 }
171 if (k == MAX_NUM_FRAMESIZES) {
172 // New frame size found but no space to count statistics on it. Skip it.
173 printf("No memory to store statistics for payload %d : frame size %d\n",
174 _lastPayloadType, _lastFrameSizeSample);
175 return;
176 }
177 ACMTestFrameSizeStats* currentFrameSizeStats = &(currentPayloadStr
178 ->frameSizeStats[k]);
179 currentFrameSizeStats->frameSizeSample = (int16_t) _lastFrameSizeSample;
180
181 // increment the number of encoded samples.
182 currentFrameSizeStats->totalEncodedSamples += _lastFrameSizeSample;
183 // increment the number of recveived packets
184 currentFrameSizeStats->numPackets++;
185 // increment the total number of bytes (this is based on
186 // the previous payload we don't know the frame-size of
187 // the current payload.
188 currentFrameSizeStats->totalPayloadLenByte += currentPayloadStr
189 ->lastPayloadLenByte;
190 // store the maximum payload-size (this is based on
191 // the previous payload we don't know the frame-size of
192 // the current payload.
193 if (currentFrameSizeStats->maxPayloadLen
194 < currentPayloadStr->lastPayloadLenByte) {
195 currentFrameSizeStats->maxPayloadLen = currentPayloadStr
196 ->lastPayloadLenByte;
197 }
198 // store the current values for the next time
199 currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
200 currentPayloadStr->lastPayloadLenByte = payloadSize;
201 } else {
202 currentPayloadStr->newPacket = false;
203 currentPayloadStr->lastPayloadLenByte = payloadSize;
204 currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
205 currentPayloadStr->payloadType = rtpInfo.header.payloadType;
206 memset(currentPayloadStr->frameSizeStats, 0, MAX_NUM_FRAMESIZES *
207 sizeof(ACMTestFrameSizeStats));
208 }
209 } else {
210 n = 0;
211 while (_payloadStats[n].payloadType != -1) {
212 n++;
213 }
214 // first packet
215 _payloadStats[n].newPacket = false;
216 _payloadStats[n].lastPayloadLenByte = payloadSize;
217 _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
218 _payloadStats[n].payloadType = rtpInfo.header.payloadType;
219 memset(_payloadStats[n].frameSizeStats, 0, MAX_NUM_FRAMESIZES *
220 sizeof(ACMTestFrameSizeStats));
221 }
222 }
223
Channel(int16_t chID)224 Channel::Channel(int16_t chID)
225 : _receiverACM(NULL),
226 _seqNo(0),
227 _bitStreamFile(NULL),
228 _saveBitStream(false),
229 _lastPayloadType(-1),
230 _isStereo(false),
231 _leftChannel(true),
232 _lastInTimestamp(0),
233 _useLastFrameSize(false),
234 _lastFrameSizeSample(0),
235 _packetLoss(0),
236 _useFECTestWithPacketLoss(false),
237 _beginTime(rtc::TimeMillis()),
238 _totalBytes(0),
239 external_send_timestamp_(-1),
240 external_sequence_number_(-1),
241 num_packets_to_drop_(0) {
242 int n;
243 int k;
244 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
245 _payloadStats[n].payloadType = -1;
246 _payloadStats[n].newPacket = true;
247 for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
248 _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
249 _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
250 _payloadStats[n].frameSizeStats[k].numPackets = 0;
251 _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
252 _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
253 }
254 }
255 if (chID >= 0) {
256 _saveBitStream = true;
257 char bitStreamFileName[500];
258 sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
259 _bitStreamFile = fopen(bitStreamFileName, "wb");
260 } else {
261 _saveBitStream = false;
262 }
263 }
264
~Channel()265 Channel::~Channel() {
266 }
267
RegisterReceiverACM(AudioCodingModule * acm)268 void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
269 _receiverACM = acm;
270 return;
271 }
272
ResetStats()273 void Channel::ResetStats() {
274 int n;
275 int k;
276 _channelCritSect.Enter();
277 _lastPayloadType = -1;
278 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
279 _payloadStats[n].payloadType = -1;
280 _payloadStats[n].newPacket = true;
281 for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
282 _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
283 _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
284 _payloadStats[n].frameSizeStats[k].numPackets = 0;
285 _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
286 _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
287 }
288 }
289 _beginTime = rtc::TimeMillis();
290 _totalBytes = 0;
291 _channelCritSect.Leave();
292 }
293
Stats(CodecInst & codecInst,ACMTestPayloadStats & payloadStats)294 int16_t Channel::Stats(CodecInst& codecInst,
295 ACMTestPayloadStats& payloadStats) {
296 _channelCritSect.Enter();
297 int n;
298 payloadStats.payloadType = -1;
299 for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
300 if (_payloadStats[n].payloadType == codecInst.pltype) {
301 memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
302 break;
303 }
304 }
305 if (payloadStats.payloadType == -1) {
306 _channelCritSect.Leave();
307 return -1;
308 }
309 for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
310 if (payloadStats.frameSizeStats[n].frameSizeSample == 0) {
311 _channelCritSect.Leave();
312 return 0;
313 }
314 payloadStats.frameSizeStats[n].usageLenSec = (double) payloadStats
315 .frameSizeStats[n].totalEncodedSamples / (double) codecInst.plfreq;
316
317 payloadStats.frameSizeStats[n].rateBitPerSec =
318 payloadStats.frameSizeStats[n].totalPayloadLenByte * 8
319 / payloadStats.frameSizeStats[n].usageLenSec;
320
321 }
322 _channelCritSect.Leave();
323 return 0;
324 }
325
Stats(uint32_t * numPackets)326 void Channel::Stats(uint32_t* numPackets) {
327 _channelCritSect.Enter();
328 int k;
329 int n;
330 memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
331 for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
332 if (_payloadStats[k].payloadType == -1) {
333 break;
334 }
335 numPackets[k] = 0;
336 for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
337 if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
338 break;
339 }
340 numPackets[k] += _payloadStats[k].frameSizeStats[n].numPackets;
341 }
342 }
343 _channelCritSect.Leave();
344 }
345
Stats(uint8_t * payloadType,uint32_t * payloadLenByte)346 void Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte) {
347 _channelCritSect.Enter();
348
349 int k;
350 int n;
351 memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
352 for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
353 if (_payloadStats[k].payloadType == -1) {
354 break;
355 }
356 payloadType[k] = (uint8_t) _payloadStats[k].payloadType;
357 payloadLenByte[k] = 0;
358 for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
359 if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
360 break;
361 }
362 payloadLenByte[k] += (uint16_t) _payloadStats[k].frameSizeStats[n]
363 .totalPayloadLenByte;
364 }
365 }
366
367 _channelCritSect.Leave();
368 }
369
PrintStats(CodecInst & codecInst)370 void Channel::PrintStats(CodecInst& codecInst) {
371 ACMTestPayloadStats payloadStats;
372 Stats(codecInst, payloadStats);
373 printf("%s %d kHz\n", codecInst.plname, codecInst.plfreq / 1000);
374 printf("=====================================================\n");
375 if (payloadStats.payloadType == -1) {
376 printf("No Packets are sent with payload-type %d (%s)\n\n",
377 codecInst.pltype, codecInst.plname);
378 return;
379 }
380 for (int k = 0; k < MAX_NUM_FRAMESIZES; k++) {
381 if (payloadStats.frameSizeStats[k].frameSizeSample == 0) {
382 break;
383 }
384 printf("Frame-size.................... %d samples\n",
385 payloadStats.frameSizeStats[k].frameSizeSample);
386 printf("Average Rate.................. %.0f bits/sec\n",
387 payloadStats.frameSizeStats[k].rateBitPerSec);
388 printf("Maximum Payload-Size.......... %" PRIuS " Bytes\n",
389 payloadStats.frameSizeStats[k].maxPayloadLen);
390 printf(
391 "Maximum Instantaneous Rate.... %.0f bits/sec\n",
392 ((double) payloadStats.frameSizeStats[k].maxPayloadLen * 8.0
393 * (double) codecInst.plfreq)
394 / (double) payloadStats.frameSizeStats[k].frameSizeSample);
395 printf("Number of Packets............. %u\n",
396 (unsigned int) payloadStats.frameSizeStats[k].numPackets);
397 printf("Duration...................... %0.3f sec\n\n",
398 payloadStats.frameSizeStats[k].usageLenSec);
399
400 }
401
402 }
403
LastInTimestamp()404 uint32_t Channel::LastInTimestamp() {
405 uint32_t timestamp;
406 _channelCritSect.Enter();
407 timestamp = _lastInTimestamp;
408 _channelCritSect.Leave();
409 return timestamp;
410 }
411
BitRate()412 double Channel::BitRate() {
413 double rate;
414 uint64_t currTime = rtc::TimeMillis();
415 _channelCritSect.Enter();
416 rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
417 _channelCritSect.Leave();
418 return rate;
419 }
420
421 } // namespace webrtc
422