//=========================================================
//  MusE
//  Linux Music Editor
//  $Id: wavetrack.cpp,v 1.15.2.12 2009/12/20 05:00:35 terminator356 Exp $
//
//  (C) Copyright 2003 Werner Schweer (ws@seh.de)
//
//  This program is free software; you can redistribute it and/or
//  modify it under the terms of the GNU General Public License
//  as published by the Free Software Foundation; version 2 of
//  the License, or (at your option) any later version.
//
//  This program is distributed in the hope that it will be useful,
//  but WITHOUT ANY WARRANTY; without even the implied warranty of
//  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
//  GNU General Public License for more details.
//
//  You should have received a copy of the GNU General Public License
//  along with this program; if not, write to the Free Software
//  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
//
//=========================================================

#include <stdint.h>

#include "muse_math.h"
#include "track.h"
#include "event.h"
#include "audio.h"
#include "wave.h"
#include "xml.h"
#include "song.h"
#include "globals.h"
#include "gconfig.h"
#include "al/dsp.h"
#include "audioprefetch.h"
#include "latency_compensator.h"
#include "config.h"

// Turn on some cool terminal 'peak' meters for debugging
//  presence of actual audio at various places
// #define NODE_DEBUG_TERMINAL_PEAK_METERS

// For debugging output: Uncomment the fprintf section.
#define WAVETRACK_DEBUG(dev, format, args...) // fprintf(dev, format, ##args)
// For debugging transport timing: Uncomment the fprintf section.
#define WAVETRACK_DEBUG_TRANSPORT_SYNC(dev, format, args...) // fprintf(dev, format, ##args);
namespace MusECore {

//---------------------------------------------------------
//   WaveTrack
//---------------------------------------------------------

// Default 1 channel for wave tracks.
WaveTrack::WaveTrack() : AudioTrack(Track::WAVE, 1)
{
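  // ~0 marks the prefetch write position as not yet set.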
  _prefetchWritePos = ~0;
}

WaveTrack::WaveTrack(const WaveTrack& wt, int flags) : AudioTrack(wt, flags)
{
  _prefetchWritePos = ~0;

  internal_assign(wt, flags | Track::ASSIGN_PROPERTIES);
}

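//---------------------------------------------------------
//   internal_assign
//    Duplicate, copy or clone parts from the given track,
//    depending on the assign flags.
//---------------------------------------------------------
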
void WaveTrack::internal_assign(const Track& t, int flags)
{
      if(t.type() != WAVE)
        return;
      //const WaveTrack& wt = (const WaveTrack&)t;

      const bool dup = flags & ASSIGN_DUPLICATE_PARTS;
      const bool cpy = flags & ASSIGN_COPY_PARTS;
      const bool cln = flags & ASSIGN_CLONE_PARTS;
      if(dup || cpy || cln)
      {
        const PartList* pl = t.cparts();
        for (ciPart ip = pl->begin(); ip != pl->end(); ++ip) {
              Part* spart = ip->second;
              Part* dpart = 0;
              if(dup)
                dpart = spart->hasClones() ? spart->createNewClone() : spart->duplicate();
              else if(cpy)
                dpart = spart->duplicate();
              else if(cln)
                dpart = spart->createNewClone();
              if(dpart)
              {
                dpart->setTrack(this);
                parts()->add(dpart);
              }
              }
      }

}

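//---------------------------------------------------------
//   assign
//---------------------------------------------------------
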
void WaveTrack::assign(const Track& t, int flags)
{
      AudioTrack::assign(t, flags);
      internal_assign(t, flags);
}

//---------------------------------------------------------
//   seekData
//    called from prefetch thread
//---------------------------------------------------------

void WaveTrack::seekData(sf_count_t pos)
      {
      WAVETRACK_DEBUG(stderr, "WaveTrack::seekData %s pos:%ld\n", name().toLatin1().constData(), pos);

      PartList* pl = parts();
      for (iPart ip = pl->begin(); ip != pl->end(); ++ip) {
            WavePart* part = (WavePart*)(ip->second);
            unsigned p_spos = part->frame();
            EventList& el = part->nonconst_events();
            for (iEvent ie = el.begin(); ie != el.end(); ++ie) {
                  Event& event = ie->second;
                  unsigned e_spos  = event.frame() + p_spos;
                  sf_count_t offset = 0;

#ifdef ALLOW_LEFT_HIDDEN_EVENTS
                  const sf_count_t e_pos_diff = (sf_count_t)(int)event.frame();
                  if(pos < (sf_count_t)(int)p_spos)
                  {
                    if(e_pos_diff < 0)
                      offset = -e_pos_diff;
                  }
                  else
                  {
                    offset = pos - (sf_count_t)(int)e_spos;
                  }
#else
                  offset = pos - e_spos;
#endif

                  if(offset < 0)
                    offset = 0;
                  event.seekAudio(offset);
                  }
            }
      }

//---------------------------------------------------------
//   fetchData
//    called from prefetch thread
//---------------------------------------------------------

void WaveTrack::fetchData(unsigned pos, unsigned samples, float** bp, bool doSeek, bool overwrite, int latency_correction)
      {
      WAVETRACK_DEBUG(stderr, "WaveTrack::fetchData %s samples:%u pos:%u overwrite:%d\n",
                      name().toLatin1().constData(), samples, pos, overwrite);

      // reset buffer to zero
      if(overwrite)
        for (int i = 0; i < channels(); ++i)
            memset(bp[i], 0, samples * sizeof(float));

      // Process only if track is not off.
      if(!off())
      {
        const bool use_latency_corr = useLatencyCorrection();
        bool do_overwrite = overwrite;
        PartList* pl = parts();
        unsigned n = samples;
        for (iPart ip = pl->begin(); ip != pl->end(); ++ip) {
              WavePart* part = (WavePart*)(ip->second);
              if (part->mute())
                  continue;

              unsigned p_spos = part->frame();
              unsigned p_epos = p_spos + part->lenFrame();
              if (pos + n < p_spos)
                break;
              if (pos >= p_epos)
                continue;

              EventList& el = part->nonconst_events();
              for (iEvent ie = el.begin(); ie != el.end(); ++ie) {
                    Event& event = ie->second;
                    unsigned e_spos  = event.frame() + p_spos;
                    unsigned nn      = event.lenFrame();
                    unsigned e_epos  = e_spos + nn;

#ifdef ALLOW_LEFT_HIDDEN_EVENTS
                    if ((int64_t)(pos + n) < (int64_t)(int)e_spos)
                      break;
                    if ((int64_t)pos >= (int64_t)(int)e_epos)
                      continue;
#else
                    if (pos + n < e_spos)
                      break;
                    if (pos >= e_epos)
                      continue;
#endif

                    int offset = e_spos - pos;

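                    // A positive offset means the event starts part-way into this
                    //  fetch window, so write further into the destination buffers.
                    // A negative offset means the window starts inside the event,
                    //  so read further into the source instead.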
                    unsigned srcOffset, dstOffset;
                    if (offset > 0) {
                          nn = n - offset;
                          srcOffset = 0;
                          dstOffset = offset;
                          }
                    else {
                          srcOffset = -offset;
                          dstOffset = 0;

                          nn += offset;
                          if (nn > n)
                                nn = n;
                          }

                    if(use_latency_corr)
                    {
                      // Don't bother trying to read anything that comes before sample zero,
                      //  or limiting to zero which would just repeat the same beginning section over.

                      // REMOVE Tim. latency. Added. Comment.
                      // TODO: Change this: Insert blanks and use what we can from the buffer!

                      if(latency_correction > 0 && (unsigned int)latency_correction > srcOffset)
                        continue;
                      // Move the source FORWARD by an amount necessary for latency correction.
                      // latency_correction will be negative for correction.
                      srcOffset -= latency_correction;
                    }

                    float* bpp[channels()];
                    for (int i = 0; i < channels(); ++i)
                          bpp[i] = bp[i] + dstOffset;

                    event.readAudio(srcOffset, bpp, channels(), nn, doSeek, do_overwrite);
                    do_overwrite = false;
                    }
              }
      }

      if(overwrite && MusEGlobal::config.useDenormalBias) {
            // add denormal bias to outdata
            for (int i = 0; i < channels(); ++i)
                  for (unsigned int j = 0; j < samples; ++j)
                      bp[i][j] += MusEGlobal::denormalBias;
            }

      _prefetchFifo.add();
      }

//---------------------------------------------------------
//   write
//---------------------------------------------------------

void WaveTrack::write(int level, Xml& xml) const
      {
      xml.tag(level++, "wavetrack");
      AudioTrack::writeProperties(level, xml);
      const PartList* pl = cparts();
      for (ciPart p = pl->begin(); p != pl->end(); ++p)
            p->second->write(level, xml);
      xml.etag(level, "wavetrack");
      }

//---------------------------------------------------------
//   read
//---------------------------------------------------------

void WaveTrack::read(Xml& xml)
      {
      for (;;) {
            Xml::Token token = xml.parse();
            const QString& tag = xml.s1();
            switch (token) {
                  case Xml::Error:
                  case Xml::End:
                        goto out_of_WaveTrackRead_forloop;
                  case Xml::TagStart:
                        if (tag == "part") {
                              Part* p = 0;
                              p = Part::readFromXml(xml, this);
                              if(p)
                                parts()->add(p);
                              }
                        else if (AudioTrack::readProperties(xml, tag))
                              xml.unknown("WaveTrack");
                        break;
                  case Xml::Attribut:
                        break;
                  case Xml::TagEnd:
                        if (tag == "wavetrack") {
                              mapRackPluginsToControllers();
                              goto out_of_WaveTrackRead_forloop;
                              }
                  default:
                        break;
                  }
            }
out_of_WaveTrackRead_forloop:
      chainTrackParts(this);
      }

//---------------------------------------------------------
//   newPart
//---------------------------------------------------------

Part* WaveTrack::newPart(Part* p, bool clone)
      {
      WavePart* part;
      if(!p)
      {
        part = new WavePart(this);
      }
      else
      {
        part = clone ? (WavePart*)p->createNewClone() : (WavePart*)p->duplicate();
        part->setTrack(this);
      }
      return part;
      }

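//---------------------------------------------------------
//   openAllParts
//    Open the audio files of all events in all parts.
//    Returns true if anything was opened.
//---------------------------------------------------------
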
bool WaveTrack::openAllParts()
{
  bool opened = false;
  const PartList* pl = parts();
  for(ciPart ip = pl->begin(); ip != pl->end(); ++ip)
  {
    if(ip->second->openAllEvents())
      opened = true;
  }
  return opened;
}

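//---------------------------------------------------------
//   closeAllParts
//    Close the audio files of all events in all parts.
//    Returns true if anything was closed.
//---------------------------------------------------------
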
bool WaveTrack::closeAllParts()
{
  bool closed = false;
  const PartList* pl = parts();
  for(ciPart ip = pl->begin(); ip != pl->end(); ++ip)
  {
    if(ip->second->closeAllEvents())
      closed = true;
  }
  return closed;
}

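//---------------------------------------------------------
//   getPrefetchData
//    Fill the given buffers with prefetched (or, when freewheeling,
//    directly read) file data. Returns true if there is any data.
//---------------------------------------------------------
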
bool WaveTrack::getPrefetchData(
     sf_count_t framePos, int dstChannels, sf_count_t nframe, float** bp, bool do_overwrite)
{
  const bool use_latency_corr = useLatencyCorrection();

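  // Temporary per-channel buffer pointers for fetching or peeking prefetch data.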
  float* pf_buf[dstChannels];

  int i_correction = 0;
  if(use_latency_corr)
  {
    const TrackLatencyInfo& li = getLatencyInfo(false);
    const float correction = li._sourceCorrectionValue;
    i_correction = correction;
  }

  // First, fetch any track-wide pre-mixed data, such as straight unmodified data
  //  or from prefetch-based samplerate converters, but NOT stretchers or pitch shifters -
  //  those belong POST-prefetch, below, so they can have fast response to changes.
  if(MusEGlobal::audio->freewheel())
  {
    // when freewheeling, read data direct from file:
    if(isMute())
    {
      // We are muted. We need to let the fetching progress, but discard the data.
      for(int i = 0; i < dstChannels; ++i)
        // Set to the audio dummy buffer.
        pf_buf[i] = audioOutDummyBuf;
      // Indicate do not seek file before each read.
      fetchData(framePos, nframe, pf_buf, false, do_overwrite, i_correction);
      return false;
    }
    else
    {
      // Not muted. Fetch the data into the given buffers.
      // Indicate do not seek file before each read.
      fetchData(framePos, nframe, bp, false, do_overwrite, i_correction);
      // We have data.
      return true;
    }
  }
  else
  {
    bool ret_val = false;
    MuseCount_t pos;
    if(_prefetchFifo.peek(dstChannels, nframe, pf_buf, &pos))
    {
      fprintf(stderr, "WaveTrack::getPrefetchData(%s) (prefetch peek A) fifo underrun\n", name().toLocal8Bit().constData());
      return false;
    }

    //fprintf(stderr, "WaveTrack::getData(%s) (prefetch peek A) pos:%d\n", name().toLocal8Bit().constData(), pos);

    const int64_t frame_pos          = framePos;
    const int64_t corr_frame_pos     = framePos - i_correction;
    const int64_t corr_frame_end_pos = framePos - i_correction + nframe;

    WAVETRACK_DEBUG_TRANSPORT_SYNC(stderr,
      "WaveTrack::getPrefetchData: framePos:%ld i_correction:%d nframe:%ld corr_frame_pos:%ld corr_frame_end_pos:%ld pos:%ld\n",
      framePos, i_correction, nframe, corr_frame_pos, corr_frame_end_pos, pos);

    // Do we need to RETARD, or ADVANCE, the stream?
    if(corr_frame_end_pos <= pos)
    {
      WAVETRACK_DEBUG_TRANSPORT_SYNC(stderr, " RETARDING: corr_frame_end_pos <= pos\n");
      // Allow the stream to RETARD. (That is, let our requested frame catch up to the stream.)
      return false;
    }
    else
    {
      // Allow the stream to ADVANCE if necessary. (That is, let the stream catch up to our requested frame.)
      while(corr_frame_pos >= pos + nframe)
      {
        WAVETRACK_DEBUG_TRANSPORT_SYNC(stderr, " ADVANCING: corr_frame_pos >= pos + nframe\n");

        // Done with buffer, remove it.
        _prefetchFifo.remove();

        if(_prefetchFifo.peek(dstChannels, nframe, pf_buf, &pos))
        {
          fprintf(stderr, "WaveTrack::getPrefetchData(%s) (prefetch peek B) fifo underrun\n", name().toLocal8Bit().constData());
          return false;
        }

        if(corr_frame_end_pos <= pos)
        {
          if(MusEGlobal::debugMsg)
            fprintf(stderr, "fifo get(%s) (A) error expected %ld, got %ld\n", name().toLocal8Bit().constData(), frame_pos, pos);
          return false;
        }
      }
    }

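    // The (latency-corrected) request begins at or before the peeked buffer's position.
    // Skip 'blanks' frames at the start of the output, then copy or mix the buffer after them.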
    if(corr_frame_pos <= pos)
    {
      if(!isMute())
      {
        const unsigned blanks = pos - corr_frame_pos;
        const unsigned buf2_frames = nframe - blanks;
        if(do_overwrite)
        {
          if(blanks != 0)
          {
            for(int i = 0; i < dstChannels; ++i)
              AL::dsp->clear(bp[i], blanks, MusEGlobal::config.useDenormalBias);
          }
          for(int i = 0; i < dstChannels; ++i)
            AL::dsp->cpy(bp[i] + blanks, pf_buf[i], buf2_frames, MusEGlobal::config.useDenormalBias);
        }
        else
        {
          for(int i = 0; i < dstChannels; ++i)
            AL::dsp->mix(bp[i] + blanks, pf_buf[i], buf2_frames);
        }
        // We have data.
        ret_val = true;
      }
      // If the entire buffer was used, we are done with it.
      if(corr_frame_pos == pos)
      {
        // Done with buffer, remove it.
        _prefetchFifo.remove();
      }
    }
    else
    {
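      // The request begins inside the peeked buffer: use the tail of this
      //  buffer first, then the head of the next buffer for the remainder.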
      // This will always be at least 1, ie. corr_frame_pos > pos.
      const unsigned buf1_pos = corr_frame_pos - pos;
      const unsigned buf1_frames = nframe - buf1_pos;
      const unsigned buf2_pos = buf1_frames;
      const unsigned buf2_frames = buf1_pos;
      if(!isMute())
      {
        if(do_overwrite)
        {
          for(int i = 0; i < dstChannels; ++i)
            AL::dsp->cpy(bp[i], pf_buf[i] + buf1_pos, buf1_frames, MusEGlobal::config.useDenormalBias);
        }
        else
        {
          for(int i = 0; i < dstChannels; ++i)
            AL::dsp->mix(bp[i], pf_buf[i] + buf1_pos, buf1_frames);
        }
      }

      // Done with buffer, remove it.
      _prefetchFifo.remove();

      // We are expecting the next buffer.
      const MuseCount_t expect_nextpos = pos + nframe;

      // Peek the next buffer but do not remove it,
      //  since the rest of it will be required next cycle.
      if(_prefetchFifo.peek(dstChannels, nframe, pf_buf, &pos))
      {
        fprintf(stderr, "WaveTrack::getPrefetchData(%s) (prefetch peek C) fifo underrun\n", name().toLocal8Bit().constData());
        return false;
      }

      if(pos != expect_nextpos)
      {
        if(MusEGlobal::debugMsg)
          fprintf(stderr, "fifo get(%s) (B) error expected %ld, got %ld\n", name().toLocal8Bit().constData(), expect_nextpos, pos);
        return false;
      }

      if(!isMute())
      {
        if(do_overwrite)
        {
          for(int i = 0; i < dstChannels; ++i)
            AL::dsp->cpy(bp[i] + buf2_pos, pf_buf[i], buf2_frames, MusEGlobal::config.useDenormalBias);
        }
        else
        {
          for(int i = 0; i < dstChannels; ++i)
            AL::dsp->mix(bp[i] + buf2_pos, pf_buf[i], buf2_frames);
        }
        // We have data.
        ret_val = true;
      }
    }

    return ret_val;
  }
}

//---------------------------------------------------------
//   getInputData
//    return false if no data available
//---------------------------------------------------------

bool WaveTrack::getInputData(unsigned pos, int channels, unsigned nframes,
                             bool* usedInChannelArray, float** buffer)
      {
      // use supplied buffers
      const RouteList* rl = inRoutes();
      const bool use_latency_corr = useLatencyCorrection();

      #ifdef NODE_DEBUG_PROCESS
      fprintf(stderr, "AudioTrack::getData name:%s channels:%d inRoutes:%d\n", name().toLatin1().constData(), channels, int(rl->size()));
      #endif

      int dst_ch, dst_chs, src_ch, src_chs, fin_dst_chs, next_chan, i;
      unsigned long int l;

      bool have_data = false;

      for (ciRoute ir = rl->begin(); ir != rl->end(); ++ir) {
            if(ir->type != Route::TRACK_ROUTE || !ir->track || ir->track->isMidiTrack())
              continue;

            // Only this track knows how many destination channels there are,
            //  while only the route track knows how many source channels there are.
            // So take care of the destination channels here, and let the route track handle the source channels.
            dst_ch = ir->channel <= -1 ? 0 : ir->channel;
            if(dst_ch >= channels)
              continue;
            dst_chs = ir->channels <= -1 ? channels : ir->channels;
            src_ch = ir->remoteChannel <= -1 ? 0 : ir->remoteChannel;
            src_chs = ir->channels;

            fin_dst_chs = dst_chs;
            if(dst_ch + fin_dst_chs > channels)
              fin_dst_chs = channels - dst_ch;

            #ifdef NODE_DEBUG_PROCESS
            fprintf(stderr, "    calling copy/addData on %s dst_ch:%d dst_chs:%d fin_dst_chs:%d src_ch:%d src_chs:%d ...\n",
                    ir->track->name().toLatin1().constData(),
                    dst_ch, dst_chs, fin_dst_chs,
                    src_ch, src_chs);
            #endif

            static_cast<AudioTrack*>(ir->track)->copyData(pos,
                                                          dst_ch, dst_chs, fin_dst_chs,
                                                          src_ch, src_chs,
                                                          nframes, buffer,
                                                          false, use_latency_corr ? nullptr : usedInChannelArray);


            // Prepare the latency value to be passed to the compensator's writer,
            //  by adjusting each route latency value. ie. the route with the worst-case
            //  latency will get ZERO delay, while routes having smaller latency will get
            //  MORE delay, to match all the signal timings together.
            // The route's audioLatencyOut should have already been calculated and
            //  conveniently stored in the route.
            if((long int)ir->audioLatencyOut < 0)
              l = 0;
            else
              l = ir->audioLatencyOut;

#ifdef NODE_DEBUG_TERMINAL_PEAK_METERS
            if(MusEGlobal::audio->isPlaying())
            {
              fprintf(stderr, "WaveTrack::getInputData() name:%s ir->latency:%lu latencyCompWriteOffset:%lu total:%lu\n",
                      name().toLatin1().constData(), l, latencyCompWriteOffset(), l + latencyCompWriteOffset());
              for(int ch = 0; ch < channels; ++ch)
              {
                fprintf(stderr, "channel:%d peak:", ch);
                float val;
                float peak = 0.0f;
                const float* buf = buffer[ch];
                for(unsigned int smp = 0; smp < nframes; ++smp)
                {
                  val = buf[smp];
                  if(val > peak)
                    peak = val;
                }
                const int dots = peak * 20;
                for(int d = 0; d < dots; ++d)
                  fprintf(stderr, "*");
                fprintf(stderr, "\n");
              }
            }
#endif

            next_chan = dst_ch + fin_dst_chs;
            for(i = dst_ch; i < next_chan; ++i)
            {
              if(use_latency_corr)
              {
                // Write the buffers to the latency compensator.
                // By now, each copied channel should have the same latency.
                _latencyComp->write(i, nframes, l + latencyCompWriteOffset(), buffer[i]);
              }
              usedInChannelArray[i] = true;
            }
            have_data = true;
            }

      return have_data;
      }

//---------------------------------------------------------
//   getData
//---------------------------------------------------------

bool WaveTrack::getData(unsigned framePos, int dstChannels, unsigned nframe, float** bp)
{
  bool have_data = false;

  const bool track_rec_flag = recordFlag();
  const bool track_rec_monitor = recMonitor();        // Separate monitor and record functions.
  const bool is_playing = MusEGlobal::audio->isPlaying();
  const bool use_latency_corr = useLatencyCorrection();

  //---------------------------------------------
  // Note that the supplied buffers (bp) are at first
  //  used as temporary storage but are later written
  //  with final data. The reading and writing of fifo
  //  file data wants linear memory whereas the latency
  //  compensator uses wrap-around memory.
  //---------------------------------------------

  //---------------------------------------------
  // Contributions to data from input sources:
  //---------------------------------------------

  // Gather input data from connected routes.
  if((MusEGlobal::song->bounceTrack != this) && !noInRoute())
  {
    bool used_in_chan_array[dstChannels];
    for(int i = 0; i < dstChannels; ++i)
      used_in_chan_array[i] = false;

    // The data retrieved by this will already be latency compensated.
    have_data = getInputData(framePos, dstChannels, nframe, used_in_chan_array, bp);

    // Do we want to record the incoming data?
    if(have_data && track_rec_flag &&
      (MusEGlobal::audio->isRecording() ||
       (MusEGlobal::song->record() && MusEGlobal::extSyncFlag && MusEGlobal::midiSyncContainer.isPlaying())) &&
      recFile())
    {
      if(MusEGlobal::audio->freewheel())
      {
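        // Nothing to do here while freewheeling.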
      }
      else
      {
        for(int i = 0; i < dstChannels; ++i)
        {
          if(used_in_chan_array[i])
          {
            // Read back the latency compensated signals, using the buffers in-place.
            if(use_latency_corr)
              _latencyComp->peek(i, nframe, bp[i]);
          }
          else
          {
            // Fill unused channels with silence.
            // Channel is unused. Zero the supplied buffer.
            // REMOVE Tim. latency. Added. Maybe not required. The latency compensator already automatically clears to zero.
            AL::dsp->clear(bp[i], nframe, MusEGlobal::config.useDenormalBias);
          }
        }

        //fprintf(stderr, "WaveTrack::getData: name:%s RECORD: Putting to fifo: framePos:%d audio pos frame:%d\n",
        //        name().toLatin1().constData(),
        //        framePos, MusEGlobal::audio->pos().frame());

        // This will adjust for the latency before putting.
        putFifo(dstChannels, nframe, bp);
      }
    }

    // Advance any peeked compensator channels now.
    if(use_latency_corr)
      _latencyComp->advance(nframe);
  }

  //---------------------------------------------
  // Contributions to data from playback sources:
  //---------------------------------------------

  if(!is_playing)
  {
    if(!have_data || (track_rec_monitor && have_data))
      return have_data;
    return false;
  }

  // If there is no input source data or we do not want to monitor it,
  //  overwrite the supplied buffers rather than mixing with them.
  const bool do_overwrite = !have_data || !track_rec_monitor;

  // Set the return value.
  // We only "have data" if we want to monitor it.
  have_data = track_rec_monitor && have_data;

  const bool have_pf_data = getPrefetchData(framePos, dstChannels, nframe, bp, do_overwrite);
  return have_data || have_pf_data;
}

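//---------------------------------------------------------
//   canDominateOutputLatency
//---------------------------------------------------------
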
inline bool WaveTrack::canDominateOutputLatency() const
{
  // The wave track's own wave file contributions can never dominate latency.
  return false;
}

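//---------------------------------------------------------
//   canCorrectOutputLatency
//---------------------------------------------------------
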
inline bool WaveTrack::canCorrectOutputLatency() const
{
  return true;
}

//---------------------------------------------------------
//   setChannels
//---------------------------------------------------------

void WaveTrack::setChannels(int n)
      {
      AudioTrack::setChannels(n);
      SndFileR sf = recFile();
      if (sf) {
            if (sf->samples() == 0) {
                  sf->remove();
                  sf->setFormat(sf->format(), channels(), sf->samplerate());
                  sf->openWrite();
                  }
            }
      }

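//---------------------------------------------------------
//   clearPrefetchFifo
//    Clear the track's prefetch fifo and any per-event prefetch fifos.
//---------------------------------------------------------
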
void WaveTrack::clearPrefetchFifo()
{
  _prefetchFifo.clear();

  PartList* pl = parts();
  for(iPart ip = pl->begin(); ip != pl->end(); ++ip)
  {
    Part* part = ip->second;
    EventList& el = part->nonconst_events();
    for(iEvent ie = el.begin(); ie != el.end(); ++ie)
    {
      Event& e = ie->second;
      if(e.audioPrefetchFifo())
        e.audioPrefetchFifo()->clear();
    }
  }
}

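//---------------------------------------------------------
//   prefetchAudio
//    Ask each un-muted part's events to prefetch the given number of frames.
//---------------------------------------------------------
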
void WaveTrack::prefetchAudio(sf_count_t /*writePos*/, sf_count_t frames)
{
  if(off())
    return;

  PartList* pl = parts();
  for(iPart ip = pl->begin(); ip != pl->end(); ++ip)
  {
    Part* part = ip->second;
    if(part->mute())
      continue;

    EventList& el = part->nonconst_events();
    for(iEvent ie = el.begin(); ie != el.end(); ++ie)
    {
      Event& e = ie->second;

      if(!e.audioPrefetchFifo())
        continue;

      e.prefetchAudio(part, frames);
    }
  }
}

} // namespace MusECore