If the capture buffer is used, refresh the waveform when the GUI has been closed and reopened.
commit e411a99f70
parent f872a13257
@@ -65,6 +65,12 @@ public:
 		seek(m_activerange.getStart());
 		updateXFadeCache();
 	}
+	virtual AudioBuffer<float>* getAudioBuffer() override
+	{
+		if (m_using_memory_buffer)
+			return &m_readbuf;
+		return nullptr;
+	}
 	bool openAudioFile(File file) override
 	{
 		m_silenceoutputted = 0;
@@ -79,7 +79,7 @@ public:
 	{
 		return m_currentsample >= info.nsamples*m_activerange.getEnd();
 	}
-
+	virtual AudioBuffer<float>* getAudioBuffer()=0;
 protected:
 	volatile int64_t m_currentsample = 0;
 	int m_silenceoutputted = 0;
@@ -44,6 +44,13 @@ void StretchAudioSource::releaseResources()
 
 }
 
+AudioBuffer<float>* StretchAudioSource::getSourceAudioBuffer()
+{
+	if (m_inputfile==nullptr)
+		return nullptr;
+	return m_inputfile->getAudioBuffer();
+}
+
 bool StretchAudioSource::isResampling()
 {
 	if (m_inputfile==nullptr || m_inputfile->info.samplerate==0)
@@ -51,6 +51,8 @@ public:
 	String setAudioFile(File file);
 	File getAudioFile();
 	
+	AudioBuffer<float>* getSourceAudioBuffer();
+	
 	void setNumOutChannels(int chans);
 	int getNumOutChannels() { return m_num_outchans; }
 	double getInfilePositionPercent();
@@ -111,6 +111,13 @@ void PaulstretchpluginAudioProcessorEditor::timerCallback(int id)
 	{
 		m_wavecomponent.setAudioFile(processor.getAudioFile());
 	}
+	if (processor.getAudioFile()==File() && processor.isRecordingEnabled()==false)
+	{
+		auto bufptr = processor.getStretchSource()->getSourceAudioBuffer();
+		if (bufptr!=nullptr)
+			m_wavecomponent.setAudioBuffer(bufptr,
+				processor.getSampleRate(), bufptr->getNumSamples());
+	}
 	m_wavecomponent.setTimeSelection(processor.getTimeSelection());
 	
 }
@@ -297,7 +304,8 @@ void WaveformComponent::setAudioFile(File f)
 
 void WaveformComponent::setAudioBuffer(AudioBuffer<float>* buf, int samplerate, int len)
 {
-	m_waveimage = Image();
+	jassert(buf!=nullptr);
+	m_waveimage = Image();
 	m_curfile = File();
 	m_thumb->reset(buf->getNumChannels(), samplerate, len);
 	m_thumb->addBlock(0, *buf, 0, len);
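For context, here is a minimal sketch of how the new capture-buffer refresh path fits together. The names WaveformView and refreshWaveformFromCapture are hypothetical stand-ins for the plugin's WaveformComponent and the editor's timerCallback logic; only the JUCE calls (jassert, AudioThumbnail::reset/addBlock) mirror what the diff actually uses. It is an illustration under those assumptions, not the committed code.

// Sketch only: stand-in types, not the plugin's real classes.
#include <JuceHeader.h>

struct WaveformView
{
    // Mirrors WaveformComponent::setAudioBuffer from the diff: feed a raw
    // AudioBuffer into an AudioThumbnail instead of reading from a file.
    void setAudioBuffer (juce::AudioBuffer<float>* buf, int samplerate, int len)
    {
        jassert (buf != nullptr);                        // guard added by this commit
        thumb.reset (buf->getNumChannels(), samplerate, len);
        thumb.addBlock (0, *buf, 0, len);
    }

    juce::AudioFormatManager formatManager;
    juce::AudioThumbnailCache cache { 10 };
    juce::AudioThumbnail thumb { 512, formatManager, cache };
};

// Mirrors the editor's timerCallback addition: when no file is loaded and
// recording is not active, push the capture buffer into the waveform view so
// a reopened GUI shows the captured audio again.
void refreshWaveformFromCapture (WaveformView& wave,
                                 juce::AudioBuffer<float>* captureBuffer,
                                 double sampleRate)
{
    if (captureBuffer != nullptr)
        wave.setAudioBuffer (captureBuffer, (int) sampleRate,
                             captureBuffer->getNumSamples());
}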