For my current project I'm hoping, at some point, to be able to essentially mirror what the Scope displays on an OLED display. For example, below is a simple signal generator project that generates various waveforms and sends them to the scope for viewing.
#include <Bela.h>
#include <cmath>
#include <libraries/Scope/Scope.h>
float out;
float gFrequency;
float gAmplitude;
int mode;
// Analog inputs
float gPot1;
float gPot2;
// Digital inputs
int gButton = 1;
int gLastButtonStatus = HIGH;
std::vector<int> gNum = {0, 1, 2, 3}; // vector to cycle through that chooses the waveform type
unsigned int gNumLocation = 0; // index into the vector
Scope scope;
float gPhase;
float gInverseSampleRate;
int gAudioFramesPerAnalogFrame = 0;
// define sign function for square wave from https://helloacm.com/how-to-implement-the-sgn-function-in-c/
int sgn(double v) {
	return (v < 0) ? -1 : (v > 0);
}
// signal generation, based on bela's basic.cpp and using definitions from wikipedia
void signal(float gFrequency, float gAmplitude, int mode)
{
	switch(mode) {
		case 0:
			out = gAmplitude * sinf(gPhase); // sine
			break;
		case 1:
			out = gAmplitude * asinf(sinf(gPhase)); // triangle
			break;
		case 2:
			out = gAmplitude * sgn(sinf(gPhase)); // square
			break;
		case 3:
			out = gAmplitude * atanf(tanf(gPhase / 2)); // sawtooth
			break;
	}
	gPhase += 2.0 * M_PI * gFrequency * gInverseSampleRate;
	if(gPhase > 2.0 * M_PI)
		gPhase -= 2.0 * M_PI;
}
bool setup(BelaContext *context, void *userData)
{
	// set up the oscilloscope with one channel
	scope.setup(1, context->audioSampleRate);
	gInverseSampleRate = 1.0 / context->audioSampleRate;
	gPhase = 0.0;
	pinMode(context, 0, gButton, INPUT); // set the button pin as an input
	// check that audio and digital are running at the same rate
	if(context->audioFrames != context->digitalFrames) {
		rt_fprintf(stderr, "This example needs audio and digital running at the same rate.\n");
		return false;
	}
	// calculate the number of audio frames per analog frame
	if(context->analogFrames)
		gAudioFramesPerAnalogFrame = context->audioFrames / context->analogFrames;
	// initial signal
	signal(440.0, 0.1, 0);
	return true;
}
void render(BelaContext *context, void *userData)
{
	for(unsigned int n = 0; n < context->audioFrames; n++) {
		// read the analog inputs once per analog frame (and only if the analog
		// channels are enabled) and update frequency and amplitude
		if(gAudioFramesPerAnalogFrame && !(n % gAudioFramesPerAnalogFrame)) {
			gPot1 = analogRead(context, n / gAudioFramesPerAnalogFrame, 0);
			gPot2 = analogRead(context, n / gAudioFramesPerAnalogFrame, 1);
		}
		gFrequency = map(gPot1, 0, 0.8, 100, 1000);
		gAmplitude = map(gPot2, 0, 0.8, 0.02, 0.5);
		// detect a falling edge of the button and cycle through the waveform types
		int status = digitalRead(context, n, gButton);
		if(status == LOW && gLastButtonStatus == HIGH) {
			gNumLocation++;
			if(gNumLocation >= gNum.size()) {
				gNumLocation = 0;
			}
		}
		gLastButtonStatus = status;
		mode = gNum[gNumLocation];
		// generate the next sample, log it to the scope and write it to every audio output
		signal(gFrequency, gAmplitude, mode);
		scope.log(out);
		for(unsigned int channel = 0; channel < context->audioOutChannels; channel++) {
			audioWrite(context, n, channel, out);
		}
	}
}
void cleanup(BelaContext *context, void *userData)
{
}
In this case, I'd want to use the OLED display to show the signal being generated. Would this sort of thing be possible? I'm very new to using an external screen; so far I've only attempted the example O2O project [https://github.com/giuliomoro/O2O], so a lot of the existing discussions about OLED screens on this forum go over my head, I'm afraid.
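To make the question a bit more concrete, here is roughly how I imagine it could work, pieced together from what I understand of O2O: collect one screenful of samples in render(), then hand them to an auxiliary task that draws them outside the audio thread. This is only a sketch, not working code — kDisplayWidth, kDisplayHeight, the decimation factor and the drawWaveform() task are names I've made up, and the actual drawing calls (u8g2 in O2O, I believe) are left as comments because I don't know that part of the API yet.

#include <Bela.h>

// one sample per display column, e.g. 128x64 on an SSD1306-style OLED (my assumption)
const unsigned int kDisplayWidth = 128;
const unsigned int kDisplayHeight = 64;
float gDisplayBuffer[kDisplayWidth]; // a real version would probably need double-buffering
unsigned int gDisplayBufferIndex = 0;
unsigned int gDecimateCounter = 0;
AuxiliaryTask gDrawTask;

// runs outside the audio thread: turn the buffered samples into pixels.
// the actual drawing depends on the display library (u8g2 in O2O), so it is
// only indicated here as comments
void drawWaveform(void*)
{
	for(unsigned int x = 0; x < kDisplayWidth; ++x) {
		// map a sample in [-1, 1] to a pixel row, with 0 at the top of the screen
		int y = (int)((1.0f - gDisplayBuffer[x]) * 0.5f * (kDisplayHeight - 1));
		// e.g. with u8g2: u8g2.drawPixel(x, y);
		(void)y;
	}
	// e.g. with u8g2: u8g2.sendBuffer();
}

bool setup(BelaContext *context, void *userData)
{
	// create a lower-priority task so drawing never blocks the audio callback
	gDrawTask = Bela_createAuxiliaryTask(drawWaveform, 50, "draw-waveform");
	return true;
}

void render(BelaContext *context, void *userData)
{
	for(unsigned int n = 0; n < context->audioFrames; n++) {
		float out = 0; // ...the generated sample, as in the code above
		// keep every 8th sample so one buffer spans a few cycles of the waveform
		if(++gDecimateCounter >= 8) {
			gDecimateCounter = 0;
			gDisplayBuffer[gDisplayBufferIndex++] = out;
			if(gDisplayBufferIndex >= kDisplayWidth) {
				gDisplayBufferIndex = 0;
				Bela_scheduleAuxiliaryTask(gDrawTask); // hand off to the drawing task
			}
		}
	}
}

void cleanup(BelaContext *context, void *userData)
{
}

Does that general shape (buffer in render(), draw in an auxiliary task) sound like the right way to go about it, or is there a better pattern for getting scope-style data onto an OLED?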