// NOTE: still in development, so a bit messy
/*
____ _____ _ _
| __ )| ____| | / \
| _ \| _| | | / _ \
| |_) | |___| |___ / ___ \
|____/|_____|_____/_/ \_\
The platform for ultra-low latency audio and sensor processing
http://bela.io
A project of the Augmented Instruments Laboratory within the
Centre for Digital Music at Queen Mary University of London.
http://www.eecs.qmul.ac.uk/~andrewm
(c) 2016 Augmented Instruments Laboratory: Andrew McPherson,
Astrid Bin, Liam Donovan, Christian Heinrichs, Robert Jack,
Giulio Moro, Laurel Pardue, Victor Zappi. All rights reserved.
The Bela software is distributed under the GNU Lesser General Public License
(LGPL 3.0), available here: https://www.gnu.org/licenses/lgpl-3.0.txt
*/
#include <Bela.h>
#include <BelaContextFifo.h>
#include <DigitalChannelManager.h>
#include <Heavy_bela.h>
#include <SampleData.h>
#include <SampleLoader.h>
#include <libraries/Gui/Gui.h>
#include <libraries/Midi/Midi.h>
#include <libraries/Scope/Scope.h>
#include "Lv2Host.h"
#include <cmath>
#include <cstdio>
#include <string.h>
#include <stdlib.h>
#include <algorithm>
#include <array>
#include <sstream>
#include <string>
#include <vector>
Lv2Host gLv2Host;
int gAudioFramesPerAnalogFrame;
int gLedPin = 0;
//float gUpdateInterval = 0.05;
// Configure the Bela core before the audio thread starts.
// Lv2Host needs non-interleaved buffers and a uniform sample rate
// across audio/analog I/O (this is re-checked in setup()).
void Bela_userSettings(BelaInitSettings *settings)
{
	settings->interleave = 0;
	settings->uniformSampleRate = 1;
	settings->analogOutputsPersist = 0;
}
bool heavy_lv2 = 0; // presumably selects heavy vs lv2 processing; unused in this chunk -- TODO confirm
//patch switching switch
#define DEBOUNCE_TIME 200 //how long the button has to be "off" to be considered off
// current/previous states and debounce counters for the two buttons
// (read in render() from digital pins 4 and 3 respectively)
bool cState_b0 = 0;
bool pState_b0 = 1;
int debounce_b0 = 0;
bool cState_b1 = 0;
bool pState_b1 = 1;
int debounce_b1 = 0;
// currently selected effect routing (0..2, cycled by button 0) and the
// last routing applied (used to refresh the LEDs only on change)
int fx_nr = 0;
int old_fx = 1;
//scales to choose
// 12-element semitone masks starting at C: 1 = note enabled in the
// autotune's scale (written to plugin 1, ports 12..23 in longRender())
bool chromatic[12] {1,1,1,1,1,1,1,1,1,1,1,1,};
bool major[12] {1,0,1,0,1,1,0,1,0,1,0,1,};
bool minor[12] {1,0,1,1,0,1,0,1,1,0,0,1,}; // harmonic minor shape
bool penta[12] {1,0,1,0,1,0,0,1,0,1,0,0,};
bool whole[12] {1,0,1,0,1,0,1,0,1,0,1,0,};
bool dim[12] {1,0,0,1,0,0,1,0,0,1,0,0,};
// feature toggles flipped by MIDI notes 20-23 in longRender()
bool octave = 0;
bool powercut = 0;
bool echo = 0;
//pointer to choosen scale
bool *scale = chromatic;
// last MIDI note-on / control-change received (written in render(),
// consumed in longRender() on the long-block thread)
bool gIsNoteOn = 0;
int gVelocity = 0;
int gNote = 0;
int gControl = 0;
int oldControl = 0;
int gCCVal = 0;
int choose_fx = 0;
// tap-tempo bookkeeping (the tap-tempo feature itself is commented out)
int tap_count = 0;
int frame_count = 0;
bool vocoder = 1;
// fifo decoupling the audio thread from the long-block LV2 processing
BelaContextFifo gBcf;
double gBlockDurationMs; // duration of one long block, in ms
enum { minFirstDigitalChannel = 10 };
// channel bookkeeping for the Heavy patch I/O layout
// [audio | analog | digital | scope] -- all computed in setup()
static unsigned int gAudioChannelsInUse;
static unsigned int gAnalogChannelsInUse;
static unsigned int gDigitalChannelsInUse;
unsigned int gScopeChannelsInUse;
static unsigned int gChannelsInUse;
static unsigned int gFirstAnalogChannel;
static unsigned int gFirstDigitalChannel;
static unsigned int gDigitalChannelOffset;
static unsigned int gFirstScopeChannel;
static unsigned int gDigitalSigInChannelsInUse;
static unsigned int gDigitalSigOutChannelsInUse;
float* gScopeOut; // per-block scratch for the scope channels
// Bela Scope
static Scope* scope = NULL;
static char multiplexerArray[] = {"bela_multiplexer"}; // Heavy table name for the analog multiplexer
static int multiplexerArraySize = 0;
static bool pdMultiplexerActive = false;
bool gDigitalEnabled = 0;
// Bela Midi
unsigned int hvMidiHashes[7]; // heavy-specific receiver hashes, indexed by kmm* message type
static std::vector<Midi*> midi; // open MIDI devices, parallel to gMidiPortNames
std::vector<std::string> gMidiPortNames;
/*
 * Process one "long" (fifo-sized) block on the auxiliary thread (see
 * longThread()): reconfigure the LV2 plugin graph according to the
 * selected effect mode (fx_nr), run the chain, and apply any pending
 * MIDI control-change / note-on commands. `arg` is unused.
 *
 * Plugin indices (order of addition in setup()):
 * 0 noisegate, 1 autotune, 2 talkbox, 3 octaver, 4 subsynth,
 * 5 compressor, 6 detune, 7 stereo, 8 delay, 9 reverb.
 */
void longRender(BelaContext* context, void* arg)
{
	if (fx_nr == 1) {
		// autotune path: autotune -> octaver -> subsynth -> detune
		gLv2Host.disconnect(3,0);
		gLv2Host.disconnect(5,0);
		gLv2Host.bypass(1,0);
		gLv2Host.bypass(3,0);
		gLv2Host.bypass(4,0);
		gLv2Host.bypass(2,1);
		gLv2Host.bypass(5,1);
		gLv2Host.connect(1,0,3,0);
		gLv2Host.connect(3,0,4,0);
		gLv2Host.connect(4,0,6,0);
	} else if (fx_nr == 2) {
		// vocoder path: voice + carrier -> talkbox -> compressor -> detune
		gLv2Host.disconnect(3,0);
		gLv2Host.disconnect(5,0);
		gLv2Host.disconnect(6,0);
		gLv2Host.bypass(1,1);
		gLv2Host.bypass(3,1);
		gLv2Host.bypass(4,1);
		gLv2Host.bypass(5,0);
		gLv2Host.bypass(2,0);
		gLv2Host.connect(-1,1,2,1);
		gLv2Host.connect(0,0,2,0);
		gLv2Host.connect(2,0,5,0);
		gLv2Host.connect(5,0,6,0);
	}
	// Tap-tempo timeout: forget accumulated taps when they stop coming.
	frame_count++;
	if (frame_count > 1000 * tap_count)
		tap_count = 0;
	// Build per-channel pointer tables (buffers are non-interleaved, see
	// Bela_userSettings) and run the LV2 chain in place.
	const float* inputs[context->audioInChannels];
	float* outputs[context->audioOutChannels];
	for(unsigned int ch = 0; ch < context->audioInChannels; ++ch)
		inputs[ch] = (float*)&context->audioIn[context->audioFrames * ch];
	for(unsigned int ch = 0; ch < context->audioOutChannels; ++ch)
		outputs[ch] = &context->audioOut[context->audioFrames * ch];
	gLv2Host.render(context->audioFrames, inputs, outputs);
	// Forward the most recent control change to the mapped plugin port.
	// NOTE(review): gControl is never cleared, so the last CC value is
	// re-applied every block -- confirm this is intended.
	switch (gControl) {
	case 7: // talkbox port 3 (quality), 0..1
		gLv2Host.setPort(2, 3, float(gCCVal/127.0));
		break;
	case 8: // subsynth port 0, quantized to 4 steps
		gLv2Host.setPort(4, 0, float(gCCVal>>5)/4.0);
		break;
	case 9: // delay port 6, mapped to 1..16
		gLv2Host.setPort(8, 6, (gCCVal/8) + 1);
		break;
	case 10: // reverb port 2, 0..1
		gLv2Host.setPort(9, 2, float(gCCVal/ 127.0));
		break;
	}
	if(gIsNoteOn == 1){
		// Write the autotune's 12 note-enable ports (12..23) from the
		// current scale mask, rotated up by `offset` semitones (the key).
		auto applyScale = [](unsigned int offset) {
			for(unsigned int n = 0; n < 12; n++)
				gLv2Host.setPort(1, (n + offset) % 12 + 12, scale[n]);
		};
		// whole-tone/diminished are only reachable via specific keys below;
		// reset to chromatic first. (BUGFIX: was bitwise `|`, now logical `||`;
		// same result on bools but this is the intended operator.)
		if ((scale == whole) || (scale == dim))
			scale = chromatic;
		switch (gNote) {
		case 20: { // toggle delay send (plugin 8, port 10)
			gLv2Host.setPort(8, 10, echo ? 0 : 0.3);
			echo = !echo;
			break;
		}
		case 21: { // toggle vocoder flag (tap-tempo prototype removed)
			vocoder = !vocoder;
			break;
		}
		case 22: { // toggle octaver voices (plugin 3, ports 3 and 4)
			gLv2Host.setPort(3, 3, octave ? 0 : 1);
			gLv2Host.setPort(3, 4, octave ? 0 : 1);
			octave = !octave;
			break;
		}
		case 23: { // toggle subsynth (plugin 4, port 2)
			gLv2Host.setPort(4, 2, powercut ? 0 : 1);
			powercut = !powercut;
			break;
		}
		// Pads 36..51: most select the key (offset in semitones applied to
		// the current scale mask); 39/43/47/51 instead select the scale type.
		case 36: applyScale(0); break; // C
		case 37: if (scale == chromatic) scale = whole; applyScale(1); break; // C#
		case 38: if (scale == chromatic) scale = whole; applyScale(2); break; // D
		case 39: scale = chromatic; break; // scale select: chromatic
		case 40: if (scale == chromatic) scale = dim; applyScale(3); break; // D#
		case 41: if (scale == chromatic) scale = dim; applyScale(4); break; // E
		case 42: if (scale == chromatic) scale = dim; applyScale(5); break; // F
		case 43: scale = major; break; // scale select: major
		case 44: applyScale(6); break; // F#
		case 45: applyScale(7); break; // G
		case 46: applyScale(8); break; // G#
		case 47: scale = minor; break; // scale select: minor
		case 48: applyScale(9); break; // A
		case 49: applyScale(10); break; // A#
		case 50: applyScale(11); break; // B
		case 51: scale = penta; break; // scale select: pentatonic
		}
		gIsNoteOn = 0; // consume the note event
	}
}
// Auxiliary-thread loop: pop "long" contexts from the fifo, process
// them with longRender(), and hand them back to the audio thread.
void longThread(void*)
{
	while(!gShouldStop)
	{
		// wait up to 100ms for a context from the audio side
		BelaContext* context = gBcf.pop(BelaContextFifo::kToLong, 100);
		if(!context)
		{
			// TODO: this should not be needed, given how the timeout in pop() is for a reasonable amount of time
			usleep(10000);
			continue;
		}
		longRender(context, NULL);
		gBcf.push(BelaContextFifo::kToShort, context);
	}
}
AuxiliaryTask longThreadTask;
void dumpMidi()
{
if(midi.size() == 0)
{
printf("No MIDI device enabled\n");
return;
}
printf("The following MIDI devices are enabled:\n");
printf("%4s%20s %3s %3s %s\n",
"Num",
"Name",
"In",
"Out",
"Pd channels"
);
for(unsigned int n = 0; n < midi.size(); ++n)
{
printf("[%2d]%20s %3s %3s (%d-%d)\n",
n,
gMidiPortNames[n].c_str(),
midi[n]->isInputEnabled() ? "x" : "_",
midi[n]->isOutputEnabled() ? "x" : "_",
n * 16 + 1,
n * 16 + 16
);
}
}
Midi* openMidiDevice(std::string name, bool verboseSuccess = false, bool verboseError = false)
{
Midi* newMidi;
newMidi = new Midi();
newMidi->readFrom(name.c_str());
newMidi->writeTo(name.c_str());
newMidi->enableParser(true);
if(newMidi->isOutputEnabled())
{
if(verboseSuccess)
printf("Opened MIDI device %s as output\n", name.c_str());
}
if(newMidi->isInputEnabled())
{
if(verboseSuccess)
printf("Opened MIDI device %s as input\n", name.c_str());
}
if(!newMidi->isInputEnabled() && !newMidi->isOutputEnabled())
{
if(verboseError)
fprintf(stderr, "Failed to open MIDI device %s\n", name.c_str());
return nullptr;
} else {
return newMidi;
}
}
// Translate a Pd-style MIDI channel (1-16 on device 0, 17-32 on device
// 1, ...) into a device/port index, reducing *channel to that device's
// local 1-16 range. Out-of-range ports fall back to port 0.
static unsigned int getPortChannel(int* channel){
	unsigned int port = 0;
	if(*channel > 16){
		port = (*channel - 1) / 16;
		*channel = (*channel - 1) % 16 + 1;
	}
	if(port >= midi.size()){
		// if the port number exceeds the number of ports available, send out
		// of the first port
		rt_fprintf(stderr, "Port out of range, using port 0 instead\n");
		port = 0;
	}
	return port;
}
/*
* HEAVY CONTEXT & BUFFERS
*/
HeavyContextInterface *gHeavyContext; // the compiled Pd patch, created in setup()
// de-/re-interleaving scratch buffers for the Heavy context, sized in setup()
float *gHvInputBuffers = NULL, *gHvOutputBuffers = NULL;
unsigned int gHvInputChannels = 0, gHvOutputChannels = 0;
uint32_t multiplexerTableHash; // hash of the "bela_multiplexer" table name
float gInverseSampleRate; // 1 / audioSampleRate, precomputed in setup()
/*
* HEAVY FUNCTIONS
*/
// Heavy print hook: forward [print] output from the patch to the
// console, prefixed with the message timestamp in seconds.
void printHook(HeavyContextInterface *context, const char *printLabel, const char *msgString, const HvMessage *msg) {
	double timestampSecs = hv_msg_getTimestamp(msg) / (double)hv_getSampleRate(context);
	rt_printf("Message from Heavy patch: [@ %.3f] %s: %s\n", timestampSecs, printLabel, msgString);
}
// digitals
static DigitalChannelManager dcm;
// DigitalChannelManager callback: forward a digital-input edge to the
// Heavy receiver registered for this channel ("bela_digitalInXX").
// `delay` is part of the callback signature but unused here.
void sendDigitalMessage(bool state, unsigned int delay, void* receiverName){
	char* name = (char*)receiverName;
	hv_sendFloatToReceiver(gHeavyContext, hv_stringToHash(name), state ? 1.0f : 0.0f);
	// rt_printf("%s: %d\n", name, state);
}
std::vector<std::string> gHvDigitalInHashes;
/*
 * Append the receiver names "bela_digitalIn<digitalOffset>" ..
 * "bela_digitalIn<digitalOffset + numDigitals - 1>" to
 * receiverInputNames (existing entries are preserved).
 */
void generateDigitalNames(unsigned int numDigitals, unsigned int digitalOffset, std::vector<std::string>& receiverInputNames)
{
	const std::string inBaseString = "bela_digitalIn";
	// one allocation up front instead of repeated growth
	receiverInputNames.reserve(receiverInputNames.size() + numDigitals);
	for(unsigned int i = 0; i < numDigitals; i++)
	{
		receiverInputNames.push_back(inBaseString + std::to_string(i + digitalOffset));
	}
}
// For a message to be received here, you need to use the following syntax in Pd:
// [send receiverName @hv_param]
// Heavy send hook: receives [send receiverName @hv_param] messages from
// the patch and dispatches them (digital outs, MIDI outs, preset I/O).
static void sendHook(
	HeavyContextInterface *context,
	const char *receiverName,
	hv_uint32_t sendHash,
	const HvMessage *m) {
	// Bela digital run-time messages.
	// Optimized parsing of the built-in digital-out receivers, which are
	// of the form "bela_digitalOutXX" where XX is between 11 and 26.
	static const int prefixLength = 15; // strlen("bela_digitalOut")
	if(strncmp(receiverName, "bela_digitalOut", prefixLength) == 0){
		// the two character tests replace a strlen(receiverName) >= prefixLength + 2 check
		if(receiverName[prefixLength] != 0 && receiverName[prefixLength + 1] != 0){
			// quickly convert the two-digit suffix, assuming it is numeric, avoiding atoi
			int receiver = ((receiverName[prefixLength] - '0') * 10);
			receiver += (receiverName[prefixLength + 1] - '0');
			unsigned int channel = receiver - gDigitalChannelOffset; // go back to the actual Bela digital channel number
			bool value = (hv_msg_getFloat(m, 0) != 0.0f);
			if(channel < gDigitalChannelsInUse){ // also rejects negative channels (unsigned wrap)
				dcm.setValue(channel, value);
			}
		}
		return;
	}
	// More MIDI and digital messages. To obtain the hashes below, use hv_stringToHash("yourString")
	switch (sendHash) {
	case 0xfb212be8: { // bela_setMidi
		if (!hv_msg_hasFormat(m, "sfff")) {
			fprintf(stderr, "Wrong format for Bela_setMidi, expected:[hw 1 0 0(");
			return;
		}
		const char* symbol = hv_msg_getSymbol(m, 0);
		int num[3] = {0, 0, 0};
		for(int n = 0; n < 3; ++n)
		{
			num[n] = hv_msg_getFloat(m, n + 1);
		}
		// build an ALSA-style device name, e.g. "hw:1,0,0"
		std::ostringstream deviceName;
		deviceName << symbol << ":" << num[0] << "," << num[1] << "," << num[2];
		printf("Adding Midi device: %s\n", deviceName.str().c_str());
		Midi* newMidi = openMidiDevice(deviceName.str(), true, true);
		if(newMidi)
		{
			midi.push_back(newMidi);
			gMidiPortNames.push_back(deviceName.str());
		}
		dumpMidi();
		break;
	}
	case 0x70418732: { // bela_setDigital
		if(gDigitalEnabled)
		{
			// Third argument (optional) can be ~ or sig for signal-rate, message-rate otherwise.
			// [in 14 ~(
			// |
			// [s bela_setDigital]
			bool isMessageRate = true; // defaults to message rate
			bool direction = 0; // initialize it just to avoid the compiler's warning
			bool disable = false;
			if (!(hv_msg_isSymbol(m, 0) && hv_msg_isFloat(m, 1))) return;
			const char *symbol = hv_msg_getSymbol(m, 0);
			if(strcmp(symbol, "in") == 0){
				direction = INPUT;
			} else if(strcmp(symbol, "out") == 0){
				direction = OUTPUT;
			} else if(strcmp(symbol, "disable") == 0){
				disable = true;
			} else {
				return;
			}
			int channel = hv_msg_getFloat(m, 1) - gDigitalChannelOffset;
			if(disable == true){
				dcm.unmanage(channel);
				return;
			}
			if(hv_msg_isSymbol(m, 2)){
				const char *s = hv_msg_getSymbol(m, 2);
				if(strcmp(s, "~") == 0 || strncmp(s, "sig", 3) == 0){
					isMessageRate = false;
				}
			}
			dcm.manage(channel, direction, isMessageRate);
		}
		break;
	}
	case 0xd1d4ac2: { // __hv_noteout
		if (!hv_msg_hasFormat(m, "fff")) return;
		midi_byte_t pitch = (midi_byte_t) hv_msg_getFloat(m, 0);
		midi_byte_t velocity = (midi_byte_t) hv_msg_getFloat(m, 1);
		int channel = (midi_byte_t) hv_msg_getFloat(m, 2);
		int port = getPortChannel(&channel);
		midi[port]->writeNoteOn(channel, pitch, velocity);
		break;
	}
	case 0xe5e2a040: { // __hv_ctlout
		if (!hv_msg_hasFormat(m, "fff")) return;
		midi_byte_t value = (midi_byte_t) hv_msg_getFloat(m, 0);
		midi_byte_t controller = (midi_byte_t) hv_msg_getFloat(m, 1);
		int channel = (midi_byte_t) hv_msg_getFloat(m, 2);
		int port = getPortChannel(&channel);
		midi[port]->writeControlChange(channel, controller, value);
		break;
	}
	case 0x8753e39e: { // __hv_pgmout
		// BUGFIX: this handler was missing the format check all its
		// siblings have.
		if (!hv_msg_hasFormat(m, "ff")) return;
		midi_byte_t program = (midi_byte_t) hv_msg_getFloat(m, 0);
		int channel = (midi_byte_t) hv_msg_getFloat(m, 1);
		int port = getPortChannel(&channel);
		midi[port]->writeProgramChange(channel, program);
		break;
	}
	case 0xe8458013: { // __hv_bendout
		if (!hv_msg_hasFormat(m, "ff")) return;
		// Pd sends -8192..8191; MIDI pitch bend is 0..16383
		unsigned int value = ((midi_byte_t) hv_msg_getFloat(m, 0)) + 8192;
		int channel = (midi_byte_t) hv_msg_getFloat(m, 1);
		int port = getPortChannel(&channel);
		midi[port]->writePitchBend(channel, value);
		break;
	}
	case 0x476d4387: { // __hv_touchout
		if (!hv_msg_hasFormat(m, "ff")) return;
		midi_byte_t pressure = (midi_byte_t) hv_msg_getFloat(m, 0);
		int channel = (midi_byte_t) hv_msg_getFloat(m, 1);
		int port = getPortChannel(&channel);
		midi[port]->writeChannelPressure(channel, pressure);
		break;
	}
	case 0xd5aca9d1: { // __hv_polytouchout, not currently supported by Heavy. You have to [send __hv_polytouchout]
		if (!hv_msg_hasFormat(m, "fff")) return;
		midi_byte_t pitch = (midi_byte_t) hv_msg_getFloat(m, 0);
		midi_byte_t pressure = (midi_byte_t) hv_msg_getFloat(m, 1);
		int channel = (midi_byte_t) hv_msg_getFloat(m, 2);
		int port = getPortChannel(&channel);
		midi[port]->writePolyphonicKeyPressure(channel, pitch, pressure);
		break;
	}
	case 0x6511de55: { // __hv_midiout, not currently supported by Heavy. You have to [send __hv_midiout]
		if (!hv_msg_hasFormat(m, "ff")) return;
		midi_byte_t byte = (midi_byte_t) hv_msg_getFloat(m, 0);
		int port = (int) hv_msg_getFloat(m, 1);
		// BUGFIX: the port comes straight from the patch; bound-check it
		// instead of indexing midi[] out of range.
		if(port < 0 || port >= (int)midi.size()){
			rt_fprintf(stderr, "Port out of range, using port 0 instead\n");
			port = 0;
		}
		midi[port]->writeOutput(byte);
		break;
	}
	case 0x6E64CDC1: { // save_table (custom): dump the "presets" table to presets<N>.wav
		if (!hv_msg_hasFormat(m, "f")) return;
		int bank_nr = (int) hv_msg_getFloat(m, 0);
		// BUGFIX: the buffer was char[10] -- too small for "presetsN.wav"
		// (13+ bytes with terminator), so sprintf overflowed the stack.
		char fileName4[32];
		snprintf(fileName4, sizeof(fileName4), "presets%d.wav", bank_nr);
		const char tableName[] = "presets";
		hv_uint32_t tableHash = hv_stringToHash(tableName);
		float * table = hv_table_getBuffer(gHeavyContext, tableHash);
		writeSamples(fileName4, table, 656);
		break;
	}
	case 0x116A3F3C: { // switch_preset_file: load presets<N>.wav into the "presets" table
		if (!hv_msg_hasFormat(m, "f")) return;
		int bank_nr = (int) hv_msg_getFloat(m, 0);
		// BUGFIX: same char[10] overflow as save_table above.
		char fileName3[32];
		snprintf(fileName3, sizeof(fileName3), "presets%d.wav", bank_nr);
		const char tableName[] = "presets";
		int sampleLen = getNumFrames(fileName3);
		hv_uint32_t tableHash = hv_stringToHash(tableName);
		hv_table_setLength(gHeavyContext, tableHash, sampleLen); // resize the table
		float * table = hv_table_getBuffer(gHeavyContext, tableHash); // once resized, get a pointer to the array
		int channel = 0; // take the first channel of the file
		int startFrame = 0; // start from the beginning
		int lastFrame = sampleLen; // until the end of the file
		getSamples(fileName3, table, channel, startFrame, lastFrame);
		break;
	}
	default: {
		break;
	}
	}
}
/*
* SETUP, RENDER LOOP & CLEANUP
*/
// Initialise everything: fifo + auxiliary thread, the LV2 plugin chain
// and its initial parameters, the Heavy context and its tables, MIDI
// devices, scope, digital channels and multiplexer support.
bool setup(BelaContext *context, void *userData) {
// Digital pins 1/2 drive the status LEDs, 3/4 read the two buttons
// polled in render().
pinMode(context,0,1, OUTPUT); // LED1
pinMode(context,0,2, OUTPUT); // LED2
pinMode(context,0,3, INPUT); //Button 1
pinMode(context,0,4, INPUT); //Button 2
// The LV2 chain runs on an auxiliary thread at fifoFactor times the
// audio block size, decoupled through BelaContextFifo.
int fifoFactor = 8; // 16 to 128
BelaContext* longContext = gBcf.setup(context, fifoFactor);
if(!longContext)
{
fprintf(stderr, "Error: unable to initialise BelaContextFifo\n");
return false;}
longThreadTask = Bela_createAuxiliaryTask(longThread, 94, "long-thread", NULL);
Bela_scheduleAuxiliaryTask(longThreadTask);
// duration of one long block, in milliseconds
gBlockDurationMs = context->audioFrames * fifoFactor / context->audioSampleRate * 1000;
//long thread setup (lv2host)
// these should be initialized by Bela_userSettings above
if((context->flags & BELA_FLAG_INTERLEAVED) || context->audioSampleRate != context->analogSampleRate)
{
fprintf(stderr, "Using Lv2Host requires non-interleaved buffers and uniform sample rate\n");
return false;
}
if(!gLv2Host.setup(longContext->audioSampleRate, longContext->audioFrames,
longContext->audioInChannels, longContext->audioOutChannels))
{
fprintf(stderr, "Unable to create Lv2 host\n");
return false;
}
// Plugin chain. The position in this list is the plugin index used by
// setPort()/connect()/bypass() below and in longRender():
// 0 noisegate, 1 autotune (fat1), 2 talkbox, 3 octaver, 4 subsynth,
// 5 compressor, 6 detune, 7 stereo, 8 delay, 9 reverb.
std::vector<std::string> lv2Chain;
lv2Chain.emplace_back("http://moddevices.com/plugins/caps/Noisegate");
// lv2Chain.emplace_back("http://moddevices.com/plugins/caps/Compress");
lv2Chain.emplace_back("http://gareus.org/oss/lv2/fat1");
lv2Chain.emplace_back("http://drobilla.net/plugins/mda/TalkBox");
lv2Chain.emplace_back("http://guitarix.sourceforge.net/plugins/gx_oc_2_#_oc_2_");
lv2Chain.emplace_back("http://drobilla.net/plugins/mda/SubSynth");
lv2Chain.emplace_back("http://moddevices.com/plugins/caps/Compress");
lv2Chain.emplace_back("http://drobilla.net/plugins/mda/Detune");
lv2Chain.emplace_back("http://drobilla.net/plugins/mda/Stereo");
lv2Chain.emplace_back("http://calf.sourceforge.net/plugins/VintageDelay");
lv2Chain.emplace_back("http://guitarix.sourceforge.net/plugins/gx_reverb_stereo#_reverb_stereo");
for(auto &name : lv2Chain)
{
gLv2Host.add(name);
}
if(0 == gLv2Host.count())
{
fprintf(stderr, "No plugins were successfully instantiated\n");
return false;
}
if(context->analogFrames)
gAudioFramesPerAnalogFrame = context->audioFrames / context->analogFrames;
// Initial plugin parameters. Port indices are plugin-specific; see each
// plugin's port list for their meaning.
//noisegate
gLv2Host.setPort(0, 0, -15);
gLv2Host.setPort(0, 2, -20);
//autotune
gLv2Host.setPort(1, 6, 0.5);
gLv2Host.setPort(1, 7, 0.02);
// gLv2Host.setPort(2, 9, 24);
gLv2Host.setPort(1, 11, 1); //fastmode
// ports 12..23 are the autotune's 12 per-semitone note enables (here a
// C major scale); longRender() rewrites them on scale/key changes
gLv2Host.setPort(1, 12, 1); //c
gLv2Host.setPort(1, 13, 0);
gLv2Host.setPort(1, 14, 0);
gLv2Host.setPort(1, 15, 1);
gLv2Host.setPort(1, 16, 0);
gLv2Host.setPort(1, 17, 1);
gLv2Host.setPort(1, 18, 0);
gLv2Host.setPort(1, 19, 1);
gLv2Host.setPort(1, 20, 0);
gLv2Host.setPort(1, 21, 0);
gLv2Host.setPort(1, 22, 1);
gLv2Host.setPort(1, 23, 0);
//vocoder (talkbox)
gLv2Host.setPort(2, 0, 1); //wet signal
gLv2Host.setPort(2, 2, 0); //choose carrier channel
gLv2Host.setPort(2, 3, 1); //quality
// octaver off by default
gLv2Host.setPort(3, 3, 0);
gLv2Host.setPort(3, 4, 0);
gLv2Host.setPort(3, 2, 1);
//mda subsynth
// gLv2Host.setPort(4,1, 0);
gLv2Host.setPort(4, 3, 1);
gLv2Host.setPort(4, 4, 0);
//compressor
gLv2Host.setPort(5, 1, 2);
gLv2Host.setPort(5, 3, 0.6);
gLv2Host.setPort(5, 4, 0.3);
// gLv2Host.setPort(5, 6, 20);
//mda detune
gLv2Host.setPort(6, 0,0.1 );
gLv2Host.setPort(6, 1,0.5 );
gLv2Host.setPort(6, 3,0.3 );
gLv2Host.setPort(6, 2, 0.3);
//mda stereo
gLv2Host.setPort(7, 0, 0.5);
gLv2Host.setPort(7, 1, 0.3);
//echo
gLv2Host.setPort(8, 11, 0);
gLv2Host.setPort(8, 10, 0);
gLv2Host.setPort(8, 4, 100);
// gLv2Host.setPort(7, 15, 1);
//_reverb
gLv2Host.setPort(9, 0, 40);
gLv2Host.setPort(9, 3, 0.1);
//connect carrier to vocoder directly (without mono fx, gate and compressor)
// gLv2Host.connect(-1,1,2,1);
// scope.setup(4, context->audioSampleRate);
// Turn LED on
// pinMode(context, 0, gLedPin, OUTPUT); // Set pin as output
// digitalWrite(context, 0, gLedPin, 1); //Turn LED on
// return true;
// Check if digitals are enabled
if(context->digitalFrames > 0 && context->digitalChannels > 0)
gDigitalEnabled = 1;
gAudioChannelsInUse = std::max(context->audioInChannels, context->audioOutChannels);
gAnalogChannelsInUse = std::max(context->analogInChannels, context->analogOutChannels);
gDigitalChannelsInUse = context->digitalChannels;
// Channel distribution: the Heavy patch sees its channels laid out as
// [audio | analog | digital | scope]
gFirstAnalogChannel = std::max(context->audioInChannels, context->audioOutChannels);
gFirstDigitalChannel = gFirstAnalogChannel + std::max(context->analogInChannels, context->analogOutChannels);
if(gFirstDigitalChannel < minFirstDigitalChannel)
gFirstDigitalChannel = minFirstDigitalChannel; //for backwards compatibility
gDigitalChannelOffset = gFirstDigitalChannel + 1;
gFirstScopeChannel = gFirstDigitalChannel + gDigitalChannelsInUse;
// NOTE(review): gScopeChannelsInUse is only computed further down (after
// the Heavy context exists), so it is still 0 here -- confirm that
// gChannelsInUse is meant to exclude the scope channels.
gChannelsInUse = gFirstScopeChannel + gScopeChannelsInUse;
// Create hashes for digital channels
generateDigitalNames(gDigitalChannelsInUse, gDigitalChannelOffset, gHvDigitalInHashes);
/* HEAVY */
// NOTE: 10 initializers for 11 elements; the last entry is
// value-initialized to an empty string.
std::array<std::string, 11> outs = {{
"__hv_noteout",
"__hv_ctlout",
"__hv_pgmout",
"__hv_touchout",
"__hv_polytouchout",
"__hv_bendout",
"__hv_midiout",
"switch_bank",
"save_table",
"last_state",
}};
/* for(auto &st : outs)
{
// uncomment this if you want to display the hashes for midi
// outs. Then hardcode them in the switch() in sendHook()
// printf("%s: %#x\n", st.c_str(), hv_stringToHash(st.c_str()));
} */
hvMidiHashes[kmmNoteOn] = hv_stringToHash("__hv_notein");
// hvMidiHashes[kmmNoteOff] = hv_stringToHash("noteoff"); // this is handled differently, see the render function
hvMidiHashes[kmmControlChange] = hv_stringToHash("__hv_ctlin");
// Note that the ones below are not defined by Heavy, but they are here for (wishing) forward-compatibility
// You need to receive from the corresponding symbol in Pd and unpack the message, e.g.:
//[r __hv_pgmin]
//|
//[unpack f f]
//| |
//| [print pgmin_channel]
//[print pgmin_number]
hvMidiHashes[kmmProgramChange] = hv_stringToHash("__hv_pgmin");
hvMidiHashes[kmmPolyphonicKeyPressure] = hv_stringToHash("__hv_polytouchin");
hvMidiHashes[kmmChannelPressure] = hv_stringToHash("__hv_touchin");
hvMidiHashes[kmmPitchBend] = hv_stringToHash("__hv_bendin");
// presumably (sampleRate, poolKb, inQueueKb, outQueueKb) -- confirm
// against the heavy/hvcc API documentation
gHeavyContext = hv_bela_new_with_options(context->audioSampleRate, 10, 2, 0);
gHvInputChannels = hv_getNumInputChannels(gHeavyContext);
gHvOutputChannels = hv_getNumOutputChannels(gHeavyContext);
// add the lines below
// int k = 0;
/* const char fileName[] = "presets0.wav";
const char tableName[] = "presets";
//sprintf(lengthName, "samplelength%d", k);
int sampleLen = getNumFrames(fileName);
hv_uint32_t tableHash = hv_stringToHash(tableName);
hv_table_setLength(gHeavyContext, tableHash, sampleLen); // resize the table
float * table = hv_table_getBuffer(gHeavyContext, tableHash); // once resized, get a pointer to the array
int channel = 0; // take the first channel of the file
int startFrame = 0; // start from the beginning
int lastFrame = sampleLen; // until the end of the file
getSamples(fileName, table, channel, startFrame, lastFrame);
*/
// Preload the "current" table (state saved at the previous run) from disk.
const char fileName5[] = "current.wav";
const char tableName5[] = "current";
int sampleLen5 = getNumFrames(fileName5);
hv_uint32_t tableHash5 = hv_stringToHash(tableName5);
hv_table_setLength(gHeavyContext, tableHash5, sampleLen5); // resize the table
float * table5 = hv_table_getBuffer(gHeavyContext, tableHash5); // once resized, get a pointer to the array
int channel = 0; // take the first channel of the file
int startFrame = 0; // start from the beginning
int lastFrame5 = sampleLen5; // until the end of the file
getSamples(fileName5, table5, channel, startFrame, lastFrame5);
// Load the 48 wavetables wt0.wav .. wt47.wav into tables wt0 .. wt47.
char fileName2[10];
char tableName2[8];
//char lengthName2[14];
for (int k = 0; k < 48; k++) {
sprintf(fileName2, "wt%d.wav", k);
sprintf(tableName2, "wt%d", k);
// sprintf(lengthName2, "samplelength%d", k);
int sampleLen = getNumFrames(fileName2);
hv_uint32_t tableHash = hv_stringToHash(tableName2);
hv_table_setLength(gHeavyContext, tableHash, sampleLen); // resize the table
float * table = hv_table_getBuffer(gHeavyContext, tableHash); // once resized, get a pointer to the array
int channel = 0; // take the first channel of the file
int startFrame = 0; // start from the beginning
int lastFrame = sampleLen; // until the end of the file
getSamples(fileName2, table, channel, startFrame, lastFrame);
}
// Heavy output channels beyond the digital range go to the scope.
gScopeChannelsInUse = gHvOutputChannels > gFirstScopeChannel ?
gHvOutputChannels - gFirstScopeChannel : 0;
if(gDigitalEnabled)
{
gDigitalSigInChannelsInUse = gHvInputChannels > gFirstDigitalChannel ?
gHvInputChannels - gFirstDigitalChannel : 0;
gDigitalSigOutChannelsInUse = gHvOutputChannels > gFirstDigitalChannel ?
gHvOutputChannels - gFirstDigitalChannel - gScopeChannelsInUse: 0;
}
else
{
gDigitalSigInChannelsInUse = 0;
gDigitalSigOutChannelsInUse = 0;
}
printf("Starting Heavy context with %d input channels and %d output channels\n",
gHvInputChannels, gHvOutputChannels);
printf("Channels in use:\n");
printf("Digital in : %u, Digital out: %u\n", gDigitalSigInChannelsInUse, gDigitalSigOutChannelsInUse);
printf("Scope out: %u\n", gScopeChannelsInUse);
// Scratch buffers used by render() to de-/re-interleave for Heavy.
if(gHvInputChannels != 0) {
gHvInputBuffers = (float *)calloc(gHvInputChannels * context->audioFrames,sizeof(float));
}
if(gHvOutputChannels != 0) {
gHvOutputBuffers = (float *)calloc(gHvOutputChannels * context->audioFrames,sizeof(float));
}
gInverseSampleRate = 1.0 / context->audioSampleRate;
// Set heavy print hook
hv_setPrintHook(gHeavyContext, printHook);
// Set heavy send hook
hv_setSendHook(gHeavyContext, sendHook);
// add here other devices you need
gMidiPortNames.push_back("hw:1,0,0");
//gMidiPortNames.push_back("hw:0,0,0");
//gMidiPortNames.push_back("hw:1,0,1");
// Open each requested MIDI port; names that fail to open are dropped so
// gMidiPortNames stays parallel to the midi vector.
unsigned int n = 0;
while(n < gMidiPortNames.size())
{
Midi* newMidi = openMidiDevice(gMidiPortNames[n], true, true);
if(newMidi)
{
midi.push_back(newMidi);
++n;
} else {
gMidiPortNames.erase(gMidiPortNames.begin() + n);
}
}
dumpMidi();
if(gScopeChannelsInUse > 0){
#if __clang_major__ == 3 && __clang_minor__ == 8
fprintf(stderr, "Scope currently not supported when compiling heavy with clang3.8, see #265 https://github.com/BelaPlatform/Bela/issues/265. You should specify `COMPILER gcc;` in your Makefile options\n");
exit(1);
#endif
scope = new Scope();
scope->setup(gScopeChannelsInUse, context->audioSampleRate);
gScopeOut = new float[gScopeChannelsInUse];
}
// Bela digital
if(gDigitalEnabled)
{
dcm.setCallback(sendDigitalMessage);
if(gDigitalChannelsInUse> 0){
for(unsigned int ch = 0; ch < gDigitalChannelsInUse; ++ch){
// each channel's callback argument is its Heavy receiver name
dcm.setCallbackArgument(ch, (void *) gHvDigitalInHashes[ch].c_str());
}
}
}
// unlike libpd, no need here to bind the bela_digitalOut.. receivers
// but make sure you do something like [send receiverName @hv_param]
// when you want to send a message from Heavy to the wrapper.
multiplexerTableHash = hv_stringToHash(multiplexerArray);
if(context->multiplexerChannels > 0){
pdMultiplexerActive = true;
multiplexerArraySize = context->multiplexerChannels * context->analogInChannels;
hv_table_setLength(gHeavyContext, multiplexerTableHash, multiplexerArraySize);
hv_sendFloatToReceiver(gHeavyContext, hv_stringToHash("bela_multiplexerChannels"), context->multiplexerChannels);
}
return true;
}
/*
 * render() -- the Bela real-time audio callback, invoked once per audio block.
 *
 * Top-level flow:
 *   1. Poll the two digital buttons (pins 4 and 3); a falling edge on button 0
 *      cycles fx_nr through 0..2 and the selection is mirrored on digital
 *      outputs 1 and 2 (LED / bypass routing).
 *   2. fx_nr == 0: run the Heavy (compiled Pd) patch -- forward pending MIDI
 *      into the patch's receivers, de-interleave audio/analog input into
 *      gHvInputBuffers, call hv_processInline(), then route the outputs to
 *      digital pins, the scope, analog outs and audio outs.
 *   3. fx_nr != 0: bypass Heavy -- only latch incoming MIDI note/CC values
 *      into globals, then hand this context to the larger-blocksize render
 *      via gBcf (BelaContextFifo) and copy back whatever that side produced.
 */
void render(BelaContext *context, void *userData)
{
// Sample both buttons. Only the value from the LAST digital frame of the
// block survives this loop, so presses shorter than one block may be
// missed -- NOTE(review): confirm that is acceptable for these switches.
for(unsigned int n = 0; n < context->digitalFrames; ++n){
cState_b0 = digitalRead(context, n, 4);
cState_b1 = digitalRead(context, n, 3);
// NOTE(review): cState_b1 is sampled but never used in this function;
// presumably handled elsewhere or work in progress.
}
// Falling edge on button 0 (input appears to be pulled high / active-low):
// advance to the next effect slot.
if (!cState_b0 && pState_b0) {
fx_nr = (fx_nr + 1)%3;
rt_printf("fx_nr: %d\n", fx_nr);
//button is pressed down
}
// Debounce the release: keep forcing the "pressed" state (cState_b0 = 0)
// until the input has read high for DEBOUNCE_TIME consecutive render
// blocks. Note DEBOUNCE_TIME is counted in blocks, not milliseconds.
if (cState_b0 && !pState_b0) {
if (debounce_b0 < DEBOUNCE_TIME) {
debounce_b0++;
cState_b0 = 0;
} else debounce_b0 = 0;
}
pState_b0 = cState_b0;
// On a change of effect, update digital outs 1 and 2. digitalWrite() from
// frame 0 holds the value for the remainder of the block.
if (fx_nr != old_fx) {
switch (fx_nr) {
case 0: {
digitalWrite(context, 0, 1, 1);
digitalWrite(context, 0, 2, 1);
break;
}
case 1: {
digitalWrite(context, 0, 1, 0);
digitalWrite(context, 0, 2, 1);
break;
}
case 2: {
digitalWrite(context, 0, 1, 1);
digitalWrite(context, 0, 2, 0);
break;
}
}
old_fx = fx_nr;
}
// ---- Effect slot 0: Heavy (compiled Pd) patch ----
if (fx_nr == 0) {
// Drain every pending MIDI message from every open port and forward it
// to the matching [notein]/[ctlin]/... receiver hash inside the patch.
int num;
for(unsigned int port = 0; port < midi.size(); ++port){
while((num = midi[port]->getParser()->numAvailableMessages()) > 0){
static MidiChannelMessage message;
message = midi[port]->getParser()->getNextChannelMessage();
switch(message.getType()){
case kmmNoteOn: {
//message.prettyPrint();
int noteNumber = message.getDataByte(0);
int velocity = message.getDataByte(1);
int channel = message.getChannel();
// Latch the latest note for use outside the Heavy patch as well.
if (velocity > 0) gIsNoteOn = 1;
gNote = noteNumber;
// rt_printf("message: noteNumber: %f, velocity: %f, channel: %f\n", noteNumber, velocity, channel);
hv_sendMessageToReceiverV(gHeavyContext, hvMidiHashes[kmmNoteOn], 0, "fff",
(float)noteNumber, (float)velocity, (float)channel+1);
break;
}
case kmmNoteOff: {
/* PureData does not seem to handle noteoff messages as per the MIDI specs,
* so that the noteoff velocity is ignored. Here we convert them to noteon
* with a velocity of 0.
*/
int noteNumber = message.getDataByte(0);
// int velocity = message.getDataByte(1); // would be ignored by Pd
int channel = message.getChannel();
// note we are sending the below to hvHashes[kmmNoteOn] !!
hv_sendMessageToReceiverV(gHeavyContext, hvMidiHashes[kmmNoteOn], 0, "fff",
(float)noteNumber, (float)0, (float)channel+1);
break;
}
case kmmControlChange: {
int channel = message.getChannel();
int controller = message.getDataByte(0);
int value = message.getDataByte(1);
// Latch the latest CC for use outside the Heavy patch as well.
gControl = controller;
gCCVal = value;
hv_sendMessageToReceiverV(gHeavyContext, hvMidiHashes[kmmControlChange], 0, "fff",
(float)value, (float)controller, (float)channel+1);
break;
}
case kmmProgramChange: {
int channel = message.getChannel();
int program = message.getDataByte(0);
hv_sendMessageToReceiverV(gHeavyContext, hvMidiHashes[kmmProgramChange], 0, "ff",
(float)program, (float)channel+1);
break;
}
case kmmPolyphonicKeyPressure: {
//TODO: untested, I do not have anything with polyTouch... who does, anyhow?
int channel = message.getChannel();
int pitch = message.getDataByte(0);
int value = message.getDataByte(1);
hv_sendMessageToReceiverV(gHeavyContext, hvMidiHashes[kmmPolyphonicKeyPressure], 0, "fff",
(float)channel+1, (float)pitch, (float)value);
break;
}
case kmmChannelPressure:
{
int channel = message.getChannel();
int value = message.getDataByte(0);
hv_sendMessageToReceiverV(gHeavyContext, hvMidiHashes[kmmChannelPressure], 0, "ff",
(float)value, (float)channel+1);
break;
}
case kmmPitchBend:
{
int channel = message.getChannel();
// Reassemble the 14-bit bend value from the two 7-bit data bytes.
int value = ((message.getDataByte(1) << 7) | message.getDataByte(0));
hv_sendMessageToReceiverV(gHeavyContext, hvMidiHashes[kmmPitchBend], 0, "ff",
(float)value, (float)channel+1);
break;
}
case kmmSystem:
case kmmNone:
case kmmAny:
break;
}
}
}
// De-interleave the data
// Heavy expects channel-contiguous (non-interleaved) buffers: channel ch
// occupies gHvInputBuffers[ch*audioFrames .. ch*audioFrames+audioFrames-1].
// Channel layout: [0, gAudioChannelsInUse) = audio in,
// [gAudioChannelsInUse, +gAnalogChannelsInUse) = analog in, rest = routing.
if(gHvInputBuffers != NULL) {
for(unsigned int n = 0; n < context->audioFrames; n++) {
for(unsigned int ch = 0; ch < gHvInputChannels; ch++) {
if(ch >= gAudioChannelsInUse + gAnalogChannelsInUse) {
// THESE ARE PARAMETER INPUT 'CHANNELS' USED FOR ROUTING
// 'sensor' outputs from routing channels of dac~ are passed through here
// these could be also digital channels (handled by the dcm)
// or parameter channels used for routing (currently unhandled)
break;
} else {
// If more than 2 ADC inputs are used in the pd patch, route the analog inputs
// i.e. ADC3->analogIn0 etc. (first two are always audio inputs)
if(ch >= gAudioChannelsInUse)
{
unsigned int analogCh = ch - gAudioChannelsInUse;
if(analogCh < context->analogInChannels)
{
// Same frame index works for analog because uniformSampleRate
// is enabled in Bela_userSettings() -- analog and audio run at
// the same rate.
int m = n;
float mIn = analogReadNI(context, m, analogCh);
gHvInputBuffers[ch * context->audioFrames + n] = mIn;
}
} else {
if(ch < context->audioInChannels)
gHvInputBuffers[ch * context->audioFrames + n] = audioReadNI(context, n, ch);
}
}
}
}
}
// Multiplexer capelet: once every multiplexerArraySize calls, copy the
// whole multiplexed analog snapshot into the patch's table.
if(pdMultiplexerActive){
static int lastMuxerUpdate = 0;
if(++lastMuxerUpdate == multiplexerArraySize){
lastMuxerUpdate = 0;
memcpy(hv_table_getBuffer(gHeavyContext, multiplexerTableHash), (float *const)context->multiplexerAnalogIn, multiplexerArraySize * sizeof(float));
}
}
// Bela digital in
if(gDigitalEnabled)
{
// note: in multiple places below we assume that the number of digital frames is same as number of audio
// Bela digital in at message-rate
dcm.processInput(context->digital, context->digitalFrames);
// Bela digital in at signal-rate
if(gDigitalSigInChannelsInUse > 0)
{
unsigned int j, k;
float *p0, *p1;
const unsigned int gLibpdBlockSize = context->audioFrames;
const unsigned int audioFrameBase = 0;
float* gInBuf = gHvInputBuffers;
// block below copy/pasted from libpd, except
// 16 has been replaced with gDigitalSigInChannelsInUse
// p0 walks frames; p1 walks channel buffers (stride gLibpdBlockSize)
// starting at the first digital channel's slot in gHvInputBuffers.
for (j = 0, p0 = gInBuf; j < gLibpdBlockSize; j++, p0++) {
unsigned int digitalFrame = audioFrameBase + j;
for (k = 0, p1 = p0 + gLibpdBlockSize * gFirstDigitalChannel;
k < gDigitalSigInChannelsInUse; ++k, p1 += gLibpdBlockSize) {
if(dcm.isSignalRate(k) && dcm.isInput(k)){ // only process input channels that are handled at signal rate
*p1 = digitalRead(context, digitalFrame, k);
}
}
}
}
}
// replacement for bang~ object
//hv_sendMessageToReceiverV(gHeavyContext, "bela_bang", 0.0f, "b");
// heavy audio callback
// Run the compiled Pd patch for one block (non-interleaved in/out).
hv_processInline(gHeavyContext, gHvInputBuffers, gHvOutputBuffers, context->audioFrames);
/*
for(int n = 0; n < context->audioFrames*gHvOutputChannels; ++n)
{
printf("%.3f, ", gHvOutputBuffers[n]);
if(n % context->audioFrames == context->audioFrames - 1)
printf("\n");
}
*/
// Bela digital out
if(gDigitalEnabled)
{
// Bela digital out at signal-rate
if(gDigitalSigOutChannelsInUse > 0)
{
unsigned int j, k;
float *p0, *p1;
const unsigned int gLibpdBlockSize = context->audioFrames;
const unsigned int audioFrameBase = 0;
float* gOutBuf = gHvOutputBuffers;
// block below copy/pasted from libpd, except
// context->digitalChannels has been replaced with gDigitalSigOutChannelsInUse
for (j = 0, p0 = gOutBuf; j < gLibpdBlockSize; ++j, ++p0) {
unsigned int digitalFrame = (audioFrameBase + j);
for (k = 0, p1 = p0 + gLibpdBlockSize * gFirstDigitalChannel;
k < gDigitalSigOutChannelsInUse; k++, p1 += gLibpdBlockSize) {
if(dcm.isSignalRate(k) && dcm.isOutput(k)){ // only process output channels that are handled at signal rate
// Threshold the patch's float signal to a logic level.
digitalWriteOnce(context, digitalFrame, k, *p1 > 0.5);
}
}
}
}
// Bela digital out at message-rate
dcm.processOutput(context->digital, context->digitalFrames);
}
// Bela scope
// Copy the patch's dedicated scope channels out of gHvOutputBuffers and
// log one scope frame per audio frame.
if(gScopeChannelsInUse > 0)
{
unsigned int j, k;
float *p0, *p1;
const unsigned int gLibpdBlockSize = context->audioFrames;
float* gOutBuf = gHvOutputBuffers;
// block below copy/pasted from libpd
for (j = 0, p0 = gOutBuf; j < gLibpdBlockSize; ++j, ++p0) {
for (k = 0, p1 = p0 + gLibpdBlockSize * gFirstScopeChannel; k < gScopeChannelsInUse; k++, p1 += gLibpdBlockSize) {
gScopeOut[k] = *p1;
}
scope->log(gScopeOut);
}
}
// Interleave the output data
// Mirror of the input de-interleave above: audio channels first, then
// analog, then routing-only channels which are not written anywhere.
if(gHvOutputBuffers != NULL) {
for(unsigned int n = 0; n < context->audioFrames; n++) {
for(unsigned int ch = 0; ch < gHvOutputChannels; ch++) {
if(ch >= gAudioChannelsInUse + gAnalogChannelsInUse) {
// THESE ARE SENSOR OUTPUT 'CHANNELS' USED FOR ROUTING
// they are the content of the 'sensor output' dac~ channels
} else {
if(ch >= gAudioChannelsInUse) {
int m = n;
unsigned int analogCh = ch - gAudioChannelsInUse;
if(analogCh < context->analogOutChannels)
analogWriteOnceNI(context, m, analogCh, gHvOutputBuffers[ch*context->audioFrames + n]);
} else {
if(ch < context->audioOutChannels)
audioWriteNI(context, n, ch, gHvOutputBuffers[ch * context->audioFrames + n]);
}
}
}
}
}
// ---- Effect slots 1/2: bypass Heavy, feed the LV2 host path ----
} else {
// Still drain MIDI so messages do not queue up while Heavy is bypassed;
// only note and CC values are latched into globals here.
int num;
for(unsigned int port = 0; port < midi.size(); ++port){
while((num = midi[port]->getParser()->numAvailableMessages()) > 0){
static MidiChannelMessage message;
message = midi[port]->getParser()->getNextChannelMessage();
switch(message.getType()){
case kmmNoteOn: {
//message.prettyPrint();
gNote = message.getDataByte(0);
int velocity = message.getDataByte(1);
// int channel = message.getChannel();
if (velocity > 0) gIsNoteOn = 1;
break;
}
case kmmNoteOff: {
break;
}
case kmmControlChange: {
// int channel = message.getChannel();
gControl = message.getDataByte(0);
gCCVal = message.getDataByte(1);
break;
}
case kmmProgramChange:
case kmmPolyphonicKeyPressure:
case kmmChannelPressure:
case kmmPitchBend:
case kmmSystem:
case kmmNone:
case kmmAny:
break;
}
}
}
// Push this (short) context to the larger-blocksize render thread...
gBcf.push(BelaContextFifo::kToLong, context);
/// receive from the "long" render
// ...and, if a processed context is available, copy its data back into
// this block's output. Until the FIFO fills, rctx may be NULL and the
// block is left untouched (initial latency of the FIFO scheme).
const InternalBelaContext* rctx = (InternalBelaContext*)gBcf.pop(BelaContextFifo::kToShort);
if(rctx) {
BelaContextSplitter::contextCopyData(rctx, (InternalBelaContext*)context);
}
}
}
/*
 * cleanup() -- Bela teardown hook, called once after render() stops.
 *
 * Persists the contents of the Pd table named "current" to "current.wav",
 * then releases the Heavy context, its non-interleaved I/O buffers, and the
 * scope resources allocated in setup().
 */
void cleanup(BelaContext *context, void *userData)
{
const char fileName[] = "current.wav";
const char tableName[] = "current";
hv_uint32_t tableHash = hv_stringToHash(tableName);
float * table = hv_table_getBuffer(gHeavyContext, tableHash);
// Guard against the table being absent from the patch: hv_table_getBuffer()
// returns NULL for an unknown table hash, and writeSamples() must not be
// handed a NULL buffer -- previously this would crash at shutdown.
if(table != NULL)
writeSamples(fileName, table, 2); // NOTE(review): '2' presumably channel count or length -- confirm against writeSamples()
hv_delete(gHeavyContext);
// Buffers were malloc'd, so free() (not delete[]) is correct here.
free(gHvInputBuffers);
free(gHvOutputBuffers);
delete[] gScopeOut;
delete scope;
}
I already tried commenting out the digitalWrite and digitalRead calls in the Heavy render path, but that did not help.