- //WavetableSynthesis.c
- //gcc MKAiff.c WavetableSynthesis.c -o WavetableSynthesis
#include "MKAiff.h"
#include <math.h>
#include <stdlib.h>
- #define PRERECORDED_AUDIO_SAMPLE_PATH "demo.aif"
- #define SAMPLE_RATE 44100
- #define BITS_PER_SAMPLE 16
- #define NUM_SECONDS 7
- #define PLAYBACK_PITCH 70
- void linearInterpolateBuffer(float* previousFrame, int numChannels, float* input, int inNumFrames, float* output, int outNumFrames);
- int main()
- {
- MKAiff* audioSample = aiffWithContentsOfFile(PRERECORDED_AUDIO_SAMPLE_PATH);
- if((audioSample == NULL) || (!aiffHasInstrumentInfo(audioSample))) return 1;
- const int NUM_CHANNELS = aiffNumChannels(audioSample);
- const int numSamples = NUM_SECONDS * NUM_CHANNELS * SAMPLE_RATE;
- //get the original wavetable
- int wavetableNumSamples = aiffDurationInSamples(audioSample);
- float* wavetable = malloc(wavetableNumSamples * sizeof(*wavetable));
- aiffRewindPlayheadToBeginning(audioSample);
- aiffReadFloatingPointSamplesAtPlayhead(audioSample, wavetable, numSamples);
- //get the instrument info and adjust it for interpolation
- uint32_t numSamplesInAttack=0, numSamplesInSustain, numSamplesUntilRelease, numSamplesInRelease;
- aiffPositionInFramesOfMarkerWithID(audioSample, aiffInstrumentSustainLoopStartMarkerID(audioSample), &numSamplesInAttack);
- aiffPositionInFramesOfMarkerWithID (audioSample, aiffInstrumentSustainLoopEndMarkerID(audioSample), &numSamplesUntilRelease);
- double playbackSpeed = pow(pow(2, (aiffInstrumentBaseNote(audioSample) - PLAYBACK_PITCH)), (1/12.0));
- numSamplesInAttack *= NUM_CHANNELS * playbackSpeed;
- numSamplesUntilRelease *= NUM_CHANNELS * playbackSpeed;
- numSamplesInSustain = numSamplesUntilRelease - numSamplesInAttack;
- numSamplesInRelease = wavetableNumSamples*playbackSpeed - numSamplesUntilRelease;
- //interpolate the wavetable
- int i;
- int numInterpolatedSamples = wavetableNumSamples * playbackSpeed ;
- numInterpolatedSamples -= (numInterpolatedSamples % NUM_CHANNELS);
- float *interpolatedWavetable = malloc(numInterpolatedSamples * sizeof(*interpolatedWavetable));
- float previousFrame [NUM_CHANNELS]; for(i=0; i<NUM_CHANNELS; previousFrame[i++]=0);
- linearInterpolateBuffer(previousFrame, NUM_CHANNELS, wavetable, wavetableNumSamples/NUM_CHANNELS, interpolatedWavetable, numInterpolatedSamples/NUM_CHANNELS);
- //perform the synthesis
- float* nextSample = interpolatedWavetable;
- int shouldRelease = aiffNo, forwardSustain = aiffYes;
- float* audioBuffer = malloc(sizeof(*audioBuffer) * numSamples);
- for(i=0; i<numSamples; i++)
- {
- //trigger the release
- if(i >= (numSamples - (2*numSamplesInSustain + numSamplesInRelease)))
- shouldRelease = aiffYes;
- if((!shouldRelease) && forwardSustain && (nextSample >= (interpolatedWavetable + numSamplesUntilRelease)))
- forwardSustain = aiffNo;
- if((!forwardSustain) && (nextSample <= interpolatedWavetable + numSamplesInAttack))
- forwardSustain = aiffYes;
- if(nextSample >= interpolatedWavetable + numInterpolatedSamples)
- audioBuffer[i] = 0;
- else
- {
- audioBuffer[i] = *nextSample;
- nextSample += (forwardSustain) ? 1 : -1;
- }
- }
- MKAiff* aiff = aiffWithDurationInSeconds(NUM_CHANNELS, SAMPLE_RATE, BITS_PER_SAMPLE, NUM_SECONDS);
- if(aiff == NULL) return 1;
- aiffAppendFloatingPointSamples(aiff, audioBuffer, numSamples, aiffFloatSampleType);
- aiffSaveWithFilename(aiff, "WavetableSynthesis.aif");
- aiffDestroy(aiff);
- free(interpolatedWavetable);
- free(wavetable);
- return 0;
- }
/* Resamples 'input' (inNumFrames interleaved frames of numChannels) into
 * 'output' (outNumFrames frames) by linear interpolation, so the wavetable
 * can be played back at a different pitch. 'previousFrame' supplies the
 * frame that precedes input[0] (used for fractional positions before the
 * first input frame) and is overwritten with input's last frame on return,
 * so consecutive buffers interpolate seamlessly across calls.
 * Fix: the per-frame position/fraction computation was loop-invariant in
 * the channel loop but recomputed for every channel; it is now hoisted. */
void linearInterpolateBuffer(float* previousFrame, int numChannels, float* input, int inNumFrames, float* output, int outNumFrames)
{
  int i, j;
  double ratio = inNumFrames / (double)outNumFrames;
  for(i=0; i<outNumFrames; i++)
    {
      //position of this output frame on the input timeline, offset by -1 so
      //positions before input frame 0 interpolate from previousFrame
      double position = i * ratio - 1;
      int index = (int)position;            //truncates toward zero
      double fraction = position - index;
      if(fraction < 0) fraction += 1;       //keep fraction in [0, 1)
      for(j=0; j<numChannels; j++)
        {
          double prevValue = (position < 0) ? previousFrame[j] : input[index*numChannels + j];
          double nextValue = (position < 0) ? input[j]         : input[(index+1)*numChannels + j];
          output[i*numChannels + j] = (nextValue - prevValue) * fraction + prevValue;
        }
    }
  //remember the last input frame for the next call
  for(j=0; j<numChannels; j++)
    previousFrame[j] = input[(inNumFrames - 1) * numChannels + j];
}
Output:
Builds On
AIFF Template · Linear Interpolation · How To Use Interpolation · Preparing Samples · Forward-Backward Pitch

Explanation of the Concepts
This example is a relatively complete wavetable synthesis algorithm. It reads the "Attack Buffer", "Sustain Loop", and "Release Buffer" from the audio-sample "demo.aif". It transposes these using linear interpolation, and fills a new audio file with a single note by looping the sustain portion of the note.
In the previous examples, the wavetables were copied into an aiff file using commands that are specific to MKAiff. This example attempts to more closely emulate a realtime environment by copying the wavetable into a final audio buffer that is attached to the aiff file only at the very end. The final audio buffer wastes a good deal of space, but it emulates a realtime output stream closely enough that the actual synthesis algorithm no longer relies upon MKAiff. Here, the release portion of the note is triggered by the approach of the end of the file. In the context of a larger program, this could be triggered by something external, like the key of a keyboard being released.