+
+
+
+
+
diff --git a/delta-modulation/panel_delta.js b/delta-modulation/panel_delta.js
new file mode 100644
index 0000000..aed7a0e
--- /dev/null
+++ b/delta-modulation/panel_delta.js
@@ -0,0 +1,785 @@
+// Canned documentation blurbs for each panel are defined as consts throughout this file.
+// Panel base class; should be extended with a drawPanel method.
+const log10 = Math.log(10);
+class Panel {
+ constructor(background = "white", stroke = "black", strokeWeight = 1, fill = "black") {
+ this.background = background;
+ this.stroke = stroke;
+ this.strokeWeight = strokeWeight;
+ this.fill = fill;
+ this.xAxis= "Time";
+ this.yAxis = "Amp";
+ this.tickTextSize = 9;
+ this.numTimeTicks = 8;
+ this.numFreqTicks = 4;
+ this.name = "Base Panel Class";
+ this.description = "This is the base class that other panels inherit from. If you can see this and you are not reading the source code right now, there is probably a problem. Please open an issue or otherwise contact the project maintainers.";
+ }
+
+ setup(p, height, width, settings) {
+ this.settings = settings;
+ this.buffer = p.createGraphics(1,1);
+ this.resize(height, width);
+ this.bufferInit();
+ this.buffer.textFont('Helvetica',20);
+ this.buffer.textAlign(p.CENTER);
+ }
+
+ resize(h, w) {
+ this.buffer.resizeCanvas(w, h);
+ this.xbezel = Math.max(70, w * 0.1);
+ this.xbezelLeft = 0.60 * this.xbezel;
+ this.xbezelRight = 0.40 * this.xbezel;
+ this.ybezel = Math.max(20, h *0.1);
+ this.halfh = h/2;
+ this.plotHeight = h - 2 * this.ybezel;
+ this.plotWidth = w - this.xbezel;
+ this.plotLeft = this.xbezelLeft; // the x coord. of the left side of the plot
+ this.plotRight = w - this.xbezelRight; // ditto of the right side of the plot
+ this.plotTop = this.ybezel; // y coord. of top
+ this.plotBottom = h - this.ybezel; // y coord. of bottom
+ }
+
+ bufferInit(){
+ this.buffer.background(this.background);
+ this.buffer.fill(this.fill);
+ this.buffer.stroke(this.stroke);
+ this.buffer.strokeWeight(this.strokeWeight);
+ }
+
+ drawStem(x, y, startHeight, ellipseSize = this.ellipseSize){
+ let actual_y = y;
+ // clamp the stem endpoint to the plot viewport
+ y = (y < this.plotTop) ? this.plotTop : y;
+ y = (y > this.plotBottom) ? this.plotBottom : y;
+ this.buffer.line(x, startHeight, x, y);
+ // hide the lollipop head when the true value lies outside the plot
+ ellipseSize = (actual_y < this.plotTop || actual_y > this.plotBottom) ? 0 : ellipseSize;
+ this.buffer.ellipse(x, y, ellipseSize);
+ };
+
+ setbackground(backgroundClr){ this.background = backgroundClr; }
+ setStroke(strokeClr){ this.stroke = strokeClr; }
+ setStrokeWeight(strokeWgt){ this.strokeWeight = strokeWgt; }
+ setFill(fillClr){ this.fill = fillClr; }
+
+ drawBorder(){
+ this.buffer.stroke(this.stroke);
+ this.buffer.line(this.plotLeft, this.plotTop, this.plotLeft, this.plotBottom);
+ this.buffer.line(this.plotLeft, this.plotTop, this.plotRight, this.plotTop);
+ this.buffer.line(this.plotRight, this.plotTop, this.plotRight, this.plotBottom);
+ this.buffer.line(this.plotLeft, this.plotBottom, this.plotRight, this.plotBottom);
+ }
+
+ drawPanel(){}
+}
+
+class freqPanel extends Panel{
+ constructor(){
+ super();
+ this.xAxis = "Frequency";
+ }
+
+ drawPeak(x,height,base,colour="black"){
+ if (x < this.plotLeft || x > this.plotRight) return;
+ height = Math.abs(height);
+ this.buffer.fill(colour);
+ this.buffer.stroke(colour);
+ this.buffer.beginShape();
+ let x1=x-2; let x2 = x+2;
+ x1 = Math.max(x1, this.plotLeft);
+ x2 = Math.min(x2, this.plotRight);
+ this.buffer.vertex(x1, base);
+ this.buffer.vertex(x, this.plotBottom-height);
+ this.buffer.vertex(x2, base);
+ this.buffer.vertex(x, base);
+ this.buffer.endShape();
+ this.buffer.stroke(this.stroke); this.buffer.fill(this.fill);
+ }
+}
+
+function linToDB(a, a_0 = 1)
+{
+ return 20 * Math.log(a / a_0) / log10;
+}
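+// e.g. linToDB(0.5) ~= -6.02, i.e. half amplitude is about -6 dB relative to a_0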
+
+const midline_doc='The horizontal middle line represents an amplitude of zero. ';
+function drawMidLine(panel) {
+ // panel.buffer.drawingContext.setLineDash([5,5]);
+ panel.buffer.stroke("gray");
+ panel.buffer.line(panel.plotLeft, panel.halfh, panel.plotRight, panel.halfh);
+ panel.buffer.stroke(panel.stroke);
+ // panel.buffer.drawingContext.setLineDash([]);
+}
+
+const time_signal_doc='Because this signal approximates a continuous analog signal in our simulation, the signal value is drawn with a simple interpolation scheme. There are currently bugs with this interpolation when zooming in (time zoom > 100%). In addition, visual aliasing may occur when viewing high frequency signals due to the limited number of pixels on the screen acting as a kind of spatial sampling process. This may appear as amplitude modulation in the plot that is not actually present in the signal. Finally, note that the amplitude of the signal is clipped to the size of the panel viewport. This visual clipping happens regardless of whether the signal itself actually exhibits clipping. ';
+function drawSignal(panel, signal)
+{
+ let pixel_max = panel.plotHeight/2;
+ let pixel_per_fullscale = pixel_max * panel.settings.ampZoom;
+ panel.buffer.noFill();
+ panel.buffer.beginShape();
+ let max_x = 10000;
+ panel.buffer.curveTightness(1.0);
+ for (let x = 0; x < max_x; x++) {
+ let pixel_x = (x/max_x)*panel.plotWidth/panel.settings.timeZoom;
+ // linear interpolation between the two nearest simulated samples
+ let x0 = Math.floor(pixel_x);
+ let frac = pixel_x - x0;
+ let amp = signal[x0] + frac * (signal[Math.min(x0 + 1, signal.length - 1)] - signal[x0]);
+ let pixel_amp = pixel_per_fullscale * amp;
+ let y = panel.halfh - pixel_amp;
+ // clamp the trace to the plot viewport
+ y = (y < panel.plotTop) ? panel.plotTop : y;
+ y = (y > panel.plotBottom) ? panel.plotBottom : y;
+ panel.buffer.curveVertex((x/max_x)*panel.plotWidth + panel.plotLeft, y);
+ }
+ panel.buffer.endShape();
+}
+
+function drawDeltaModulation(panel, signal) {
+ let pixel_max = panel.plotHeight/2;
+ panel.buffer.noFill();
+ panel.buffer.beginShape();
+ panel.buffer.curveTightness(1.0);
+ let visibleSamples = Math.floor(panel.plotWidth / panel.settings.downsamplingFactorDelta/panel.settings.timeZoom+1);
+ let ypos = panel.halfh;
+ let counter = 0; //Consecutive similar bits counter for adaptive modulation
+ let lastBit = 0;
+ for (let x = 0; x < visibleSamples; x++) {
+ let xpos = Math.round(panel.plotLeft + x * panel.settings.downsamplingFactorDelta*panel.settings.timeZoom);
+ panel.buffer.curveVertex(xpos, ypos);
+ if (pixel_max * signal[Math.floor((x/visibleSamples)*panel.plotWidth/panel.settings.timeZoom)]*panel.settings.ampZoom < panel.halfh - ypos) {
+ if (panel.settings.deltaType == "adaptive") {
+ if (lastBit == 1) { //If the last bit is similar to this one, increment counter, otherwise reset
+ counter++;
+ } else {counter = 0; lastBit = 1;}
+ }
+ ypos += panel.settings.deltaStep*panel.plotHeight*(1+Math.floor(counter/panel.settings.adaptiveNumSteps)); //For adaptive modulation, increase the multiplier every numSteps consecutive steps
+ } else {
+ if (panel.settings.deltaType == "adaptive") {
+ if (lastBit == 0) {
+ counter++;
+ } else {counter = 0; lastBit = 0;}
+ }
+ ypos -= panel.settings.deltaStep*panel.plotHeight*(1+Math.floor(counter/panel.settings.adaptiveNumSteps));
+ }
+ ypos = (ypos < panel.plotTop) ? panel.plotTop : ypos;
+ ypos = (ypos > panel.plotBottom) ? panel.plotBottom : ypos;
+ panel.buffer.curveVertex(xpos, ypos);
+ }
+ panel.buffer.endShape();
+}
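+
+// For reference, a sketch of the core encoding idea behind the plot above,
+// independent of the drawing code: plain (non-adaptive) delta modulation emits
+// one bit per sample, stepping a running estimate up or down by a fixed
+// amount. `step` is a hypothetical parameter here, not a project setting.
+function deltaModulateSketch(signal, step) {
+ let estimate = 0;
+ let bits = [];
+ for (let n = 0; n < signal.length; n++) {
+ if (signal[n] > estimate) { bits.push(1); estimate += step; }
+ else { bits.push(0); estimate -= step; }
+ }
+ return bits; // the decoder repeats the same stepping to reconstruct the signal
+}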
+
+const lollipop_doc='Because this signal represents the discrete time output of the analog-to-digital conversion process, it is drawn with a lollipop plot where each stem represents a single sample. ';
+function drawDiscreteSignal(panel,signal){
+ let gain = panel.plotHeight/2;
+ let visibleSamples = Math.floor(panel.plotWidth / panel.settings.downsamplingFactor/panel.settings.timeZoom+1);
+ for (let x = 0; x < visibleSamples; x++) {
+ let xpos = Math.round(panel.plotLeft + x * panel.settings.downsamplingFactor*panel.settings.timeZoom);
+ let ypos = panel.halfh - gain * signal[x]*panel.settings.ampZoom;
+ panel.drawStem(xpos,ypos,panel.halfh);
+ }
+}
+
+function drawHorizontalTick(panel, text, height, tick_length = 5, side="left") {
+ panel.buffer.fill(panel.fill);
+ panel.buffer.textFont('Helvetica', panel.tickTextSize);
+ panel.buffer.textStyle(panel.buffer.ITALIC);
+ panel.buffer.strokeWeight(0);
+ panel.buffer.textAlign(panel.buffer.RIGHT);
+ let tickStart = panel.plotLeft-tick_length;
+ let tickEnd = panel.plotLeft;
+ if (side == "right"){
+ panel.buffer.textAlign(panel.buffer.LEFT);
+ tickEnd = panel.plotRight+tick_length;
+ tickStart = panel.plotRight;
+ panel.buffer.text(text, tickEnd+2, height - panel.tickTextSize/2, panel.buffer.width , height + panel.tickTextSize/2);
+ }
+ else {
+ panel.buffer.text(text, 0, height - panel.tickTextSize/2, tickStart, height + panel.tickTextSize/2);
+ }
+
+ panel.buffer.strokeWeight(panel.strokeWeight);
+ panel.buffer.line(tickStart, height, tickEnd, height);
+}
+
+function drawVerticalTick(panel, text, x, tick_length = 5) {
+ if (x < panel.plotLeft || x > panel.plotRight){ return; }
+ panel.buffer.fill(panel.fill);
+ panel.buffer.textFont('Helvetica', panel.tickTextSize);
+ panel.buffer.textAlign(panel.buffer.CENTER);
+ panel.buffer.textStyle(panel.buffer.ITALIC);
+ panel.buffer.strokeWeight(0);
+ // we draw the text in the center of an oversized box centered over the tick
+ // 20000 pixels should be more than enough for any reasonable tick text
+ panel.buffer.text(text, x - 10000, panel.plotBottom + tick_length, 20000, panel.ybezel - tick_length);
+ panel.buffer.strokeWeight(panel.strokeWeight);
+ panel.buffer.line(x, panel.plotBottom, x, panel.plotBottom + tick_length);
+}
+
+const freq_amp_ticks_doc='Amplitude is plotted on the y-axis. Ticks on the left label the linear amplitude where 1.0 is equal to the maximum amplitude. ';
+function drawFreqAmplitudeTicks(panel, pixel_max, num_ticks) {
+ for (let i = 0; i <= num_ticks; ++i) {
+ let tick_amp_pixels = i * pixel_max / num_ticks / panel.settings.ampZoom;
+ drawHorizontalTick(panel, (tick_amp_pixels/pixel_max).toFixed(2), panel.plotBottom - tick_amp_pixels*panel.settings.ampZoom, 5, "right");
+ }
+}
+
+const amp_ticks_doc='Amplitude is plotted on the y-axis. Ticks on the left label the linear amplitude where +/- 1.0 is equal to the maximum amplitude. ';
+function drawSignalAmplitudeTicks(panel, pixel_max, num_ticks) {
+ for (let i = 1; i <= num_ticks; ++i) {
+ let tick_amp_pixels = i * pixel_max / num_ticks / panel.settings.ampZoom;
+ // let tick_amp_db = linToDB(tick_amp_pixels, pixel_max);
+ drawHorizontalTick(panel, (tick_amp_pixels/pixel_max).toFixed(2), panel.halfh - tick_amp_pixels*panel.settings.ampZoom,5,"right");
+ drawHorizontalTick(panel, (-tick_amp_pixels/pixel_max).toFixed(2), panel.halfh + tick_amp_pixels*panel.settings.ampZoom,5,"right");
+ // drawHorizontalTick(panel, tick_amp_db.toFixed(1) + 'dBFS', panel.halfh - tick_amp_pixels*panel.settings.ampZoom,5, "right");
+ // drawHorizontalTick(panel, tick_amp_db.toFixed(1) + 'dBFS', panel.halfh + tick_amp_pixels*panel.settings.ampZoom,5, "right");
+ }
+ // drawHorizontalTick(panel, '-inf dBFS', panel.halfh, 5, "right");
+ drawHorizontalTick(panel, '0.00', panel.halfh, 5, "right");
+}
+
+const bin_amp_ticks_doc='Ticks on the right side of this plot label the numerical value assigned to a given amplitude by the simulated analog-to-digital conversion. The labels are written in hexadecimal unless the bit depth is 7 bits or lower, in which case the labels are in binary. ';
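+// e.g. at a bit depth of 3 the value 5 is labelled "101"; at 16 bits, 255 is labelled "0x00ff"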
+function drawSignalBinaryScaling(panel,pixel_max, num_ticks, settings){
+ let maxInt = Math.pow(2, settings.bitDepth)-1;
+ let stepSize = (settings.quantType == "midTread")? 2/(maxInt-1) : 2/(maxInt);
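+ // e.g. bitDepth = 2 gives maxInt = 3: mid-tread step = 2/2 = 1.0, mid-rise step = 2/3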
+ let numTicks = Math.min(num_ticks,maxInt+1);
+ let tickScale =(maxInt+1)/numTicks;
+ let pixel_per_fullscale = pixel_max * panel.settings.ampZoom;
+ // let stepSize = (settings.quantType == "midRise")? 2/(numTicks-1) : 2/(numTicks);
+
+ let val = -1; let tick; let plotVal;
+ for (tick = 0; tick <= numTicks; tick++) {
+ plotVal = val * pixel_per_fullscale;
+ let y = panel.halfh - plotVal;
+ // only draw ticks that land inside the plot viewport
+ if (y >= panel.plotTop-.1 && y <= panel.plotBottom+.1) {
+ if (maxInt<255){
+ //if under 8 bits, we can write out binary values
+ drawHorizontalTick(panel, (Math.round(tick*tickScale)).toString(2).padStart(settings.bitDepth,"0"), y,5,"left");
+ }
+ else {
+ //draw axis labels in hex because of limited space
+ drawHorizontalTick(panel, "0x" + (tick*tickScale).toString(16).padStart(4,"0"), y,5,"left");
+ }
+ panel.buffer.stroke("gray");
+ panel.buffer.drawingContext.setLineDash([5,5]);
+ panel.buffer.line(panel.plotLeft, y, panel.plotRight, y);
+ panel.buffer.drawingContext.setLineDash([]); // drawHorizontalTick(panel, tick.toString(2), y,5,"left");
+ }
+ val = val + stepSize*tickScale;
+ }
+
+}
+
+const time_ticks_doc='Time is plotted on the x-axis. ';
+function drawTimeTicks(panel, num_ticks, seconds_per_pixel) {
+ let tick_jump = Math.floor((panel.plotWidth) / num_ticks);
+ for (let i = 0; i < num_ticks; ++i) {
+ let x = i * tick_jump;
+ let text = (x * seconds_per_pixel * 1000).toFixed(1) + ' ms';
+ drawVerticalTick(panel, text, x + panel.plotLeft);
+ }
+}
+
+const freq_ticks_doc='Frequency is plotted on the x-axis. ';
+function drawFreqTicks(panel, num_ticks, pixels_per_hz) {
+ let hz_per_pixel = 1/pixels_per_hz;
+ let tick_jump = panel.plotWidth / num_ticks;
+ for (let i = 0; i < num_ticks; ++i) {
+ let x = i * tick_jump;
+ if (x + panel.plotLeft > panel.plotRight) return;
+ let text = (x * hz_per_pixel).toFixed(0) + ' Hz';
+ drawVerticalTick(panel, text, x + panel.plotLeft);
+ }
+}
+
+function drawName(panel){
+ panel.buffer.fill(panel.fill);
+ panel.buffer.strokeWeight(0);
+ panel.buffer.textAlign(panel.buffer.CENTER);
+ panel.buffer.textStyle(panel.buffer.NORMAL);
+ panel.buffer.textFont('Helvetica',12);
+ let textheight = panel.buffer.textSize() + panel.buffer.textDescent() + 1;
+ panel.buffer.text(panel.name, panel.plotLeft, panel.plotTop - textheight, panel.plotWidth, panel.ybezel);
+ panel.buffer.strokeWeight(panel.strokeWeight);
+}
+
+// deterministic pseudo-random RGB colour for a given image/peak index
+function getColor(num){
+ return [num*666 % 255, num*69 % 255, num*420 % 255];
+}
+
+class inputSigPanel extends Panel {
+ constructor(){
+ super();
+ this.name="Input Signal Time Domain";
+ this.description='This is a straightforward time domain plot of the input signal before "sampling", quantization, and "reconstruction". This signal corresponds with the authentic "analog" input to the simulated analog-to-digital conversion process. '
+ + time_signal_doc + time_ticks_doc + amp_ticks_doc + midline_doc;
+ }
+
+ drawPanel(){
+ this.buffer.background(this.background);
+ drawSignal(this, this.settings.original);
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+}
+
+class deltaModPanel extends inputSigPanel {
+ constructor(){
+ super();
+ this.name="Input Signal Time Domain with Delta Modulation";
+ this.description='This is an extension of the input signal time domain plot with a step function illustrating delta modulation. '
+ + time_signal_doc + time_ticks_doc + amp_ticks_doc + midline_doc;
+ }
+
+ drawPanel(){
+ this.buffer.background(this.background);
+ drawSignal(this, this.settings.original);
+ drawDeltaModulation(this, this.settings.original);
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+}
+
+class reconstructedSigPanel extends Panel {
+ constructor(){
+ super();
+ this.name="Reconstructed Signal Time Domain";
+ this.description='This is a straightforward time domain plot of the signal output from the simulated digital-to-analog conversion process. '
+ + time_signal_doc + time_ticks_doc + amp_ticks_doc + midline_doc;
+ }
+
+ drawPanel(){
+ this.buffer.background(this.background);
+ drawSignal(this, this.settings.reconstructed);
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+}
+
+class reconstructedDeltaModSigPanel extends Panel {
+ constructor(){
+ super();
+ this.name="Reconstructed Signal Time Domain using Delta Modulation";
+ this.description='This is a straightforward time domain plot of the signal output from the simulated digital-to-analog conversion process using delta modulation. '
+ + time_signal_doc + time_ticks_doc + amp_ticks_doc + midline_doc;
+ }
+
+ drawPanel(){
+ this.buffer.background(this.background);
+ drawSignal(this, this.settings.reconstructedDelta);
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+ }
+
+const analytic_frequency_doc='Spikes are drawn at the appropriate frequency and amplitude based on the analytic definition of the signal, determined by the frequency, number of harmonics, and harmonic amplitude scaling settings. As such, this plot should accurately reflect the frequency content of the signal without any influence of windowing or other considerations that would affect a discrete time Fourier transform. Unfortunately, this approach does not reflect non-linear effects such as quantization and clipping, where applicable. ';
+class inputSigFreqPanel extends freqPanel {
+ constructor(){
+ super();
+ this.name="Input Signal Frequency Domain";
+ this.description='This is a frequency domain representation of the simulated "continuous time" input signal. '
+ + analytic_frequency_doc + freq_ticks_doc + passband_doc;
+ }
+
+ drawPanel(){
+ this.buffer.background(this.background);
+ let pixels_per_hz = this.plotWidth / this.settings.maxVisibleFrequency;
+ drawPassBand(this);
+ // let harmInc = 1;
+ // if (this.settings.harmType =="Odd" || this.settings.harmType == "Even"){ harmInc=2;}
+ // let harmPeak = 1, harm =1, ampScale = 1;
+ let harm =1;
+ while (harm<=this.settings.numHarm){
+ let hz = this.settings.harmonicFreqs[harm-1];
+ let xpos = (hz * pixels_per_hz + this.plotLeft);
+ if (xpos > this.plotRight|| xpos< this.plotLeft) break;
+ // if (this.settings.harmSlope == "lin") {ampScale = 1 - (harm-1)/(this.settings.numHarm)};
+ // if (this.settings.harmSlope == "1/x") {ampScale = 1/harmPeak};
+ let height = this.settings.ampZoom * this.settings.amplitude * this.plotHeight *this.settings.harmonicAmps[harm-1];
+ this.drawPeak(xpos, height, this.plotBottom)
+ harm+=1;
+ // (harmPeak ==1 && this.settings.harmType != "Odd")? harmPeak++ : harmPeak +=harmInc;
+ }
+
+
+ this.drawBorder();
+ drawFreqTicks(this, this.numFreqTicks, pixels_per_hz);
+ drawFreqAmplitudeTicks(this, this.plotHeight, 9);
+ drawName(this);
+ }
+
+}
+
+function magnitude(real, cplx) {
+ return Math.sqrt(real * real + cplx * cplx);
+}
+
+const fft_doc='Because the FFT is used here, there are visual artifacts introduced by the windowing process, and the frequency resolution of the plot is inherently limited by the size of the FFT. Note that the resolution is not increased when zooming in with the frequency zoom slider. ';
+function drawFFT(panel, fft, tick='freq') {
+ let gain = panel.plotHeight * panel.settings.ampZoom;
+ let offset = 100;
+ let hz_per_bin = panel.settings.sampleRate / (fft.length / 2);
+ // fft.length / 2 because it is an interleaved complex array
+ // with twice as many elements as it has (complex) numbers
+ let pixels_per_hz = panel.plotWidth / panel.settings.maxVisibleFrequency;
+ let pixels_per_bin = pixels_per_hz * hz_per_bin;
+ let num_bins = Math.round(panel.plotWidth / pixels_per_bin);
+ let normalize = 4/fft.length; // one-sided amplitude normalization: 2/N, where N = fft.length/2 complex bins
+
+ panel.buffer.background(panel.background);
+ panel.buffer.stroke(panel.stroke);
+ drawPassBand(panel);
+ panel.buffer.beginShape();
+ panel.buffer.vertex(panel.plotLeft, panel.plotBottom);
+ for (let bin = 0; bin <= num_bins; bin++) {
+ let xpos = pixels_per_bin * bin + panel.plotLeft;
+ let ypos = panel.plotBottom - gain * normalize * magnitude(fft[2*bin], fft[2*bin+1]);
+ panel.buffer.vertex(xpos, ypos);
+ }
+ panel.buffer.vertex(panel.plotRight, panel.plotBottom);
+ panel.buffer.endShape(panel.buffer.CLOSE);
+ panel.buffer.strokeWeight(panel.strokeWeight);
+ panel.buffer.stroke(panel.stroke);
+ panel.drawBorder();
+ drawName(panel);
+ if (tick == 'dirac')
+ drawDiracDashes(panel);
+ else
+ drawFreqTicks(panel, panel.numFreqTicks, pixels_per_hz);
+ drawFreqAmplitudeTicks(panel, panel.plotHeight, 9);
+}
+
+class inputSigFFTPanel extends freqPanel {
+ constructor(){
+ super();
+ this.name = "Input Signal FFT";
+ this.description='This plot shows the FFT of the input signal. ' + fft_doc + 'This plot clearly reveals one of the compromises inherent in the simulation; since everything must be represented by the computer, the ideal continuous time input signal must be approximated by a discrete time signal with a sufficiently high sampling rate. ';
+ }
+
+ drawPanel() {
+ drawFFT(this, this.settings.originalFreq);
+ }
+}
+
+class sampledInputFFTPanel extends freqPanel {
+ constructor(){
+ super();
+ this.name="Sampled Signal FFT";
+ this.description='This plot shows the FFT of the signal output by the simulated analog-to-digital conversion. ' + fft_doc;
+ }
+ drawPanel() {
+ drawFFT(this, this.settings.stuffedFreq, 'dirac');
+ }
+}
+
+class reconstructedSigFFTPanel extends freqPanel {
+ constructor(){
+ super();
+ this.name="Reconstructed Signal FFT";
+ this.description='This plot shows the FFT of the signal output by the simulated digital-to-analog conversion. ' + fft_doc + 'This plot clearly reveals one of the compromises inherent in the simulation; since everything must be represented by the computer, the ideal continuous time output signal must be approximated by a discrete time signal with a sufficiently high sampling rate. ';
+ }
+ drawPanel() {
+ drawFFT(this, this.settings.reconstructedFreq);
+ }
+}
+
+class reconstructedDeltaModSigFFTPanel extends freqPanel {
+ constructor(){
+ super();
+ this.name="Reconstructed Signal using Delta Modulation FFT";
+ this.description='This plot shows the FFT of the signal output by the simulated digital-to-analog conversion using delta modulation. ' + fft_doc + 'This plot clearly reveals one of the compromises inherent in the simulation; since everything must be represented by the computer, the ideal continuous time output signal must be approximated by a discrete time signal with a sufficiently high sampling rate. ';
+ }
+ drawPanel() {
+ drawFFT(this, this.settings.reconstructedDeltaFreq);
+ }
+ }
+
+class impulsePanel extends Panel {
+ constructor(){
+ super()
+ this.strokeWeight=1;
+ this.ellipseSize=5;
+ this.name = "Sampling Signal Time Domain";
+ this.description = 'This is a time domain plot of the dirac comb used to sample the input signal. Before quantization, the input signal is multiplied with this dirac comb; this is the "sampling" part of the analog-to-digital conversion process. '
+ + time_ticks_doc;
+ }
+ drawPanel(){
+ let base = this.plotBottom;
+ let ytop = this.plotTop + 10;
+ this.buffer.background(this.background);
+ this.drawBorder();
+
+ let visibleSamples = Math.floor(this.plotWidth / this.settings.downsamplingFactor/this.settings.timeZoom+1);
+ for (let x = 0; x < visibleSamples; x++) {
+ let xpos = this.plotLeft + x * this.settings.downsamplingFactor*this.settings.timeZoom;
+ this.drawStem(xpos,ytop,base);
+ }
+ //I'm not sure dBs make sense here
+ // drawHorizontalTick(this, '0.0 dB', ytop);
+ // drawHorizontalTick(this, '-inf dB', base);
+ drawHorizontalTick(this, '1.0', ytop,5,"right");
+ drawHorizontalTick(this, '0.0', base,5,"right");
+
+ drawTimeTicks(this, this.numTimeTicks, this.settings.timeZoom/(this.settings.sampleRate));
+ drawName(this);
+ }
+}
+
+class impulseFreqPanel extends freqPanel {
+ constructor(){
+ super();
+ this.name="Sampling Signal Frequency Domain";
+ this.description = 'This is a frequency domain plot of the dirac comb used to sample the input signal. The sampling process causes the frequency content of the input signal to be convolved with the frequency response of the dirac comb, resulting in periodic images of the input signal spectrum at multiples of the sampling frequency. ';
+ }
+ drawPanel(){
+ this.bufferInit();
+ let base = this.plotBottom;
+ let pixels_per_hz = this.plotWidth / this.settings.maxVisibleFrequency;
+ let sampleRate = this.settings.sampleRate / this.settings.downsamplingFactor;
+ let numPeaks = Math.round(this.settings.maxVisibleFrequency / sampleRate);
+
+ for (let peak = 0; peak <= numPeaks; peak++) {
+ let hz = peak * this.settings.sampleRate / this.settings.downsamplingFactor;
+ let xpos = hz * pixels_per_hz + this.plotLeft;
+ let color = getColor(peak);
+ this.drawPeak(xpos, this.plotHeight, base, color)
+ let text = peak.toFixed(0) + ' fs';
+ drawVerticalTick(this, text, xpos);
+ }
+
+ drawFreqAmplitudeTicks(this, this.plotHeight, 9);
+ this.drawBorder();
+ drawName(this);
+ }
+}
+
+class sampledInputPanel extends Panel{
+ constructor(){
+ super()
+ this.strokeWeight=1;
+ this.ellipseSize=5;
+ this.name="Sampled Signal Time Domain";
+ this.description = lollipop_doc + time_ticks_doc + amp_ticks_doc + bin_amp_ticks_doc + midline_doc;
+ }
+
+ drawPanel(){
+ this.buffer.background(this.background);
+ drawDiscreteSignal(this,this.settings.downsampled)
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawSignalBinaryScaling(this, this.plotHeight/2, 16,this.settings);
+
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+}
+
+const passband_doc='The frequency range below the Nyquist frequency is highlighted by a light grey background. ';
+function drawPassBand(panel) {
+ let sampleRate = panel.settings.sampleRate/panel.settings.downsamplingFactor;
+ let pixels_per_hz = panel.plotWidth / panel.settings.maxVisibleFrequency;
+ panel.buffer.strokeWeight(0);
+ panel.buffer.fill(235);
+ let passbandcutoff = sampleRate/2;
+ let passbandpixelwidth = passbandcutoff * pixels_per_hz;
+ panel.buffer.rect(panel.plotLeft, panel.plotTop, passbandpixelwidth, panel.plotHeight);
+ panel.buffer.strokeWeight(panel.strokeWeight);
+ panel.buffer.fill(panel.fill);
+}
+
+function calculateNumImages(settings) {
+ // calculate the number of spectral images to draw so that the highest frequency
+ // image's lowest negative harmonic is visible
+ let sampleRate = settings.sampleRate / settings.downsamplingFactor;
+ let max_harmonic = settings.harmonicFreqs[settings.harmonicFreqs.length - 1];
+ let numImages = 0;
+ while (numImages * sampleRate - max_harmonic < settings.maxVisibleFrequency)
+ numImages++;
+ return numImages;
+}
+
+function drawDiracDashes(panel) {
+ let sampleRate = panel.settings.sampleRate / panel.settings.downsamplingFactor;
+ let pixels_per_hz = panel.plotWidth / panel.settings.maxVisibleFrequency;
+ let numImages = calculateNumImages(panel.settings);
+
+ for (let image = 0; image <= numImages; image++) {
+ let color = getColor(image);
+ let imagehz = image * sampleRate; // frequency of a dirac comb harmonic that the input spectrum is convolved with
+ let xpos = imagehz * pixels_per_hz + panel.plotLeft;
+
+ // draw the dotted line associated with this dirac comb image
+ panel.buffer.stroke(color);
+ panel.buffer.drawingContext.setLineDash([5,5]);
+ panel.buffer.line(xpos, panel.plotTop, xpos, panel.plotBottom);
+ panel.buffer.drawingContext.setLineDash([]);
+
+ // label the dotted line associated with this dirac comb image
+ let fstext = imagehz.toFixed(0) + ' Hz';
+ drawVerticalTick(panel, fstext, xpos);
+ }
+}
+
+class sampledInputFreqPanel extends freqPanel{
+ constructor(){
+ super();
+ this.name = "Sampled Signal Frequency Domain";
+ this.description='This is a frequency domain representation of the output from the simulated analog-to-digital conversion process. ' + analytic_frequency_doc + 'Notice that periodic images of the input signal are present at multiples of the sampling frequency. These are later removed by the digital-to-analog conversion process, leaving only the frequency content below the Nyquist frequency (whether that content was present in the original signal or was introduced by one of the periodic images at multiples of the sampling frequency, i.e. aliasing). '
+ + freq_ticks_doc + passband_doc;
+ }
+
+ drawPanel(){
+ this.buffer.background(this.background);
+ this.buffer.stroke(this.stroke);
+ drawPassBand(this);
+ drawDiracDashes(this);
+
+ let base = this.plotBottom;
+ let sampleRate = this.settings.sampleRate / this.settings.downsamplingFactor;
+ let pixels_per_hz = this.plotWidth / this.settings.maxVisibleFrequency;
+ let numImages = calculateNumImages(this.settings);
+
+ for (let image = 0; image <= numImages; image++) {
+
+ let color = getColor(image);
+ let imagehz = image * sampleRate; // frequency of a dirac comb harmonic that the input spectrum is convolved with
+
+ for (let harm = 1; harm <= this.settings.numHarm; harm++) {
+
+ let hzNegative = imagehz - this.settings.harmonicFreqs[harm-1];
+ let hzPositive = imagehz + this.settings.harmonicFreqs[harm-1];
+
+ if (hzNegative < 0) hzNegative = -hzNegative; // reflect at 0. TODO: should technically use a new color.
+ // don't reflect at sampleRate because we are already drawing the negative frequency images
+
+ let positiveHeight = this.settings.ampZoom * this.settings.amplitude*this.plotHeight*this.settings.harmonicAmps[harm-1];
+ let negativeHeight = this.settings.ampZoom * this.settings.amplitude*this.plotHeight*this.settings.harmonicAmps[harm-1];
+ let xNegative = hzNegative * pixels_per_hz + this.plotLeft;
+ let xPositive = hzPositive * pixels_per_hz + this.plotLeft;
+ if (xNegative < this.plotRight) this.drawPeak(xNegative, negativeHeight, base, color);
+ if (xPositive < this.plotRight) this.drawPeak(xPositive, positiveHeight, base, color);
+ }
+ }
+
+ this.drawBorder();
+ drawFreqAmplitudeTicks(this, this.plotHeight, 9);
+ drawName(this);
+ }
+}
+
+class quantNoisePanel extends Panel{
+ constructor(){
+ super()
+ this.strokeWeight=1;
+ this.ellipseSize=5;
+ this.name ="Quantization Noise Time Domain";
+ this.description = 'This plot shows the difference between the sampled signal before and after quantization, representing the error introduced by the quantization process. '
+ + time_ticks_doc + amp_ticks_doc + midline_doc;
+ }
+ drawPanel(){
+ this.buffer.background(this.background);
+ drawDiscreteSignal(this, this.settings.quantNoise);
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+}
+
+class quantNoiseFFTPanel extends Panel{
+ constructor(){
+ super();
+ this.name ="Quantization Noise FFT";
+ this.description = 'This plot shows the frequency content of the error introduced by the quantization process. '
+ + fft_doc + freq_ticks_doc + passband_doc;
+ this.ellipseSize=2;
+ this.xAxis = "Frequency";
+ }
+ drawPanel(){
+ drawFFT(this, this.settings.quantNoiseFreq);
+ }
+}
+
+class inputPlusSampledPanel extends Panel {
+ constructor() {
+ super();
+ this.name = "Input with Sampled Signal Time Domain";
+ this.description = 'This plot shows the input signal with the sampled signal overlaid on top. See the documentation for the input signal time domain and sampled signal time domain plots for more information. ';
+ this.ellipseSize = 5;
+ }
+
+ drawPanel() {
+ this.buffer.background(this.background);
+ drawDiscreteSignal(this,this.settings.downsampled)
+ this.buffer.stroke("gray");
+ drawSignal(this, this.settings.original);
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawSignalBinaryScaling(this, this.plotHeight/2, 16,this.settings);
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+}
+class inputPlusSampledPanel_no_binary extends Panel {
+ constructor() {
+ super();
+ this.name = "Input with Sampled Signal Time Domain";
+ this.description = 'This plot shows the input signal with the sampled signal overlaid on top. See the documentation for the input signal time domain and sampled signal time domain plots for more information. ';
+ this.ellipseSize = 5;
+ }
+
+ drawPanel() {
+ this.buffer.background(this.background);
+ drawDiscreteSignal(this,this.settings.downsampled)
+ this.buffer.stroke("gray");
+ drawSignal(this, this.settings.original);
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+}
+
+class allSignalsPanel extends Panel {
+ constructor() {
+ super();
+ this.name = "Input (solid), Sampled (lollipop), Reconstructed (dotted), Time Domain";
+ this.description = 'This plot combines the input signal, sampled signal, and reconstructed signal time domain plots. See the documentation for each individual plot for more information. ';
+ this.ellipseSize = 5;
+
+ }
+
+ drawPanel() {
+ this.buffer.background(this.background);
+ drawDiscreteSignal(this,this.settings.downsampled)
+ drawSignal(this, this.settings.original);
+ this.buffer.drawingContext.setLineDash([5,5]);
+ drawSignal(this, this.settings.reconstructed);
+ this.buffer.drawingContext.setLineDash([]);
+ drawMidLine(this);
+ drawName(this);
+ drawSignalAmplitudeTicks(this, this.plotHeight/2, 4);
+ drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate));
+ this.drawBorder();
+ }
+}
diff --git a/delta-modulation/slider_delta.js b/delta-modulation/slider_delta.js
new file mode 100644
index 0000000..a9eff7a
--- /dev/null
+++ b/delta-modulation/slider_delta.js
@@ -0,0 +1,355 @@
+class slider{
+ button;
+ slider;
+ constructor(){
+ }
+
+ setup(p, settings){
+ // should be overridden to set up the slider
+ }
+
+ updateValue(p){
+ this.settings[this.propName] = this.slider.value();
+ this.displayVal = this.calcDisplayVal();
+ this.textBox.value(this.displayVal);
+ this.textLabel.html(this.name+': ');
+ }
+
+ onEdit(){
+ this.updateValue();
+ this.settings.render();
+ this.settings.p5.draw();
+ }
+
+ makeSlider(p){
+ this.slider = p.createSlider(this.min, this.max, this.initial, this.step);
+ this.textLabel = p.createP();
+ this.slider.input(this.onEdit.bind(this));
+ this.slider.mousePressed(this.onEdit.bind(this));
+ this.slider.mouseReleased(this.onEdit.bind(this));
+ this.textBox = p.createInput();
+ this.textBox.size(300);
+ this.button = p.createButton("Update");
+ // this.button.size(200)
+ this.button.mousePressed(this.buttonPressed.bind(this));
+ this.button.mouseReleased(this.onEdit.bind(this));
+ this.slider.parent(this.settings.element.id);
+ //this.button.parent(this.settings.element.id);
+ //this.textBox.parent(this.settings.element.id);
+ this.textLabel.parent(this.settings.element.id);
+ }
+
+ resize(x, y, w, p){
+ let width = w - 20;
+ let labelWidth = 160;
+ width -= labelWidth;
+ let sliderWidth = width * 0.6;
+ width -= sliderWidth;
+ let textboxWidth = width * 0.4;
+ width -= textboxWidth;
+ let buttonWidth = width;
+
+ this.slider.style('width', Math.round(sliderWidth).toString() + "px");
+ console.log("normal slider", x, y, w)
+ this.slider.position(x, y);
+ this.textLabel.position(x+sliderWidth+8, y-18);
+ this.textBox.position(x+sliderWidth+labelWidth,y+8);
+ this.textBox.style('width', Math.round(textboxWidth).toString() + "px");
+ this.button.position(x+sliderWidth+labelWidth+textboxWidth,y+8);
+ this.button.style('width', Math.round(buttonWidth).toString() + "px");
+ }
+ buttonPressed(){
+ this.slider.value(this.calcSliderVal());
+ }
+
+ calcSliderVal(){
+ // override this with any calculations needed to convert textbox val to slider val (%, etc)
+ return this.textBox.value();
+ }
+ calcDisplayVal(){
+ // override this with any calculations needed to convert stored variable to display val (%, etc)
+ return this.settings[this.propName];
+ }
+ }
+
+
+ class freqSlider extends slider{
+ setup(p,settings){
+ this.settings = settings;
+ this.name ="Frequency (Hz)";
+ this.propName = "fundFreq";
+ this.min = 30;
+ this.max = this.settings.sampleRate / 4 ;
+ this.initial = 440;
+ this.step = 1.0;
+ this.displayVal = this.initial;
+ this.makeSlider(p);
+ }
+
+ }
+
+ class numHarmSlider extends slider{
+ setup(p,settings){
+ this.settings = settings;
+ this.name ="Num. of harmonics";
+ this.propName="numHarm";
+ this.min = 1;
+ this.max = 100;
+ this.initial = 1;
+ this.step = 1;
+ this.displayVal = this.initial;
+ this.oddEvenSel = p.createSelect();
+ this.oddEvenSel.option("Odd");
+ this.oddEvenSel.option("Even");
+ this.oddEvenSel.option("All");
+ this.oddEvenSel.selected(this.settings.harmType);
+ this.oddEvenSel.changed(()=>{this.settings.harmType = this.oddEvenSel.value();this.onEdit();});
+ this.oddEvenSel.parent(this.settings.element.id);
+
+ this.slopeSel = p.createSelect();
+ this.slopeSel.option("1/x");
+ this.slopeSel.option("1/x2");
+ this.slopeSel.option("lin");
+ this.slopeSel.option("flat");
+ this.slopeSel.option("log");
+ this.slopeSel.selected(this.settings.harmSlope);
+ this.slopeSel.changed(()=>{this.settings.harmSlope = this.slopeSel.value();this.onEdit();});
+ this.slopeSel.parent(this.settings.element.id);
+ this.makeSlider(p);
+ }
+ resize(x, y, w, p){
+
+ let width = w - 20;
+ let labelWidth = 160;
+ width -= labelWidth;
+ let sliderWidth = width * 0.6; // slider + dropdowns
+ width -= sliderWidth;
+ let dropDownWidth = 50; // Make slider + dropdown the same width as other sliders.
+ sliderWidth = sliderWidth-(dropDownWidth*2)-20; // Slider
+ let textboxWidth = width * 0.5;
+ let buttonWidth = width*.5;
+
+ this.slider.style('width', Math.round(sliderWidth).toString() + "px");
+ console.log("harmSlider", x, y, w);
+ this.slider.position(x, y);
+ this.oddEvenSel.style('width', Math.round(dropDownWidth).toString() + "px");
+ this.oddEvenSel.position(x+sliderWidth+20,y);
+ this.slopeSel.style('width', Math.round(dropDownWidth).toString() + "px");
+ this.slopeSel.position(x+sliderWidth+20+dropDownWidth,y);
+ this.textLabel.position(x+sliderWidth+20+2*dropDownWidth+7, y-18);
+ this.textBox.position(x+sliderWidth+22+2*dropDownWidth+labelWidth,y+8);
+ this.textBox.style('width', Math.round(textboxWidth).toString() + "px");
+ this.button.position(x+sliderWidth+22+2*dropDownWidth+labelWidth+textboxWidth,y+8);
+ this.button.style('width', Math.round(buttonWidth).toString() + "px");
+ }
+ }
+
+
+ class sampleRateSlider extends slider{
+ setup(p,settings){
+ this.settings = settings;
+ this.name ="Sample Rate (Hz):";
+ this.propName="downsamplingFactor";
+ this.min = p.log(3000)/p.log(2);
+ this.max = p.log(48000)/p.log(2);
+ this.initial = p.log(48000)/p.log(2);
+ this.step = 0.1
+ this.makeSlider(p);
+ }
+ calcDisplayVal(){
+ // display the effective sample rate in Hz
+ return this.displayVal = Math.round(this.settings.sampleRate / this.settings.downsamplingFactor);
+ }
+ calcSliderVal(){
+ return Math.log(this.textBox.value())/Math.log(2);
+ }
+
+ updateValue(p){
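+ // the slider value is log2(sample rate); invert it and round to an integer downsampling factor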
+ this.settings.downsamplingFactor = Math.round(WEBAUDIO_MAX_SAMPLERATE/Math.pow(2, this.slider.value()));
+ this.displayVal = this.calcDisplayVal();
+ this.textBox.value(this.displayVal);//
+ this.textLabel.html(this.name);// + p.round(this.settings.sampleRate / this.settings.downsamplingFactor / 1000, 3) + " kHz")
+ }
+ }
+
+ class sampleRateDeltaSlider extends sampleRateSlider{
+ setup(p,settings){
+ this.settings = settings;
+ this.name ="Delta Sample Rate (Hz):";
+ this.propName="downsamplingFactorDelta";
+ this.min = p.log(1500)/p.log(2);
+ this.max = p.log(this.settings.deltaFrequency)/p.log(2);
+ this.initial = p.log(1500)/p.log(2);
+ this.step = 0.1
+ this.makeSlider(p);
+ }
+
+ calcDisplayVal(){
+ // display the effective delta-modulation sample rate in Hz
+ return this.displayVal = Math.round(this.settings.deltaFrequency / this.settings.downsamplingFactorDelta);
+ }
+
+ updateValue(p){
+ this.settings.downsamplingFactorDelta = Math.round(this.settings.deltaFrequency/Math.pow(2, this.slider.value()))/32;
+ this.displayVal = this.calcDisplayVal();
+ this.textBox.value(this.displayVal);//
+ this.textLabel.html(this.name);// + p.round(this.settings.sampleRate / this.settings.downsamplingFactor / 1000, 3) + " kHz")
+ }
+ }
+
+ class deltaStepSlider extends slider {
+ setup(p,settings){
+ this.settings = settings;
+ this.name ="Delta Step (%)";
+ this.propName="deltaStep";
+ this.min = 0.001;
+ this.max = 0.1;
+ this.initial = 0.05;
+ this.step = 0.001;
+ this.makeSlider(p);
+ }
+
+ }
+
+ class ditherSlider extends slider {
+ setup(p,settings){
+ this.settings = settings;
+ this.name ="Dither";
+ this.propName="dither";
+ this.min = 0.0;
+ this.max = 1.0;
+ this.initial = 0.0;
+ this.step = 0.01;
+ this.makeSlider(p);
+ }
+
+ }
+
+ class bitDepthSlider extends slider {
+ setup(p,settings){
+ this.settings = settings;
+ this.name ="Bit Depth";
+ this.propName = "bitDepth";
+ this.min = 1;
+ this.max = BIT_DEPTH_MAX;
+ this.initial = BIT_DEPTH_MAX;
+ this.step = 1;
+ this.makeSlider(p);
+ }
+
+ }
+
+ class amplitudeSlider extends slider {
+ setup(p,settings){
+ this.settings = settings;
+ this.propName ="amplitude";
+ this.name = "Amplitude";
+ this.min = 0.0;
+ this.max = 5;
+ this.initial = 1.0;
+ this.step = 0.01;
+ this.makeSlider(p);
+ }
+
+ }
+
+ class antialiasingSlider extends slider {
+ setup(p, settings){
+ this.settings = settings;
+ this.propName ="antialiasing";
+ this.name = "Antialiasing filter order";
+ this.min = 0.0;
+ this.max = 200;
+ this.initial = 0;
+ this.step = 10;
+ this.makeSlider(p);
+ }
+ }
+
+ class phaseSlider extends slider{
+ setup(p,settings){
+ this.settings = settings;
+ this.propName ="phase";
+ this.name = "Phase (Degrees)";
+ this.min = 0;
+ this.max = 360; // degrees
+ this.initial = 0.0;
+ this.step = 1; // one degree
+ this.makeSlider(p);
+ }
+
+ calcDisplayVal(){return this.settings[this.propName];}
+ }
+ class zoomSlider extends slider{
+ calcDisplayVal(){return this.settings[this.propName]*100;}
+ calcSliderVal(){
+ if (isNaN(this.textBox.value())){
+ return this.slider.value();
+ }
+ else{
+ return this.textBox.value()/100;
+ }
+ }
+ }
+ class ampZoomSlider extends zoomSlider{
+ setup(p,settings){
+ this.settings = settings;
+ this.name ="Amp. Zoom (%)";
+ this.propName="ampZoom";
+ this.min = .1;
+ this.max = 4.0;
+ this.initial =1.0;
+ this.step = .01;
+ this.makeSlider(p);
+ }
+ }
+
+ const minTimeZoom = .30;
+ class timeZoomSlider extends zoomSlider{
+ setup(p,settings){
+ this.settings = settings;
+ this.propName ="timeZoom";
+ this.name = "Time zoom (%)"
+ this.min = minTimeZoom;
+ this.max = 5;
+ this.initial = 1.0;
+ this.step = .01;
+ this.makeSlider(p);
+ }
+
+ }
+
+ const minFreqZoom = 0.5;
+ class freqZoomSlider extends zoomSlider{
+ setup(p,settings){
+ this.settings = settings;
+ this.propName ="freqZoom";
+ this.min = minFreqZoom;
+ this.max = 3;
+ this.initial = 1.0;
+ this.step = .01;
+ this.makeSlider(p);
+ }
+ updateValue(p){
+ this.settings.freqZoom = this.slider.value();
+ this.settings.maxVisibleFrequency = WEBAUDIO_MAX_SAMPLERATE/2/this.settings.freqZoom;
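+ // e.g. freqZoom = 2.0 shows only 0..WEBAUDIO_MAX_SAMPLERATE/4, i.e. half the full band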
+ this.textBox.value(this.settings.freqZoom*100);
+ this.textLabel.html('Freq. zoom (%):');
+ }
+ }
+
+ class freqZoomSlider_tutorial_1 extends zoomSlider{
+ setup(p,settings){
+ this.settings = settings;
+ this.propName ="freqZoom";
+ this.min = 1.0;
+ this.max = 3;
+ this.initial = 2.0;
+ this.step = .01;
+ this.makeSlider(p);
+ }
+ updateValue(p){
+ this.settings.freqZoom = this.slider.value();
+ this.settings.maxVisibleFrequency = WEBAUDIO_MAX_SAMPLERATE/2/this.settings.freqZoom;
+ this.textBox.value(this.settings.freqZoom*100);
+ this.textLabel.html('Freq. zoom (%):');
+ }
+ }
+
\ No newline at end of file
diff --git a/delta-modulation/styles_delta.css b/delta-modulation/styles_delta.css
new file mode 100644
index 0000000..004fe36
--- /dev/null
+++ b/delta-modulation/styles_delta.css
@@ -0,0 +1,155 @@
+body {
+ font-family: Cantarell, sans-serif;
+ line-height: 1.5;
+ }
+
+ canvas {
+ width: 100% !important;
+ height: 100% !important;
+ text-align: center;
+ }
+
+ sup { vertical-align: top; position: relative; top: -0.5em; }
+
+ footer{
+ font-family: Cantarell, sans-serif;
+ padding: 5px 10px 0px;
+ background: lightgrey;
+ bottom: 0;
+ left: 0;
+ font-size: 9pt;
+ color: grey;
+ text-align: center;
+ }
+ .tab_questions {
+ overflow: hidden;
+ border: 1px solid #ccc;
+ background-color: #f1f1f1;
+ }
+ .tab_questions button {
+ background-color: inherit;
+ float: left;
+ border: none;
+ outline: none;
+ cursor: pointer;
+ padding: 0px 0px;
+ transition: 0.3s;
+ font-size: 0px;
+ }
+ .tab_questions_content {
+
+ display: none;
+ padding: 0px 0px;
+ border: 1px solid #ccc;
+ border-top: none;
+ }
+
+ .previous:hover {
+ background-color: #ddd;
+ color: black;
+ }
+
+ .tabcontent {
+ margin-left: 30px;
+ margin-right: 50px;
+ display: none;
+ }
+ .title{
+ margin-left: 100px;
+ }
+ .intro_text{
+ margin-left: 100px;
+ }
+ .button {
+ text-decoration: none;
+ position :relative;
+ z-index: 0;
+ display: inline-block;
+ padding: 8px 25px;
+ background-color: rgb(255, 255, 255);
+ color: black;
+ border: solid;
+ margin-right: 2px;
+ margin-top: 3px;
+ margin-bottom: 100px;
+ }
+ .input {
+ margin-bottom: 100px;
+ }
+ .button:hover{
+ background-color: #ddd;
+ color: black;
+ }
+ .button_round{
+ text-decoration: none;
+ display: inline-block;
+ padding: 8px 16px;
+ background-color: rgb(255, 255, 255);
+ color: black;
+ border: 2px solid black;
+ margin-right: 2px;
+
+ margin-top: 3px;
+ border-radius: 20px;
+ }
+ .button_round:hover{
+ background-color: #ddd;
+ color: black;
+ }
+ .button_round.active {
+ background-color: #ddd;
+ }
+
+ .next:hover {
+ background-color: #ddd;
+ color: black;
+ }
+
+ .next {
+ text-decoration: none;
+ display: inline-block;
+ padding: 8px 16px;
+ background-color: rgb(255, 255, 255);
+ color: black;
+ border: solid;
+ box-shadow: -3px -4px grey;
+ }
+ .home:hover {
+ background-color: #ddd;
+ color: black;
+ }
+ .home {
+ text-decoration: none;
+ display: inline-block;
+ padding: 8px 16px;
+ background-color: rgb(255, 255, 255);
+ color: black;
+ border: solid;
+ box-shadow: -3px -4px grey;
+ }
+ #page-container {
+ position: relative;
+ min-height: 100vh;
+ }
+
+ #content-wrap {
+ padding-bottom: 0rem; /* Footer height */
+ }
+ #content-wrap2 {
+ padding-bottom: 10rem; /* Footer height */
+ }
+ #footer {
+ position: relative;
+ bottom: 10px;
+ width: 100%;
+ height: 2.5rem; /* Footer height */
+
+ }
+ #Question{
+ padding-bottom: 10rem;
+ }
+ #Answer{
+ padding-bottom: 2.5rem;
+ margin-left: 100px;
+ }
+
\ No newline at end of file
diff --git a/delta-modulation/waves_delta.js b/delta-modulation/waves_delta.js
new file mode 100644
index 0000000..7a77a43
--- /dev/null
+++ b/delta-modulation/waves_delta.js
@@ -0,0 +1,495 @@
+/*
+
+
+# The Digital Audio Workbench
+
+https://idmil.gitlab.io/course-materials/mumt203/interactive-demos
+
+## Introduction
+
+The purpose of the digital audio workbench is to illustrate key concepts in
+digital audio theory with interactive visualizations of each stage of the
+analog-to-digital conversion (ADC) and digital-to-analog conversion (DAC)
+processes. These visualizations are inspired by demonstrations using
+oscilloscopes and spectrum analyzers to compare the analog signal input into
+the ADC process with the analog signal output by the DAC process, e.g.
+https://youtu.be/cIQ9IXSUzuM
+
+By experimenting with the settings of the simulation, numerous key concepts in
+digital signal theory can be nicely illustrated, such as aliasing, quantization
+error, critical sampling, under and oversampling, and many others. The
+interactive interface allows the simulation to be explored freely; users can
+examine the signals both visually through numerous graphs, or by listening to
+the test signals directly.
+
+## Implementation
+
+Since our demonstration takes place purely in the digital domain, we
+unfortunately cannot use real continuous time analog inputs and outputs.
+Instead, we simulate the ADC-DAC processes in the discrete time domain. The
+analog input and output are represented as discrete time signals with a high
+sampling rate; at the time of writing, the maximum sampling rate supported
+by WebAudio is 96 kHz.
+
+The ADC process consists of several steps, including antialiasing, sampling,
+and quantization. All of these are simulated in our model: antialiasing is
+achieved with a windowed sinc FIR lowpass filter of order specified by the
+user; sampling is approximated by downsampling the input signal by an
+integer factor; and quantization is achieved by multiplying the sampled
+signal (which ranges from -1.0 to 1.0) by the maximum integer value possible
+given the requested bit depth (e.g. 255 for a bit depth of 8 bits), and then
+rounding every sample to the nearest integer. The DAC process is simulated
+in turn by zero stuffing and lowpass filtering the sampled and quantized
+output of the ADC simulation.
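+
+As a minimal sketch of that quantization step (ignoring the mid-tread/mid-rise
+distinction, dither, and clipping handled by the full implementation below):
+
+```javascript
+// Quantize a sample y in [-1.0, 1.0] at the given bit depth by scaling to the
+// maximum representable integer, rounding, and scaling back. Sketch only.
+function quantizeSketch(y, bits) {
+  const maxInt = Math.pow(2, bits) - 1; // e.g. 255 for a bit depth of 8
+  return Math.round(y * maxInt) / maxInt;
+}
+```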
+
+In summary, the continuous time input is simulated by a 96 kHz discrete time
+signal, the sampled output of the ADC process is simulated by a downsampled
+and quantized signal, and the continuous time reconstruction output by the
+DAC is simulated by upsampling the "sampled" signal back to 96 kHz. In our
+tests we have found this model to be reasonable; many key concepts, such as
+critical sampling, aliasing, and quantization noise are well represented in
+our simulation.
+
+For more details, the reader is encouraged to peruse the rest of the source
+code in this document. Many comments have been included to aid readers who
+are unfamiliar with javascript. Any questions you may have about the
+implementation of the simulation can only be definitively answered by
+understanding the source code, but please feel free to contact the project
+maintainers if you have any questions.
+
+```javascript
+*/
+
+// `renderWavesImpl` returns an anonymous function that is bound in the widget
+// constructor. This is done in order to seperate the implementation of the
+// simulation from the other implementation details so that this documentation
+// can be more easily accessed.
+
+const soundTimeSeconds = 1.5;
+const fadeTimeSeconds = 0.125;
+function renderWavesImpl(settings, fft, p) { return (playback = false) => {
+
+ // if we are not rendering for playback, we are rendering for simulation
+ let simulation = !playback;
+
+ // select the buffer to render to; playback buffer, or simulation buffer
+ var original = playback ? settings.original_pb : settings.original;
+ var reconstructed = playback ? settings.reconstructed_pb : settings.reconstructed;
+ var reconstructedDelta = playback ? settings.reconstructedDelta_pb : settings.reconstructedDelta;
+ var stuffed = settings.stuffed;
+
+ // calculate harmonics ------------------------------------------------------
+
+ // The signal is generated using simple additive synthesis. Because of this,
+ // the exact frequency content of the signal can be determined a priori based
+ // on the settings. We generate this information here so that it can be used
+ // not only by the synthesis process below, but also by several of the graphs
+ // used to illustrate the frequency domain content of the signal.
+
+ // We only calculate the harmonics for the simulation; it is assumed they will
+ // already have been calculated earlier when rendering for playback
+
+ if (simulation) {
+ let harmonic_number = 1;
+ let harmonic_amplitude = 1;
+ let invert = 1;
+ let harmInc = (settings.harmType =="Odd" || settings.harmType == "Even") ? 2 : 1;
+
+ for (let i = 0; i < settings.numHarm; i++) {
+
+ // the amplitude of each harmonic depends on the harmonic slope setting
+ if (settings.harmSlope == "lin") harmonic_amplitude = 1 - i/settings.numHarm;
+ else if (settings.harmSlope == "1/x") harmonic_amplitude = 1/harmonic_number;
+ else if (settings.harmSlope == "1/x2") harmonic_amplitude = 1/harmonic_number/harmonic_number;
+ else if (settings.harmSlope == "flat") harmonic_amplitude = 1;
+ else if (settings.harmSlope == "log") {harmonic_amplitude = Math.exp(-0.1*(harmonic_number-1));
+ console.log(harmonic_amplitude)}
+
+ // In case the harmonic slope is 1/x^2 and the harmonic type is "odd",
+ // by inverting every other harmonic we generate a nice triangle wave.
+ if (settings.harmSlope =="1/x2" && settings.harmType == "Odd") {
+ harmonic_amplitude = harmonic_amplitude * invert;
+ invert *= -1;
+ }
+
+ // the frequency of each partial is a multiple of the fundamental frequency
+ settings.harmonicFreqs[i] = harmonic_number*settings.fundFreq;
+
+ // The harmonic amplitude is calculated above according to the harmonic
+ // slope setting, taking into account the special case for generating a
+ // triangle.
+ settings.harmonicAmps[i] = harmonic_amplitude;
+
+ // With harmonic type set to "even" we want the fundamental and even
+ // harmonics. To achieve this, we increment the harmonic number by 1 after
+ // the fundamental and by 2 after every other partial.
+ if (i == 0 && settings.harmType == "Even") harmonic_number += 1;
+ else harmonic_number += harmInc;
+ }
+ }
+
+ // render original wave -----------------------------------------------------
+
+ // initialize the signal buffer with all zeros (silence)
+ original.fill(0);
+
+ // For the sample at time `n` in the signal buffer `original`,
+ // generate the sum of all the partials based on the previously calculated
+ // frequency and amplitude values.
+ original.forEach( (_, n, arr) => {
+ for (let harmonic = 0; harmonic < settings.numHarm; harmonic++) {
+
+ let fundamental_frequency = settings.harmonicFreqs[0];
+ let frequency = settings.harmonicFreqs[harmonic];
+ let amplitude = settings.harmonicAmps[harmonic];
+
+ // convert phase offset specified in degrees to radians
+ let phase_offset = Math.PI / 180 * settings.phase;
+
+ // adjust phase offset so that harmonics are shifted appropriately
+ let phase_offset_adjusted = phase_offset * frequency / fundamental_frequency;
+
+ let radian_frequency = 2 * Math.PI * frequency;
+ let phase_increment = radian_frequency / WEBAUDIO_MAX_SAMPLERATE;
+ let phase = phase_increment * n + phase_offset_adjusted;
+
+ // accumulate the amplitude contribution from the current harmonic
+ arr[n] += amplitude * Math.sin( phase );
+ }
+ });
+
+ // linearly search for the maximum absolute amplitude (easy but not efficient)
+ let max = 0;
+ original.forEach( (x) => { if (Math.abs(x) > max) max = Math.abs(x); } );
+
+ // normalize and apply amplitude scaling
+ original.forEach( (x, n, y) => y[n] = settings.amplitude * x / max );
+
+ // apply antialiasing filter if applicable ----------------------------------
+
+ // The antialiasing and reconstruction filters are generated using Fili.js.
+ // (https://github.com/markert/fili.js/)
+ let firCalculator = new Fili.FirCoeffs();
+ // Fili uses the windowed sinc method to generate FIR lowpass filters.
+ // Like real antialiasing and reconstruction filters, the filters used in the
+ // simulation are not ideal brick wall filters, but approximations.
+
+ // apply antialiasing only if the filter order is set
+ if (settings.antialiasing > 1) {
+
+ // specify the filter parameters; Fs = sampling rate, Fc = cutoff frequency
+
+ // The cutoff for the antialiasing filter is set to the Nyquist frequency
+ // of the simulated sampling process. The sampling rate of the "sampled"
+ // signal is WEBAUDIO_MAX_SAMPLERATE / the downsampling factor. This is
+ // divided by 2 to get the Nyquist frequency.
+ var filterCoeffs = firCalculator.lowpass(
+ { order: settings.antialiasing
+ , Fs: WEBAUDIO_MAX_SAMPLERATE
+ , Fc: (WEBAUDIO_MAX_SAMPLERATE / settings.downsamplingFactor) / 2
+ });
+
+ // generate the filter
+ var filter = new Fili.FirFilter(filterCoeffs);
+
+ // apply the filter
+ original.forEach( (x, n, y) => y[n] = filter.singleStep(x) );
+
+ // time shift the signal by half the filter order to compensate for the
+ // delay introduced by the FIR filter
+ original.forEach( (x, i, arr) => arr[i - settings.antialiasing/2] = x );
+ }
+
+ // downsample original wave -------------------------------------------------
+
+ // zero initialize the reconstruction, and zero stuffed buffers
+ reconstructed.fill(0);
+ reconstructedDelta.fill(0);
+ stuffed.fill(0);
+
+ // generate new signal buffers for the downsampled signal and quantization
+ // noise whose sizes are initialized according to the currently set
+ // downsampling factor
+ if (playback) {
+ settings.downsampled_pb = new Float32Array(p.round(original.length / settings.downsamplingFactor));
+ settings.quantNoise_pb = new Float32Array(p.round(original.length));
+ settings.downsampledDelta_pb = new Float32Array(p.round(original.length / settings.downsamplingFactorDelta));
+ settings.quantNoiseDelta_pb = new Float32Array(p.round(original.length));
+ } else {
+ settings.downsampled = new Float32Array(p.round(original.length / settings.downsamplingFactor));
+ settings.quantNoise = new Float32Array(p.round(original.length / settings.downsamplingFactor));
+ settings.downsampledDelta = new Float32Array(p.round(original.length / settings.downsamplingFactorDelta));
+ settings.quantNoiseDelta = new Float32Array(p.round(original.length / settings.downsamplingFactorDelta));
+ }
+ var downsampled = playback ? settings.downsampled_pb : settings.downsampled;
+ var quantNoise = playback ? settings.quantNoise_pb : settings.quantNoise;
+ var downsampledDelta = playback ? settings.downsampledDelta_pb : settings.downsampledDelta;
+ var quantNoiseDelta = playback ? settings.quantNoiseDelta_pb : settings.quantNoiseDelta;
+ var quantNoiseStuffed = settings.quantNoiseStuffed;
+ var quantNoiseStuffedDelta = settings.quantNoiseStuffedDelta;
+ quantNoiseStuffed.fill(0);
+
+ // calculate the maximum integer value representable with the given bit depth
+ let maxInt = p.pow(2, settings.bitDepth) - 1;
+
+ let stepSize = (settings.quantType == "midTread") ? 2/(maxInt-1) : 2/(maxInt);
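+ // Worked example, assuming bitDepth = 3: maxInt = 2^3 - 1 = 7, so the
+ // mid-tread step is 2/6 = 0.333... (levels are multiples of the step,
+ // including 0), while the mid-rise step is 2/7 = 0.2857... (levels sit at
+ // odd multiples of half a step, so 0 itself is not a level)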
+
+ // generate the output of the simulated ADC process by "sampling" (actually
+ // just downsampling), and quantizing with dither. During this process, we
+ // also load the buffer for the reconstructed signal with the sampled values;
+ // this allows us to skip an explicit zero-stuffing step later
+
+ downsampled.forEach( (_, n, arr) => {
+
+ // keep only every kth sample where k is the integer downsampling factor
+ let y = original[n * settings.downsamplingFactor];
+ y = y > 1.0 ? 1.0 : y < -1.0 ? -1.0 : y; // apply clipping
+
+ // if the bit depth is set to the maximum, we skip quantization and dither
+ if (settings.bitDepth == BIT_DEPTH_MAX) {
+
+ // record the sampled output of the ADC process
+ arr[n] = y;
+
+ // sparsely fill the reconstruction and zero stuffed buffers to avoid
+ // having to explicitly zero-stuff
+ reconstructed[n * settings.downsamplingFactor] = y;
+ stuffed[n * settings.downsamplingFactor] = y * settings.downsamplingFactor;
+ return;
+ }
+
+ // generate dither noise
+ let dither = (2 * Math.random() - 1) * settings.dither;
+
+ let quantized;
+ // Add dither signal and quantize. Constrain so we don't clip after dither
+ switch(settings.quantType) {
+ case "midTread" :
+ quantized = stepSize*p.floor(p.constrain((y+dither),-1,0.99)/stepSize + 0.5);
+ break;
+ case "midRise" :
+ quantized = stepSize*(p.floor(p.constrain((y+dither),-1,0.99)/stepSize) + 0.5);
+ break;
+ }
+
+ // record the sampled and quantized output of the ADC process with clipping
+ arr[n] = quantized;
+
+
+ // sparsely fill the reconstruction buffer to avoid having to zero-stuff
+ reconstructed[n * settings.downsamplingFactor] = quantized;
+ stuffed[n * settings.downsamplingFactor] = quantized * settings.downsamplingFactor;
+
+ // record the quantization error
+ quantNoise[n] = quantized - y;
+ quantNoiseStuffed[n * settings.downsamplingFactor] = quantNoise[n];
+ });
+
+ // render reconstructed wave by low pass filtering the zero stuffed array----
+
+ // specify filter parameters; as before, the cutoff is set to the Nyquist
+ var filterCoeffs = firCalculator.lowpass(
+ { order: 200
+ , Fs: WEBAUDIO_MAX_SAMPLERATE
+ , Fc: (WEBAUDIO_MAX_SAMPLERATE / settings.downsamplingFactor) / 2
+ });
+
+ // generate the filter
+ var filter = new Fili.FirFilter(filterCoeffs);
+
+ // apply the filter
+ reconstructed.forEach( (x, n, arr) => {
+ let y = filter.singleStep(x);
+
+ // To retain the correct amplitude, we must multiply the output of the
+ // filter by the downsampling factor: zero stuffing by a factor k leaves
+ // only every kth sample nonzero, scaling the filtered amplitude by 1/k.
+ arr[n] = y * settings.downsamplingFactor;
+ });
+
+ // time shift the signal by half the filter order (200/2 = 100 samples) to
+ // compensate for the group delay introduced by the linear-phase FIR filter
+ reconstructed.forEach( (x, n, arr) => arr[n - 100] = x );
+
+ //Delta modulation reconstruction
+ let currentAmp = 0;
+ let counter = 0; //Consecutive similar bits counter for adaptive modulation
+ let lastBit = 0;
+ for (let x = 0; x < downsampledDelta.length; x++) {
+ if (original[Math.floor(x/downsampledDelta.length*original.length)] >= currentAmp) {
+ if (settings.deltaType == "adaptive") {
+ if (lastBit == 0) { //If the last bit is similar to this one, increment counter, otherwise reset
+ counter++;
+ } else {counter = 0; lastBit = 0;}
+ }
+ currentAmp += 2*settings.deltaStep*(1+Math.floor(counter/settings.adaptiveNumSteps)); //For adaptive modulation, increase the multiplier every numSteps consecutive steps
+ } else {
+ if (settings.deltaType == "adaptive") {
+ if (lastBit == 1) {
+ counter++;
+ } else {counter = 0; lastBit = 1;}
+ }
+ currentAmp -= 2*settings.deltaStep*(1+Math.floor(counter/settings.adaptiveNumSteps));
+ }
+ currentAmp = (currentAmp>1.0)? 1.0 : (currentAmp<-1.0)? -1.0 : currentAmp; // clip to full scale
+ downsampledDelta[x] = currentAmp;
+
+ }
+ for (let x = 0; x < reconstructedDelta.length; x++) {
+ // upsample the delta modulated staircase to the display length
+ reconstructedDelta[x] = downsampledDelta[Math.floor(x/reconstructedDelta.length*downsampledDelta.length)];
+ // record the delta modulation quantization error, clipped to full scale
+ currentAmp = reconstructedDelta[x] - original[x];
+ currentAmp = (currentAmp>1.0)? 1.0 : (currentAmp<-1.0)? -1.0 : currentAmp;
+ quantNoiseDelta[Math.floor(x/reconstructedDelta.length*quantNoiseDelta.length)] = currentAmp;
+ quantNoiseStuffedDelta[Math.floor(x/reconstructedDelta.length*quantNoiseStuffedDelta.length)] = currentAmp;
+ }
+
+ // render FFTs --------------------------------------------------------------
+ // TODO: apply windows?
+
+ // The FFTs of the signals at the various stages of the process are generated
+ // using fft.js (https://github.com/indutny/fft.js). The call to
+ // `realTransform()` performs the FFT, and the call to `completeSpectrum`
+ // fills the upper half of the spectrum, which is otherwise not calculated
+ // since it is a redundant reflection of the lower half of the spectrum.
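+ // Note: fft.js requires the transform size to be a power of two. Typical
+ // usage (assuming fft.js v4) is: const fft = new FFTJS(size); const out =
+ // fft.createComplexArray(); fft.realTransform(out, input); where `input`
+ // holds `size` real samples and `out` holds 2 * size interleaved
+ // real/imaginary values.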
+
+ if (simulation) {
+ fft.realTransform(settings.originalFreq, original);
+ fft.completeSpectrum(settings.originalFreq);
+
+ fft.realTransform(settings.stuffedFreq, stuffed);
+ fft.completeSpectrum(settings.stuffedFreq);
+
+ fft.realTransform(settings.reconstructedFreq, reconstructed);
+ fft.completeSpectrum(settings.reconstructedFreq);
+
+ fft.realTransform(settings.reconstructedDeltaFreq, reconstructedDelta);
+ fft.completeSpectrum(settings.reconstructedDeltaFreq);
+
+ fft.realTransform(settings.quantNoiseFreq, quantNoiseStuffed);
+ fft.completeSpectrum(settings.quantNoiseFreq);
+
+ fft.realTransform(settings.quantNoiseDeltaFreq, quantNoiseStuffedDelta);
+ fft.completeSpectrum(settings.quantNoiseDeltaFreq);
+ }
+
+ // fade in and out and suppress clipping distortions ------------------------
+
+ // Audio output is windowed to prevent pops. The envelope is a simple linear
+ // ramp up at the beginning and linear ramp down at the end.
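+ // Equivalently, the per-sample gain is min(1, n/F, (L - n)/F), where F is
+ // the fade length in samples and L is the total signal length.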
+
+ if (playback) {
+ // This normalization makes sure the original signal isn't clipped.
+ // The output is clipped during the simulation, so this may reduce its peak
+ // amplitude a bit, but since the clipping adds distortion, the perceived
+ // loudness stays roughly the same as the original signal in my testing.
+ let normalize = settings.amplitude > 1.0 ? settings.amplitude : 1.0;
+
+ // Define the fade function
+ let fade = (_, n, arr) => {
+ let fadeTimeSamps = Math.min(fadeTimeSeconds * WEBAUDIO_MAX_SAMPLERATE, arr.length / 2);
+ // The conditional ensures there is a fade even if the fade time is longer than the signal
+ if (n < fadeTimeSamps)
+ arr[n] = (n / fadeTimeSamps) * arr[n] / normalize;
+ else if (n > arr.length - fadeTimeSamps)
+ arr[n] = ((arr.length - n) / fadeTimeSamps) * arr[n] / normalize;
+ else arr[n] = arr[n] / normalize;
+ };
+
+ // Apply the fade function
+ original.forEach(fade);
+ reconstructedDelta.forEach(fade);
+ quantNoiseDelta.forEach(fade);
+ }
+
+
+}}
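+
+// A minimal sketch of the plain (non-adaptive) delta modulation encoding
+// used by the reconstruction loop above, kept separate for illustration; it
+// is not called by the widget, and its name and default step are assumptions.
+// Given a Float32Array, it returns the one-bit stream together with the
+// staircase approximation that a decoder would rebuild from those bits.
+function deltaModulateSketch(signal, deltaStep = 0.05) {
+ const bits = new Uint8Array(signal.length);
+ const staircase = new Float32Array(signal.length);
+ let amp = 0;
+ for (let n = 0; n < signal.length; n++) {
+ // emit one bit per sample: 1 when the input is above the running amplitude
+ if (signal[n] >= amp) { bits[n] = 1; amp += deltaStep; }
+ else { bits[n] = 0; amp -= deltaStep; }
+ amp = Math.max(-1.0, Math.min(1.0, amp)); // clip to full scale
+ staircase[n] = amp;
+ }
+ return { bits, staircase };
+}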
+/*
+```
+*/
diff --git a/delta-modulation/widget_delta.js b/delta-modulation/widget_delta.js
new file mode 100644
index 0000000..7821b7a
--- /dev/null
+++ b/delta-modulation/widget_delta.js
@@ -0,0 +1,989 @@
+const BIT_DEPTH_MAX = 16;
+const WEBAUDIO_MAX_SAMPLERATE = 96000;
+const NUM_COLUMNS = 2;
+const MAX_HARMONICS = 100;
+function new_widget(panels, sliders, buttons, elem_id, elem_id2, margin_size, width_factor=1.0, height_factor=1.0) {sketch = p => {
+
+//These define the different pages, which edit the text and panels/sliders whenever the next/prev buttons are pressed
+
+/*
+TO ADD A NEW PAGE:
+1. Add a new case to the switch statement. If adding a page between two existing pages, this unfortunately requires updating the case number for all subsequent pages.
+2. Set contentWrap.elt.innerHTML to the content displayed at the top of the page. Please use HTML markup similar to existing pages (e.g. a header at the top, regular line breaks...)
+3. To add a new panel, call updatePanel(panels, "name_of_your_panel", true);
+ To remove a panel, call the same function with false instead of true.
+ Similarly, with sliders, you can call updateSlider(sliders, "propName_of_your_slider", true);
+ To show or hide various buttons, call yourButton.show() or yourButton.hide() respectively.
+ To set a simulation parameter, assign settings.propName = your_new_value; replacing propName with the desired setting.
+ Example: settings.fundFreq = 440; sets the fundamental frequency to 440 Hz
+ Note this only updates the panels/audio simulations, not the sliders
+
+ !! Whenever you add a new element on a page, remember to remove it on the preceding page! Otherwise, it will still be present when the user goes backwards.
+4. Call break; at the end of the case statement!
+
+Basic elements:
+ panels:
+ List of panels used in the widget: refer by name
+ Available panels:
+ "Input Signal Time Domain"
+ "Input Signal Time Domain with Delta Modulation"
+ "Reconstructed Signal Time Domain"
+ "Reconstructed Signal Time Domain using Delta Modulation"
+ "Input Signal FFT"
+ "Reconstructed Signal FFT"
+ "Reconstructed Signal using Delta Modulation FFT"
+ "Sampling Signal Time Domain"
+ "Sampling Signal Frequency Domain"
+ "Sampled Signal Time Domain"
+ "Sampled Signal FFT"
+ "Quantization Noise Time Domain"
+ "Quantization Noise FFT"
+ "Input with Sampled Signal Time Domain"
+ "Input (solid), Sampled (lollipop), Reconstructed (dotted), Time Domain"
+ sliders:
+ List of sliders used in the widget: refer by propName
+ Available sliders:
+ "fundFreq"
+ "numHarm"
+ "downsamplingFactor" (sampling rate)
+ "downsamplingFactorDelta"
+ "deltaStep"
+ "dither"
+ "bitDepth"
+ "amplitude"
+ "antialiasing"
+ "phase"
+ "ampZoom"
+ "timeZoom"
+ "freqZoom"
+ buttons:
+ The buttons that appear in the widget
+ Available buttons:
+ originalButton (plays original sound)
+ reconstructedButton (plays reconstructed sound)
+ reconstructedDeltaButton (plays reconstructed sound using delta modulation)
+ quantNoiseButton (plays quantization noise)
+ quantNoiseDeltaButton (plays quantization noise using delta modulation)
+ adaptiveSwitchButton (switches between adaptive and non-adaptive delta modulation)
+ settings:
+ The simulation parameters currently loaded. Same as the slider's propName.
+*/
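+
+/* A hypothetical example of steps 1-4 above (the case number, page text, and
+ element choices are illustrative, not part of the widget):
+
+ case 22:
+ contentWrap.elt.innerHTML = `
+ Chapter 6: Dither
+ Try adding a small amount of noise before quantizing...
+ `;
+ updateSlider(sliders, "dither", true); // show the dither slider
+ updatePanel(panels, "Quantization Noise FFT", true); // show a new panel
+ quantNoiseButton.show(); // let the user play the noise
+ break;
+*/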
+function updatePage(pageNum) {
+ switch(pageNum) {
+ //Chapter 1: Waveform Building
+ case 0:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 1: Waveform Building
+
+
+ Let's start with the basics. A sound is created by a variation of pressure through the air (or some other medium).
+ The characteristics of this sound depend on the characteristics of the variation.
+ If the variation of pressure (i.e. the amplitude) is very large, the sound will be loud.
+ If the pressure changes rapidly, we say that the sound signal has a high frequency, and the pitch will be high.
+ In order to visualize a sound, we usually plot its amplitude as a function of time. The simplest such sound is a sine wave, which looks like the function in the panel below.
+ A sine wave contains just one frequency at one amplitude, and can be written as: (amplitude)*sin(2π*(frequency)*(time))
+ Try playing around with the frequency slider below and press the "Play original" button to see what it sounds like.
+ Careful not to hurt your ears!`;
+ updatePanel(panels, "Input Signal Frequency Domain", false);
+ break;
+ case 1:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 1: Waveform Building
+
+
+ In the right panel, a vertical line at 440 Hz represents the sine wave frequency component.
+ Remember that a sine (or a cosine) wave has only one frequency component.
+ In other words, it represents a simple harmonic motion such as the motion of an ideal pendulum or a tuning fork.`;
+ settings.fundFreq = 440;
+ updatePanel(panels, "Input Signal Frequency Domain", true);
+ updateSlider(sliders, "numHarm", false);
+ break;
+ case 2:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 1: Waveform Building
+
+
+ However, in the real world, sounds aren't just composed of a single frequency component. Usually, on top of the main frequency, sounds will also have a multitude of smaller frequency components called harmonics, situated at integer multiples of the original frequency.
+ You may now add harmonics to the generated waveform, choosing between even- or odd-integer harmonics as well as different harmonic amplitude schemes.
+ Try playing around with the parameters. Can you build a waveform with:
+
+
Square Waves?
+
Triangular Waves?
+
Sawtooth Waves?
+
+ What do each of these sound like?
+ `;
+ updateSlider(sliders, "numHarm", true);
+ updatePanel(panels, "Sampling Signal Time Domain", false);
+ updateSlider(sliders, "downsamplingFactor", false);
+ updatePanel(panels, "Input with Sampled Signal Time Domain", false);
+ break;
+
+ //Chapter 2: Sampling a Waveform in the Time Domain
+ case 3:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ We're now interested in what happens when we try to record a signal in the real world. For sound, the input is some kind of continuous signal, whether analogue
+ or acoustic, and is captured either directly or by a microphone. In this case, we have a sinusoidal waveform.
+ Before the continuous signal can be converted into a set of 0's and 1's, it must be sampled. A simple one-dimensional sampling system can be written as: y[n] = x(nTs)
+ This means that we simply measure the amplitude of the signal every Ts seconds.
+ The bottom right panel shows this sampling signal (the impulse train) that polls the input x at times nTs.
+ The bottom left panel shows the resulting samples, with amplitudes corresponding to the polled input signal.
+ `;
+ settings.numHarm = 1;
+ updateSlider(sliders, "numHarm", false);
+ updateSlider(sliders, "downsamplingFactor", true);
+ updatePanel(panels, "Input with Sampled Signal Time Domain", true);
+ updatePanel(panels, "Sampling Signal Time Domain", true);
+ reconstructedButton.hide();
+ break;
+ case 4:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ After having measured the amplitude at each point shown, we will end up with a sequence of numbers representing our sound. Once converted to binary, this will be our sound file.
+ From there, we can reconstruct what we think the input sound is. You may now listen to the reconstruction using the button below the page.
+ `;
+ reconstructedButton.show();
+ break;
+ case 5:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ Try putting the sample rate to its minimum value. What do you see happening in the polled input signal?
+ `;
+ break;
+ case 6:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ From now on, we will also show you the frequency domain of the reconstructed waveform. This is what we think the input signal's frequency content is, based on the information we measure.
+ Now, set the input signal frequency to 150 Hz. You may do this using the textboxes and "Update" buttons. How many samples do you get in each period?
+ `;
+ updatePanel(panels, "Sampling Signal Time Domain", false);
+ updatePanel(panels, "Reconstructed Signal FFT", true);
+ break;
+ case 7:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ Now, what happens when you increase the input frequency?
+ In particular, try making it so that the input frequency is exactly half the sampling frequency.
+ What happens to the location of the samples? What should the resulting waveform sound like?
+ `;
+ break;
+ case 8:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ You should've seen that when the sampling frequency is exactly twice the input frequency, no variation is detected by the sampling process. This would mean our recording has no sound!
+ This limit, half the sampling rate, is called the "Nyquist frequency", and is represented by the edge of the gray area in the frequency domain. We will now explore methods to deal with sounds whose frequency approaches the Nyquist.
+ To start with, you may now control the phase of the input signal relative to the samples. What do you notice when you shift the phase of the input by a little bit?
+ `;
+ updateSlider(sliders, "phase", true);
+ break;
+ case 9:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ You may have seen that by shifting the phase, we are able to gain more information about the input signal than was previously available.
+ Now, setting the phase back to zero, try decreasing the input frequency slightly below Nyquist.
+ What do you notice? Can you tell what the resulting frequency would be? What happens if the input is slightly above Nyquist?
+ `;
+ settings.phase = 0;
+ updateSlider(sliders, "phase", false);
+ updateSlider(sliders, "numHarm", false);
+ break;
+ case 10:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ Near the Nyquist frequency, we see that the input frequency gets "duplicated" on either side of the Nyquist.
+ This is an example of "signal folding", which happens because the samples land at points that are also consistent with a different, lower frequency.
+ As an extreme case, with the sampling rate at 3000 Hz, what happens when the input signal is at 2900 Hz? What happens if the input signal is way higher than 3000 Hz?
+ `;
+ break;
+ case 11:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ When the input signal is only 100 Hz below the sampling frequency, the reconstruction thinks that we're measuring a 100 Hz signal! This is, once again, an example of signal folding.
+ In general, the reconstructed signal is unable to distinguish sounds above the Nyquist frequency. This means the frequency range of our recordings is limited by the Nyquist.
+ Now, set the input signal frequency to 750 Hz and the number of odd 1/x harmonics to 2.
+ With the sampling rate at 3000 Hz, do you notice something in how the input signal is being sampled?
+ `;
+ updateSlider(sliders, "numHarm", true);
+ settings.phase = 0;
+ break;
+ case 12:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ You may have noticed that the samples fell on the same spots whether you had 1 or 2 harmonics. This is because the second harmonic fell exactly on the Nyquist frequency (1500 Hz).
+ Thus both the measurement of the fundamental frequency and that of its harmonics can be affected by our sampling process.
+ This must be taken into account: many sounds contain frequencies above the Nyquist, and these must be filtered out to prevent folding artifacts.
+ `;
+ break;
+ case 13:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 2: Sampling a Waveform in the Time Domain
+
+
+ The hearing range of the average human goes from 20 Hz to around 20,000 Hz.
+ Using the information presented so far, can you explain why most recording devices use sampling rates of 48,000 Hz?
+ `;
+ updateSlider(sliders, "fundFreq", true);
+ updateSlider(sliders, "numHarm", true);
+ updateSlider(sliders, "downsamplingFactor", true);
+ updateSlider(sliders, "phase", true);
+ updatePanel(panels, "Input Signal Frequency Domain", true);
+ updatePanel(panels, "Input with Sampled Signal Time Domain", true);
+ updatePanel(panels, "Reconstructed Signal FFT", true);
+ updatePanel(panels, "Input Signal Time Domain with Delta Modulation", false);
+ reconstructedButton.show();
+ break;
+
+ //Add more pages here
+
+ //Chapter 5: Delta Modulation
+ case 14:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 5: Delta Modulation
+
+
+ So far, we've only looked at one sampling method, which is the simplest and most standard approach to recording sound. However, there exist many other methods, and each
+ comes with its own advantages and limitations.
+
+ In this chapter, we will cover a method called "Delta Modulation" sampling. The idea is that, instead of measuring the absolute amplitude of the signal at each point, we will
+ just compare the current amplitude with the previous one.
+ Here's how we would implement this: Start the recording by measuring the current amplitude of the signal.
+ At some time Ts afterwards, if the amplitude is greater than the previous one, assign the bit "1". If it is smaller, assign the bit "0".
+ Update the current amplitude by adding or subtracting a "delta step": a fixed amplitude increment that must be decided on beforehand.
+ As Ts becomes very small, we will be able to reconstruct the shape of the waveform!
+ `;
+ settings.fundFreq = 440;
+ settings.numHarm = 1;
+ settings.phase = 0;
+ settings.timeZoom = 2;
+ updateSlider(sliders, "fundFreq", false);
+ updateSlider(sliders, "phase", false);
+ updateSlider(sliders, "numHarm", false);
+ updateSlider(sliders, "downsamplingFactor", false);
+ updatePanel(panels, "Input with Sampled Signal Time Domain", false);
+ updatePanel(panels, "Reconstructed Signal FFT", false);
+ updatePanel(panels, "Input Signal Time Domain with Delta Modulation", true);
+ reconstructedButton.hide();
+
+ updatePanel(panels, "Input Signal Time Domain", true);
+ updatePanel(panels, "Input Signal Frequency Domain", false);
+ updatePanel(panels, "Reconstructed Signal Time Domain using Delta Modulation", false);
+ updatePanel(panels, "Reconstructed Signal using Delta Modulation FFT", false);
+ reconstructedDeltaButton.hide();
+ break;
+ case 15:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 5: Delta Modulation
+
+
+ From now on, we will show you the input signal overlaid with the delta modulation steps, along with the reconstruction using the same algorithm.
+ You also have access to the frequency domain of the input and reconstructed signal.
+ Try playing around with the input signal and listening to the reconstructed sound. In what situations does the delta modulation algorithm fail to reproduce the input signal? Why?
+ `;
+ updateSlider(sliders, "fundFreq", true);
+ updateSlider(sliders, "numHarm", true);
+ updatePanel(panels, "Input Signal Time Domain", false);
+ updatePanel(panels, "Input Signal Frequency Domain", true);
+ updatePanel(panels, "Reconstructed Signal Time Domain using Delta Modulation", true);
+ updatePanel(panels, "Reconstructed Signal using Delta Modulation FFT", true);
+ reconstructedDeltaButton.show();
+
+ updateSlider(sliders, "downsamplingFactorDelta", false);
+ break;
+ case 16:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 5: Delta Modulation
+
+
+ You might have noticed that, at high input frequencies, the reconstruction is unable to "keep up" with the rapid change in amplitude. This leads to the creation of small triangular waves in the reconstruction.
+ This is known as waveform overloading (often called "slope overload"), and it causes the amplitude of the high-frequency components to be attenuated (notice the spike in the frequency domain is much smaller).
+ There are a few ways to fix this problem. For instance, try increasing the sampling frequency and see how the reconstruction is affected.
+ Does the reconstruction sound better for high frequency inputs?
+ `;
+ updateSlider(sliders, "downsamplingFactorDelta", true);
+ break;
+ case 17:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 5: Delta Modulation
+
+
+ When the sampling frequency is sufficiently high, the algorithm is now able to follow rapid changes in the amplitude.
+ Notice that since we are only assigning a single bit at each step, we are able to sample the signal much more frequently than in our previous sampling method.
+ For a typical delta modulation algorithm, the sampling frequency can go to 4 MHz or higher.
+ However, this introduces an additional problem. At the maximum sampling frequency, try sending a low-frequency input signal with no harmonics. Do you notice anything abnormal in the reconstruction?
+ `;
+
+ updateSlider(sliders, "deltaStep", false);
+ break;
+ case 18:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 5: Delta Modulation
+
+
+ When the input signal stays stable for a certain amount of time, the delta modulation rapidly oscillates around that value, as it is only able to increase or decrease by a fixed step.
+ This creates a distinctive "buzzing" noise in the reconstruction, which you may have noticed by playing around with the parameters.
+ In many ways, this is similar to the quantization phenomenon we saw previously.
+ In order to reduce this, we can decrease the delta step. So far, each step has moved the amplitude up or down by 5% of the full amplitude range.
+ Try playing around with this now. Are you able to get rid of the buzzing?
+ `;
+ updateSlider(sliders, "deltaStep", true);
+ break;
+ case 19:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 5: Delta Modulation
+
+
+ At very low delta steps, the reconstruction is much more sensitive to the details of the input signal, which gets rid of the buzzing sound from earlier.
+ However, you might have noticed a new problem. Whenever there are large changes in the input, the reconstruction needs many more steps to catch up to it.
+ So, if the step is too low, waveform overloading becomes a problem again, and high-frequency sounds get attenuated.
+ If the step is too high, quantization becomes an issue, and low-frequency sounds will contain buzzing.
+ In other words, it seems we need to adjust the delta step according to the behaviour of the input signal...
+ `;
+ adaptiveSwitchButton.hide();
+ break;
+ case 20:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 5: Delta Modulation
+
+
+ Here's how we can change the algorithm to do this. If the input signal is higher than our current amplitude, add the delta step as usual.
+ If the input signal is higher twice in a row, add twice the delta step. If this happens three times in a row, add three times the delta step and so on.
+ Once the input signal is lower than our current amplitude, reset the multiplier back to 1.
+ In other words, consecutive changes in the delta modulation increase the delta step.
+ This means that the reconstruction can react to abrupt changes in the input, while still capturing the details of the sections where the input is stable.
+ `;
+ break;
+ case 21:
+ contentWrap.elt.innerHTML = `
+
+ Chapter 5: Delta Modulation
+
+
+ This algorithm is known as "adaptive delta modulation". You may now switch between the adaptive and non-adaptive version using the button at the bottom.
+ Try playing around with different situations where the non-adaptive version had problems. Does the adaptive version improve?
+ `;
+ adaptiveSwitchButton.show();
+ break;
+ }
+ reorderPanels();
+ reorderSliders();
+}
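+
+// A minimal sketch of the adaptive delta modulation rule described in the
+// Chapter 5 pages above: every numSteps consecutive moves in the same
+// direction grow the step multiplier by one, and a direction change resets
+// it. This mirrors the widget's reconstruction loop but is a standalone
+// illustration; it is not called anywhere, and its name and defaults are
+// assumptions, not part of the widget's API.
+function adaptiveDeltaSketch(signal, deltaStep = 0.05, numSteps = 3) {
+ const out = new Float32Array(signal.length);
+ let amp = 0, lastBit = -1, counter = 0;
+ for (let n = 0; n < signal.length; n++) {
+ const bit = signal[n] >= amp ? 1 : 0;
+ // count consecutive identical bits; reset the count on a direction change
+ if (bit === lastBit) counter++; else { counter = 0; lastBit = bit; }
+ const step = deltaStep * (1 + Math.floor(counter / numSteps));
+ amp += bit === 1 ? step : -step;
+ amp = Math.max(-1.0, Math.min(1.0, amp)); // clip to full scale
+ out[n] = amp;
+ }
+ return out;
+}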
+
+/*
+ elem_id:
+ Tells the widget which div to place the buttons in (questions or answers, etc.)
+ elem_id2:
+ Tells the widget which divs of the given class name to take into account when placing the widget vertically
+ margin_size:
+ Used to place the upload buttons at a specific position
+ width_factor:
+ 1.0 by default; scales the width of the widget
+ height_factor:
+ 1.0 by default; scales the height of the widget
+*/
+
+var element = undefined;
+console.log(elem_id);
+if (elem_id) {
+ element = document.getElementById(elem_id);
+ console.log(element.id);
+ console.log(element.clientHeight, element.clientWidth);
+
+}
+var intro_text = document.getElementsByClassName(elem_id2);
+var intro_height = 0;
+
+var numPanels = panels.length;
+var numSliders = sliders.length;
+var old_x = 220;
+let panelHeight, panelWidth, sliderWidth, sliderHeight, numColumns, contentWrap;
+resize(1080, 1920);
+
+// set display and fftSize to ensure there is enough data to fill the panels when zoomed all the way out
+let fftSize = p.pow(2, p.round(p.log(panelWidth/minFreqZoom) / p.log(2)));
+let displaySignalSize = p.max(fftSize, panelWidth/minTimeZoom) * 1.1; // 1.1 for 10% extra safety margin
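+// e.g. with an 800 px panel and minFreqZoom = 0.5 (illustrative values),
+// fftSize = 2^round(log2(1600)) = 2048, and displaySignalSize is at least
+// 1.1 * 2048 samples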
+let fft = new FFTJS(fftSize);
+var settings =
+ { amplitude : 1.0
+ , fundFreq : 1250 // input signal fundamental freq
+ , sampleRate : WEBAUDIO_MAX_SAMPLERATE
+ , downsamplingFactor : 2
+ , downsamplingFactorDelta : 2
+ , numHarm : 1 //Number of harmonics
+ , harmType : "Odd" // Harmonic series to evaluate - Odd, even or all
+ , harmSlope : "1/x" // Amplitude scaling for harmonics. can be used to create different shapes like saw or square
+ , harmonicFreqs : new Float32Array(MAX_HARMONICS) //Array storing harmonic frequency in hz
+ , harmonicAmps : new Float32Array(MAX_HARMONICS) //Array storing harmonic amp (0-1.0)
+ , phase : 0.0 // phase offset for input signal
+ , fftSize : fftSize
+ , bitDepth : BIT_DEPTH_MAX //quantization bit depth
+ , quantType : "midRise" // type of quantization
+ , dither : 0.0 // amplitude of white noise added to signal before quantization
+ , antialiasing : 0 // antialiasing filter order
+ , original: new Float32Array(displaySignalSize)
+ , downsampled: new Float32Array(1) // this gets re-inited when rendering waves
+ , downsampledDelta: new Float32Array(1)
+ , reconstructed: new Float32Array(displaySignalSize)
+ , reconstructedDelta: new Float32Array(displaySignalSize)
+ , stuffed: new Float32Array(displaySignalSize)
+ , quantNoiseStuffed: new Float32Array(displaySignalSize)
+ , quantNoiseStuffedDelta: new Float32Array(displaySignalSize)
+ , quantNoise: new Float32Array(displaySignalSize)
+ , original_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
+ , reconstructed_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
+ , reconstructedDelta_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
+ , quantNoise_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
+ , quantNoiseDelta_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds))
+ , originalFreq : fft.createComplexArray()
+ , stuffedFreq : fft.createComplexArray()
+ , reconstructedFreq : fft.createComplexArray()
+ , reconstructedDeltaFreq : fft.createComplexArray()
+ , quantNoiseFreq : fft.createComplexArray()
+ , quantNoiseDeltaFreq : fft.createComplexArray()
+ , snd : undefined
+ , maxVisibleFrequency : WEBAUDIO_MAX_SAMPLERATE / 2
+ , freqZoom : 1.0 //X axis zoom for frequency panels
+ , ampZoom : 1.0 // Y axis zoom for all panels
+ , timeZoom: 1.0 // X axis zoom for signal panels
+ , deltaFrequency: 96000
+ , deltaStep: 0.05
+ , deltaType: "non-adaptive"
+ , adaptiveNumSteps: 3 //Number of consecutive steps needed to trigger adaptive delta modulation
+ , element : element
+ , margine_size : margin_size+20
+ , p5: undefined
+ , render : undefined
+ , play : undefined
+
+ };
+
+p.settings = settings;
+
+var renderWaves = renderWavesImpl(settings, fft, p);
+
+p.setup = function () {
+ settings.p5 = p;
+ settings.render = renderWaves;
+ settings.play = playWave;
+
+ p.createCanvas(p.windowWidth, p.windowHeight+500);
+ console.log(p.windowWidth,p.windowHeight)
+ p.textAlign(p.CENTER);
+ contentWrap = p.createDiv();
+ contentWrap.id("content-wrap");
+ contentWrap.position(0,100);
+ contentWrap.class("title qs");
+ contentWrap.elt.innerHTML = `
+
-
+
diff --git a/tutorials/panel_tutorial.js b/tutorials/panel_tutorial.js
index 98fdfbb..59678ff 100644
--- a/tutorials/panel_tutorial.js
+++ b/tutorials/panel_tutorial.js
@@ -30,7 +30,7 @@ class Panel {
this.xbezel = Math.max(70, w * 0.1);
this.xbezelLeft = 0.60 * this.xbezel;
this.xbezelRight = 0.40 * this.xbezel;
- this.ybezel = Math.max(20, h * 0.1);
+ this.ybezel = Math.max(20, h *0.1);
this.halfh = h/2;
this.plotHeight = h - 2 * this.ybezel;
this.plotWidth = w - this.xbezel;
@@ -108,19 +108,22 @@ function drawMidLine(panel) {
}
const time_signal_doc='Because this signal approximates a continuous analog signal in our simulation, the signal value is drawn with a simple interpolation scheme. There are currently bugs with this interpolation when zooming in (time zoom > 100%). In addition, visual aliasing may occur when viewing high frequency signals due to the limited number of pixels on the screen acting as a kind of spatial sampling process. This may appear as amplitude modulation in the plot that is not actually present in the signal. Finally, note that the amplitude of the signal is clipped to the size of the panel viewport. This visual clipping happens regardless of whether the signal itself actually exhibits clipping. ';
-function drawSignal(panel, signal, zoom = 1)
+function drawSignal(panel, signal)
{
let pixel_max = panel.plotHeight/2;
let pixel_per_fullscale = pixel_max * panel.settings.ampZoom;
panel.buffer.noFill();
//TODO: there are some artifacts here due to the way the signal is drawn, especially when zoomed in and/or large amplitude
panel.buffer.beginShape();
- panel.buffer.curveTightness(1.0);
- for (let x = 0; x < panel.plotWidth; x++) {
- let pixel_amp = pixel_per_fullscale * signal[Math.round(x/panel.settings.timeZoom)];
+ let max_x = 10000; // number of interpolation points across the plot
+ for (let x = 0; x < max_x; x++) {
+ let pixel_x = (x/max_x)*panel.plotWidth/panel.settings.timeZoom;
+ let amp = signal[Math.floor(pixel_x)] + (pixel_x - Math.floor(pixel_x)) * (signal[Math.ceil(pixel_x)] - signal[Math.floor(pixel_x)]); //Linear interpolation; no division, so an integer pixel_x is safe
+ let pixel_amp = pixel_per_fullscale * amp;
let y = panel.halfh - pixel_amp;
- y = (y<panel.plotTop)? y=panel.plotTop : (y>panel.plotBottom)? y=panel.plotBottom : y; panel.buffer.curveTightness(0.0);
- panel.buffer.curveVertex(x + panel.plotLeft, y);
+ panel.buffer.curveTightness(1.0);
+ y = (y<panel.plotTop)? y=panel.plotTop : (y>panel.plotBottom)? y=panel.plotBottom : y;
+ panel.buffer.curveVertex((x/max_x)*panel.plotWidth + panel.plotLeft, y);
}
panel.buffer.endShape();
}
diff --git a/tutorials/tutorial2.html b/tutorials/tutorial2.html
index 38eeeb2..4d67dab 100644
--- a/tutorials/tutorial2.html
+++ b/tutorials/tutorial2.html
@@ -21,7 +21,7 @@
- Now, bypass the Nyquist Frequency to 2700 Hz whilst keeping the sampling rate at 3000 Hz, what do you notice? Can you tell what the resulting frequency.
+ Now, raise the input frequency past the Nyquist to 2700 Hz whilst keeping the sampling rate at 3000 Hz. What do you notice? Can you tell what the resulting frequency would be?
Now set the input signal frequency to 750 and the number of harmonics to two.
@@ -94,7 +94,6 @@
,["original"]
,"Question" //question div id
,"qs" //questions class
-
);
diff --git a/tutorials/tutorial3.html b/tutorials/tutorial3.html
index cb3eb35..76e56ea 100644
--- a/tutorials/tutorial3.html
+++ b/tutorials/tutorial3.html
@@ -29,7 +29,7 @@