From 8f3ed37f3bff09329006df4f71046f1ac7a7c751 Mon Sep 17 00:00:00 2001 From: Laurent Tarabout Date: Thu, 8 Feb 2024 16:02:57 -0500 Subject: [PATCH 1/7] added delta modulation initial --- delta-modulation/index.html | 77 +++ delta-modulation/panel_delta.js | 747 ++++++++++++++++++++++++++++++ delta-modulation/slider_delta.js | 353 ++++++++++++++ delta-modulation/styles_delta.css | 0 delta-modulation/waves_delta.js | 375 +++++++++++++++ delta-modulation/widget_delta.js | 226 +++++++++ tutorials/index.html | 2 +- tutorials/panel_tutorial.js | 17 +- tutorials/tutorial2.html | 4 +- tutorials/tutorial3.html | 2 +- tutorials/tutorial4.html | 12 + 11 files changed, 1804 insertions(+), 11 deletions(-) create mode 100644 delta-modulation/index.html create mode 100644 delta-modulation/panel_delta.js create mode 100644 delta-modulation/slider_delta.js create mode 100644 delta-modulation/styles_delta.css create mode 100644 delta-modulation/waves_delta.js create mode 100644 delta-modulation/widget_delta.js diff --git a/delta-modulation/index.html b/delta-modulation/index.html new file mode 100644 index 0000000..27fa1b7 --- /dev/null +++ b/delta-modulation/index.html @@ -0,0 +1,77 @@ + + + + + + + + + + + + + + + + + + + +
+ [HTML markup stripped during extraction: the surviving visible text of delta-modulation/index.html is a "Home" navigation link and a "Delta Modulation" page title inside the page's container divs]
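The panel code added below (panel_delta.js) visualizes delta modulation by stepping a running approximation of the input up or down by a fixed fraction of the plot height, driven by settings.deltaStep (see drawDeltaModulation). For reference, here is a minimal sketch of the same encode/decode idea on a plain sample buffer; it is illustrative only, the function names do not appear in this patch, and the step size plays the same role as the "Delta Step (%)" slider defined in slider_delta.js.

```javascript
// Illustrative 1-bit delta modulation encoder/decoder (not part of this patch).
// The encoder keeps a running approximation and, for every input sample, emits
// one bit: +1 if the input is above the approximation, -1 otherwise. The
// approximation is then stepped by a fixed delta -- the same up/down decision
// that drawDeltaModulation() plots against the original waveform.
function deltaModulate(signal, deltaStep) {
  const bits = new Int8Array(signal.length);
  let approx = 0;
  for (let n = 0; n < signal.length; n++) {
    bits[n] = signal[n] > approx ? 1 : -1; // 1-bit quantizer: sign of the error
    approx += bits[n] * deltaStep;         // integrate the bit stream
  }
  return bits;
}

// The decoder simply re-integrates the bit stream with the same step size;
// a lowpass filter (like the Fili reconstruction filter used in waves_delta.js)
// would normally follow to smooth out the resulting staircase.
function deltaDemodulate(bits, deltaStep) {
  const out = new Float32Array(bits.length);
  let approx = 0;
  for (let n = 0; n < bits.length; n++) {
    approx += bits[n] * deltaStep;
    out[n] = approx;
  }
  return out;
}
```

If the step is too small to follow the steepest part of the input, the approximation lags behind (slope overload); if it is too large, a quiet input turns into a constant one-step up/down oscillation (granular noise). This is the trade-off exposed by the delta-step and delta sample-rate sliders in slider_delta.js.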
+ + + + + + diff --git a/delta-modulation/panel_delta.js b/delta-modulation/panel_delta.js new file mode 100644 index 0000000..ce62578 --- /dev/null +++ b/delta-modulation/panel_delta.js @@ -0,0 +1,747 @@ +// Canned documentation blurbs +//Panel class. should be extended with a drawPanel method +const log10 = Math.log(10); +class Panel { + constructor(background = "white", stroke = "black", strokeWeight = 1, fill = "black") { + this.background = background; + this.stroke = stroke; + this.strokeWeight = strokeWeight; + this.fill = fill; + this.xAxis= "Time"; + this.yAxis = "Amp"; + this.tickTextSize = 9; + this.numTimeTicks = 8; + this.numFreqTicks = 4; + this.name = "Base Panel Class"; + this.description = "This is the base class that other panels inherit from. If you can see this and you are not reading the source code right now there is probably a problem. Please open an issue or otherwise contact the project maintainers." + } + + setup(p, height, width, settings) { + this.settings = settings; + this.buffer = p.createGraphics(1,1); + this.resize(height, width); + this.bufferInit(); + this.buffer.textFont('Helvetica',20); + this.buffer.textAlign(p.CENTER); + } + + resize(h, w) { + this.buffer.resizeCanvas(w, h); + this.xbezel = Math.max(70, w * 0.1); + this.xbezelLeft = 0.60 * this.xbezel; + this.xbezelRight = 0.40 * this.xbezel; + this.ybezel = Math.max(20, h *0.1); + this.halfh = h/2; + this.plotHeight = h - 2 * this.ybezel; + this.plotWidth = w - this.xbezel; + this.plotLeft = this.xbezelLeft; // the x coord. of the left side of the plot + this.plotRight = w - this.xbezelRight; // ditto of the right side of the plot + this.plotTop = this.ybezel; // y coord. of top + this.plotBottom = h - this.ybezel; // y coord. of bottom + } + + bufferInit(){ + this.buffer.background(this.background); + this.buffer.fill(this.fill); + this.buffer.stroke(this.stroke); + this.buffer.strokeWeight(this.strokeWeight); + } + + drawStem(x,y,startHeight,ellipseSize =this.ellipseSize){ + let actual_y = y; + y = (ythis.plotBottom)? y= this.plotBottom : y; + this.buffer.line(x, startHeight, x, y); + ellipseSize= (actual_ythis.plotBottom)? 
0: ellipseSize; + this.buffer.ellipse(x, y, ellipseSize); + }; + + setbackground(backgroundClr){ this.background = backgroundClr; } + setStroke(strokeClr){ this.stroke = strokeClr; } + setStrokeWeight(strokeWgt){ this.strokeWeight = strokeWgt; } + setFill(fillClr){ this.fill = fillClr; } + + drawBorder(){ + this.buffer.stroke(this.stroke); + this.buffer.line(this.plotLeft, this.plotTop, this.plotLeft, this.plotBottom); + this.buffer.line(this.plotLeft, this.plotTop, this.plotRight, this.plotTop); + this.buffer.line(this.plotRight, this.plotTop, this.plotRight, this.plotBottom); + this.buffer.line(this.plotLeft, this.plotBottom, this.plotRight, this.plotBottom); + } + + drawPanel(){} +} + +class freqPanel extends Panel{ + constructor(){ super(); this.xAxis = "Frequency"; + } + + drawPeak(x,height,base,colour="black"){ + height = Math.abs(height); + this.buffer.fill(colour); + this.buffer.stroke(colour); + this.buffer.beginShape(); + if (xthis.plotRight) return; + let x1=x-2; let x2 = x+2; + x1 = Math.max(x1, this.plotLeft); + x2 = Math.min(x2, this.plotRight); + this.buffer.vertex(x1, base); + this.buffer.vertex(x, this.plotBottom-height); + this.buffer.vertex(x2, base); + this.buffer.vertex(x, base); + this.buffer.endShape(); + this.buffer.stroke(this.stroke); this.buffer.fill(this.fill); + } +} + +function linToDB(a, a_0 = 1) +{ + return 20 * Math.log(a / a_0) / log10; +} + +const midline_doc='The horizontal middle line represents an amplitude of zero. '; +function drawMidLine(panel) { + // panel.buffer.drawingContext.setLineDash([5,5]); + panel.buffer.stroke("gray"); + panel.buffer.line(panel.plotLeft, panel.halfh, panel.plotRight, panel.halfh); + panel.buffer.stroke(panel.stroke); + // panel.buffer.drawingContext.setLineDash([]); +} + +const time_signal_doc='Because this signal approximates a continuous analog signal in our simulation, the signal value is drawn with a simple interpolation scheme. There are currently bugs with this interpolation when zooming in (time zoom > 100%). In addition, visual aliasing may occur when viewing high frequency signals due to the limited number of pixels on the screen acting as a kind of spatial sampling process. This may appear as amplitude modulation in the plot that is not actually present in the signal. Finally, note that the amplitude of the signal is clipped to the size of the panel viewport. This visual clipping happens regardless of whether the signal itself actually exhibits clipping. '; +function drawSignal(panel, signal) +{ + let pixel_max = panel.plotHeight/2; + let pixel_per_fullscale = pixel_max * panel.settings.ampZoom; + panel.buffer.noFill(); + panel.buffer.beginShape(); + max_x = 10000 + for (let x = 0; x < max_x; x++) { + let pixel_x = (x/max_x)*panel.plotWidth/panel.settings.timeZoom + let amp = signal[Math.floor(pixel_x)]+(pixel_x-Math.floor(pixel_x))/(Math.ceil(pixel_x)-Math.floor(pixel_x))*(signal[Math.ceil(pixel_x)]-signal[Math.floor(pixel_x)]); //Linear interpolation + let pixel_amp = pixel_per_fullscale * amp; + let y = panel.halfh - pixel_amp; + panel.buffer.curveTightness(1.0); + y = (ypanel.plotBottom)? 
y= panel.plotBottom: y=y;panel.buffer.curveTightness(1.0) + panel.buffer.curveVertex((x/max_x)*panel.plotWidth + panel.plotLeft, y); + } + panel.buffer.endShape(); +} + +function drawDeltaModulation(panel, signal) { + let pixel_max = panel.plotHeight/2; + let pixel_per_fullscale = pixel_max * panel.settings.ampZoom; + panel.buffer.noFill(); + panel.buffer.beginShape(); + panel.buffer.curveTightness(1.0); + let visibleSamples = Math.floor(panel.plotWidth / panel.settings.downsamplingFactor/panel.settings.timeZoom+1); + console.log(visibleSamples); + let ypos = panel.halfh; + for (let x = 0; x < visibleSamples; x++) { + let xpos = Math.round(panel.plotLeft + x * panel.settings.downsamplingFactor*panel.settings.timeZoom); + panel.buffer.curveVertex(xpos, ypos); + if (pixel_max * signal[Math.floor((x/visibleSamples)*panel.plotWidth/panel.settings.timeZoom)]*panel.settings.ampZoom < panel.halfh - ypos) { + ypos += panel.settings.deltaStep*panel.plotHeight; + //if (ypos >= panel.plotBottom) ypos -= 2*panel.settings.deltaStep*panel.plotHeight; //Prevent signal from going below bounds + } else { + ypos -= panel.settings.deltaStep*panel.plotHeight; + //if (ypos <= panel.plotTop) ypos += 2*panel.settings.deltaStep*panel.plotHeight; //Same for the top bound + } + ypos = (ypospanel.plotBottom)? ypos= panel.plotBottom: ypos=ypos; + panel.buffer.curveVertex(xpos, ypos); + } + panel.buffer.endShape(); +} + +const lollipop_doc='Because this signal represents the discrete time output of the analog-to-digital conversion process, it is drawn with a lollipop plot where each stem represents a single sample. '; +function drawDiscreteSignal(panel,signal){ + let gain = panel.plotHeight/2; + let visibleSamples = Math.floor(panel.plotWidth / panel.settings.downsamplingFactor/panel.settings.timeZoom+1); + for (let x = 0; x < visibleSamples; x++) { + let xpos = Math.round(panel.plotLeft + x * panel.settings.downsamplingFactor*panel.settings.timeZoom); + let ypos = panel.halfh - gain * signal[x]*panel.settings.ampZoom; + panel.drawStem(xpos,ypos,panel.halfh); + } +} + +function drawHorizontalTick(panel, text, height, tick_length = 5, side="left") { + panel.buffer.fill(panel.fill); + panel.buffer.textFont('Helvetica', panel.tickTextSize); + panel.buffer.textStyle(panel.buffer.ITALIC); + panel.buffer.strokeWeight(0); + panel.buffer.textAlign(panel.buffer.RIGHT); + let tickStart = panel.plotLeft-tick_length; + let tickEnd = panel.plotLeft; + if (side == "right"){ + panel.buffer.textAlign(panel.buffer.LEFT); + tickEnd = panel.plotRight+tick_length; + tickStart = panel.plotRight; + panel.buffer.text(text, tickEnd+2, height - panel.tickTextSize/2, panel.buffer.width , height + panel.tickTextSize/2); + } + else{ + panel.buffer.text(text, 0, height - panel.tickTextSize/2, tickStart , height + panel.tickTextSize/2); + + } + + panel.buffer.strokeWeight(panel.strokeWeight); + panel.buffer.line(tickStart , height, + tickEnd, height); +} + +function drawVerticalTick(panel, text, x, tick_length = 5) { + if (xpanel.plotRight){return}; + panel.buffer.fill(panel.fill); + panel.buffer.textFont('Helvetica', panel.tickTextSize); + panel.buffer.textAlign(panel.buffer.CENTER); + panel.buffer.textStyle(panel.buffer.ITALIC); + panel.buffer.strokeWeight(0); + // we draw the text in the center of an oversized box centered over the tick + // 20000 pixels should be more than enough for any reasonable tick text + panel.buffer.text(text, x - 10000, panel.plotBottom + tick_length, 20000, panel.ybezel - tick_length); + 
panel.buffer.strokeWeight(panel.strokeWeight); + panel.buffer.line(x, panel.plotBottom, x, panel.plotBottom + tick_length); +} + +const freq_amp_ticks_doc='Amplitude is plotted on the y-axis. Ticks on the left label the linear amplitude where 1.0 is equal to the maximum amplitude. '; +function drawFreqAmplitudeTicks(panel, pixel_max, num_ticks) { + for (let i = 0; i <= num_ticks; ++i) { + let tick_amp_pixels = i * pixel_max / num_ticks / panel.settings.ampZoom; + drawHorizontalTick(panel, (tick_amp_pixels/pixel_max).toFixed(2), panel.plotBottom - tick_amp_pixels*panel.settings.ampZoom, 5, "right"); + } +} + +const amp_ticks_doc='Amplitude is plotted on the y-axis. Ticks on the left label the linear amplitude where +/- 1.0 is equal to the maximum amplitude. '; +function drawSignalAmplitudeTicks(panel, pixel_max, num_ticks) { + for (let i = 1; i <= num_ticks; ++i) { + let tick_amp_pixels = i * pixel_max / num_ticks / panel.settings.ampZoom; + // let tick_amp_db = linToDB(tick_amp_pixels, pixel_max); + drawHorizontalTick(panel, (tick_amp_pixels/pixel_max).toFixed(2), panel.halfh - tick_amp_pixels*panel.settings.ampZoom,5,"right"); + drawHorizontalTick(panel, (-tick_amp_pixels/pixel_max).toFixed(2), panel.halfh + tick_amp_pixels*panel.settings.ampZoom,5,"right"); + // drawHorizontalTick(panel, tick_amp_db.toFixed(1) + 'dBFS', panel.halfh - tick_amp_pixels*panel.settings.ampZoom,5, "right"); + // drawHorizontalTick(panel, tick_amp_db.toFixed(1) + 'dBFS', panel.halfh + tick_amp_pixels*panel.settings.ampZoom,5, "right"); + } + // drawHorizontalTick(panel, '-inf dBFS', panel.halfh, 5, "right"); + drawHorizontalTick(panel, '0.00', panel.halfh, 5, "right"); +} + +const bin_amp_ticks_doc='Ticks on the right side of this plot label the numerical value assigned to a given amplitude by the simulated analog-to-digital conversion. The labels are written in hexadecimal unless the bit depth is 7 bits or lower, in which case the labels are in binary. '; +function drawSignalBinaryScaling(panel,pixel_max, num_ticks, settings){ + let maxInt = Math.pow(2, settings.bitDepth)-1; + let stepSize = (settings.quantType == "midTread")? 2/(maxInt-1) : 2/(maxInt); + let numTicks = Math.min(num_ticks,maxInt+1); + let tickScale =(maxInt+1)/numTicks; + let pixel_per_fullscale = pixel_max * panel.settings.ampZoom; + // let stepSize = (settings.quantType == "midRise")? 2/(numTicks-1) : 2/(numTicks); + + let val=-1; let tick; let plotVal; + for ( tick =0; tick= panel.plotTop-.1 && y <=panel.plotBottom+.1) { + if (maxInt<255){ + //if under 8 bits, we can write out binary values + drawHorizontalTick(panel, (Math.round(tick*tickScale)).toString(2).padStart(settings.bitDepth,"0"), y,5,"left"); + } + else { + //draw axis labels in hex because of limited space + drawHorizontalTick(panel, "0x" + (tick*tickScale).toString(16).padStart(4,"0"), y,5,"left"); + } + panel.buffer.stroke("gray"); + panel.buffer.drawingContext.setLineDash([5,5]); + panel.buffer.line(panel.plotLeft, y, panel.plotRight, y); + panel.buffer.drawingContext.setLineDash([]); // drawHorizontalTick(panel, tick.toString(2), y,5,"left"); + } + val = val + stepSize*tickScale; + } + +} + +const time_ticks_doc='Time is plotted on the x-axis. 
'; +function drawTimeTicks(panel, num_ticks, seconds_per_pixel) { + let tick_jump = Math.floor((panel.plotWidth) / num_ticks); + for (let i = 0; i < num_ticks; ++i) { + let x = i * tick_jump; + let text = (x * seconds_per_pixel * 1000).toFixed(1) + ' ms'; + drawVerticalTick(panel, text, x + panel.plotLeft); + } +} + +const freq_ticks_doc='Frequency is plotted on the x-axis. '; +function drawFreqTicks(panel, num_ticks, pixels_per_hz) { + let hz_per_pixel = 1/pixels_per_hz; + let tick_jump = Math.floor((panel.plotWidth) / num_ticks); + tick_jump=panel.plotWidth / num_ticks + for (let i = 0; i < num_ticks; ++i) { + let x = i * tick_jump; + if (xthis.plotRight) return; + let text = (x * hz_per_pixel).toFixed(0) + ' Hz'; + drawVerticalTick(panel, text, x + panel.plotLeft); + } +} + +function drawName(panel){ + panel.buffer.fill(panel.fill); + panel.buffer.strokeWeight(0); + panel.buffer.textAlign(panel.buffer.CENTER); + panel.buffer.textStyle(panel.buffer.NORMAL); + panel.buffer.textFont('Helvetica',12); + let textheight = panel.buffer.textSize() + panel.buffer.textDescent() + 1; + panel.buffer.text (panel.name, panel.plotLeft, panel.plotTop - textheight, panel.plotWidth, panel.ybezel); + panel.buffer.strokeWeight(panel.strokeWeight); +} + +function getColor(num){ + return [num*666%255,num*69%255,num*420%255] +} + +class inputSigPanel extends Panel { + constructor(){ + super(); + this.name="Input Signal Time Domain"; + this.description='This is a straightforward time domain plot of the input signal before "sampling", quantization, and "reconstruction". This signal corresponds with the authentic "analog" input to the simulated analog-to-digital conversion process. ' + + time_signal_doc + time_ticks_doc + amp_ticks_doc + midline_doc; + } + + drawPanel(){ + this.buffer.background(this.background); + drawSignal(this, this.settings.original); + drawMidLine(this); + drawName(this); + drawSignalAmplitudeTicks(this, this.plotHeight/2, 4); + drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate)); + this.drawBorder(); + } +} + +class deltaModPanel extends inputSigPanel { + constructor(){ + super(); + this.name="Input Signal Time Domain with Delta Modulation"; + this.description='This is an extension of the input signal in time domain with a step function illustrating delta modulation.' + + time_signal_doc + time_ticks_doc + amp_ticks_doc + midline_doc; + } + + drawPanel(){ + this.buffer.background(this.background); + drawSignal(this, this.settings.original); + drawDeltaModulation(this, this.settings.original); + drawMidLine(this); + drawName(this); + drawSignalAmplitudeTicks(this, this.plotHeight/2, 4); + drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate)); + this.drawBorder(); + } +} + +class reconstructedSigPanel extends Panel { + constructor(){ + super(); + this.name="Reconstructed Signal Time Domain"; + this.description='This is a straightforward time domain plot of the signal output from the simulated digital-to-analog conversion process. 
' + + time_signal_doc + time_ticks_doc + amp_ticks_doc + midline_doc; + } + + drawPanel(){ + this.buffer.background(this.background); + drawSignal(this, this.settings.reconstructed); + drawMidLine(this); + drawName(this); + drawSignalAmplitudeTicks(this, this.plotHeight/2, 4); + drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate)); + this.drawBorder(); + } +} + +const analytic_frequency_doc='Spikes are drawn at the appropriate frequency and amplitude based on the analytic definition of the signal determined by the frequency, number of harmonics, and harmonic amplitude scaling settings. As such, this plot should accurately reflect the frequency content of the signal without any influence of windowing or other considerations that would affect a discrete time fourier transform. Unfortunately, this approach does not reflect non-linear effects such as quantization and clipping, where applicable. '; +class inputSigFreqPanel extends freqPanel { + constructor(){ + super(); + this.name="Input Signal Frequency Domain"; + this.description='This is a frequency domain representation of the simulated "continuous time" input signal. ' + + analytic_frequency_doc + freq_ticks_doc + passband_doc; + } + + drawPanel(){ + this.buffer.background(this.background); + let pixels_per_hz = this.plotWidth / this.settings.maxVisibleFrequency; + drawPassBand(this); + // let harmInc = 1; + // if (this.settings.harmType =="Odd" || this.settings.harmType == "Even"){ harmInc=2;} + // let harmPeak = 1, harm =1, ampScale = 1; + let harm =1; + while (harm<=this.settings.numHarm){ + let hz = this.settings.harmonicFreqs[harm-1]; + let xpos = (hz * pixels_per_hz + this.plotLeft); + if (xpos > this.plotRight|| xpos< this.plotLeft) break; + // if (this.settings.harmSlope == "lin") {ampScale = 1 - (harm-1)/(this.settings.numHarm)}; + // if (this.settings.harmSlope == "1/x") {ampScale = 1/harmPeak}; + let height = this.settings.ampZoom * this.settings.amplitude * this.plotHeight *this.settings.harmonicAmps[harm-1]; + this.drawPeak(xpos, height, this.plotBottom) + harm+=1; + // (harmPeak ==1 && this.settings.harmType != "Odd")? harmPeak++ : harmPeak +=harmInc; + } + + + this.drawBorder(); + drawFreqTicks(this, this.numFreqTicks, pixels_per_hz); + drawFreqAmplitudeTicks(this, this.plotHeight, 9); + drawName(this); + } + +} + +function magnitude(real, cplx) { + return Math.sqrt(real * real + cplx * cplx); +} + +const fft_doc='Because the FFT is used here, there are visual artifacts introduced by the windowing process, and the frequency resolution of the plot is inherently limited by the size of the FFT. Note that the resolution is not increased when zooming in with the frequency zoom slider. 
'; +function drawFFT(panel, fft, tick='freq') { + let gain = panel.plotHeight * panel.settings.ampZoom; + let offset = 100; + let hz_per_bin = panel.settings.sampleRate / (fft.length / 2); + // fft.length / 2 because it is an interleaved complex array + // with twice as many elements as it has (complex) numbers + let pixels_per_hz = panel.plotWidth / panel.settings.maxVisibleFrequency; + let pixels_per_bin = pixels_per_hz * hz_per_bin; + let num_bins = Math.round(panel.plotWidth / pixels_per_bin); + let normalize = 4/fft.length; + + panel.buffer.background(panel.background); + panel.buffer.stroke(panel.stroke); + drawPassBand(panel); + panel.buffer.beginShape(); + panel.buffer.vertex(panel.plotLeft, panel.plotBottom); + for (let bin = 0; bin <= num_bins; bin++) { + let xpos = pixels_per_bin * bin + panel.plotLeft; + let ypos = panel.plotBottom - gain * normalize * magnitude(fft[2*bin], fft[2*bin+1]); + panel.buffer.vertex(xpos, ypos); + } + panel.buffer.vertex(panel.plotRight, panel.plotBottom); + panel.buffer.endShape(panel.buffer.CLOSE); + panel.buffer.strokeWeight(panel.strokeWeight); + panel.buffer.stroke(panel.stroke); + panel.drawBorder(); + drawName(panel); + if (tick == 'dirac') + drawDiracDashes(panel); + else + drawFreqTicks(panel, panel.numFreqTicks, pixels_per_hz); + drawFreqAmplitudeTicks(panel, panel.plotHeight, 9); +} + +class inputSigFFTPanel extends freqPanel { + constructor(){ + super(); + this.name = "Input Signal FFT"; + this.description='This plot shows the FFT of the input signal. ' + fft_doc + 'This plot clearly reveals one of the compromises inherent in the simulation; since everything must be represented by the computer, the ideal continuous time input signal must be approximated by a discrete time signal with a sufficiently high sampling rate. '; + } + + drawPanel() { + drawFFT(this, this.settings.originalFreq); + } +} + +class sampledInputFFTPanel extends freqPanel { + constructor(){ + super(); + this.name="Sampled Signal FFT"; + this.description='This plot shows the FFT of the signal output by the simulated analog-to-digital conversion. ' + fft_doc; + } + drawPanel() { + drawFFT(this, this.settings.stuffedFreq, 'dirac'); + } +} + +class reconstructedSigFFTPanel extends freqPanel { + constructor(){ + super(); + this.name="Reconstructed Signal FFT"; + this.description='This plot shows the FFT of the signal output by the simulated digital-to-analog conversion. ' + fft_doc + 'This plot clearly reveals one of the compromises inherent in the simulation; since everything must be represented by the computer, the ideal continuous time output signal must be approximated by a discrete time signal with a sufficiently high sampling rate. '; + } + drawPanel() { + drawFFT(this, this.settings.reconstructedFreq); + } +} + +class impulsePanel extends Panel { + constructor(){ + super() + this.strokeWeight=1; + this.ellipseSize=5; + this.name = "Sampling Signal Time Domain"; + this.description = 'This is a time domain plot of the dirac comb used to sample the input signal. Before quantization, the input signal is multiplied with this dirac comb; this is the "sampling" part of the analog-to-digital conversion process. 
' + + time_ticks_doc; + } + drawPanel(){ + let base = this.plotBottom; + let ytop = this.plotTop + 10; + this.buffer.background(this.background); + this.drawBorder(); + + let visibleSamples = Math.floor(this.plotWidth / this.settings.downsamplingFactor/this.settings.timeZoom+1); + for (let x = 0; x < visibleSamples; x++) { + let xpos = this.plotLeft + x * this.settings.downsamplingFactor*this.settings.timeZoom; + this.drawStem(xpos,ytop,base); + } + //I'm not sure dBs make sense here + // drawHorizontalTick(this, '0.0 dB', ytop); + // drawHorizontalTick(this, '-inf dB', base); + drawHorizontalTick(this, '1.0', ytop,5,"right"); + drawHorizontalTick(this, '0.0', base,5,"right"); + + drawTimeTicks(this, this.numTimeTicks, this.settings.timeZoom/(this.settings.sampleRate)); + drawName(this); + } +} + +class impulseFreqPanel extends freqPanel { + constructor(){ + super(); + this.name="Sampling Signal Frequency Domain"; + this.description = 'This is a frequency domain plot of the dirac comb used to sample the input signal. The sampling process causes the frequency content of the input signal to be convolved with the frequency response of the dirac comb, resulting in periodic images of the input signal frequency at mulitples of the sampling frequency. '; + } + drawPanel(){ + this.bufferInit(); + let base = this.plotBottom; + let pixels_per_hz = this.plotWidth / this.settings.maxVisibleFrequency; + let sampleRate = this.settings.sampleRate / this.settings.downsamplingFactor; + let numPeaks = Math.round(this.settings.maxVisibleFrequency / sampleRate); + + for (let peak = 0; peak <= numPeaks; peak++) { + let hz = peak * this.settings.sampleRate / this.settings.downsamplingFactor; + let xpos = hz * pixels_per_hz + this.plotLeft; + let color = getColor(peak); + this.drawPeak(xpos, this.plotHeight, base, color) + let text = peak.toFixed(0) + ' fs'; + drawVerticalTick(this, text, xpos); + } + + drawFreqAmplitudeTicks(this, this.plotHeight, 9); + this.drawBorder(); + drawName(this); + } +} + +class sampledInputPanel extends Panel{ + constructor(){ + super() + this.strokeWeight=1; + this.ellipseSize=5; + this.name="Sampled Signal Time Domain"; + this.description = lollipop_doc + time_ticks_doc + amp_ticks_doc + bin_amp_ticks_doc + midline_doc; + } + + drawPanel(){ + this.buffer.background(this.background); + drawDiscreteSignal(this,this.settings.downsampled) + drawMidLine(this); + drawName(this); + drawSignalAmplitudeTicks(this, this.plotHeight/2, 4); + drawSignalBinaryScaling(this, this.plotHeight/2, 16,this.settings); + + drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate)); + this.drawBorder(); + } +} + +const passband_doc='The frequency range below the nyquist frequency is highlighted by a light grey background. 
'; +function drawPassBand(panel) { + let sampleRate = panel.settings.sampleRate/panel.settings.downsamplingFactor; + let pixels_per_hz = panel.plotWidth / panel.settings.maxVisibleFrequency; + panel.buffer.strokeWeight(0); + panel.buffer.fill(235); + let passbandcutoff = sampleRate/2; + let passbandpixelwidth = passbandcutoff * pixels_per_hz; + panel.buffer.rect(panel.plotLeft, panel.plotTop, passbandpixelwidth, panel.plotHeight); + panel.buffer.strokeWeight(panel.strokeWeight); + panel.buffer.fill(panel.fill); +} + +function calculateNumImages(settings) { + // calculate the number of spectral images to draw so that the highest frequency + // image's lowest negative harmonic is visible + let sampleRate = settings.sampleRate / settings.downsamplingFactor; + let max_harmonic = settings.harmonicFreqs[settings.harmonicFreqs.length - 1]; + let numImages = 0; + while (numImages * sampleRate - max_harmonic < settings.maxVisibleFrequency) + numImages++; + return numImages; +} + +function drawDiracDashes(panel) { + let sampleRate = panel.settings.sampleRate / panel.settings.downsamplingFactor; + let pixels_per_hz = panel.plotWidth / panel.settings.maxVisibleFrequency; + let numImages = calculateNumImages(panel.settings); + + for (let image = 0; image <= numImages; image++) { + let color = getColor(image); + let imagehz = image * sampleRate; // frequency of a dirac comb harmonic that the input spectrum is convolved with + let xpos = imagehz * pixels_per_hz + panel.plotLeft; + + // draw the dotted line associated with this dirac comb image + panel.buffer.stroke(color); + panel.buffer.drawingContext.setLineDash([5,5]); + panel.buffer.line(xpos, panel.plotTop, xpos, panel.plotBottom); + panel.buffer.drawingContext.setLineDash([]); + + // label the dotted line associated with this dirac comb image + let fstext = imagehz.toFixed(0) + ' Hz'; + drawVerticalTick(panel, fstext, xpos); + } +} + +class sampledInputFreqPanel extends freqPanel{ + constructor(){ + super(); + this.name = "Sampled Signal Frequency Domain"; + this.description='This is a frequency domain representation of the output from the simulated analog-to-digital conversion process. ' + analytic_frequency_doc + 'Notice that periodic images of the input signal are present at multiples of the sampling frequency. These are later removed by the digital-to-analog conversion process, leaving only the frequency content below the Nyquist frequency (whether that content was present in the original signal or introduced by one of the period aliases at multiples of the sampling frequency, i.e. aliasing). ' + + freq_ticks_doc + passband_doc; + } + + drawPanel(){ + this.buffer.background(this.background); + this.buffer.stroke(this.stroke); + drawPassBand(this); + drawDiracDashes(this); + + let base = this.plotBottom; + let sampleRate = this.settings.sampleRate / this.settings.downsamplingFactor; + let pixels_per_hz = this.plotWidth / this.settings.maxVisibleFrequency; + let numImages = calculateNumImages(this.settings); + + for (let image = 0; image <= numImages; image++) { + + let color = getColor(image); + let imagehz = image * sampleRate; // frequency of a dirac comb harmonic that the input spectrum is convolved with + + for (let harm = 1; harm <= this.settings.numHarm; harm++) { + + let hzNegative = imagehz - this.settings.harmonicFreqs[harm-1]; + let hzPositive = imagehz + this.settings.harmonicFreqs[harm-1]; + + if (hzNegative < 0) hzNegative = 0 + (0 - hzNegative); //Reflect at 0. TODO should technically use a new color. 
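+        // Each spectral image k contributes components at k*fs - f and k*fs + f for every
+        // input partial f. When k*fs - f falls below 0 Hz it is drawn mirrored about 0 Hz,
+        // because the magnitude spectrum of a real signal is symmetric; e.g. with fs = 12 kHz,
+        // the k = 1 image of a 15 kHz partial lands at 12 - 15 = -3 kHz and is plotted at 3 kHz.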
+ // don't reflect at sampleRate because we are already drawing the negative frequency images + + let positiveHeight = this.settings.ampZoom * this.settings.amplitude*this.plotHeight*this.settings.harmonicAmps[harm-1]; + let negativeHeight = this.settings.ampZoom * this.settings.amplitude*this.plotHeight*this.settings.harmonicAmps[harm-1]; + let xNegative = hzNegative * pixels_per_hz + this.plotLeft; + let xPositive = hzPositive * pixels_per_hz + this.plotLeft; + if (xNegative < this.plotRight) this.drawPeak(xNegative, negativeHeight, base, color); + if (xPositive < this.plotRight) this.drawPeak(xPositive, positiveHeight, base, color); + } + } + + this.drawBorder(); + drawFreqAmplitudeTicks(this, this.plotHeight, 9); + drawName(this); + } +} + +class quantNoisePanel extends Panel{ + constructor(){ + super() + this.strokeWeight=1; + this.ellipseSize=5; + this.name ="Quantization Noise Time Domain"; + this.description = 'This plot shows the difference between the sampled signal before and after quantization, representing the error introduced by the quantization process. ' + + time_ticks_doc + amp_ticks_doc + midline_doc; + } + drawPanel(){ + this.buffer.background(this.background); + drawDiscreteSignal(this, this.settings.quantNoise); + drawMidLine(this); + drawName(this); + drawSignalAmplitudeTicks(this, this.plotHeight/2, 4); + drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate)); + this.drawBorder(); + } +} + +class quantNoiseFFTPanel extends Panel{ + constructor(){ + super(); + this.name ="Quantization Noise FFT"; + this.description = 'This plot shows the frequency content of the error introduced by the quantization process. ' + + fft_doc + freq_ticks_doc + passband_doc; + this.ellipseSize=2; + this.xAxis = "Frequency"; + } + drawPanel(){ + drawFFT(this, this.settings.quantNoiseFreq); + } +} + +class inputPlusSampledPanel extends Panel { + constructor() { + super(); + this.name = "Input with Sampled Signal Time Domain"; + this.description = 'This plot shows the input signal with the sampled signal overlayed on top. See the documentation for the input signal time domain and sampled signal time domain for more information. '; + this.ellipseSize = 5; + } + + drawPanel() { + this.buffer.background(this.background); + drawDiscreteSignal(this,this.settings.downsampled) + this.buffer.stroke("gray"); + drawSignal(this, this.settings.original); + drawMidLine(this); + drawName(this); + drawSignalAmplitudeTicks(this, this.plotHeight/2, 4); + drawSignalBinaryScaling(this, this.plotHeight/2, 16,this.settings); + drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate)); + this.drawBorder(); + } +} +class inputPlusSampledPanel_no_binary extends Panel { + constructor() { + super(); + this.name = "Input with Sampled Signal Time Domain"; + this.description = 'This plot shows the input signal with the sampled signal overlayed on top. See the documentation for the input signal time domain and sampled signal time domain for more information. 
'; + this.ellipseSize = 5; + } + + drawPanel() { + this.buffer.background(this.background); + drawDiscreteSignal(this,this.settings.downsampled) + this.buffer.stroke("gray"); + drawSignal(this, this.settings.original); + drawMidLine(this); + drawName(this); + drawSignalAmplitudeTicks(this, this.plotHeight/2, 4); + drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate)); + this.drawBorder(); + } +} + +class allSignalsPanel extends Panel { + constructor() { + super(); + this.name = "Input (solid), Sampled (lollipop), Reconstructed (dotted), Time Domain"; + this.description = 'This plot combines the input signal, sampled signal, and reconstructed signal time domain plots. See the documentation for each individual plot for more information. '; + this.ellipseSize = 5; + + } + + drawPanel() { + this.buffer.background(this.background); + drawDiscreteSignal(this,this.settings.downsampled) + drawSignal(this, this.settings.original); + this.buffer.drawingContext.setLineDash([5,5]); + drawSignal(this, this.settings.reconstructed); + this.buffer.drawingContext.setLineDash([]); + drawMidLine(this); + drawName(this); + drawSignalAmplitudeTicks(this, this.plotHeight/2, 4); + drawTimeTicks(this, this.numTimeTicks/this.settings.timeZoom, 1/(this.settings.timeZoom*this.settings.sampleRate)); + this.drawBorder(); + } +} diff --git a/delta-modulation/slider_delta.js b/delta-modulation/slider_delta.js new file mode 100644 index 0000000..5b89796 --- /dev/null +++ b/delta-modulation/slider_delta.js @@ -0,0 +1,353 @@ +class slider{ + button; + slider; + constructor(){ + } + + setup(p, settings){ + // should be overridden to set up the slider + } + + updateValue(p){ + this.settings[this.propName] = this.slider.value(); + this.displayVal = this.calcDisplayVal(); + this.textBox.value(this.displayVal); + this.textLabel.html(this.name+': '); + } + + onEdit(){ + this.updateValue(); + this.settings.render(); + this.settings.p5.draw(); + } + + makeSlider(p){ + this.slider = p.createSlider(this.min, this.max, this.initial, this.step); + this.textLabel = p.createP(); + this.slider.input(this.onEdit.bind(this)); + this.slider.mousePressed(this.onEdit.bind(this)); + this.slider.mouseReleased(this.onEdit.bind(this)); + this.textBox = p.createInput(); + this.textBox.size(300); + this.button = p.createButton("Update"); + // this.button.size(200) + this.button.mousePressed(this.buttonPressed.bind(this)); + this.button.mouseReleased(this.onEdit.bind(this)); + this.slider.parent(this.settings.element.id); + this.button.parent(this.settings.element.id); + this.textBox.parent(this.settings.element.id); + this.textLabel.parent(this.settings.element.id); + } + + resize(x, y, w, p){ + let width = w - 20; + let labelWidth = 140; + width -= labelWidth; + let sliderWidth = width * 0.6; + width -= sliderWidth; + let textboxWidth = width *.4; + width -= textboxWidth; + let buttonWidth = width; + + this.slider.style('width', Math.round(sliderWidth).toString() + "px"); + this.slider.position(x, y); + this.textLabel.position(x + this.slider.width + 10, y - 15); + this.textBox.position(x+this.slider.width + labelWidth,y); + this.textBox.style('width', Math.round(textboxWidth).toString() + "px"); + this.button.position(this.textBox.x+this.textBox.width+5,y); + this.button.style('width', Math.round(buttonWidth).toString() + "px"); + } + buttonPressed(){ + this.slider.value(this.calcSliderVal()); } + + calcSliderVal(){ + // override this with any calculations needed to convert textbox val 
to slider val (%, etc) + return this.textBox.value(); + } + calcDisplayVal(){ + // override this with any calculations needed to convert stored variable to display val (%, etc) + return this.settings[this.propName]; + } + } + + + class freqSlider extends slider{ + setup(p,settings){ + this.settings = settings; + this.name ="Frequency (Hz)"; + this.propName = "fundFreq"; + this.min = 0; + this.max = this.settings.sampleRate / 4 ; + this.initial = 440; + this.step = 1.0; + this.displayVal = this.initial; + this.makeSlider(p); + } + + } + + class numHarmSlider extends slider{ + setup(p,settings){ + this.settings = settings; + this.name ="Number of harmonics"; + this.propName="numHarm" + this.min = 1; + this.max = 100; + this.initial = 1; + this.step = 1; + this.displayVal = this.initial; + this.oddEvenSel = p.createSelect(); + this.oddEvenSel.option("Odd"); + this.oddEvenSel.option("Even"); + this.oddEvenSel.option("All"); + this.oddEvenSel.selected(this.settings.harmType); + this.oddEvenSel.changed(()=>this.settings.harmType = this.oddEvenSel.value()); + this.oddEvenSel.parent(this.settings.element.id); + + this.slopeSel = p.createSelect(); + this.slopeSel.option("1/x"); + this.slopeSel.option("1/x2"); + this.slopeSel.option("lin"); + this.slopeSel.option("flat"); + this.slopeSel.option("log"); + this.slopeSel.selected(this.settings.harmSlope); + this.slopeSel.changed(()=>this.settings.harmSlope = this.slopeSel.value()); + this.slopeSel.parent(this.settings.element.id); + this.makeSlider(p); + } + resize(x, y, w, p){ + + let width = w - 20; + let labelWidth = 170; + width -= labelWidth; + let sliderWidth = width * 0.5; // slider + dropdowns + width -= sliderWidth; + let dropDownWidth = sliderWidth * .25-10; // Make slider + dropdown the same width as other sliders. 
+ sliderWidth = sliderWidth * .75; // Slider + let textboxWidth = width * 0.42; + let buttonWidth = width*.4; + + this.slider.style('width', Math.round(sliderWidth).toString() + "px"); + this.slider.position(x, y); + this.oddEvenSel.style('width', Math.round(dropDownWidth).toString() + "px"); + this.oddEvenSel.position(x+this.slider.width+10,y); + this.slopeSel.style('width', Math.round(dropDownWidth).toString() + "px"); + this.slopeSel.position(x+this.slider.width+dropDownWidth+10,y); + this.textLabel.position(x + 2*dropDownWidth + this.slider.width + 20, y - 15); + this.textBox.position(x + this.slider.width + 2*dropDownWidth+ labelWidth+10,y); + this.textBox.style('width', Math.round(textboxWidth).toString() + "px"); + this.button.position(this.textBox.x + this.textBox.width,y); + this.button.style('width', Math.round(buttonWidth).toString() + "px"); + } + } + + + class sampleRateSlider extends slider{ + setup(p,settings){ + this.settings = settings; + this.name ="Sample Rate(Hz):"; + this.propName="downsamplingFactor"; + this.min = p.log(3000)/p.log(2); + this.max = p.log(48000)/p.log(2); + this.initial = p.log(48000)/p.log(2); + this.step = 0.1 + this.makeSlider(p); + } + calcDisplayVal(){ + return this.displayVal= Math.round(this.settings.sampleRate / this.settings.downsamplingFactor , 3);// + } + calcSliderVal(){ + return Math.log(this.textBox.value())/Math.log(2); + } + + updateValue(p){ + this.settings.downsamplingFactor = Math.round(WEBAUDIO_MAX_SAMPLERATE/Math.pow(2, this.slider.value())); + this.displayVal = this.calcDisplayVal(); + this.textBox.value(this.displayVal);// + this.textLabel.html(this.name);// + p.round(this.settings.sampleRate / this.settings.downsamplingFactor / 1000, 3) + " kHz") + } + } + + class sampleRateDeltaSlider extends sampleRateSlider{ + setup(p,settings){ + this.settings = settings; + this.name ="Sample Rate (Hz):"; + this.propName="downsamplingFactor"; + this.min = p.log(1500)/p.log(2); + this.max = p.log(this.settings.deltaFrequency)/p.log(2); + this.initial = p.log(1500)/p.log(2); + this.step = 0.1 + this.makeSlider(p); + } + + calcDisplayVal(){ + return this.displayVal= Math.round(this.settings.deltaFrequency / this.settings.downsamplingFactor , 3);// + } + + updateValue(p){ + this.settings.downsamplingFactor = Math.round(this.settings.deltaFrequency/Math.pow(2, this.slider.value()))/32; + this.displayVal = this.calcDisplayVal(); + this.textBox.value(this.displayVal);// + this.textLabel.html(this.name);// + p.round(this.settings.sampleRate / this.settings.downsamplingFactor / 1000, 3) + " kHz") + } + } + + class deltaStepSlider extends slider { + setup(p,settings){ + this.settings = settings; + this.name ="Delta Step (%):"; + this.propName="deltaStep"; + this.min = 0.001; + this.max = 0.1; + this.initial = 0.05; + this.step = 0.001; + this.makeSlider(p); + } + + } + + class ditherSlider extends slider { + setup(p,settings){ + this.settings = settings; + this.name ="Dither"; + this.propName="dither"; + this.min = 0.0; + this.max = 1.0; + this.initial = 0.0; + this.step = 0.01; + this.makeSlider(p); + } + + } + + class bitDepthSlider extends slider { + setup(p,settings){ + this.settings = settings; + this.name ="Bit Depth"; + this.propName = "bitDepth"; + this.min = 1; + this.max = BIT_DEPTH_MAX; + this.initial = BIT_DEPTH_MAX; + this.step = 1; + this.makeSlider(p); + } + + } + + class amplitudeSlider extends slider { + setup(p,settings){ + this.settings = settings; + this.propName ="amplitude"; + this.name = "Amplitude"; + this.min = 0.0; + this.max 
= 5; + this.initial = 1.0; + this.step = 0.01; + this.makeSlider(p); + } + + } + + class antialiasingSlider extends slider { + setup(p, settings){ + this.settings = settings; + this.propName ="antialiasing"; + this.name = "Antialiasing filter order"; + this.min = 0.0; + this.max = 200; + this.initial = 0; + this.step = 10; + this.makeSlider(p); + } + } + + class phaseSlider extends slider{ + setup(p,settings){ + this.settings = settings; + this.propName ="phase"; + this.name = "Phase (Degrees)"; + this.min = 0; + this.max = 360; //pi + this.initial = 0.0; + this.step = 1; //pi/8 + this.makeSlider(p); + } + + calcDisplayVal(){return this.settings[this.propName];} + } + class zoomSlider extends slider{ + calcDisplayVal(){return this.settings[this.propName]*100;} + calcSliderVal(){ + if (isNaN(this.textBox.value())){ + return this.slider.value(); + } + else{ + return this.textBox.value()/100; + } + } + } + class ampZoomSlider extends zoomSlider{ + setup(p,settings){ + this.settings = settings; + this.name ="Amp. Zoom (%)"; + this.propName="ampZoom"; + this.min = .1; + this.max = 4.0; + this.initial =1.0; + this.step = .01; + this.makeSlider(p); + } + } + + const minTimeZoom = .25; + class timeZoomSlider extends zoomSlider{ + setup(p,settings){ + this.settings = settings; + this.propName ="timeZoom"; + this.name = "Time zoom (%)" + this.min = minTimeZoom; + this.max = 5; + this.initial = 1.0; + this.step = .01; + this.makeSlider(p); + } + + } + + const minFreqZoom = 0.5; + class freqZoomSlider extends zoomSlider{ + setup(p,settings){ + this.settings = settings; + this.propName ="freqZoom"; + this.min = minFreqZoom; + this.max = 3; + this.initial = 1.0; + this.step = .01; + this.makeSlider(p); + } + updateValue(p){ + this.settings.freqZoom = this.slider.value(); + this.settings.maxVisibleFrequency = WEBAUDIO_MAX_SAMPLERATE/2/this.settings.freqZoom; + this.textBox.value(this.settings.freqZoom*100); + this.textLabel.html('Freq. zoom (%):'); + } + } + + class freqZoomSlider_tutorial_1 extends zoomSlider{ + setup(p,settings){ + this.settings = settings; + this.propName ="freqZoom"; + this.min = 1.0; + this.max = 3; + this.initial = 2.0; + this.step = .01; + this.makeSlider(p); + } + updateValue(p){ + this.settings.freqZoom = this.slider.value(); + this.settings.maxVisibleFrequency = WEBAUDIO_MAX_SAMPLERATE/2/this.settings.freqZoom; + this.textBox.value(this.settings.freqZoom*100); + this.textLabel.html('Freq. zoom (%):'); + } + } + \ No newline at end of file diff --git a/delta-modulation/styles_delta.css b/delta-modulation/styles_delta.css new file mode 100644 index 0000000..e69de29 diff --git a/delta-modulation/waves_delta.js b/delta-modulation/waves_delta.js new file mode 100644 index 0000000..b24a4c1 --- /dev/null +++ b/delta-modulation/waves_delta.js @@ -0,0 +1,375 @@ +/* + + +# The Digital Audio Workbench + +https://idmil.gitlab.io/course-materials/mumt203/interactive-demos + +## Introduction + +The purpose of the digital audio workbench is to illustrate key concepts in +digital audio theory with interactive visualizations of each stage of the +analog-to-digial conversion (ADC) and digital-to-analog conversion (DAC) +processes. These visualizations are inspired by demonstrations using +oscilloscopes and spectrum analyzers to compare the analog signal input into +the ADC process with the analog signal output by the DAC process, e.g. 
+https://youtu.be/cIQ9IXSUzuM + +By experimenting with the settings of the simulation, numerous key concepts in +digital signal theory can be nicely illustrated, such as aliasing, quantization +error, critical sampling, under and oversampling, and many others. The +interactive interface allows the simulation to be explored freely; users can +examine the signals both visually through numerous graphs, or by listening to +the test signals directly. + +## Implementation + +Since our demonstration takes place purely in the digital domain, we +unfortunately cannot use real continuous time analog inputs and outputs. +Instead, we simulate the ADC-DAC processes in the discrete time domain. The +analog input and output are represented as discrete time signals with a high +sampling rate; at the time of writing, the maximum sampling rate supported +by WebAudio is 96 kHz. + +The ADC process consists of several steps, including antialiasing, sampling, +and quantization. All of these are simulated in our model: antialiasing is +achieved with a windowed sinc FIR lowpass filter of order specified by the +user; sampling is approximated by downsampling the input signal by an +integer factor; and quantization is achieved by multiplying the sampled +signal (which ranges from -1.0 to 1.0) by the maximum integer value possible +given the requested bit depth (e.g. 255 for a bit depth of 8 bits), and then +rounding every sample to the nearest integer. The DAC process is simulated +in turn by zero stuffing and lowpass filtering the sampled and quantized +output of the ADC simultion. + +In summary, the continuous time input is simulated by a 96 kHz discrete time +signal, the sampled output of the ADC process is simulated by a downsampled +and quantized signal, and the continuous time reconstruction output by the +DAC is simulated by upsampling the "sampled" signal back to 96 kHz. In our +tests we have found this model to be reasonable; many key concepts, such as +critical sampling, aliasing, and quantization noise are well represented in +our simulation. + +For more details, the reader is encouraged to peruse the rest of the source +code in this document. Many comments have been included to aid readers who +are unfamiliar with javascript. Any questions you may have about the +implementation of the simulation can only be definitively answered by +understanding the source code, but please feel free to contact the project +maintainers if you have any questions. + +```javascript +*/ + +// `renderWavesImpl` returns an anonymous function that is bound in the widget +// constructor. This is done in order to seperate the implementation of the +// simulation from the other implementation details so that this documentation +// can be more easily accessed. + +const soundTimeSeconds = 1.5; +const fadeTimeSeconds = 0.125; +function renderWavesImpl(settings, fft, p) { return (playback = false) => { + + // if we are not rendering for playback, we are rendering for simulation + let simulation = !playback; + + // select the buffer to render to; playback buffer, or simulation buffer + var original = playback ? settings.original_pb : settings.original; + var reconstructed = playback ? settings.reconstructed_pb : settings.reconstructed; + var stuffed = settings.stuffed; + + // calculate harmonics ------------------------------------------------------ + + // The signal is generated using simple additive synthesis. Because of this, + // the exact frequency content of the signal can be determined a priori based + // on the settings. 
We generate this information here so that it can be used + // not only by the synthesis process below, but also by several of the graphs + // used to illustrate the frequency domain content of the signal. + + // We only calculate the harmonics for the simulation; it is assumed they will + // already have been calculated earlier when rendering for playback + + if (simulation) { + let harmonic_number = 1; + let harmonic_amplitude = 1; + let invert = 1; + let harmInc = (settings.harmType =="Odd" || settings.harmType == "Even") ? 2 : 1; + + for (let i = 0; simulation && i < settings.numHarm; i++) { + + // the amplitude of each harmonic depends on the harmonic slope setting + if (settings.harmSlope == "lin") harmonic_amplitude = 1 - i/settings.numHarm; + else if (settings.harmSlope == "1/x") harmonic_amplitude = 1/harmonic_number; + else if (settings.harmSlope == "1/x2") harmonic_amplitude = 1/harmonic_number/harmonic_number; + else if (settings.harmSlope == "flat") harmonic_amplitude = 1; + else if (settings.harmSlope == "log") {harmonic_amplitude = Math.exp(-0.1*(harmonic_number-1)); + console.log(harmonic_amplitude)} + + // In case the harmonic slope is 1/x^2 and the harmonic type is "odd", + // by inverting every other harmonic we generate a nice triangle wave. + if (settings.harmSlope =="1/x2" && settings.harmType == "Odd") { + harmonic_amplitude = harmonic_amplitude * invert; + invert *= -1; + } + + // the frequency of each partial is a multiple of the fundamental frequency + settings.harmonicFreqs[i] = harmonic_number*settings.fundFreq; + + // The harmonic amplitude is calculated above according to the harmonic + // slope setting, taking into account the special case for generating a + // triangle. + settings.harmonicAmps[i] = harmonic_amplitude; + + // With harmonic type set to "even" we want the fundamental and even + // harmonics. To achieve this, we increment the harmonic number by 1 after + // the fundamental and by 2 after every other partial. + if (i == 0 && settings.harmType == "Even") harmonic_number += 1; + else harmonic_number += harmInc; + } + } + + // render original wave ----------------------------------------------------- + + // initialize the signal buffer with all zeros (silence) + original.fill(0); + + // For the sample at time `n` in the signal buffer `original`, + // generate the sum of all the partials based on the previously calculated + // frequency and amplitude values. 
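+  // Written out, each output sample is
+  //   x[n] = sum over k of A_k * sin(2*pi*f_k*n / fs + phi * f_k / f_0)
+  // where f_k and A_k are harmonicFreqs[k] and harmonicAmps[k], fs is
+  // WEBAUDIO_MAX_SAMPLERATE, and phi is the phase setting converted to radians
+  // (the offset is scaled by f_k / f_0 so that all partials shift together in time).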
+ original.forEach( (_, n, arr) => { + for (let harmonic = 0; harmonic < settings.numHarm; harmonic++) { + + let fundamental_frequency = settings.harmonicFreqs[0]; + let frequency = settings.harmonicFreqs[harmonic]; + let amplitude = settings.harmonicAmps[harmonic]; + + // convert phase offset specified in degrees to radians + let phase_offset = Math.PI / 180 * settings.phase; + + // adjust phase offset so that harmonics are shifted appropriately + let phase_offset_adjusted = phase_offset * frequency / fundamental_frequency; + + let radian_frequency = 2 * Math.PI * frequency; + let phase_increment = radian_frequency / WEBAUDIO_MAX_SAMPLERATE; + let phase = phase_increment * n + phase_offset_adjusted; + + // accumulate the amplitude contribution from the current harmonic + arr[n] += amplitude * Math.sin( phase ); + } + }); + + // linearly search for the maximum amplitude value (easy but not efficient) + let max = 0; + original.forEach( (x, n, y) => {if (x > max) max = x} ); + + // normlize and apply amplitude scaling + original.forEach( (x, n, y) => y[n] = settings.amplitude * x / max ); + + // apply antialiasing filter if applicable ---------------------------------- + + // The antialiasing and reconstruction filters are generated using Fili.js. + // (https://github.com/markert/fili.js/) + let firCalculator = new Fili.FirCoeffs(); + // Fili uses the windowed sinc method to generate FIR lowpass filters. + // Like real antialiasing and reconstruction filters, the filters used in the + // simulation are not ideal brick wall filters, but approximations. + + // apply antialiasing only if the filter order is set + if (settings.antialiasing > 1) { + + // specify the filter parameters; Fs = sampling rate, Fc = cutoff frequency + + // The cutoff for the antialiasing filter is set to the Nyquist frequency + // of the simulated sampling process. The sampling rate of the "sampled" + // signal is WEBAUDIO_MAX_SAMPLERATE / the downsampling factor. This is + // divided by 2 to get the Nyquist frequency. + var filterCoeffs = firCalculator.lowpass( + { order: settings.antialiasing + , Fs: WEBAUDIO_MAX_SAMPLERATE + , Fc: (WEBAUDIO_MAX_SAMPLERATE / settings.downsamplingFactor) / 2 + }); + + // generate the filter + var filter = new Fili.FirFilter(filterCoeffs); + + // apply the filter + original.forEach( (x, n, y) => y[n] = filter.singleStep(x) ); + + // time shift the signal by half the filter order to compensate for the + // delay introduced by the FIR filter + original.forEach( (x, i, arr) => arr[i - settings.antialiasing/2] = x ); + } + + // downsample original wave ------------------------------------------------- + + // zero initialize the reconstruction, and zero stuffed buffers + reconstructed.fill(0); + stuffed.fill(0); + + // generate new signal buffers for the downsampled signal and quantization + // noise whose sizes are initialized according to the currently set + // downsampling factor + if (playback) { + settings.downsampled_pb = new Float32Array(p.round(original.length / settings.downsamplingFactor)); + settings.quantNoise_pb = new Float32Array(p.round(original.length / settings.downsamplingFactor)); + } else { + settings.downsampled = new Float32Array(p.round(original.length / settings.downsamplingFactor)); + settings.quantNoise = new Float32Array(p.round(original.length / settings.downsamplingFactor)); + } + var downsampled = playback ? settings.downsampled_pb : settings.downsampled; + var quantNoise = playback ? 
settings.quantNoise_pb : settings.quantNoise; + var quantNoiseStuffed = settings.quantNoiseStuffed; + quantNoiseStuffed.fill(0); + + // calculate the maximum integer value representable with the given bit depth + let maxInt = p.pow(2, settings.bitDepth) - 1; + + let stepSize = (settings.quantType == "midTread") ? 2/(maxInt-1) : 2/(maxInt); + + // generate the output of the simulated ADC process by "sampling" (actually + // just downsampling), and quantizing with dither. During this process, we + // also load the buffer for the reconstructed signal with the sampled values; + // this allows us to skip an explicit zero-stuffing step later + + downsampled.forEach( (_, n, arr) => { + + // keep only every kth sample where k is the integer downsampling factor + let y = original[n * settings.downsamplingFactor]; + y = y > 1.0 ? 1.0 : y < -1.0 ? -1.0 : y; // apply clipping + + // if the bit depth is set to the maximum, we skip quantization and dither + if (settings.bitDepth == BIT_DEPTH_MAX) { + + // record the sampled output of the ADC process + arr[n] = y; + + // sparsely fill the reconstruction and zero stuffed buffers to avoid + // having to explicitly zero-stuff + reconstructed[n * settings.downsamplingFactor] = y; + stuffed[n * settings.downsamplingFactor] = y * settings.downsamplingFactor; + return; + } + + // generate dither noise + let dither = (2 * Math.random() - 1) * settings.dither; + + let quantized; + // Add dither signal and quantize. Constrain so we dont clip after dither + switch(settings.quantType) { + case "midTread" : + quantized = stepSize*p.floor(p.constrain((y+dither),-1,0.99)/stepSize + 0.5); + break; + case "midRise" : + quantized = stepSize*(p.floor(p.constrain((y+dither),-1,0.99)/stepSize) + 0.5); + break; + } + + // record the sampled and quantized output of the ADC process with clipping + arr[n] = quantized; + + + // sparsely fill the reconstruction buffer to avoid having to zero-stuff + reconstructed[n * settings.downsamplingFactor] = quantized; + stuffed[n * settings.downsamplingFactor] = quantized * settings.downsamplingFactor; + + // record the quantization error + quantNoise[n] = quantized - y; + quantNoiseStuffed[n * settings.downsamplingFactor] = quantNoise[n]; + }); + + // render reconstructed wave by low pass filtering the zero stuffed array---- + + // specify filter parameters; as before, the cutoff is set to the Nyquist + var filterCoeffs = firCalculator.lowpass( + { order: 200 + , Fs: WEBAUDIO_MAX_SAMPLERATE + , Fc: (WEBAUDIO_MAX_SAMPLERATE / settings.downsamplingFactor) / 2 + }); + + // generate the filter + var filter = new Fili.FirFilter(filterCoeffs); + + // apply the filter + reconstructed.forEach( (x, n, arr) => { + let y = filter.singleStep(x); + + // To retain the correct amplitude, we must multiply the output of the + // filter by the downsampling factor. + arr[n] = y * settings.downsamplingFactor; + }); + + // time shift the signal by half the filter order to compensate for the delay + // introduced by the FIR filter + reconstructed.forEach( (x, n, arr) => arr[n - 100] = x ); + + // render FFTs -------------------------------------------------------------- + // TODO: apply windows? + + // The FFTs of the signals at the various stages of the process are generated + // using fft.js (https://github.com/indutny/fft.js). 
The call to + // `realTransform()` performs the FFT, and the call to `completeSpectrum` + // fills the upper half of the spectrum, which is otherwise not calculated + // since it is a redundant reflection of the lower half of the spectrum. + + if (simulation) { + fft.realTransform(settings.originalFreq, original); + fft.completeSpectrum(settings.originalFreq); + + fft.realTransform(settings.stuffedFreq, stuffed) + fft.completeSpectrum(settings.reconstructedFreq); + + fft.realTransform(settings.reconstructedFreq, reconstructed) + fft.completeSpectrum(settings.reconstructedFreq); + + fft.realTransform(settings.quantNoiseFreq, quantNoiseStuffed) + fft.completeSpectrum(settings.quantNoiseFreq); + } + + // fade in and out and suppress clipping distortions ------------------------ + + // Audio output is windowed to prevent pops. The envelope is a simple linear + // ramp up at the beginning and linear ramp down at the end. + + if (playback) { + // This normalization makes sure the original signal isn't clipped. + // The output is clipped during the simulation, so this may reduce its peak + // amplitude a bit, but since the clipping adds distortion the perceived + // loudness is relatively the same as the original signal in my testing. + let normalize = settings.amplitude > 1.0 ? settings.amplitude : 1.0; + + // Define the fade function + let fade = (_, n, arr) => { + let fadeTimeSamps = Math.min(fadeTimeSeconds * WEBAUDIO_MAX_SAMPLERATE, arr.length / 2); + // The conditional ensures there is a fade even if the fade time is longer than the signal + if (n < fadeTimeSamps) + arr[n] = (n / fadeTimeSamps) * arr[n] / normalize; + else if (n > arr.length - fadeTimeSamps) + arr[n] = ((arr.length - n) / fadeTimeSamps) * arr[n] / normalize; + else arr[n] = arr[n] / normalize; + }; + + // Apply the fade function + original.forEach(fade); + reconstructed.forEach(fade); + quantNoise.forEach(fade); + } + + +}} +/* +``` +*/ diff --git a/delta-modulation/widget_delta.js b/delta-modulation/widget_delta.js new file mode 100644 index 0000000..b063320 --- /dev/null +++ b/delta-modulation/widget_delta.js @@ -0,0 +1,226 @@ +const BIT_DEPTH_MAX = 16; +const WEBAUDIO_MAX_SAMPLERATE = 96000; +const NUM_COLUMNS = 2; +const MAX_HARMONICS = 100; +function new_widget(panels, sliders, buttons, elem_id, elem_id2, margin_size, width_factor=1.0, height_factor=1.0) { const sketch = p => { +/* +new_widget - + +inputs: + panels: + list of panels to be used in the widget + [] + sliders: + list of panels to be used in the widget, like + [] + buttons: + Tells the widget which button to have appear in the widget (play original, play reconstructed, play quantization noise) + elem_id: + Tells the widget in which Div class to place the buttons in (Questions or answers etc) + elem_id2: + Tells the widget which div with the according class name to take into account for placing the widget in height terms/ + margin_size: + Used to place the uplaod buttons on a specific place. 
+ width_factor: + By default is 1 and determines the width of the widget + height_factor: + By default is 1 and determines the height of the widget + +*/ +var element = undefined; +console.log(elem_id); +if (elem_id) { + element = document.getElementById(elem_id); + console.log(element.id); + console.log(element.clientHeight, element.clientWidth); + +} +var intro_text = document.getElementsByClassName(elem_id2); +var intro_height = 0; + +var numPanels = panels.length; +var numSliders = sliders.length; +var old_x = 220; +let panelHeight, panelWidth, sliderWidth, sliderHeight, numColumns; +resize(1080, 1920); + +// set display and fftSize to ensure there is enough data to fill the panels when zoomed all the way out +let fftSize = p.pow(2, p.round(p.log(panelWidth/minFreqZoom) / p.log(2))); +let displaySignalSize = p.max(fftSize, panelWidth/minTimeZoom) * 1.1; // 1.1 for 10% extra safety margin +let fft = new FFTJS(fftSize); +var settings = + { amplitude : 1.0 + , fundFreq : 1250 // input signal fundamental freq + , sampleRate : WEBAUDIO_MAX_SAMPLERATE + , downsamplingFactor : 2 + , numHarm : 2 //Number of harmonics + , harmType : "Odd" // Harmonic series to evaluate - Odd, even or all + , harmSlope : "1/x" // Amplitude scaling for harmonics. can be used to create different shapes like saw or square + , harmonicFreqs : new Float32Array(MAX_HARMONICS) //Array storing harmonic frequency in hz + , harmonicAmps : new Float32Array(MAX_HARMONICS) //Array storing harmonic amp (0-1.0) + , phase : 0.0 // phase offset for input signal + , fftSize : fftSize + , bitDepth : BIT_DEPTH_MAX //quantization bit depth + , quantType : "midRise" // type of quantization + , dither : 0.0 // amplitude of white noise added to signal before quantization + , antialiasing : 0 // antialiasing filter order + , original: new Float32Array(displaySignalSize) + , downsampled: new Float32Array(1) // this gets re-inited when rendering waves + , reconstructed: new Float32Array(displaySignalSize) + , stuffed: new Float32Array(displaySignalSize) + , quantNoiseStuffed: new Float32Array(displaySignalSize) + , quantNoise: new Float32Array(displaySignalSize) + , original_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds)) + , reconstructed_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds)) + , quantNoise_pb: new Float32Array(p.floor(WEBAUDIO_MAX_SAMPLERATE*soundTimeSeconds)) + , originalFreq : fft.createComplexArray() + , stuffedFreq : fft.createComplexArray() + , reconstructedFreq : fft.createComplexArray() + , quantNoiseFreq : fft.createComplexArray() + , snd : undefined + , maxVisibleFrequency : WEBAUDIO_MAX_SAMPLERATE / 2 + , freqZoom : 1.0 //X axis zoom for frequency panels + , ampZoom : 1.0 // Y axis zoom for all panels + , timeZoom: 1.0 // X axis zoom for signal panels + , deltaFrequency: 96000 + , deltaStep: 0.05 + , element : element + , margine_size : margin_size+20 + , p5: undefined + , render : undefined + , play : undefined + }; + +p.settings = settings; + +var renderWaves = renderWavesImpl(settings, fft, p); + +p.setup = function () { + settings.p5 = p; + settings.render = renderWaves; + settings.play = playWave; + + p.createCanvas(p.windowWidth, p.windowHeight); + console.log(p.windowWidth,p.windowHeight) + p.textAlign(p.CENTER); + panels.forEach(panel => panel.setup(p, panelHeight, panelWidth, settings)); + sliders.forEach(slider => slider.setup(p, settings)); + sliders.forEach(slider => slider.updateValue(p)); + renderWaves(); + buttonSetup(); + p.windowResized(); + p.noLoop(); 
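As the comment in waves_delta.js above explains, each spectrum comes from calling realTransform() and then completeSpectrum() on the same interleaved complex array obtained from createComplexArray(). A minimal sketch of that fft.js pattern with a magnitude readout in dB (FFTJS is assumed to be the fft.js global the page loads; magnitudeDB is a hypothetical helper):

// Minimal fft.js usage sketch (assumes the FFTJS global from fft.js, as loaded by index.html).
const size = 1024;                                // must be a power of two
const fftSketch = new FFTJS(size);
const spectrum = fftSketch.createComplexArray();  // interleaved [re0, im0, re1, im1, ...]

// a test tone placed exactly on bin 32 so there is no leakage: 32 * 48000 / 1024 = 1500 Hz
const sampleRate = 48000;
const input = new Float32Array(size);
for (let n = 0; n < size; n++) input[n] = Math.sin(2 * Math.PI * 1500 * n / sampleRate);

fftSketch.realTransform(spectrum, input);  // fills the lower half of the spectrum
fftSketch.completeSpectrum(spectrum);      // mirrors it into the redundant upper half

// magnitude of bin k in dB relative to a full-scale sine (hypothetical helper, not in this patch)
function magnitudeDB(spec, k, norm = size / 2) {
  const re = spec[2 * k];
  const im = spec[2 * k + 1];
  return 20 * Math.log10(Math.hypot(re, im) / norm);
}

console.log(magnitudeDB(spectrum, 32)); // about 0 dB at the tone bin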
+ setTimeout(p.draw, 250); +}; + +p.draw = function() { + panels.forEach(panel => panel.drawPanel()); + panels.forEach( (panel, index) => { + let y = p.floor(index / numColumns) * panelHeight; + let x = p.floor(index % numColumns) * panelWidth; + p.image(panel.buffer, x, y); + }); +}; + +p.windowResized = function() { + console.log(p.windowWidth,p.windowHeight) + let w = width_factor * p.windowWidth - 20; // TODO: get panel bezel somehow instead of hardcoded 20 + let h = height_factor * p.windowHeight - 20; + resize(w, h); + + p.resizeCanvas(w, h); + panels.forEach(panel => panel.resize(panelHeight, panelWidth)); + + intro_text.forEach(element => { + intro_height += element.clientHeight; + }) + let yoffset = panelHeight * p.ceil(numPanels/numColumns) + intro_height + 100; + let sliderPos = new Array(numColumns).fill(1); + sliderPos.forEach((pos,index)=>{ + sliderPos[index] = 150+index*sliderWidth; + }); + + console.log("slider position", sliderPos); + sliders.forEach( (slider, index) => { + let y = yoffset + p.floor(index / numColumns) * sliderHeight; + //let x = p.floor(index % numColumns) * panelWidth; + slider.resize(sliderPos[index % numColumns], y, sliderWidth,p); + }); + let y = yoffset + p.floor((numSliders)/ numColumns) * sliderHeight; + let x = margin_size; + originalButton.position(x + 20, y); + reconstructedButton.position(originalButton.x + originalButton.width * 1.1, originalButton.y); + quantNoiseButton.position(reconstructedButton.x + reconstructedButton.width * 1.1, reconstructedButton.y); + intro_height = 0; +}; + +function resize(w, h) { + if (w < 800 || (numPanels % 2 == 1)) numColumns = 1; + else numColumns = 2; + let panelRows = Math.ceil((numPanels+1)/numColumns); + let sliderRows = Math.ceil((numSliders+1)/numColumns); + panelWidth = w / numColumns; + sliderWidth = w / numColumns - 200; + panelHeight = h / panelRows; + sliderHeight = 200 / sliderRows; + if (sliderHeight < 30) { // keep sliders from getting squished + sliderHeight = 30; + let sliderPanelHeight = sliderHeight * sliderRows; + panelHeight = (h - sliderPanelHeight) / (panelRows - 1); + } +} + +function buttonSetup() { + originalButton = p.createButton("play original"); + originalButton.position(p.width/2 + 10, p.height - p.height / numPanels ); + originalButton.mousePressed( () => { + renderWaves(true); + if (!settings.snd) settings.snd = new (window.AudioContext || window.webkitAudioContext)(); + playWave(settings.original_pb, WEBAUDIO_MAX_SAMPLERATE, settings.snd); + }); + originalButton.parent(element.id); + if(!buttons.includes("original")){ + originalButton.hide(); + } + + reconstructedButton = p.createButton("play reconstructed"); + reconstructedButton.position(originalButton.x + originalButton.width * 1.1, originalButton.y); + reconstructedButton.mousePressed( () => { + renderWaves(true); + if (!settings.snd) settings.snd = new (window.AudioContext || window.webkitAudioContext)(); + playWave(settings.reconstructed_pb, WEBAUDIO_MAX_SAMPLERATE, settings.snd); + }); + reconstructedButton.parent(element.id); + if(!buttons.includes("recon")){ + reconstructedButton.hide(); + } + quantNoiseButton = p.createButton("play quantization noise"); + quantNoiseButton.position(reconstructedButton.x + reconstructedButton.width * 1.1, reconstructedButton.y); + quantNoiseButton.mousePressed( () => { + renderWaves(true); + if (!settings.snd) settings.snd = new (window.AudioContext || window.webkitAudioContext)(); + playWave(settings.quantNoise_pb, WEBAUDIO_MAX_SAMPLERATE, settings.snd); + }); + 
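For reference, the deltaFrequency and deltaStep fields reserved in the settings above suggest the two knobs of a linear delta modulator: the tick rate, and how far the running estimate moves per one-bit decision. A rough sketch of such an encoder/decoder pair (illustrative names only; this is not the widget's renderWaves code):

// Rough sketch of linear delta modulation (illustrative only).
// Encoder: emit +1 if the input is at or above the running estimate, else -1,
// then move the estimate one step in that direction.
function deltaEncode(signal, step) {
  const bits = new Int8Array(signal.length);
  let estimate = 0;
  for (let n = 0; n < signal.length; n++) {
    bits[n] = signal[n] >= estimate ? 1 : -1;
    estimate += bits[n] * step;
  }
  return bits;
}

// Decoder: integrate the bit stream with the same step size.
// A practical decoder would follow this with a low-pass filter to smooth the staircase.
function deltaDecode(bits, step) {
  const out = new Float32Array(bits.length);
  let estimate = 0;
  for (let n = 0; n < bits.length; n++) {
    estimate += bits[n] * step;
    out[n] = estimate;
  }
  return out;
}

// Too small a step relative to the signal's slope gives slope overload;
// too large a step gives granular noise on nearly flat stretches.
const tone = Float32Array.from({ length: 64 }, (_, n) => Math.sin(2 * Math.PI * n / 64));
const decoded = deltaDecode(deltaEncode(tone, 0.05), 0.05);
console.log(decoded[16].toFixed(2)); // about 0.85: the 0.05 step lags the peak of 1.0 (mild slope overload)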
quantNoiseButton.parent(element.id); + if(!buttons.includes("quant")){ + quantNoiseButton.hide(); + } + +} + +function playWave(wave, sampleRate, audioctx) { + var buffer = audioctx.createBuffer(1, wave.length, sampleRate); + buffer.copyToChannel(wave, 0, 0); + var source = audioctx.createBufferSource(); + source.buffer = buffer; + source.connect(audioctx.destination); + source.start(); +} + +function downloadWave(wave, sampleRate, audioctx) { + +} + + +}; +return new p5(sketch); } // end function new_widget() { var sketch = p => { diff --git a/tutorials/index.html b/tutorials/index.html index 91f3d47..2a2085e 100644 --- a/tutorials/index.html +++ b/tutorials/index.html @@ -52,7 +52,7 @@
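playWave() above pushes a Float32Array straight into a Web Audio buffer, while the neighbouring downloadWave() is left as an empty stub. One plausible shape for it, purely as a sketch (16-bit PCM mono is assumed, and a filename argument stands in for the unused audio context):

// Possible shape for the empty downloadWave() stub above; a sketch, not the project's code.
// Packs a mono Float32Array into a 16-bit PCM WAV blob and triggers a browser download.
function downloadWaveSketch(wave, sampleRate, filename = "output.wav") {
  const numSamples = wave.length;
  const buffer = new ArrayBuffer(44 + numSamples * 2);
  const view = new DataView(buffer);
  const writeString = (offset, s) => {
    for (let i = 0; i < s.length; i++) view.setUint8(offset + i, s.charCodeAt(i));
  };

  writeString(0, "RIFF");
  view.setUint32(4, 36 + numSamples * 2, true);  // remaining chunk size
  writeString(8, "WAVE");
  writeString(12, "fmt ");
  view.setUint32(16, 16, true);                  // fmt chunk size
  view.setUint16(20, 1, true);                   // PCM
  view.setUint16(22, 1, true);                   // mono
  view.setUint32(24, sampleRate, true);
  view.setUint32(28, sampleRate * 2, true);      // byte rate
  view.setUint16(32, 2, true);                   // block align
  view.setUint16(34, 16, true);                  // bits per sample
  writeString(36, "data");
  view.setUint32(40, numSamples * 2, true);

  for (let n = 0; n < numSamples; n++) {
    const s = Math.max(-1, Math.min(1, wave[n])); // clip to [-1, 1]
    view.setInt16(44 + n * 2, s < 0 ? s * 0x8000 : s * 0x7fff, true);
  }

  const a = document.createElement("a");
  a.href = URL.createObjectURL(new Blob([buffer], { type: "audio/wav" }));
  a.download = filename;
  document.body.appendChild(a);
  a.click();
  a.remove();
  URL.revokeObjectURL(a.href);
}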

- + diff --git a/tutorials/panel_tutorial.js b/tutorials/panel_tutorial.js index 98fdfbb..59678ff 100644 --- a/tutorials/panel_tutorial.js +++ b/tutorials/panel_tutorial.js @@ -30,7 +30,7 @@ class Panel { this.xbezel = Math.max(70, w * 0.1); this.xbezelLeft = 0.60 * this.xbezel; this.xbezelRight = 0.40 * this.xbezel; - this.ybezel = Math.max(20, h * 0.1); + this.ybezel = Math.max(20, h *0.1); this.halfh = h/2; this.plotHeight = h - 2 * this.ybezel; this.plotWidth = w - this.xbezel; @@ -108,19 +108,22 @@ function drawMidLine(panel) { } const time_signal_doc='Because this signal approximates a continuous analog signal in our simulation, the signal value is drawn with a simple interpolation scheme. There are currently bugs with this interpolation when zooming in (time zoom > 100%). In addition, visual aliasing may occur when viewing high frequency signals due to the limited number of pixels on the screen acting as a kind of spatial sampling process. This may appear as amplitude modulation in the plot that is not actually present in the signal. Finally, note that the amplitude of the signal is clipped to the size of the panel viewport. This visual clipping happens regardless of whether the signal itself actually exhibits clipping. '; -function drawSignal(panel, signal, zoom = 1) +function drawSignal(panel, signal) { let pixel_max = panel.plotHeight/2; let pixel_per_fullscale = pixel_max * panel.settings.ampZoom; panel.buffer.noFill(); //TODO: there are some artifacts here due to the way the signal is drawn, especially when zoomed in and/or large amplitude panel.buffer.beginShape(); - panel.buffer.curveTightness(1.0); - for (let x = 0; x < panel.plotWidth; x++) { - let pixel_amp = pixel_per_fullscale * signal[Math.round(x/panel.settings.timeZoom)]; + max_x = 10000 + for (let x = 0; x < max_x; x++) { + let pixel_x = (x/max_x)*panel.plotWidth/panel.settings.timeZoom + let amp = signal[Math.floor(pixel_x)]+(pixel_x-Math.floor(pixel_x))/(Math.ceil(pixel_x)-Math.floor(pixel_x))*(signal[Math.ceil(pixel_x)]-signal[Math.floor(pixel_x)]); //Linear interpolation + let pixel_amp = pixel_per_fullscale * amp; let y = panel.halfh - pixel_amp; - y = (ypanel.plotBottom)? y= panel.plotBottom : y=y; panel.buffer.curveTightness(0.0); - panel.buffer.curveVertex(x + panel.plotLeft, y); + panel.buffer.curveTightness(1.0); + y = (ypanel.plotBottom)? y= panel.plotBottom: y=y;panel.buffer.curveTightness(1.0) + panel.buffer.curveVertex((x/max_x)*panel.plotWidth + panel.plotLeft, y); } panel.buffer.endShape(); } diff --git a/tutorials/tutorial2.html b/tutorials/tutorial2.html index 38eeeb2..99a4752 100644 --- a/tutorials/tutorial2.html +++ b/tutorials/tutorial2.html @@ -21,7 +21,7 @@
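The linear interpolation added to drawSignal() above divides by Math.ceil(pixel_x) - Math.floor(pixel_x), which is zero whenever pixel_x lands exactly on a sample index (including the very first point at pixel_x = 0), so those points evaluate to NaN. A guarded version of the same interpolation as a standalone helper (lerpSample is an illustrative name, not part of this patch):

// Guarded linear interpolation between samples; ceil(x) - floor(x) is 1 whenever the
// fractional part is nonzero, so the division can simply be dropped.
function lerpSample(signal, x) {
  const i = Math.floor(x);
  const frac = x - i;
  if (frac === 0 || i + 1 >= signal.length) return signal[i];
  return signal[i] + frac * (signal[i + 1] - signal[i]);
}

console.log(lerpSample([0, 1, 2, 3, 4], 2.5)); // 2.5, halfway between samples 2 and 3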
@@ -72,7 +72,7 @@

What would the resulting sound look like?

- Now, bypass the Nyquist Frequency to 2700 Hz whilst keeping the sampling rate at 3000 Hz, what do you notice? Can you tell what the resulting frequency. + Now, raise the input signal frequency past the Nyquist frequency, to 2700 Hz, while keeping the sampling rate at 3000 Hz. What do you notice? Can you tell what the resulting frequency would be?
Now set the input signal frequency to 750 and the number of harmonics to two. diff --git a/tutorials/tutorial3.html b/tutorials/tutorial3.html index cb3eb35..76e56ea 100644 --- a/tutorials/tutorial3.html +++ b/tutorials/tutorial3.html @@ -29,7 +29,7 @@

- Tutorial 4: Waveform Reconstruction + Tutorial 3: Waveform Reconstruction


diff --git a/tutorials/tutorial4.html b/tutorials/tutorial4.html index a18e936..a02d4b5 100644 --- a/tutorials/tutorial4.html +++ b/tutorials/tutorial4.html @@ -30,6 +30,18 @@

Tutorial 1: Waveform building + + Frequency Domain + Aliasing + Filtering + Reconstruction + + Quantization : Linear/Nonlinear/Companding + Aliasing + Encoding methods + Different Dithering + Class 4 42 Delta Modulation + Class 3 35 Sampling/Nyquist


From f7b2cafd41bb1846d325ef56279ae9812f7fac5a Mon Sep 17 00:00:00 2001 From: Maxw3llGM Date: Thu, 8 Feb 2024 16:28:43 -0500 Subject: [PATCH 2/7] fixed formating of delta modualtion text --- delta-modulation/index.html | 77 +++++++++++++++++++++++-------------- tutorials/tutorial2.html | 1 - 2 files changed, 49 insertions(+), 29 deletions(-) diff --git a/delta-modulation/index.html b/delta-modulation/index.html index 27fa1b7..0d084d8 100644 --- a/delta-modulation/index.html +++ b/delta-modulation/index.html @@ -1,7 +1,7 @@ - + @@ -18,13 +18,17 @@
-
-
- Home -
- - -
+
+ +
+ + +
+

Delta Modulation @@ -32,7 +36,7 @@


-
+
+ diff --git a/tutorials/tutorial2.html b/tutorials/tutorial2.html index 99a4752..4d67dab 100644 --- a/tutorials/tutorial2.html +++ b/tutorials/tutorial2.html @@ -94,7 +94,6 @@

,["original"] ,"Question" //question div id ,"qs" //questions class - );
From 7460ab62b8ddd82747f670ced5c75c36460c2f25 Mon Sep 17 00:00:00 2001 From: Laurent Tarabout Date: Sat, 10 Feb 2024 17:09:45 -0500 Subject: [PATCH 3/7] Second pass at delta modulation, added reconstruction panel/sound buttons, issues with css formatting of the sliders/buttons --- delta-modulation/index.html | 7 +- delta-modulation/panel_delta.js | 2 - delta-modulation/styles_delta.css | 156 ++++++++++++++++++++++++++++++ delta-modulation/waves_delta.js | 58 +++++++---- delta-modulation/widget_delta.js | 2 +- 5 files changed, 199 insertions(+), 26 deletions(-) diff --git a/delta-modulation/index.html b/delta-modulation/index.html index 0d084d8..1aaeeda 100644 --- a/delta-modulation/index.html +++ b/delta-modulation/index.html @@ -1,7 +1,7 @@ - + @@ -26,7 +26,7 @@

- +
@@ -41,6 +41,7 @@

+
-
+