diff --git a/.vscode/.prettierrc.yaml b/.vscode/.prettierrc.yaml
index a51674bd..f51d99fc 100644
--- a/.vscode/.prettierrc.yaml
+++ b/.vscode/.prettierrc.yaml
@@ -1,5 +1,5 @@
-trailingComma: "all"
+trailingComma: 'all'
tabWidth: 2
semi: true
singleQuote: true
-printWidth: 150
+printWidth: 80
diff --git a/web/src/components/d3component.js b/web/src/components/d3component.js
index 133881ec..90a67d4c 100644
--- a/web/src/components/d3component.js
+++ b/web/src/components/d3component.js
@@ -21,6 +21,9 @@ const D3Component = React.memo(({ callback, data, useDiv = false }) => {
D3Component.propTypes = {
callback: PropTypes.func.isRequired,
data: PropTypes.any,
+ // Using a div node instead of a svg node allows usage of child components of other types:
+ // e.g., for performance issues, we used both canvas & svg child elements in a visualisation.
+ useDiv: PropTypes.bool,
};
export default D3Component;
diff --git a/web/src/components/d3component_scrollytelling.js b/web/src/components/d3component_scrollytelling.js
new file mode 100644
index 00000000..e33ef13d
--- /dev/null
+++ b/web/src/components/d3component_scrollytelling.js
@@ -0,0 +1,32 @@
+import React from 'react';
+import PropTypes from 'prop-types';
+import D3Component from './d3component';
+
+const D3ComponentScrollyTelling = ({
+ callback,
+ data,
+ isInitialized,
+ setIsInitialized,
+ useDiv = false,
+}) => {
+ const createCallback = (svg, data) => {
+ if (!isInitialized) {
+ setIsInitialized(true);
+ callback(svg, data);
+ }
+ };
+
+ return ;
+};
+
+D3ComponentScrollyTelling.propTypes = {
+ callback: PropTypes.func.isRequired,
+ isInitialized: PropTypes.bool,
+ setIsInitialized: PropTypes.func,
+ data: PropTypes.any,
+ // Using a div node instead of a svg node allows usage of child components of other types:
+ // e.g., for performance issues, we used both canvas & svg child elements in a visualisation.
+ useDiv: PropTypes.bool,
+};
+
+export default D3ComponentScrollyTelling;
diff --git a/web/src/d3/evolving_chart/preproc.js b/web/src/d3/evolving_chart/preproc.js
index 3558f9d8..01987222 100644
--- a/web/src/d3/evolving_chart/preproc.js
+++ b/web/src/d3/evolving_chart/preproc.js
@@ -1,7 +1,7 @@
import _ from 'lodash';
-import { convertTimestampsToDates } from '../utils';
-import { STAGES_ORDERED, EPOCH_DURATION_SEC } from '../constants';
+import { convertTimestampsToDates, convertEpochsToAnnotations } from '../utils';
+import { STAGES_ORDERED } from '../constants';
export const preprocessData = (data) => {
data = convertTimestampsToDates(data);
@@ -17,39 +17,6 @@ export const preprocessData = (data) => {
};
};
-const convertEpochsToAnnotations = (data) => {
- const annotations = [];
- const nbEpochs = data.length;
- let currentAnnotationStart = data[0].timestamp;
- let currentSleepStage = data[0].sleepStage;
- let currentAnnotationEpochCount = 0;
-
- const isNextAnnotation = (sleepStage, index) => sleepStage !== currentSleepStage || index === data.length - 1;
-
- const saveCurrentAnnotation = (timestamp) => {
- annotations.push({
- stage: currentSleepStage,
- proportion: currentAnnotationEpochCount / nbEpochs,
- start: currentAnnotationStart,
- end: timestamp,
- duration: currentAnnotationEpochCount * EPOCH_DURATION_SEC,
- });
- };
-
- data.forEach(({ timestamp, sleepStage }, index) => {
- currentAnnotationEpochCount++;
-
- if (isNextAnnotation(sleepStage, index)) {
- saveCurrentAnnotation(timestamp);
- currentAnnotationStart = timestamp;
- currentSleepStage = sleepStage;
- currentAnnotationEpochCount = 0;
- }
- });
-
- return annotations;
-};
-
const getStageTimeProportions = (data) => {
const nbEpochPerSleepStage = _.countBy(data.map((x) => x.sleepStage));
const proportionPerSleepStage = _.mapValues(nbEpochPerSleepStage, (countPerStage) => countPerStage / data.length);
diff --git a/web/src/d3/spectrogram/axes_legend.js b/web/src/d3/spectrogram/axes_legend.js
new file mode 100644
index 00000000..2921a356
--- /dev/null
+++ b/web/src/d3/spectrogram/axes_legend.js
@@ -0,0 +1,157 @@
+import * as d3 from 'd3';
+import _ from 'lodash';
+import {
+ MARGIN,
+ NB_POINTS_COLOR_INTERPOLATION,
+ TITLE_FONT_SIZE,
+ TITLE_POSITION_Y,
+} from './constants';
+
+const createDrawingGroups = (g, spectrogramWidth) =>
+ Object({
+ spectrogramDrawingGroup: g
+ .append('g')
+ .attr('transform', `translate(${MARGIN.LEFT}, ${MARGIN.TOP})`),
+ legendDrawingGroup: g
+ .append('g')
+ .attr(
+ 'transform',
+ `translate(${MARGIN.LEFT + spectrogramWidth}, ${MARGIN.TOP})`,
+ ),
+ });
+
+const drawTitle = (g, channelName, spectrogramWidth) =>
+ g
+ .append('text')
+ .attr('x', spectrogramWidth / 2)
+ .attr('y', TITLE_POSITION_Y)
+ .style('text-anchor', 'middle')
+ .style('font-size', TITLE_FONT_SIZE)
+ .text(`Spectrogram of channel ${channelName}`);
+
+const drawAxes = (
+ g,
+ xAxis,
+ yAxis,
+ singleSpectrogramHeight,
+ spectrogramWidth,
+) => {
+ g.append('text')
+ .attr('class', 'x axis')
+ .attr('y', singleSpectrogramHeight + MARGIN.BOTTOM)
+ .attr('x', spectrogramWidth / 2)
+ .attr('fill', 'currentColor')
+ .style('text-anchor', 'middle')
+ .text('Time');
+
+ g.append('text')
+ .attr('class', 'y axis')
+ .attr('transform', 'rotate(-90)')
+ .attr('y', -MARGIN.LEFT)
+ .attr('x', -singleSpectrogramHeight / 2)
+ .attr('dy', '1em')
+ .attr('fill', 'currentColor')
+ .style('text-anchor', 'middle')
+ .text('Frequency (Hz)');
+
+ g.append('g')
+ .attr('class', 'x axis')
+ .attr('transform', `translate(0, ${singleSpectrogramHeight})`)
+ .call(xAxis)
+ .selectAll('text');
+
+ g.append('g').attr('class', 'y axis').call(yAxis).selectAll('text');
+};
+
+const drawLegend = (svg, color, y, spectrogramHeight) => {
+ const interpolate = d3.interpolate(color.domain()[0], color.domain()[1]);
+
+ const colors = _.map(_.range(NB_POINTS_COLOR_INTERPOLATION + 1), (x) =>
+ color(interpolate(x / NB_POINTS_COLOR_INTERPOLATION)),
+ );
+
+ const svgDefs = svg.append('defs');
+ const GRADIENT_ID = 'mainGradient';
+
+ svgDefs
+ .append('linearGradient')
+ .attr('id', GRADIENT_ID)
+ .attr('x1', '0%')
+ .attr('x2', '0%')
+ .attr('y1', '100%')
+ .attr('y2', '0%')
+ .selectAll('stop')
+ .data(colors)
+ .enter()
+ .append('stop')
+ .attr('stop-color', (d) => d)
+ .attr('offset', (_, i) => i / (colors.length - 1));
+ svg
+ .append('rect')
+ .attr('fill', `url(#${GRADIENT_ID})`)
+ .attr('x', MARGIN.RIGHT / 10)
+ .attr('y', 0)
+ .attr('width', MARGIN.RIGHT / 6)
+ .attr('height', spectrogramHeight);
+
+ const yAxis = d3.axisRight(y).ticks(5, 's');
+ svg
+ .append('g')
+ .attr('class', 'y axis')
+ .attr('transform', `translate(${MARGIN.RIGHT / 3.7},0)`)
+ .call(yAxis)
+ .selectAll('text');
+
+ svg
+ .append('text')
+ .attr('class', 'y axis')
+ .attr('transform', 'rotate(90)')
+ .attr('y', -MARGIN.RIGHT)
+ .attr('x', spectrogramHeight / 2)
+ .attr('dy', '1em')
+ .attr('fill', 'currentColor')
+ .style('text-anchor', 'middle')
+ .text('Power (uV²/Hz)');
+};
+
+const drawSpectrogramAxesAndLegend = (
+ svg,
+ scalesAndAxesBySpectrogram,
+ data,
+ {
+ canvasWidth,
+ spectrogramWidth,
+ singleSpectrogramCanvasHeight,
+ singleSpectrogramHeight,
+ },
+) =>
+ _.forEach(
+ _.zip(scalesAndAxesBySpectrogram, data),
+ ([{ xAxis, yAxis, color, yColor }, { channel }], index) => {
+ const currentSpectrogramDrawingGroup = svg
+ .append('g')
+ .attr(
+ 'transform',
+ `translate(0, ${index * singleSpectrogramCanvasHeight[index]})`,
+ )
+ .attr('width', canvasWidth)
+ .attr('height', singleSpectrogramCanvasHeight[index]);
+
+ const {
+ spectrogramDrawingGroup,
+ legendDrawingGroup,
+ } = createDrawingGroups(currentSpectrogramDrawingGroup, spectrogramWidth);
+
+ drawTitle(spectrogramDrawingGroup, channel, spectrogramWidth);
+ drawAxes(
+ spectrogramDrawingGroup,
+ xAxis,
+ yAxis,
+ singleSpectrogramHeight,
+ spectrogramWidth,
+ );
+ drawLegend(legendDrawingGroup, color, yColor, singleSpectrogramHeight);
+ },
+ );
+
+export default drawSpectrogramAxesAndLegend;
diff --git a/web/src/d3/spectrogram/constants.js b/web/src/d3/spectrogram/constants.js
index 5ae945a1..d4b69980 100644
--- a/web/src/d3/spectrogram/constants.js
+++ b/web/src/d3/spectrogram/constants.js
@@ -1,8 +1,9 @@
export const PADDING = 100;
export const NB_SPECTROGRAM = 2;
export const FREQUENCY_KEY = 'frequencies';
+export const HYPNOGRAM_KEY = 'hypnogram';
export const NB_POINTS_COLOR_INTERPOLATION = 3;
-export const TITLE_FONT_SIZE = '18px';
+export const NOT_HIGHLIGHTED_RECTANGLE_OPACITY = 0.5;
export const CANVAS_WIDTH_TO_HEIGHT_RATIO = 700 / 1000; // width to height ratio
export const CANVAS_HEIGHT_WINDOW_FACTOR = 0.8;
export const MARGIN = {
@@ -11,3 +12,5 @@ export const MARGIN = {
BOTTOM: 50,
LEFT: 70,
};
+export const TITLE_FONT_SIZE = '18px';
+export const TITLE_POSITION_Y = -MARGIN.TOP / 3;
diff --git a/web/src/d3/spectrogram/legend.js b/web/src/d3/spectrogram/legend.js
deleted file mode 100644
index f8a7ad5c..00000000
--- a/web/src/d3/spectrogram/legend.js
+++ /dev/null
@@ -1,53 +0,0 @@
-import * as d3 from 'd3';
-import _ from 'lodash';
-import { MARGIN, NB_POINTS_COLOR_INTERPOLATION, TITLE_FONT_SIZE } from './constants';
-
-export const createLegend = (svg, color, y, spectrogramHeight) => {
- const interpolate = d3.interpolate(color.domain()[0], color.domain()[1]);
-
- const colors = _.map(_.range(NB_POINTS_COLOR_INTERPOLATION + 1), (x) => color(interpolate(x / NB_POINTS_COLOR_INTERPOLATION)));
-
- const svgDefs = svg.append('defs');
- const GRADIENT_ID = 'mainGradient';
-
- svgDefs
- .append('linearGradient')
- .attr('id', GRADIENT_ID)
- .attr('x1', '0%')
- .attr('x2', '0%')
- .attr('y1', '100%')
- .attr('y2', '0%')
- .selectAll('stop')
- .data(colors)
- .enter()
- .append('stop')
- .attr('stop-color', (d) => d)
- .attr('offset', (_, i) => i / (colors.length - 1));
- svg
- .append('rect')
- .attr('fill', `url(#${GRADIENT_ID})`)
- .attr('x', MARGIN.RIGHT / 10)
- .attr('y', 0)
- .attr('width', MARGIN.RIGHT / 6)
- .attr('height', spectrogramHeight);
-
- const yAxis = d3.axisRight(y).ticks(5, 's');
- svg
- .append('g')
- .attr('class', 'y axis')
- .attr('transform', `translate(${MARGIN.RIGHT / 3.7},0)`)
- .call(yAxis)
- .selectAll('text');
-
- svg
- .append('text')
- .attr('class', 'y axis')
- .attr('transform', 'rotate(90)')
- .attr('y', -MARGIN.RIGHT)
- .attr('x', spectrogramHeight / 2)
- .attr('dy', '1em')
- .attr('fill', 'currentColor')
- .style('text-anchor', 'middle')
- .style('font-size', TITLE_FONT_SIZE)
- .text('Power (uV²/Hz)');
-};
diff --git a/web/src/d3/spectrogram/spectrogram.js b/web/src/d3/spectrogram/spectrogram.js
index fef0b5b3..73bdaa70 100644
--- a/web/src/d3/spectrogram/spectrogram.js
+++ b/web/src/d3/spectrogram/spectrogram.js
@@ -5,152 +5,154 @@ import {
MARGIN,
CANVAS_WIDTH_TO_HEIGHT_RATIO,
FREQUENCY_KEY,
- TITLE_FONT_SIZE,
+ HYPNOGRAM_KEY,
NB_SPECTROGRAM,
PADDING,
CANVAS_HEIGHT_WINDOW_FACTOR,
+ NOT_HIGHLIGHTED_RECTANGLE_OPACITY,
} from './constants';
-import { EPOCH_DURATION_SEC } from '../constants';
-import { createLegend } from './legend';
+import { STAGES_ORDERED } from '../constants';
+import drawSpectrogramAxesAndLegend from './axes_legend';
+import { convertTimestampsToDates } from '../utils';
+
+// keys are the sleep stage for which we want to display the spectrogram
+// accepted keys are: null (when all stages are highlighted), W, N1, N2, N3, REM
+export let spectrogramCallbacks = {};
+
+const getDimensions = (parentDiv) => {
+ const canvasWidth = parentDiv.node().getBoundingClientRect().width;
+ const canvasHeight = Math.min(
+ canvasWidth * CANVAS_WIDTH_TO_HEIGHT_RATIO,
+ window.innerHeight * CANVAS_HEIGHT_WINDOW_FACTOR,
+ );
+ const spectrogramsHeight = canvasHeight - MARGIN.TOP - MARGIN.BOTTOM;
+ const singleSpectrogramCanvasHeight = _.range(NB_SPECTROGRAM).map((x) => {
+ let height = spectrogramsHeight / NB_SPECTROGRAM;
+ if (x === 0) {
+ height += MARGIN.TOP;
+ } else if (x === NB_SPECTROGRAM - 1) {
+ height += MARGIN.BOTTOM;
+ }
+ return height;
+ });
+
+ return {
+ canvasWidth,
+ canvasHeight,
+ spectrogramsHeight,
+ singleSpectrogramCanvasHeight,
+ spectrogramWidth: canvasWidth - MARGIN.LEFT - MARGIN.RIGHT,
+ singleSpectrogramHeight: (spectrogramsHeight - PADDING) / NB_SPECTROGRAM,
+ };
+};
+
+const preprocessData = (channel, data) => {
+ const powerAmplitudesByTimestamp = data[channel];
+ const frequencies = data[FREQUENCY_KEY];
+ const hypnogram = convertTimestampsToDates(data[HYPNOGRAM_KEY]);
+
+ return {
+ channel,
+ frequencies,
+ rectangles: _.flatMap(
+ _.zip(powerAmplitudesByTimestamp, hypnogram),
+ ([powerAmplitudeSingleTimestamp, { sleepStage, timestamp }]) =>
+ _.map(
+ _.zip(powerAmplitudeSingleTimestamp, frequencies),
+ ([intensity, frequency]) =>
+ Object({
+ intensity,
+ frequency,
+ timestamp,
+ sleepStage,
+ }),
+ ),
+ ),
+ };
+};
const initializeScales = ({ spectrogramWidth, singleSpectrogramHeight }) =>
Object({
- x: d3.scaleLinear([0, spectrogramWidth]),
+ x: d3.scaleTime([0, spectrogramWidth]),
yLinear: d3.scaleLinear([singleSpectrogramHeight, 0]),
yBand: d3.scaleBand([singleSpectrogramHeight, 0]),
yColor: d3.scaleLinear([singleSpectrogramHeight, 0]),
color: d3.scaleSequential().interpolator(d3.interpolatePlasma),
});
-const initializeAxes = (x, y) =>
- Object({
- xAxis: d3.axisBottom(x).tickFormat((d) => `${d}h`),
- yAxis: d3.axisLeft(y).ticks(5, 's'),
- });
-
-const setDomainOnScales = (currentData, frequencies, preprocessedData, x, yBand, yLinear, color, yColor) => {
- x.domain([0, getHoursFromIndex(currentData.length)]);
+const setDomainOnScales = (
+ { rectangles, frequencies },
+ x,
+ yBand,
+ yLinear,
+ color,
+ yColor,
+) => {
+ x.domain([_.first(rectangles).timestamp, _.last(rectangles).timestamp]);
yBand.domain(frequencies);
yLinear.domain([_.first(frequencies), _.last(frequencies)]);
- color.domain(d3.extent(preprocessedData, ({ Intensity }) => Intensity));
- yColor.domain(d3.extent(preprocessedData, ({ Intensity }) => Intensity));
+ color.domain(d3.extent(rectangles, ({ intensity }) => intensity));
+ yColor.domain(d3.extent(rectangles, ({ intensity }) => intensity));
};
-const preprocessData = (powerAmplitudesByTimestamp, frequencies) =>
- _.flatMap(powerAmplitudesByTimestamp, (powerAmplitudeSingleTimestamp, index) =>
- _.map(_.zip(powerAmplitudeSingleTimestamp, frequencies), ([intensity, frequency]) =>
- Object({
- Intensity: intensity,
- Frequency: frequency,
- Timestamp: getHoursFromIndex(index),
- }),
- ),
- );
-
-const getHoursFromIndex = (idx) => (idx * EPOCH_DURATION_SEC) / 3600;
-
-const createDrawingGroups = (g, spectrogramWidth) =>
+const initializeAxes = (x, y) =>
Object({
- spectrogramDrawingGroup: g.append('g').attr('transform', `translate(${MARGIN.LEFT}, ${MARGIN.TOP})`),
- legendDrawingGroup: g.append('g').attr('transform', `translate(${MARGIN.LEFT + spectrogramWidth}, ${MARGIN.TOP})`),
+ xAxis: d3.axisBottom(x).tickFormat((d) => `${d.getHours()}h`),
+ yAxis: d3.axisLeft(y).ticks(5, 's'),
});
-const getScalesAndAxes = (data, channel, dimensions) => {
+const getScalesAndAxes = (data, dimensions) => {
const { x, yLinear, yBand, yColor, color } = initializeScales(dimensions);
const { xAxis, yAxis } = initializeAxes(x, yLinear);
- const preprocessedData = preprocessData(data[channel], data.frequencies);
- setDomainOnScales(data[channel], data.frequencies, preprocessedData, x, yBand, yLinear, color, yColor);
+ setDomainOnScales(data, x, yBand, yLinear, color, yColor);
- return { data: preprocessedData, x, yBand, yColor, color, xAxis, yAxis };
+ return { x, yBand, yColor, color, xAxis, yAxis };
};
-const createAxes = (g, xAxis, yAxis, singleSpectrogramHeight, spectrogramWidth) => {
- g.append('text')
- .attr('class', 'x axis')
- .attr('y', singleSpectrogramHeight + MARGIN.BOTTOM)
- .attr('x', spectrogramWidth / 2)
- .attr('fill', 'currentColor')
- .style('text-anchor', 'middle')
- .text('Time');
-
- g.append('text')
- .attr('class', 'y axis')
- .attr('transform', 'rotate(-90)')
- .attr('y', -MARGIN.LEFT)
- .attr('x', -singleSpectrogramHeight / 2)
- .attr('dy', '1em')
- .attr('fill', 'currentColor')
- .style('text-anchor', 'middle')
- .text('Frequency (Hz)');
-
- g.append('g')
- .attr('class', 'x axis')
- .attr('transform', `translate(0, ${singleSpectrogramHeight})`)
- .call(xAxis)
- .selectAll('text')
- .style('font-size', TITLE_FONT_SIZE);
-
- g.append('g').attr('class', 'y axis').call(yAxis).selectAll('text').style('font-size', TITLE_FONT_SIZE);
-};
-
-const createTitle = (g, channelName, spectrogramWidth) =>
- g
- .append('text')
- .attr('x', spectrogramWidth / 2)
- .attr('y', -MARGIN.TOP / 3)
- .style('text-anchor', 'middle')
- .style('font-size', TITLE_FONT_SIZE)
- .text(`Spectrogram of channel ${channelName}`);
-
-const createSpectrogramRectangles = (canvas, scalesAndAxesBySpectrogram, { singleSpectrogramCanvasHeight }) => {
+const drawSpectrogramRectangles = (
+ canvas,
+ scalesAndAxesBySpectrogram,
+ data,
+ { singleSpectrogramCanvasHeight },
+ highlightedSleepStage,
+) => {
const context = canvas.node().getContext('2d');
-
- _.each(scalesAndAxesBySpectrogram, ({ x, yBand, color, data }, index) => {
- context.resetTransform();
- context.translate(MARGIN.LEFT, MARGIN.TOP + index * singleSpectrogramCanvasHeight[index]);
-
- _.each(data, ({ Timestamp, Frequency, Intensity }) => {
- context.beginPath();
- context.fillRect(x(Timestamp), yBand(Frequency), x(getHoursFromIndex(1)), yBand.bandwidth());
- context.fillStyle = color(Intensity);
- context.fill();
- context.stroke();
- });
- });
+ const isHighlighted = (sleepStage) =>
+ highlightedSleepStage === null || highlightedSleepStage === sleepStage;
+
+ _.each(
+ _.zip(scalesAndAxesBySpectrogram, data),
+ ([{ x, yBand, color }, { rectangles, frequencies }], index) => {
+ const rectangleWidth =
+ x(rectangles[frequencies.length].timestamp) -
+ x(rectangles[0].timestamp);
+
+ context.resetTransform();
+ context.translate(
+ MARGIN.LEFT,
+ MARGIN.TOP + index * singleSpectrogramCanvasHeight[index],
+ );
+
+ _.each(rectangles, ({ timestamp, frequency, intensity, sleepStage }) => {
+ context.beginPath();
+ context.fillRect(
+ x(timestamp),
+ yBand(frequency),
+ rectangleWidth,
+ yBand.bandwidth(),
+ );
+ context.globalAlpha = isHighlighted(sleepStage)
+ ? 1
+ : NOT_HIGHLIGHTED_RECTANGLE_OPACITY;
+ context.fillStyle = color(intensity);
+ context.fill();
+ context.stroke();
+ });
+ },
+ );
};
-const createSpectrogramAxesAndLegend = (
- svg,
- scalesAndAxesBySpectrogram,
- channelNames,
- { canvasWidth, spectrogramWidth, singleSpectrogramCanvasHeight, singleSpectrogramHeight },
-) =>
- _.forEach(_.zip(scalesAndAxesBySpectrogram, channelNames), ([{ xAxis, yAxis, color, yColor }, channel], index) => {
- const currentSpectrogramDrawingGroup = svg
- .append('g')
- .attr('transform', `translate(0, ${index * singleSpectrogramCanvasHeight[index]})`)
- .attr('width', canvasWidth)
- .attr('height', singleSpectrogramCanvasHeight[index]);
-
- const { spectrogramDrawingGroup, legendDrawingGroup } = createDrawingGroups(currentSpectrogramDrawingGroup, spectrogramWidth);
-
- createTitle(spectrogramDrawingGroup, channel, spectrogramWidth);
- createAxes(spectrogramDrawingGroup, xAxis, yAxis, singleSpectrogramHeight, spectrogramWidth);
- createLegend(legendDrawingGroup, color, yColor, singleSpectrogramHeight);
- });
-
-const getSpectrogramCanvasHeight = (spectrogramHeight) =>
- _.range(NB_SPECTROGRAM).map((x) => {
- let height = spectrogramHeight / NB_SPECTROGRAM;
- if (x === 0) {
- height += MARGIN.TOP;
- } else if (x === NB_SPECTROGRAM - 1) {
- height += MARGIN.BOTTOM;
- }
- return height;
- });
-
const createSpectrogram = (containerNode, data) => {
/*
Considering the number of rectangles to display is well over 1k,
@@ -162,29 +164,61 @@ const createSpectrogram = (containerNode, data) => {
setting the first element's position, in this case the canvas, to absolute.
*/
const parentDiv = d3.select(containerNode);
- const canvasWidth = parentDiv.node().getBoundingClientRect().width;
- const canvasHeight = Math.min(canvasWidth * CANVAS_WIDTH_TO_HEIGHT_RATIO, window.innerHeight * CANVAS_HEIGHT_WINDOW_FACTOR);
- const dimensions = {
- canvasWidth: canvasWidth,
- canvasHeight: canvasHeight,
- spectrogramWidth: canvasWidth - MARGIN.LEFT - MARGIN.RIGHT,
- spectrogramsHeight: canvasHeight - MARGIN.TOP - MARGIN.BOTTOM,
- singleSpectrogramCanvasHeight: getSpectrogramCanvasHeight(canvasHeight - MARGIN.TOP - MARGIN.BOTTOM),
- singleSpectrogramHeight: (canvasHeight - MARGIN.BOTTOM - MARGIN.TOP - PADDING) / NB_SPECTROGRAM,
- };
-
- const channelNames = _.filter(_.keys(data), (keyName) => keyName !== FREQUENCY_KEY);
- const scalesAndAxesBySpectrogram = _.map(channelNames, (name) => getScalesAndAxes(data, name, dimensions));
+ const dimensions = getDimensions(parentDiv);
const canvas = parentDiv
.append('canvas')
.attr('width', dimensions.canvasWidth)
.attr('height', dimensions.canvasHeight)
.style('position', 'absolute');
- const svg = parentDiv.append('svg').attr('width', dimensions.canvasWidth).attr('height', dimensions.canvasHeight);
+ const svg = parentDiv
+ .append('svg')
+ .attr('width', dimensions.canvasWidth)
+ .attr('height', dimensions.canvasHeight);
- createSpectrogramRectangles(canvas, scalesAndAxesBySpectrogram, dimensions);
- createSpectrogramAxesAndLegend(svg, scalesAndAxesBySpectrogram, channelNames, dimensions);
+ const channelNames = _.filter(
+ _.keys(data),
+ (keyName) => !_.includes([FREQUENCY_KEY, HYPNOGRAM_KEY], keyName),
+ );
+ const preprocessedData = _.map(channelNames, (channel) =>
+ preprocessData(channel, data),
+ );
+ const scalesAndAxesBySpectrogram = _.map(preprocessedData, (data) =>
+ getScalesAndAxes(data, dimensions),
+ );
+
+ const createSpectrogramWithHighlightedStageCallback = (
+ highlightedSleepStage,
+ ) => () => {
+ const ctx = canvas.node().getContext('2d');
+ ctx.resetTransform();
+ ctx.clearRect(0, 0, dimensions.canvasWidth, dimensions.canvasHeight);
+ ctx.stroke();
+
+ svg.selectAll('*').remove();
+
+ drawSpectrogramRectangles(
+ canvas,
+ scalesAndAxesBySpectrogram,
+ preprocessedData,
+ dimensions,
+ highlightedSleepStage,
+ );
+ drawSpectrogramAxesAndLegend(
+ svg,
+ scalesAndAxesBySpectrogram,
+ preprocessedData,
+ dimensions,
+ );
+ };
+
+ spectrogramCallbacks = _.zipObject(
+ [null, ...STAGES_ORDERED],
+ _.map([null, ...STAGES_ORDERED], (stage) =>
+ createSpectrogramWithHighlightedStageCallback(stage),
+ ),
+ );
+ spectrogramCallbacks[null]();
};
export default createSpectrogram;
diff --git a/web/src/d3/utils.js b/web/src/d3/utils.js
index 0c131470..a91bae52 100644
--- a/web/src/d3/utils.js
+++ b/web/src/d3/utils.js
@@ -1,3 +1,5 @@
+import { EPOCH_DURATION_SEC } from './constants';
+
export const convertTimestampsToDates = (data) =>
data.map((row) =>
Object({
@@ -6,8 +8,56 @@ export const convertTimestampsToDates = (data) =>
}),
);
+export const convertEpochsToAnnotations = (data) => {
+ // Epochs (original data format):
+ // Epochs are equal-length spans of data extracted from raw continuous data [from MNE library glossary].
+ // In our case, each epoch corresponds to a 30s portion of the night with its corresponding sleep stage label.
+ // Annotations (destination data format):
+ // An annotation is defined by an onset, a duration, and a string description [from MNE library glossary].
+ // In our case, an annotation is a contiguous period of the night where a subject stayed in the same sleep stage.
+
+ const annotations = [];
+ const nbEpochs = data.length;
+ let currentAnnotationStart = data[0].timestamp;
+ let currentSleepStage = data[0].sleepStage;
+ let currentAnnotationEpochCount = 0;
+
+ const isSleepStageTransition = (sleepStage, index) =>
+ sleepStage !== currentSleepStage || index === data.length - 1;
+
+ const saveCurrentAnnotation = (timestamp) => {
+ annotations.push({
+ stage: currentSleepStage,
+ proportion: currentAnnotationEpochCount / nbEpochs,
+ start: currentAnnotationStart,
+ end: timestamp,
+ duration: currentAnnotationEpochCount * EPOCH_DURATION_SEC,
+ });
+ };
+
+ data.forEach(({ timestamp, sleepStage }, index) => {
+ currentAnnotationEpochCount++;
+
+ if (isSleepStageTransition(sleepStage, index)) {
+ saveCurrentAnnotation(timestamp);
+ currentAnnotationStart = timestamp;
+ currentSleepStage = sleepStage;
+ currentAnnotationEpochCount = 0;
+ }
+ });
+
+ return annotations;
+};
+
const parseTimestampToDate = (timestamp) => {
// To convert UNIX timestamp to JS Date, we have to convert number of seconds to milliseconds.
const date = new Date(timestamp * 1000);
- return new Date(date.getUTCFullYear(), date.getUTCMonth(), date.getUTCDay(), date.getUTCHours(), date.getUTCMinutes(), date.getUTCSeconds());
+ return new Date(
+ date.getUTCFullYear(),
+ date.getUTCMonth(),
+ date.getUTCDate(), // getUTCDay() returns the weekday (0-6); the Date constructor needs the day-of-month
+ date.getUTCHours(),
+ date.getUTCMinutes(),
+ date.getUTCSeconds(),
+ );
};
diff --git a/web/src/views/sleep-analysis/index.js b/web/src/views/sleep-analysis/index.js
index 00637435..847a5f28 100644
--- a/web/src/views/sleep-analysis/index.js
+++ b/web/src/views/sleep-analysis/index.js
@@ -9,12 +9,11 @@ import WIPWarning from 'components/wip_warning';
import { createSingleHypnogram } from 'd3/hypnogram/hypnogram';
import text from './text.json';
-import createSpectrogram from 'd3/spectrogram/spectrogram';
import StackedBarChartScrollyTelling from './stacked_bar_chart_scrollytelling';
+import SpectrogramScrollyTelling from './spectrogram_scrollytelling';
import { useCSVData } from 'hooks/api_hooks';
import hypnogramDataSleepEDFPath from 'assets/data/hypnogram.csv';
-import spectrogramData from 'assets/data/spectrograms.json';
const SleepAnalysis = () => {
const csvDataSleepEDF = useCSVData(hypnogramDataSleepEDFPath);
@@ -33,87 +32,137 @@ const SleepAnalysis = () => {
- Of course, we are analyzing only one night of sleep so it is therefore tricky to draw general conclusions about your sleep. It is however
+ Of course, we are analyzing only one night of sleep so it is therefore
+ tricky to draw general conclusions about your sleep. It is however
fascinating to see how your night was.
Without further ado, this is what was your night of sleep:
- We have seen that sleep can be decomposed in mainly two stages, whereas REM and NREM, and that we can observe different stage proportions
- across age, gender and different sleep disorders. We’ve also defined other measures of your sleep architecture, such as your sleep latency,
- efficiency and total sleep time. In order to improve your sleep hygiene, many elements can be considered:
+ We have seen that sleep can be decomposed in mainly two stages,
+ whereas REM and NREM, and that we can observe different stage
+ proportions across age, gender and different sleep disorders. We’ve
+ also defined other measures of your sleep architecture, such as your
+ sleep latency, efficiency and total sleep time. In order to improve
+ your sleep hygiene, many elements can be considered:
- Alimentation: having a balanced diet and avoiding sources of caffeine can have a positive impact on one’s sleep. Chocolate, soft drink,
- tea and decaffeinated coffee are unexpected sources of caffeine.
+ Alimentation: having a balanced diet and avoiding sources of
+ caffeine can have a positive impact on one’s sleep. Chocolate, soft
+ drink, tea and decaffeinated coffee are unexpected sources of
+ caffeine.
+
+
+ Routine: going to sleep about at the same time, in a darkened and
+ quiet environment.
+
+
+ Routine: going to sleep about at the same time, in a darkened and
+ quiet environment.
+
+
+ Routine: going to sleep about at the same time, in a darkened and
+ quiet environment.
- Routine: going to sleep about at the same time, in a darkened and quiet environment.
- Routine: going to sleep about at the same time, in a darkened and quiet environment.
- Routine: going to sleep about at the same time, in a darkened and quiet environment.
- Although we’ve looked at many aspects of your night’s sleep, we haven’t properly looked at your sleep dynamics, whereas how your sleep
+ Although we’ve looked at many aspects of your night’s sleep, we
+ haven’t properly looked at your sleep dynamics, whereas how your sleep
evolves overnight.
Hypnogram
- A hypnogram allows you to visually inspect the evolution of your night, through time. The vertical axis represents how hard it is to wake
- up, namely the sleep deepness. We see that REM is one of the lightest sleep stages (along with N1), because we unknowingly wake up from that
- stage. Those short periods of arousal often last no longer than 15 seconds, are followed by a lighter sleep stage, and cannot be remembered
- the next morning. If they are too frequent, they can affect your sleep quality. [5] We can see that, throughout the night, stages follow
- about the same pattern, whereas we go from NREM (either N1, N2 and N3) and then to REM, and so on. We call those sleep cycles, and those
- typically range from four to six, each one lasting from 90 to 110 minutes. Another commonly looked at measurement is the time between sleep
- onset and the first REM epoch, namely REM latency, which corresponds to 20 minutes.
+ A hypnogram allows you to visually inspect the evolution of your
+ night, through time. The vertical axis represents how hard it is to
+ wake up, namely the sleep deepness. We see that REM is one of the
+ lightest sleep stages (along with N1), because we unknowingly wake up
+ from that stage. Those short periods of arousal often last no longer
+ than 15 seconds, are followed by a lighter sleep stage, and cannot be
+ remembered the next morning. If they are too frequent, they can affect
+ your sleep quality. [5] We can see that, throughout the night, stages
+ follow about the same pattern, whereas we go from NREM (either N1, N2
+ and N3) and then to REM, and so on. We call those sleep cycles, and
+ those typically range from four to six, each one lasting from 90 to
+ 110 minutes. Another commonly looked at measurement is the time
+ between sleep onset and the first REM epoch, namely REM latency, which
+ corresponds to 20 minutes.
-
+
- Sleep cycles take place in a broader process, named the circadian rhythm. It is the one that regulates our wake and sleep cycles over a 24
- hours period.
+ Sleep cycles take place in a broader process, named the circadian
+ rhythm. It is the one that regulates our wake and sleep cycles over a
+ 24 hours period.
- You’ve been able to visualize and inspect your night of sleep, which we’ve classified only based on your EEG recordings. In a sleep lab,
- electrophysiology technicians generally look at your EEG, EOG and submental EMG, and then manually classify each epoch of 30 seconds that
- compose your night. By looking at your EEG recordings, we can see some patterns that can help electrophysiology technicians, and our
+ You’ve been able to visualize and inspect your night of sleep, which
+ we’ve classified only based on your EEG recordings. In a sleep lab,
+ electrophysiology technicians generally look at your EEG, EOG and
+ submental EMG, and then manually classify each epoch of 30 seconds
+ that compose your night. By looking at your EEG recordings, we can see
+ some patterns that can help electrophysiology technicians, and our
classifier, discriminate sleep stages throughout the night.
Spectrogram
- Above, we can see the same chart from the first visualization, which represents your sleep stages through the night. Below it, there are
- spectrograms of both your EEG channels. Spectrograms can be viewed as if we took all of your nights signal, we’ve separated it in contiguous
- 30 seconds chunks, stacked then horizontally and to which we’ve applied the fast fourier transform. We then have, for each 30 seconds epoch,
- the corresponding amplitudes for each frequency that makes up the signal, hence the spectra. We then converted the scale to logarithmic, to
- better see the differences in the spectrums. We then speak of signal power instead of signal amplitude, because we look at the spectrums in
- a logarithmic scale.
+ Above, we can see the same chart from the first visualization, which
+ represents your sleep stages through the night. Below it, there are
+ spectrograms of both your EEG channels. Spectrograms can be viewed as
+ if we took all of your night’s signal, we’ve separated it in contiguous
+ 30 seconds chunks, stacked them horizontally and to which we’ve
+ applied the fast fourier transform. We then have, for each 30 seconds
+ epoch, the corresponding amplitudes for each frequency that makes up
+ the signal, hence the spectra. We then converted the scale to
+ logarithmic, to better see the differences in the spectrums. We then
+ speak of signal power instead of signal amplitude, because we look at
+ the spectrums in a logarithmic scale.
How to read it?
- Red therefore means that in that 30 seconds time frame, that particular frequency had a big amplitude. Green means that you had that
- frequency with a lower amplitude. Dark blue means that you didn’t have that frequency in the signal.
+ Red therefore means that in that 30 seconds time frame, that
+ particular frequency had a big amplitude. Green means that you had
+ that frequency with a lower amplitude. Dark blue means that you didn’t
+ have that frequency in the signal.
- To get a better understanding at how spectrograms work, you can check out
-
+ To get a better understanding at how spectrograms work, you can check
+ out
+
{' '}
this visualization{' '}
that decomposes sound frequency from your microphone.
-
+
- Generally, when talking about brain waves, we group certain frequencies together into bands. There are overall five frequency bands, where
- each has a general associated behaviour, or state of mind. We will cover those when looking at time frames corresponding to each sleep
- stage.
+ Generally, when talking about brain waves, we group certain
+ frequencies together into bands. There are overall five frequency
+ bands, where each has a general associated behaviour, or state of
+ mind. We will cover those when looking at time frames corresponding to
+ each sleep stage.
- We can associate wake stages with low-amplitude activity in the 15 to 60 Hz frequency range, called the beta band. By slowly falling asleep,
- the signal frequencies tend to decrease into the 4 to 8 Hz range, or the theta band, and to have larger amplitudes. These characteristics
- are associated with N1. N2 stage has the same characteristics, and also includes sleep spindles. They last only a few seconds and are a
- large oscillation in the 10 to 15 hz band. Because they do not occur during all of the 30 seconds period, they cannot be seen here. Stage
- N3, also called slow wave sleep, is characterized by slower waves between 0.5 and 4 Hz, known as the delta range, with large amplitudes. REM
- stage has the same characteristics as Wake stage, whereas there are low voltage high frequency activity.
+ We can associate wake stages with low-amplitude activity in the 15 to
+ 60 Hz frequency range, called the beta band. By slowly falling asleep,
+ the signal frequencies tend to decrease into the 4 to 8 Hz range, or
+ the theta band, and to have larger amplitudes. These characteristics
+ are associated with N1. N2 stage has the same characteristics, and
+ also includes sleep spindles. They last only a few seconds and are a
+ large oscillation in the 10 to 15 hz band. Because they do not occur
+ during all of the 30 seconds period, they cannot be seen here. Stage
+ N3, also called slow wave sleep, is characterized by slower waves
+ between 0.5 and 4 Hz, known as the delta range, with large amplitudes.
+ REM stage has the same characteristics as Wake stage, whereas there
+ are low voltage high frequency activity.
Wanna know how accurate this data is?
diff --git a/web/src/views/sleep-analysis/spectrogram_scrollytelling.js b/web/src/views/sleep-analysis/spectrogram_scrollytelling.js
new file mode 100644
index 00000000..0b72de38
--- /dev/null
+++ b/web/src/views/sleep-analysis/spectrogram_scrollytelling.js
@@ -0,0 +1,169 @@
+import React, { useState } from 'react';
+import { Container, Card, CardBody } from 'reactstrap';
+
+import { HYPNOGRAM_KEY } from '../../d3/spectrogram/constants';
+import createSpectrogram, {
+ spectrogramCallbacks,
+} from '../../d3/spectrogram/spectrogram';
+import D3ComponentScrollyTelling from '../../components/d3component_scrollytelling';
+import WaypointDirection from '../../components/waypoint_direction';
+
+import { useCSVData } from '../../hooks/api_hooks';
+
+import hypnogramDataSleepEDFPath from 'assets/data/hypnogram.csv';
+import spectrogramData from 'assets/data/spectrograms.json';
+
+const SpectrogramScrollyTelling = () => {
+ const csvDataSleepEDF = useCSVData(hypnogramDataSleepEDFPath);
+ const spectrogramWithHypnogramData = csvDataSleepEDF
+ ? { ...spectrogramData, [HYPNOGRAM_KEY]: csvDataSleepEDF }
+ : null;
+ const [isInitialized, setIsInitialized] = useState(false);
+
+ return (
+
+
+
+
+
+
+
+
+ Here are represented the spectrograms of both your EEG channels.
+ Spectrograms can be viewed as if we took all of your night’s
+ signal, separated it into contiguous 30-second chunks, stacked them
+ horizontally, and applied the fast Fourier transform to each chunk.
+ We then have, for each 30-second epoch, the corresponding
+ amplitudes of each frequency that makes up the signal, hence the
+ spectra.
+
+
+ We then converted the scale to logarithmic, to better see the
+ differences between the spectra. We therefore speak of signal power
+ instead of signal amplitude, because we look at the spectra on a
+ logarithmic scale.
+
+ How to read it?
+
+ Red therefore means that in that 30-second time frame, that
+ particular frequency had a big amplitude. Green means that you had
+ that frequency with a lower amplitude. Dark blue means that you
+ didn’t have that frequency in the signal.
+
+
+ To get a better understanding of how spectrograms work, you can{' '}
+
+ check out this example
+ {' '}
+ that decomposes sound frequency from your microphone.
+
+
+
+
+
+
+
+ Generally, when talking about brain waves, we group certain
+ frequencies together into bands. There are overall five frequency
+ bands, where each has a general associated behaviour, or state of
+ mind. We will cover those when looking at time frames corresponding
+ to each sleep stage.
+
+
+
+ {isInitialized && (
+
+ )}
+
+
+
+
+ We can associate wake stages with low-amplitude activity in the 15
+ to 60 Hz frequency range, called the beta band. [6]
+
+
+
+ {isInitialized && (
+
+ )}
+
+
+
+
+ By slowly falling asleep, the signal frequencies tend to decrease
+ into the 4 to 8 Hz range, or the theta band, and to have larger
+ amplitudes. These characteristics are associated with N1.
+
+
+
+ {isInitialized && (
+
+ )}
+
+
+
+
+ N2 stage has the same characteristics as N1, and also includes sleep
+ spindles. They last only a few seconds and are a large oscillation
+ in the 10 to 15 Hz band. Because they do not occur during the whole
+ 30-second period, they cannot be seen here. [6]
+
+
+
+ {isInitialized && (
+
+ )}
+
+
+
+
+ Stage N3, also called slow wave sleep, is characterized by slower
+ waves between 0.5 and 4 Hz, known as the delta range, with large
+ amplitudes. [6]
+
+
+
+ {isInitialized && (
+
+ )}
+
+
+
+
+ REM stage has the same characteristics as Wake stage, where there is
+ low-voltage, high-frequency activity. [6]
+
+
+
+
+
+
+ );
+};
+
+export default SpectrogramScrollyTelling;
diff --git a/web/src/views/sleep-analysis/stacked_bar_chart_scrollytelling.js b/web/src/views/sleep-analysis/stacked_bar_chart_scrollytelling.js
index 64f0703b..97e13c0e 100644
--- a/web/src/views/sleep-analysis/stacked_bar_chart_scrollytelling.js
+++ b/web/src/views/sleep-analysis/stacked_bar_chart_scrollytelling.js
@@ -3,7 +3,7 @@ import { Container, Card, CardBody } from 'reactstrap';
import hypnogramCSVPath from 'assets/data/hypnogram.csv';
-import D3Component from 'components/d3component';
+import D3ComponentScrollyTelling from 'components/d3component_scrollytelling';
import WaypointDirection from 'components/waypoint_direction';
import createEvolvingChart, {
@@ -17,44 +17,65 @@ import { useCSVData } from 'hooks/api_hooks';
const StackedBarChartScrollyTelling = () => {
const csvData = useCSVData(hypnogramCSVPath);
const [isInitialized, setIsInitialized] = useState(false);
- const createEvolvingChartCallback = (svg, data) => {
- if (!isInitialized) {
- setIsInitialized(true);
- createEvolvingChart(svg, data);
- }
- };
return (
-
+
- We can see that each colored block represents a part of your night. They are ordered from bed time to out of bed timestamps you’ve written
- in your journal. Each color is associated with a specific sleep stage. You went to bed at 12:22 am and you got out of bed at 9:47 am,
- which adds up to 9 hours and 25 minutes of time spent in bed. Of this total time, you spent 7 hours and 27 minutes actually sleeping. You
- first fell asleep at XX:XX, to which we will refer to as sleep onset. The last non wake block ended at XX:XX, which can also be referred
- to as sleep offset. During that night's sleep, you went through each of the 5 five stages. Let's try to see a little better what happened
- about each of them.
+ We can see that each colored block represents a part of your night.
+ They are ordered from bed time to out of bed timestamps you’ve
+ written in your journal. Each color is associated with a specific
+ sleep stage. You went to bed at 12:22 am and you got out of bed at
+ 9:47 am, which adds up to 9 hours and 25 minutes of time spent in
+ bed. Of this total time, you spent 7 hours and 27 minutes actually
+ sleeping. You first fell asleep at XX:XX, to which we will refer to
+ as sleep onset. The last non wake block ended at XX:XX, which can
+ also be referred to as sleep offset. During that night's sleep, you
+ went through each of the five stages. Let's try to see a little
+ better what happened about each of them.
- {isInitialized && }
+ {isInitialized && (
+
+ )}
- Wake stage is of course the stage we want to minimize when in bed. It can be decomposed into two parts:
+
+ Wake stage is of course the stage we want to minimize when in bed.
+ It can be decomposed into two parts:
+
- Sleep latency : Time spent before falling asleep, which corresponds to X minutes in your case.
- Wake after sleep onset (WASO): Time spent awake after first falling asleep and before waking up.
{' '}
- For healthy adults, it is normal to experience small awakenings during the night. Unprovoked awakenings are mostly during or after REM
- stages.{' '}
+ Sleep latency : Time spent before falling asleep, which
+ corresponds to X minutes in your case.{' '}
+
+
+ {' '}
+ Wake after sleep onset (WASO): Time spent awake after first
+ falling asleep and before waking up.{' '}
+
+
+ {' '}
+ For healthy adults, it is normal to experience small awakenings
+ during the night. Unprovoked awakenings are mostly during or after
+ REM stages.{' '}
@@ -63,32 +84,45 @@ const StackedBarChartScrollyTelling = () => {
- REM stage stands for “Rapid Eyes Movements” and is also known as “paradoxical sleep”. It is associated with dreaming and,
- as the National Sleep Foundation says, “the brain is awake and body paralyzed.”
+ REM stage stands for “Rapid Eye Movements” and is
+ also known as “paradoxical sleep”. It is associated with dreaming
+ and, as the National Sleep Foundation says,{' '}
+ “the brain is awake and body paralyzed.”
- N1 stage is associated with that drowsy feeling before falling asleep. Most people wouldn’t say they fell asleep if
+ N1 stage is associated with that drowsy feeling
+ before falling asleep. Most people wouldn’t say they fell asleep if
they’ve been woken up from N1 sleep.
- N2 stage still corresponds to a light sleep, but where the muscle activity decreases more, and the eyes have stopped
+ N2 stage still corresponds to a light sleep, but
+ where the muscle activity decreases more, and the eyes have stopped
moving. It is called, along with N1, light sleep .
- N3 stage is when you are deeply asleep, hence it’s also called deep sleep , or sometimes{' '}
- slow wave sleep, and is the most difficult to wake up from. It is during those stages that your cells get repaired, and
- that tissue grows. But how much time did you spend in each stage during the whole night?
+ N3 stage is when you are deeply asleep, hence it’s
+ also called deep sleep , or sometimes{' '}
+ slow wave sleep, and is the most difficult to wake
+ up from. It is during those stages that your cells get repaired, and
+ that tissue grows. But how much time did you spend in each stage
+ during the whole night?
- {isInitialized && }
+ {isInitialized && (
+
+ )}
- From here, we can look at your sleep efficiency, which is the proportion of time spent asleep over the overall time spent in bed. In your
- case, it corresponds to 79%, or 7h27.
+ From here, we can look at your sleep efficiency, which is the
+ proportion of time spent asleep over the overall time spent in bed.
+ In your case, it corresponds to 79%, or 7h27.
@@ -96,9 +130,11 @@ const StackedBarChartScrollyTelling = () => {
- We are currently looking at your in bed sleep stage proportions. Wake time may be overrepresented, because it includes your sleep latency
- and the time you spent in bed after waking up. In order to look at your actual stage proportions, we must cut them out from wake time to
- only keep WASO.
+ We are currently looking at your in bed sleep stage proportions.
+ Wake time may be overrepresented, because it includes your sleep
+ latency and the time you spent in bed after waking up. In order to
+ look at your actual stage proportions, we must cut them out from
+ wake time to only keep WASO.
@@ -106,19 +142,26 @@ const StackedBarChartScrollyTelling = () => {
- We can see that the most prominent sleep stage is N2, which in your case corresponds to XXXX. How does your night compare to other
+ We can see that the most prominent sleep stage is N2, which in your
+ case corresponds to XXXX. How does your night compare to other
people’s night?
- {isInitialized && }
+ {isInitialized && (
+
+ )}
- As a rule of thumb, adults approximately stay 5% of their total sleep time in N1; 50% in N2; and 20% is in N3. The remaining 25% is REM
- stage sleep.
+ As a rule of thumb, adults approximately stay 5% of their total
+ sleep time in N1; 50% in N2; and 20% in N3. The remaining 25% is
+ REM stage sleep.