diff --git a/assets/scss/custom.scss b/assets/scss/custom.scss
index fd6a4ce6..e90e37b4 100644
--- a/assets/scss/custom.scss
+++ b/assets/scss/custom.scss
@@ -1,6 +1,13 @@
 // Override this file to add your own SCSS styling.
 @use "sass:color";
 
+body {
+  line-height: 1.6;
+}
+
+.sidebar, .sidebar a { color: #333333; }
+.dark .sidebar, .dark .sidebar a { color: #e0e0e0; }
+
 .container {
   max-width: 90%;
 }
@@ -196,7 +203,12 @@ html {
 }
 
 .hljs {
-  background-color: #f9f9f9;
+  .hljs-title,
+  .hljs-title.class_,
+  .hljs-title.class_.inherited__,
+  .hljs-title.function_ {
+    color: #7a3e9d;
+  }
 }
 
 #TableOfContents li a,
diff --git a/content/notes/pytorch-hpc/overview.md b/content/notes/pytorch-hpc/overview.md
index 17fc4af2..970c4f03 100644
--- a/content/notes/pytorch-hpc/overview.md
+++ b/content/notes/pytorch-hpc/overview.md
@@ -96,18 +96,18 @@ Activation functions introduce non-linearity into neural networks, enabling them