<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Backend Engineering -- Node.js, Files & Databases - Better Dev</title>
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700;800&display=swap" rel="stylesheet">
<link rel="stylesheet" href="style.css">
</head>
<body>
<header class="topbar">
<button class="sidebar-toggle" aria-label="Open navigation" aria-expanded="false">
<span class="hamburger-icon"></span>
</button>
<a href="index.html" class="logo">Better Dev</a>
</header>
<div class="sidebar-backdrop" aria-hidden="true"></div>
<aside class="sidebar" aria-label="Site navigation">
<div class="sidebar-header">
<span class="sidebar-title">Navigation</span>
<button class="sidebar-close" aria-label="Close navigation">×</button>
</div>
<div class="sidebar-search">
<input type="text" class="sidebar-search-input" placeholder="Search topics..." aria-label="Search topics">
<div class="sidebar-search-results"></div>
</div>
<nav class="sidebar-nav">
<div class="sidebar-group">
<a href="index.html">Home</a>
</div>
<div class="sidebar-group">
<div class="sidebar-group-label">Mathematics</div>
<a href="pre-algebra.html">Pre-Algebra</a>
<a href="algebra.html">Algebra</a>
<a href="sequences-series.html">Sequences & Series</a>
<a href="geometry.html">Geometry</a>
<a href="calculus.html">Calculus</a>
<a href="discrete-math.html">Discrete Math</a>
<a href="linear-algebra.html">Linear Algebra</a>
<a href="probability.html">Probability & Statistics</a>
<a href="binary-systems.html">Binary & Number Systems</a>
<a href="number-theory.html">Number Theory for CP</a>
<a href="computational-geometry.html">Computational Geometry</a>
<a href="game-theory.html">Game Theory</a>
</div>
<div class="sidebar-group">
<div class="sidebar-group-label">Data Structures & Algorithms</div>
<a href="dsa-foundations.html">DSA Foundations</a>
<a href="arrays.html">Arrays & Strings</a>
<a href="stacks-queues.html">Stacks & Queues</a>
<a href="hashmaps.html">Hash Maps & Sets</a>
<a href="linked-lists.html">Linked Lists</a>
<a href="trees.html">Trees & BST</a>
<a href="graphs.html">Graphs</a>
<a href="sorting.html">Sorting & Searching</a>
<a href="patterns.html">LeetCode Patterns</a>
<a href="dp.html">Dynamic Programming</a>
<a href="advanced.html">Advanced Topics</a>
<a href="string-algorithms.html">String Algorithms</a>
<a href="advanced-graphs.html">Advanced Graphs</a>
<a href="advanced-dp.html">Advanced DP</a>
<a href="advanced-ds.html">Advanced Data Structures</a>
<a href="leetcode-650.html">The 650 Problems</a>
<a href="competitive-programming.html">CP Roadmap</a>
</div>
<div class="sidebar-group">
<div class="sidebar-group-label">Languages & Systems</div>
<a href="cpp.html">C++</a>
<a href="golang.html">Go</a>
<a href="javascript.html">JavaScript Deep Dive</a>
<a href="typescript.html">TypeScript</a>
<a href="nodejs.html">Node.js Internals</a>
<a href="os.html">Operating Systems</a>
<a href="linux.html">Linux</a>
<a href="git.html">Git</a>
<a href="backend.html">Backend</a>
<a href="system-design.html">System Design</a>
<a href="networking.html">Networking</a>
<a href="cloud.html">Cloud & Infrastructure</a>
<a href="docker.html">Docker & Compose</a>
<a href="kubernetes.html">Kubernetes</a>
<a href="message-queues.html">Queues & Pub/Sub</a>
<a href="selfhosting.html">VPS & Self-Hosting</a>
<a href="databases.html">PostgreSQL & MySQL</a>
<a href="stripe.html">Stripe & Payments</a>
<a href="distributed-systems.html">Distributed Systems</a>
<a href="backend-engineering.html">Backend Engineering</a>
</div>
<div class="sidebar-group">
<div class="sidebar-group-label">JS/TS Ecosystem</div>
<a href="js-tooling.html">Tooling & Bundlers</a>
<a href="js-testing.html">Testing</a>
<a href="ts-projects.html">Building with TS</a>
</div>
<div class="sidebar-group">
<div class="sidebar-group-label">More</div>
<a href="seans-brain.html">Sean's Brain</a>
</div>
</nav>
</aside>
<div class="container">
<!-- ===== PAGE HEADER ===== -->
<div class="page-header">
<div class="breadcrumb"><a href="index.html">Home</a> / Backend Engineering</div>
<h1>Backend Engineering</h1>
<p>Master the Node.js internals that separate frontend developers from full-stack engineers. Blob, File API, Buffers, Streams, the filesystem, and database fundamentals -- all the concepts you need to handle files, multimedia, and data at scale.</p>
</div>
<!-- ===== TABLE OF CONTENTS ===== -->
<div class="toc">
<h4>Table of Contents</h4>
<a href="#multimedia">1. Multimedia & Binary Data (Blob, File, MIME, Base64)</a>
<a href="#node-fs">2. Node.js File System & Path</a>
<a href="#buffers">3. Buffers in Node.js</a>
<a href="#streams">4. Streams in Node.js</a>
<a href="#db-scale">5. Database Essentials: Transactions, Indexes & Scale</a>
<a href="#chunking">6. Chunked Reading & Streaming Servers</a>
<a href="#scalability">7. Scalability Patterns</a>
<a href="#backend-patterns">8. Backend Design Patterns</a>
</div>
<!-- ============================================================ -->
<!-- SECTION 1: MULTIMEDIA & BINARY DATA -->
<!-- ============================================================ -->
<section id="multimedia">
<h2>1. Multimedia & Binary Data (Blob, File, MIME, Base64)</h2>
<p>Every app eventually deals with files -- images, videos, PDFs, audio. Understanding how binary data moves between the browser, your API, and storage is <strong>non-negotiable</strong> for a backend engineer. This section covers the fundamental building blocks.</p>
<div class="warning-box">
<div class="label">Why This Matters for Your Career</div>
<p>Most tutorials skip this. Then you get a job and your first ticket is "users can upload profile pictures up to 5MB." Suddenly you need to understand MIME types, multipart form data, base64 encoding, file size validation, and streaming uploads. This section gives you the mental model so none of that feels foreign.</p>
</div>
<h3>What is Binary Data?</h3>
<p>Everything on a computer is binary -- ones and zeros. Text files are binary with a nice encoding (UTF-8) that maps bytes to characters. But images, videos, and audio are <strong>raw binary</strong> -- there is no "human-readable" version. You work with them as sequences of bytes.</p>
<div class="example-box">
<div class="label">Text vs Binary -- The Key Difference</div>
<p>A text file containing "Hello" is 5 bytes: <code>48 65 6C 6C 6F</code> (hex). Each byte maps to a letter.</p>
<p>A PNG image's first 8 bytes are always: <code>89 50 4E 47 0D 0A 1A 0A</code> -- this is the "magic number" that tells programs "I'm a PNG." The rest is compressed pixel data that only image decoders understand.</p>
<p><strong>The takeaway:</strong> You can <code>console.log()</code> text and read it. You cannot <code>console.log()</code> an image and get anything useful. Binary data requires specialized handling.</p>
</div>
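<div class="example-box">
<div class="label">Peeking at Raw Bytes -- A Node.js Sketch</div>
<p>A quick sketch of the point above, assuming a local <code>photo.png</code> exists (the filename is illustrative).</p>
<pre><code>const fs = require("fs");
// Read with no encoding -> raw bytes as a Buffer
const buf = fs.readFileSync("./photo.png");
// The first 8 bytes are the PNG magic number
console.log(buf.subarray(0, 8).toString("hex")); // "89504e470d0a1a0a"
// Decoding the whole thing as text just produces garbage
console.log(buf.toString("utf-8").slice(0, 20)); // unreadable characters</code></pre>
</div>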
<h3>Blob (Binary Large Object)</h3>
<p>A <strong>Blob</strong> is the browser's way of representing raw binary data. It's an immutable chunk of bytes with a MIME type attached. You cannot read a Blob directly -- you must use APIs to extract its contents.</p>
<div class="example-box">
<div class="label">Creating and Using Blobs in the Browser</div>
<pre><code>// Creating a Blob from text
const textBlob = new Blob(["Hello, World!"], { type: "text/plain" });
console.log(textBlob.size); // 13 bytes
console.log(textBlob.type); // "text/plain"
// Creating a Blob from JSON
const data = { name: "Sean", role: "developer" };
const jsonBlob = new Blob([JSON.stringify(data)], { type: "application/json" });
// Creating a Blob from binary data (Uint8Array)
const bytes = new Uint8Array([72, 101, 108, 108, 111]); // "Hello"
const binaryBlob = new Blob([bytes], { type: "application/octet-stream" });
// Reading a Blob's contents
const text = await textBlob.text(); // "Hello, World!"
const buffer = await textBlob.arrayBuffer(); // Raw bytes as ArrayBuffer
// Slicing a Blob (like substring but for binary)
const partial = textBlob.slice(0, 5, "text/plain"); // First 5 bytes</code></pre>
</div>
<div class="tip-box">
<div class="label">When You Use Blobs in Real Apps</div>
<ul>
<li><strong>File uploads:</strong> When a user selects a file via <code><input type="file"></code>, you get a <code>File</code> object (which extends Blob)</li>
<li><strong>Download links:</strong> <code>URL.createObjectURL(blob)</code> creates a temporary URL to trigger downloads (sketched below)</li>
<li><strong>Canvas exports:</strong> <code>canvas.toBlob()</code> gives you the image as a Blob for uploading</li>
<li><strong>Fetch responses:</strong> <code>response.blob()</code> gets binary data from API responses</li>
</ul>
</div>
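<div class="example-box">
<div class="label">Download Links with createObjectURL -- A Sketch</div>
<p>A minimal sketch of the download-link pattern from the list above; the CSV content and filename are made up for illustration.</p>
<pre><code>const csv = "id,name\n1,Sean\n";
const blob = new Blob([csv], { type: "text/csv" });
// Temporary URL pointing at the in-memory Blob
const url = URL.createObjectURL(blob);
const link = document.createElement("a");
link.href = url;
link.download = "report.csv"; // Suggested filename for the download
link.click();
// Release the object URL when you're done with it
URL.revokeObjectURL(url);</code></pre>
</div>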
<h3>The File API</h3>
<p>The <code>File</code> object extends Blob, adding <code>name</code>, <code>lastModified</code>, and <code>webkitRelativePath</code>. When a user picks a file from their computer, the browser gives you a File object.</p>
<div class="example-box">
<div class="label">Handling File Uploads -- The Complete Pattern</div>
<pre><code>// HTML: <input type="file" id="upload" accept="image/*" multiple>
const input = document.getElementById("upload");
input.addEventListener("change", async (event) => {
const files = event.target.files; // FileList (array-like)
for (const file of files) {
console.log(file.name); // "photo.jpg"
console.log(file.size); // 2458832 (bytes)
console.log(file.type); // "image/jpeg"
console.log(file.lastModified); // 1708123456789 (timestamp)
// Validate before uploading
if (file.size > 5 * 1024 * 1024) {
alert("File too large! Max 5MB.");
continue;
}
if (!file.type.startsWith("image/")) {
alert("Only images allowed!");
continue;
}
// Option 1: Upload as FormData (multipart -- most common)
const formData = new FormData();
formData.append("avatar", file);
formData.append("userId", "123");
await fetch("/api/upload", { method: "POST", body: formData });
// Option 2: Upload as raw binary
await fetch("/api/upload", {
method: "POST",
headers: { "Content-Type": file.type },
body: file, // File IS a Blob, so this works
});
// Option 3: Read as base64 for preview
const reader = new FileReader();
reader.onload = () => {
const base64 = reader.result; // "data:image/jpeg;base64,/9j/4AAQ..."
document.getElementById("preview").src = base64;
};
reader.readAsDataURL(file);
}
});</code></pre>
</div>
<h3>MIME Types</h3>
<p>MIME (Multipurpose Internet Mail Extensions) types tell systems <strong>what kind of data</strong> they're looking at. Format: <code>type/subtype</code>. Getting MIME types wrong causes real bugs -- browsers won't display images, downloads get corrupted, security filters reject uploads.</p>
<div class="example-box">
<div class="label">Common MIME Types You Must Know</div>
<pre><code>// Text formats
"text/plain" // .txt
"text/html" // .html
"text/css" // .css
"text/javascript" // .js (also "application/javascript")
"text/csv" // .csv
// Application formats
"application/json" // .json -- APIs
"application/pdf" // .pdf
"application/xml" // .xml
"application/zip" // .zip
"application/octet-stream" // Generic binary (unknown type)
// Images
"image/jpeg" // .jpg, .jpeg
"image/png" // .png
"image/gif" // .gif
"image/webp" // .webp (modern, smaller)
"image/svg+xml" // .svg (vector graphics)
// Audio
"audio/mpeg" // .mp3
"audio/wav" // .wav
"audio/ogg" // .ogg
// Video
"video/mp4" // .mp4
"video/webm" // .webm
"video/ogg" // .ogv
// Multipart (for form uploads)
"multipart/form-data" // File uploads via forms</code></pre>
</div>
<div class="warning-box">
<div class="label">Security: Never Trust Client-Sent MIME Types</div>
<p>A user can rename <code>malware.exe</code> to <code>photo.jpg</code> and the browser will send <code>image/jpeg</code> as the MIME type. <strong>Always validate on the server</strong> by checking the file's magic bytes (first few bytes), not the extension or MIME header.</p>
<pre><code>// Server-side: check magic bytes for real file type
// JPEG starts with: FF D8 FF
// PNG starts with: 89 50 4E 47
// PDF starts with: 25 50 44 46 ("%PDF")
// GIF starts with: 47 49 46 38 ("GIF8")
const buf = Buffer.from(fileData);
const isJPEG = buf[0] === 0xFF && buf[1] === 0xD8 && buf[2] === 0xFF;
const isPNG = buf[0] === 0x89 && buf[1] === 0x50 && buf[2] === 0x4E && buf[3] === 0x47;</code></pre>
</div>
<h3>Base64 Encoding</h3>
<p>Base64 converts binary data into ASCII text using 64 characters (A-Z, a-z, 0-9, +, /). Why? Because many systems (JSON, HTML, email, URLs) can only handle text, not raw bytes. Base64 is the bridge.</p>
<div class="example-box">
<div class="label">Base64 in Practice</div>
<pre><code>// Browser: encoding and decoding
const encoded = btoa("Hello, World!"); // "SGVsbG8sIFdvcmxkIQ=="
const decoded = atob("SGVsbG8sIFdvcmxkIQ=="); // "Hello, World!"
// Node.js: Buffer handles base64 natively
const buf = Buffer.from("Hello, World!");
const b64 = buf.toString("base64"); // "SGVsbG8sIFdvcmxkIQ=="
const original = Buffer.from(b64, "base64").toString("utf-8"); // "Hello, World!"
// Data URLs: embed binary in HTML/CSS (small images only!)
// Format: data:[MIME];base64,[DATA]
const imgTag = `<img src="data:image/png;base64,iVBORw0KGgo..." />`;
// Converting a file to base64 for JSON APIs (Node.js)
const fs = require("fs");
const file = fs.readFileSync("photo.jpg");
const payload = {
filename: "photo.jpg",
content: file.toString("base64"), // Now it's a JSON-safe string
mime: "image/jpeg"
};</code></pre>
</div>
<div class="warning-box">
<div class="label">Base64 Overhead: 33% Larger</div>
<p>Base64 encoding increases data size by ~33%. A 3MB image becomes ~4MB as base64. This is why you should <strong>never</strong> use base64 for large file transfers -- use multipart form data or streams instead. Base64 is best for small assets (icons, thumbnails) or when you must embed binary data inside JSON/HTML.</p>
</div>
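<div class="example-box">
<div class="label">Seeing the 33% for Yourself -- A Sketch</div>
<p>A quick Node.js check of the size math above. 3MB divides evenly into 3-byte groups, so the numbers come out exact.</p>
<pre><code>// Base64 maps every 3 bytes to 4 ASCII characters (~4/3 growth)
const raw = Buffer.alloc(3 * 1024 * 1024); // 3MB
const encoded = raw.toString("base64");
console.log(raw.length); // 3145728 bytes
console.log(encoded.length); // 4194304 characters (~33% larger)</code></pre>
</div>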
<h3>ArrayBuffer and TypedArrays</h3>
<p>Under the hood, binary data in JavaScript lives in <strong>ArrayBuffers</strong> -- fixed-length chunks of raw memory. You read/write them through <strong>TypedArrays</strong> (views into the buffer).</p>
<div class="example-box">
<div class="label">Working with ArrayBuffers</div>
<pre><code>// Create a buffer of 16 bytes
const buffer = new ArrayBuffer(16);
// View it as unsigned 8-bit integers (0-255 per byte)
const uint8 = new Uint8Array(buffer);
uint8[0] = 72; // 'H'
uint8[1] = 101; // 'e'
uint8[2] = 108; // 'l'
uint8[3] = 108; // 'l'
uint8[4] = 111; // 'o'
// View the SAME buffer as 32-bit integers
const uint32 = new Uint32Array(buffer);
console.log(uint32[0]); // Combines first 4 bytes into one 32-bit number
// Common TypedArrays:
// Uint8Array -- bytes (0 to 255) -- most common for file I/O
// Int8Array -- signed bytes (-128 to 127)
// Uint16Array -- 2-byte unsigned (0 to 65535)
// Int32Array -- 4-byte signed
// Float32Array -- 4-byte floats (for audio/graphics)
// Float64Array -- 8-byte doubles (for precision math)
// Converting between Blob and ArrayBuffer
const blob = new Blob([uint8], { type: "application/octet-stream" });
const backToBuffer = await blob.arrayBuffer();
const backToUint8 = new Uint8Array(backToBuffer);</code></pre>
</div>
<h3>Multipart Form Data -- How File Uploads Actually Work</h3>
<p>When you submit a form with files, the browser sends a <strong>multipart/form-data</strong> request. The body is divided into "parts" separated by a boundary string. Each part has its own headers and content. This is how most file uploads work on the web.</p>
<div class="example-box">
<div class="label">What a Multipart Request Looks Like on the Wire</div>
<pre><code>POST /api/upload HTTP/1.1
Content-Type: multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW

------WebKitFormBoundary7MA4YWxkTrZu0gW
Content-Disposition: form-data; name="username"

sean
------WebKitFormBoundary7MA4YWxkTrZu0gW
Content-Disposition: form-data; name="avatar"; filename="photo.jpg"
Content-Type: image/jpeg

[RAW BINARY BYTES OF THE IMAGE HERE]
------WebKitFormBoundary7MA4YWxkTrZu0gW--</code></pre>
<p>The boundary string separates each field. Text fields send their value directly. File fields include the filename, MIME type, and raw binary content. Your backend framework (Express, Fastify, etc.) parses this for you using libraries like <code>multer</code> or <code>busboy</code>.</p>
</div>
<div class="example-box">
<div class="label">Express.js File Upload with Multer -- Production Pattern</div>
<pre><code>const express = require("express");
const multer = require("multer");
const path = require("path");
// Configure where and how files are stored
const storage = multer.diskStorage({
destination: (req, file, cb) => {
cb(null, "./uploads/"); // Save to uploads directory
},
filename: (req, file, cb) => {
// Unique name: timestamp-randomhex.extension
const uniqueName = Date.now() + "-" + crypto.randomBytes(6).toString("hex");
const ext = path.extname(file.originalname); // ".jpg"
cb(null, uniqueName + ext);
}
});
// Validation: only images, max 5MB
const upload = multer({
storage,
limits: { fileSize: 5 * 1024 * 1024 }, // 5MB
fileFilter: (req, file, cb) => {
const allowed = ["image/jpeg", "image/png", "image/webp"];
if (allowed.includes(file.mimetype)) {
cb(null, true);
} else {
cb(new Error("Only JPEG, PNG, and WebP allowed"));
}
}
});
const app = express();
// Single file upload
app.post("/api/avatar", upload.single("avatar"), (req, res) => {
console.log(req.file);
// { fieldname: 'avatar', originalname: 'photo.jpg',
// mimetype: 'image/jpeg', size: 245883,
// destination: './uploads/', filename: '1708123456789-a1b2c3.jpg',
// path: 'uploads/1708123456789-a1b2c3.jpg' }
res.json({ url: `/uploads/${req.file.filename}` });
});
// Multiple files
app.post("/api/gallery", upload.array("photos", 10), (req, res) => {
console.log(req.files); // Array of file objects (max 10)
res.json({ count: req.files.length });
});
// Error handling for multer
app.use((err, req, res, next) => {
if (err instanceof multer.MulterError) {
if (err.code === "LIMIT_FILE_SIZE") {
return res.status(413).json({ error: "File too large. Max 5MB." });
}
}
res.status(400).json({ error: err.message });
});</code></pre>
</div>
<div class="tip-box">
<div class="label">File Storage: Local vs Cloud</div>
<ul>
<li><strong>Local filesystem</strong> -- Fine for dev and small apps. Breaks when you have multiple servers (load balancing) because files only exist on one machine.</li>
<li><strong>Cloud object storage (S3, GCS, R2)</strong> -- The production standard. Files are accessible from any server, automatically replicated, and you get CDN integration for fast delivery.</li>
<li><strong>Database (BLOB column)</strong> -- Almost never do this. Databases are optimized for structured data, not large binary blobs. It makes backups huge and queries slow.</li>
<li><strong>The pattern:</strong> Upload to cloud storage, save the URL/key in your database. Serve via CDN (sketched below).</li>
</ul>
</div>
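<div class="example-box">
<div class="label">Upload to Object Storage, Store the Key -- A Sketch</div>
<p>A minimal sketch of the pattern above using the AWS SDK v3. The bucket name and <code>uploadAvatar</code> helper are hypothetical, and <code>file</code> is assumed to come from <code>multer.memoryStorage()</code>.</p>
<pre><code>const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3");
const crypto = require("crypto");
const path = require("path");
const s3 = new S3Client({ region: "us-east-1" });
async function uploadAvatar(file) {
  // Unique object key -- never trust the original filename
  const key = `avatars/${crypto.randomUUID()}${path.extname(file.originalname)}`;
  await s3.send(new PutObjectCommand({
    Bucket: "my-app-uploads", // hypothetical bucket
    Key: key,
    Body: file.buffer, // in-memory buffer from multer.memoryStorage()
    ContentType: file.mimetype,
  }));
  return key; // Save this key in your database; serve via CDN
}</code></pre>
</div>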
</section>
<!-- ============================================================ -->
<!-- SECTION 2: NODE.JS FILE SYSTEM & PATH -->
<!-- ============================================================ -->
<section id="node-fs">
<h2>2. Node.js File System & Path</h2>
<p>The <code>fs</code> (file system) and <code>path</code> modules are how Node.js interacts with the operating system's files and directories. Every backend engineer uses these daily -- reading config files, writing logs, processing uploads, generating reports.</p>
<div class="warning-box">
<div class="label">Sync vs Async -- This Will Bite You</div>
<p>The <code>fs</code> module offers three flavors of every operation:</p>
<ul>
<li><strong><code>fs.readFileSync()</code></strong> -- Blocks the entire event loop. Use only at startup (reading config). <strong>Never in request handlers.</strong></li>
<li><strong><code>fs.readFile(path, callback)</code></strong> -- Callback-based async. Works but leads to callback hell.</li>
<li><strong><code>fs.promises.readFile()</code></strong> -- Promise-based async. <strong>This is what you should use.</strong></li>
</ul>
<p>If you use <code>readFileSync</code> inside an Express route handler, your server can only handle <strong>one request at a time</strong> while the file is being read. For a 100ms disk read, that's 100ms where every other user is waiting. With 100 concurrent users, the last one waits 10 seconds. Use async.</p>
</div>
<h3>Reading Files</h3>
<div class="example-box">
<div class="label">Three Ways to Read Files</div>
<pre><code>const fs = require("fs");
const fsPromises = require("fs/promises"); // or fs.promises
// 1. Synchronous -- blocks event loop (only use at startup)
const configSync = fs.readFileSync("./config.json", "utf-8");
const config = JSON.parse(configSync);
// 2. Callback -- works but messy
fs.readFile("./data.txt", "utf-8", (err, data) => {
if (err) {
console.error("Failed to read:", err.message);
return;
}
console.log(data);
});
// 3. Promises -- the modern way (use this)
async function loadData() {
try {
const data = await fsPromises.readFile("./data.txt", "utf-8");
console.log(data);
} catch (err) {
if (err.code === "ENOENT") {
console.log("File not found");
} else {
throw err; // Re-throw unexpected errors
}
}
}
// Reading binary files (no encoding = returns Buffer)
// (inside an async function, like loadData above)
const imageBuffer = await fsPromises.readFile("./photo.jpg");
console.log(imageBuffer.length); // Size in bytes
console.log(imageBuffer[0]); // First byte (0xFF for JPEG)</code></pre>
</div>
<h3>Writing Files</h3>
<div class="example-box">
<div class="label">Writing and Appending</div>
<pre><code>const fsPromises = require("fs/promises");
// Write (creates or overwrites)
await fsPromises.writeFile("./output.txt", "Hello, World!", "utf-8");
// Write JSON
const data = { users: 150, active: true };
await fsPromises.writeFile("./data.json", JSON.stringify(data, null, 2));
// Append (adds to end of file)
await fsPromises.appendFile("./log.txt", `[${new Date().toISOString()}] Server started\n`);
// Write binary data
const buffer = Buffer.from([0x89, 0x50, 0x4E, 0x47]); // PNG header
await fsPromises.writeFile("./header.bin", buffer);
// Write with flags
const { open } = require("fs/promises");
const fileHandle = await open("./output.txt", "w"); // 'w' = write, 'a' = append
await fileHandle.write("Line 1\n");
await fileHandle.write("Line 2\n");
await fileHandle.close(); // Always close file handles!</code></pre>
</div>
<h3>The Path Module -- Never Hardcode Paths</h3>
<div class="example-box">
<div class="label">Path Operations You Use Every Day</div>
<pre><code>const path = require("path");
// Joining paths safely (handles slashes for you)
path.join("/users", "sean", "documents", "file.txt");
// "/users/sean/documents/file.txt"
// Resolving to absolute path
path.resolve("./uploads", "photo.jpg");
// "/home/sean/project/uploads/photo.jpg" (from current working dir)
// Getting parts of a path
path.basename("/uploads/photo.jpg"); // "photo.jpg"
path.basename("/uploads/photo.jpg", ".jpg"); // "photo" (without ext)
path.extname("/uploads/photo.jpg"); // ".jpg"
path.dirname("/uploads/photo.jpg"); // "/uploads"
// Parsing a path into its components
path.parse("/home/sean/photo.jpg");
// { root: '/', dir: '/home/sean', base: 'photo.jpg',
// ext: '.jpg', name: 'photo' }
// __dirname and __filename (CommonJS)
console.log(__dirname); // Directory of current file
console.log(__filename); // Full path of current file
// ES Modules equivalent
import { fileURLToPath } from "url";
import { dirname } from "path";
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Why this matters: NEVER do this
const bad = "./uploads/" + userInput; // Path traversal attack!
// User sends "../../../etc/passwd" and reads your system files
// ALWAYS sanitize and resolve
const safePath = path.join("./uploads", path.basename(userInput));
// path.basename strips directory traversal: "../../../etc/passwd" -> "passwd"</code></pre>
</div>
<h3>Directory Operations</h3>
<div class="example-box">
<div class="label">Creating, Reading, and Managing Directories</div>
<pre><code>const fsPromises = require("fs/promises");
const path = require("path");
// Create directory (recursive: true creates parent dirs too)
await fsPromises.mkdir("./uploads/avatars/thumbnails", { recursive: true });
// List directory contents
const files = await fsPromises.readdir("./uploads");
console.log(files); // ["photo1.jpg", "photo2.png", "avatars"]
// List with file type info
const entries = await fsPromises.readdir("./uploads", { withFileTypes: true });
for (const entry of entries) {
if (entry.isFile()) console.log("File:", entry.name);
if (entry.isDirectory()) console.log("Dir:", entry.name);
}
// Check if file/directory exists
async function exists(filePath) {
try {
await fsPromises.access(filePath);
return true;
} catch {
return false;
}
}
// Get file metadata
const stats = await fsPromises.stat("./photo.jpg");
console.log(stats.size); // Size in bytes
console.log(stats.isFile()); // true
console.log(stats.isDirectory()); // false
console.log(stats.mtime); // Last modified date
console.log(stats.birthtime); // Created date
// Delete file
await fsPromises.unlink("./temp/old-file.txt");
// Delete directory (recursive: removes contents too)
await fsPromises.rm("./temp", { recursive: true, force: true });
// Rename / Move file
await fsPromises.rename("./old-name.txt", "./new-name.txt");
await fsPromises.rename("./uploads/temp.jpg", "./uploads/avatars/final.jpg");
// Copy file
await fsPromises.copyFile("./source.jpg", "./backup/source.jpg");
// Watch for changes (useful for dev tools, hot reload)
const { watch } = require("fs/promises");
const watcher = watch("./src", { recursive: true });
for await (const event of watcher) {
console.log(event.eventType, event.filename); // "change" "index.js"
}</code></pre>
</div>
<div class="tip-box">
<div class="label">Common fs Error Codes</div>
<ul>
<li><strong><code>ENOENT</code></strong> -- File or directory not found (most common)</li>
<li><strong><code>EACCES</code></strong> -- Permission denied (check file ownership)</li>
<li><strong><code>EISDIR</code></strong> -- Expected a file but got a directory</li>
<li><strong><code>ENOTDIR</code></strong> -- Expected a directory but got a file</li>
<li><strong><code>EEXIST</code></strong> -- File already exists (when using exclusive create)</li>
<li><strong><code>EMFILE</code></strong> -- Too many open files (you're leaking file handles)</li>
<li><strong><code>ENOSPC</code></strong> -- No space left on device (disk full)</li>
</ul>
</div>
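<div class="example-box">
<div class="label">Branching on err.code -- A Sketch</div>
<p>A minimal sketch of turning the codes above into useful errors; the <code>readConfig</code> helper and messages are illustrative.</p>
<pre><code>const fsPromises = require("fs/promises");
async function readConfig(filePath) {
  try {
    return await fsPromises.readFile(filePath, "utf-8");
  } catch (err) {
    switch (err.code) {
      case "ENOENT": throw new Error(`Config not found: ${filePath}`);
      case "EACCES": throw new Error(`No permission to read: ${filePath}`);
      case "EISDIR": throw new Error(`${filePath} is a directory, not a file`);
      default: throw err; // Unexpected -- let it propagate
    }
  }
}</code></pre>
</div>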
</section>
<!-- ============================================================ -->
<!-- SECTION 3: BUFFERS IN NODE.JS -->
<!-- ============================================================ -->
<section id="buffers">
<h2>3. Buffers in Node.js</h2>
<p>A <strong>Buffer</strong> is Node.js's way of handling raw binary data. Unlike JavaScript strings (which are UTF-16 encoded), Buffers are sequences of raw bytes. When you read a file without specifying an encoding, you get a Buffer. When you receive data over a network socket, it arrives as Buffers. They are everywhere in backend code.</p>
<div class="warning-box">
<div class="label">Buffer vs ArrayBuffer</div>
<p><strong>Buffer</strong> is Node.js-specific. <strong>ArrayBuffer</strong> is the browser standard. Buffer actually extends Uint8Array under the hood, so they share many methods. In Node.js, always use Buffer. In the browser, use ArrayBuffer/TypedArrays. When working in environments that support both (like Deno or modern Node), Buffer is still the conventional choice on the server.</p>
</div>
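<div class="example-box">
<div class="label">Buffer and ArrayBuffer Interop -- A Sketch</div>
<p>A minimal sketch of moving between the two representations in Node.js.</p>
<pre><code>// Buffer is a Uint8Array subclass, so it already works as a view
const buf = Buffer.from("Hello");
console.log(buf instanceof Uint8Array); // true
// Copy out the underlying bytes as a standalone ArrayBuffer
// (small Buffers share a pooled ArrayBuffer, hence byteOffset)
const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
// Wrap an ArrayBuffer as a Buffer (shares memory, no copy)
const fromAb = Buffer.from(ab);
// From a TypedArray, Buffer.from copies the contents
const fromView = Buffer.from(new Uint8Array([72, 105])); // "Hi"</code></pre>
</div>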
<h3>Creating Buffers</h3>
<div class="example-box">
<div class="label">Every Way to Create a Buffer</div>
<pre><code>// From a string (most common)
const buf1 = Buffer.from("Hello, World!", "utf-8");
console.log(buf1); // <Buffer 48 65 6c 6c 6f 2c 20 57 6f 72 6c 64 21>
console.log(buf1.length); // 13 bytes
// From an array of bytes
const buf2 = Buffer.from([72, 101, 108, 108, 111]); // "Hello"
// From hex string
const buf3 = Buffer.from("48656c6c6f", "hex"); // "Hello"
// From base64
const buf4 = Buffer.from("SGVsbG8=", "base64"); // "Hello"
// Allocate empty buffer (filled with zeros)
const buf5 = Buffer.alloc(1024); // 1KB of zeros -- SAFE
const buf6 = Buffer.allocUnsafe(1024); // 1KB, may contain old memory data -- FAST
// Why allocUnsafe exists: alloc fills memory with zeros (takes time).
// allocUnsafe skips zeroing (faster) but may expose data from
// previously freed memory. Use alloc for security-sensitive data,
// allocUnsafe when you'll immediately overwrite all bytes.</code></pre>
</div>
<h3>Reading From Buffers</h3>
<div class="example-box">
<div class="label">Extracting Data from Buffers</div>
<pre><code>const buf = Buffer.from("Hello, World!");
// Convert to string
buf.toString("utf-8"); // "Hello, World!"
buf.toString("hex"); // "48656c6c6f2c20576f726c6421"
buf.toString("base64"); // "SGVsbG8sIFdvcmxkIQ=="
// Read individual bytes
buf[0]; // 72 (the byte value of 'H')
buf[1]; // 101 ('e')
// Slice (does NOT copy -- shares memory! Deprecated; prefer subarray)
const slice = buf.slice(0, 5); // Buffer: "Hello"
slice[0] = 74; // Changes BOTH buf and slice!
console.log(buf.toString()); // "Jello, World!" -- buf was modified!
// subarray (same as slice, shares memory)
const sub = buf.subarray(7, 12); // Buffer: "World"
// To get an independent copy, use Buffer.from(slice)
const copy = Buffer.from(buf.slice(0, 5)); // Independent copy
// Reading numbers from binary data (network protocols, file formats)
const data = Buffer.alloc(8);
data.writeUInt32BE(0x12345678, 0); // Write 4-byte big-endian at offset 0
data.writeUInt16LE(0xABCD, 4); // Write 2-byte little-endian at offset 4
data.readUInt32BE(0); // 0x12345678
data.readUInt16LE(4); // 0xABCD
// BE = Big Endian (most significant byte first) -- network byte order
// LE = Little Endian (least significant byte first) -- x86 CPUs</code></pre>
</div>
<h3>Buffer Operations</h3>
<div class="example-box">
<div class="label">Common Buffer Manipulations</div>
<pre><code>// Concatenating buffers
const part1 = Buffer.from("Hello, ");
const part2 = Buffer.from("World!");
const combined = Buffer.concat([part1, part2]);
// <Buffer 48 65 6c 6c 6f 2c 20 57 6f 72 6c 64 21>
// Comparing buffers
const a = Buffer.from("abc");
const b = Buffer.from("abc");
const c = Buffer.from("abd");
a.equals(b); // true (same content)
a.equals(c); // false
Buffer.compare(a, c); // -1 (a comes before c)
// Searching in buffers
const buf = Buffer.from("Hello, World!");
buf.indexOf("World"); // 7 (byte offset)
buf.includes("World"); // true
buf.indexOf(0x57); // 7 (byte value of 'W')
// Filling a buffer
const zeroed = Buffer.alloc(10);
zeroed.fill(0xFF); // All bytes set to 255
zeroed.fill("ab"); // Repeating pattern: 61 62 61 62 61 62...
// Iterating over bytes
for (const byte of buf) {
process.stdout.write(byte.toString(16) + " ");
}
// 48 65 6c 6c 6f 2c 20 57 6f 72 6c 64 21</code></pre>
</div>
<div class="tip-box">
<div class="label">When You Use Buffers in Real Backend Code</div>
<ul>
<li><strong>File I/O:</strong> <code>fs.readFile(path)</code> without encoding returns a Buffer</li>
<li><strong>Crypto:</strong> <code>crypto.randomBytes(32)</code> returns a Buffer (for tokens, salts, IVs) -- sketched below</li>
<li><strong>Network:</strong> TCP sockets receive data as Buffers</li>
<li><strong>Image processing:</strong> Libraries like Sharp take and return Buffers</li>
<li><strong>Hashing:</strong> <code>crypto.createHash("sha256").update(buf).digest()</code> works with Buffers</li>
<li><strong>Protocol parsing:</strong> Reading binary protocols (WebSocket frames, HTTP/2, database wire protocols)</li>
</ul>
</div>
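<div class="example-box">
<div class="label">Buffers from crypto -- Tokens and Hashes</div>
<p>A minimal sketch of the crypto uses listed above; the token length and hash input are illustrative.</p>
<pre><code>const crypto = require("crypto");
// Random token: 32 raw bytes -> 64-character hex string
const token = crypto.randomBytes(32).toString("hex");
// SHA-256: update() accepts a Buffer, digest() returns one
const fileBuf = Buffer.from("file contents here");
const hash = crypto.createHash("sha256").update(fileBuf).digest("hex");
console.log(token.length); // 64
console.log(hash.length); // 64 (32 bytes as hex)</code></pre>
</div>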
</section>
<!-- ============================================================ -->
<!-- SECTION 4: STREAMS IN NODE.JS -->
<!-- ============================================================ -->
<section id="streams">
<h2>4. Streams in Node.js</h2>
<p>Streams let you process data <strong>piece by piece</strong> instead of loading everything into memory at once. This is the difference between an app that crashes on a 2GB file and one that handles it effortlessly with 50MB of RAM. Streams are the backbone of scalable Node.js.</p>
<div class="warning-box">
<div class="label">Why Streams Are Non-Negotiable for Scale</div>
<p>Imagine 100 users simultaneously uploading 100MB files.</p>
<ul>
<li><strong>Without streams:</strong> <code>readFile()</code> loads each entire file into memory. 100 users x 100MB = 10GB RAM. Your server crashes.</li>
<li><strong>With streams:</strong> Each upload processes in small chunks (16KB-64KB at a time). 100 users x 64KB = 6.4MB RAM. Your server is fine.</li>
</ul>
<p>Streams are not an optimization. They are how you build software that doesn't fall over under load. The sketch below shows how to watch this yourself.</p>
</div>
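<div class="example-box">
<div class="label">Watching Memory Stay Flat -- A Sketch</div>
<p>A rough way to see the claim above for yourself; <code>./big.bin</code> is a hypothetical multi-GB file.</p>
<pre><code>const fs = require("fs");
let bytes = 0;
const stream = fs.createReadStream("./big.bin");
stream.on("data", (chunk) => { bytes += chunk.length; });
stream.on("end", () => {
  const rss = Math.round(process.memoryUsage().rss / 1024 / 1024);
  console.log(`Read ${bytes} bytes with ~${rss}MB resident memory`);
});</code></pre>
</div>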
<h3>The Four Types of Streams</h3>
<div class="example-box">
<div class="label">Stream Types and Real-World Examples</div>
<pre><code>// 1. READABLE -- Data comes OUT of it (source)
// Examples: fs.createReadStream, HTTP request body, process.stdin
const readable = fs.createReadStream("./bigfile.csv");
// 2. WRITABLE -- Data goes INTO it (destination)
// Examples: fs.createWriteStream, HTTP response, process.stdout
const writable = fs.createWriteStream("./output.txt");
// 3. TRANSFORM -- Data goes in, different data comes out (processor)
// Examples: zlib.createGzip(), crypto.createCipheriv(), CSV parser
const gzip = zlib.createGzip();
// 4. DUPLEX -- Both readable AND writable (independent)
// Examples: TCP sockets, WebSockets
const socket = new net.Socket();</code></pre>
</div>
<h3>Reading with Streams</h3>
<div class="example-box">
<div class="label">Processing Large Files Without Loading Them Entirely</div>
<pre><code>const fs = require("fs");
// Create a readable stream (default chunk size: 64KB)
const stream = fs.createReadStream("./server.log", {
encoding: "utf-8",
highWaterMark: 64 * 1024, // 64KB chunks (default)
});
// Event-based reading
stream.on("data", (chunk) => {
console.log(`Received ${chunk.length} bytes`);
// Process each chunk -- this fires many times for large files
});
stream.on("end", () => {
console.log("Done reading");
});
stream.on("error", (err) => {
console.error("Read error:", err.message);
});
// Modern async iteration (cleaner -- use this)
async function processFile() {
const stream = fs.createReadStream("./server.log", { encoding: "utf-8" });
for await (const chunk of stream) {
// Process each chunk
const lines = chunk.split("\n");
for (const line of lines) {
if (line.includes("ERROR")) {
console.log("Found error:", line);
}
}
}
}</code></pre>
</div>
<h3>Writing with Streams</h3>
<div class="example-box">
<div class="label">Writing Large Amounts of Data Efficiently</div>
<pre><code>const fs = require("fs");
const writeStream = fs.createWriteStream("./output.csv");
// Write header
writeStream.write("id,name,email\n");
// Write 1 million rows without running out of memory
for (let i = 0; i < 1_000_000; i++) {
const row = `${i},user_${i},user${i}@example.com\n`;
// write() returns false when internal buffer is full
const canContinue = writeStream.write(row);
if (!canContinue) {
// BACKPRESSURE: internal buffer is full
// Wait for it to drain before writing more
await new Promise((resolve) => writeStream.once("drain", resolve));
}
}
// Signal that we're done writing
writeStream.end();
// Wait for all data to be flushed to disk
writeStream.on("finish", () => {
console.log("All data written to disk");
});</code></pre>
</div>
<h3>Piping -- Connecting Streams Together</h3>
<div class="example-box">
<div class="label">pipe() is the Most Important Stream Method</div>
<pre><code>const fs = require("fs");
const zlib = require("zlib");
const crypto = require("crypto");
// Simple copy: read from one file, write to another
fs.createReadStream("./input.txt")
.pipe(fs.createWriteStream("./copy.txt"));
// Compress a file: read -> gzip -> write
fs.createReadStream("./bigfile.log")
.pipe(zlib.createGzip())
.pipe(fs.createWriteStream("./bigfile.log.gz"));
// Chain multiple transforms: read -> encrypt -> compress -> write
// (assumes key and iv were created earlier, e.g. with crypto.randomBytes)
fs.createReadStream("./sensitive.json")
.pipe(crypto.createCipheriv("aes-256-cbc", key, iv))
.pipe(zlib.createGzip())
.pipe(fs.createWriteStream("./sensitive.json.enc.gz"));
// Modern: pipeline() with error handling (use this over pipe)
const { pipeline } = require("stream/promises");
async function compressFile(input, output) {
await pipeline(
fs.createReadStream(input),
zlib.createGzip(),
fs.createWriteStream(output)
);
console.log("Compression complete");
// pipeline automatically handles errors and cleanup
}
// HTTP streaming -- serve large files without buffering
app.get("/download/:filename", async (req, res) => {
  // basename strips path traversal; async stat avoids blocking the event loop
  const filePath = path.join("./files", path.basename(req.params.filename));
  const stat = await fs.promises.stat(filePath);
  res.writeHead(200, {
    "Content-Type": "application/octet-stream",
    "Content-Length": stat.size,
    "Content-Disposition": `attachment; filename="${req.params.filename}"`,
  });
  // Stream the file to the client -- constant memory usage regardless of file size
  fs.createReadStream(filePath).pipe(res);
});</code></pre>
</div>
<h3>Transform Streams -- Processing Data as It Flows</h3>
<div class="example-box">
<div class="label">Building Custom Transform Streams</div>
<pre><code>const { Transform } = require("stream");
// Transform that converts text to uppercase
const upperCase = new Transform({
transform(chunk, encoding, callback) {
// chunk is a Buffer, convert to string, transform, push out
this.push(chunk.toString().toUpperCase());
callback(); // Signal that we're done processing this chunk
}
});
// Use it in a pipeline
fs.createReadStream("./input.txt")
.pipe(upperCase)
.pipe(fs.createWriteStream("./UPPER_OUTPUT.txt"));
// Transform that filters lines (e.g., only errors from a log)
// (simplified: assumes chunks split on line boundaries -- see the CSV
// parser below for carrying partial lines across chunks)
const errorFilter = new Transform({
objectMode: false,
transform(chunk, encoding, callback) {
const lines = chunk.toString().split("\n");
const errors = lines.filter(line => line.includes("ERROR"));
if (errors.length > 0) {
this.push(errors.join("\n") + "\n");
}
callback();
}
});
// Real-world: CSV parser as a transform stream
const csvParser = new Transform({
  objectMode: true, // Push JS objects instead of buffers
  transform(chunk, encoding, callback) {
    // A chunk can end mid-line, so carry the partial last line over
    this._leftover = (this._leftover || "") + chunk.toString();
    const lines = this._leftover.split("\n");
    this._leftover = lines.pop(); // May be an incomplete line
    for (const line of lines) {
      if (line.trim()) {
        const [id, name, email] = line.split(",");
        this.push({ id, name, email }); // Push JS object
      }
    }
    callback();
  },
  flush(callback) {
    // Emit the final line if the file didn't end with a newline
    if (this._leftover && this._leftover.trim()) {
      const [id, name, email] = this._leftover.split(",");
      this.push({ id, name, email });
    }
    callback();
  }
});</code></pre>
</div>
<div class="warning-box">
<div class="label">Backpressure -- The #1 Stream Concept Developers Miss</div>
<p><strong>Backpressure</strong> happens when a writable stream can't keep up with a readable stream. If the reader produces data faster than the writer can consume it, data buffers in memory and eventually crashes your process.</p>
<pre><code>// BAD: ignoring backpressure
readable.on("data", (chunk) => {
writable.write(chunk); // What if writable is slow? Memory grows unbounded!
});
// GOOD: pipe() handles backpressure automatically