forked from nao1215/filesql
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathfile.go
More file actions
656 lines (584 loc) · 17.1 KB
/
file.go
File metadata and controls
656 lines (584 loc) · 17.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
package filesql
import (
"bytes"
"encoding/csv"
"fmt"
"io"
"path/filepath"
"strings"
"github.com/xuri/excelize/v2"
)
// FileType represents supported file types including compression variants.
//
// NOTE(review): values are assigned by iota, so declaration order is
// load-bearing if the numeric values are ever persisted or compared by
// number — add new types with care (FileTypeUnsupported must stay last so
// it keeps marking the end of the supported range).
type FileType int

const (
	// FileTypeCSV represents CSV file type
	FileTypeCSV FileType = iota
	// FileTypeTSV represents TSV file type
	FileTypeTSV
	// FileTypeLTSV represents LTSV file type
	FileTypeLTSV
	// FileTypeParquet represents Parquet file type
	FileTypeParquet
	// FileTypeXLSX represents Excel XLSX file type
	FileTypeXLSX
	// FileTypeCSVGZ represents gzip-compressed CSV file type
	FileTypeCSVGZ
	// FileTypeTSVGZ represents gzip-compressed TSV file type
	FileTypeTSVGZ
	// FileTypeLTSVGZ represents gzip-compressed LTSV file type
	FileTypeLTSVGZ
	// FileTypeParquetGZ represents gzip-compressed Parquet file type
	FileTypeParquetGZ
	// FileTypeCSVBZ2 represents bzip2-compressed CSV file type
	FileTypeCSVBZ2
	// FileTypeTSVBZ2 represents bzip2-compressed TSV file type
	FileTypeTSVBZ2
	// FileTypeLTSVBZ2 represents bzip2-compressed LTSV file type
	FileTypeLTSVBZ2
	// FileTypeParquetBZ2 represents bzip2-compressed Parquet file type
	FileTypeParquetBZ2
	// FileTypeCSVXZ represents xz-compressed CSV file type
	FileTypeCSVXZ
	// FileTypeTSVXZ represents xz-compressed TSV file type
	FileTypeTSVXZ
	// FileTypeLTSVXZ represents xz-compressed LTSV file type
	FileTypeLTSVXZ
	// FileTypeParquetXZ represents xz-compressed Parquet file type
	FileTypeParquetXZ
	// FileTypeCSVZSTD represents zstd-compressed CSV file type
	FileTypeCSVZSTD
	// FileTypeTSVZSTD represents zstd-compressed TSV file type
	FileTypeTSVZSTD
	// FileTypeLTSVZSTD represents zstd-compressed LTSV file type
	FileTypeLTSVZSTD
	// FileTypeParquetZSTD represents zstd-compressed Parquet file type
	FileTypeParquetZSTD
	// FileTypeXLSXGZ represents gzip-compressed Excel XLSX file type
	FileTypeXLSXGZ
	// FileTypeXLSXBZ2 represents bzip2-compressed Excel XLSX file type
	FileTypeXLSXBZ2
	// FileTypeXLSXXZ represents xz-compressed Excel XLSX file type
	FileTypeXLSXXZ
	// FileTypeXLSXZSTD represents zstd-compressed Excel XLSX file type
	FileTypeXLSXZSTD
	// FileTypeUnsupported represents unsupported file type
	FileTypeUnsupported
)
// File extensions recognized by this package. Compression extensions are
// appended after a base extension (e.g. "data.csv.gz").
const (
	// extCSV is the CSV file extension
	extCSV = ".csv"
	// extTSV is the TSV file extension
	extTSV = ".tsv"
	// extLTSV is the LTSV file extension
	extLTSV = ".ltsv"
	// extParquet is the Parquet file extension
	extParquet = ".parquet"
	// extXLSX is the Excel XLSX file extension
	extXLSX = ".xlsx"
	// extGZ is the gzip compression extension
	extGZ = ".gz"
	// extBZ2 is the bzip2 compression extension
	extBZ2 = ".bz2"
	// extXZ is the xz compression extension
	extXZ = ".xz"
	// extZSTD is the zstd compression extension (note: ".zst", not ".zstd")
	extZSTD = ".zst"
)
// file represents a single input file that can be converted to a table.
type file struct {
	path     string   // filesystem path as given by the caller
	fileType FileType // detected from the path's extension(s) at construction
}
// tableChunk represents a chunk of table data for streaming processing.
type tableChunk struct {
	tableName  string       // destination table name
	headers    header       // column names; shared by every chunk of a table
	records    []Record     // the rows carried by this chunk
	columnInfo []columnInfo // per-column metadata with inferred types
}

// getTableName returns the name of the table this chunk belongs to.
func (tc *tableChunk) getTableName() string {
	return tc.tableName
}

// getHeaders returns the table headers (column names).
func (tc *tableChunk) getHeaders() header {
	return tc.headers
}

// getRecords returns the records in this chunk.
func (tc *tableChunk) getRecords() []Record {
	return tc.records
}

// getColumnInfo returns the column information with inferred types.
func (tc *tableChunk) getColumnInfo() []columnInfo {
	return tc.columnInfo
}

// chunkProcessor is a callback invoked once per parsed chunk.
// NOTE(review): presumably a non-nil error stops the streaming run —
// confirm against the call sites (not visible in this file section).
type chunkProcessor func(chunk *tableChunk) error
// streamingParser represents a parser that can read from io.Reader directly
// instead of materializing the whole file in memory.
// NOTE(review): chunkSize presumably bounds the number of rows per emitted
// tableChunk — confirm in the parser implementation.
type streamingParser struct {
	fileType    FileType
	tableName   string
	chunkSize   ChunkSize
	memoryPool  *MemoryPool  // Pool for reusable memory allocations
	memoryLimit *MemoryLimit // Configurable memory limits
}
// newFile builds a file value for path, detecting its FileType from the
// path's extension(s).
func newFile(path string) *file {
	f := &file{path: path}
	f.fileType = detectFileType(path)
	return f
}
// supportedFileExtPatterns returns every glob pattern matching a supported
// file: each base format extension combined with each optional compression
// extension (e.g. "*.csv", "*.csv.gz", ..., "*.xlsx.zst").
func supportedFileExtPatterns() []string {
	baseExts := []string{extCSV, extTSV, extLTSV, extParquet, extXLSX}
	compressionExts := []string{"", extGZ, extBZ2, extXZ, extZSTD}
	// Pre-size: the result is exactly the cross product of the two lists.
	patterns := make([]string, 0, len(baseExts)*len(compressionExts))
	for _, baseExt := range baseExts {
		for _, compressionExt := range compressionExts {
			patterns = append(patterns, "*"+baseExt+compressionExt)
		}
	}
	return patterns
}
// isSupportedFile reports whether fileName ends in a supported base
// extension, optionally followed by one supported compression extension.
// Matching is case-insensitive.
func isSupportedFile(fileName string) bool {
	name := strings.ToLower(fileName)
	// Strip at most one compression extension so the base extension is exposed.
	for _, compExt := range []string{extGZ, extBZ2, extXZ, extZSTD} {
		if strings.HasSuffix(name, compExt) {
			name = strings.TrimSuffix(name, compExt)
			break
		}
	}
	for _, baseExt := range []string{extCSV, extTSV, extLTSV, extParquet, extXLSX} {
		if strings.HasSuffix(name, baseExt) {
			return true
		}
	}
	return false
}
// isSupportedExtension reports whether the given extension is supported.
// The extension should start with a dot and may be compound (e.g. ".csv",
// ".tsv.gz"); matching is case-insensitive.
func isSupportedExtension(ext string) bool {
	// Reuse the file-name check by attaching the extension to a dummy name.
	return isSupportedFile("file" + strings.ToLower(ext))
}
// fileTypeExtensions maps every supported FileType to its full file
// extension, including the compression suffix where present.
var fileTypeExtensions = map[FileType]string{
	FileTypeCSV:         extCSV,
	FileTypeTSV:         extTSV,
	FileTypeLTSV:        extLTSV,
	FileTypeParquet:     extParquet,
	FileTypeXLSX:        extXLSX,
	FileTypeCSVGZ:       extCSV + extGZ,
	FileTypeTSVGZ:       extTSV + extGZ,
	FileTypeLTSVGZ:      extLTSV + extGZ,
	FileTypeParquetGZ:   extParquet + extGZ,
	FileTypeCSVBZ2:      extCSV + extBZ2,
	FileTypeTSVBZ2:      extTSV + extBZ2,
	FileTypeLTSVBZ2:     extLTSV + extBZ2,
	FileTypeParquetBZ2:  extParquet + extBZ2,
	FileTypeCSVXZ:       extCSV + extXZ,
	FileTypeTSVXZ:       extTSV + extXZ,
	FileTypeLTSVXZ:      extLTSV + extXZ,
	FileTypeParquetXZ:   extParquet + extXZ,
	FileTypeCSVZSTD:     extCSV + extZSTD,
	FileTypeTSVZSTD:     extTSV + extZSTD,
	FileTypeLTSVZSTD:    extLTSV + extZSTD,
	FileTypeParquetZSTD: extParquet + extZSTD,
	FileTypeXLSXGZ:      extXLSX + extGZ,
	FileTypeXLSXBZ2:     extXLSX + extBZ2,
	FileTypeXLSXXZ:      extXLSX + extXZ,
	FileTypeXLSXZSTD:    extXLSX + extZSTD,
}

// extension returns the full file extension for the FileType, e.g. ".csv"
// or ".csv.gz". Unknown or unsupported types yield the empty string (the
// map's zero value).
func (ft FileType) extension() string {
	return fileTypeExtensions[ft]
}
// fileTypeBaseTypes maps each FileType (plain or compressed) to its
// uncompressed base format.
var fileTypeBaseTypes = map[FileType]FileType{
	FileTypeCSV: FileTypeCSV, FileTypeCSVGZ: FileTypeCSV, FileTypeCSVBZ2: FileTypeCSV, FileTypeCSVXZ: FileTypeCSV, FileTypeCSVZSTD: FileTypeCSV,
	FileTypeTSV: FileTypeTSV, FileTypeTSVGZ: FileTypeTSV, FileTypeTSVBZ2: FileTypeTSV, FileTypeTSVXZ: FileTypeTSV, FileTypeTSVZSTD: FileTypeTSV,
	FileTypeLTSV: FileTypeLTSV, FileTypeLTSVGZ: FileTypeLTSV, FileTypeLTSVBZ2: FileTypeLTSV, FileTypeLTSVXZ: FileTypeLTSV, FileTypeLTSVZSTD: FileTypeLTSV,
	FileTypeParquet: FileTypeParquet, FileTypeParquetGZ: FileTypeParquet, FileTypeParquetBZ2: FileTypeParquet, FileTypeParquetXZ: FileTypeParquet, FileTypeParquetZSTD: FileTypeParquet,
	FileTypeXLSX: FileTypeXLSX, FileTypeXLSXGZ: FileTypeXLSX, FileTypeXLSXBZ2: FileTypeXLSX, FileTypeXLSXXZ: FileTypeXLSX, FileTypeXLSXZSTD: FileTypeXLSX,
}

// baseType returns the base file type with any compression stripped.
// Types not present in the table map to FileTypeUnsupported.
func (ft FileType) baseType() FileType {
	if base, ok := fileTypeBaseTypes[ft]; ok {
		return base
	}
	return FileTypeUnsupported
}
// getFileExtension returns the file extension for a given FileType.
//
// Deprecated: Use FileType.extension() method instead.
func getFileExtension(fileType FileType) string {
	return fileType.extension()
}

// getBaseFileType returns the base file type without compression.
//
// Deprecated: Use FileType.baseType() method instead.
func getBaseFileType(fileType FileType) FileType {
	return fileType.baseType()
}
// getPath returns the file path this instance was constructed with.
func (f *file) getPath() string {
	return f.path
}

// getFileType returns the FileType detected when the file was constructed.
func (f *file) getFileType() FileType {
	return f.fileType
}
// isCSV reports whether the file's base (uncompressed) format is CSV.
func (f *file) isCSV() bool {
	return f.fileType.baseType() == FileTypeCSV
}

// isTSV reports whether the file's base (uncompressed) format is TSV.
func (f *file) isTSV() bool {
	return f.fileType.baseType() == FileTypeTSV
}

// isLTSV reports whether the file's base (uncompressed) format is LTSV.
func (f *file) isLTSV() bool {
	return f.fileType.baseType() == FileTypeLTSV
}

// isXLSX reports whether the file's base (uncompressed) format is XLSX.
func (f *file) isXLSX() bool {
	return f.fileType.baseType() == FileTypeXLSX
}
// isCompressed reports whether the file carries any supported compression
// extension (gzip, bzip2, xz, or zstd).
func (f *file) isCompressed() bool {
	return f.isGZ() || f.isBZ2() || f.isXZ() || f.isZSTD()
}

// isGZ reports whether the file is gzip compressed. The check is
// case-insensitive, consistent with isSupportedFile.
func (f *file) isGZ() bool {
	return strings.HasSuffix(strings.ToLower(f.path), extGZ)
}

// isBZ2 reports whether the file is bzip2 compressed (case-insensitive).
func (f *file) isBZ2() bool {
	return strings.HasSuffix(strings.ToLower(f.path), extBZ2)
}

// isXZ reports whether the file is xz compressed (case-insensitive).
func (f *file) isXZ() bool {
	return strings.HasSuffix(strings.ToLower(f.path), extXZ)
}

// isZSTD reports whether the file is zstd compressed (case-insensitive).
func (f *file) isZSTD() bool {
	return strings.HasSuffix(strings.ToLower(f.path), extZSTD)
}
// toTable materializes the whole file as a single table, dispatching on
// the file's base (uncompressed) format.
func (f *file) toTable() (*table, error) {
	base := f.getFileType().baseType()
	switch base {
	case FileTypeCSV:
		return f.parseCSV()
	case FileTypeTSV:
		return f.parseTSV()
	case FileTypeLTSV:
		return f.parseLTSV()
	case FileTypeParquet:
		return f.parseParquet()
	case FileTypeXLSX:
		return f.parseXLSX()
	}
	return nil, fmt.Errorf("unsupported file type: %s", f.getPath())
}
// detectFileType detects the FileType from a path's extension, taking an
// optional trailing compression extension into account.
//
// Matching is case-insensitive for both the compression suffix and the
// base extension. Previously only the base extension was lowercased, so
// names like "data.csv.GZ" were classified as unsupported even though
// isSupportedFile accepts them.
func detectFileType(path string) FileType {
	// Lowercase once so every suffix comparison below is case-insensitive.
	basePath := strings.ToLower(path)
	var compressionType string
	switch {
	case strings.HasSuffix(basePath, extGZ):
		basePath = strings.TrimSuffix(basePath, extGZ)
		compressionType = compressionGZStr
	case strings.HasSuffix(basePath, extBZ2):
		basePath = strings.TrimSuffix(basePath, extBZ2)
		compressionType = compressionBZ2Str
	case strings.HasSuffix(basePath, extXZ):
		basePath = strings.TrimSuffix(basePath, extXZ)
		compressionType = compressionXZStr
	case strings.HasSuffix(basePath, extZSTD):
		basePath = strings.TrimSuffix(basePath, extZSTD)
		compressionType = compressionZSTDStr
	}
	switch filepath.Ext(basePath) {
	case extCSV:
		switch compressionType {
		case compressionGZStr:
			return FileTypeCSVGZ
		case compressionBZ2Str:
			return FileTypeCSVBZ2
		case compressionXZStr:
			return FileTypeCSVXZ
		case compressionZSTDStr:
			return FileTypeCSVZSTD
		default:
			return FileTypeCSV
		}
	case extTSV:
		switch compressionType {
		case compressionGZStr:
			return FileTypeTSVGZ
		case compressionBZ2Str:
			return FileTypeTSVBZ2
		case compressionXZStr:
			return FileTypeTSVXZ
		case compressionZSTDStr:
			return FileTypeTSVZSTD
		default:
			return FileTypeTSV
		}
	case extLTSV:
		switch compressionType {
		case compressionGZStr:
			return FileTypeLTSVGZ
		case compressionBZ2Str:
			return FileTypeLTSVBZ2
		case compressionXZStr:
			return FileTypeLTSVXZ
		case compressionZSTDStr:
			return FileTypeLTSVZSTD
		default:
			return FileTypeLTSV
		}
	case extParquet:
		switch compressionType {
		case compressionGZStr:
			return FileTypeParquetGZ
		case compressionBZ2Str:
			return FileTypeParquetBZ2
		case compressionXZStr:
			return FileTypeParquetXZ
		case compressionZSTDStr:
			return FileTypeParquetZSTD
		default:
			return FileTypeParquet
		}
	case extXLSX:
		switch compressionType {
		case compressionGZStr:
			return FileTypeXLSXGZ
		case compressionBZ2Str:
			return FileTypeXLSXBZ2
		case compressionXZStr:
			return FileTypeXLSXXZ
		case compressionZSTDStr:
			return FileTypeXLSXZSTD
		default:
			return FileTypeXLSX
		}
	default:
		return FileTypeUnsupported
	}
}
// openReader opens the file and returns a reader that transparently handles
// the file's compression, plus a close function the caller must invoke when
// done reading.
func (f *file) openReader() (io.Reader, func() error, error) {
	factory := NewCompressionFactory()
	return factory.CreateReaderForFile(f.path)
}
// parseDelimitedFile reads the whole file through the compression-aware
// reader and parses it as a delimiter-separated table (CSV or TSV,
// depending on the delimiter).
func (f *file) parseDelimitedFile(delimiter rune) (*table, error) {
	reader, closer, err := f.openReader()
	if err != nil {
		return nil, err
	}
	defer closer()

	csvReader := csv.NewReader(reader)
	csvReader.Comma = delimiter
	rows, err := csvReader.ReadAll()
	if err != nil {
		return nil, err
	}
	if len(rows) == 0 {
		return nil, fmt.Errorf("empty file: %s", f.path)
	}

	// The first row is the header; duplicate column names are rejected.
	hdr := newHeader(rows[0])
	if err := validateColumnNames(rows[0]); err != nil {
		return nil, err
	}

	dataRows := rows[1:]
	tableRecords := make([]Record, 0, len(dataRows))
	for _, row := range dataRows {
		tableRecords = append(tableRecords, newRecord(row))
	}

	return newTable(sanitizeTableName(tableFromFilePath(f.path)), hdr, tableRecords), nil
}
// parseCSV parses a CSV file (optionally compressed) into a table,
// delegating to parseDelimitedFile with the package's CSV delimiter.
func (f *file) parseCSV() (*table, error) {
	return f.parseDelimitedFile(csvDelimiter)
}

// parseTSV parses a TSV file (optionally compressed) into a table,
// delegating to parseDelimitedFile with the package's TSV delimiter.
func (f *file) parseTSV() (*table, error) {
	return f.parseDelimitedFile(tsvDelimiter)
}
// parseLTSV parses an LTSV (labeled TSV) file, optionally compressed, into
// a table. Each non-empty line contributes one record; the table's column
// set is the union of all labels seen across lines, and rows missing a
// label get an empty string for that column.
//
// Fix: the header used to be built by ranging over a map, which made the
// column order nondeterministic between runs. Columns now appear in
// first-seen order. (The former `len(lines) == 0` check was dead code:
// strings.Split never returns an empty slice.)
func (f *file) parseLTSV() (*table, error) {
	reader, closer, err := f.openReader()
	if err != nil {
		return nil, err
	}
	defer closer()

	content, err := io.ReadAll(reader)
	if err != nil {
		return nil, err
	}

	seen := make(map[string]bool)
	var columns header // label names in first-seen order
	var rawRecords []map[string]string
	for _, line := range strings.Split(string(content), "\n") {
		line = strings.TrimSpace(line)
		if line == "" {
			continue
		}
		record := make(map[string]string)
		for _, pair := range strings.Split(line, "\t") {
			kv := strings.SplitN(pair, ":", 2)
			if len(kv) != 2 {
				continue // not a label:value pair; skipped silently as before
			}
			key := strings.TrimSpace(kv[0])
			record[key] = strings.TrimSpace(kv[1])
			if !seen[key] {
				seen[key] = true
				columns = append(columns, key)
			}
		}
		if len(record) > 0 {
			rawRecords = append(rawRecords, record)
		}
	}
	if len(rawRecords) == 0 {
		return nil, fmt.Errorf("no valid records found: %s", f.path)
	}

	tableRecords := make([]Record, 0, len(rawRecords))
	for _, recordMap := range rawRecords {
		row := make(Record, 0, len(columns))
		for _, key := range columns {
			// A missing label yields the map's zero value "", keeping every
			// row the same width as the header.
			row = append(row, recordMap[key])
		}
		tableRecords = append(tableRecords, row)
	}

	tableName := sanitizeTableName(tableFromFilePath(f.path))
	return newTable(tableName, columns, tableRecords), nil
}
// parseXLSX parses an XLSX workbook into a single table. Only the first
// sheet is used; for 1-sheet-1-table handling of multi-sheet workbooks,
// use filesql.Open() or filesql.OpenContext() instead.
//
// Improvement: the uncompressed path previously called openReader() and
// then ignored the reader, opening the same file a second time through
// excelize.OpenFile. The decompressing reader is now only opened when the
// file is actually compressed.
func (f *file) parseXLSX() (*table, error) {
	var xlsxFile *excelize.File
	if f.isCompressed() {
		// excelize needs a path or an in-memory payload, so decompress the
		// whole workbook into memory first.
		reader, closer, err := f.openReader()
		if err != nil {
			return nil, err
		}
		defer closer()
		data, err := io.ReadAll(reader)
		if err != nil {
			return nil, err
		}
		xlsxFile, err = excelize.OpenReader(bytes.NewReader(data))
		if err != nil {
			return nil, err
		}
	} else {
		// Uncompressed workbooks can be opened directly by path.
		var err error
		xlsxFile, err = excelize.OpenFile(f.path)
		if err != nil {
			return nil, err
		}
	}
	defer func() {
		_ = xlsxFile.Close() // best-effort close; nothing useful to do on error
	}()

	sheetNames := xlsxFile.GetSheetList()
	if len(sheetNames) == 0 {
		return nil, fmt.Errorf("no sheets found in Excel file: %s", f.path)
	}

	// Single-table parsing only looks at the first sheet; remaining sheets
	// are intentionally ignored here.
	sheetName := sheetNames[0]
	rows, err := xlsxFile.GetRows(sheetName)
	if err != nil {
		return nil, fmt.Errorf("failed to read sheet %s: %w", sheetName, err)
	}
	if len(rows) == 0 {
		return nil, fmt.Errorf("sheet %s is empty in Excel file: %s", sheetName, f.path)
	}

	headers, records := convertXLSXRowsToTable(rows)
	tableName := sanitizeTableName(tableFromFilePath(f.path))
	return newTable(tableName, headers, records), nil
}
// convertXLSXRowsToTable splits sheet rows into a header (the first row)
// and data records (the remaining rows). Rows shorter than the header are
// padded with empty strings; extra trailing cells are dropped. An empty
// input yields nil header and records; a header-only input yields nil
// records.
func convertXLSXRowsToTable(rows [][]string) (header, []Record) {
	if len(rows) == 0 {
		return nil, nil
	}

	headers := make(header, len(rows[0]))
	copy(headers, rows[0])
	if len(rows) == 1 {
		return headers, nil
	}

	records := make([]Record, len(rows)-1)
	for i, row := range rows[1:] {
		rec := make(Record, len(headers))
		for j := range headers {
			if j < len(row) {
				rec[j] = row[j]
			} else {
				rec[j] = "" // pad short rows to the header width
			}
		}
		records[i] = rec
	}
	return headers, records
}