Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 46 additions & 0 deletions dev/config/examples/azure-debug.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
# One-time setup statements executed against DuckDB before the pipeline starts.
commands:
  - name: load extensions
    # Installs and loads the DuckDB azure extension needed by the sink's COPY TO abfss://.
    sql: |
      INSTALL azure;
      LOAD azure;
  - name: create adls secret
    # Registers a DuckDB secret the azure extension uses to authenticate to ADLS.
    # CHAIN 'default;cli;env' tries each credential provider in that order.
    # ACCOUNT_NAME is template-substituted — presumably from the
    # SQLFLOW_AZURE_ACCOUNT_NAME environment variable; verify against the loader.
    sql: |
      CREATE SECRET adls_conn (
          TYPE azure,
          PROVIDER credential_chain,
          CHAIN 'default;cli;env',
          ACCOUNT_NAME '{{ SQLFLOW_AZURE_ACCOUNT_NAME }}'
      );

# Pipeline: consume from Kafka, derive a date partition column, write parquet to ADLS.
pipeline:
  name: kafka-azure-duckdb-sink
  description: "Writes Kafka stream into Azure Data Lake using DuckDB Azure extension"
  batch_size: 50  # messages accumulated per batch before the handler SQL runs

  source:
    type: kafka
    kafka:
      # NOTE(review): templated value is left unquoted inside a flow sequence so a
      # comma-separated expansion (host1:9092,host2:9092) yields multiple list items.
      # This relies on the YAML loader accepting 'host:port' as a plain scalar in
      # flow context (PyYAML does; strict 1.2 parsers may not) — confirm the loader.
      brokers: [{{ SQLFLOW_KAFKA_BROKERS|default('localhost:9092') }}]
      group_id: sql-flow-consumer-001
      auto_offset_reset: earliest
      topics:
        - "input-azure-pipeline-dev"

  handler:
    type: 'handlers.InferredMemBatch'
    # Passes every input column through and adds SourceDatePartition, a DATE derived
    # from the Debezium-style payload.source.ts_ms epoch-milliseconds field
    # (see dev/fixtures/azure.jsonl for the expected message shape).
    sql: |
      SELECT
        *,
        CAST(TO_TIMESTAMP(payload.source.ts_ms / 1000) AS DATE) AS SourceDatePartition
      FROM batch;

  sink:
    type: sqlcommand
    sqlcommand:
      substitutions:
        # $sqlflow_uuid is replaced with a fresh uuid4 per batch, making each
        # written parquet object name unique.
        - var: $sqlflow_uuid
          type: uuid4
      # NOTE(review): container/account/path below are placeholders
      # ('somecontainer@someaccount') — replace before running outside debug.
      # PARTITION_BY uses the SourceDatePartition column produced by the handler;
      # OVERWRITE_OR_IGNORE 1 keeps reruns from failing on existing partitions.
      sql: |
        COPY sqlflow_sink_batch
        TO 'abfss://somecontainer@someaccount.dfs.core.windows.net/path/$sqlflow_uuid.parquet'
        (FORMAT 'parquet', PARTITION_BY (SourceDatePartition), OVERWRITE_OR_IGNORE 1);
1 change: 1 addition & 0 deletions dev/fixtures/azure.jsonl
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"payload":{"source":{"ts_ms":1744569606922}}}