Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
81 changes: 52 additions & 29 deletions .github/workflows/docker-rds-images.yml
Original file line number Diff line number Diff line change
@@ -1,25 +1,22 @@
name: RDS support images

# Builds and (on tag pushes) publishes the prebuilt postgres image used
# by RdsRuntime. Runtime side: `RdsRuntime::ensure_postgres_image` tries
# to pull `ghcr.io/<owner>/fakecloud-postgres:<major>-<fakecloud-version>`
# Builds and (on tag pushes) publishes the prebuilt postgres / mysql /
# mariadb images used by RdsRuntime. Runtime side: the matching
# `ensure_*_image` helper tries to pull
# `ghcr.io/<owner>/fakecloud-<engine>:<version>-<fakecloud-version>`
# before falling back to a local build.
#
# Triggers:
# - `push: tags: ["v*"]` — full release path: builds 4 majors × 2 arches,
# pushes per-arch by digest, merges into `<major>-<version>` and a
# rolling `<major>` tag.
# - `push: tags: ["v*"]` — full release path: builds every engine ×
# version × arch combination, pushes per-arch by digest, merges into
# `<version>-<fakecloud-version>` and a rolling `<version>` tag.
# - `pull_request` (paths-filtered) — dry-run that exercises the build
# for both arches without pushing. Catches Dockerfile typos and
# workflow syntax regressions before we ever cut a release.
# - `workflow_dispatch` — pushes images tagged `<major>-dev-<sha>` so we
# can validate the full publish + manifest-merge path against ghcr.io
# end-to-end without polluting release tags. Rolling `<major>` is NOT
# updated in this mode.
#
# Mirrors the structure of docker.yml: per-arch build with `push-by-digest`,
# then a per-major merge job that creates the manifest list with the
# human-readable tags.
# - `workflow_dispatch` — pushes images tagged `<version>-dev-<sha>` so
# we can validate the full publish + manifest-merge path against
# ghcr.io end-to-end without polluting release tags. Rolling
# `<version>` is NOT updated in this mode.

on:
push:
Expand All @@ -28,18 +25,31 @@ on:
paths:
- .github/workflows/docker-rds-images.yml
- crates/fakecloud-rds/assets/postgres/**
- crates/fakecloud-rds/assets/mysql/**
- crates/fakecloud-rds/assets/mariadb/**
workflow_dispatch:

env:
REGISTRY: ghcr.io
IMAGE_BASE: ghcr.io/${{ github.repository_owner }}/fakecloud-postgres

jobs:
build:
strategy:
fail-fast: false
matrix:
pg_version: ["13", "14", "15", "16"]
target:
# Postgres majors.
- { engine: postgres, version: "13", build_arg: "PG_VERSION" }
- { engine: postgres, version: "14", build_arg: "PG_VERSION" }
- { engine: postgres, version: "15", build_arg: "PG_VERSION" }
- { engine: postgres, version: "16", build_arg: "PG_VERSION" }
# MySQL majors.
- { engine: mysql, version: "5.7", build_arg: "MYSQL_VERSION" }
- { engine: mysql, version: "8.0", build_arg: "MYSQL_VERSION" }
# MariaDB majors.
- { engine: mariadb, version: "10.6", build_arg: "MARIADB_VERSION" }
- { engine: mariadb, version: "10.11", build_arg: "MARIADB_VERSION" }
- { engine: mariadb, version: "11.4", build_arg: "MARIADB_VERSION" }
platform:
- linux/amd64
- linux/arm64
Expand All @@ -52,6 +62,8 @@ jobs:
permissions:
contents: read
packages: write
env:
IMAGE_BASE: ghcr.io/${{ github.repository_owner }}/fakecloud-${{ matrix.target.engine }}

steps:
- uses: actions/checkout@34e114876b0b11c390a56381ad16ebd13914f8d5 # v4
Expand All @@ -71,12 +83,12 @@ jobs:
id: build
uses: docker/build-push-action@10e90e3645eae34f1e60eeb005ba3a3d33f178e8 # v6
with:
context: crates/fakecloud-rds/assets/postgres
context: crates/fakecloud-rds/assets/${{ matrix.target.engine }}
build-args: |
PG_VERSION=${{ matrix.pg_version }}
${{ matrix.target.build_arg }}=${{ matrix.target.version }}
platforms: ${{ matrix.platform }}
cache-from: type=gha,scope=postgres-${{ matrix.pg_version }}-${{ matrix.platform }}
cache-to: type=gha,scope=postgres-${{ matrix.pg_version }}-${{ matrix.platform }},mode=max
cache-from: type=gha,scope=${{ matrix.target.engine }}-${{ matrix.target.version }}-${{ matrix.platform }}
cache-to: type=gha,scope=${{ matrix.target.engine }}-${{ matrix.target.version }}-${{ matrix.platform }},mode=max
outputs: |
type=image,name=${{ env.IMAGE_BASE }},push-by-digest=true,name-canonical=true,push=${{ github.event_name != 'pull_request' }}

Expand All @@ -91,7 +103,7 @@ jobs:
if: github.event_name != 'pull_request'
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
with:
name: digest-postgres-${{ matrix.pg_version }}-${{ matrix.runner }}
name: digest-${{ matrix.target.engine }}-${{ matrix.target.version }}-${{ matrix.runner }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
Expand All @@ -106,14 +118,25 @@ jobs:
strategy:
fail-fast: false
matrix:
pg_version: ["13", "14", "15", "16"]
target:
- { engine: postgres, version: "13" }
- { engine: postgres, version: "14" }
- { engine: postgres, version: "15" }
- { engine: postgres, version: "16" }
- { engine: mysql, version: "5.7" }
- { engine: mysql, version: "8.0" }
- { engine: mariadb, version: "10.6" }
- { engine: mariadb, version: "10.11" }
- { engine: mariadb, version: "11.4" }
env:
IMAGE_BASE: ghcr.io/${{ github.repository_owner }}/fakecloud-${{ matrix.target.engine }}

steps:
- name: Download digests
uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # v4
with:
path: /tmp/digests
pattern: digest-postgres-${{ matrix.pg_version }}-*
pattern: digest-${{ matrix.target.engine }}-${{ matrix.target.version }}-*
merge-multiple: true

- name: Set up Docker Buildx
Expand All @@ -135,15 +158,15 @@ jobs:
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5
with:
images: ${{ env.IMAGE_BASE }}
# On a real release tag (`v*`): pinned `<major>-<version>` plus
# a rolling `<major>` tag.
# On `workflow_dispatch`: a one-off `<major>-dev-<short-sha>`
# On a real release tag (`v*`): pinned `<version>-<release>` plus
# a rolling `<version>` tag.
# On `workflow_dispatch`: a one-off `<version>-dev-<short-sha>`
# tag so we can validate the full publish + manifest-merge
# path end-to-end without overwriting any release tag.
tags: |
type=semver,pattern=${{ matrix.pg_version }}-{{version}}
type=raw,value=${{ matrix.pg_version }},enable=${{ startsWith(github.ref, 'refs/tags/v') }}
type=raw,value=${{ matrix.pg_version }}-dev-${{ steps.sha.outputs.short }},enable=${{ github.event_name == 'workflow_dispatch' }}
type=semver,pattern=${{ matrix.target.version }}-{{version}}
type=raw,value=${{ matrix.target.version }},enable=${{ startsWith(github.ref, 'refs/tags/v') }}
type=raw,value=${{ matrix.target.version }}-dev-${{ steps.sha.outputs.short }},enable=${{ github.event_name == 'workflow_dispatch' }}

- name: Create manifest list and push
working-directory: /tmp/digests
Expand Down
1 change: 1 addition & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ Other install options (Cargo, Docker, Docker Compose, source) are documented at
| SES (v2 + v1 inbound) | 110 | Sending, templates, DKIM, **real receipt rule execution** |
| Cognito User Pools | 122 | Pools, clients, MFA, identity providers, full auth flows; verification email -> SES, SMS -> SNS, all 12 Lambda triggers |
| Kinesis | 39 | Streams, records, shard iterators, retention |
| RDS | 163 | Real Postgres, MySQL, MariaDB, Oracle, SQL Server, Db2 via Docker; lifecycle ops emit `aws.rds` EventBridge events; PostgreSQL `aws_lambda` + `aws_s3` extensions invoke fakecloud Lambda and import/export S3 objects from SQL |
| RDS | 163 | Real Postgres, MySQL, MariaDB, Oracle, SQL Server, Db2 via Docker; lifecycle ops emit `aws.rds` EventBridge events; PostgreSQL `aws_lambda` + `aws_s3` extensions and Aurora-compatible MySQL/MariaDB `mysql.lambda_async`/`mysql.lambda_sync` invoke fakecloud Lambda + import/export S3 objects from SQL |
| ElastiCache | 75 | Real Redis, Valkey, Memcached via Docker |
| Step Functions | 37 | Full ASL interpreter, Lambda/SQS/SNS/EventBridge/DynamoDB tasks |
| API Gateway v1 | 124 | REST APIs, resources, methods, integrations (`MOCK`/`HTTP`/`HTTP_PROXY`/`AWS_PROXY` Lambda), deployments, stages, API keys, usage plans, authorizers, models, request validators, VPC links, domain names, base path mappings, client certs, gateway responses, docs, tags |
Expand Down Expand Up @@ -119,7 +119,7 @@ Full guides: [fakecloud.dev/docs/guides](https://fakecloud.dev/docs/guides).
| Cognito User Pools | 122 operations | [Paid only](https://docs.localstack.cloud/references/licensing/) |
| SES v2 | Full send + templates + DKIM + suppression | [Paid only](https://docs.localstack.cloud/references/licensing/) |
| SES inbound email | Real receipt rule action execution | [Stored but never executed](https://docs.localstack.cloud/user-guide/aws/ses/) |
| RDS | 163 operations, PostgreSQL/MySQL/MariaDB/Oracle/SQL Server/Db2 via Docker, PostgreSQL `aws_lambda` + `aws_s3` extensions | [Paid only](https://docs.localstack.cloud/references/licensing/) |
| RDS | 163 operations, PostgreSQL/MySQL/MariaDB/Oracle/SQL Server/Db2 via Docker, PostgreSQL `aws_lambda` + `aws_s3` extensions, Aurora-compatible MySQL/MariaDB `mysql.lambda_async`/`mysql.lambda_sync` | [Paid only](https://docs.localstack.cloud/references/licensing/) |
| ElastiCache | 75 operations, Redis, Valkey, and Memcached via Docker | [Paid only](https://docs.localstack.cloud/references/licensing/) |
| API Gateway v1 | 124 operations — REST APIs incl. real Lambda proxy data plane | [Paid only](https://docs.localstack.cloud/references/licensing/) |
| API Gateway v2 | 103 operations — HTTP APIs + developer portals | [Paid only](https://docs.localstack.cloud/references/licensing/) |
Expand Down
1 change: 1 addition & 0 deletions crates/fakecloud-e2e/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ aws-sdk-applicationautoscaling = "1"
aws-sdk-wafv2 = "1"
aws-sdk-athena = "1"
tokio-postgres = "0.7"
mysql_async = "0.34"
aws-smithy-types = "1"
aws-credential-types = "1"
aws-types = "1"
Expand Down
102 changes: 102 additions & 0 deletions crates/fakecloud-e2e/tests/rds_mysql_lambda.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
//! End-to-end tests for the Aurora-compatible MySQL/MariaDB
//! `mysql.lambda_async` / `mysql.lambda_sync` stored procedures
//! provided by the prebuilt `fakecloud-mysql` and `fakecloud-mariadb`
//! images. Each test creates a Lambda, spins up the engine container,
//! and exercises both the sync and async invocation paths through the
//! libcurl-backed UDF + bridge endpoint round trip.

mod helpers;

use std::io::Write;

use aws_sdk_lambda::primitives::Blob;
use helpers::TestServer;
use mysql_async::prelude::*;

/// Assemble an in-memory zip archive containing a single `index.py`
/// whose `handler` echoes the incoming event back to the caller —
/// the minimal Lambda deployment package used by these tests.
fn make_echo_zip() -> Vec<u8> {
    let mut archive = zip::ZipWriter::new(std::io::Cursor::new(Vec::new()));
    archive
        .start_file("index.py", zip::write::SimpleFileOptions::default())
        .unwrap();
    archive
        .write_all(b"def handler(event, context):\n return event\n")
        .unwrap();
    archive.finish().unwrap().into_inner()
}

/// Shared driver for the Aurora-compatible Lambda bridge round trip.
///
/// Creates an echo Lambda, provisions an RDS instance for `engine` at
/// `engine_version` under identifier `db_id`, connects over the MySQL
/// wire protocol, and exercises both `mysql.lambda_sync` (asserting the
/// payload round-trips) and `mysql.lambda_async` (asserting no error).
async fn run_lambda_round_trip(engine: &str, engine_version: &str, db_id: &str) {
    // NOTE(review): the env var name says REBUILD_POSTGRES_IMAGE even though
    // this helper only drives mysql/mariadb — confirm the runtime treats it
    // as a generic "rebuild engine images" switch, or add per-engine vars.
    let server = TestServer::start_with_env(&[("FAKECLOUD_REBUILD_POSTGRES_IMAGE", "1")]).await;
    let lambda = server.lambda_client().await;
    let rds = server.rds_client().await;

    // Python echo Lambda: its handler returns the event unchanged, so the
    // bridge's request/response payloads can be compared directly.
    lambda
        .create_function()
        .function_name("echo")
        .runtime(aws_sdk_lambda::types::Runtime::Python312)
        .role("arn:aws:iam::000000000000:role/test-role")
        .handler("index.handler")
        .code(
            aws_sdk_lambda::types::FunctionCode::builder()
                .zip_file(Blob::new(make_echo_zip()))
                .build(),
        )
        .send()
        .await
        .expect("create echo lambda");

    // Provision the engine container via the RDS control plane.
    rds.create_db_instance()
        .db_instance_identifier(db_id)
        .allocated_storage(20)
        .db_instance_class("db.t3.micro")
        .engine(engine)
        .engine_version(engine_version)
        .master_username("admin")
        .master_user_password("secret123")
        .db_name("appdb")
        .send()
        .await
        .expect("create db instance");

    // Image pull + container boot can be slow on cold CI caches; the 360
    // here is a timeout budget (presumably seconds — matches the helper's
    // use elsewhere; confirm against wait_for_db_available).
    let instance = helpers::wait_for_db_available(&rds, db_id, 360).await;
    let endpoint = instance.endpoint().expect("endpoint");
    let host = endpoint.address().expect("address").to_string();
    let port = endpoint.port().expect("port") as u16;

    // Connect with the same credentials passed to create_db_instance.
    let opts = mysql_async::OptsBuilder::default()
        .ip_or_hostname(host)
        .tcp_port(port)
        .user(Some("admin"))
        .pass(Some("secret123"))
        .db_name(Some("appdb"));
    let mut conn = mysql_async::Conn::new(opts)
        .await
        .expect("connect to mysql");

    // Sync invoke: payload should round-trip through the bridge.
    let row: Option<String> = conn
        .query_first("SELECT mysql.lambda_sync('echo', '{\"hello\":\"world\"}') AS payload")
        .await
        .expect("invoke lambda_sync");
    let payload_json = row.expect("payload");
    let parsed: serde_json::Value = serde_json::from_str(&payload_json).unwrap();
    assert_eq!(parsed, serde_json::json!({"hello": "world"}));

    // Async invoke: returns nothing; assert no error.
    conn.query_drop("CALL mysql.lambda_async('echo', '{\"async\":true}')")
        .await
        .expect("invoke lambda_async");

    // Best-effort close; all assertions are done by this point.
    let _ = conn.disconnect().await;
}

// Full sync + async Lambda-bridge round trip against MySQL 8.0.
#[tokio::test]
async fn aws_lambda_bridge_mysql_round_trip() {
    run_lambda_round_trip("mysql", "8.0", "mysql-lambda-db").await;
}

// Full sync + async Lambda-bridge round trip against MariaDB 10.11.
#[tokio::test]
async fn aws_lambda_bridge_mariadb_round_trip() {
    run_lambda_round_trip("mariadb", "10.11", "mariadb-lambda-db").await;
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
-- fakecloud Aurora-compatible Lambda bridge for MySQL/MariaDB.
-- Loaded by the prebuilt fakecloud-mysql / fakecloud-mariadb image
-- on first container start. Renders FAKECLOUD_* env vars from the
-- entrypoint into baked-in URL/account values; SQL never has to know
-- the host or port.

-- libcurl-backed UDFs compiled into fakecloud_udf.so at image build time.
-- fakecloud_post performs a blocking HTTP POST and returns the response
-- body; fakecloud_post_async fires the request without waiting. The third
-- argument to fakecloud_post below is presumably a timeout in milliseconds
-- — confirm against fakecloud_udf.c.
CREATE FUNCTION IF NOT EXISTS fakecloud_post RETURNS STRING SONAME 'fakecloud_udf.so';
CREATE FUNCTION IF NOT EXISTS fakecloud_post_async RETURNS INTEGER SONAME 'fakecloud_udf.so';

DELIMITER $$

-- Aurora-compatible fire-and-forget Lambda invoke ("Event" invocation
-- type). Drop-then-create keeps re-runs of this bootstrap idempotent.
DROP PROCEDURE IF EXISTS mysql.lambda_async $$
CREATE PROCEDURE mysql.lambda_async(IN function_name TEXT, IN payload TEXT)
BEGIN
    DECLARE body TEXT;
    -- NULL payload is forwarded as JSON null rather than SQL NULL so the
    -- bridge always receives a well-formed JSON body.
    SET body = JSON_OBJECT(
        'function_name', function_name,
        'payload', CAST(IFNULL(payload, 'null') AS JSON),
        'invocation_type', 'Event',
        'region', '@FAKECLOUD_REGION@'
    );
    -- DO discards the UDF's integer return; async means no result to read.
    DO fakecloud_post_async(
        '@FAKECLOUD_ENDPOINT@/_fakecloud/rds/lambda-invoke',
        body
    );
END $$

-- Aurora-compatible blocking Lambda invoke ("RequestResponse").
-- NOTE(review): declared DETERMINISTIC although it performs an HTTP call
-- with side effects — likely chosen to satisfy binary-logging restrictions
-- on stored function creation; confirm this is intentional.
DROP FUNCTION IF EXISTS mysql.lambda_sync $$
CREATE FUNCTION mysql.lambda_sync(function_name TEXT, payload TEXT)
RETURNS TEXT
DETERMINISTIC
BEGIN
    DECLARE body TEXT;
    DECLARE result TEXT;
    SET body = JSON_OBJECT(
        'function_name', function_name,
        'payload', CAST(IFNULL(payload, 'null') AS JSON),
        'invocation_type', 'RequestResponse',
        'region', '@FAKECLOUD_REGION@'
    );
    SET result = fakecloud_post(
        '@FAKECLOUD_ENDPOINT@/_fakecloud/rds/lambda-invoke',
        body,
        300000
    );
    -- Bridge response is `{ status_code, payload, ... }`. Strip down to
    -- the payload JSON so callers see the same shape Aurora returns
    -- from `mysql.lambda_sync` (a plain JSON value, not the wrapper).
    RETURN JSON_EXTRACT(result, '$.payload');
END $$

DELIMITER ;
37 changes: 37 additions & 0 deletions crates/fakecloud-rds/assets/mariadb/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
# Built and pushed on each fakecloud release tag by
# .github/workflows/docker-rds-images.yml as
# ghcr.io/faiscadev/fakecloud-mariadb:<major>-<fakecloud-version>
# (plus a rolling :<major> tag). RdsRuntime::ensure_mariadb_image
# tries to pull that tag first and falls back to building from this
# Dockerfile locally when the pull fails.

# Default matches the newest LTS in the CI matrix; the workflow overrides
# this per matrix entry via --build-arg MARIADB_VERSION.
ARG MARIADB_VERSION=10.11
FROM mariadb:${MARIADB_VERSION}

# Build-time toolchain for compiling the libcurl-backed UDF. Run as root
# explicitly so apt works regardless of what USER the base tag ends on.
USER root
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
    gcc make libcurl4-openssl-dev libmariadb-dev libc6-dev ca-certificates curl \
    && rm -rf /var/lib/apt/lists/*

# UDF source, boot wrapper, and the SQL template the wrapper renders
# (presumably substituting @FAKECLOUD_*@ placeholders — confirm against
# fakecloud-bootstrap.sh).
COPY fakecloud_udf.c /tmp/fakecloud_udf.c
COPY fakecloud-bootstrap.sh /usr/local/bin/fakecloud-bootstrap.sh
COPY 99-fakecloud-bootstrap.sql.tmpl /tmp/99-fakecloud-bootstrap.sql.tmpl
RUN chmod +x /usr/local/bin/fakecloud-bootstrap.sh

# MariaDB ships mysql_config-style headers via libmariadb-dev. The
# plugin dir lives at /usr/lib/mysql/plugin (or under the mariadb
# tree on some images); honor whatever mysql_config reports first.
# Each probe falls back in order: mariadb_config -> mysql_config ->
# hard-coded default, so the same Dockerfile builds across versions.
RUN PLUGIN_DIR="$(mariadb_config --plugindir 2>/dev/null \
    || mysql_config --plugindir 2>/dev/null \
    || echo /usr/lib/mysql/plugin)" \
    && mkdir -p "$PLUGIN_DIR" \
    && CFLAGS="$(mariadb_config --cflags 2>/dev/null \
    || mysql_config --cflags 2>/dev/null \
    || echo -I/usr/include/mariadb)" \
    && gcc -O2 -fPIC -shared $CFLAGS -o "$PLUGIN_DIR/fakecloud_udf.so" \
    /tmp/fakecloud_udf.c -lcurl -lpthread \
    && rm /tmp/fakecloud_udf.c

# Bootstrap script is on PATH (/usr/local/bin); it presumably renders the
# SQL template then execs the stock entrypoint with the CMD below.
ENTRYPOINT ["fakecloud-bootstrap.sh"]
CMD ["mariadbd"]
Loading
Loading