Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
120 changes: 120 additions & 0 deletions .github/workflows/programs.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
on:
  push:
    branches:
      - main
    paths:
      - "programs/**"
      - "program-tests/**"
      - "program-libs/**"
      - "prover/client/**"
      # NOTE: this file is programs.yml (not light-system-programs-tests.yml);
      # the filter must reference itself so edits to this workflow retrigger it.
      - ".github/workflows/programs.yml"
  pull_request:
    branches:
      - "*"
    paths:
      - "programs/**"
      - "program-tests/**"
      - "program-libs/**"
      - "prover/client/**"
      - ".github/workflows/programs.yml"
    types:
      - opened
      - synchronize
      - reopened
      - ready_for_review

name: programs

# Least-privilege token: no step in this workflow writes to the repo,
# issues, or PRs, so read-only contents is sufficient (CodeQL: workflow
# does not limit GITHUB_TOKEN permissions).
permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  system-programs:
    name: programs
    # Gate only draft pull requests. On push events github.event.pull_request
    # is null, so a bare `draft == false` check would skip every push build;
    # the event-name guard keeps pushes to main running.
    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
    runs-on: warp-ubuntu-latest-x64-4x
    timeout-minutes: 90

    services:
      redis:
        image: redis:8.0.1
        ports:
          - 6379:6379
        options: >-
          --health-cmd "redis-cli ping"
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5

    env:
      REDIS_URL: redis://localhost:6379

    strategy:
      matrix:
        include:
          - program: account-compression-and-registry
            sub-tests: '["cargo-test-sbf -p account-compression-test", "cargo-test-sbf -p registry-test"]'
          - program: light-system-program-address
            sub-tests: '["cargo-test-sbf -p system-test -- test_with_address", "cargo-test-sbf -p e2e-test", "cargo-test-sbf -p compressed-token-test --test ctoken"]'
          - program: light-system-program-compression
            sub-tests: '["cargo-test-sbf -p system-test -- test_with_compression", "cargo-test-sbf -p system-test --test test_re_init_cpi_account"]'
          - program: compressed-token-and-e2e
            sub-tests: '["cargo-test-sbf -p compressed-token-test --test v1", "cargo-test-sbf -p compressed-token-test --test mint"]'
          - program: compressed-token-batched-tree
            sub-tests: '["cargo-test-sbf -p compressed-token-test -- test_transfer_with_photon_and_batched_tree"]'
          - program: system-cpi-test
            sub-tests:
              '["cargo-test-sbf -p system-cpi-test", "cargo test -p light-system-program-pinocchio",
              "cargo-test-sbf -p system-cpi-v2-test -- --skip functional_ --skip event::parse", "cargo-test-sbf -p system-cpi-v2-test -- event::parse",
              "cargo-test-sbf -p compressed-token-test --test transfer2"
              ]'
          - program: system-cpi-test-v2-functional-read-only
            sub-tests: '["cargo-test-sbf -p system-cpi-v2-test -- functional_read_only"]'
          - program: system-cpi-test-v2-functional-account-infos
            sub-tests: '["cargo-test-sbf -p system-cpi-v2-test -- functional_account_infos"]'
    steps:
Comment on lines +33 to +76
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Allow push workflows to run.

if: github.event.pull_request.draft == false short-circuits every push build because github.event.pull_request is null on push events, leaving main without coverage. Gate only pull_request events (e.g., if: github.event_name != 'pull_request' || github.event.pull_request.draft == false) so pushes still execute.

-    if: github.event.pull_request.draft == false
+    if: github.event_name != 'pull_request' || github.event.pull_request.draft == false
🤖 Prompt for AI Agents
In .github/workflows/programs.yml around lines 33 to 76 the current if:
github.event.pull_request.draft == false will be null on push events and
short-circuit those runs; change the conditional to only gate draft pull
requests by using a compound check such as: if: github.event_name !=
'pull_request' || github.event.pull_request.draft == false so that
non-pull_request events (pushes) still run while draft PRs remain blocked.

- name: Checkout sources
uses: actions/checkout@v4

- name: Setup and build
uses: ./.github/actions/setup-and-build
with:
skip-components: "redis,disk-cleanup"
cache-key: "rust"

- name: Build CLI
run: |
npx nx build @lightprotocol/zk-compression-cli

- name: ${{ matrix.program }}
run: |

IFS=',' read -r -a sub_tests <<< "${{ join(fromJSON(matrix['sub-tests']), ', ') }}"
for subtest in "${sub_tests[@]}"
do
echo "$subtest"

# Retry logic for flaky batched-tree test
if [[ "$subtest" == *"test_transfer_with_photon_and_batched_tree"* ]]; then
echo "Running flaky test with retry logic (max 3 attempts)..."
attempt=1
max_attempts=3
until RUSTFLAGS="-D warnings" eval "$subtest"; do
attempt=$((attempt + 1))
if [ $attempt -gt $max_attempts ]; then
echo "Test failed after $max_attempts attempts"
exit 1
fi
echo "Attempt $attempt/$max_attempts failed, retrying..."
sleep 5
done
echo "Test passed on attempt $attempt"
else
RUSTFLAGS="-D warnings" eval "$subtest"
if [ "$subtest" == "cargo-test-sbf -p e2e-test" ]; then
pnpm --filter @lightprotocol/programs run build-compressed-token-small
RUSTFLAGS="-D warnings" eval "$subtest -- --test test_10_all"
fi
Comment on lines +93 to +118
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Trim subtest strings before comparisons.

Splitting on commas leaves each element with a leading space, so the literal comparison on Line 115 never matches "cargo-test-sbf -p e2e-test". As a result, the extra CLI build and --test test_10_all run are silently skipped. Strip the whitespace (e.g., subtest="${subtest## }") or iterate the JSON array directly to ensure the equality check fires.

-          for subtest in "${sub_tests[@]}"
+          for subtest in "${sub_tests[@]}"
           do
-            echo "$subtest"
+            subtest="${subtest## }"
+            echo "$subtest"
🤖 Prompt for AI Agents
In .github/workflows/programs.yml around lines 93 to 118, the elements produced
by splitting the JSON string contain leading spaces so the equality check for
"cargo-test-sbf -p e2e-test" never matches; fix this by trimming
leading/trailing whitespace from each subtest before any comparisons or evals
(or iterate the JSON array directly), and use the trimmed value for the
retry/eval and the subsequent equality branch so the extra build and the `--test
test_10_all` invocation run as intended.

fi
done
Comment on lines +34 to +120

Check warning

Code scanning / CodeQL

Workflow does not contain permissions Medium

Actions job or workflow does not limit the permissions of the GITHUB_TOKEN. Consider setting an explicit permissions block, using the following as a minimal starting point: {contents: read}

Copilot Autofix

AI 5 months ago

To fix this problem, explicitly set the permissions key for the workflow. Since none of the workflow steps write to the repo or interact with issues or PRs, the safest minimum is contents: read. This should be added at the root of the workflow YAML (before jobs:), to apply to all jobs unless overridden.
You should add:

permissions:
  contents: read

directly after the name: programs key (line 27).

Suggested changeset 1
.github/workflows/programs.yml

Autofix patch

Autofix patch
Run the following command in your local git repository to apply this patch
cat << 'EOF' | git apply
diff --git a/.github/workflows/programs.yml b/.github/workflows/programs.yml
--- a/.github/workflows/programs.yml
+++ b/.github/workflows/programs.yml
@@ -25,6 +25,9 @@
 
 name: programs
 
+permissions:
+  contents: read
+
 concurrency:
   group: ${{ github.workflow }}-${{ github.ref }}
   cancel-in-progress: true
EOF
@@ -25,6 +25,9 @@

name: programs

permissions:
contents: read

concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
Copilot is powered by AI and may make mistakes. Always verify output.
14 changes: 11 additions & 3 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 1 addition & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -218,8 +218,7 @@ groth16-solana = { version = "0.2.0" }
bytemuck = { version = "1.19.0" }
arrayvec = "0.7"
tinyvec = "1.10.0"
pinocchio-token-program = { git= "https://github.com/Lightprotocol/token",rev="14bc35d02a994138973f7118a61cd22f08465a98" }

pinocchio-token-program = { git= "https://github.com/Lightprotocol/token", rev="38d8634353e5eeb8c015d364df0eaa39f5c48b05" }
# Math and crypto
num-bigint = "0.4.6"
tabled = "0.20"
Expand Down
5 changes: 0 additions & 5 deletions program-libs/compressible/CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,3 @@
- FailedBorrowRentSysvar (19001), InvalidState (19002)
- HasherError propagation from light-hasher (7xxx codes)
- ProgramError conversions (Anchor, Pinocchio, Solana)

## TODO:
- try to refactor so that 1 lamport is the minimum rent payment
- update config, max write fee, max funded epoch
- update RentConfig at claim
6 changes: 3 additions & 3 deletions program-libs/compressible/docs/CONFIG_ACCOUNT.md
Original file line number Diff line number Diff line change
Expand Up @@ -70,12 +70,12 @@ let v1_pda = CompressibleConfig::ctoken_v1_config_pda();

**Light Registry Program:**
- `update_compressible_config` - Updates config state and parameters
- `withdraw_funding_pool` (discriminator: 108) - Withdraws from rent_sponsor pool
- `withdraw_funding_pool` (discriminator: 105) - Withdraws from rent_sponsor pool

**Compressed Token Program (uses config):**
- `CreateTokenAccount` (discriminator: 18) - Creates ctoken with compressible extension
- `CreateAssociatedTokenAccount` (discriminator: 103) - Creates ATA with compressible
- `Claim` (discriminator: 107) - Claims rent using config parameters
- `CreateAssociatedTokenAccount` (discriminator: 100) - Creates ATA with compressible
- `Claim` (discriminator: 104) - Claims rent using config parameters
- `CompressAndClose` (via Transfer2) - Uses compression_authority from config

**Registry Program (via wrapper):**
Expand Down
6 changes: 3 additions & 3 deletions program-libs/compressible/docs/RENT.md
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ Determines the last epoch covered by rent payments.
### Check if account is compressible
```rust
let (is_compressible, deficit) = calculate_rent_and_balance(
261, // account size
260, // account size
1000000, // current slot
5000000, // current lamports
0, // last claimed slot
Expand All @@ -225,14 +225,14 @@ let (is_compressible, deficit) = calculate_rent_and_balance(
### Calculate claimable rent
```rust
let claimable = claimable_lamports(
261, 1000000, 5000000, 0, 2000000, 1220, 10, 11000
260, 1000000, 5000000, 0, 2000000, 1220, 10, 11000
);
// Returns Some(amount) if claimable, None if compressible
```

### Split lamports on close
```rust
let (to_rent_sponsor, to_user) = calculate_close_lamports(
261, 1000000, 5000000, 0, 2000000, 1220, 10, 11000
260, 1000000, 5000000, 0, 2000000, 1220, 10, 11000
);
```
15 changes: 6 additions & 9 deletions program-libs/compressible/src/compression_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -110,16 +110,13 @@ macro_rules! impl_is_compressible {
if let Some(rent_deficit) = is_compressible {
Ok(lamports_per_write as u64 + rent_deficit)
} else {
let unused_lamports =
state.get_unused_lamports(&self.rent_config, rent_exemption_lamports);
// Account is not compressible, check if we should still top up
let epochs_funded_ahead =
unused_lamports / self.rent_config.rent_curve_per_epoch(num_bytes);
solana_msg::msg!(
"Top-up check: unused_lamports {}, epochs_funded_ahead {}",
unused_lamports,
epochs_funded_ahead
// Calculate epochs funded ahead using available balance
let available_balance = state.get_available_rent_balance(
rent_exemption_lamports,
self.rent_config.compression_cost(),
);
let rent_per_epoch = self.rent_config.rent_curve_per_epoch(num_bytes);
let epochs_funded_ahead = available_balance / rent_per_epoch;
// Skip top-up if already funded for max_funded_epochs or more
if epochs_funded_ahead >= self.rent_config.max_funded_epochs as u64 {
Ok(0)
Expand Down
9 changes: 6 additions & 3 deletions program-libs/compressible/src/rent/account_rent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,8 @@ impl AccountRentState {
self.get_available_rent_balance(rent_exemption_lamports, config.compression_cost());
let required_epochs = self.get_required_epochs::<true>(); // include next epoch for compressibility check
let rent_per_epoch = config.rent_curve_per_epoch(self.num_bytes);
let lamports_due = rent_per_epoch * required_epochs;
// Use saturating_mul to prevent overflow - cheaper than checked_mul (no branching)
let lamports_due = rent_per_epoch.saturating_mul(required_epochs);

if available_balance < lamports_due {
// Include compression cost in deficit so forester can execute
Expand Down Expand Up @@ -111,7 +112,8 @@ impl AccountRentState {
return None; // Should compress, not claim
}
let rent_per_epoch = config.rent_curve_per_epoch(self.num_bytes);
Some(self.get_completed_epochs() * rent_per_epoch)
// Use saturating_mul to prevent overflow - cheaper than checked_mul (no branching)
Some(self.get_completed_epochs().saturating_mul(rent_per_epoch))
}

/// Calculate how lamports are distributed when closing an account.
Expand Down Expand Up @@ -151,7 +153,8 @@ impl AccountRentState {
self.get_available_rent_balance(rent_exemption_lamports, config.compression_cost());
let required_epochs = self.get_required_epochs::<true>();
let rent_per_epoch = config.rent_curve_per_epoch(self.num_bytes);
let lamports_due = rent_per_epoch * required_epochs;
// Use saturating_mul to prevent overflow - cheaper than checked_mul (no branching)
let lamports_due = rent_per_epoch.saturating_mul(required_epochs);

available_balance.saturating_sub(lamports_due)
}
Expand Down
Loading
Loading