Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 0 additions & 22 deletions .github/workflows/deploy-check.yml

This file was deleted.

33 changes: 33 additions & 0 deletions .github/workflows/deploy-test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
name: Deploy to Test Server

on:
  push:
    branches:
      - main
      - 'feature/**'
  workflow_dispatch:

# Least-privilege GITHUB_TOKEN (CodeQL: "Workflow does not contain permissions").
# This workflow only uses SSH secrets and never writes via the token, so
# read-only repository contents is sufficient.
permissions:
  contents: read

jobs:
  deploy:
    runs-on: ubuntu-latest
    env:
      # Branch that triggered the run; forwarded to the remote deploy script.
      BRANCH_NAME: ${{ github.ref_name }}

    steps:
      - name: Set up SSH agent
        uses: webfactory/ssh-agent@v0.7.0
        with:
          ssh-private-key: ${{ secrets.TEST_SSH_PRIVATE_KEY }}

      - name: Add test server to known_hosts
        run: |
          mkdir -p ~/.ssh
          echo "${{ secrets.TEST_SSH_KNOWN_HOSTS }}" >> ~/.ssh/known_hosts

      - name: Deploy ${{ env.BRANCH_NAME }} to test server
        run: |
          echo "➡️ Starting remote deployment of branch '${BRANCH_NAME}'"
          # NOTE(review): -o StrictHostKeyChecking=no defeats the known_hosts
          # step above — consider dropping the flag so the host key is verified.
          ssh -o StrictHostKeyChecking=no \
            ${{ secrets.TEST_SERVER_USER }}@${{ secrets.TEST_SERVER_HOST }} \
            "bash ~/deploy_recapp_to_test.sh \"${BRANCH_NAME}\""
          echo "✅ Remote deployment of branch '${BRANCH_NAME}' succeeded"
Comment on lines +12 to +33

Check warning

Code scanning / CodeQL

Workflow does not contain permissions Medium

Actions job or workflow does not limit the permissions of the GITHUB_TOKEN. Consider setting an explicit permissions block, using the following as a minimal starting point: {}

Copilot Autofix

AI 9 months ago

To fix the issue, we will add a permissions block at the root of the workflow file. This block will explicitly define the minimal permissions required for the workflow. Since the workflow does not use the GITHUB_TOKEN for any operations, we will set contents: read as the only permission. This ensures that the workflow has the least privilege necessary to operate.


Suggested changeset 1
.github/workflows/deploy-test.yml

Autofix patch

Autofix patch
Run the following command in your local git repository to apply this patch
cat << 'EOF' | git apply
diff --git a/.github/workflows/deploy-test.yml b/.github/workflows/deploy-test.yml
--- a/.github/workflows/deploy-test.yml
+++ b/.github/workflows/deploy-test.yml
@@ -9,2 +9,5 @@
 
+permissions:
+  contents: read
+
 jobs:
EOF
@@ -9,2 +9,5 @@

permissions:
contents: read

jobs:
Copilot is powered by AI and may make mistakes. Always verify output.
Unable to commit as this autofix suggestion is now outdated
4 changes: 2 additions & 2 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
# recapp 1.4.2
* almost ready to go public
# recapp 1.6.2

208 changes: 93 additions & 115 deletions deployment.sh
Original file line number Diff line number Diff line change
@@ -1,131 +1,109 @@
#!/usr/bin/env bash
#
# deploy_recapp_to_test.sh
# Deploys the RecApp application to the test environment.
# It assumes:
# - The deploy user has passwordless sudo for the needed npm/docker commands.
# - $HOME is the home directory of that user, and ~/recapp is the app folder.
# - npm scripts: stop:docker:prod, build:docker:prod, start:docker:prod exist in package.json.
# - set -e is in effect, so any command failing will abort the script.

set -euo pipefail

# Branch to deploy; defaults to main when no argument is given.
BRANCH="${1:-main}"

# Log file and rotation parameters.
# The log lives in the user's home directory and rotates when it exceeds
# 10 MB, keeping the last MAX_OLD_LOGS old logs as deploy.log.1 .. deploy.log.N.
LOG_DIR="$HOME"
LOG_FILE="$LOG_DIR/deploy.log"
MAX_LOG_SIZE=$((10 * 1024 * 1024)) # 10 MB
MAX_OLD_LOGS=3

# Append a timestamped message to both the log file and stdout.
log() {
    local msg="$1"
    echo "$(date '+%Y-%m-%d %H:%M:%S') - ${msg}" | tee -a "$LOG_FILE"
}

# Rotate deploy.log once it grows past MAX_LOG_SIZE.
rotate_logs() {
    # If deploy.log does not exist, nothing to rotate
    [ -f "$LOG_FILE" ] || return 0

    local actual_size
    actual_size=$(stat -c%s "$LOG_FILE")   # GNU stat; BSD stat would need -f%z
    if [ "$actual_size" -le "$MAX_LOG_SIZE" ]; then
        return 0
    fi

    # Drop only the OLDEST log, then shift the rest up:
    # deploy.log.2 -> deploy.log.3, deploy.log.1 -> deploy.log.2, deploy.log -> deploy.log.1
    # (Fixes an off-by-one: deleting deploy.log.$((MAX_OLD_LOGS - 1)) meant
    # only MAX_OLD_LOGS-1 old logs were ever actually kept.)
    rm -f "$LOG_DIR/deploy.log.$MAX_OLD_LOGS"

    for (( i=MAX_OLD_LOGS-1; i>=1; i-- )); do
        if [ -f "$LOG_DIR/deploy.log.$i" ]; then
            mv "$LOG_DIR/deploy.log.$i" "$LOG_DIR/deploy.log.$((i + 1))"
        fi
    done

    mv "$LOG_FILE" "$LOG_DIR/deploy.log.1"
    : > "$LOG_FILE"
    echo "$(date '+%Y-%m-%d %H:%M:%S') - Log rotated: previous log moved to deploy.log.1" >> "$LOG_FILE"
    return 0
}

# On any unexpected exit (non-zero), log the exit code before propagating it.
on_error() {
    local exit_code=$?
    log "❌ Deployment script exited with code ${exit_code}."
    exit "${exit_code}"
}
trap on_error ERR

# Rotate logs if necessary (after log()/trap are defined, so failures here
# are reported; previously a stray debug 'echo "test end"' followed this call).
rotate_logs

log "=== Starting deployment to test environment ==="

# Verify sudo privileges up front (without a password prompt) to fail fast.
if ! sudo -n true 2>/dev/null; then
    log "ERROR: This script requires passwordless sudo privileges. Exiting."
    exit 1
fi

# Ensure the recapp directory exists
REPO_DIR="$HOME/recapp"
if [ ! -d "$REPO_DIR" ]; then
    log "ERROR: Directory '$REPO_DIR' not found. Cannot deploy."
    exit 1
fi

cd "$REPO_DIR"

log "Fetching origin..."
git fetch origin --prune

log "Checking out branch '$BRANCH' (force)..."
git checkout --force -B "$BRANCH" "origin/$BRANCH"

# 1) Stop the existing Docker production container
log "Stopping existing production container..."
sudo npm run stop:docker:prod 2>&1 | tee -a "$LOG_FILE"

# 2) Install dependencies (CI)
log "Installing npm dependencies for CI..."
sudo npm ci 2>&1 | tee -a "$LOG_FILE"

# 3) Build the Docker image for production
log "Building Docker image for production..."
sudo npm run build:docker:prod 2>&1 | tee -a "$LOG_FILE"

# 4) Start the new production container
log "Starting new production container..."
sudo npm run start:docker:prod 2>&1 | tee -a "$LOG_FILE"

log "✅ Deployment to test environment completed successfully."
exit 0
2 changes: 1 addition & 1 deletion packages/frontend/package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "@recapp/frontend",
"private": true,
"version": "1.6.1",
"version": "1.6.2",
"type": "module",
"scripts": {
"dev": "vite",
Expand Down
68 changes: 41 additions & 27 deletions packages/frontend/src/actors/TokenActor.ts
Original file line number Diff line number Diff line change
@@ -1,36 +1,50 @@
import { Unit, unit, minutes } from "itu-utils";
// packages/frontend/src/actors/TokenActor.ts

import { Unit, unit } from "itu-utils";
import { Actor, ActorRef, ActorSystem } from "ts-actors";
import Axios from "axios";
import { cookie } from "../utils";

const updateToken = () => {
const hasToken = !!cookie("bearer");
if (hasToken) {
Axios.get(import.meta.env.VITE_BACKEND_URI + "/auth/refresh", { withCredentials: true }).catch(() => {
alert(
"Could not refresh token. Presumeably the authentication server is unavailable. Please report this error if it happens repeatedly."
);
window.location.href = "/";
});
}
};

export class TokenActor extends Actor<unknown, Unit> {
public interval: any;
public interval: any;
private expiresAt: Date;

public constructor(name: string, system: ActorSystem) {
super(name, system);
this.expiresAt = new Date(); // Initialize with a default value
}

public override async afterStart(): Promise<void> {
this.updateToken();
}

public constructor(name: string, system: ActorSystem) {
super(name, system);
}
public override async beforeShutdown(): Promise<void> {
clearTimeout(this.interval);
}

public override async afterStart(): Promise<void> {
this.interval = setInterval(updateToken, minutes(import.meta.env.VITE_INACTIVITY_LIMIT).valueOf());
}
private updateToken = () => {
const hasToken = !!cookie("bearer");
if (hasToken) {
Axios.get(import.meta.env.VITE_BACKEND_URI + "/auth/refresh", { withCredentials: true })
.then(response => {
this.expiresAt = new Date(response.data.expires_at);
this.scheduleNextUpdate();
})
.catch(error => {
console.error("Failed to refresh token:", error);
setTimeout(this.updateToken, 5000); // Retry after 5 seconds
});
}
};

public override async beforeShutdown(): Promise<void> {
clearInterval(this.interval);
}
private scheduleNextUpdate = () => {
const buffer = 30000; // 30 seconds before expiry
const delay = this.expiresAt.getTime() - Date.now() - buffer;
clearTimeout(this.interval); // Clear previous timeout
this.interval = setTimeout(this.updateToken, delay);
};

public async receive(_from: ActorRef, _message: unknown): Promise<Unit> {
return unit();
}
}
public async receive(_from: ActorRef, _message: unknown): Promise<Unit> {
return unit();
}
}