Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { Database, FolderUp, Loader2, Trash2 } from "lucide-react";
import { toast } from "sonner";
import { DialogAction } from "@/components/shared/dialog-action";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
Card,
Expand All @@ -24,11 +25,11 @@ export const ShowDestinations = () => {
<CardHeader className="">
<CardTitle className="text-xl flex flex-row gap-2">
<Database className="size-6 text-muted-foreground self-center" />
S3 Destinations
Backup Destinations
</CardTitle>
<CardDescription>
Add your providers like AWS S3, Cloudflare R2, Wasabi,
DigitalOcean Spaces etc.
Add destinations like AWS S3, Cloudflare R2, Wasabi,
DigitalOcean Spaces, SFTP servers, or FTP servers.
</CardDescription>
</CardHeader>
<CardContent className="space-y-2 py-8 border-t">
Expand All @@ -44,7 +45,7 @@ export const ShowDestinations = () => {
<FolderUp className="size-8 self-center text-muted-foreground" />
<span className="text-base text-muted-foreground">
To create a backup it is required to set at least 1
provider.
destination.
</span>
{permissions?.destination.create && <HandleDestinations />}
</div>
Expand All @@ -58,9 +59,14 @@ export const ShowDestinations = () => {
>
<div className="flex items-center justify-between p-3.5 rounded-lg bg-background border w-full">
<div className="flex flex-col gap-1">
<span className="text-sm">
{index + 1}. {destination.name}
</span>
<div className="flex flex-row items-center gap-2">
<span className="text-sm">
{index + 1}. {destination.name}
</span>
<Badge variant="outline" className="text-xs uppercase">
{(destination as any).destinationType ?? "s3"}
</Badge>
</div>
<span className="text-xs text-muted-foreground">
Created at:{" "}
{new Date(
Expand Down
11 changes: 11 additions & 0 deletions apps/dokploy/drizzle/0150_add_sftp_ftp_destination_types.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
-- Migration 0150: extend the "destination" table so backup destinations can be
-- SFTP or FTP servers in addition to S3-compatible object storage.

-- Discriminator column; existing rows are tagged 's3' so they keep working.
ALTER TABLE "destination" ADD COLUMN "destinationType" text NOT NULL DEFAULT 's3';
-- Connection fields used only by SFTP/FTP destinations (left NULL for S3 rows).
ALTER TABLE "destination" ADD COLUMN "host" text;
ALTER TABLE "destination" ADD COLUMN "port" integer;
ALTER TABLE "destination" ADD COLUMN "username" text;
-- NOTE(review): stored password appears to be the rclone-obscured form (see
-- obscureRclonePassword in the router), not plaintext — confirm before reuse.
ALTER TABLE "destination" ADD COLUMN "password" text;
ALTER TABLE "destination" ADD COLUMN "basePath" text;
-- The S3 columns were NOT NULL without defaults; give them empty-string
-- defaults so SFTP/FTP rows can be inserted without S3 credentials while the
-- NOT NULL constraint (and existing data) stays intact.
ALTER TABLE "destination" ALTER COLUMN "accessKey" SET DEFAULT '';
ALTER TABLE "destination" ALTER COLUMN "secretAccessKey" SET DEFAULT '';
ALTER TABLE "destination" ALTER COLUMN "bucket" SET DEFAULT '';
ALTER TABLE "destination" ALTER COLUMN "region" SET DEFAULT '';
ALTER TABLE "destination" ALTER COLUMN "endpoint" SET DEFAULT '';
7 changes: 7 additions & 0 deletions apps/dokploy/drizzle/meta/_journal.json
Original file line number Diff line number Diff line change
Expand Up @@ -1051,6 +1051,13 @@
"when": 1773637297592,
"tag": "0149_rare_radioactive_man",
"breakpoints": true
},
{
"idx": 150,
"version": "7",
"when": 1774000000000,
"tag": "0150_add_sftp_ftp_destination_types",
"breakpoints": true
}
]
}
7 changes: 4 additions & 3 deletions apps/dokploy/server/api/routers/backup.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,8 @@ import {
import { findDestinationById } from "@dokploy/server/services/destination";
import { runComposeBackup } from "@dokploy/server/utils/backups/compose";
import {
getS3Credentials,
getRcloneFlags,
getRcloneRemote,
normalizeS3Path,
} from "@dokploy/server/utils/backups/utils";
import {
Expand Down Expand Up @@ -424,8 +425,8 @@ export const backupRouter = createTRPCRouter({
.query(async ({ input }) => {
try {
const destination = await findDestinationById(input.destinationId);
const rcloneFlags = getS3Credentials(destination);
const bucketPath = `:s3:${destination.bucket}`;
const rcloneFlags = getRcloneFlags(destination);
const bucketPath = getRcloneRemote(destination, "");

const lastSlashIndex = input.search.lastIndexOf("/");
const baseDir =
Expand Down
108 changes: 86 additions & 22 deletions apps/dokploy/server/api/routers/destination.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import {
execAsyncRemote,
findDestinationById,
IS_CLOUD,
obscureRclonePassword,
removeDestinationById,
updateDestinationById,
} from "@dokploy/server";
Expand All @@ -20,13 +21,90 @@ import {
destinations,
} from "@/server/db/schema";

/**
 * Builds the `rclone ls` command used to test connectivity to a backup
 * destination before it is saved.
 *
 * For SFTP/FTP the password is first converted to rclone's obscured form
 * (rclone rejects plaintext in `--sftp-pass` / `--ftp-pass`). For S3 the
 * credentials are passed through as-is. All variants add aggressive
 * retry/timeout flags so a bad destination fails fast.
 *
 * NOTE(review): every field is interpolated into a shell string inside
 * double quotes; a value containing `"` or `$` could break out of the
 * quoting. Consider shell-escaping these values upstream.
 *
 * @param input destination fields (S3 credentials and/or SFTP/FTP
 *              connection details, discriminated by `destinationType`)
 * @returns the full `rclone ls …` command string
 */
const buildTestCommand = async (input: {
	destinationType?: string;
	accessKey?: string;
	secretAccessKey?: string;
	bucket?: string;
	region?: string;
	endpoint?: string;
	provider?: string;
	host?: string;
	port?: number;
	username?: string;
	password?: string;
	basePath?: string;
}) => {
	// Default to S3 for backward compatibility with pre-existing destinations.
	const type = input.destinationType ?? "s3";

	// SFTP and FTP share the exact same flag layout — only the rclone backend
	// prefix differs ("--sftp-*"/":sftp:" vs "--ftp-*"/":ftp:") — so one
	// branch handles both instead of two copy-pasted ones.
	if (type === "sftp" || type === "ftp") {
		// rclone requires the obscured password format, not plaintext.
		const obscuredPass = await obscureRclonePassword(input.password || "");
		const flags = [
			`--${type}-host="${input.host}"`,
			`--${type}-user="${input.username}"`,
			`--${type}-pass="${obscuredPass}"`,
			"--retries 1",
			"--low-level-retries 1",
			"--timeout 10s",
			"--contimeout 5s",
		];
		// Falsy port (undefined or 0) falls back to the backend's default port.
		if (input.port) flags.push(`--${type}-port="${input.port}"`);
		// Strip surrounding slashes and re-add a trailing one so the remote
		// spec is relative to the account root, e.g. ":sftp:backups/".
		const base = input.basePath
			? input.basePath.replace(/^\/+|\/+$/g, "") + "/"
			: "";
		return `rclone ls ${flags.join(" ")} ":${type}:${base}"`;
	}

	// S3-compatible storage (AWS S3, Cloudflare R2, Wasabi, …).
	const rcloneFlags = [
		`--s3-access-key-id="${input.accessKey}"`,
		`--s3-secret-access-key="${input.secretAccessKey}"`,
		`--s3-region="${input.region}"`,
		`--s3-endpoint="${input.endpoint}"`,
		"--s3-no-check-bucket",
		"--s3-force-path-style",
		"--retries 1",
		"--low-level-retries 1",
		"--timeout 10s",
		"--contimeout 5s",
	];
	if (input.provider) {
		// Provider flag must come first to match the original command shape.
		rcloneFlags.unshift(`--s3-provider="${input.provider}"`);
	}
	return `rclone ls ${rcloneFlags.join(" ")} ":s3:${input.bucket}"`;
};

export const destinationRouter = createTRPCRouter({
create: withPermission("destination", "create")
.input(apiCreateDestination)
.mutation(async ({ input, ctx }) => {
try {
const data = { ...input };
if (data.destinationType !== "s3" && data.password) {
data.password = await obscureRclonePassword(data.password);
}
const result = await createDestintation(
input,
data,
ctx.session.activeOrganizationId,
);
await audit(ctx, {
Expand All @@ -47,26 +125,8 @@ export const destinationRouter = createTRPCRouter({
testConnection: withPermission("destination", "create")
.input(apiCreateDestination)
.mutation(async ({ input }) => {
const { secretAccessKey, bucket, region, endpoint, accessKey, provider } =
input;
try {
const rcloneFlags = [
`--s3-access-key-id="${accessKey}"`,
`--s3-secret-access-key="${secretAccessKey}"`,
`--s3-region="${region}"`,
`--s3-endpoint="${endpoint}"`,
"--s3-no-check-bucket",
"--s3-force-path-style",
"--retries 1",
"--low-level-retries 1",
"--timeout 10s",
"--contimeout 5s",
];
if (provider) {
rcloneFlags.unshift(`--s3-provider="${provider}"`);
}
const rcloneDestination = `:s3:${bucket}`;
const rcloneCommand = `rclone ls ${rcloneFlags.join(" ")} "${rcloneDestination}"`;
const rcloneCommand = await buildTestCommand(input);

if (IS_CLOUD && !input.serverId) {
throw new TRPCError({
Expand All @@ -86,7 +146,7 @@ export const destinationRouter = createTRPCRouter({
message:
error instanceof Error
? error?.message
: "Error connecting to bucket",
: "Error connecting to destination",
cause: error,
});
}
Expand Down Expand Up @@ -147,8 +207,12 @@ export const destinationRouter = createTRPCRouter({
message: "You are not allowed to update this destination",
});
}
const data = { ...input };
if (data.destinationType !== "s3" && data.password) {
data.password = await obscureRclonePassword(data.password);
}
const result = await updateDestinationById(input.destinationId, {
...input,
...data,
organizationId: ctx.session.activeOrganizationId,
});
await audit(ctx, {
Expand Down
68 changes: 62 additions & 6 deletions packages/server/src/db/schema/destination.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { relations } from "drizzle-orm";
import { pgTable, text, timestamp } from "drizzle-orm/pg-core";
import { integer, pgTable, text, timestamp } from "drizzle-orm/pg-core";
import { createInsertSchema } from "drizzle-zod";
import { nanoid } from "nanoid";
import { z } from "zod";
Expand All @@ -13,11 +13,17 @@ export const destinations = pgTable("destination", {
.$defaultFn(() => nanoid()),
name: text("name").notNull(),
provider: text("provider"),
accessKey: text("accessKey").notNull(),
secretAccessKey: text("secretAccessKey").notNull(),
bucket: text("bucket").notNull(),
region: text("region").notNull(),
endpoint: text("endpoint").notNull(),
accessKey: text("accessKey").notNull().default(""),
secretAccessKey: text("secretAccessKey").notNull().default(""),
bucket: text("bucket").notNull().default(""),
region: text("region").notNull().default(""),
endpoint: text("endpoint").notNull().default(""),
destinationType: text("destinationType").notNull().default("s3"),
host: text("host"),
port: integer("port"),
username: text("username"),
password: text("password"),
basePath: text("basePath"),
organizationId: text("organizationId")
.notNull()
.references(() => organization.id, { onDelete: "cascade" }),
Expand Down Expand Up @@ -58,7 +64,32 @@ export const apiCreateDestination = createSchema
})
.required()
.extend({
destinationType: z.enum(["s3", "sftp", "ftp"]).default("s3"),
host: z.string().optional(),
port: z.number().optional(),
username: z.string().optional(),
password: z.string().optional(),
basePath: z.string().optional(),
serverId: z.string().optional(),
})
.superRefine((data, ctx) => {
if (data.destinationType === "s3") {
if (!data.accessKey)
ctx.addIssue({ code: "custom", path: ["accessKey"], message: "Access Key is required" });
if (!data.secretAccessKey)
ctx.addIssue({ code: "custom", path: ["secretAccessKey"], message: "Secret Access Key is required" });
if (!data.bucket)
ctx.addIssue({ code: "custom", path: ["bucket"], message: "Bucket is required" });
if (!data.endpoint)
ctx.addIssue({ code: "custom", path: ["endpoint"], message: "Endpoint is required" });
} else {
if (!data.host)
ctx.addIssue({ code: "custom", path: ["host"], message: "Host is required" });
if (!data.username)
ctx.addIssue({ code: "custom", path: ["username"], message: "Username is required" });
if (!data.password)
ctx.addIssue({ code: "custom", path: ["password"], message: "Password is required" });
}
});

export const apiFindOneDestination = z.object({
Expand All @@ -84,5 +115,30 @@ export const apiUpdateDestination = createSchema
})
.required()
.extend({
destinationType: z.enum(["s3", "sftp", "ftp"]).default("s3"),
host: z.string().optional(),
port: z.number().optional(),
username: z.string().optional(),
password: z.string().optional(),
basePath: z.string().optional(),
serverId: z.string().optional(),
})
.superRefine((data, ctx) => {
if (data.destinationType === "s3") {
if (!data.accessKey)
ctx.addIssue({ code: "custom", path: ["accessKey"], message: "Access Key is required" });
if (!data.secretAccessKey)
ctx.addIssue({ code: "custom", path: ["secretAccessKey"], message: "Secret Access Key is required" });
if (!data.bucket)
ctx.addIssue({ code: "custom", path: ["bucket"], message: "Bucket is required" });
if (!data.endpoint)
ctx.addIssue({ code: "custom", path: ["endpoint"], message: "Endpoint is required" });
} else {
if (!data.host)
ctx.addIssue({ code: "custom", path: ["host"], message: "Host is required" });
if (!data.username)
ctx.addIssue({ code: "custom", path: ["username"], message: "Username is required" });
if (!data.password)
ctx.addIssue({ code: "custom", path: ["password"], message: "Password is required" });
}
});
6 changes: 3 additions & 3 deletions packages/server/src/utils/backups/compose.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import { findEnvironmentById } from "@dokploy/server/services/environment";
import { findProjectById } from "@dokploy/server/services/project";
import { sendDatabaseBackupNotifications } from "../notifications/database-backup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getBackupCommand, getS3Credentials, normalizeS3Path } from "./utils";
import { getBackupCommand, getRcloneFlags, getRcloneRemote, normalizeS3Path } from "./utils";

export const runComposeBackup = async (
compose: Compose,
Expand All @@ -29,8 +29,8 @@ export const runComposeBackup = async (
});

try {
const rcloneFlags = getS3Credentials(destination);
const rcloneDestination = `:s3:${destination.bucket}/${bucketDestination}`;
const rcloneFlags = getRcloneFlags(destination);
const rcloneDestination = getRcloneRemote(destination, bucketDestination);
const rcloneCommand = `rclone rcat ${rcloneFlags.join(" ")} "${rcloneDestination}"`;

const backupCommand = getBackupCommand(
Expand Down
6 changes: 3 additions & 3 deletions packages/server/src/utils/backups/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import { startLogCleanup } from "../access-log/handler";
import { cleanupAll } from "../docker/utils";
import { sendDockerCleanupNotifications } from "../notifications/docker-cleanup";
import { execAsync, execAsyncRemote } from "../process/execAsync";
import { getS3Credentials, normalizeS3Path, scheduleBackup } from "./utils";
import { getRcloneFlags, getRcloneRemote, normalizeS3Path, scheduleBackup } from "./utils";

export const initCronJobs = async () => {
console.log("Setting up cron jobs....");
Expand Down Expand Up @@ -129,9 +129,9 @@ export const keepLatestNBackups = async (
if (!backup.keepLatestCount) return;

try {
const rcloneFlags = getS3Credentials(backup.destination);
const rcloneFlags = getRcloneFlags(backup.destination);
const appName = getServiceAppName(backup);
const backupFilesPath = `:s3:${backup.destination.bucket}/${appName}/${normalizeS3Path(backup.prefix)}`;
const backupFilesPath = getRcloneRemote(backup.destination, `${appName}/${normalizeS3Path(backup.prefix)}`);

// --include "*.sql.gz" or "*.zip" ensures nothing other than the Dokploy backup files is touched by rclone
const rcloneList = `rclone lsf ${rcloneFlags.join(" ")} --include "*${backup.databaseType === "web-server" ? ".zip" : ".sql.gz"}" ${backupFilesPath}`;
Expand Down
Loading