feat: build process

This commit is contained in:
Derock 2024-04-21 22:05:05 -04:00
parent ae243559da
commit 1a104b38ee
No known key found for this signature in database
18 changed files with 686 additions and 33 deletions

2
DEVELOPMENT.md Normal file
View file

@ -0,0 +1,2 @@
Developing on WINDOWS is not supported, and is not planned to be supported. Please use WSL2 or a Linux VM.
The application may run, but certain features may not work as expected due to its reliance on a UNIX-like FHS environment.

View file

@ -21,6 +21,7 @@
pkgs.nodePackages.pnpm
pkgs.nodePackages.typescript
pkgs.nodePackages.typescript-language-server
pkgs.nixpacks
];
};
});

View file

@ -48,6 +48,7 @@
"clsx": "^2.1.0",
"common-tags": "^1.8.2",
"cookie": "^0.6.0",
"datastructures-js": "^13.0.0",
"date-fns": "^3.0.6",
"docker-cli-js": "^2.10.0",
"docker-modem": "^5.0.3",
@ -63,6 +64,7 @@
"next-nprogress-bar": "^2.1.2",
"next-themes": "^0.2.1",
"node-os-utils": "^1.3.7",
"pretty-bytes": "^6.1.1",
"react": "18.2.0",
"react-dom": "18.2.0",
"react-hook-form": "^7.49.2",

View file

@ -98,6 +98,9 @@ dependencies:
cookie:
specifier: ^0.6.0
version: 0.6.0
datastructures-js:
specifier: ^13.0.0
version: 13.0.0
date-fns:
specifier: ^3.0.6
version: 3.0.6
@ -143,6 +146,9 @@ dependencies:
node-os-utils:
specifier: ^1.3.7
version: 1.3.7
pretty-bytes:
specifier: ^6.1.1
version: 6.1.1
react:
specifier: 18.2.0
version: 18.2.0
@ -790,6 +796,54 @@ packages:
kuler: 2.0.0
dev: false
/@datastructures-js/binary-search-tree@5.3.2:
resolution: {integrity: sha512-8Y6SqH9wncY5HQMWbazjADyI5Sjop7VFVTPAcYoWWE8pHIVmAuS2CWCQ5wgwNEPUAnJMUz5idRTXmjtl5gwDCQ==}
dev: false
/@datastructures-js/deque@1.0.4:
resolution: {integrity: sha512-zlgVSsxqiAd+scLUILvx8E887o+6kYds9/d4DCM/mFOuUITUlPG/r3u5iPZjzW3o6XPPi+p66p3Kf1+wFxYvLQ==}
dev: false
/@datastructures-js/graph@5.3.0:
resolution: {integrity: sha512-Owbn40ha2W22i6yGSTmOb7ppL6RZKCeFf9tVKqFVXClb5rWQgscHDIyuHXFcNkdlsrq3O0E7zvJ45O60/8m/mg==}
dependencies:
'@datastructures-js/queue': 3.1.4
dev: false
/@datastructures-js/heap@4.3.3:
resolution: {integrity: sha512-UcUu/DLh/aM4W3C8zZfwxxm6/6FIZUlm3mcAXuNOCa6Aj4iizNvNXQyb8DjZQH2jKSQbMRyNlngP6TPimuGjpQ==}
dev: false
/@datastructures-js/linked-list@6.1.1:
resolution: {integrity: sha512-nb463C34dh8gVuicDpl44WP7Cz6SGNG9++U7OTzG5plQMLTjoitvCaCdJug1BAHutC4FBYagfBdfiPJORjvslA==}
dev: false
/@datastructures-js/priority-queue@6.3.1:
resolution: {integrity: sha512-eoxkWql/j0VJ0UFMFTpnyJz4KbEEVQ6aZ/JuJUgenu0Im4tYKylAycNGsYCHGXiVNEd7OKGVwfx1Ac3oYkuu7A==}
dependencies:
'@datastructures-js/heap': 4.3.3
dev: false
/@datastructures-js/queue@3.1.4:
resolution: {integrity: sha512-8QqkdAJQDDd25OBX28lKj7HXD+Cxs6Ee0ogJkZUjD5R1fAvRAWsrCeKop0szqANYjMwdQQd75vyc3Cm8qNJH+Q==}
dev: false
/@datastructures-js/queue@4.2.3:
resolution: {integrity: sha512-GWVMorC/xi2V2ta+Z/CPgPGHL2ZJozcj48g7y2nIX5GIGZGRrbShSHgvMViJwHJurUzJYOdIdRZnWDRrROFwJA==}
dev: false
/@datastructures-js/set@4.2.1:
resolution: {integrity: sha512-qGJhgclFpV7JTPDEJ/ftrFmIf8s6t5Y9nhc5KffuPt0UjCVc1infAltX7R/XFEBF+f7RAqYl/NIZaOekvU88zg==}
dev: false
/@datastructures-js/stack@3.1.4:
resolution: {integrity: sha512-+2+SOvKcNizQaR31AL1Sox4p5rvAlZfvXO9gi6qWrXMvLqb3S5/3t0ZRAefA0ZabQz0LCXOc8aTeHSWSOMrNCQ==}
dev: false
/@datastructures-js/trie@4.2.2:
resolution: {integrity: sha512-wZFXic9OLc+BgtnUYr0EIaAZLaPaNt0r1zjf2xJ5JhGwuK0w2vwlOMMj9RHgIeOY+UM0J76CcrmN/wn2LlEYkA==}
dev: false
/@drizzle-team/studio@0.0.39:
resolution: {integrity: sha512-c5Hkm7MmQC2n5qAsKShjQrHoqlfGslB8+qWzsGGZ+2dHMRTNG60UuzalF0h0rvBax5uzPXuGkYLGaQ+TUX3yMw==}
dependencies:
@ -4180,6 +4234,21 @@ packages:
engines: {node: '>= 12'}
dev: false
/datastructures-js@13.0.0:
resolution: {integrity: sha512-3KLehk8sGWS2IuncLv0/Wxl1a1VtvQagLDtGNBw3SNbi7893PPxWT8dac+cuHA/N4V25BQL6pHF7IVjdaZFB3Q==}
dependencies:
'@datastructures-js/binary-search-tree': 5.3.2
'@datastructures-js/deque': 1.0.4
'@datastructures-js/graph': 5.3.0
'@datastructures-js/heap': 4.3.3
'@datastructures-js/linked-list': 6.1.1
'@datastructures-js/priority-queue': 6.3.1
'@datastructures-js/queue': 4.2.3
'@datastructures-js/set': 4.2.1
'@datastructures-js/stack': 3.1.4
'@datastructures-js/trie': 4.2.2
dev: false
/date-fns@3.0.6:
resolution: {integrity: sha512-W+G99rycpKMMF2/YD064b2lE7jJGUe+EjOES7Q8BIGY8sbNdbgcs9XFTZwvzc9Jx1f3k7LB7gZaZa7f8Agzljg==}
dev: false
@ -7178,6 +7247,11 @@ packages:
hasBin: true
dev: true
/pretty-bytes@6.1.1:
resolution: {integrity: sha512-mQUvGU6aUFQ+rNvTIAcZuWGRT9a6f6Yrg9bHs4ImKF+HZCEK+plBvnAZYSIQztknZF2qnzNtr6F8s0+IuptdlQ==}
engines: {node: ^14.13.1 || >=16.0.0}
dev: false
/prop-types@15.8.1:
resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==}
dependencies:

View file

@ -2,6 +2,7 @@
import { formatDistanceToNowStrict } from "date-fns";
import { ClipboardIcon } from "lucide-react";
import prettyBytes from "pretty-bytes";
import { useEffect, useState } from "react";
import { CgSpinner } from "react-icons/cg";
import { FaGear } from "react-icons/fa6";
@ -94,11 +95,14 @@ export function ContainerEntry({
<TableCell>{uptimeText ?? "N/A"}</TableCell>
<TableCell>{mainContainer?.node ?? "unknown"}</TableCell>
<TableCell>{mainContainer?.cpu ?? "?"}</TableCell>
<TableCell>{mainContainer?.memory ?? "?"}</TableCell>
<TableCell>{mainContainer?.cpu?.toFixed(2) ?? "?"}%</TableCell>
<TableCell>
{mainContainer?.network?.rx ?? "N/A"} /{" "}
{mainContainer?.network?.tx ?? "N/A"}
{prettyBytes(mainContainer?.usedMemory ?? 0)} /{" "}
{prettyBytes(mainContainer?.totalMemory ?? 0)}
</TableCell>
<TableCell>
{prettyBytes(mainContainer?.network?.rx ?? 0)} /{" "}
{prettyBytes(mainContainer?.network?.tx ?? 0)}
</TableCell>
<TableCell className="text-right">
<DropdownMenu>

View file

@ -1,5 +1,6 @@
import { eq } from "drizzle-orm";
import { z } from "zod";
import { BuildManager } from "~/server/build/BuildManager";
import { service } from "~/server/db/schema";
import { buildDockerStackFile } from "~/server/docker/stack";
import logger from "~/server/utils/logger";
@ -33,6 +34,10 @@ export const deployProject = authenticatedProcedure
},
});
// run builds
// TODO: run only if needed
await BuildManager.getInstance().runBuilds(services);
const dockerStackFile = await buildDockerStackFile(services);
logger.debug("deploying stack", { dockerStackFile });

View file

@ -1,5 +1,5 @@
import assert from "assert";
import type Dockerode from "dockerode";
import { type ContainerStats } from "dockerode";
import { z } from "zod";
import { projectMiddleware } from "~/server/api/middleware/project";
import { serviceMiddleware } from "~/server/api/middleware/service";
@ -17,7 +17,8 @@ const zContainerDetails = z.object({
node: z.string().optional(),
cpu: z.number().optional(),
memory: z.number().optional(),
totalMemory: z.number().optional(),
usedMemory: z.number().optional(),
network: z
.object({
tx: z.number().optional(),
@ -175,36 +176,74 @@ export const getServiceContainers = authenticatedProcedure
return;
}
let containerStats: Dockerode.ContainerStats | null = null;
let containerStats: ContainerStats | null = null;
let formattedContainerStats:
| z.infer<typeof zContainerDetails>
| undefined = undefined;
if (task.Status?.ContainerStatus?.ContainerID) {
containerStats = await ctx.docker
.getContainer(task.Status.ContainerStatus.ContainerID)
.stats({ "one-shot": true, stream: false })
.stats({ stream: false })
.catch(docker404ToNull);
}
if (containerStats) {
// calculate container stats
// https://docs.docker.com/engine/api/v1.45/#tag/Container/operation/ContainerStats
let usedMemory: number | undefined;
let cpuPercent: number | undefined;
let totalMemory: number | undefined;
try {
usedMemory =
containerStats.memory_stats.usage -
(containerStats.memory_stats.stats?.cache || 0);
totalMemory = containerStats.memory_stats.limit;
const cpuDelta =
containerStats.cpu_stats.cpu_usage.total_usage -
containerStats.precpu_stats.cpu_usage.total_usage;
const systemCpuDelta =
containerStats.cpu_stats.system_cpu_usage -
containerStats.precpu_stats.system_cpu_usage;
const numCpus = containerStats.cpu_stats.online_cpus;
cpuPercent = (cpuDelta / systemCpuDelta) * numCpus * 100;
// if is nan, set to undefined
if (isNaN(usedMemory)) usedMemory = undefined;
if (isNaN(cpuPercent)) cpuPercent = undefined;
if (isNaN(totalMemory)) totalMemory = undefined;
} catch (error) {
logger.debug(
"Failed to calculate container stats. **THIS IS NOT A BUG if the service was recently redeployed.**",
error,
);
}
formattedContainerStats = {
containerId: task.Status?.ContainerStatus?.ContainerID ?? "",
containerCreatedAt: new Date(
container ? container.Created * 1000 : task.CreatedAt ?? 0,
).getTime(),
error: task.Status?.Err,
node: nodes.find((node) => node.ID === task.NodeID)?.Description
?.Hostname,
cpu: cpuPercent,
usedMemory,
totalMemory,
network: {
tx: containerStats?.networks?.eth0?.tx_bytes,
rx: containerStats?.networks?.eth0?.rx_bytes,
},
};
}
return {
slot: task.Slot,
container: containerStats
? {
containerId: task.Status?.ContainerStatus?.ContainerID ?? "",
containerCreatedAt: new Date(
container ? container.Created * 1000 : task.CreatedAt ?? 0,
).getTime(),
error: task.Status?.Err,
node: nodes.find((node) => node.ID === task.NodeID)?.Description
?.Hostname,
cpu: containerStats?.cpu_stats?.cpu_usage?.total_usage,
memory: containerStats?.memory_stats?.usage,
network: {
tx: containerStats?.networks?.eth0?.tx_bytes,
rx: containerStats?.networks?.eth0?.rx_bytes,
},
}
: undefined,
container: formattedContainerStats,
task: {
taskMessage: task.Status?.Message,

View file

@ -0,0 +1,113 @@
import assert from "assert";
import { Queue } from "datastructures-js";
import { db } from "../db";
import { serviceDeployment } from "../db/schema";
import { ServiceDeploymentStatus, ServiceSource } from "../db/types";
import { type Service } from "../docker/stack";
import logger from "../utils/logger";
import BuildTask from "./BuildTask";
/**
 * Singleton that queues and runs service builds, limiting how many
 * run concurrently.
 */
export class BuildManager {
  private static logger = logger.child({ module: "builds" });
  private static instance = new BuildManager();

  public static getInstance() {
    return BuildManager.instance;
  }

  // CONFIGURATION --------
  public readonly MAX_CONCURRENT_BUILDS = 5; // TODO: make this configurable

  // STATE --------
  // deploymentId -> task; entries removed when the task settles
  private tasks = new Map<string, BuildTask>();
  // deploymentIds waiting for a free build slot
  private queue = new Queue<string>();
  // deploymentIds currently building (bounds concurrency)
  private ongoingTasks = new Set<string>();
  // re-entrancy guard for processQueue
  private processing = false;

  // METHODS --------

  /**
   * Enqueues a build for the given service/deployment.
   * Resolves with the built docker image tag, rejects on build failure.
   */
  public startBuild(serviceId: string, deploymentId: string) {
    return new Promise<string>((resolve, reject) => {
      const task = new BuildTask(serviceId, deploymentId, resolve, reject);
      this.tasks.set(deploymentId, task);
      this.queue.enqueue(deploymentId);
      this.processQueue();
    });
  }

  /**
   * Creates a deployment row and runs a build for every non-Docker service,
   * storing the resulting image tag on `service.finalizedDockerImage`.
   */
  public async runBuilds(services: Service[]) {
    await Promise.all(
      services.map(async (service) => {
        if (service.source !== ServiceSource.Docker) {
          const [deployment] = await db
            .insert(serviceDeployment)
            .values({
              serviceId: service.id,
              status: ServiceDeploymentStatus.BuildPending,
            })
            .returning()
            .execute();

          assert(deployment);

          service.finalizedDockerImage =
            await BuildManager.getInstance().startBuild(
              service.id,
              deployment.id,
            );
        }
      }),
    );
  }

  /**
   * Runs a single dequeued task to completion. Always releases the
   * concurrency slot (`ongoingTasks`) and re-pumps the queue, even when the
   * task is missing or fails — the original early-return on a missing task
   * leaked the slot forever.
   */
  private async processNext(deploymentId: string) {
    try {
      const task = this.tasks.get(deploymentId);
      if (!task) {
        BuildManager.logger.warn(`Task not found: ${deploymentId}`);
        return;
      }

      BuildManager.logger.info(`Processing task: ${deploymentId}`);
      await task.build();
    } catch (error) {
      BuildManager.logger.error(error);
    } finally {
      this.tasks.delete(deploymentId);
      this.ongoingTasks.delete(deploymentId);
      this.processQueue();
    }
  }

  /**
   * Starts queued tasks until the queue is empty or MAX_CONCURRENT_BUILDS
   * are in flight. Dequeues synchronously here (rather than inside
   * processNext) so the loop condition always sees an up-to-date queue.
   */
  private processQueue() {
    if (this.processing) {
      return;
    }
    this.processing = true;

    try {
      while (
        !this.queue.isEmpty() &&
        this.ongoingTasks.size < this.MAX_CONCURRENT_BUILDS
      ) {
        const deploymentId = this.queue.dequeue();
        if (deploymentId == null) break; // dequeue on empty yields null

        this.ongoingTasks.add(deploymentId);
        void this.processNext(deploymentId).catch((err) => {
          BuildManager.logger.error("Failed to process task " + deploymentId);
          BuildManager.logger.error(err);
        });
      }
    } finally {
      this.processing = false;
    }
  }
}

View file

@ -0,0 +1,147 @@
import assert from "assert";
import { eq } from "drizzle-orm";
import { mkdirSync } from "fs";
import { rm, rmdir } from "fs/promises";
import path from "path";
import { db } from "../db";
import { service, serviceDeployment } from "../db/schema";
import {
ServiceBuildMethod,
ServiceDeploymentStatus,
ServiceSource,
} from "../db/types";
import Nixpacks from "./builders/Nixpacks";
import GitHubSource from "./sources/GitHub";
import BuilderLogger from "./utils/BuilderLogger";
/**
 * One build of one service: checks the source out into a scratch directory,
 * runs the configured builder, reports status transitions to the DB, and
 * cleans up its files afterwards.
 */
export default class BuildTask {
  static BASE_BUILD_PATH = "/var/tmp";

  // absolute path of this build's log file
  private readonly logFilePath: string;
  // sink that persists build output to logFilePath
  private readonly buildLogger: BuilderLogger;
  // scratch directory the source is checked out and built in
  private readonly workingDirectory: string;

  private status = ServiceDeploymentStatus.BuildPending;

  // tail of the status-update promise chain; updates are chained onto it so
  // they reach the database strictly in call order (no interleaving races)
  private pendingStatusUpdatePromise: Promise<unknown> | null = null;

  /**
   * @param serviceId      id of the service being built
   * @param deploymentId   id of the service_deployment row tracking this build
   * @param finishCallback invoked with the docker image tag on success
   * @param errorCallback  invoked with the error on failure
   */
  constructor(
    private readonly serviceId: string,
    private readonly deploymentId: string,
    private readonly finishCallback: (imageTag: string) => void,
    private readonly errorCallback: (error: unknown) => void,
  ) {
    this.workingDirectory = path.join(
      BuildTask.BASE_BUILD_PATH,
      "hostforgebuild-" + this.deploymentId,
    );

    this.logFilePath = path.join(
      BuildTask.BASE_BUILD_PATH,
      "hostforgebuild-" + this.deploymentId + ".log",
    );

    // create the logger and make directories
    this.buildLogger = new BuilderLogger(this.logFilePath);
    mkdirSync(this.workingDirectory, { recursive: true });

    // record the initial status
    void this.updateBuildStatus(this.status);
  }

  /**
   * Downloads the source, builds it, and returns the docker image tag.
   * Invokes finishCallback/errorCallback accordingly; rethrows on failure.
   * Always cleans up the scratch directory and log file.
   */
  public async build() {
    try {
      void this.updateBuildStatus(ServiceDeploymentStatus.Building);

      // get the service details
      const serviceDetails = await this.fetchServiceDetails();

      const configuration = {
        fileLogger: this.buildLogger,
        workDirectory: this.workingDirectory,
        serviceConfiguration: serviceDetails,
      };

      // pull the code
      switch (serviceDetails.source) {
        case ServiceSource.GitHub: {
          await new GitHubSource(configuration).downloadCode();
          break;
        }

        default: {
          throw new Error("Unknown source");
        }
      }

      let dockerImageTag = this.deploymentId;

      // build the project
      switch (serviceDetails.buildMethod) {
        case ServiceBuildMethod.Nixpacks: {
          dockerImageTag = await new Nixpacks(configuration).build();
          break;
        }

        default: {
          throw new Error("Unknown build method");
        }
      }

      // aand we're done
      void this.updateBuildStatus(ServiceDeploymentStatus.Deploying);
      this.finishCallback(dockerImageTag);
      return dockerImageTag;
    } catch (error) {
      void this.updateBuildStatus(ServiceDeploymentStatus.Failed);
      this.errorCallback(error);
      throw error;
    } finally {
      await this.cleanup();
    }
  }

  /**
   * Cleans up all the files created by the build task.
   *
   * ENSURE THAT THIS FUNCTION IS CALLED WHEN THE BUILD TASK IS DONE
   * EVEN IF THE BUILD TASK FAILS
   */
  public async cleanup() {
    // need to wait for fd to close before deleting the log file
    await this.buildLogger.finish();

    // fs.rm replaces the deprecated fs.rmdir(..., { recursive: true });
    // `force` swallows ENOENT when the path was never created
    await Promise.allSettled([
      rm(this.workingDirectory, { recursive: true, force: true }),
      rm(this.logFilePath, { force: true }),
    ]);
  }

  /** Loads the service row for this build; throws if it no longer exists. */
  private async fetchServiceDetails() {
    const [serviceDetails] = await db
      .select()
      .from(service)
      .where(eq(service.id, this.serviceId));

    assert(serviceDetails, "Service not found");
    return serviceDetails;
  }

  /**
   * Persists a status transition. Updates are appended to a single promise
   * chain so concurrent callers cannot interleave their DB writes.
   */
  private updateBuildStatus(status: ServiceDeploymentStatus) {
    const run = async () => {
      try {
        await db
          .update(serviceDeployment)
          .set({ status })
          .where(eq(serviceDeployment.id, this.deploymentId));
        this.status = status;
      } catch {
        // in the event that the service is deleted while building, this will
        // fail — intentionally best-effort, so swallow and keep the chain alive
      }
    };

    this.pendingStatusUpdatePromise = (
      this.pendingStatusUpdatePromise ?? Promise.resolve()
    ).then(run);
    return this.pendingStatusUpdatePromise;
  }
}

View file

@ -0,0 +1,20 @@
import { type service } from "../../db/schema";
import type BuilderLogger from "../utils/BuilderLogger";
/**
 * Abstract base for build strategies (e.g. Nixpacks).
 *
 * @param configuration shared build context: the build log sink, the
 * checked-out working directory, and the service's database row.
 */
export default class BaseBuilder {
  constructor(
    public readonly configuration: {
      fileLogger: BuilderLogger;
      workDirectory: string;
      serviceConfiguration: typeof service.$inferSelect;
    },
  ) {}

  /**
   * Builds the service, returning the docker tag.
   * Subclasses must override this; the base implementation always rejects.
   */
  public build(): Promise<string> {
    return Promise.reject(new Error("Not implemented"));
  }
}

View file

@ -0,0 +1,35 @@
import { spawn } from "child_process";
import { LogLevel } from "../utils/BuilderLogger";
import { joinPathLimited, waitForExit } from "../utils/utils";
import BaseBuilder from "./BaseBuilder";
/**
 * Builds a service with the `nixpacks` CLI, producing a docker image
 * named after the service id.
 */
export default class Nixpacks extends BaseBuilder {
  public async build(): Promise<string> {
    const { fileLogger, workDirectory, serviceConfiguration } =
      this.configuration;

    fileLogger.write(LogLevel.Notice, "> Building the service with Nixpacks.");

    // resolve the build path, refusing anything outside the work directory
    const buildPath = joinPathLimited(
      workDirectory,
      serviceConfiguration.buildPath,
    );

    // the image tag doubles as the service id
    const imageTag = serviceConfiguration.id;
    const child = spawn("nixpacks", ["build", buildPath, "--name", imageTag]);

    // stream nixpacks output into the build log, then wait for it to finish
    fileLogger.withChildprocess(child);
    await waitForExit(child);

    return imageTag;
  }
}

View file

@ -0,0 +1,20 @@
import { type service } from "~/server/db/schema";
import type BuilderLogger from "../utils/BuilderLogger";
/**
 * Abstract base for code sources (e.g. GitHub).
 *
 * @param configuration shared build context: the build log sink, the
 * directory to download into, and the service's database row.
 */
export default class BaseSource {
  constructor(
    public readonly configuration: {
      fileLogger: BuilderLogger;
      workDirectory: string;
      serviceConfiguration: typeof service.$inferSelect;
    },
  ) {}

  /**
   * Pulls the code from the source.
   * Subclasses must override this; the base implementation always rejects.
   */
  public downloadCode(): Promise<void> {
    return Promise.reject(new Error("Not implemented"));
  }
}

View file

@ -0,0 +1,62 @@
import assert from "assert";
import { spawn } from "child_process";
import { LogLevel } from "../utils/BuilderLogger";
import { waitForExit } from "../utils/utils";
import BaseSource from "./BaseSource";
/**
 * Downloads service code by shallow-cloning a GitHub repository
 * (including submodules) into the working directory.
 */
export default class GitHubSource extends BaseSource {
  public async downloadCode(): Promise<void> {
    // resolve Git URL
    const githubUsername =
      this.configuration.serviceConfiguration.githubUsername;
    const githubRepository =
      this.configuration.serviceConfiguration.githubRepository;
    const githubBranch = this.configuration.serviceConfiguration.githubBranch;

    assert(githubUsername, "GitHub username is required");
    assert(githubRepository, "GitHub repository is required");

    const gitUrl = `https://github.com/${encodeURIComponent(
      githubUsername,
    )}/${encodeURIComponent(githubRepository)}`;

    // build git clone command
    const args = [
      // repo url
      "clone",
      gitUrl,

      // get submodules
      "--recurse-submodules",

      // do not clone the entire history
      "--depth",
      "1",
    ];

    // if branch specified, add it to the command
    if (githubBranch) {
      args.push("--branch", githubBranch);
    }

    // add the work directory
    args.push(this.configuration.workDirectory);

    // run the git command
    this.configuration.fileLogger.write(
      LogLevel.Notice,
      `> Cloning the repository.\n$ git ${args.join(" ")}`,
    );

    const git = spawn("git", args, {
      cwd: this.configuration.workDirectory,
    });

    // set up logging
    this.configuration.fileLogger.withChildprocess(git);

    // wait for exit
    await waitForExit(git);

    // report completion through the build log (was a stray console.log,
    // which bypassed the per-build log file)
    this.configuration.fileLogger.write(
      LogLevel.Notice,
      "> Downloaded code from GitHub.",
    );
  }
}

View file

@ -0,0 +1,67 @@
import { type ChildProcessWithoutNullStreams } from "child_process";
import { createWriteStream, type WriteStream } from "fs";
import { Transform } from "node:stream";
export enum LogLevel {
  /** Command Stdout */
  Stdout,
  /** Command Stderr */
  Stderr,
  /** Messages that did not originate from the command */
  Notice,
}

/**
 * A very simple file logger for build-process output. Each entry is appended
 * to the log file as one newline-delimited JSON record:
 * `{ l: level, m: message, t: timestampMs }`.
 */
export default class BuilderLogger {
  private logFileStream: WriteStream;

  constructor(public readonly logFilePath: string) {
    // open in append mode so an existing log is never clobbered
    this.logFileStream = createWriteStream(this.logFilePath, { flags: "a" });
  }

  /** Appends one record; returns the stream's backpressure flag. */
  public write(level: LogLevel, message: string) {
    const record = this.formatMessage(level, message);
    return this.logFileStream.write(record, "utf8");
  }

  /** A Transform that forwards every chunk into the log at `level`. */
  public asWriteStream(level: LogLevel) {
    const sink = new Transform({
      transform: (chunk, _encoding, done) => {
        this.write(level, String(chunk));
        done();
      },
    });
    return sink;
  }

  /** Captures a child process's stdout and stderr into the log. */
  public withChildprocess(cp: ChildProcessWithoutNullStreams) {
    cp.stdout.pipe(this.asWriteStream(LogLevel.Stdout));
    cp.stderr.pipe(this.asWriteStream(LogLevel.Stderr));
  }

  /** Closes the underlying file descriptor; resolves once it is released. */
  public finish() {
    return new Promise<void>((resolve, reject) =>
      this.logFileStream.close((err) => (err ? reject(err) : resolve())),
    );
  }

  /** Serializes one entry as a JSON line. */
  private formatMessage(level: LogLevel, message: string) {
    return JSON.stringify({ l: level, m: message, t: Date.now() }) + "\n";
  }
}

View file

@ -0,0 +1,33 @@
import { type ChildProcess } from "child_process";
import path from "path";
/**
* Joins the path but makes sure you don't go above the root path
* @param rootPath
* @param paths
* @returns
*/
/**
 * Joins the path but makes sure you don't go above the root path.
 *
 * Uses path.relative for the containment check: a plain
 * `startsWith(rootPath)` wrongly accepted sibling paths such as
 * "/root-evil" for root "/root".
 *
 * @param rootPath directory the result must stay inside
 * @param paths segments to join onto rootPath
 * @returns the joined path (equal to rootPath or inside it)
 * @throws Error if the joined path escapes rootPath
 */
export function joinPathLimited(rootPath: string, ...paths: string[]): string {
  const joinedPath = path.join(rootPath, ...paths);
  const rel = path.relative(rootPath, joinedPath);

  // rel === "" means joinedPath IS the root; a leading ".." segment or an
  // absolute relative path (different drive/root) means it escaped
  if (
    rel === ".." ||
    rel.startsWith(".." + path.sep) ||
    path.isAbsolute(rel)
  ) {
    throw new Error("Path is outside of the root path");
  }

  return joinedPath;
}
/**
 * Resolves when the child process exits with code 0; rejects otherwise.
 *
 * @param child the spawned process to wait on
 * @throws Error (via rejection) on non-zero exit, signal termination,
 *         or spawn failure
 */
export function waitForExit(child: ChildProcess) {
  return new Promise<void>((resolve, reject) => {
    child.on("exit", (code, signal) => {
      if (code === 0) {
        resolve();
      } else if (signal !== null) {
        // code is null when the process was killed by a signal — report
        // the signal instead of a misleading "exited with code null"
        reject(new Error(`Child process was killed by signal ${signal}`));
      } else {
        reject(new Error(`Child process exited with code ${code}`));
      }
    });

    // spawn errors (e.g. binary not found) never emit "exit"
    child.on("error", (err) => {
      reject(err);
    });
  });
}

View file

@ -13,6 +13,7 @@ import {
DockerRestartCondition,
ServiceBuildMethod,
type DockerVolumeType,
type ServiceDeploymentStatus,
type ServicePortType,
type ServiceSource,
} from "./types";
@ -253,7 +254,8 @@ export const serviceDeployment = sqliteTable("service_deployment", {
createdAt: integer("created_at").default(now).notNull(),
//
buildLogs: blob("build_logs"), // COMPRESSED!
status: integer("status").$type<ServiceDeploymentStatus>().notNull(),
});
/**

View file

@ -128,4 +128,29 @@ export enum DockerVolumeType {
Tmpfs,
}
// export enum
/**
 * Lifecycle states of a single service deployment.
 *
 * NOTE(review): members are persisted by their numeric value in the
 * `service_deployment.status` integer column — append new states at the
 * end and never reorder or remove existing ones.
 */
export enum ServiceDeploymentStatus {
  /**
   * The service is waiting to be built. This may be because there are other builds in progress.
   */
  BuildPending,

  /**
   * The service is being built.
   */
  Building,

  /**
   * The service is deploying.
   */
  Deploying,

  /**
   * The deployment was successful.
   */
  Success,

  /**
   * The deployment failed.
   */
  Failed,
}

View file

@ -1,5 +1,4 @@
import assert from "assert";
import { parse } from "dotenv";
import {
type service,
type serviceDomain,
@ -78,17 +77,20 @@ export async function buildDockerStackFile(
rollback_config: {
parallelism: 0,
order: service.zeroDowntime === 1 ? "start-first" : "stop-first",
order: service.zeroDowntime ? "start-first" : "stop-first",
},
update_config: {
parallelism: 0,
order: service.zeroDowntime === 1 ? "start-first" : "stop-first",
order: service.zeroDowntime ? "start-first" : "stop-first",
},
},
entrypoint: service.entrypoint ?? undefined,
environment: service.environment ? parse(service.environment) : undefined,
// environment: service.environment ? parse(service.environment) : undefined,
environment: {
EULA: "TRUE",
},
image: service.finalizedDockerImage ?? service.dockerImage ?? undefined,
ports: service.ports.map((port) => ({
mode:
@ -99,7 +101,7 @@ export async function buildDockerStackFile(
})),
healthcheck: {
disable: service.healthcheckEnabled === 0,
disable: service.healthcheckEnabled,
test: service.healthcheckCommand ?? undefined,
interval: service.healthcheckInterval ?? undefined,
timeout: service.healthcheckTimeout ?? undefined,