A simple CLI tool to spin up OpenBSD virtual machines using QEMU with minimal fuss.

Add image management commands and enhance ORAS setup

- Introduced commands for managing VM images: push, pull, tag, login, logout, and rmi.
- Implemented image listing functionality with a formatted table display.
- Enhanced ORAS binary setup to automatically download and install the specified version.
- Added database migration for images, including creation and modification of the images table.
- Refactored context and database handling for improved clarity and functionality.

+665 -88
+2
deno.json
··· 8 8 }, 9 9 "imports": { 10 10 "@cliffy/command": "jsr:@cliffy/command@^1.0.0-rc.8", 11 + "@cliffy/prompt": "jsr:@cliffy/prompt@^1.0.0-rc.8", 11 12 "@cliffy/flags": "jsr:@cliffy/flags@^1.0.0-rc.8", 12 13 "@cliffy/table": "jsr:@cliffy/table@^1.0.0-rc.8", 13 14 "@db/sqlite": "jsr:@db/sqlite@^0.12.0", ··· 15 16 "@paralleldrive/cuid2": "npm:@paralleldrive/cuid2@^3.0.4", 16 17 "@soapbox/kysely-deno-sqlite": "jsr:@soapbox/kysely-deno-sqlite@^2.2.0", 17 18 "@std/assert": "jsr:@std/assert@1", 19 + "@std/io": "jsr:@std/io@^0.225.2", 18 20 "@std/toml": "jsr:@std/toml@^1.0.11", 19 21 "@zod/zod": "jsr:@zod/zod@^4.1.12", 20 22 "chalk": "npm:chalk@^5.6.2",
+45 -1
deno.lock
··· 1 1 { 2 2 "version": "5", 3 3 "specifiers": { 4 + "jsr:@cliffy/ansi@1.0.0-rc.8": "1.0.0-rc.8", 4 5 "jsr:@cliffy/command@^1.0.0-rc.8": "1.0.0-rc.8", 5 6 "jsr:@cliffy/flags@1.0.0-rc.8": "1.0.0-rc.8", 6 7 "jsr:@cliffy/flags@^1.0.0-rc.8": "1.0.0-rc.8", 7 8 "jsr:@cliffy/internal@1.0.0-rc.8": "1.0.0-rc.8", 9 + "jsr:@cliffy/keycode@1.0.0-rc.8": "1.0.0-rc.8", 10 + "jsr:@cliffy/prompt@^1.0.0-rc.8": "1.0.0-rc.8", 8 11 "jsr:@cliffy/table@1.0.0-rc.8": "1.0.0-rc.8", 9 12 "jsr:@cliffy/table@^1.0.0-rc.8": "1.0.0-rc.8", 10 13 "jsr:@db/sqlite@0.12": "0.12.0", ··· 13 16 "jsr:@soapbox/kysely-deno-sqlite@^2.2.0": "2.2.0", 14 17 "jsr:@std/assert@0.217": "0.217.0", 15 18 "jsr:@std/assert@1": "1.0.15", 19 + "jsr:@std/assert@~1.0.6": "1.0.15", 20 + "jsr:@std/bytes@^1.0.5": "1.0.6", 16 21 "jsr:@std/collections@^1.1.3": "1.1.3", 17 22 "jsr:@std/encoding@1": "1.0.10", 23 + "jsr:@std/encoding@~1.0.5": "1.0.10", 18 24 "jsr:@std/fmt@1": "1.0.8", 19 25 "jsr:@std/fmt@~1.0.2": "1.0.8", 20 26 "jsr:@std/fs@1": "1.0.19", 21 27 "jsr:@std/internal@^1.0.10": "1.0.12", 22 28 "jsr:@std/internal@^1.0.12": "1.0.12", 23 29 "jsr:@std/internal@^1.0.9": "1.0.12", 30 + "jsr:@std/io@~0.225.2": "0.225.2", 24 31 "jsr:@std/path@0.217": "0.217.0", 25 32 "jsr:@std/path@1": "1.1.2", 26 33 "jsr:@std/path@^1.1.1": "1.1.2", 34 + "jsr:@std/path@~1.0.6": "1.0.9", 27 35 "jsr:@std/text@~1.0.7": "1.0.16", 28 36 "jsr:@std/toml@^1.0.11": "1.0.11", 29 37 "jsr:@zod/zod@^4.1.12": "4.1.12", ··· 37 45 "npm:moniker@~0.1.2": "0.1.2" 38 46 }, 39 47 "jsr": { 48 + "@cliffy/ansi@1.0.0-rc.8": { 49 + "integrity": "ba37f10ce55bbfbdd8ddd987f91f029b17bce88385b98ba3058870f3b007b80c", 50 + "dependencies": [ 51 + "jsr:@cliffy/internal", 52 + "jsr:@std/encoding@~1.0.5" 53 + ] 54 + }, 40 55 "@cliffy/command@1.0.0-rc.8": { 41 56 "integrity": "758147790797c74a707e5294cc7285df665422a13d2a483437092ffce40b5557", 42 57 "dependencies": [ ··· 56 71 "@cliffy/internal@1.0.0-rc.8": { 57 72 "integrity": 
"34cdf2fad9b084b5aed493b138d573f52d4e988767215f7460daf0b918ff43d8" 58 73 }, 74 + "@cliffy/keycode@1.0.0-rc.8": { 75 + "integrity": "76dbf85a67ec0aea2e29ca049b8507b6b3f62a2a971bd744d8d3fc447c177cd9" 76 + }, 77 + "@cliffy/prompt@1.0.0-rc.8": { 78 + "integrity": "eba403ea1d47b9971bf2210fa35f4dc7ebd2aba87beec9540ae47552806e2f25", 79 + "dependencies": [ 80 + "jsr:@cliffy/ansi", 81 + "jsr:@cliffy/internal", 82 + "jsr:@cliffy/keycode", 83 + "jsr:@std/assert@~1.0.6", 84 + "jsr:@std/fmt@~1.0.2", 85 + "jsr:@std/path@~1.0.6", 86 + "jsr:@std/text" 87 + ] 88 + }, 59 89 "@cliffy/table@1.0.0-rc.8": { 60 90 "integrity": "8bbcdc2ba5e0061b4b13810a24e6f5c6ab19c09f0cce9eb691ccd76c7c6c9db5", 61 91 "dependencies": [ ··· 72 102 "@denosaurs/plug@1.1.0": { 73 103 "integrity": "eb2f0b7546c7bca2000d8b0282c54d50d91cf6d75cb26a80df25a6de8c4bc044", 74 104 "dependencies": [ 75 - "jsr:@std/encoding", 105 + "jsr:@std/encoding@1", 76 106 "jsr:@std/fmt@1", 77 107 "jsr:@std/fs", 78 108 "jsr:@std/path@1" ··· 95 125 "dependencies": [ 96 126 "jsr:@std/internal@^1.0.12" 97 127 ] 128 + }, 129 + "@std/bytes@1.0.6": { 130 + "integrity": "f6ac6adbd8ccd99314045f5703e23af0a68d7f7e58364b47d2c7f408aeb5820a" 98 131 }, 99 132 "@std/collections@1.1.3": { 100 133 "integrity": "bf8b0818886df6a32b64c7d3b037a425111f28278d69fd0995aeb62777c986b0" ··· 115 148 "@std/internal@1.0.12": { 116 149 "integrity": "972a634fd5bc34b242024402972cd5143eac68d8dffaca5eaa4dba30ce17b027" 117 150 }, 151 + "@std/io@0.225.2": { 152 + "integrity": "3c740cd4ee4c082e6cfc86458f47e2ab7cb353dc6234d5e9b1f91a2de5f4d6c7", 153 + "dependencies": [ 154 + "jsr:@std/bytes" 155 + ] 156 + }, 118 157 "@std/path@0.217.0": { 119 158 "integrity": "1217cc25534bca9a2f672d7fe7c6f356e4027df400c0e85c0ef3e4343bc67d11", 120 159 "dependencies": [ 121 160 "jsr:@std/assert@0.217" 122 161 ] 162 + }, 163 + "@std/path@1.0.9": { 164 + "integrity": "260a49f11edd3db93dd38350bf9cd1b4d1366afa98e81b86167b4e3dd750129e" 123 165 }, 124 166 "@std/path@1.1.2": { 125 167 "integrity": 
"c0b13b97dfe06546d5e16bf3966b1cadf92e1cc83e56ba5476ad8b498d9e3038", ··· 204 246 "dependencies": [ 205 247 "jsr:@cliffy/command@^1.0.0-rc.8", 206 248 "jsr:@cliffy/flags@^1.0.0-rc.8", 249 + "jsr:@cliffy/prompt@^1.0.0-rc.8", 207 250 "jsr:@cliffy/table@^1.0.0-rc.8", 208 251 "jsr:@db/sqlite@0.12", 209 252 "jsr:@es-toolkit/es-toolkit@^1.41.0", 210 253 "jsr:@soapbox/kysely-deno-sqlite@^2.2.0", 211 254 "jsr:@std/assert@1", 255 + "jsr:@std/io@~0.225.2", 212 256 "jsr:@std/toml@^1.0.11", 213 257 "jsr:@zod/zod@^4.1.12", 214 258 "npm:@paralleldrive/cuid2@^3.0.4",
+69
main.ts
··· 1 1 #!/usr/bin/env -S deno run --allow-run --allow-read --allow-env 2 2 3 3 import { Command } from "@cliffy/command"; 4 + import { Secret } from "@cliffy/prompt/secret"; 5 + import { readAll } from "@std/io"; 4 6 import chalk from "chalk"; 5 7 import { Effect, pipe } from "effect"; 6 8 import pkg from "./deno.json" with { type: "json" }; 7 9 import { initVmFile, mergeConfig, parseVmFile } from "./src/config.ts"; 8 10 import { CONFIG_FILE_NAME } from "./src/constants.ts"; 9 11 import { createBridgeNetworkIfNeeded } from "./src/network.ts"; 12 + import images from "./src/subcommands/images.ts"; 10 13 import inspect from "./src/subcommands/inspect.ts"; 14 + import login from "./src/subcommands/login.ts"; 15 + import logout from "./src/subcommands/logout.ts"; 11 16 import logs from "./src/subcommands/logs.ts"; 12 17 import ps from "./src/subcommands/ps.ts"; 13 18 import pull from "./src/subcommands/pull.ts"; 19 + import push from "./src/subcommands/push.ts"; 14 20 import restart from "./src/subcommands/restart.ts"; 15 21 import rm from "./src/subcommands/rm.ts"; 22 + import rmi from "./src/subcommands/rmi.ts"; 16 23 import start from "./src/subcommands/start.ts"; 17 24 import stop from "./src/subcommands/stop.ts"; 25 + import tag from "./src/subcommands/tag.ts"; 18 26 import { 19 27 createDriveImageIfNeeded, 20 28 downloadIso, ··· 250 258 .arguments("<image:string>") 251 259 .action(async (_options: unknown, image: string) => { 252 260 await pull(image); 261 + }) 262 + .command( 263 + "push", 264 + "Push VM image to an OCI-compliant registry, e.g., ghcr.io, docker hub", 265 + ) 266 + .arguments("<image:string>") 267 + .action(async (_options: unknown, image: string) => { 268 + await push(image); 269 + }) 270 + .command( 271 + "tag", 272 + "Create a tag 'image' that refers to the VM image of 'vm-name'", 273 + ) 274 + .arguments("<vm-name:string> <image:string>") 275 + .action(async (_options: unknown, vmName: string, image: string) => { 276 + console.log( 277 + 
`Tagging VM image of ${chalk.greenBright(vmName)} as ${ 278 + chalk.greenBright(image) 279 + }...`, 280 + ); 281 + await tag(vmName, image); 282 + }) 283 + .command( 284 + "login", 285 + "Login to an OCI-compliant registry, e.g., ghcr.io, docker.io (docker hub), etc.", 286 + ) 287 + .option("-u, --username <username:string>", "Registry username") 288 + .arguments("<registry:string>") 289 + .action(async (options: unknown, registry: string) => { 290 + const username = (options as { username: string }).username; 291 + 292 + let password: string | undefined; 293 + const stdinIsTTY = Deno.stdin.isTerminal(); 294 + 295 + if (!stdinIsTTY) { 296 + const buffer = await readAll(Deno.stdin); 297 + password = new TextDecoder().decode(buffer).trim(); 298 + } else { 299 + password = await Secret.prompt("Registry Password: "); 300 + } 301 + 302 + console.log( 303 + `Logging in to registry ${chalk.greenBright(registry)} as ${ 304 + chalk.greenBright(username) 305 + }...`, 306 + ); 307 + await login(username, password, registry); 308 + }) 309 + .command("logout", "Logout from an OCI-compliant registry") 310 + .arguments("<registry:string>") 311 + .action(async (_options: unknown, registry: string) => { 312 + await logout(registry); 313 + }) 314 + .command("images", "List all local VM images") 315 + .action(async () => { 316 + await images(); 317 + }) 318 + .command("rmi", "Remove a local VM image") 319 + .arguments("<image:string>") 320 + .action(async (_options: unknown, image: string) => { 321 + await rmi(image); 253 322 }) 254 323 .parse(Deno.args); 255 324 }
+2 -1
src/context.ts
··· 1 1 import { DB_PATH } from "./constants.ts"; 2 - import { createDb, type Database, migrateToLatest } from "./db.ts"; 2 + import { createDb, type Database } from "./db.ts"; 3 + import { migrateToLatest } from "./migrations.ts"; 3 4 4 5 export const db: Database = createDb(DB_PATH); 5 6 await migrateToLatest(db);
+10 -72
src/db.ts
··· 1 1 import { Database as Sqlite } from "@db/sqlite"; 2 2 import { DenoSqlite3Dialect } from "@soapbox/kysely-deno-sqlite"; 3 - import { 4 - Kysely, 5 - type Migration, 6 - type MigrationProvider, 7 - Migrator, 8 - sql, 9 - } from "kysely"; 3 + import { Kysely } from "kysely"; 10 4 import { CONFIG_DIR } from "./constants.ts"; 11 5 import type { STATUS } from "./types.ts"; 12 6 ··· 21 15 22 16 export type DatabaseSchema = { 23 17 virtual_machines: VirtualMachine; 18 + images: Image; 24 19 }; 25 20 26 21 export type VirtualMachine = { ··· 43 38 updatedAt?: string; 44 39 }; 45 40 46 - const migrations: Record<string, Migration> = {}; 47 - 48 - const migrationProvider: MigrationProvider = { 49 - // deno-lint-ignore require-await 50 - async getMigrations() { 51 - return migrations; 52 - }, 53 - }; 54 - 55 - migrations["001"] = { 56 - async up(db: Kysely<unknown>): Promise<void> { 57 - await db.schema 58 - .createTable("virtual_machines") 59 - .addColumn("id", "varchar", (col) => col.primaryKey()) 60 - .addColumn("name", "varchar", (col) => col.notNull().unique()) 61 - .addColumn("bridge", "varchar") 62 - .addColumn("macAddress", "varchar", (col) => col.notNull().unique()) 63 - .addColumn("memory", "varchar", (col) => col.notNull()) 64 - .addColumn("cpus", "integer", (col) => col.notNull()) 65 - .addColumn("cpu", "varchar", (col) => col.notNull()) 66 - .addColumn("diskSize", "varchar", (col) => col.notNull()) 67 - .addColumn("drivePath", "varchar") 68 - .addColumn("version", "varchar", (col) => col.notNull()) 69 - .addColumn("diskFormat", "varchar") 70 - .addColumn("isoPath", "varchar") 71 - .addColumn("status", "varchar", (col) => col.notNull()) 72 - .addColumn("pid", "integer") 73 - .addColumn( 74 - "createdAt", 75 - "varchar", 76 - (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`), 77 - ) 78 - .addColumn( 79 - "updatedAt", 80 - "varchar", 81 - (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`), 82 - ) 83 - .execute(); 84 - }, 85 - 86 - async down(db: 
Kysely<unknown>): Promise<void> { 87 - await db.schema.dropTable("virtual_machines").execute(); 88 - }, 89 - }; 90 - 91 - migrations["002"] = { 92 - async up(db: Kysely<unknown>): Promise<void> { 93 - await db.schema 94 - .alterTable("virtual_machines") 95 - .addColumn("portForward", "varchar") 96 - .execute(); 97 - }, 98 - 99 - async down(db: Kysely<unknown>): Promise<void> { 100 - await db.schema 101 - .alterTable("virtual_machines") 102 - .dropColumn("portForward") 103 - .execute(); 104 - }, 105 - }; 106 - 107 - export const migrateToLatest = async (db: Database): Promise<void> => { 108 - const migrator = new Migrator({ db, provider: migrationProvider }); 109 - const { error } = await migrator.migrateToLatest(); 110 - if (error) throw error; 41 + export type Image = { 42 + id: string; 43 + repository: string; 44 + tag: string; 45 + size: number; 46 + path: string; 47 + format: string; 48 + createdAt?: string; 111 49 }; 112 50 113 51 export type Database = Kysely<DatabaseSchema>;
+52
src/images.ts
··· 1 + import { Data, Effect } from "effect"; 2 + import type { DeleteResult, InsertResult } from "kysely"; 3 + import { ctx } from "./context.ts"; 4 + import type { Image } from "./db.ts"; 5 + 6 + export class DbError extends Data.TaggedError("DatabaseError")<{ 7 + message?: string; 8 + }> {} 9 + 10 + export const listImages = (): Effect.Effect<Image[], DbError, never> => 11 + Effect.tryPromise({ 12 + try: () => ctx.db.selectFrom("images").selectAll().execute(), 13 + catch: (error) => 14 + new DbError({ 15 + message: error instanceof Error ? error.message : String(error), 16 + }), 17 + }); 18 + 19 + export const saveImage = ( 20 + image: Image, 21 + ): Effect.Effect<InsertResult[], DbError, never> => 22 + Effect.tryPromise({ 23 + try: () => 24 + ctx.db.insertInto("images") 25 + .values(image) 26 + .onConflict((oc) => 27 + oc 28 + .column("repository") 29 + .column("tag") 30 + .doUpdateSet({ 31 + size: image.size, 32 + path: image.path, 33 + format: image.format, 34 + }) 35 + ) 36 + .execute(), 37 + catch: (error) => 38 + new DbError({ 39 + message: error instanceof Error ? error.message : String(error), 40 + }), 41 + }); 42 + 43 + export const deleteImage = ( 44 + id: string, 45 + ): Effect.Effect<DeleteResult[], DbError, never> => 46 + Effect.tryPromise({ 47 + try: () => ctx.db.deleteFrom("images").where("id", "=", id).execute(), 48 + catch: (error) => 49 + new DbError({ 50 + message: error instanceof Error ? error.message : String(error), 51 + }), 52 + });
+210
src/migrations.ts
··· 1 + import { 2 + type Kysely, 3 + type Migration, 4 + type MigrationProvider, 5 + Migrator, 6 + sql, 7 + } from "kysely"; 8 + import type { Database } from "./db.ts"; 9 + 10 + const migrations: Record<string, Migration> = {}; 11 + 12 + const migrationProvider: MigrationProvider = { 13 + // deno-lint-ignore require-await 14 + async getMigrations() { 15 + return migrations; 16 + }, 17 + }; 18 + 19 + migrations["001"] = { 20 + async up(db: Kysely<unknown>): Promise<void> { 21 + await db.schema 22 + .createTable("virtual_machines") 23 + .addColumn("id", "varchar", (col) => col.primaryKey()) 24 + .addColumn("name", "varchar", (col) => col.notNull().unique()) 25 + .addColumn("bridge", "varchar") 26 + .addColumn("macAddress", "varchar", (col) => col.notNull().unique()) 27 + .addColumn("memory", "varchar", (col) => col.notNull()) 28 + .addColumn("cpus", "integer", (col) => col.notNull()) 29 + .addColumn("cpu", "varchar", (col) => col.notNull()) 30 + .addColumn("diskSize", "varchar", (col) => col.notNull()) 31 + .addColumn("drivePath", "varchar") 32 + .addColumn("version", "varchar", (col) => col.notNull()) 33 + .addColumn("diskFormat", "varchar") 34 + .addColumn("isoPath", "varchar") 35 + .addColumn("status", "varchar", (col) => col.notNull()) 36 + .addColumn("pid", "integer") 37 + .addColumn( 38 + "createdAt", 39 + "varchar", 40 + (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`), 41 + ) 42 + .addColumn( 43 + "updatedAt", 44 + "varchar", 45 + (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`), 46 + ) 47 + .execute(); 48 + }, 49 + 50 + async down(db: Kysely<unknown>): Promise<void> { 51 + await db.schema.dropTable("virtual_machines").execute(); 52 + }, 53 + }; 54 + 55 + migrations["002"] = { 56 + async up(db: Kysely<unknown>): Promise<void> { 57 + await db.schema 58 + .alterTable("virtual_machines") 59 + .addColumn("portForward", "varchar") 60 + .execute(); 61 + }, 62 + 63 + async down(db: Kysely<unknown>): Promise<void> { 64 + await db.schema 65 + 
.alterTable("virtual_machines") 66 + .dropColumn("portForward") 67 + .execute(); 68 + }, 69 + }; 70 + 71 + migrations["003"] = { 72 + async up(db: Kysely<unknown>): Promise<void> { 73 + await db.schema 74 + .createTable("images") 75 + .addColumn("id", "varchar", (col) => col.primaryKey()) 76 + .addColumn("repository", "varchar", (col) => col.notNull()) 77 + .addColumn("tag", "varchar", (col) => col.notNull()) 78 + .addColumn("size", "integer", (col) => col.notNull()) 79 + .addColumn("path", "varchar", (col) => col.notNull()) 80 + .addColumn("createdAt", "varchar", (col) => col.notNull()) 81 + .execute(); 82 + }, 83 + 84 + async down(db: Kysely<unknown>): Promise<void> { 85 + await db.schema.dropTable("images").execute(); 86 + }, 87 + }; 88 + 89 + migrations["004"] = { 90 + async up(db: Kysely<unknown>): Promise<void> { 91 + await db.schema 92 + .alterTable("images") 93 + .addColumn("format", "varchar", (col) => col.notNull().defaultTo("qcow2")) 94 + .execute(); 95 + }, 96 + 97 + async down(db: Kysely<unknown>): Promise<void> { 98 + await db.schema 99 + .alterTable("images") 100 + .dropColumn("format") 101 + .execute(); 102 + }, 103 + }; 104 + 105 + migrations["005"] = { 106 + async up(db: Kysely<unknown>): Promise<void> { 107 + await db.schema 108 + .createTable("images_new") 109 + .addColumn("id", "varchar", (col) => col.primaryKey()) 110 + .addColumn("repository", "varchar", (col) => col.notNull()) 111 + .addColumn("tag", "varchar", (col) => col.notNull()) 112 + .addColumn("size", "integer", (col) => col.notNull()) 113 + .addColumn("path", "varchar", (col) => col.notNull()) 114 + .addColumn("format", "varchar", (col) => col.notNull().defaultTo("qcow2")) 115 + .addColumn("createdAt", "varchar", (col) => col.notNull()) 116 + .addUniqueConstraint("images_repository_tag_unique", [ 117 + "repository", 118 + "tag", 119 + ]) 120 + .execute(); 121 + 122 + await sql` 123 + INSERT INTO images_new (id, repository, tag, size, path, format, createdAt) 124 + SELECT id, 
repository, tag, size, path, format, createdAt FROM images 125 + `.execute(db); 126 + 127 + await db.schema.dropTable("images").execute(); 128 + await sql`ALTER TABLE images_new RENAME TO images`.execute(db); 129 + }, 130 + 131 + async down(db: Kysely<unknown>): Promise<void> { 132 + await db.schema 133 + .createTable("images_old") 134 + .addColumn("id", "varchar", (col) => col.primaryKey()) 135 + .addColumn("repository", "varchar", (col) => col.notNull()) 136 + .addColumn("tag", "varchar", (col) => col.notNull()) 137 + .addColumn("size", "integer", (col) => col.notNull()) 138 + .addColumn("path", "varchar", (col) => col.notNull()) 139 + .addColumn("format", "varchar", (col) => col.notNull().defaultTo("qcow2")) 140 + .addColumn("createdAt", "varchar", (col) => col.notNull()) 141 + .execute(); 142 + 143 + await sql` 144 + INSERT INTO images_old (id, repository, tag, size, path, format, createdAt) 145 + SELECT id, repository, tag, size, path, format, createdAt FROM images 146 + `.execute(db); 147 + 148 + await db.schema.dropTable("images").execute(); 149 + await sql`ALTER TABLE images_old RENAME TO images`.execute(db); 150 + }, 151 + }; 152 + 153 + migrations["006"] = { 154 + async up(db: Kysely<unknown>): Promise<void> { 155 + await db.schema 156 + .createTable("images_new") 157 + .addColumn("id", "varchar", (col) => col.primaryKey()) 158 + .addColumn("repository", "varchar", (col) => col.notNull()) 159 + .addColumn("tag", "varchar", (col) => col.notNull()) 160 + .addColumn("size", "integer", (col) => col.notNull()) 161 + .addColumn("path", "varchar", (col) => col.notNull()) 162 + .addColumn("format", "varchar", (col) => col.notNull().defaultTo("qcow2")) 163 + .addColumn( 164 + "createdAt", 165 + "varchar", 166 + (col) => col.notNull().defaultTo(sql`CURRENT_TIMESTAMP`), 167 + ) 168 + .addUniqueConstraint("images_repository_tag_unique", [ 169 + "repository", 170 + "tag", 171 + ]) 172 + .execute(); 173 + 174 + await sql` 175 + INSERT INTO images_new (id, repository, 
tag, size, path, format, createdAt) 176 + SELECT id, repository, tag, size, path, format, createdAt FROM images 177 + `.execute(db); 178 + 179 + await db.schema.dropTable("images").execute(); 180 + await sql`ALTER TABLE images_new RENAME TO images`.execute(db); 181 + }, 182 + 183 + async down(db: Kysely<unknown>): Promise<void> { 184 + await db.schema 185 + .createTable("images_old") 186 + .addColumn("id", "varchar", (col) => col.primaryKey()) 187 + .addColumn("repository", "varchar", (col) => col.notNull()) 188 + .addColumn("tag", "varchar", (col) => col.notNull()) 189 + .addColumn("size", "integer", (col) => col.notNull()) 190 + .addColumn("path", "varchar", (col) => col.notNull()) 191 + .addColumn("format", "varchar", (col) => col.notNull().defaultTo("qcow2")) 192 + .addColumn("createdAt", "varchar", (col) => col.notNull()) 193 + .addUniqueConstraint("images_repository_tag_unique", [ 194 + "repository", 195 + "tag", 196 + ]) 197 + .execute(); 198 + 199 + await sql` 200 + INSERT INTO images_old (id, repository, tag, size, path, format, createdAt) 201 + SELECT id, repository, tag, size, path, format, createdAt FROM images 202 + `.execute(db); 203 + }, 204 + }; 205 + 206 + export const migrateToLatest = async (db: Database): Promise<void> => { 207 + const migrator = new Migrator({ db, provider: migrationProvider }); 208 + const { error } = await migrator.migrateToLatest(); 209 + if (error) throw error; 210 + };
+90 -7
src/oras.ts
··· 1 - export async function setupOrasBinary(version: string): Promise<void> { 2 - const process = new Deno.Command("oras", { 3 - args: ["version"], 4 - stdout: "piped", 5 - stderr: "piped", 1 + import chalk from "chalk"; 2 + import { CONFIG_DIR } from "./mod.ts"; 3 + 4 + const DEFAULT_ORAS_VERSION = "1.3.0"; 5 + 6 + export async function setupOrasBinary(): Promise<void> { 7 + Deno.env.set( 8 + "PATH", 9 + `${CONFIG_DIR}/bin:${Deno.env.get("PATH")}`, 10 + ); 11 + 12 + const oras = new Deno.Command("which", { 13 + args: ["oras"], 14 + stdout: "null", 15 + stderr: "null", 6 16 }) 7 17 .spawn(); 8 18 9 - const { code } = await process.output(); 10 - if (code === 0) { 19 + const orasStatus = await oras.status; 20 + if (orasStatus.success) { 11 21 return; 12 22 } 13 23 24 + const version = Deno.env.get("ORAS_VERSION") || DEFAULT_ORAS_VERSION; 25 + 14 26 console.log(`Downloading ORAS version ${version}...`); 27 + 28 + const os = Deno.build.os; 29 + let arch = "amd64"; 30 + 31 + if (Deno.build.arch === "aarch64") { 32 + arch = "arm64"; 33 + } 34 + 35 + if (os !== "linux" && os !== "darwin") { 36 + console.error("Unsupported OS. 
Please download ORAS manually."); 37 + Deno.exit(1); 38 + } 39 + 40 + // https://github.com/oras-project/oras/releases/download/v1.3.0/oras_1.3.0_darwin_amd64.tar.gz 41 + const downloadUrl = 42 + `https://github.com/oras-project/oras/releases/download/v${version}/oras_${version}_${os}_${arch}.tar.gz`; 43 + 44 + console.log(`Downloading ORAS from ${chalk.greenBright(downloadUrl)}`); 45 + 46 + const downloadProcess = new Deno.Command("curl", { 47 + args: ["-L", downloadUrl, "-o", `oras_${version}_${os}_${arch}.tar.gz`], 48 + stdout: "inherit", 49 + stderr: "inherit", 50 + cwd: "/tmp", 51 + }) 52 + .spawn(); 53 + 54 + const status = await downloadProcess.status; 55 + if (!status.success) { 56 + console.error("Failed to download ORAS binary."); 57 + Deno.exit(1); 58 + } 59 + 60 + console.log("Extracting ORAS binary..."); 61 + 62 + const extractProcess = new Deno.Command("tar", { 63 + args: [ 64 + "-xzf", 65 + `oras_${version}_${os}_${arch}.tar.gz`, 66 + "-C", 67 + "./", 68 + ], 69 + stdout: "inherit", 70 + stderr: "inherit", 71 + cwd: "/tmp", 72 + }) 73 + .spawn(); 74 + 75 + const extractStatus = await extractProcess.status; 76 + if (!extractStatus.success) { 77 + console.error("Failed to extract ORAS binary."); 78 + Deno.exit(1); 79 + } 80 + 81 + await Deno.remove(`/tmp/oras_${version}_${os}_${arch}.tar.gz`); 82 + 83 + await Deno.mkdir(`${CONFIG_DIR}/bin`, { recursive: true }); 84 + 85 + await Deno.rename( 86 + `/tmp/oras`, 87 + `${CONFIG_DIR}/bin/oras`, 88 + ); 89 + await Deno.chmod(`${CONFIG_DIR}/bin/oras`, 0o755); 90 + 91 + console.log( 92 + `ORAS binary installed at ${ 93 + chalk.greenBright( 94 + `${CONFIG_DIR}/bin/oras`, 95 + ) 96 + }`, 97 + ); 15 98 }
+55
src/subcommands/images.ts
··· 1 + import { Table } from "@cliffy/table"; 2 + import dayjs from "dayjs"; 3 + import relativeTime from "dayjs/plugin/relativeTime.js"; 4 + import utc from "dayjs/plugin/utc.js"; 5 + import { Effect, pipe } from "effect"; 6 + import type { Image } from "../db.ts"; 7 + import { type DbError, listImages } from "../images.ts"; 8 + import { humanFileSize } from "../utils.ts"; 9 + 10 + dayjs.extend(relativeTime); 11 + dayjs.extend(utc); 12 + 13 + const createTable = () => 14 + Effect.succeed( 15 + new Table( 16 + ["REPOSITORY", "TAG", "IMAGE ID", "CREATED", "SIZE"], 17 + ), 18 + ); 19 + 20 + const populateTable = (table: Table, images: Image[]) => 21 + Effect.gen(function* () { 22 + for (const image of images) { 23 + table.push([ 24 + image.repository, 25 + image.tag, 26 + image.id, 27 + dayjs.utc(image.createdAt).local().fromNow(), 28 + yield* humanFileSize(image.size), 29 + ]); 30 + } 31 + return table; 32 + }); 33 + 34 + const displayTable = (table: Table) => 35 + Effect.sync(() => { 36 + console.log(table.padding(2).toString()); 37 + }); 38 + 39 + const handleError = (error: DbError | Error) => 40 + Effect.sync(() => { 41 + console.error(`Failed to fetch images: ${error}`); 42 + Deno.exit(1); 43 + }); 44 + 45 + const lsEffect = () => 46 + pipe( 47 + Effect.all([listImages(), createTable()]), 48 + Effect.flatMap(([images, table]) => populateTable(table, images)), 49 + Effect.flatMap(displayTable), 50 + Effect.catchAll(handleError), 51 + ); 52 + 53 + export default async function () { 54 + await Effect.runPromise(lsEffect()); 55 + }
+35
src/subcommands/login.ts
··· 1 + import { setupOrasBinary } from "../oras.ts"; 2 + 3 + export default async function ( 4 + username: string, 5 + password: string, 6 + registry: string, 7 + ) { 8 + await setupOrasBinary(); 9 + 10 + const cmd = new Deno.Command("oras", { 11 + args: [ 12 + "login", 13 + "--username", 14 + username, 15 + "--password-stdin", 16 + registry, 17 + ], 18 + stdin: "piped", 19 + stderr: "inherit", 20 + stdout: "inherit", 21 + }); 22 + 23 + const process = cmd.spawn(); 24 + if (process.stdin) { 25 + const writer = process.stdin.getWriter(); 26 + await writer.write(new TextEncoder().encode(password + "\n")); 27 + writer.close(); 28 + } 29 + 30 + const status = await process.status; 31 + 32 + if (!status.success) { 33 + Deno.exit(status.code); 34 + } 35 + }
+19
src/subcommands/logout.ts
··· 1 + import { setupOrasBinary } from "../oras.ts"; 2 + 3 + export default async function (registry: string) { 4 + await setupOrasBinary(); 5 + 6 + const cmd = new Deno.Command("oras", { 7 + args: ["logout", registry], 8 + stderr: "inherit", 9 + stdout: "inherit", 10 + }); 11 + 12 + const process = cmd.spawn(); 13 + 14 + const status = await process.status; 15 + 16 + if (!status.success) { 17 + Deno.exit(status.code); 18 + } 19 + }
+3
src/subcommands/pull.ts
··· 1 + import { setupOrasBinary } from "../oras.ts"; 2 + 1 3 export default async function (name: string): Promise<void> { 4 + await setupOrasBinary(); 2 5 }
+4 -1
src/subcommands/push.ts
··· 1 - export default async function (name: string): Promise<void> { 1 + import { setupOrasBinary } from "../oras.ts"; 2 + 3 + export default async function (image: string): Promise<void> { 4 + await setupOrasBinary(); 2 5 }
-5
src/subcommands/registry/login.ts
··· 1 - import { setupOrasBinary } from "../../oras.ts"; 2 - 3 - export default async function () { 4 - await setupOrasBinary(Deno.env.get("ORAS_VERSION") ?? "1.3.0"); 5 - }
+6
src/subcommands/rmi.ts
··· 1 + import { Effect } from "effect"; 2 + import { deleteImage } from "../images.ts"; 3 + 4 + export default async function (id: string) { 5 + await Effect.runPromise(deleteImage(id)); 6 + }
+47
src/subcommands/tag.ts
··· 1 + import { createId } from "@paralleldrive/cuid2"; 2 + import { Effect, pipe } from "effect"; 3 + import { saveImage } from "../images.ts"; 4 + import { getInstanceState, type VirtualMachine } from "../mod.ts"; 5 + import { du } from "../utils.ts"; 6 + 7 + const extractTag = (name: string) => 8 + Effect.sync(() => name.split(":")[1] || "latest"); 9 + 10 + const failIfNoVM = ([vm, name]: [VirtualMachine | undefined, string]) => 11 + Effect.gen(function* () { 12 + if (!vm) { 13 + throw new Error(`VM with name ${name} not found`); 14 + } 15 + if (!vm.drivePath) { 16 + throw new Error(`VM with name ${name} has no drive attached`); 17 + } 18 + 19 + const size = yield* du(vm.drivePath); 20 + 21 + return [vm, name, size] as [VirtualMachine, string, number]; 22 + }); 23 + 24 + export default async function (name: string, image: string) { 25 + await Effect.runPromise( 26 + pipe( 27 + Effect.all([getInstanceState(name), extractTag(image)]), 28 + Effect.flatMap(failIfNoVM), 29 + Effect.flatMap(([vm, tag, size]) => 30 + saveImage({ 31 + id: createId(), 32 + repository: image, 33 + tag, 34 + size, 35 + path: vm.drivePath!, 36 + format: vm.diskFormat, 37 + }) 38 + ), 39 + Effect.catchAll((error) => 40 + Effect.sync(() => { 41 + console.error(`Failed to tag image: ${error}`); 42 + Deno.exit(1); 43 + }) 44 + ), 45 + ), 46 + ); 47 + }
+16 -1
src/utils.ts
··· 37 37 ); 38 38 }; 39 39 40 - const du = (path: string): Effect.Effect<number, LogCommandError, never> => 40 + export const humanFileSize = (bytes: number) => 41 + Effect.sync(() => { 42 + const thresh = 1024; 43 + if (Math.abs(bytes) < thresh) return bytes + "KB"; 44 + const units = ["MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; 45 + let u = -1; 46 + do { 47 + bytes /= thresh; 48 + ++u; 49 + } while (Math.abs(bytes) >= thresh && u < units.length - 1); 50 + return `${bytes.toFixed(1)}${units[u]}`; 51 + }); 52 + 53 + export const du = ( 54 + path: string, 55 + ): Effect.Effect<number, LogCommandError, never> => 41 56 Effect.tryPromise({ 42 57 try: async () => { 43 58 const cmd = new Deno.Command("du", {