test: add test infrastructure with CI workflow and image-gen unit tests
This commit is contained in:
parent
70d9f63727
commit
ac217d5402
|
|
@ -0,0 +1,21 @@
|
|||
# CI: run the root Node test suite on every push, pull request, and manual trigger.
name: Test

on:
  push:
  pull_request:
  workflow_dispatch:

jobs:
  node-tests:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Node.js
        uses: actions/setup-node@v4
        with:
          node-version: 22

      - name: Run tests
        run: npm test
|
||||
|
|
@ -0,0 +1,73 @@
|
|||
# Testing Strategy

This repository has many scripts, but they do not share a single runtime or dependency graph. The lowest-risk testing strategy is to start from stable Node-based library code, then expand outward to CLI and skill-specific smoke tests.

## Current Baseline

- Root test runner: `node:test`
- Entry point: `npm test`
- Coverage command: `npm run test:coverage`
- CI trigger: GitHub Actions on `push`, `pull_request`, and manual dispatch

This avoids introducing Jest/Vitest across a repo that already mixes plain Node scripts, Bun-based skill packages, vendored code, and browser automation.

## Rollout Plan

### Phase 1: Stable library coverage

Focus on pure functions under `scripts/lib/` first.

- `scripts/lib/release-files.mjs`
- `scripts/lib/shared-skill-packages.mjs`

Goals:

- Validate file filtering and release packaging rules
- Catch regressions in package vendoring and dependency rewriting
- Keep tests deterministic and free of network, Bun, or browser requirements

### Phase 2: Root CLI integration tests

Add temp-directory integration tests for root CLIs that already support dry-run or local-only flows.

- `scripts/sync-shared-skill-packages.mjs`
- `scripts/publish-skill.mjs --dry-run`
- `scripts/sync-clawhub.mjs` argument handling and local skill discovery

Goals:

- Assert exit codes and stdout for common flows
- Cover CLI argument parsing without hitting external services

### Phase 3: Skill script smoke tests

Add opt-in smoke tests for selected `skills/*/scripts/` packages, starting with those that:

- accept local input files
- have deterministic output
- do not require authenticated browser sessions

Examples:

- markdown transforms
- file conversion helpers
- local content analyzers

Keep browser automation, login flows, and live API publishing scripts outside the default CI path unless they are explicitly mocked.

### Phase 4: Coverage gates

After the stable Node path has enough breadth, add coverage thresholds in CI for the tested root modules.

Recommended order:

1. Start with reporting only
2. Add line/function thresholds for `scripts/lib/**`
3. Expand include patterns once skill-level smoke tests are reliable

## Conventions For New Tests

- Prefer temp directories over committed fixtures unless the fixture is reused heavily
- Test exported functions before testing CLI wrappers
- Avoid network, browser, and credential dependencies in default CI
- Keep tests isolated so they can run with plain `node --test`
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
{
  "name": "baoyu-skills",
  "private": true,
  "type": "module",
  "scripts": {
    "test": "node --test",
    "test:coverage": "node --experimental-test-coverage --test"
  }
}
|
||||
|
|
@ -0,0 +1,110 @@
|
|||
import assert from "node:assert/strict";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import test from "node:test";
|
||||
|
||||
import {
|
||||
listReleaseFiles,
|
||||
validateSelfContainedRelease,
|
||||
} from "../scripts/lib/release-files.mjs";
|
||||
|
||||
async function makeTempDir(prefix) {
|
||||
return fs.mkdtemp(path.join(os.tmpdir(), prefix));
|
||||
}
|
||||
|
||||
async function writeFile(filePath, contents = "") {
|
||||
await fs.mkdir(path.dirname(filePath), { recursive: true });
|
||||
await fs.writeFile(filePath, contents);
|
||||
}
|
||||
|
||||
async function writeJson(filePath, value) {
|
||||
await writeFile(filePath, `${JSON.stringify(value, null, 2)}\n`);
|
||||
}
|
||||
|
||||
test("listReleaseFiles skips generated paths and returns sorted relative paths", async (t) => {
|
||||
const root = await makeTempDir("baoyu-release-files-");
|
||||
t.after(() => fs.rm(root, { recursive: true, force: true }));
|
||||
|
||||
await writeFile(path.join(root, "b.txt"), "b");
|
||||
await writeFile(path.join(root, "a.txt"), "a");
|
||||
await writeFile(path.join(root, "nested", "keep.txt"), "keep");
|
||||
await writeFile(path.join(root, "node_modules", "skip.js"), "skip");
|
||||
await writeFile(path.join(root, ".git", "config"), "skip");
|
||||
await writeFile(path.join(root, "dist", "artifact.txt"), "skip");
|
||||
await writeFile(path.join(root, "out", "artifact.txt"), "skip");
|
||||
await writeFile(path.join(root, "build", "artifact.txt"), "skip");
|
||||
await writeFile(path.join(root, ".DS_Store"), "skip");
|
||||
await writeFile(path.join(root, "bun.lockb"), "skip");
|
||||
|
||||
const files = await listReleaseFiles(root);
|
||||
|
||||
assert.deepEqual(
|
||||
files.map((file) => file.relPath),
|
||||
["a.txt", "b.txt", "nested/keep.txt"],
|
||||
);
|
||||
});
|
||||
|
||||
test("validateSelfContainedRelease accepts file dependencies that stay within the release root", async (t) => {
|
||||
const root = await makeTempDir("baoyu-release-ok-");
|
||||
t.after(() => fs.rm(root, { recursive: true, force: true }));
|
||||
|
||||
await writeJson(path.join(root, "shared", "package.json"), {
|
||||
name: "shared-package",
|
||||
version: "1.0.0",
|
||||
});
|
||||
await writeFile(path.join(root, "shared", "index.js"), "export const shared = true;\n");
|
||||
await writeJson(path.join(root, "skill", "package.json"), {
|
||||
name: "test-skill",
|
||||
version: "1.0.0",
|
||||
dependencies: {
|
||||
"shared-package": "file:../shared",
|
||||
},
|
||||
});
|
||||
|
||||
await assert.doesNotReject(() => validateSelfContainedRelease(root));
|
||||
});
|
||||
|
||||
test("validateSelfContainedRelease rejects missing local file dependencies", async (t) => {
|
||||
const root = await makeTempDir("baoyu-release-missing-");
|
||||
t.after(() => fs.rm(root, { recursive: true, force: true }));
|
||||
|
||||
await writeJson(path.join(root, "skill", "package.json"), {
|
||||
name: "test-skill",
|
||||
version: "1.0.0",
|
||||
dependencies: {
|
||||
"shared-package": "file:../shared",
|
||||
},
|
||||
});
|
||||
|
||||
await assert.rejects(
|
||||
() => validateSelfContainedRelease(root),
|
||||
/Missing local dependency for release/,
|
||||
);
|
||||
});
|
||||
|
||||
test("validateSelfContainedRelease rejects file dependencies outside the release root", async (t) => {
|
||||
const root = await makeTempDir("baoyu-release-root-");
|
||||
const outside = await makeTempDir("baoyu-release-outside-");
|
||||
t.after(() => fs.rm(root, { recursive: true, force: true }));
|
||||
t.after(() => fs.rm(outside, { recursive: true, force: true }));
|
||||
|
||||
const skillDir = path.join(root, "skill");
|
||||
const externalSpec = path
|
||||
.relative(skillDir, outside)
|
||||
.split(path.sep)
|
||||
.join("/");
|
||||
|
||||
await writeJson(path.join(skillDir, "package.json"), {
|
||||
name: "test-skill",
|
||||
version: "1.0.0",
|
||||
dependencies: {
|
||||
"outside-package": `file:${externalSpec}`,
|
||||
},
|
||||
});
|
||||
|
||||
await assert.rejects(
|
||||
() => validateSelfContainedRelease(root),
|
||||
/Release target is not self-contained/,
|
||||
);
|
||||
});
|
||||
|
|
@ -0,0 +1,70 @@
|
|||
import assert from "node:assert/strict";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import test from "node:test";
|
||||
|
||||
import { syncSharedSkillPackages } from "../scripts/lib/shared-skill-packages.mjs";
|
||||
|
||||
async function makeTempDir(prefix) {
|
||||
return fs.mkdtemp(path.join(os.tmpdir(), prefix));
|
||||
}
|
||||
|
||||
async function writeFile(filePath, contents = "") {
|
||||
await fs.mkdir(path.dirname(filePath), { recursive: true });
|
||||
await fs.writeFile(filePath, contents);
|
||||
}
|
||||
|
||||
async function writeJson(filePath, value) {
|
||||
await writeFile(filePath, `${JSON.stringify(value, null, 2)}\n`);
|
||||
}
|
||||
|
||||
test("syncSharedSkillPackages vendors workspace packages into skill scripts", async (t) => {
|
||||
const root = await makeTempDir("baoyu-sync-shared-");
|
||||
t.after(() => fs.rm(root, { recursive: true, force: true }));
|
||||
|
||||
await writeJson(path.join(root, "packages", "baoyu-md", "package.json"), {
|
||||
name: "baoyu-md",
|
||||
version: "1.0.0",
|
||||
});
|
||||
await writeFile(
|
||||
path.join(root, "packages", "baoyu-md", "src", "index.ts"),
|
||||
"export const markdown = true;\n",
|
||||
);
|
||||
|
||||
const consumerDir = path.join(root, "skills", "demo-skill", "scripts");
|
||||
await writeJson(path.join(consumerDir, "package.json"), {
|
||||
name: "demo-skill-scripts",
|
||||
version: "1.0.0",
|
||||
dependencies: {
|
||||
"baoyu-md": "^1.0.0",
|
||||
kleur: "^4.1.5",
|
||||
},
|
||||
});
|
||||
|
||||
const result = await syncSharedSkillPackages(root, { install: false });
|
||||
|
||||
assert.deepEqual(result.packageDirs, [consumerDir]);
|
||||
assert.deepEqual(result.managedPaths, [
|
||||
"skills/demo-skill/scripts/bun.lock",
|
||||
"skills/demo-skill/scripts/package.json",
|
||||
"skills/demo-skill/scripts/vendor",
|
||||
]);
|
||||
|
||||
const updatedPackageJson = JSON.parse(
|
||||
await fs.readFile(path.join(consumerDir, "package.json"), "utf8"),
|
||||
);
|
||||
assert.equal(updatedPackageJson.dependencies["baoyu-md"], "file:./vendor/baoyu-md");
|
||||
assert.equal(updatedPackageJson.dependencies.kleur, "^4.1.5");
|
||||
|
||||
const vendoredPackageJson = JSON.parse(
|
||||
await fs.readFile(path.join(consumerDir, "vendor", "baoyu-md", "package.json"), "utf8"),
|
||||
);
|
||||
assert.equal(vendoredPackageJson.name, "baoyu-md");
|
||||
|
||||
const vendoredFile = await fs.readFile(
|
||||
path.join(consumerDir, "vendor", "baoyu-md", "src", "index.ts"),
|
||||
"utf8",
|
||||
);
|
||||
assert.match(vendoredFile, /markdown = true/);
|
||||
});
|
||||
|
|
@ -0,0 +1,301 @@
|
|||
import assert from "node:assert/strict";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import test from "node:test";
|
||||
|
||||
import {
|
||||
createTaskArgs,
|
||||
detectProvider,
|
||||
getConfiguredMaxWorkers,
|
||||
getConfiguredProviderRateLimits,
|
||||
getWorkerCount,
|
||||
isRetryableGenerationError,
|
||||
loadBatchTasks,
|
||||
mergeConfig,
|
||||
normalizeOutputImagePath,
|
||||
parseArgs,
|
||||
parseSimpleYaml,
|
||||
} from "../../../skills/baoyu-image-gen/scripts/main.ts";
|
||||
|
||||
function makeArgs(overrides = {}) {
|
||||
return {
|
||||
prompt: null,
|
||||
promptFiles: [],
|
||||
imagePath: null,
|
||||
provider: null,
|
||||
model: null,
|
||||
aspectRatio: null,
|
||||
size: null,
|
||||
quality: null,
|
||||
imageSize: null,
|
||||
referenceImages: [],
|
||||
n: 1,
|
||||
batchFile: null,
|
||||
jobs: null,
|
||||
json: false,
|
||||
help: false,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function useEnv(t, values) {
|
||||
const previous = new Map();
|
||||
for (const [key, value] of Object.entries(values)) {
|
||||
previous.set(key, process.env[key]);
|
||||
if (value == null) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
t.after(() => {
|
||||
for (const [key, value] of previous.entries()) {
|
||||
if (value == null) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function makeTempDir(prefix) {
|
||||
return fs.mkdtemp(path.join(os.tmpdir(), prefix));
|
||||
}
|
||||
|
||||
test("parseArgs parses the main image-gen CLI flags", () => {
|
||||
const args = parseArgs([
|
||||
"--promptfiles",
|
||||
"prompts/system.md",
|
||||
"prompts/content.md",
|
||||
"--image",
|
||||
"out/hero",
|
||||
"--provider",
|
||||
"openai",
|
||||
"--quality",
|
||||
"2k",
|
||||
"--imageSize",
|
||||
"4k",
|
||||
"--ref",
|
||||
"ref/one.png",
|
||||
"ref/two.jpg",
|
||||
"--n",
|
||||
"3",
|
||||
"--jobs",
|
||||
"5",
|
||||
"--json",
|
||||
]);
|
||||
|
||||
assert.deepEqual(args.promptFiles, ["prompts/system.md", "prompts/content.md"]);
|
||||
assert.equal(args.imagePath, "out/hero");
|
||||
assert.equal(args.provider, "openai");
|
||||
assert.equal(args.quality, "2k");
|
||||
assert.equal(args.imageSize, "4K");
|
||||
assert.deepEqual(args.referenceImages, ["ref/one.png", "ref/two.jpg"]);
|
||||
assert.equal(args.n, 3);
|
||||
assert.equal(args.jobs, 5);
|
||||
assert.equal(args.json, true);
|
||||
});
|
||||
|
||||
test("parseArgs falls back to positional prompt and rejects invalid provider", () => {
|
||||
const positional = parseArgs(["draw", "a", "cat"]);
|
||||
assert.equal(positional.prompt, "draw a cat");
|
||||
|
||||
assert.throws(
|
||||
() => parseArgs(["--provider", "stability"]),
|
||||
/Invalid provider/,
|
||||
);
|
||||
});
|
||||
|
||||
test("parseSimpleYaml parses nested defaults and provider limits", () => {
|
||||
const yaml = `
|
||||
version: 2
|
||||
default_provider: openrouter
|
||||
default_quality: normal
|
||||
default_aspect_ratio: '16:9'
|
||||
default_image_size: 2K
|
||||
default_model:
|
||||
google: gemini-3-pro-image-preview
|
||||
openai: gpt-image-1.5
|
||||
batch:
|
||||
max_workers: 8
|
||||
provider_limits:
|
||||
google:
|
||||
concurrency: 2
|
||||
start_interval_ms: 900
|
||||
openai:
|
||||
concurrency: 4
|
||||
`;
|
||||
|
||||
const config = parseSimpleYaml(yaml);
|
||||
|
||||
assert.equal(config.version, 2);
|
||||
assert.equal(config.default_provider, "openrouter");
|
||||
assert.equal(config.default_quality, "normal");
|
||||
assert.equal(config.default_aspect_ratio, "16:9");
|
||||
assert.equal(config.default_image_size, "2K");
|
||||
assert.equal(config.default_model?.google, "gemini-3-pro-image-preview");
|
||||
assert.equal(config.default_model?.openai, "gpt-image-1.5");
|
||||
assert.equal(config.batch?.max_workers, 8);
|
||||
assert.deepEqual(config.batch?.provider_limits?.google, {
|
||||
concurrency: 2,
|
||||
start_interval_ms: 900,
|
||||
});
|
||||
assert.deepEqual(config.batch?.provider_limits?.openai, {
|
||||
concurrency: 4,
|
||||
});
|
||||
});
|
||||
|
||||
test("mergeConfig only fills values missing from CLI args", () => {
|
||||
const merged = mergeConfig(
|
||||
makeArgs({
|
||||
provider: "openai",
|
||||
quality: null,
|
||||
aspectRatio: null,
|
||||
imageSize: "4K",
|
||||
}),
|
||||
{
|
||||
default_provider: "google",
|
||||
default_quality: "2k",
|
||||
default_aspect_ratio: "3:2",
|
||||
default_image_size: "2K",
|
||||
},
|
||||
);
|
||||
|
||||
assert.equal(merged.provider, "openai");
|
||||
assert.equal(merged.quality, "2k");
|
||||
assert.equal(merged.aspectRatio, "3:2");
|
||||
assert.equal(merged.imageSize, "4K");
|
||||
});
|
||||
|
||||
test("detectProvider rejects non-ref-capable providers and prefers Google first when multiple keys exist", (t) => {
|
||||
assert.throws(
|
||||
() =>
|
||||
detectProvider(
|
||||
makeArgs({
|
||||
provider: "dashscope",
|
||||
referenceImages: ["ref.png"],
|
||||
}),
|
||||
),
|
||||
/Reference images require a ref-capable provider/,
|
||||
);
|
||||
|
||||
useEnv(t, {
|
||||
GOOGLE_API_KEY: "google-key",
|
||||
OPENAI_API_KEY: "openai-key",
|
||||
OPENROUTER_API_KEY: null,
|
||||
DASHSCOPE_API_KEY: null,
|
||||
REPLICATE_API_TOKEN: null,
|
||||
JIMENG_ACCESS_KEY_ID: null,
|
||||
JIMENG_SECRET_ACCESS_KEY: null,
|
||||
ARK_API_KEY: null,
|
||||
});
|
||||
assert.equal(detectProvider(makeArgs()), "google");
|
||||
});
|
||||
|
||||
test("detectProvider selects an available ref-capable provider for reference-image tasks", (t) => {
|
||||
useEnv(t, {
|
||||
GOOGLE_API_KEY: null,
|
||||
OPENAI_API_KEY: "openai-key",
|
||||
OPENROUTER_API_KEY: null,
|
||||
DASHSCOPE_API_KEY: null,
|
||||
REPLICATE_API_TOKEN: null,
|
||||
JIMENG_ACCESS_KEY_ID: null,
|
||||
JIMENG_SECRET_ACCESS_KEY: null,
|
||||
ARK_API_KEY: null,
|
||||
});
|
||||
assert.equal(
|
||||
detectProvider(makeArgs({ referenceImages: ["ref.png"] })),
|
||||
"openai",
|
||||
);
|
||||
});
|
||||
|
||||
test("batch worker and provider-rate-limit configuration prefer env over EXTEND config", (t) => {
|
||||
useEnv(t, {
|
||||
BAOYU_IMAGE_GEN_MAX_WORKERS: "12",
|
||||
BAOYU_IMAGE_GEN_GOOGLE_CONCURRENCY: "5",
|
||||
BAOYU_IMAGE_GEN_GOOGLE_START_INTERVAL_MS: "450",
|
||||
});
|
||||
|
||||
const extendConfig = {
|
||||
batch: {
|
||||
max_workers: 7,
|
||||
provider_limits: {
|
||||
google: {
|
||||
concurrency: 2,
|
||||
start_interval_ms: 900,
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
assert.equal(getConfiguredMaxWorkers(extendConfig), 12);
|
||||
assert.deepEqual(getConfiguredProviderRateLimits(extendConfig).google, {
|
||||
concurrency: 5,
|
||||
startIntervalMs: 450,
|
||||
});
|
||||
});
|
||||
|
||||
test("loadBatchTasks and createTaskArgs resolve batch-relative paths", async (t) => {
|
||||
const root = await makeTempDir("baoyu-image-gen-batch-");
|
||||
t.after(() => fs.rm(root, { recursive: true, force: true }));
|
||||
|
||||
const batchFile = path.join(root, "jobs", "batch.json");
|
||||
await fs.mkdir(path.dirname(batchFile), { recursive: true });
|
||||
await fs.writeFile(
|
||||
batchFile,
|
||||
JSON.stringify({
|
||||
jobs: 2,
|
||||
tasks: [
|
||||
{
|
||||
id: "hero",
|
||||
promptFiles: ["prompts/hero.md"],
|
||||
image: "out/hero",
|
||||
ref: ["refs/hero.png"],
|
||||
ar: "16:9",
|
||||
},
|
||||
],
|
||||
}),
|
||||
);
|
||||
|
||||
const loaded = await loadBatchTasks(batchFile);
|
||||
assert.equal(loaded.jobs, 2);
|
||||
assert.equal(loaded.batchDir, path.dirname(batchFile));
|
||||
assert.equal(loaded.tasks[0].id, "hero");
|
||||
|
||||
const taskArgs = createTaskArgs(
|
||||
makeArgs({
|
||||
provider: "replicate",
|
||||
quality: "2k",
|
||||
json: true,
|
||||
}),
|
||||
loaded.tasks[0],
|
||||
loaded.batchDir,
|
||||
);
|
||||
|
||||
assert.deepEqual(taskArgs.promptFiles, [
|
||||
path.join(loaded.batchDir, "prompts/hero.md"),
|
||||
]);
|
||||
assert.equal(taskArgs.imagePath, path.join(loaded.batchDir, "out/hero"));
|
||||
assert.deepEqual(taskArgs.referenceImages, [
|
||||
path.join(loaded.batchDir, "refs/hero.png"),
|
||||
]);
|
||||
assert.equal(taskArgs.provider, "replicate");
|
||||
assert.equal(taskArgs.aspectRatio, "16:9");
|
||||
assert.equal(taskArgs.quality, "2k");
|
||||
assert.equal(taskArgs.json, true);
|
||||
});
|
||||
|
||||
test("path normalization, worker count, and retry classification follow expected rules", () => {
|
||||
assert.match(normalizeOutputImagePath("out/sample"), /out[\\/]+sample\.png$/);
|
||||
assert.match(normalizeOutputImagePath("out/sample.webp"), /out[\\/]+sample\.webp$/);
|
||||
|
||||
assert.equal(getWorkerCount(8, null, 3), 3);
|
||||
assert.equal(getWorkerCount(2, 6, 5), 2);
|
||||
assert.equal(getWorkerCount(5, 0, 4), 1);
|
||||
|
||||
assert.equal(isRetryableGenerationError(new Error("API error (401): denied")), false);
|
||||
assert.equal(isRetryableGenerationError(new Error("socket hang up")), true);
|
||||
});
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
import assert from "node:assert/strict";
|
||||
import test from "node:test";
|
||||
|
||||
import {
|
||||
getSizeFromAspectRatio,
|
||||
normalizeSize,
|
||||
parseAspectRatio,
|
||||
} from "../../../skills/baoyu-image-gen/scripts/providers/dashscope.ts";
|
||||
|
||||
test("DashScope aspect-ratio parsing accepts numeric ratios only", () => {
|
||||
assert.deepEqual(parseAspectRatio("3:2"), { width: 3, height: 2 });
|
||||
assert.equal(parseAspectRatio("square"), null);
|
||||
assert.equal(parseAspectRatio("-1:2"), null);
|
||||
});
|
||||
|
||||
test("DashScope size selection picks the closest supported size per quality preset", () => {
|
||||
assert.equal(getSizeFromAspectRatio(null, "normal"), "1024*1024");
|
||||
assert.equal(getSizeFromAspectRatio("16:9", "normal"), "1280*720");
|
||||
assert.equal(getSizeFromAspectRatio("16:9", "2k"), "2048*1152");
|
||||
assert.equal(getSizeFromAspectRatio("invalid", "2k"), "1536*1536");
|
||||
});
|
||||
|
||||
test("DashScope size normalization converts WxH into provider format", () => {
|
||||
assert.equal(normalizeSize("1024x1024"), "1024*1024");
|
||||
assert.equal(normalizeSize("2048*1152"), "2048*1152");
|
||||
});
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
import assert from "node:assert/strict";
|
||||
import test from "node:test";
|
||||
|
||||
import {
|
||||
addAspectRatioToPrompt,
|
||||
buildGoogleUrl,
|
||||
buildPromptWithAspect,
|
||||
extractInlineImageData,
|
||||
extractPredictedImageData,
|
||||
getGoogleImageSize,
|
||||
isGoogleImagen,
|
||||
isGoogleMultimodal,
|
||||
normalizeGoogleModelId,
|
||||
} from "../../../skills/baoyu-image-gen/scripts/providers/google.ts";
|
||||
|
||||
function useEnv(t, values) {
|
||||
const previous = new Map();
|
||||
for (const [key, value] of Object.entries(values)) {
|
||||
previous.set(key, process.env[key]);
|
||||
if (value == null) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
t.after(() => {
|
||||
for (const [key, value] of previous.entries()) {
|
||||
if (value == null) {
|
||||
delete process.env[key];
|
||||
} else {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
test("Google provider helpers normalize model IDs and select image size defaults", () => {
|
||||
assert.equal(
|
||||
normalizeGoogleModelId("models/gemini-3.1-flash-image-preview"),
|
||||
"gemini-3.1-flash-image-preview",
|
||||
);
|
||||
assert.equal(isGoogleMultimodal("models/gemini-3-pro-image-preview"), true);
|
||||
assert.equal(isGoogleImagen("imagen-3.0-generate-002"), true);
|
||||
assert.equal(
|
||||
getGoogleImageSize({ imageSize: null, quality: "2k" }),
|
||||
"2K",
|
||||
);
|
||||
assert.equal(
|
||||
getGoogleImageSize({ imageSize: "4K", quality: "normal" }),
|
||||
"4K",
|
||||
);
|
||||
});
|
||||
|
||||
test("Google URL builder appends v1beta when the base URL does not already include it", (t) => {
|
||||
useEnv(t, { GOOGLE_BASE_URL: "https://generativelanguage.googleapis.com" });
|
||||
assert.equal(
|
||||
buildGoogleUrl("models/demo:generateContent"),
|
||||
"https://generativelanguage.googleapis.com/v1beta/models/demo:generateContent",
|
||||
);
|
||||
});
|
||||
|
||||
test("Google URL and prompt helpers preserve existing v1beta paths and aspect hints", (t) => {
|
||||
useEnv(t, { GOOGLE_BASE_URL: "https://example.com/custom/v1beta/" });
|
||||
assert.equal(
|
||||
buildGoogleUrl("/models/demo:predict"),
|
||||
"https://example.com/custom/v1beta/models/demo:predict",
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
addAspectRatioToPrompt("A city skyline", "16:9"),
|
||||
"A city skyline Aspect ratio: 16:9.",
|
||||
);
|
||||
assert.equal(
|
||||
buildPromptWithAspect("A city skyline", "16:9", "2k"),
|
||||
"A city skyline Aspect ratio: 16:9. High resolution 2048px.",
|
||||
);
|
||||
});
|
||||
|
||||
test("Google response extractors find inline and predicted image payloads", () => {
|
||||
assert.equal(
|
||||
extractInlineImageData({
|
||||
candidates: [
|
||||
{
|
||||
content: {
|
||||
parts: [{ inlineData: { data: "inline-base64" } }],
|
||||
},
|
||||
},
|
||||
],
|
||||
}),
|
||||
"inline-base64",
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
extractPredictedImageData({
|
||||
predictions: [{ image: { imageBytes: "predicted-base64" } }],
|
||||
}),
|
||||
"predicted-base64",
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
extractPredictedImageData({
|
||||
generatedImages: [{ bytesBase64Encoded: "generated-base64" }],
|
||||
}),
|
||||
"generated-base64",
|
||||
);
|
||||
});
|
||||
|
|
@ -0,0 +1,56 @@
|
|||
import assert from "node:assert/strict";
|
||||
import test from "node:test";
|
||||
|
||||
import {
|
||||
extractImageFromResponse,
|
||||
getMimeType,
|
||||
getOpenAISize,
|
||||
parseAspectRatio,
|
||||
} from "../../../skills/baoyu-image-gen/scripts/providers/openai.ts";
|
||||
|
||||
test("OpenAI aspect-ratio parsing and size selection match model families", () => {
|
||||
assert.deepEqual(parseAspectRatio("16:9"), { width: 16, height: 9 });
|
||||
assert.equal(parseAspectRatio("wide"), null);
|
||||
assert.equal(parseAspectRatio("0:1"), null);
|
||||
|
||||
assert.equal(getOpenAISize("dall-e-3", "16:9", "2k"), "1792x1024");
|
||||
assert.equal(getOpenAISize("dall-e-3", "9:16", "normal"), "1024x1792");
|
||||
assert.equal(getOpenAISize("dall-e-2", "16:9", "2k"), "1024x1024");
|
||||
assert.equal(getOpenAISize("gpt-image-1.5", "16:9", "2k"), "1536x1024");
|
||||
assert.equal(getOpenAISize("gpt-image-1.5", "4:3", "2k"), "1024x1024");
|
||||
});
|
||||
|
||||
test("OpenAI mime-type detection covers supported reference image extensions", () => {
|
||||
assert.equal(getMimeType("frame.png"), "image/png");
|
||||
assert.equal(getMimeType("frame.jpg"), "image/jpeg");
|
||||
assert.equal(getMimeType("frame.webp"), "image/webp");
|
||||
assert.equal(getMimeType("frame.gif"), "image/gif");
|
||||
});
|
||||
|
||||
test("OpenAI response extraction supports base64 and URL download flows", async (t) => {
|
||||
const originalFetch = globalThis.fetch;
|
||||
t.after(() => {
|
||||
globalThis.fetch = originalFetch;
|
||||
});
|
||||
|
||||
const fromBase64 = await extractImageFromResponse({
|
||||
data: [{ b64_json: Buffer.from("hello").toString("base64") }],
|
||||
});
|
||||
assert.equal(Buffer.from(fromBase64).toString("utf8"), "hello");
|
||||
|
||||
globalThis.fetch = async () =>
|
||||
new Response(Uint8Array.from([1, 2, 3]), {
|
||||
status: 200,
|
||||
headers: { "Content-Type": "application/octet-stream" },
|
||||
});
|
||||
|
||||
const fromUrl = await extractImageFromResponse({
|
||||
data: [{ url: "https://example.com/image.png" }],
|
||||
});
|
||||
assert.deepEqual([...fromUrl], [1, 2, 3]);
|
||||
|
||||
await assert.rejects(
|
||||
() => extractImageFromResponse({ data: [{}] }),
|
||||
/No image in response/,
|
||||
);
|
||||
});
|
||||
|
|
@ -0,0 +1,88 @@
|
|||
import assert from "node:assert/strict";
|
||||
import test from "node:test";
|
||||
|
||||
import {
|
||||
buildInput,
|
||||
extractOutputUrl,
|
||||
parseModelId,
|
||||
} from "../../../skills/baoyu-image-gen/scripts/providers/replicate.ts";
|
||||
|
||||
function makeArgs(overrides = {}) {
|
||||
return {
|
||||
aspectRatio: null,
|
||||
quality: null,
|
||||
n: 1,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
test("Replicate model parsing accepts official formats and rejects malformed ones", () => {
|
||||
assert.deepEqual(parseModelId("google/nano-banana-pro"), {
|
||||
owner: "google",
|
||||
name: "nano-banana-pro",
|
||||
version: null,
|
||||
});
|
||||
assert.deepEqual(parseModelId("owner/model:abc123"), {
|
||||
owner: "owner",
|
||||
name: "model",
|
||||
version: "abc123",
|
||||
});
|
||||
|
||||
assert.throws(
|
||||
() => parseModelId("just-a-model-name"),
|
||||
/Invalid Replicate model format/,
|
||||
);
|
||||
});
|
||||
|
||||
test("Replicate input builder maps aspect ratio, image count, quality, and refs", () => {
|
||||
assert.deepEqual(
|
||||
buildInput(
|
||||
"A robot painter",
|
||||
makeArgs({
|
||||
aspectRatio: "16:9",
|
||||
quality: "2k",
|
||||
n: 3,
|
||||
}),
|
||||
["data:image/png;base64,AAAA"],
|
||||
),
|
||||
{
|
||||
prompt: "A robot painter",
|
||||
aspect_ratio: "16:9",
|
||||
number_of_images: 3,
|
||||
resolution: "2K",
|
||||
output_format: "png",
|
||||
image_input: ["data:image/png;base64,AAAA"],
|
||||
},
|
||||
);
|
||||
|
||||
assert.deepEqual(
|
||||
buildInput("A robot painter", makeArgs({ quality: "normal" }), ["ref"]),
|
||||
{
|
||||
prompt: "A robot painter",
|
||||
aspect_ratio: "match_input_image",
|
||||
resolution: "1K",
|
||||
output_format: "png",
|
||||
image_input: ["ref"],
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test("Replicate output extraction supports string, array, and object URLs", () => {
|
||||
assert.equal(
|
||||
extractOutputUrl({ output: "https://example.com/a.png" }),
|
||||
"https://example.com/a.png",
|
||||
);
|
||||
assert.equal(
|
||||
extractOutputUrl({ output: ["https://example.com/b.png"] }),
|
||||
"https://example.com/b.png",
|
||||
);
|
||||
assert.equal(
|
||||
extractOutputUrl({ output: { url: "https://example.com/c.png" } }),
|
||||
"https://example.com/c.png",
|
||||
);
|
||||
|
||||
assert.throws(
|
||||
() => extractOutputUrl({ output: { invalid: true } }),
|
||||
/Unexpected Replicate output format/,
|
||||
);
|
||||
});
|
||||
Loading…
Reference in New Issue