Skip to content
Open
70 changes: 70 additions & 0 deletions e2e-tests/local_provider_settings.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
import { expect } from "@playwright/test";
import { test as testWithPo } from "./helpers/test_helper";

testWithPo("Local provider endpoint settings persist", async ({ po }) => {
await po.setUp();
await po.goToSettingsTab();

await expect(po.page.getByText("Ollama", { exact: true })).toBeVisible();

await po.page.getByText("Ollama", { exact: true }).click();
await po.page.waitForSelector('h1:has-text("Configure Ollama")', {
state: "visible",
timeout: 5000,
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

use Timeout.MEDIUM instead of hardcoding 5000 - same below

MEDIUM: process.env.CI ? 30_000 : 15_000,

});

const ollamaInput = po.page.getByLabel(
"Local model endpoint (Ollama-compatible)",
);
await expect(ollamaInput).toBeVisible();
await expect(
po.page.getByText("LM Studio API endpoint", { exact: true }),
).toHaveCount(0);

const ollamaEndpoint = "http://localhost:11435";
await ollamaInput.fill(ollamaEndpoint);
await po.page.getByRole("button", { name: "Save" }).click();
await expect(ollamaInput).toHaveValue(ollamaEndpoint);

await po.page.getByRole("button", { name: "Go Back" }).click();
await expect(po.page.getByText("AI Providers")).toBeVisible();

await po.page.getByText("Ollama", { exact: true }).click();
await po.page.waitForSelector('h1:has-text("Configure Ollama")', {
state: "visible",
timeout: 5000,
});
await expect(ollamaInput).toHaveValue(ollamaEndpoint);

await po.page.getByRole("button", { name: "Go Back" }).click();
await expect(po.page.getByText("AI Providers")).toBeVisible();

await po.page.getByText("LM Studio", { exact: true }).click();
await po.page.waitForSelector('h1:has-text("Configure LM Studio")', {
state: "visible",
timeout: 5000,
});

const lmStudioInput = po.page.getByLabel("LM Studio API endpoint");
await expect(lmStudioInput).toBeVisible();
await expect(
po.page.getByText("Local model endpoint (Ollama-compatible)", {
exact: true,
}),
).toHaveCount(0);

const lmStudioEndpoint = "http://localhost:12345";
await lmStudioInput.fill(lmStudioEndpoint);
await po.page.getByRole("button", { name: "Save" }).click();
await expect(lmStudioInput).toHaveValue(lmStudioEndpoint);

await po.page.getByRole("button", { name: "Go Back" }).click();
await expect(po.page.getByText("AI Providers")).toBeVisible();

await po.page.getByText("LM Studio", { exact: true }).click();
await po.page.waitForSelector('h1:has-text("Configure LM Studio")', {
state: "visible",
timeout: 5000,
});
await expect(lmStudioInput).toHaveValue(lmStudioEndpoint);
});
64 changes: 64 additions & 0 deletions src/__tests__/normalizeLmStudioBaseUrl.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
import { DEFAULT_LM_STUDIO_ENDPOINT } from "@/constants/localModels";
import {
getLMStudioBaseUrl,
normalizeLmStudioBaseUrl,
} from "@/ipc/utils/lm_studio_utils";
import { describe, expect, it } from "vitest";

// Unit coverage for LM Studio endpoint normalization: protocol/port
// defaulting, whitespace trimming, and /v1 suffix stripping.
describe("normalizeLmStudioBaseUrl", () => {
  it("returns default endpoint when value is undefined", () => {
    expect(normalizeLmStudioBaseUrl()).toBe(DEFAULT_LM_STUDIO_ENDPOINT);
  });

  it("trims whitespace and adds protocol", () => {
    // Bare "localhost" normalizes to the full default endpoint.
    // (Fixed: was wrapped in a redundant `${...}` template literal.)
    expect(normalizeLmStudioBaseUrl(" localhost ")).toBe(
      DEFAULT_LM_STUDIO_ENDPOINT,
    );
  });

  it("adds default port when missing", () => {
    expect(normalizeLmStudioBaseUrl("192.168.0.10")).toBe(
      "http://192.168.0.10:1234",
    );
  });

  it("adds default port when protocol provided without port", () => {
    expect(normalizeLmStudioBaseUrl("http://localhost")).toBe(
      "http://localhost:1234",
    );
    expect(normalizeLmStudioBaseUrl("https://example.com")).toBe(
      "https://example.com:1234",
    );
  });

  it("removes trailing /v1 if present", () => {
    expect(normalizeLmStudioBaseUrl("http://example.com:9000/v1")).toBe(
      "http://example.com:9000",
    );
    expect(normalizeLmStudioBaseUrl("http://example.com:9000/v1/")).toBe(
      "http://example.com:9000",
    );
  });

  it("preserves additional path segments", () => {
    // Custom paths survive normalization (only trailing slash is dropped).
    expect(normalizeLmStudioBaseUrl("http://example.com/custom/path/")).toBe(
      "http://example.com:1234/custom/path",
    );
  });
});

// getLMStudioBaseUrl should honor the test-only env override and still
// normalize it (trailing /v1 stripped).
describe("getLMStudioBaseUrl", () => {
  it("prefers env override when set", () => {
    const previous = process.env.LM_STUDIO_BASE_URL_FOR_TESTING;
    process.env.LM_STUDIO_BASE_URL_FOR_TESTING = "http://override:9999/v1";
    try {
      expect(getLMStudioBaseUrl()).toBe("http://override:9999");
    } finally {
      // Restore the env var so other tests are unaffected.
      if (previous === undefined) {
        delete process.env.LM_STUDIO_BASE_URL_FOR_TESTING;
      } else {
        process.env.LM_STUDIO_BASE_URL_FOR_TESTING = previous;
      }
    }
  });
});
10 changes: 8 additions & 2 deletions src/__tests__/parseOllamaHost.test.ts
Original file line number Diff line number Diff line change
@@ -1,19 +1,25 @@
import { parseOllamaHost } from "@/ipc/handlers/local_model_ollama_handler";
import { describe, it, expect } from "vitest";
import { DEFAULT_OLLAMA_ENDPOINT } from "@/constants/localModels";

describe("parseOllamaHost", () => {
it("should return default URL when no host is provided", () => {
const result = parseOllamaHost();
expect(result).toBe("http://localhost:11434");
expect(result).toBe(DEFAULT_OLLAMA_ENDPOINT);
});

it("should return default URL when host is undefined", () => {
const result = parseOllamaHost(undefined);
expect(result).toBe("http://localhost:11434");
expect(result).toBe(DEFAULT_OLLAMA_ENDPOINT);
});

it("should return default URL when host is empty string", () => {
const result = parseOllamaHost("");
expect(result).toBe(DEFAULT_OLLAMA_ENDPOINT);
});

it("should trim whitespace before parsing", () => {
const result = parseOllamaHost(" localhost ");
expect(result).toBe("http://localhost:11434");
});

Expand Down
4 changes: 4 additions & 0 deletions src/__tests__/readSettings.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@
mockSettingsPath,
expect.stringContaining('"selectedModel"'),
);
expect(scrubSettings(result)).toMatchInlineSnapshot(`

Check failure on line 54 in src/__tests__/readSettings.test.ts

View workflow job for this annotation

GitHub Actions / test (macos, macos-latest, 1, 4)

src/__tests__/readSettings.test.ts > readSettings > when settings file does not exist > should create default settings file and return default settings

Error: Snapshot `readSettings > when settings file does not exist > should create default settings file and return default settings 1` mismatched - Expected + Received @@ -3,14 +3,14 @@ "enableAutoUpdate": true, "enableProLazyEditsMode": true, "enableProSmartFilesContextMode": true, "experiments": {}, "hasRunBefore": false, + "isRunning": false, + "lastKnownPerformance": undefined, "lmStudioEndpoint": "http://localhost:1234", "ollamaEndpoint": "http://localhost:11434", - "isRunning": false, - "lastKnownPerformance": undefined, "providerSettings": {}, "releaseChannel": "stable", "selectedChatMode": "build", "selectedModel": { "name": "auto", ❯ src/__tests__/readSettings.test.ts:54:37
{
"enableAutoFixProblems": false,
"enableAutoUpdate": true,
Expand All @@ -59,6 +59,8 @@
"enableProSmartFilesContextMode": true,
"experiments": {},
"hasRunBefore": false,
"lmStudioEndpoint": "http://localhost:1234",
"ollamaEndpoint": "http://localhost:11434",
"isRunning": false,
"lastKnownPerformance": undefined,
"providerSettings": {},
Expand Down Expand Up @@ -299,7 +301,7 @@

const result = readSettings();

expect(scrubSettings(result)).toMatchInlineSnapshot(`

Check failure on line 304 in src/__tests__/readSettings.test.ts

View workflow job for this annotation

GitHub Actions / test (macos, macos-latest, 1, 4)

src/__tests__/readSettings.test.ts > readSettings > error handling > should return default settings when file read fails

Error: Snapshot `readSettings > error handling > should return default settings when file read fails 1` mismatched - Expected + Received @@ -3,14 +3,14 @@ "enableAutoUpdate": true, "enableProLazyEditsMode": true, "enableProSmartFilesContextMode": true, "experiments": {}, "hasRunBefore": false, + "isRunning": false, + "lastKnownPerformance": undefined, "lmStudioEndpoint": "http://localhost:1234", "ollamaEndpoint": "http://localhost:11434", - "isRunning": false, - "lastKnownPerformance": undefined, "providerSettings": {}, "releaseChannel": "stable", "selectedChatMode": "build", "selectedModel": { "name": "auto", ❯ src/__tests__/readSettings.test.ts:304:37
{
"enableAutoFixProblems": false,
"enableAutoUpdate": true,
Expand All @@ -307,6 +309,8 @@
"enableProSmartFilesContextMode": true,
"experiments": {},
"hasRunBefore": false,
"lmStudioEndpoint": "http://localhost:1234",
"ollamaEndpoint": "http://localhost:11434",
"isRunning": false,
"lastKnownPerformance": undefined,
"providerSettings": {},
Expand Down
196 changes: 196 additions & 0 deletions src/components/LocalModelEndpointSettings.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,196 @@
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import {
DEFAULT_LM_STUDIO_ENDPOINT,
DEFAULT_OLLAMA_ENDPOINT,
} from "@/constants/localModels";
import { useSettings } from "@/hooks/useSettings";
import type { UserSettings } from "@/lib/schemas";
import { showError, showSuccess } from "@/lib/toast";
import { useEffect, useState } from "react";

// Which endpoint's save/reset request is currently in flight (null = idle).
type SavingTarget = "ollama" | "lmstudio" | null;

// The two supported local model providers.
type EndpointKind = "ollama" | "lmstudio";

// Static per-provider copy and defaults for the endpoint form fields.
const endpointConfig: Record<
  EndpointKind,
  {
    defaultValue: string; // built-in endpoint used for Reset and initial state
    label: string; // visible form label (also matched by e2e tests)
    description: string; // helper text rendered under the label
    successMessage: string; // toast shown after a successful save
    errorMessage: string; // toast prefix shown when a save fails
  }
> = {
  ollama: {
    defaultValue: DEFAULT_OLLAMA_ENDPOINT,
    label: "Local model endpoint (Ollama-compatible)",
    description:
      "Used for listing and running Ollama-compatible local models, including remote hosts.",
    successMessage: "Ollama endpoint updated",
    errorMessage: "Failed to update Ollama endpoint",
  },
  lmstudio: {
    defaultValue: DEFAULT_LM_STUDIO_ENDPOINT,
    label: "LM Studio API endpoint",
    description:
      "Base URL for the LM Studio server. Trailing /v1 is optional and will be handled automatically.",
    successMessage: "LM Studio endpoint updated",
    errorMessage: "Failed to update LM Studio endpoint",
  },
};

// When `kind` is provided, only that provider's field is rendered;
// otherwise both fields are shown.
type LocalModelEndpointSettingsProps = {
  kind?: EndpointKind;
};

/**
 * Settings panel for configuring local model server endpoints (Ollama and
 * LM Studio). Each field offers Save (persists the trimmed input) and Reset
 * (persists the built-in default), both going through `updateSettings`.
 *
 * @param kind - Optional: render only this provider's field; defaults to both.
 * @returns The settings form, or null until settings have loaded.
 */
export function LocalModelEndpointSettings({
  kind,
}: LocalModelEndpointSettingsProps) {
  const { settings, updateSettings } = useSettings();
  const [ollamaValue, setOllamaValue] = useState(DEFAULT_OLLAMA_ENDPOINT);
  const [lmStudioValue, setLmStudioValue] = useState(
    DEFAULT_LM_STUDIO_ENDPOINT,
  );
  const [saving, setSaving] = useState<SavingTarget>(null);
  const ollamaEndpoint = settings?.ollamaEndpoint;
  const lmStudioEndpoint = settings?.lmStudioEndpoint;
  const endpointKinds: EndpointKind[] = kind ? [kind] : ["ollama", "lmstudio"];

  // Sync local input state from persisted settings once they load/change.
  useEffect(() => {
    if (!settings) {
      return;
    }
    setOllamaValue(ollamaEndpoint ?? DEFAULT_OLLAMA_ENDPOINT);
  }, [ollamaEndpoint, settings]);

  useEffect(() => {
    if (!settings) {
      return;
    }
    setLmStudioValue(lmStudioEndpoint ?? DEFAULT_LM_STUDIO_ENDPOINT);
  }, [lmStudioEndpoint, settings]);

  if (!settings) {
    return null;
  }

  // Shared persistence path for Save and Reset; previously this logic was
  // duplicated verbatim in handleSave and handleReset. The parameter is named
  // `target` to avoid shadowing the `kind` prop.
  const persistEndpoint = async (
    target: EndpointKind,
    valueToPersist: string,
    isReset: boolean,
  ) => {
    const config = endpointConfig[target];
    const payload: Partial<UserSettings> =
      target === "ollama"
        ? { ollamaEndpoint: valueToPersist }
        : { lmStudioEndpoint: valueToPersist };

    setSaving(target);
    try {
      await updateSettings(payload);
      if (target === "ollama") {
        setOllamaValue(valueToPersist);
      } else {
        setLmStudioValue(valueToPersist);
      }
      showSuccess(
        isReset ? `${config.successMessage} (reset)` : config.successMessage,
      );
    } catch (error) {
      const message =
        error instanceof Error
          ? error.message
          : String(error ?? "Unknown error");
      showError(`${config.errorMessage}: ${message}`);
    } finally {
      // Only clear the flag if it still belongs to this request.
      setSaving((current) => (current === target ? null : current));
    }
  };

  // Persist the (trimmed) value currently in the input.
  const handleSave = (target: EndpointKind) => {
    const value = target === "ollama" ? ollamaValue : lmStudioValue;
    return persistEndpoint(target, value.trim(), false);
  };

  // Restore the endpoint to its built-in default.
  const handleReset = (target: EndpointKind) =>
    persistEndpoint(target, endpointConfig[target].defaultValue, true);

  const renderEndpointField = (target: EndpointKind) => {
    const config = endpointConfig[target];
    const value = target === "ollama" ? ollamaValue : lmStudioValue;
    const onChange = target === "ollama" ? setOllamaValue : setLmStudioValue;
    const isSaving = saving === target;
    const isDefault = value === config.defaultValue;

    return (
      <div className="space-y-2">
        <div className="space-y-1">
          <Label htmlFor={`${target}-endpoint`} className="text-sm font-medium">
            {config.label}
          </Label>
          <p className="text-sm text-gray-500 dark:text-gray-400">
            {config.description}
          </p>
        </div>
        <div className="flex flex-col gap-2 sm:flex-row sm:items-center">
          <Input
            id={`${target}-endpoint`}
            value={value}
            onChange={(event) => onChange(event.target.value)}
            className="sm:flex-1"
            autoComplete="off"
            spellCheck={false}
          />
          <div className="flex gap-2">
            <Button
              onClick={() => handleSave(target)}
              disabled={isSaving}
              type="button"
            >
              {isSaving ? "Saving..." : "Save"}
            </Button>
            <Button
              onClick={() => handleReset(target)}
              variant="ghost"
              disabled={isSaving || isDefault}
              type="button"
            >
              Reset
            </Button>
          </div>
        </div>
        <p className="text-xs text-gray-500 dark:text-gray-400">
          Default: {config.defaultValue}
        </p>
      </div>
    );
  };

  return (
    <div className="space-y-6">
      {endpointKinds.map((endpointKind) => (
        <div key={endpointKind}>{renderEndpointField(endpointKind)}</div>
      ))}
    </div>
  );
}
Loading
Loading