From f75727f9f3dd5846ee25bd28462db186ae00d69b Mon Sep 17 00:00:00 2001
From: Rugved Somwanshi
Date: Mon, 8 Sep 2025 16:59:16 -0400
Subject: [PATCH] Change to repository unstable

---
 src/subcommands/chat.ts | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/subcommands/chat.ts b/src/subcommands/chat.ts
index 3b47a19..dc6d26b 100644
--- a/src/subcommands/chat.ts
+++ b/src/subcommands/chat.ts
@@ -1,6 +1,6 @@
 import { Command } from "@commander-js/extra-typings";
 import type { SimpleLogger } from "@lmstudio/lms-common";
-import type { LLMPredictionStats, StaffPickedModel } from "@lmstudio/lms-shared-types";
+import type { LLMPredictionStats, HubModel } from "@lmstudio/lms-shared-types";
 import { Chat, type LLM } from "@lmstudio/sdk";
 import * as readline from "readline";
 import { addCreateClientOptions, createClient } from "../createClient.js";
@@ -113,10 +113,10 @@ export const chat = addLogLevelOptions(
     // No model loaded, offer to download a staff pick or use existing downloaded model
     const cliPref = await getCliPref(logger);

-    let staffPicks: StaffPickedModel[] = [];
+    let staffPicks: HubModel[] = [];
     if (offline !== true) {
       try {
-        staffPicks = await client.system.unstable.getStaffPicks();
+        staffPicks = await client.repository.unstable.getModelCatalog();
       } catch (err) {
         // If error says network connection failed,
         // then we are offline, so just use empty staff picks
@@ -149,7 +149,7 @@ export const chat = addLogLevelOptions(
       return {
         name: m.owner + "/" + m.name,
         isDownloaded: modelKeys.includes(m.owner + "/" + m.name),
-        size: m.sizeBytes,
+        size: m.metadata.minMemoryUsageBytes,
         staffPicked: true,
       };
     })
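
Below is a minimal TypeScript sketch of the migrated call path, for illustration only and not part of the patch. It assumes an LMStudioClient from "@lmstudio/sdk" exposing the same repository.unstable.getModelCatalog() surface used above, and that each HubModel entry carries owner, name, and metadata.minMemoryUsageBytes as the diff implies; the helper name listCatalogSummaries is hypothetical.

import { LMStudioClient } from "@lmstudio/sdk";

// Hypothetical helper mirroring the mapping in the updated chat.ts: it reshapes
// catalog entries into { name, size } pairs, using metadata.minMemoryUsageBytes
// as the size field in place of the old sizeBytes.
async function listCatalogSummaries(): Promise<{ name: string; size: number }[]> {
  const client = new LMStudioClient();
  // Assumption: callers guard this with an offline check, as chat.ts does.
  const catalog = await client.repository.unstable.getModelCatalog();
  return catalog.map(m => ({
    name: m.owner + "/" + m.name,
    size: m.metadata.minMemoryUsageBytes,
  }));
}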