Mirror of https://github.com/lmstudio-ai/lms.git

Commit: stream formatting & inplace connect
@@ -1,10 +1,11 @@
-import { text, type SimpleLogger } from "@lmstudio/lms-common";
+import { SimpleLogger, text } from "@lmstudio/lms-common";
 import { LMStudioClient } from "@lmstudio/sdk";
 import chalk from "chalk";
 import { flag } from "cmd-ts";
 import inquirer from "inquirer";
 import { platform } from "os";
 import { getCliPref } from "./cliPref";
+import { getLogLevelMap, type LogLevelArgs, type LogLevelMap } from "./logLevel";
 import {
   checkHttpServer,
   getServerLastStatus,
@@ -113,28 +114,71 @@ async function maybeTryStartServer(logger: SimpleLogger, startServerOpts: StartS
   }
 }
 
+/**
+ * Creates a logger that will self delete messages at info level.
+ */
+function createSelfDeletingLogger(logger: SimpleLogger, levelMap: LogLevelMap) {
+  return new SimpleLogger(
+    "",
+    {
+      debug: levelMap.debug
+        ? (...messages) => {
+            process.stderr.clearLine(0);
+            logger.debug(...messages);
+          }
+        : () => {},
+      info: levelMap.info
+        ? (...messages) => {
+            process.stderr.clearLine(0);
+            logger.info(...messages);
+            if (!levelMap.debug) {
+              process.stderr.moveCursor(0, -1);
+            }
+          }
+        : () => {},
+      warn: levelMap.warn
+        ? (...messages) => {
+            process.stderr.clearLine(0);
+            logger.warn(...messages);
+          }
+        : () => {},
+      error: levelMap.error
+        ? (...messages) => {
+            process.stderr.clearLine(0);
+            logger.error(...messages);
+          }
+        : () => {},
+    },
+    { useLogLevelPrefixes: false },
+  );
+}
+
 export interface CreateClientOpts {}
 
 export async function createClient(
   logger: SimpleLogger,
-  { noLaunch, yes }: CreateClientArgs,
+  args: CreateClientArgs & LogLevelArgs,
   _opts: CreateClientOpts = {},
 ) {
+  const { noLaunch, yes } = args;
   let port: number;
+  const selfDeletingLogger = createSelfDeletingLogger(logger, getLogLevelMap(args));
   try {
-    const lastStatus = await getServerLastStatus(logger);
+    const lastStatus = await getServerLastStatus(selfDeletingLogger);
     port = lastStatus.port;
   } catch (e) {
-    logger.debug("Failed to get last server status", e);
+    selfDeletingLogger.debug("Failed to get last server status", e);
     port = 1234;
   }
-  if (!(await checkHttpServer(logger, port))) {
-    if (!(await maybeTryStartServer(logger, { port, noLaunch, yes }))) {
+  if (!(await checkHttpServer(selfDeletingLogger, port))) {
+    if (!(await maybeTryStartServer(selfDeletingLogger, { port, noLaunch, yes }))) {
       process.exit(1);
     }
   }
   const baseUrl = `ws://127.0.0.1:${port}`;
-  logger.debug(`Connecting to server with baseUrl ${port}`);
+  selfDeletingLogger.debug(`Connecting to server with baseUrl ${port}`);
+  process.stderr.clearLine(0);
   return new LMStudioClient({
     baseUrl,
     logger,
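The in-place connect behavior comes from the self-deleting logger above: each info line is printed, then the cursor is moved back up one row so the next status line overwrites it. Below is a minimal standalone sketch of that overwrite trick, assuming stderr is an interactive TTY (clearLine and moveCursor are the standard Node.js tty.WriteStream methods); the step list and sleep helper are invented for the demo and are not part of the commit.

// Sketch only: demonstrates the clearLine/moveCursor overwrite pattern used by
// createSelfDeletingLogger above. Not the actual lms implementation.
const sleep = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

async function demo() {
  const steps = ["Checking last server status...", "Starting the server...", "Connected."];
  for (const step of steps) {
    process.stderr.clearLine(0);       // wipe whatever the previous step left on this row
    process.stderr.write(step + "\n"); // print the current status line
    process.stderr.moveCursor(0, -1);  // move back up so the next write overwrites it
    await sleep(500);
  }
  process.stderr.moveCursor(0, 1);     // step below the last line so it stays visible
}

void demo();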
@@ -27,15 +27,20 @@ export const logLevelArgs = {
   }),
 };
 
-export function createLogger({
-  logLevel,
-  verbose,
-  quiet,
-}: {
+export interface LogLevelArgs {
   logLevel: "debug" | "info" | "warn" | "error" | "none" | undefined;
   verbose: boolean;
   quiet: boolean;
-}): SimpleLogger {
+}
+
+export interface LogLevelMap {
+  debug: boolean;
+  info: boolean;
+  warn: boolean;
+  error: boolean;
+}
+
+export function getLogLevelMap({ logLevel, verbose, quiet }: LogLevelArgs): LogLevelMap {
   let numSpecified = 0;
   if (logLevel !== undefined) {
     numSpecified++;
@@ -61,15 +66,25 @@ export function createLogger({
     logLevel = "debug";
   }
   const level = levels.indexOf(logLevel ?? "info");
+  return {
+    debug: level <= levels.indexOf("debug"),
+    info: level <= levels.indexOf("info"),
+    warn: level <= levels.indexOf("warn"),
+    error: level <= levels.indexOf("error"),
+  };
+}
+
+export function createLogger({ logLevel, verbose, quiet }: LogLevelArgs): SimpleLogger {
   const console = new Console({
     stdout: process.stderr,
     stderr: process.stderr,
   });
+  const levelMap = getLogLevelMap({ logLevel, verbose, quiet });
   const consoleObj = {
-    info: level <= levels.indexOf("info") ? console.info : () => {},
-    warn: level <= levels.indexOf("warn") ? console.warn : () => {},
-    error: level <= levels.indexOf("error") ? console.error : () => {},
-    debug: level <= levels.indexOf("debug") ? console.debug : () => {},
+    debug: levelMap.debug ? console.debug : () => {},
+    info: levelMap.info ? console.info : () => {},
+    warn: levelMap.warn ? console.warn : () => {},
+    error: levelMap.error ? console.error : () => {},
   };
   return new SimpleLogger("", consoleObj);
 }
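To make the new level-map thresholding concrete, here is a small self-contained sketch. It assumes the levels array referenced above, but not shown in this hunk, is ordered from most to least verbose as ["debug", "info", "warn", "error"]; it re-derives the same map and is not the lms source itself.

// Sketch only: re-derives the LogLevelMap thresholding from getLogLevelMap above,
// assuming levels are ordered from most to least verbose.
type LogLevel = "debug" | "info" | "warn" | "error";
const levels: readonly LogLevel[] = ["debug", "info", "warn", "error"];

function levelMapFor(logLevel: LogLevel | undefined) {
  const level = levels.indexOf(logLevel ?? "info");
  return {
    debug: level <= levels.indexOf("debug"),
    info: level <= levels.indexOf("info"),
    warn: level <= levels.indexOf("warn"),
    error: level <= levels.indexOf("error"),
  };
}

// levelMapFor("warn")    -> { debug: false, info: false, warn: true, error: true }
// levelMapFor(undefined) -> defaults to "info": { debug: false, info: true, warn: true, error: true }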
@@ -1,3 +1,5 @@
+import { type DiagnosticsLogEventData } from "@lmstudio/lms-shared-types";
+import chalk from "chalk";
 import { command, flag, subcommands } from "cmd-ts";
 import { createClient, createClientArgs } from "../createClient";
 import { createLogger, logLevelArgs } from "../logLevel";
@@ -24,16 +26,26 @@ const stream = command({
       if (json) {
         console.log(JSON.stringify(log));
       } else {
-        const better = {
-          ...log,
-          timestamp: new Date(log.timestamp).toISOString(),
-        };
-        console.log(better);
+        console.log("Time: " + chalk.greenBright(new Date(log.timestamp).toLocaleString()));
+        console.log("Type: " + chalk.greenBright(log.data.type));
+        switch (log.data.type) {
+          case "llm.prediction": {
+            printLlmPredictionLogEvent(log.data);
+          }
+        }
       }
     });
   },
 });
+
+function printLlmPredictionLogEvent(data: DiagnosticsLogEventData & { type: "llm.prediction" }) {
+  console.log("Model Identifier: " + chalk.greenBright(data.modelIdentifier));
+  console.log("Model Path: " + chalk.greenBright(data.modelPath));
+  console.log(chalk.underline("Full Prompt"));
+  console.log(chalk.cyanBright(data.input));
+  console.log();
+}
 
 export const log = subcommands({
   name: "log",
   description:
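The non-JSON branch above formats each event by hand and narrows on log.data.type before calling the type-specific printer. Below is a minimal sketch of that discriminated-union narrowing pattern; the event shape is invented for illustration and is not the real DiagnosticsLogEventData.

// Sketch only: shows the switch-based narrowing the stream formatter above relies on.
// The union is a stand-in, not the actual @lmstudio/lms-shared-types definition.
type ExampleLogEventData =
  | { type: "llm.prediction"; modelIdentifier: string; input: string }
  | { type: "other"; message: string };

function printEvent(data: ExampleLogEventData) {
  console.log("Type: " + data.type);
  switch (data.type) {
    case "llm.prediction": {
      // In this branch TypeScript knows modelIdentifier and input exist.
      console.log("Model Identifier: " + data.modelIdentifier);
      console.log(data.input);
      break;
    }
    case "other": {
      console.log(data.message);
      break;
    }
  }
}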
@@ -241,9 +241,9 @@ export async function startServer(
     }
   }
 
-  logger.warnText`
-    Launching LM Studio minimized... (If you don't want LM Studio to launch automatically,
-    please use the ${chalk.yellow("--no-launch")} flag.)
+  logger.infoText`
+    Launching LM Studio minimized... (Disable auto-launching via the
+    ${chalk.yellow("--no-launch")} flag.)
   `;
 
   const launched = await launchApplication(logger);
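The message above is written with the infoText tagged-template form rather than a plain string. The sketch below is a guess at why: a whitespace-collapsing template tag lets a long message be wrapped across source lines. This is an assumption about the helper's behavior, not the @lmstudio/lms-common implementation.

// Sketch only: a generic whitespace-collapsing tagged template, standing in for the
// kind of helper the logger.infoText`...` call above appears to rely on (assumed, not verified).
function collapseText(strings: TemplateStringsArray, ...values: unknown[]): string {
  const raw = strings.reduce(
    (acc, part, i) => acc + part + (i < values.length ? String(values[i]) : ""),
    "",
  );
  // Fold newlines and surrounding indentation into single spaces, then trim the ends.
  return raw.replace(/\s*\n\s*/g, " ").trim();
}

const flagName = "--no-launch";
console.log(collapseText`
  Launching LM Studio minimized... (Disable auto-launching via the
  ${flagName} flag.)
`);
// -> Launching LM Studio minimized... (Disable auto-launching via the --no-launch flag.)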