mirror of https://github.com/sqlchat/sqlchat.git
synced 2025-09-26 09:34:13 +08:00

chore: add model info to the chat table
@@ -0,0 +1,8 @@
+/*
+  Warnings:
+
+  - Added the required column `model` to the `Chat` table without a default value. This is not possible if the table is not empty.
+
+*/
+-- AlterTable
+ALTER TABLE "Chat" ADD COLUMN "model" JSONB NOT NULL;
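The warning above is Prisma's standard note that a NOT NULL column without a default cannot be added to a table that already contains rows, so this migration only applies cleanly while the Chat table is empty. As a rough illustration of how the same change could be backfilled on a non-empty table, here is a minimal TypeScript sketch using Prisma's $executeRawUnsafe; the function name and the fallback JSON value are assumptions, not part of this commit.

import { PrismaClient } from "@prisma/client";

const prisma = new PrismaClient();

// Sketch only: add the column as nullable, backfill existing rows with an assumed
// default model object, then tighten the column to NOT NULL.
async function addModelColumnWithBackfill() {
  await prisma.$executeRawUnsafe(`ALTER TABLE "Chat" ADD COLUMN "model" JSONB`);
  await prisma.$executeRawUnsafe(
    `UPDATE "Chat" SET "model" = '{"name":"gpt-3.5-turbo"}'::jsonb WHERE "model" IS NULL`
  );
  await prisma.$executeRawUnsafe(`ALTER TABLE "Chat" ALTER COLUMN "model" SET NOT NULL`);
}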
@@ -13,6 +13,7 @@ datasource db {
 model Chat {
   id        String    @id @default(uuid())
   createdAt DateTime  @default(now())
+  model     Json
   ctx       Json
   messages  Message[]
 
@@ -5,7 +5,7 @@ import {
 } from "eventsource-parser";
 import { NextRequest } from "next/server";
 import { API_KEY } from "@/env";
-import { openAIApiEndpoint, openAIApiKey } from "@/utils";
+import { openAIApiEndpoint, openAIApiKey, gpt35 } from "@/utils";
 
 export const config = {
   runtime: "edge",
@@ -49,11 +49,11 @@ const handler = async (req: NextRequest) => {
     },
     method: "POST",
     body: JSON.stringify({
-      model: "gpt-3.5-turbo",
+      model: gpt35.name,
       messages: reqBody.messages,
-      temperature: 0,
-      frequency_penalty: 0.0,
-      presence_penalty: 0.0,
+      temperature: gpt35.temperature,
+      frequency_penalty: gpt35.frequency_penalty,
+      presence_penalty: gpt35.presence_penalty,
       stream: true,
     }),
   });
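Since every request parameter except messages and stream now comes from the shared gpt35 object, an equivalent way to build the payload is to spread the config and only override the per-request fields. The sketch below is an alternative, not what the commit does; it assumes the gpt35 shape added in this commit, and the helper name is hypothetical.

import { gpt35 } from "@/utils";

// Sketch: derive the chat-completions body from the shared config instead of listing every field.
function buildChatCompletionBody(messages: unknown[]) {
  const { name, ...samplingParams } = gpt35;
  return JSON.stringify({
    model: name,
    ...samplingParams, // temperature, frequency_penalty, presence_penalty
    messages,
    stream: true,
  });
}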
@@ -1,6 +1,7 @@
 import { PrismaClient } from "@prisma/client";
 import { NextApiRequest, NextApiResponse } from "next";
 import { Conversation, Message } from "@/types";
+import { gpt35 } from "@/utils";
 
 const prisma = new PrismaClient();
 
@@ -36,6 +37,7 @@ export default async function handler(
       data: {
         id: conversation.id,
         createdAt: new Date(conversation.createdAt),
+        model: gpt35,
         ctx: {},
         messages: {
           create: messages.map((message) => ({
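Because the column is declared as Json, the generated Prisma Client returns the stored value as a generic JSON type rather than as the gpt35 shape. Below is a minimal read-back sketch assuming the object shape added in this commit; the helper name and the fallback to gpt35 are assumptions, not part of the change.

import { PrismaClient } from "@prisma/client";
import { gpt35 } from "@/utils";

const prisma = new PrismaClient();

// Hypothetical helper: load a chat and narrow its stored model info back to the gpt35 shape.
async function getChatModel(chatId: string) {
  const chat = await prisma.chat.findUnique({ where: { id: chatId } });
  if (!chat) return gpt35; // fall back to the default config when the chat is missing
  return chat.model as typeof gpt35;
}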
@@ -2,3 +2,4 @@ export * from "./id";
 export * from "./openai";
 export * from "./sql";
 export * from "./execution";
+export * from "./model";
src/utils/model.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
+export const gpt35 = {
+  name: "gpt-3.5-turbo",
+  temperature: 0,
+  frequency_penalty: 0.0,
+  presence_penalty: 0.0,
+};
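Keeping the model name and sampling parameters in one exported object keeps the API route and the persistence layer in sync, and leaves room for further presets. The extension below is purely illustrative; gpt4 and pickModel are hypothetical and not part of this commit.

import { gpt35 } from "@/utils";

// Hypothetical second preset with the same shape as gpt35.
export const gpt4 = {
  name: "gpt-4",
  temperature: 0,
  frequency_penalty: 0.0,
  presence_penalty: 0.0,
};

// Hypothetical selector: resolve a preset by name, defaulting to gpt-3.5-turbo.
export function pickModel(name?: string) {
  return name === gpt4.name ? gpt4 : gpt35;
}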