Mirror of https://github.com/foss42/apidash.git (synced 2025-12-01 18:28:25 +08:00)
Implemented SaveObject, LLMModel class & OllamaProvider
packages/genai/lib/llm_input_payload.dart (Normal file, 59 lines)
@@ -0,0 +1,59 @@
import 'llm_config.dart';

class LLMInputPayload {
  String endpoint;
  String credential;
  String systemPrompt;
  String userPrompt;
  Map<String, LLMModelConfiguration> configMap;

  LLMInputPayload({
    required this.endpoint,
    required this.credential,
    required this.systemPrompt,
    required this.userPrompt,
    required this.configMap,
  });

  LLMInputPayload clone() {
    Map<String, LLMModelConfiguration> cmap = {};
    for (final k in configMap.keys) {
      cmap[k] = configMap[k]!.clone();
    }
    return LLMInputPayload(
      endpoint: endpoint,
      credential: credential,
      systemPrompt: systemPrompt,
      userPrompt: userPrompt,
      configMap: cmap,
    );
  }

  static Map toJSON(LLMInputPayload payload) {
    Map cmap = {};
    for (final e in payload.configMap.entries) {
      cmap[e.key] = e.value.toJson();
    }
    return {
      'endpoint': payload.endpoint,
      'credential': payload.credential,
      'system_prompt': payload.systemPrompt,
      'user_prompt': payload.userPrompt,
      'config_map': cmap,
    };
  }

  static LLMInputPayload fromJSON(Map json) {
    Map<String, LLMModelConfiguration> cmap = {};
    for (final k in json['config_map'].keys) {
      cmap[k] = LLMModelConfiguration.fromJson(json['config_map'][k]);
    }
    return LLMInputPayload(
      endpoint: json['endpoint'],
      credential: json['credential'],
      systemPrompt: json['system_prompt'],
      userPrompt: json['user_prompt'],
      configMap: cmap,
    );
  }
}
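For orientation, a minimal sketch of how this payload is meant to round-trip, using only the API above (the empty configMap avoids assumptions about LLMModelConfiguration internals):

// Hypothetical usage sketch, not part of this commit.
final payload = LLMInputPayload(
  endpoint: 'http://localhost:11434/v1/chat/completions',
  credential: '',
  systemPrompt: 'You are a helpful assistant.',
  userPrompt: 'Hello!',
  configMap: {},
);
final restored = LLMInputPayload.fromJSON(LLMInputPayload.toJSON(payload));
assert(restored.userPrompt == payload.userPrompt);
final copy = payload.clone()..userPrompt = 'Changed';
assert(payload.userPrompt == 'Hello!'); // clone() yields an independent copy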
packages/genai/lib/llm_saveobject.dart (Normal file, 48 lines)
@@ -0,0 +1,48 @@
import 'llm_config.dart';
import 'providers/common.dart';
import 'providers/providers.dart';

class LLMSaveObject {
  String endpoint;
  String credential;
  LLMProvider provider;
  LLMModel selectedLLM;
  Map<String, LLMModelConfiguration> configMap;

  LLMSaveObject({
    required this.endpoint,
    required this.credential,
    required this.configMap,
    required this.selectedLLM,
    required this.provider,
  });

  Map toJSON() {
    Map cmap = {};
    for (final e in configMap.entries) {
      cmap[e.key] = e.value.toJson();
    }
    return {
      'endpoint': endpoint,
      'credential': credential,
      'config_map': cmap,
      'selected_llm': selectedLLM.identifier,
      'provider': provider.name,
    };
  }

  static LLMSaveObject fromJSON(Map json) {
    Map<String, LLMModelConfiguration> cmap = {};
    for (final k in json['config_map'].keys) {
      cmap[k] = LLMModelConfiguration.fromJson(json['config_map'][k]);
    }
    final provider = LLMProvider.fromName(json['provider']);
    return LLMSaveObject(
      endpoint: json['endpoint'],
      credential: json['credential'],
      configMap: cmap,
      selectedLLM: provider.getLLMByIdentifier(json['selected_llm']),
      provider: provider,
    );
  }
}
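Note that toJSON stores only the model identifier and provider name; fromJSON re-resolves the model through the provider's registry and will throw if the identifier has since disappeared. A hedged sketch ('some-model' is a placeholder identifier, assumed to be registered in LLMManager.models):

// Hypothetical usage sketch, not part of this commit.
final save = LLMSaveObject(
  endpoint: 'http://localhost:11434/v1/chat/completions',
  credential: '',
  configMap: {},
  selectedLLM: LLMProvider.ollama.getLLMByIdentifier('some-model'),
  provider: LLMProvider.ollama,
);
final restored = LLMSaveObject.fromJSON(save.toJSON());
assert(restored.selectedLLM.identifier == save.selectedLLM.identifier);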
packages/genai/lib/providers/common.dart (Normal file, 40 lines)
@@ -0,0 +1,40 @@
import '../llm_input_payload.dart';
import '../llm_request.dart';
import 'providers.dart';

class LLMModel {
  const LLMModel(this.identifier, this.modelName, this.provider);
  final String identifier;
  final String modelName;
  final LLMProvider provider;

  static Map toJson(LLMModel m) {
    return {'identifier': m.identifier, 'provider': m.provider.name};
  }

  static LLMModel fromJson(Map json) {
    return LLMProvider.fromName(
      json['provider'],
    ).getLLMByIdentifier(json['identifier']);
  }
}

abstract class ModelController {
  LLMInputPayload get inputPayload => throw UnimplementedError();

  LLMRequestDetails createRequest(
    LLMModel model,
    LLMInputPayload inputPayload, {
    bool stream = false,
  }) {
    throw UnimplementedError();
  }

  String? outputFormatter(Map x) {
    throw UnimplementedError();
  }

  String? streamOutputFormatter(Map x) {
    throw UnimplementedError();
  }
}
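ModelController is the extension point: a provider overrides only the members it supports, and the base class throws UnimplementedError for the rest. A minimal (hypothetical) subclass for illustration; OllamaModelController in the next file is the first real implementation:

// Hypothetical sketch, not part of this commit: the smallest possible
// controller, overriding just the non-streaming output formatter.
class EchoModelController extends ModelController {
  @override
  String? outputFormatter(Map x) => x['echo'] as String?;
}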
packages/genai/lib/providers/ollama.dart (Normal file, 72 lines)
@@ -0,0 +1,72 @@
import '../llm_config.dart';
import '../llm_input_payload.dart';
import '../llm_request.dart';
import 'common.dart';

class OllamaModelController extends ModelController {
  static final instance = OllamaModelController();

  // The trailing clone() deep-copies the config entries, so callers can
  // mutate the returned payload without touching the shared entries in
  // defaultLLMConfigurations.
  @override
  LLMInputPayload get inputPayload => LLMInputPayload(
    endpoint: 'http://localhost:11434/v1/chat/completions',
    credential: '',
    systemPrompt: '',
    userPrompt: '',
    configMap: {
      LLMConfigName.temperature.name:
          defaultLLMConfigurations[LLMConfigName.temperature]!,
      LLMConfigName.top_p.name: defaultLLMConfigurations[LLMConfigName.top_p]!,
    },
  ).clone();

  @override
  LLMRequestDetails createRequest(
    LLMModel model,
    LLMInputPayload inputPayload, {
    bool stream = false,
  }) {
    return LLMRequestDetails(
      endpoint: inputPayload.endpoint,
      headers: {},
      method: 'POST',
      body: {
        "model": model.identifier,
        if (stream) ...{'stream': true},
        "messages": [
          {"role": "system", "content": inputPayload.systemPrompt},
          {"role": "user", "content": inputPayload.userPrompt},
        ],
        "temperature":
            inputPayload
                .configMap[LLMConfigName.temperature.name]
                ?.configValue
                .value
                ?.$2 ??
            0.5,
        "top_p":
            inputPayload
                .configMap[LLMConfigName.top_p.name]
                ?.configValue
                .value
                ?.$2 ??
            0.95,
        if (inputPayload.configMap[LLMConfigName.max_tokens.name] != null) ...{
          "max_tokens": inputPayload
              .configMap[LLMConfigName.max_tokens.name]!
              .configValue
              .value,
        },
      },
    );
  }

  @override
  String? outputFormatter(Map x) {
    return x['choices']?[0]['message']?['content'];
  }

  @override
  String? streamOutputFormatter(Map x) {
    return x['choices']?[0]['delta']?['content'];
  }
}
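Putting the controller to work: the sketch below assumes LLMRequestDetails exposes the endpoint/method/headers/body passed to its constructor, and uses a placeholder model identifier; the assertions mirror the fallbacks and formatters defined above.

// Hypothetical usage sketch, not part of this commit.
final controller = OllamaModelController.instance;
final payload = controller.inputPayload
  ..systemPrompt = 'You are concise.'
  ..userPrompt = 'Say hi.';
final model = LLMProvider.ollama.getLLMByIdentifier('some-model'); // placeholder
final req = controller.createRequest(model, payload, stream: false);
// req.body['temperature'] falls back to 0.5 when no config value is set.
final text = controller.outputFormatter({
  'choices': [
    {'message': {'content': 'hi'}},
  ],
});
assert(text == 'hi');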
packages/genai/lib/providers/providers.dart (Normal file, 67 lines)
@@ -0,0 +1,67 @@
import '../llm_manager.dart';
import 'common.dart';
import 'ollama.dart';

enum LLMProvider {
  gemini('Gemini'),
  openai('OpenAI'),
  anthropic('Anthropic'),
  ollama('Ollama'),
  azureopenai('Azure OpenAI');

  const LLMProvider(this.displayName);

  final String displayName;

  List<LLMModel> get models {
    final avl = LLMManager.models[name.toLowerCase()];
    if (avl == null) return [];
    List<LLMModel> models = [];
    for (final x in avl) {
      models.add(LLMModel(x[0], x[1], this));
    }
    return models;
  }

  ModelController get modelController {
    switch (this) {
      case LLMProvider.ollama:
        return OllamaModelController.instance;
      // Only Ollama is implemented in this commit; every other provider
      // currently falls back to the Ollama controller.
      case _:
        return OllamaModelController.instance;
    }
  }

  static LLMProvider fromJSON(Map json) {
    return LLMProvider.fromName(json['llm_provider']);
  }

  static Map toJSON(LLMProvider p) {
    return {'llm_provider': p.name};
  }

  static LLMProvider? fromJSONNullable(Map? json) {
    if (json == null) return null;
    return LLMProvider.fromName(json['llm_provider']);
  }

  static Map? toJSONNullable(LLMProvider? p) {
    if (p == null) return null;
    return {'llm_provider': p.name};
  }

  LLMModel getLLMByIdentifier(String identifier) {
    final m = models.where((e) => e.identifier == identifier).firstOrNull;
    if (m == null) {
      throw Exception('MODEL DOES NOT EXIST $identifier');
    }
    return m;
  }

  static LLMProvider fromName(String name) {
    return LLMProvider.values.firstWhere(
      (p) => p.name == name,
      orElse: () => throw ArgumentError('INVALID LLM PROVIDER: $name'),
    );
  }
}
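Finally, how the enum is meant to be driven (assuming LLMManager.models maps lowercase provider names to [identifier, displayName] pairs, as the models getter implies):

// Hypothetical usage sketch, not part of this commit.
final provider = LLMProvider.fromName('ollama');
print(provider.displayName); // Ollama
for (final m in provider.models) {
  print('${m.identifier} (${m.modelName})');
}
// Unknown names fail fast:
// LLMProvider.fromName('nope'); // ArgumentError: INVALID LLM PROVIDER: nope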