diff --git a/packages/genai/lib/llm_input_payload.dart b/packages/genai/lib/llm_input_payload.dart
new file mode 100644
index 00000000..79198911
--- /dev/null
+++ b/packages/genai/lib/llm_input_payload.dart
@@ -0,0 +1,59 @@
+import 'llm_config.dart';
+
+class LLMInputPayload {
+  String endpoint;
+  String credential;
+  String systemPrompt;
+  String userPrompt;
+  Map<String, LLMModelConfiguration> configMap;
+
+  LLMInputPayload({
+    required this.endpoint,
+    required this.credential,
+    required this.systemPrompt,
+    required this.userPrompt,
+    required this.configMap,
+  });
+
+  LLMInputPayload clone() {
+    Map<String, LLMModelConfiguration> cmap = {};
+    for (final k in configMap.keys) {
+      cmap[k] = configMap[k]!.clone();
+    }
+    return LLMInputPayload(
+      endpoint: endpoint,
+      credential: credential,
+      systemPrompt: systemPrompt,
+      userPrompt: userPrompt,
+      configMap: cmap,
+    );
+  }
+
+  static Map<String, dynamic> toJSON(LLMInputPayload payload) {
+    Map<String, dynamic> cmap = {};
+    for (final e in payload.configMap.entries) {
+      cmap[e.key] = e.value.toJson();
+    }
+    return {
+      'endpoint': payload.endpoint,
+      'credential': payload.credential,
+      'system_prompt': payload.systemPrompt,
+      'user_prompt': payload.userPrompt,
+      'config_map': cmap,
+    };
+  }
+
+  static LLMInputPayload fromJSON(Map<String, dynamic> json) {
+    Map<String, LLMModelConfiguration> cmap = {};
+    for (final k in json['config_map'].keys) {
+      cmap[k] = LLMModelConfiguration.fromJson(json['config_map'][k]);
+    }
+    return LLMInputPayload(
+      endpoint: json['endpoint'],
+      credential: json['credential'],
+      systemPrompt: json['system_prompt'],
+      userPrompt: json['user_prompt'],
+      configMap: cmap,
+    );
+  }
+}
diff --git a/packages/genai/lib/llm_saveobject.dart b/packages/genai/lib/llm_saveobject.dart
new file mode 100644
index 00000000..cbbafb47
--- /dev/null
+++ b/packages/genai/lib/llm_saveobject.dart
@@ -0,0 +1,48 @@
+import 'llm_config.dart';
+import 'providers/common.dart';
+import 'providers/providers.dart';
+
+class LLMSaveObject {
+  String endpoint;
+  String credential;
+  LLMProvider provider;
+  LLMModel selectedLLM;
+  Map<String, LLMModelConfiguration> configMap;
+
+  LLMSaveObject({
+    required this.endpoint,
+    required this.credential,
+    required this.configMap,
+    required this.selectedLLM,
+    required this.provider,
+  });
+
+  Map<String, dynamic> toJSON() {
+    Map<String, dynamic> cmap = {};
+    for (final e in configMap.entries) {
+      cmap[e.key] = e.value.toJson();
+    }
+    return {
+      'endpoint': endpoint,
+      'credential': credential,
+      'config_map': cmap,
+      'selected_llm': selectedLLM.identifier,
+      'provider': provider.name,
+    };
+  }
+
+  static LLMSaveObject fromJSON(Map<String, dynamic> json) {
+    Map<String, LLMModelConfiguration> cmap = {};
+    for (final k in json['config_map'].keys) {
+      cmap[k] = LLMModelConfiguration.fromJson(json['config_map'][k]);
+    }
+    final provider = LLMProvider.fromName(json['provider']);
+    return LLMSaveObject(
+      endpoint: json['endpoint'],
+      credential: json['credential'],
+      configMap: cmap,
+      selectedLLM: provider.getLLMByIdentifier(json['selected_llm']),
+      provider: provider,
+    );
+  }
+}
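A quick round-trip sketch for the serializer pair above, assuming the pubspec names the package genai (so the file imports as package:genai/llm_input_payload.dart) and that LLMModelConfiguration.toJson produces JSON-encodable maps; the snake_case keys mirror the toJSON body in this diff:

import 'dart:convert';

import 'package:genai/llm_input_payload.dart';

void persistAndRestore(LLMInputPayload payload) {
  // toJSON is static and takes the payload as an argument.
  final encoded = jsonEncode(LLMInputPayload.toJSON(payload));

  // jsonDecode yields Map<String, dynamic>, which fromJSON expects.
  final restored = LLMInputPayload.fromJSON(
    jsonDecode(encoded) as Map<String, dynamic>,
  );
  assert(restored.userPrompt == payload.userPrompt);
}

LLMSaveObject round-trips the same way, except its fromJSON resolves 'selected_llm' through getLLMByIdentifier, so LLMManager.models must already be populated by the time it runs.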
diff --git a/packages/genai/lib/providers/common.dart b/packages/genai/lib/providers/common.dart
new file mode 100644
index 00000000..e00869b1
--- /dev/null
+++ b/packages/genai/lib/providers/common.dart
@@ -0,0 +1,40 @@
+import '../llm_input_payload.dart';
+import '../llm_request.dart';
+import 'providers.dart';
+
+class LLMModel {
+  const LLMModel(this.identifier, this.modelName, this.provider);
+  final String identifier;
+  final String modelName;
+  final LLMProvider provider;
+
+  static Map<String, dynamic> toJson(LLMModel m) {
+    return {'identifier': m.identifier, 'provider': m.provider.name};
+  }
+
+  static LLMModel fromJson(Map<String, dynamic> json) {
+    return LLMProvider.fromName(
+      json['provider'],
+    ).getLLMByIdentifier(json['identifier']);
+  }
+}
+
+abstract class ModelController {
+  LLMInputPayload get inputPayload => throw UnimplementedError();
+
+  LLMRequestDetails createRequest(
+    LLMModel model,
+    LLMInputPayload inputPayload, {
+    bool stream = false,
+  }) {
+    throw UnimplementedError();
+  }
+
+  String? outputFormatter(Map<String, dynamic> x) {
+    throw UnimplementedError();
+  }
+
+  String? streamOutputFormatter(Map<String, dynamic> x) {
+    throw UnimplementedError();
+  }
+}
diff --git a/packages/genai/lib/providers/ollama.dart b/packages/genai/lib/providers/ollama.dart
new file mode 100644
index 00000000..82c2a0f2
--- /dev/null
+++ b/packages/genai/lib/providers/ollama.dart
@@ -0,0 +1,73 @@
+import '../llm_config.dart';
+import '../llm_input_payload.dart';
+import '../llm_request.dart';
+import 'common.dart';
+
+class OllamaModelController extends ModelController {
+  static final instance = OllamaModelController();
+
+  // Cloned so each caller gets its own copy of the shared default configs.
+  @override
+  LLMInputPayload get inputPayload => LLMInputPayload(
+    endpoint: 'http://localhost:11434/v1/chat/completions',
+    credential: '',
+    systemPrompt: '',
+    userPrompt: '',
+    configMap: {
+      LLMConfigName.temperature.name:
+          defaultLLMConfigurations[LLMConfigName.temperature]!,
+      LLMConfigName.top_p.name: defaultLLMConfigurations[LLMConfigName.top_p]!,
+    },
+  ).clone();
+
+  @override
+  LLMRequestDetails createRequest(
+    LLMModel model,
+    LLMInputPayload inputPayload, {
+    bool stream = false,
+  }) {
+    return LLMRequestDetails(
+      endpoint: inputPayload.endpoint,
+      headers: {},
+      method: 'POST',
+      body: {
+        "model": model.identifier,
+        if (stream) ...{'stream': true},
+        "messages": [
+          {"role": "system", "content": inputPayload.systemPrompt},
+          {"role": "user", "content": inputPayload.userPrompt},
+        ],
+        "temperature":
+            inputPayload
+                .configMap[LLMConfigName.temperature.name]
+                ?.configValue
+                .value
+                ?.$2 ??
+            0.5,
+        "top_p":
+            inputPayload
+                .configMap[LLMConfigName.top_p.name]
+                ?.configValue
+                .value
+                ?.$2 ??
+            0.95,
+        if (inputPayload.configMap[LLMConfigName.max_tokens.name] != null) ...{
+          "max_tokens": inputPayload
+              .configMap[LLMConfigName.max_tokens.name]!
+              .configValue
+              .value,
+        },
+      },
+    );
+  }
+
+  @override
+  String? outputFormatter(Map<String, dynamic> x) {
+    return x['choices']?[0]['message']?['content'];
+  }
+
+  @override
+  String? streamOutputFormatter(Map<String, dynamic> x) {
+    return x['choices']?[0]['delta']?['content'];
+  }
+}
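To make the two formatters concrete, here is a sketch that feeds them hand-built maps shaped like the OpenAI-compatible responses Ollama serves at /v1/chat/completions (abridged; real responses also carry id, model, usage, and similar fields, so treat the exact shapes as illustrative):

import 'package:genai/providers/ollama.dart';

void main() {
  final controller = OllamaModelController.instance;

  // Non-streaming: the text sits at choices[0].message.content.
  final Map<String, dynamic> full = {
    'choices': [
      {
        'message': {'role': 'assistant', 'content': 'Hello there.'},
      },
    ],
  };
  print(controller.outputFormatter(full)); // Hello there.

  // Streaming chunks: each delta sits at choices[0].delta.content.
  final Map<String, dynamic> chunk = {
    'choices': [
      {
        'delta': {'content': 'Hel'},
      },
    ],
  };
  print(controller.streamOutputFormatter(chunk)); // Hel
}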
diff --git a/packages/genai/lib/providers/providers.dart b/packages/genai/lib/providers/providers.dart
new file mode 100644
index 00000000..97ed2378
--- /dev/null
+++ b/packages/genai/lib/providers/providers.dart
@@ -0,0 +1,68 @@
+import '../llm_manager.dart';
+import 'common.dart';
+import 'ollama.dart';
+
+enum LLMProvider {
+  gemini('Gemini'),
+  openai('OpenAI'),
+  anthropic('Anthropic'),
+  ollama('Ollama'),
+  azureopenai('Azure OpenAI');
+
+  const LLMProvider(this.displayName);
+
+  final String displayName;
+
+  List<LLMModel> get models {
+    final avl = LLMManager.models[name.toLowerCase()];
+    if (avl == null) return [];
+    List<LLMModel> models = [];
+    for (final x in avl) {
+      models.add(LLMModel(x[0], x[1], this));
+    }
+    return models;
+  }
+
+  ModelController get modelController {
+    switch (this) {
+      case LLMProvider.ollama:
+        return OllamaModelController.instance;
+      // Only Ollama is implemented so far; other providers fall back to it.
+      case _:
+        return OllamaModelController.instance;
+    }
+  }
+
+  static LLMProvider fromJSON(Map<String, dynamic> json) {
+    return LLMProvider.fromName(json['llm_provider']);
+  }
+
+  static Map<String, dynamic> toJSON(LLMProvider p) {
+    return {'llm_provider': p.name};
+  }
+
+  static LLMProvider? fromJSONNullable(Map<String, dynamic>? json) {
+    if (json == null) return null;
+    return LLMProvider.fromName(json['llm_provider']);
+  }
+
+  static Map<String, dynamic>? toJSONNullable(LLMProvider? p) {
+    if (p == null) return null;
+    return {'llm_provider': p.name};
+  }
+
+  LLMModel getLLMByIdentifier(String identifier) {
+    final m = models.where((e) => e.identifier == identifier).firstOrNull;
+    if (m == null) {
+      throw Exception('MODEL DOES NOT EXIST $identifier');
+    }
+    return m;
+  }
+
+  static LLMProvider fromName(String name) {
+    return LLMProvider.values.firstWhere(
+      (provider) => provider.name == name,
+      orElse: () => throw ArgumentError('INVALID LLM PROVIDER: $name'),
+    );
+  }
+}
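Tying the enum, controller, and payload together, a minimal end-to-end sketch. The identifier 'llama3.2' is a placeholder (real ones come from provider.models once LLMManager.models is populated), and it assumes LLMRequestDetails exposes its constructor arguments (method, endpoint, body) as fields; actually dispatching the request over HTTP is left to the caller:

import 'package:genai/providers/common.dart';
import 'package:genai/providers/providers.dart';

void main() {
  final controller = LLMProvider.ollama.modelController;

  // Start from the controller's defaults, then fill in the prompts.
  final payload = controller.inputPayload
    ..systemPrompt = 'You are a terse assistant.'
    ..userPrompt = 'Name one prime number.';

  // Hypothetical model; normally looked up via getLLMByIdentifier.
  const model = LLMModel('llama3.2', 'Llama 3.2', LLMProvider.ollama);

  final request = controller.createRequest(model, payload, stream: false);
  print('${request.method} ${request.endpoint}');
  print(request.body);
}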