diff --git a/packages/genai/lib/genai.dart b/packages/genai/lib/genai.dart index 83f90435..b96c6195 100644 --- a/packages/genai/lib/genai.dart +++ b/packages/genai/lib/genai.dart @@ -1,7 +1,12 @@ -library genai; - -/// A Calculator. -class Calculator { - /// Returns [value] plus 1. - int addOne(int value) => value + 1; -} +// Module Exports +export 'models/ai_request_model.dart'; +export 'models/ai_response_model.dart'; +export 'providers/providers.dart'; +export 'generative_ai.dart'; +export 'llm_config.dart'; +export 'llm_input_payload.dart'; +export 'llm_manager.dart'; +export 'llm_model.dart'; +export 'llm_provider.dart'; +export 'llm_request.dart'; +export 'llm_saveobject.dart'; \ No newline at end of file diff --git a/packages/genai/lib/generative_ai.dart b/packages/genai/lib/generative_ai.dart new file mode 100644 index 00000000..e55d4d60 --- /dev/null +++ b/packages/genai/lib/generative_ai.dart @@ -0,0 +1,195 @@ +import 'dart:async'; +import 'dart:convert'; +import 'dart:math'; +import 'package:better_networking/better_networking.dart'; +import 'llm_config.dart'; +import 'llm_model.dart'; +import 'llm_request.dart'; + +class GenerativeAI { + static Future executeGenAIRequest( + LLMModel model, + LLMRequestDetails requestDetails, + ) async { + final mC = model.provider.modelController; + final headers = requestDetails.headers; + // print(jsonEncode(requestDetails.body)); + final (response, _, _) = await sendHttpRequest( + (Random().nextDouble() * 9999999 + 1).toString(), + APIType.rest, + HttpRequestModel( + method: HTTPVerb.post, + headers: [ + ...headers.entries.map( + (x) => NameValueModel.fromJson({x.key: x.value}), + ), + ], + url: requestDetails.endpoint, + bodyContentType: ContentType.json, + body: jsonEncode(requestDetails.body), + ), + ); + if (response == null) return null; + if (response.statusCode == 200) { + final data = jsonDecode(response.body); + // print(data); + return mC.outputFormatter(data); + } else { + print(requestDetails.endpoint); + 
print(response.body); + throw Exception( + 'LLM_EXCEPTION: ${response.statusCode}\n${response.body}', + ); + } + } + + static Future<Stream> streamGenAIRequest( + LLMModel model, + LLMRequestDetails requestDetails, + ) async { + final modelController = model.provider.modelController; + + final headers = { + 'Content-Type': 'application/json', + ...requestDetails.headers, + }; + + final httpStream = await streamHttpRequest( + requestDetails.hashCode.toString(), + APIType.rest, + HttpRequestModel( + method: HTTPVerb.post, + headers: headers.entries + .map((entry) => NameValueModel(name: entry.key, value: entry.value)) + .toList(), + url: requestDetails.endpoint, + bodyContentType: ContentType.json, + body: jsonEncode(requestDetails.body), + ), + ); + + final streamController = StreamController(); + + final subscription = httpStream.listen( + (dat) { + if (dat == null) { + streamController.addError('STREAMING ERROR: NULL DATA'); + return; + } + + final chunk = dat.$1; + final error = dat.$3; + + if (chunk == null) { + streamController.addError(error ?? 'NULL ERROR'); + return; + } + + final lines = chunk.split('\n'); + for (final line in lines) { + if (!line.startsWith('data: ') || line.contains('[DONE]')) continue; + final jsonStr = line.substring(6).trim(); + try { + final jsonData = jsonDecode(jsonStr); + final formattedOutput = modelController.streamOutputFormatter( + jsonData, + ); + streamController.sink.add(formattedOutput); + } catch (e) { + print('⚠️ JSON decode error in SSE: $e\nSending as Regular Text'); + streamController.sink.add(jsonStr); + } + } + }, + onError: (error) { + streamController.addError('STREAM ERROR: $error'); + streamController.close(); + }, + onDone: () { + streamController.close(); + }, + cancelOnError: true, + ); + + streamController.onCancel = () async { + await subscription.cancel(); + }; + + return streamController.stream; + } + + static callGenerativeModel( + LLMModel model, { + required Function(String?) 
onAnswer, + required Function(dynamic) onError, + required String systemPrompt, + required String userPrompt, + String? credential, + String? endpoint, + Map? configurations, + bool stream = false, + }) async { + final c = model.provider.modelController; + final payload = c.inputPayload; + payload.systemPrompt = systemPrompt; + payload.userPrompt = userPrompt; + if (credential != null) { + payload.credential = credential; + } + if (configurations != null) { + payload.configMap.addAll(configurations); + } + if (endpoint != null) { + payload.endpoint = endpoint; + } + try { + if (stream) { + final streamRequest = c.createRequest(model, payload, stream: true); + final answerStream = await streamGenAIRequest(model, streamRequest); + processGenAIStreamOutput(answerStream, (w) { + onAnswer('$w '); + }, onError); + } else { + final request = c.createRequest(model, payload); + final answer = await executeGenAIRequest(model, request); + onAnswer(answer); + } + } catch (e) { + onError(e); + } + } + + static void processGenAIStreamOutput( + Stream stream, + Function(String) onWord, + Function(dynamic) onError, + ) { + String buffer = ''; + stream.listen( + (chunk) { + if (chunk == null || chunk.isEmpty) return; + buffer += chunk; + // Split on spaces but preserve last partial word + final parts = buffer.split(RegExp(r'\s+')); + if (parts.length > 1) { + // Keep the last part in buffer (it may be incomplete) + buffer = parts.removeLast(); + for (final word in parts) { + if (word.trim().isNotEmpty) { + onWord(word); + } + } + } + }, + onDone: () { + // Print any remaining word when stream is finished + if (buffer.trim().isNotEmpty) { + onWord(buffer); + } + }, + onError: (e) { + onError(e); + }, + ); + } +} diff --git a/packages/genai/lib/providers/common.dart b/packages/genai/lib/llm_model.dart similarity index 90% rename from packages/genai/lib/providers/common.dart rename to packages/genai/lib/llm_model.dart index e00869b1..30458c60 100644 --- 
a/packages/genai/lib/providers/common.dart +++ b/packages/genai/lib/llm_model.dart @@ -1,6 +1,6 @@ -import '../llm_input_payload.dart'; -import '../llm_request.dart'; -import 'providers.dart'; +import 'llm_input_payload.dart'; +import 'llm_provider.dart'; +import 'llm_request.dart'; class LLMModel { const LLMModel(this.identifier, this.modelName, this.provider); diff --git a/packages/genai/lib/llm_provider.dart b/packages/genai/lib/llm_provider.dart new file mode 100644 index 00000000..1e652910 --- /dev/null +++ b/packages/genai/lib/llm_provider.dart @@ -0,0 +1,73 @@ +import 'providers/providers.dart'; +import 'llm_manager.dart'; +import 'llm_model.dart'; + +enum LLMProvider { + gemini('Gemini'), + openai('OpenAI'), + anthropic('Anthropic'), + ollama('Ollama'), + azureopenai('Azure OpenAI'); + + const LLMProvider(this.displayName); + + final String displayName; + + List get models { + final avl = LLMManager.models[this.name.toLowerCase()]; + if (avl == null) return []; + List models = []; + for (final x in avl) { + models.add(LLMModel(x[0], x[1], this)); + } + return models; + } + + ModelController get modelController { + switch (this) { + case LLMProvider.ollama: + return OllamaModelController.instance; + case LLMProvider.gemini: + return GeminiModelController.instance; + case LLMProvider.azureopenai: + return AzureOpenAIModelController.instance; + case LLMProvider.openai: + return OpenAIModelController.instance; + case LLMProvider.anthropic: + return AnthropicModelController.instance; + } + } + + static LLMProvider fromJSON(Map json) { + return LLMProvider.fromName(json['llm_provider']); + } + + static Map toJSON(LLMProvider p) { + return {'llm_provider': p.name}; + } + + static LLMProvider? fromJSONNullable(Map? json) { + if (json == null) return null; + return LLMProvider.fromName(json['llm_provider']); + } + + static Map? toJSONNullable(LLMProvider? 
p) { + if (p == null) return null; + return {'llm_provider': p.name}; + } + + LLMModel getLLMByIdentifier(String identifier) { + final m = this.models.where((e) => e.identifier == identifier).firstOrNull; + if (m == null) { + throw Exception('MODEL DOES NOT EXIST $identifier'); + } + return m; + } + + static LLMProvider fromName(String name) { + return LLMProvider.values.firstWhere( + (model) => model.name == name, + orElse: () => throw ArgumentError('INVALID LLM PROVIDER: $name'), + ); + } +} diff --git a/packages/genai/lib/llm_saveobject.dart b/packages/genai/lib/llm_saveobject.dart index cbbafb47..7d315789 100644 --- a/packages/genai/lib/llm_saveobject.dart +++ b/packages/genai/lib/llm_saveobject.dart @@ -1,6 +1,6 @@ import 'llm_config.dart'; -import 'providers/common.dart'; -import 'providers/providers.dart'; +import 'llm_model.dart'; +import 'llm_provider.dart'; class LLMSaveObject { String endpoint; diff --git a/packages/genai/lib/models/ai_request_model.dart b/packages/genai/lib/models/ai_request_model.dart index e18ee25a..f093dfc5 100644 --- a/packages/genai/lib/models/ai_request_model.dart +++ b/packages/genai/lib/models/ai_request_model.dart @@ -1,8 +1,9 @@ import 'package:freezed_annotation/freezed_annotation.dart'; +import '../llm_model.dart'; +import '../llm_provider.dart'; import '../llm_saveobject.dart'; import '../llm_input_payload.dart'; import '../llm_request.dart'; -import '../providers/common.dart'; import '../providers/gemini.dart'; import '../providers/providers.dart'; part 'ai_request_model.freezed.dart'; diff --git a/packages/genai/lib/models/ai_response_model.dart b/packages/genai/lib/models/ai_response_model.dart index dc88936b..a1aa75a4 100644 --- a/packages/genai/lib/models/ai_response_model.dart +++ b/packages/genai/lib/models/ai_response_model.dart @@ -4,7 +4,7 @@ import 'dart:typed_data'; import 'package:better_networking/better_networking.dart'; import 'package:freezed_annotation/freezed_annotation.dart'; import 
'package:collection/collection.dart' show mergeMaps; -import '../providers/providers.dart'; +import '../llm_provider.dart'; part 'ai_response_model.freezed.dart'; part 'ai_response_model.g.dart'; diff --git a/packages/genai/lib/providers/anthropic.dart b/packages/genai/lib/providers/anthropic.dart index 97deaa2a..8fe0ff86 100644 --- a/packages/genai/lib/providers/anthropic.dart +++ b/packages/genai/lib/providers/anthropic.dart @@ -1,7 +1,7 @@ import '../llm_config.dart'; import '../llm_input_payload.dart'; +import '../llm_model.dart'; import '../llm_request.dart'; -import 'common.dart'; class AnthropicModelController extends ModelController { static final instance = AnthropicModelController(); diff --git a/packages/genai/lib/providers/azureopenai.dart b/packages/genai/lib/providers/azureopenai.dart index 18d45667..3c38ac0e 100644 --- a/packages/genai/lib/providers/azureopenai.dart +++ b/packages/genai/lib/providers/azureopenai.dart @@ -1,7 +1,7 @@ import '../llm_config.dart'; import '../llm_input_payload.dart'; +import '../llm_model.dart'; import '../llm_request.dart'; -import 'common.dart'; class AzureOpenAIModelController extends ModelController { static final instance = AzureOpenAIModelController(); diff --git a/packages/genai/lib/providers/gemini.dart b/packages/genai/lib/providers/gemini.dart index 56b85b87..a4508965 100644 --- a/packages/genai/lib/providers/gemini.dart +++ b/packages/genai/lib/providers/gemini.dart @@ -1,7 +1,7 @@ import '../llm_config.dart'; import '../llm_input_payload.dart'; +import '../llm_model.dart'; import '../llm_request.dart'; -import 'common.dart'; class GeminiModelController extends ModelController { static final instance = GeminiModelController(); diff --git a/packages/genai/lib/providers/ollama.dart b/packages/genai/lib/providers/ollama.dart index 82c2a0f2..06750472 100644 --- a/packages/genai/lib/providers/ollama.dart +++ b/packages/genai/lib/providers/ollama.dart @@ -1,7 +1,7 @@ import '../llm_config.dart'; import 
'../llm_input_payload.dart'; +import '../llm_model.dart'; import '../llm_request.dart'; -import 'common.dart'; class OllamaModelController extends ModelController { static final instance = OllamaModelController(); diff --git a/packages/genai/lib/providers/openai.dart b/packages/genai/lib/providers/openai.dart index bd9bdc26..2c7a2e3b 100644 --- a/packages/genai/lib/providers/openai.dart +++ b/packages/genai/lib/providers/openai.dart @@ -1,7 +1,7 @@ import '../llm_config.dart'; import '../llm_input_payload.dart'; +import '../llm_model.dart'; import '../llm_request.dart'; -import 'common.dart'; class OpenAIModelController extends ModelController { static final instance = OpenAIModelController(); diff --git a/packages/genai/lib/providers/providers.dart b/packages/genai/lib/providers/providers.dart index dff914ef..ae945182 100644 --- a/packages/genai/lib/providers/providers.dart +++ b/packages/genai/lib/providers/providers.dart @@ -1,77 +1,5 @@ -import '../llm_manager.dart'; -import 'anthropic.dart'; -import 'azureopenai.dart'; -import 'common.dart'; -import 'gemini.dart'; -import 'ollama.dart'; -import 'openai.dart'; - -enum LLMProvider { - gemini('Gemini'), - openai('OpenAI'), - anthropic('Anthropic'), - ollama('Ollama'), - azureopenai('Azure OpenAI'); - - const LLMProvider(this.displayName); - - final String displayName; - - List get models { - final avl = LLMManager.models[this.name.toLowerCase()]; - if (avl == null) return []; - List models = []; - for (final x in avl) { - models.add(LLMModel(x[0], x[1], this)); - } - return models; - } - - ModelController get modelController { - switch (this) { - case LLMProvider.ollama: - return OllamaModelController.instance; - case LLMProvider.gemini: - return GeminiModelController.instance; - case LLMProvider.azureopenai: - return AzureOpenAIModelController.instance; - case LLMProvider.openai: - return OpenAIModelController.instance; - case LLMProvider.anthropic: - return AnthropicModelController.instance; - } - } - - static 
LLMProvider fromJSON(Map json) { - return LLMProvider.fromName(json['llm_provider']); - } - - static Map toJSON(LLMProvider p) { - return {'llm_provider': p.name}; - } - - static LLMProvider? fromJSONNullable(Map? json) { - if (json == null) return null; - return LLMProvider.fromName(json['llm_provider']); - } - - static Map? toJSONNullable(LLMProvider? p) { - if (p == null) return null; - return {'llm_provider': p.name}; - } - - LLMModel getLLMByIdentifier(String identifier) { - final m = this.models.where((e) => e.identifier == identifier).firstOrNull; - if (m == null) { - throw Exception('MODEL DOES NOT EXIST $identifier'); - } - return m; - } - - static LLMProvider fromName(String name) { - return LLMProvider.values.firstWhere( - (model) => model.name == name, - orElse: () => throw ArgumentError('INVALID LLM PROVIDER: $name'), - ); - } -} +export 'anthropic.dart'; +export 'gemini.dart'; +export 'azureopenai.dart'; +export 'openai.dart'; +export 'ollama.dart'; \ No newline at end of file