REFACTOR: Improved Package Structure

Manas Hejmadi
2025-06-22 23:39:22 +05:30
parent 26d27eba80
commit 7a9afc614b
13 changed files with 298 additions and 96 deletions

View File: lib/genai.dart

@@ -1,7 +1,12 @@
-library genai;
-
-/// A Calculator.
-class Calculator {
-  /// Returns [value] plus 1.
-  int addOne(int value) => value + 1;
-}
+library genai; // Module Exports
+export 'models/ai_request_model.dart';
+export 'models/ai_response_model.dart';
+export 'providers/providers.dart';
+export 'generative_ai.dart';
+export 'llm_config.dart';
+export 'llm_input_payload.dart';
+export 'llm_manager.dart';
+export 'llm_model.dart';
+export 'llm_provider.dart';
+export 'llm_request.dart';
+export 'llm_saveobject.dart';
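
Usage sketch (not part of this commit): with the new barrel file, a consumer reaches the whole public API through one import. The snippet assumes the package is referenced as genai in pubspec.yaml.

import 'package:genai/genai.dart';

void main() {
  // Every public type is reachable via the single barrel import.
  final provider = LLMProvider.fromName('gemini');
  print(provider.displayName); // Gemini
}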

View File: lib/generative_ai.dart

@@ -0,0 +1,195 @@
import 'dart:async';
import 'dart:convert';
import 'dart:math';
import 'package:better_networking/better_networking.dart';
import 'llm_config.dart';
import 'llm_model.dart';
import 'llm_request.dart';

class GenerativeAI {
  static Future<String?> executeGenAIRequest(
    LLMModel model,
    LLMRequestDetails requestDetails,
  ) async {
    final mC = model.provider.modelController;
    final headers = requestDetails.headers;
    // print(jsonEncode(requestDetails.body));
    final (response, _, _) = await sendHttpRequest(
      (Random().nextDouble() * 9999999 + 1).toString(),
      APIType.rest,
      HttpRequestModel(
        method: HTTPVerb.post,
        headers: [
          ...headers.entries.map(
            (x) => NameValueModel.fromJson({x.key: x.value}),
          ),
        ],
        url: requestDetails.endpoint,
        bodyContentType: ContentType.json,
        body: jsonEncode(requestDetails.body),
      ),
    );
    if (response == null) return null;
    if (response.statusCode == 200) {
      final data = jsonDecode(response.body);
      // print(data);
      return mC.outputFormatter(data);
    } else {
      print(requestDetails.endpoint);
      print(response.body);
      throw Exception(
        'LLM_EXCEPTION: ${response.statusCode}\n${response.body}',
      );
    }
  }

  static Future<Stream<String?>> streamGenAIRequest(
    LLMModel model,
    LLMRequestDetails requestDetails,
  ) async {
    final modelController = model.provider.modelController;
    final headers = {
      'Content-Type': 'application/json',
      ...requestDetails.headers,
    };
    final httpStream = await streamHttpRequest(
      requestDetails.hashCode.toString(),
      APIType.rest,
      HttpRequestModel(
        method: HTTPVerb.post,
        headers: headers.entries
            .map((entry) => NameValueModel(name: entry.key, value: entry.value))
            .toList(),
        url: requestDetails.endpoint,
        bodyContentType: ContentType.json,
        body: jsonEncode(requestDetails.body),
      ),
    );
    final streamController = StreamController<String?>();
    final subscription = httpStream.listen(
      (dat) {
        if (dat == null) {
          streamController.addError('STREAMING ERROR: NULL DATA');
          return;
        }
        final chunk = dat.$1;
        final error = dat.$3;
        if (chunk == null) {
          streamController.addError(error ?? 'NULL ERROR');
          return;
        }
        // Server-sent events: each chunk may carry several `data: ...` lines.
        final lines = chunk.split('\n');
        for (final line in lines) {
          if (!line.startsWith('data: ') || line.contains('[DONE]')) continue;
          final jsonStr = line.substring(6).trim();
          try {
            final jsonData = jsonDecode(jsonStr);
            final formattedOutput = modelController.streamOutputFormatter(
              jsonData,
            );
            streamController.sink.add(formattedOutput);
          } catch (e) {
            print('⚠️ JSON decode error in SSE: $e\nSending as Regular Text');
            streamController.sink.add(jsonStr);
          }
        }
      },
      onError: (error) {
        streamController.addError('STREAM ERROR: $error');
        streamController.close();
      },
      onDone: () {
        streamController.close();
      },
      cancelOnError: true,
    );
    streamController.onCancel = () async {
      await subscription.cancel();
    };
    return streamController.stream;
  }

  static Future<void> callGenerativeModel(
    LLMModel model, {
    required Function(String?) onAnswer,
    required Function(dynamic) onError,
    required String systemPrompt,
    required String userPrompt,
    String? credential,
    String? endpoint,
    Map<String, LLMModelConfiguration>? configurations,
    bool stream = false,
  }) async {
    final c = model.provider.modelController;
    final payload = c.inputPayload;
    payload.systemPrompt = systemPrompt;
    payload.userPrompt = userPrompt;
    if (credential != null) {
      payload.credential = credential;
    }
    if (configurations != null) {
      payload.configMap.addAll(configurations);
    }
    if (endpoint != null) {
      payload.endpoint = endpoint;
    }
    try {
      if (stream) {
        final streamRequest = c.createRequest(model, payload, stream: true);
        final answerStream = await streamGenAIRequest(model, streamRequest);
        processGenAIStreamOutput(answerStream, (w) {
          onAnswer('$w ');
        }, onError);
      } else {
        final request = c.createRequest(model, payload);
        final answer = await executeGenAIRequest(model, request);
        onAnswer(answer);
      }
    } catch (e) {
      onError(e);
    }
  }

  static void processGenAIStreamOutput(
    Stream<String?> stream,
    Function(String) onWord,
    Function(dynamic) onError,
  ) {
    String buffer = '';
    stream.listen(
      (chunk) {
        if (chunk == null || chunk.isEmpty) return;
        buffer += chunk;
        // Split on whitespace but keep the last, possibly partial, word.
        final parts = buffer.split(RegExp(r'\s+'));
        if (parts.length > 1) {
          // Keep the last part in the buffer (it may be incomplete).
          buffer = parts.removeLast();
          for (final word in parts) {
            if (word.trim().isNotEmpty) {
              onWord(word);
            }
          }
        }
      },
      onDone: () {
        // Flush any remaining word when the stream finishes.
        if (buffer.trim().isNotEmpty) {
          onWord(buffer);
        }
      },
      onError: (e) {
        onError(e);
      },
    );
  }
}
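
Usage sketch (not part of this commit): one way the GenerativeAI facade could be driven end to end. The provider choice, prompts, and placeholder credential are illustrative, and the lookup assumes LLMManager.models has at least one Gemini entry registered.

import 'package:genai/genai.dart';

Future<void> askModel() async {
  // Illustrative lookup; assumes LLMManager.models lists Gemini models.
  final model = LLMProvider.gemini.models.first;
  await GenerativeAI.callGenerativeModel(
    model,
    systemPrompt: 'You are a concise assistant.',
    userPrompt: 'Explain server-sent events in one sentence.',
    credential: '<YOUR_API_KEY>', // placeholder, not a real key
    stream: true,
    onAnswer: (word) => print(word), // receives one word at a time
    onError: (e) => print('LLM error: $e'),
  );
}

With stream: true, words arrive through onAnswer one at a time because processGenAIStreamOutput buffers the trailing partial word until the next whitespace boundary confirms it is complete.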

View File: lib/llm_model.dart

@@ -1,6 +1,6 @@
-import '../llm_input_payload.dart';
-import '../llm_request.dart';
-import 'providers.dart';
+import 'llm_input_payload.dart';
+import 'llm_provider.dart';
+import 'llm_request.dart';
 class LLMModel {
   const LLMModel(this.identifier, this.modelName, this.provider);

View File: lib/llm_provider.dart

@@ -0,0 +1,73 @@
import 'providers/providers.dart';
import 'llm_manager.dart';
import 'llm_model.dart';

enum LLMProvider {
  gemini('Gemini'),
  openai('OpenAI'),
  anthropic('Anthropic'),
  ollama('Ollama'),
  azureopenai('Azure OpenAI');

  const LLMProvider(this.displayName);
  final String displayName;

  List<LLMModel> get models {
    final avl = LLMManager.models[this.name.toLowerCase()];
    if (avl == null) return [];
    List<LLMModel> models = [];
    for (final x in avl) {
      models.add(LLMModel(x[0], x[1], this));
    }
    return models;
  }

  ModelController get modelController {
    switch (this) {
      case LLMProvider.ollama:
        return OllamaModelController.instance;
      case LLMProvider.gemini:
        return GeminiModelController.instance;
      case LLMProvider.azureopenai:
        return AzureOpenAIModelController.instance;
      case LLMProvider.openai:
        return OpenAIModelController.instance;
      case LLMProvider.anthropic:
        return AnthropicModelController.instance;
    }
  }

  static LLMProvider fromJSON(Map json) {
    return LLMProvider.fromName(json['llm_provider']);
  }

  static Map toJSON(LLMProvider p) {
    return {'llm_provider': p.name};
  }

  static LLMProvider? fromJSONNullable(Map? json) {
    if (json == null) return null;
    return LLMProvider.fromName(json['llm_provider']);
  }

  static Map? toJSONNullable(LLMProvider? p) {
    if (p == null) return null;
    return {'llm_provider': p.name};
  }

  LLMModel getLLMByIdentifier(String identifier) {
    final m = this.models.where((e) => e.identifier == identifier).firstOrNull;
    if (m == null) {
      throw Exception('MODEL DOES NOT EXIST $identifier');
    }
    return m;
  }

  static LLMProvider fromName(String name) {
    return LLMProvider.values.firstWhere(
      (model) => model.name == name,
      orElse: () => throw ArgumentError('INVALID LLM PROVIDER: $name'),
    );
  }
}
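
Usage sketch (not part of this commit): the enum's helpers in action. The 'claude-3-haiku' identifier is illustrative and assumes a matching entry in LLMManager.models.

import 'package:genai/genai.dart';

void demoProviderHelpers() {
  // Name-based lookup and JSON round-tripping.
  final p = LLMProvider.fromName('anthropic');
  final json = LLMProvider.toJSON(p); // {'llm_provider': 'anthropic'}
  assert(LLMProvider.fromJSON(json) == p);

  // Identifier lookup; throws if no such model is registered.
  final model = p.getLLMByIdentifier('claude-3-haiku');
  print('${model.modelName} via ${model.provider.displayName}');
}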

View File: lib/llm_saveobject.dart

@@ -1,6 +1,6 @@
 import 'llm_config.dart';
-import 'providers/common.dart';
-import 'providers/providers.dart';
+import 'llm_model.dart';
+import 'llm_provider.dart';
 class LLMSaveObject {
   String endpoint;

View File: lib/models/ai_request_model.dart

@@ -1,8 +1,9 @@
 import 'package:freezed_annotation/freezed_annotation.dart';
+import '../llm_model.dart';
+import '../llm_provider.dart';
 import '../llm_saveobject.dart';
 import '../llm_input_payload.dart';
 import '../llm_request.dart';
-import '../providers/common.dart';
 import '../providers/gemini.dart';
 import '../providers/providers.dart';
 part 'ai_request_model.freezed.dart';

View File: lib/models/ai_response_model.dart

@@ -4,7 +4,7 @@ import 'dart:typed_data';
 import 'package:better_networking/better_networking.dart';
 import 'package:freezed_annotation/freezed_annotation.dart';
 import 'package:collection/collection.dart' show mergeMaps;
-import '../providers/providers.dart';
+import '../llm_provider.dart';
 part 'ai_response_model.freezed.dart';
 part 'ai_response_model.g.dart';

View File: lib/providers/anthropic.dart

@@ -1,7 +1,7 @@
 import '../llm_config.dart';
 import '../llm_input_payload.dart';
+import '../llm_model.dart';
 import '../llm_request.dart';
-import 'common.dart';
 class AnthropicModelController extends ModelController {
   static final instance = AnthropicModelController();

View File: lib/providers/azureopenai.dart

@@ -1,7 +1,7 @@
 import '../llm_config.dart';
 import '../llm_input_payload.dart';
+import '../llm_model.dart';
 import '../llm_request.dart';
-import 'common.dart';
 class AzureOpenAIModelController extends ModelController {
   static final instance = AzureOpenAIModelController();

View File: lib/providers/gemini.dart

@@ -1,7 +1,7 @@
 import '../llm_config.dart';
 import '../llm_input_payload.dart';
+import '../llm_model.dart';
 import '../llm_request.dart';
-import 'common.dart';
 class GeminiModelController extends ModelController {
   static final instance = GeminiModelController();

View File: lib/providers/ollama.dart

@@ -1,7 +1,7 @@
 import '../llm_config.dart';
 import '../llm_input_payload.dart';
+import '../llm_model.dart';
 import '../llm_request.dart';
-import 'common.dart';
 class OllamaModelController extends ModelController {
   static final instance = OllamaModelController();

View File: lib/providers/openai.dart

@@ -1,7 +1,7 @@
 import '../llm_config.dart';
 import '../llm_input_payload.dart';
+import '../llm_model.dart';
 import '../llm_request.dart';
-import 'common.dart';
 class OpenAIModelController extends ModelController {
   static final instance = OpenAIModelController();

View File: lib/providers/providers.dart

@@ -1,77 +1,5 @@
-import '../llm_manager.dart';
-import 'anthropic.dart';
-import 'azureopenai.dart';
-import 'common.dart';
-import 'gemini.dart';
-import 'ollama.dart';
-import 'openai.dart';
-enum LLMProvider {
-  gemini('Gemini'),
-  openai('OpenAI'),
-  anthropic('Anthropic'),
-  ollama('Ollama'),
-  azureopenai('Azure OpenAI');
-  const LLMProvider(this.displayName);
-  final String displayName;
-  List<LLMModel> get models {
-    final avl = LLMManager.models[this.name.toLowerCase()];
-    if (avl == null) return [];
-    List<LLMModel> models = [];
-    for (final x in avl) {
-      models.add(LLMModel(x[0], x[1], this));
-    }
-    return models;
-  }
-  ModelController get modelController {
-    switch (this) {
-      case LLMProvider.ollama:
-        return OllamaModelController.instance;
-      case LLMProvider.gemini:
-        return GeminiModelController.instance;
-      case LLMProvider.azureopenai:
-        return AzureOpenAIModelController.instance;
-      case LLMProvider.openai:
-        return OpenAIModelController.instance;
-      case LLMProvider.anthropic:
-        return AnthropicModelController.instance;
-    }
-  }
-  static LLMProvider fromJSON(Map json) {
-    return LLMProvider.fromName(json['llm_provider']);
-  }
-  static Map toJSON(LLMProvider p) {
-    return {'llm_provider': p.name};
-  }
-  static LLMProvider? fromJSONNullable(Map? json) {
-    if (json == null) return null;
-    return LLMProvider.fromName(json['llm_provider']);
-  }
-  static Map? toJSONNullable(LLMProvider? p) {
-    if (p == null) return null;
-    return {'llm_provider': p.name};
-  }
-  LLMModel getLLMByIdentifier(String identifier) {
-    final m = this.models.where((e) => e.identifier == identifier).firstOrNull;
-    if (m == null) {
-      throw Exception('MODEL DOES NOT EXIST $identifier');
-    }
-    return m;
-  }
-  static LLMProvider fromName(String name) {
-    return LLMProvider.values.firstWhere(
-      (model) => model.name == name,
-      orElse: () => throw ArgumentError('INVALID LLM PROVIDER: $name'),
-    );
-  }
-}
+export 'anthropic.dart';
+export 'gemini.dart';
+export 'azureopenai.dart';
+export 'openai.dart';
+export 'ollama.dart';
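
Usage sketch (not part of this commit): since providers.dart is now a pure barrel of controller exports and the LLMProvider enum lives in llm_provider.dart, both import routes below should resolve, assuming the lib/ layout inferred above.

import 'package:genai/llm_provider.dart'; // new home of the enum
import 'package:genai/providers/providers.dart'; // barrel of controllers

void main() {
  // The enum moved out of providers/providers.dart into llm_provider.dart.
  print(LLMProvider.values.map((p) => p.displayName).join(', '));
  // Controllers still resolve through the barrel export:
  print(identical(
    LLMProvider.gemini.modelController,
    GeminiModelController.instance,
  )); // true
}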