genai: Created Package & implemented LLM Config, Manager & Request

Manas Hejmadi
2025-06-22 23:01:06 +05:30
parent 92bf9d9aa7
commit effe414268
11 changed files with 282 additions and 0 deletions

packages/genai/.gitignore vendored Normal file

@@ -0,0 +1,30 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
migrate_working_dir/

# IntelliJ related
*.iml
*.ipr
*.iws
.idea/

# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/

# Flutter/Dart/Pub related
# Libraries should not include pubspec.lock, per https://dart.dev/guides/libraries/private-files#pubspeclock.
/pubspec.lock
**/doc/api/
.dart_tool/
.packages
build/

packages/genai/CHANGELOG.md Normal file

@@ -0,0 +1,3 @@
## 0.0.1
* TODO: Describe initial release.

packages/genai/LICENSE Normal file

@@ -0,0 +1 @@
TODO: Add your license here.

packages/genai/README.md Normal file

@@ -0,0 +1,2 @@
# genai package

This package contains all the code related to generative AI capabilities. It is a foundational package intended for reuse across projects.

packages/genai/analysis_options.yaml Normal file

@@ -0,0 +1,4 @@
include: package:flutter_lints/flutter.yaml

# Additional information about this file can be found at
# https://dart.dev/guides/language/analysis-options

packages/genai/lib/genai.dart Normal file

@@ -0,0 +1,7 @@
library genai;

/// A Calculator.
class Calculator {
  /// Returns [value] plus 1.
  int addOne(int value) => value + 1;
}


@@ -0,0 +1,191 @@
import 'dart:convert';

typedef LLMOutputFormatter = String? Function(Map);

/// A single tunable model setting (e.g. temperature) with a typed value.
class LLMModelConfiguration {
  final String configId;
  final String configName;
  final String configDescription;
  final LLMModelConfigurationType configType;
  final LLMModelConfigValue configValue;

  LLMModelConfiguration({
    required this.configId,
    required this.configName,
    required this.configDescription,
    required this.configType,
    required this.configValue,
  }) {
    // Assert that the configuration type and value match.
    switch (configType) {
      case LLMModelConfigurationType.boolean:
        assert(configValue is LLMConfigBooleanValue);
      case LLMModelConfigurationType.slider:
        assert(configValue is LLMConfigSliderValue);
      case LLMModelConfigurationType.numeric:
        assert(configValue is LLMConfigNumericValue);
      case LLMModelConfigurationType.text:
        assert(configValue is LLMConfigTextValue);
    }
  }

  /// Returns a copy of this configuration with [value] swapped in.
  LLMModelConfiguration updateValue(LLMModelConfigValue value) {
    return LLMModelConfiguration(
      configId: configId,
      configName: configName,
      configDescription: configDescription,
      configType: configType,
      configValue: value,
    );
  }

  factory LLMModelConfiguration.fromJson(Map x) {
    LLMModelConfigurationType cT;
    LLMModelConfigValue cV;
    switch (x['configType']) {
      case 'boolean':
        cT = LLMModelConfigurationType.boolean;
        cV = LLMConfigBooleanValue.deserialize(x['configValue']);
        break;
      case 'slider':
        cT = LLMModelConfigurationType.slider;
        cV = LLMConfigSliderValue.deserialize(x['configValue']);
        break;
      case 'numeric':
        cT = LLMModelConfigurationType.numeric;
        cV = LLMConfigNumericValue.deserialize(x['configValue']);
        break;
      // Unknown types fall back to text.
      case 'text':
      default:
        cT = LLMModelConfigurationType.text;
        cV = LLMConfigTextValue.deserialize(x['configValue']);
    }
    return LLMModelConfiguration(
      configId: x['configId'],
      configName: x['configName'],
      configDescription: x['configDescription'],
      configType: cT,
      configValue: cV,
    );
  }

  Map toJson() {
    return {
      'configId': configId,
      'configName': configName,
      'configDescription': configDescription,
      'configType': configType.name,
      'configValue': configValue.serialize(),
    };
  }

  /// Deep copy via a JSON round-trip.
  LLMModelConfiguration clone() {
    return LLMModelConfiguration.fromJson(toJson());
  }
}

enum LLMModelConfigurationType { boolean, slider, numeric, text }

// ---------------- LLMConfigValues ----------------

/// Base class for typed configuration values that serialize to strings.
abstract class LLMModelConfigValue {
  dynamic _value;

  // ignore: unnecessary_getters_setters
  dynamic get value => _value;
  set value(dynamic newValue) => _value = newValue;

  String serialize();

  LLMModelConfigValue(this._value);
}

class LLMConfigBooleanValue extends LLMModelConfigValue {
  LLMConfigBooleanValue({required bool value}) : super(value);

  @override
  String serialize() => value.toString();

  static LLMConfigBooleanValue deserialize(String x) {
    return LLMConfigBooleanValue(value: x == 'true');
  }
}

class LLMConfigNumericValue extends LLMModelConfigValue {
  LLMConfigNumericValue({required num value}) : super(value);

  @override
  String serialize() => value.toString();

  static LLMConfigNumericValue deserialize(String x) {
    return LLMConfigNumericValue(value: num.parse(x));
  }
}

/// A slider value stored as a record of three doubles, serialized as a
/// JSON array (see the (0, 0.5, 1)-style defaults below).
class LLMConfigSliderValue extends LLMModelConfigValue {
  LLMConfigSliderValue({required (double, double, double) value})
      : super(value);

  @override
  String serialize() {
    final v = value as (double, double, double);
    return jsonEncode([v.$1, v.$2, v.$3]);
  }

  static LLMConfigSliderValue deserialize(String x) {
    final z = jsonDecode(x) as List;
    final val = (
      double.parse(z[0].toString()),
      double.parse(z[1].toString()),
      double.parse(z[2].toString()),
    );
    return LLMConfigSliderValue(value: val);
  }
}

class LLMConfigTextValue extends LLMModelConfigValue {
  LLMConfigTextValue({required String value}) : super(value);

  @override
  String serialize() => value.toString();

  static LLMConfigTextValue deserialize(String x) {
    return LLMConfigTextValue(value: x);
  }
}

enum LLMConfigName { temperature, top_p, max_tokens, endpoint }

Map<LLMConfigName, LLMModelConfiguration> defaultLLMConfigurations = {
  LLMConfigName.temperature: LLMModelConfiguration(
    configId: 'temperature',
    configName: 'Temperature',
    configDescription: 'The temperature of the model',
    configType: LLMModelConfigurationType.slider,
    configValue: LLMConfigSliderValue(value: (0, 0.5, 1)),
  ),
  LLMConfigName.top_p: LLMModelConfiguration(
    configId: 'top_p',
    configName: 'Top P',
    configDescription: 'The top-p (nucleus sampling) value of the model',
    configType: LLMModelConfigurationType.slider,
    configValue: LLMConfigSliderValue(value: (0, 0.95, 1)),
  ),
  LLMConfigName.max_tokens: LLMModelConfiguration(
    configId: 'max_tokens',
    configName: 'Maximum Tokens',
    configDescription: 'The maximum number of tokens allowed in the output',
    configType: LLMModelConfigurationType.numeric,
    configValue: LLMConfigNumericValue(value: -1),
  ),
  // Note: LLMConfigName.endpoint has no default entry here.
};
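
A quick usage sketch (not part of the commit) tying the pieces above together: toJson/fromJson round-trip a configuration through string-serialized values, clone is a JSON round-trip, and updateValue returns a modified copy. All names come from the diff; only the main wrapper is illustrative.

void main() {
  final temp = defaultLLMConfigurations[LLMConfigName.temperature]!;

  // clone() is implemented as fromJson(toJson()), so the copy is independent.
  final copy = temp.clone();
  assert(copy.configId == temp.configId);

  // updateValue swaps in a new value while keeping the metadata.
  // The slider record presumably encodes (min, value, max), matching
  // the (0, 0.5, 1) default above.
  final hotter = temp.updateValue(LLMConfigSliderValue(value: (0, 0.9, 1)));
  print(hotter.toJson());
  // {configId: temperature, ..., configType: slider, configValue: [0.0,0.9,1.0]}
}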

@@ -0,0 +1,13 @@
/// A plain data holder describing an HTTP request to an LLM provider.
class LLMRequestDetails {
  String endpoint;
  Map<String, String> headers;
  String method;
  Map<String, dynamic> body;

  LLMRequestDetails({
    required this.endpoint,
    required this.headers,
    required this.method,
    required this.body,
  });
}
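
Nothing in this commit actually dispatches an LLMRequestDetails. A minimal sketch using dart:io follows; the sendLLMRequest helper is hypothetical, and the real package presumably routes requests through the better_networking dependency declared in the pubspec, whose API is not part of this commit.

import 'dart:convert';
import 'dart:io';

// Hypothetical helper, not part of the package: sends an
// LLMRequestDetails with dart:io and returns the raw response body.
Future<String> sendLLMRequest(LLMRequestDetails details) async {
  final client = HttpClient();
  try {
    final request =
        await client.openUrl(details.method, Uri.parse(details.endpoint));
    details.headers.forEach((name, value) => request.headers.set(name, value));
    request.write(jsonEncode(details.body));
    final response = await request.close();
    return response.transform(utf8.decoder).join();
  } finally {
    client.close();
  }
}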

packages/genai/pubspec.yaml Normal file

@@ -0,0 +1,26 @@
name: genai
description: "Generative AI capabilities for Flutter applications"
version: 0.0.1
homepage:

publish_to: none

environment:
  sdk: ^3.8.0
  flutter: ">=1.17.0"

dependencies:
  flutter:
    sdk: flutter
  shared_preferences: ^2.5.2
  better_networking:
    path: ../better_networking

dev_dependencies:
  flutter_test:
    sdk: flutter
  build_runner: ^2.4.12
  flutter_lints: ^4.0.0
  freezed: ^2.5.7
  json_serializable: ^6.7.1

flutter:
packages/genai/test/genai_test.dart Normal file

@@ -0,0 +1,5 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:genai/genai.dart';

void main() {
}
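
The test body is left empty. Assuming the configuration types eventually get exported through package:genai/genai.dart (the stub library currently exposes only Calculator), a first smoke test might look like this; the test case itself is illustrative:

void main() {
  test('slider config survives a JSON round-trip', () {
    final original = defaultLLMConfigurations[LLMConfigName.temperature]!;
    final restored = LLMModelConfiguration.fromJson(original.toJson());
    expect(restored.configId, original.configId);
    expect(restored.configValue.serialize(), original.configValue.serialize());
  });
}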