Merge branch 'foss42:main' into dashbot-core

Udhay Adithya
2025-09-01 16:04:48 +05:30
committed by GitHub
71 changed files with 4312 additions and 74 deletions

packages/genai/.pubignore

@@ -0,0 +1,12 @@
pubspec.lock
melos_genai.iml
build/
coverage/
dart_test.yaml
doc/
test/
pubspec_overrides.yaml
genai_example/melos_genai_example.iml
genai_example/pubspec_overrides.yaml
tool/
models.json


@@ -1,3 +1,3 @@
## 0.0.1
* TODO: Describe initial release.
- Introducing a unified Dart/Flutter package for working with multiple Generative AI providers (Google Gemini, OpenAI, Anthropic, Azure OpenAI, Ollama, etc.).


@@ -2,11 +2,11 @@
A **unified Dart/Flutter package** for working with multiple Generative AI providers (Google Gemini, OpenAI, Anthropic, Azure OpenAI, Ollama, etc.) using a **single request model**.
* ✅ Supports **normal & streaming** responses
* ✅ Unified `AIRequestModel` across providers
* ✅ Configurable parameters (temperature, top-p, max tokens, etc.)
* ✅ Simple request utilities (`executeGenAIRequest`, `streamGenAIRequest`)
* ✅ Extensible — add your own provider easily
- ✅ Supports **normal & streaming** responses
- ✅ Unified `AIRequestModel` across providers
- ✅ Configurable parameters (temperature, top-p, max tokens, etc.)
- ✅ Simple request utilities (`executeGenAIRequest`, `streamGenAIRequest`)
- ✅ Extensible — add your own provider easily
---
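A minimal usage sketch of the normal (non-streaming) path. The `AIRequestModel` constructor is not shown in this diff, so the base request is taken as a parameter; only `copyWith` fields that appear elsewhere in the package (`systemPrompt`, `userPrompt`) are used.

```dart
import 'package:genai/genai.dart';

// Sketch only: `baseRequest` is assumed to be an AIRequestModel already
// configured for a provider (model name, API key, modelConfigs, ...).
Future<void> ask(AIRequestModel baseRequest) async {
  final request = baseRequest.copyWith(
    systemPrompt: 'You are a concise assistant.',
    userPrompt: 'Summarise nucleus sampling in one sentence.',
  );

  // Normal (non-streaming) call; returns the model's text output or null.
  final answer = await executeGenAIRequest(request);
  print(answer);
}
```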
@@ -85,10 +85,10 @@ Each request accepts `modelConfigs` to fine-tune output.
Available configs (defaults provided):
* `temperature` → controls randomness
* `top_p` / `topP` → nucleus sampling probability
* `max_tokens` / `maxOutputTokens` → maximum length of output
* `stream` → enables streaming
- `temperature` → controls randomness
- `top_p` / `topP` → nucleus sampling probability
- `max_tokens` / `maxOutputTokens` → maximum length of output
- `stream` → enables streaming
Example:
@@ -135,9 +135,9 @@ processGenAIStreamOutput(
## 🔒 Authentication
* **OpenAI / Anthropic / Azure OpenAI** → API key passed as HTTP header.
* **Gemini** → API key passed as query param `?key=YOUR_API_KEY`.
* **Ollama** → local server, no key required.
- **OpenAI / Anthropic / Azure OpenAI** → API key passed as HTTP header.
- **Gemini** → API key passed as query param `?key=YOUR_API_KEY`.
- **Ollama** → local server, no key required.
Just set `apiKey` in your `AIRequestModel`.
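A hedged one-liner, assuming `copyWith` exposes an `apiKey` field (not shown directly in this diff):

```dart
import 'package:genai/genai.dart';

// Sketch: attach the provider key to an existing request. Gemini sends it as
// ?key=..., the other hosted providers as an HTTP header; Ollama ignores it.
AIRequestModel withAuth(AIRequestModel base, String apiKey) =>
    base.copyWith(apiKey: apiKey);
```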
@@ -150,10 +150,11 @@ Want to add a new AI provider?
1. Extend `ModelProvider`
2. Implement:
* `defaultAIRequestModel`
* `createRequest()`
* `outputFormatter()`
* `streamOutputFormatter()`
- `defaultAIRequestModel`
- `createRequest()`
- `outputFormatter()`
- `streamOutputFormatter()`
3. Register in `kModelProvidersMap`
That's it. It plugs into the same unified request flow.
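The `ModelProvider` member signatures are not reproduced in this diff, so the sketch below only illustrates the general provider-registry pattern with hypothetical stand-in types; it is not the package's actual API.

```dart
// Hypothetical stand-ins, NOT the genai ModelProvider interface.
abstract class DemoProvider {
  Map<String, dynamic> createRequest(String prompt);
  String outputFormatter(Map<String, dynamic> rawResponse);
}

class EchoProvider implements DemoProvider {
  @override
  Map<String, dynamic> createRequest(String prompt) => {'prompt': prompt};

  @override
  String outputFormatter(Map<String, dynamic> rawResponse) =>
      rawResponse['prompt'] as String;
}

// Registering the provider in a lookup map, analogous to kModelProvidersMap.
final demoProviders = <String, DemoProvider>{'echo': EchoProvider()};
```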
@@ -175,16 +176,16 @@ print(answer);
---
## 🤝 Contributing
We welcome contributions to the `genai` package! To contribute, fork the repository and submit a pull request. For major changes or new features, please open an issue first to discuss your ideas.
## Maintainer
## Maintainer(s)
- Ankit Mahato ([GitHub](https://github.com/animator), [LinkedIn](https://www.linkedin.com/in/ankitmahato/), [X](https://x.com/ankitmahato))
- Ashita Prasad ([GitHub](https://github.com/ashitaprasad), [LinkedIn](https://www.linkedin.com/in/ashitaprasad/), [X](https://x.com/ashitaprasad))
- Manas Hejmadi (contributor) ([GitHub](https://github.com/synapsecode))
## License
This project is licensed under the [Apache License 2.0](https://github.com/foss42/apidash/blob/main/packages/genai/LICENSE).
This project is licensed under the [Apache License 2.0](https://github.com/foss42/apidash/blob/main/packages/genai/LICENSE).


@@ -0,0 +1,90 @@
import 'package:genai/agentic_engine/blueprint.dart';
import 'package:genai/genai.dart';

class AIAgentService {
  static Future<String?> _callProvider({
    required AIRequestModel baseAIRequestObject,
    required String systemPrompt,
    required String input,
  }) async {
    final aiRequest = baseAIRequestObject.copyWith(
      systemPrompt: systemPrompt,
      userPrompt: input,
    );
    return await executeGenAIRequest(aiRequest);
  }

  static Future<String?> _orchestrator(
    AIAgent agent,
    AIRequestModel baseAIRequestObject, {
    String? query,
    Map? variables,
  }) async {
    String systemPrompt = agent.getSystemPrompt();
    // Substitute :variable: placeholders in the system prompt.
    if (variables != null) {
      for (final v in variables.keys) {
        systemPrompt = systemPrompt.substitutePromptVariable(v, variables[v]);
      }
    }
    return await _callProvider(
      systemPrompt: systemPrompt,
      input: query ?? '',
      baseAIRequestObject: baseAIRequestObject,
    );
  }

  static Future<dynamic> _governor(
    AIAgent agent,
    AIRequestModel baseAIRequestObject, {
    String? query,
    Map? variables,
  }) async {
    // Retry with exponential backoff; give up after five attempts.
    int retryCount = 0;
    const backoffDelays = [200, 400, 800, 1600, 3200];
    do {
      try {
        final res = await _orchestrator(
          agent,
          baseAIRequestObject,
          query: query,
          variables: variables,
        );
        if (res != null) {
          if (await agent.validator(res)) {
            return agent.outputFormatter(res);
          }
        }
      } catch (e) {
        print("AIAgentService::Governor: Exception occurred: $e");
      }
      // Wait before the next attempt.
      if (retryCount < backoffDelays.length) {
        await Future.delayed(
          Duration(milliseconds: backoffDelays[retryCount]),
        );
      }
      retryCount += 1;
      print(
        "Retrying AgentCall for (${agent.agentName}): ATTEMPT: $retryCount",
      );
    } while (retryCount < 5);
    return null;
  }

  static Future<dynamic> callAgent(
    AIAgent agent,
    AIRequestModel baseAIRequestObject, {
    String? query,
    Map? variables,
  }) async {
    return await _governor(
      agent,
      baseAIRequestObject,
      query: query,
      variables: variables,
    );
  }
}
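For orientation, a hedged usage sketch of the service above. `EchoAgent` is a hypothetical `AIAgent` implementation (the interface is defined in `blueprint.dart`), and the base `AIRequestModel` is taken as a parameter because its construction is not part of this file.

```dart
import 'package:genai/genai.dart';

// Hypothetical agent: returns a trimmed answer about :topic:.
class EchoAgent extends AIAgent {
  @override
  String get agentName => 'EchoAgent';

  @override
  String getSystemPrompt() =>
      'Answer the user question about :topic: in one short paragraph.';

  @override
  Future<bool> validator(String aiResponse) async =>
      aiResponse.trim().isNotEmpty;

  @override
  Future<dynamic> outputFormatter(String validatedResponse) async =>
      validatedResponse.trim();
}

Future<void> runEchoAgent(AIRequestModel baseRequest) async {
  final result = await AIAgentService.callAgent(
    EchoAgent(),
    baseRequest,
    query: 'What does the governor retry loop do?',
    variables: {'topic': 'retry strategies'},
  );
  print(result);
}
```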


@@ -0,0 +1,2 @@
export 'agent_service.dart';
export 'blueprint.dart';


@@ -0,0 +1,18 @@
abstract class AIAgent {
  String get agentName;
  String getSystemPrompt();
  Future<bool> validator(String aiResponse);
  Future<dynamic> outputFormatter(String validatedResponse);
}

extension SystemPromptTemplating on String {
  /// Replaces a `:variable:` placeholder in this string with [value].
  String substitutePromptVariable(String variable, String value) {
    return replaceAll(":$variable:", value);
  }
}

class AgentInputs {
  final String? query;
  final Map? variables;

  AgentInputs({this.query, this.variables});
}
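A small sketch of the `:variable:` templating convention defined by the `SystemPromptTemplating` extension in this file:

```dart
import 'package:genai/genai.dart';

void main() {
  final prompt = 'Translate the following text into :language:.'
      .substitutePromptVariable('language', 'French');
  print(prompt); // Translate the following text into French.
}
```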


@@ -2,3 +2,4 @@ export 'models/models.dart';
export 'interface/interface.dart';
export 'utils/utils.dart';
export 'widgets/widgets.dart';
export 'agentic_engine/agentic_engine.dart';


@@ -1,8 +1,14 @@
name: genai
description: "Generative AI capabilities for flutter applications"
description: "A unified Dart/Flutter package for working with multiple Generative AI providers (Google Gemini, OpenAI, Anthropic, Azure OpenAI, Ollama, etc.) using a single request model."
version: 0.0.1
homepage:
publish_to: none
homepage: https://github.com/foss42/apidash/tree/main/packages/genai
topics:
- ai
- ollama
- gemini
- claude
- openai
environment:
sdk: ^3.8.0
@@ -11,8 +17,7 @@ environment:
dependencies:
flutter:
sdk: flutter
better_networking:
path: ../better_networking
better_networking: ^0.0.2
freezed_annotation: ^2.4.1
json_annotation: ^4.9.0
nanoid: ^1.0.0


@@ -1,5 +1,4 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:genai/genai.dart';
void main() {
}
void main() {}