Mirror of https://github.com/foss42/apidash.git, synced 2025-12-04 03:46:57 +08:00
Providers: Implemented anthropic, gemini, openai & azureopenai
packages/genai/lib/providers/azureopenai.dart (normal file, 77 lines added)
@@ -0,0 +1,77 @@
import '../llm_config.dart';
import '../llm_input_payload.dart';
import '../llm_request.dart';
import 'common.dart';

/// Model controller for the Azure OpenAI chat completions API.
class AzureOpenAIModelController extends ModelController {
  static final instance = AzureOpenAIModelController();

  /// Default payload: the Azure deployment endpoint and API key must be
  /// supplied by the user; temperature and top_p start from the defaults.
  @override
  LLMInputPayload get inputPayload => LLMInputPayload(
    endpoint: '', // TO BE FILLED BY USER
    credential: '',
    systemPrompt: '',
    userPrompt: '',
    configMap: {
      LLMConfigName.temperature.name:
          defaultLLMConfigurations[LLMConfigName.temperature]!,
      LLMConfigName.top_p.name: defaultLLMConfigurations[LLMConfigName.top_p]!,
    },
  ).clone();

  /// Builds the POST request for the Azure OpenAI chat completions endpoint.
  /// The API key is sent in the `api-key` header, as Azure expects.
  @override
  LLMRequestDetails createRequest(
    LLMModel model,
    LLMInputPayload inputPayload, {
    bool stream = false,
  }) {
    if (inputPayload.endpoint.isEmpty) {
      throw Exception('MODEL ENDPOINT IS EMPTY');
    }
    return LLMRequestDetails(
      endpoint: inputPayload.endpoint,
      headers: {'api-key': inputPayload.credential},
      method: 'POST',
      body: {
        if (stream) ...{'stream': true},
        "messages": [
          {"role": "system", "content": inputPayload.systemPrompt},
          // Fall back to a placeholder user message when no prompt is given.
          if (inputPayload.userPrompt.isNotEmpty)
            {"role": "user", "content": inputPayload.userPrompt}
          else
            {"role": "user", "content": "Generate"},
        ],
        "temperature":
            inputPayload
                .configMap[LLMConfigName.temperature.name]
                ?.configValue
                .value
                ?.$2 ??
            0.5,
        "top_p":
            inputPayload
                .configMap[LLMConfigName.top_p.name]
                ?.configValue
                .value
                ?.$2 ??
            0.95,
        // max_tokens is only sent when the user has configured it.
        if (inputPayload.configMap[LLMConfigName.max_tokens.name] != null) ...{
          "max_tokens": inputPayload
              .configMap[LLMConfigName.max_tokens.name]!
              .configValue
              .value,
        },
      },
    );
  }

  /// Extracts the assistant text from a non-streaming response.
  @override
  String? outputFormatter(Map x) {
    return x["choices"]?[0]["message"]?["content"]?.trim();
  }

  /// Extracts the incremental text from a streaming response chunk.
  @override
  String? streamOutputFormatter(Map x) {
    return x["choices"]?[0]["delta"]?["content"];
  }
}
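For context, a minimal standalone sketch of the same chat completions call this controller builds, assuming the http package and placeholder values for the Azure resource, deployment, API version, and key (none of these appear in this diff):

// Standalone sketch, not part of this commit: issues the request shape that
// createRequest above produces. All angle-bracket values are placeholders
// to be filled in by the user.
import 'dart:convert';

import 'package:http/http.dart' as http;

Future<void> main() async {
  const endpoint =
      'https://<resource>.openai.azure.com/openai/deployments/<deployment>'
      '/chat/completions?api-version=<api-version>';
  const apiKey = '<azure-api-key>';

  final body = {
    "messages": [
      {"role": "system", "content": "You are a helpful assistant."},
      {"role": "user", "content": "Say hello."},
    ],
    "temperature": 0.5,
    "top_p": 0.95,
  };

  // Azure OpenAI authenticates with the api-key header, matching the
  // headers built in createRequest.
  final response = await http.post(
    Uri.parse(endpoint),
    headers: {'api-key': apiKey, 'Content-Type': 'application/json'},
    body: jsonEncode(body),
  );

  // Same extraction path as outputFormatter.
  final decoded = jsonDecode(response.body) as Map;
  print(decoded["choices"]?[0]["message"]?["content"]?.trim());
}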