Streaming Option added to AI Request Configurations
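This change adds a `stream` entry to the `LLMConfigName` enum and to `defaultLLMConfigurations`, threads an optional `stream` parameter through `AIRequestModel.createRequest` (replacing a hard-coded `stream: true`), wires the new default into `GeminiModelController`, and reads the flag in `CollectionStateNotifier` when building AI requests, falling back to `false` when the config is absent.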
@@ -339,7 +339,12 @@ class CollectionStateNotifier
     if (apiType == APIType.ai) {
       aiRequestModel = requestModel.aiRequestModel!;
-      final genAIRequest = aiRequestModel.createRequest();
+
+      final streamingMode = aiRequestModel.payload
+              .configMap[LLMConfigName.stream.name]?.configValue.value ??
+          false;
+
+      final genAIRequest = aiRequestModel.createRequest(stream: streamingMode);
       substitutedHttpRequestModel = getSubstitutedHttpRequestModel(
         HttpRequestModel(
           method: HTTPVerb.post,
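The lookup above leans on Dart's null-aware member access plus the null-coalescing operator: if the `stream` key is missing from the config map (as it will be for requests saved before this change), the whole chain evaluates to `null` and `?? false` supplies the default. A minimal, self-contained sketch of the same pattern; `ConfigEntry` is a hypothetical stand-in for apidash's `LLMModelConfiguration`:

```dart
// Hypothetical stand-in for apidash's LLMModelConfiguration, whose
// configValue wraps the actual setting.
class ConfigEntry {
  final Object configValue;
  ConfigEntry(this.configValue);
}

void main() {
  final configMap = <String, ConfigEntry>{
    'temperature': ConfigEntry(0.7),
    // No 'stream' key, as in a request saved before this change.
  };

  // Null-aware chain: a missing key yields null, then `??` supplies false.
  final streamingMode = configMap['stream']?.configValue as bool? ?? false;
  print(streamingMode); // false
}
```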
@@ -164,7 +164,7 @@ class LLMConfigTextValue extends LLMModelConfigValue {
   }
 }
 
-enum LLMConfigName { temperature, top_p, max_tokens, endpoint }
+enum LLMConfigName { temperature, top_p, max_tokens, endpoint, stream }
 
 Map<LLMConfigName, LLMModelConfiguration> defaultLLMConfigurations = {
   LLMConfigName.temperature: LLMModelConfiguration(
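An aside on why extending the enum is enough: the later hunks index `configMap` with `LLMConfigName.stream.name`, and Dart's built-in `name` getter on enums (Dart 2.15+) turns each member into its identifier string, so the new member immediately works as a map key:

```dart
enum LLMConfigName { temperature, top_p, max_tokens, endpoint, stream }

void main() {
  // The EnumName extension returns the member's identifier as a string.
  print(LLMConfigName.stream.name); // stream
}
```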
@@ -188,4 +188,12 @@ Map<LLMConfigName, LLMModelConfiguration> defaultLLMConfigurations = {
     configType: LLMModelConfigurationType.numeric,
     configValue: LLMConfigNumericValue(value: -1),
   ),
+  LLMConfigName.stream: LLMModelConfiguration(
+    configId: 'stream',
+    configName: 'Enable Streaming Mode',
+    configDescription:
+        'The LLM output will be sent in a stream instead of all at once',
+    configType: LLMModelConfigurationType.boolean,
+    configValue: LLMConfigBooleanValue(value: false),
+  ),
 };
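The new entry follows the file's existing pattern of pairing a `configType` with a matching typed value class (`LLMConfigNumericValue`, and now `LLMConfigBooleanValue`). A reduced sketch of that pattern, with simplified stand-ins for apidash's value classes:

```dart
// Simplified stand-ins for apidash's LLMModelConfigValue hierarchy;
// the real classes carry extra metadata (id, name, description, type).
abstract class ConfigValue<T> {
  T get value;
}

class NumericValue implements ConfigValue<num> {
  @override
  final num value;
  NumericValue(this.value);
}

class BooleanValue implements ConfigValue<bool> {
  @override
  final bool value;
  BooleanValue(this.value);
}

void main() {
  final defaults = <String, ConfigValue<Object>>{
    'max_tokens': NumericValue(-1), // -1 stands for "no explicit limit" here
    'stream': BooleanValue(false),  // streaming stays opt-in by default
  };
  print(defaults['stream']!.value); // false
}
```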
@@ -30,9 +30,9 @@ class AIRequestModel with _$AIRequestModel {
     return AIRequestModel(payload: p, model: model, provider: provider);
   }
 
-  LLMRequestDetails createRequest() {
+  LLMRequestDetails createRequest({bool stream = false}) {
     final controller = model.provider.modelController;
-    return controller.createRequest(model, payload, stream: true);
+    return controller.createRequest(model, payload, stream: stream);
   }
 
   factory AIRequestModel.fromDefaultSaveObject(LLMSaveObject? defaultLLMSO) {
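Before this hunk, `createRequest` always passed a hard-coded `stream: true` to the controller regardless of what the caller wanted; now the flag is an optional named parameter that is forwarded as-is. A standalone illustration of the idiom, with placeholder functions rather than apidash's actual API:

```dart
// Placeholder for ModelController.createRequest.
String controllerCreateRequest(String model, {bool stream = false}) =>
    '{"model": "$model", "stream": $stream}';

// Placeholder for AIRequestModel.createRequest: the flag is forwarded
// rather than hard-coded, so callers that never opt in get a
// non-streaming request.
String createRequest(String model, {bool stream = false}) =>
    controllerCreateRequest(model, stream: stream);

void main() {
  print(createRequest('gemini-pro'));               // ... "stream": false}
  print(createRequest('gemini-pro', stream: true)); // ... "stream": true}
}
```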
@@ -12,9 +12,12 @@ class GeminiModelController extends ModelController {
       systemPrompt: '',
       userPrompt: '',
       configMap: {
+        // TODO: Changes to these defaults do not apply to older saved requests.
         LLMConfigName.temperature.name:
            defaultLLMConfigurations[LLMConfigName.temperature]!,
         LLMConfigName.top_p.name: defaultLLMConfigurations[LLMConfigName.top_p]!,
+        LLMConfigName.stream.name:
+            defaultLLMConfigurations[LLMConfigName.stream]!,
       },
     ).clone();
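The TODO points at a real gap: requests saved before this change were cloned with an older `configMap`, so the new `stream` default never reaches them. One possible mitigation, not part of this commit, is to backfill any missing default keys when a saved request is loaded; a hedged sketch with a hypothetical helper:

```dart
// Hypothetical backfill helper, not in this commit: entries already in
// the saved map win, and default keys the old request lacks are added.
Map<String, T> backfillDefaults<T>(
  Map<String, T> saved,
  Map<String, T> defaults,
) =>
    {...defaults, ...saved};

void main() {
  final saved = {'temperature': '0.7'}; // a pre-`stream` saved request
  final defaults = {'temperature': '1.0', 'stream': 'false'};
  print(backfillDefaults(saved, defaults));
  // {temperature: 0.7, stream: false}
}
```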