Streaming Option added to AI Request Configurations

Manas Hejmadi
2025-07-26 03:37:17 +05:30
parent f60836e4b2
commit db599b3d74
4 changed files with 20 additions and 4 deletions

View File

@@ -339,7 +339,12 @@ class CollectionStateNotifier
     if (apiType == APIType.ai) {
       aiRequestModel = requestModel.aiRequestModel!;
-      final genAIRequest = aiRequestModel.createRequest();
+      final streamingMode = aiRequestModel.payload
+              .configMap[LLMConfigName.stream.name]?.configValue.value ??
+          false;
+      final genAIRequest = aiRequestModel.createRequest(stream: streamingMode);
       substitutedHttpRequestModel = getSubstitutedHttpRequestModel(
         HttpRequestModel(
           method: HTTPVerb.post,
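The ?? false fallback above matters: requests saved before this commit have no 'stream' key in their configMap, so the lookup must default to non-streaming rather than fail. A minimal sketch of the pattern, using a hypothetical simplified config type rather than the app's real models:

    // Hypothetical stand-in for the app's config value type.
    class ConfigValue {
      final Object? value;
      ConfigValue(this.value);
    }

    void main() {
      // A config map saved before this commit: no 'stream' entry.
      final configMap = <String, ConfigValue>{
        'temperature': ConfigValue(0.7),
      };

      // Null-aware chain: absent key -> null -> falls back to false.
      final streamingMode = (configMap['stream']?.value as bool?) ?? false;
      print(streamingMode); // false
    }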

@@ -164,7 +164,7 @@ class LLMConfigTextValue extends LLMModelConfigValue {
   }
 }

-enum LLMConfigName { temperature, top_p, max_tokens, endpoint }
+enum LLMConfigName { temperature, top_p, max_tokens, endpoint, stream }

 Map<LLMConfigName, LLMModelConfiguration> defaultLLMConfigurations = {
   LLMConfigName.temperature: LLMModelConfiguration(
@@ -188,4 +188,12 @@ Map<LLMConfigName, LLMModelConfiguration> defaultLLMConfigurations = {
     configType: LLMModelConfigurationType.numeric,
     configValue: LLMConfigNumericValue(value: -1),
   ),
+  LLMConfigName.stream: LLMModelConfiguration(
+    configId: 'stream',
+    configName: 'Enable Streaming Mode',
+    configDescription:
+        'The LLM output will be sent in a stream instead of all at once',
+    configType: LLMModelConfigurationType.boolean,
+    configValue: LLMConfigBooleanValue(value: false),
+  ),
 };
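Since Dart 2.15, every enum exposes a .name getter that returns its identifier as a string, which is what keeps LLMConfigName.stream.name and the 'stream' configId above in sync. A quick sketch:

    enum LLMConfigName { temperature, top_p, max_tokens, endpoint, stream }

    void main() {
      // Enum.name yields the declared identifier as a string.
      print(LLMConfigName.stream.name); // stream
      assert(LLMConfigName.stream.name == 'stream');
    }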

@@ -30,9 +30,9 @@ class AIRequestModel with _$AIRequestModel {
     return AIRequestModel(payload: p, model: model, provider: provider);
   }

-  LLMRequestDetails createRequest() {
+  LLMRequestDetails createRequest({bool stream = false}) {
     final controller = model.provider.modelController;
-    return controller.createRequest(model, payload, stream: true);
+    return controller.createRequest(model, payload, stream: stream);
   }

   factory AIRequestModel.fromDefaultSaveObject(LLMSaveObject? defaultLLMSO) {
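The signature change swaps the hard-coded stream: true for a named parameter that defaults to false, so existing call sites compile unchanged while the collection notifier can now pass the user's config value through. A minimal sketch of the pattern (hypothetical return type, not the real LLMRequestDetails):

    // Named bool parameter with a default: old zero-argument calls keep
    // working, new callers can opt in to streaming.
    String createRequest({bool stream = false}) =>
        stream ? 'streamed request' : 'one-shot request';

    void main() {
      print(createRequest());             // one-shot request
      print(createRequest(stream: true)); // streamed request
    }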

@@ -12,9 +12,12 @@ class GeminiModelController extends ModelController {
       systemPrompt: '',
       userPrompt: '',
       configMap: {
+        // TODO: Changes to these do not apply to older requests!
         LLMConfigName.temperature.name:
             defaultLLMConfigurations[LLMConfigName.temperature]!,
         LLMConfigName.top_p.name: defaultLLMConfigurations[LLMConfigName.top_p]!,
+        LLMConfigName.stream.name:
+            defaultLLMConfigurations[LLMConfigName.stream]!,
       },
     ).clone();
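The TODO above points at a real limitation of this seeding approach: defaultLLMConfigurations is only copied into a request's configMap when the request is created, so adding the stream entry here does not retrofit requests saved earlier (which is exactly why the lookup in the first hunk falls back to false). A sketch of that behavior under the same assumption, with a simplified map standing in for the real configuration types:

    // Defaults are copied per request at creation time.
    final defaults = <String, bool>{'stream': false};

    Map<String, bool> newRequestConfig() => Map.of(defaults);

    void main() {
      final savedEarlier = newRequestConfig(); // created before any change
      defaults['stream'] = true;               // later edit to the defaults...
      print(savedEarlier['stream']);           // false; does not propagate
    }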