Fix: Golden prompting for Explain, Debug, and Generate Test Cases

siddu015
2025-02-23 11:11:59 +05:30
parent b3f6253317
commit 5d9471fe72
2 changed files with 49 additions and 152 deletions

View File

@@ -2,15 +2,15 @@ import 'dart:convert';
import 'package:ollama_dart/ollama_dart.dart';
class OllamaService {
  final OllamaClient _client;

  OllamaService() : _client = OllamaClient(baseUrl: 'http://127.0.0.1:11434/api');
  // Generate response
  Future<String> generateResponse(String prompt) async {
    final response = await _client.generateCompletion(
      request: GenerateCompletionRequest(
-       model: 'llama3.2:1b',
+       model: 'llama3.2:3b',
        prompt: prompt
      ),
    );
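
For reference, a minimal usage sketch of the service above, assuming this class is importable as ollama_service.dart (a hypothetical path) and that a local Ollama daemon is serving the llama3.2:3b model on 127.0.0.1:11434:

import 'ollama_service.dart'; // hypothetical path for the OllamaService class above

Future<void> main() async {
  final service = OllamaService();
  // Non-streaming completion: the full reply comes back in a single response.
  final reply = await service.generateResponse(
    'Explain what HTTP status code 404 means in one sentence.',
  );
  print(reply);
}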
@@ -184,118 +184,4 @@ generate same to same type of test case url for test purpose
}
}
  Future<String> generateTestCases({required dynamic requestModel, required dynamic responseModel}) async {
    final method = requestModel.httpRequestModel?.method.toString().split('.').last.toUpperCase() ?? "GET";
    final endpoint = requestModel.httpRequestModel?.url ?? "Unknown endpoint";
    final headers = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
    final parameters = requestModel.httpRequestModel?.enabledParamsMap ?? {};
    final body = requestModel.httpRequestModel?.body;
    final statusCode = responseModel.statusCode ?? 200;

    // Extract example values from successful response
    final responseBody = responseModel.body is String
        ? jsonDecode(responseModel.body)
        : responseModel.body;
    final exampleValues = _extractExampleValues(parameters, responseBody);

    final prompt = '''
Generate comprehensive test cases in JSON format for this API endpoint:
API Details:
- Endpoint: $endpoint
- Method: $method
- Headers: ${headers.isNotEmpty ? jsonEncode(headers) : "None"}
- Parameters: ${parameters.isNotEmpty ? jsonEncode(parameters) : "None"}
- Successful Response Example (${statusCode}): ${jsonEncode(responseBody)}
Test Case Requirements:
1. Structure tests in JSON format with arrays for different categories
2. Include valid parameter combinations from the actual request
3. Create edge cases using min/max values and boundary conditions
4. Generate invalid parameter combinations that trigger error responses
5. Include authentication failure scenarios if applicable
6. Mirror successful response structure in test expectations
JSON Template:
{
"test_cases": {
"valid": [
{
"name": "Test valid request with typical parameters",
"parameters": { /* mirror actual parameter structure */ },
"expected": {
"status_code": ${statusCode},
"body_patterns": { /* key fields to validate */ }
}
}
],
"edge_cases": [
{
"name": "Test maximum limit boundary",
"parameters": { /* edge values */ },
"expected": { /* status and response patterns */ }
}
],
"invalid": [
{
"name": "Test missing required parameter",
"parameters": { /* incomplete params */ },
"expected": {
"status_code": 400,
"error_patterns": [ "missing field", "required" ]
}
}
]
}
}
Example Values from Current Implementation:
${jsonEncode(exampleValues)}
Generation Guidelines:
- Use parameter names and structure from the actual request
- Base valid values on successful response patterns
- Derive edge cases from parameter types (e.g., string length, number ranges)
- Match error responses to observed API behavior
- Include authentication headers if present in original request
- Prioritize testing critical business logic endpoints
Generate the JSON test suite following this structure and guidelines.
''';
    return generateResponse(prompt);
  }
  Map<String, dynamic> _extractExampleValues(Map<String, String> parameters, dynamic responseBody) {
    final examples = <String, dynamic>{};

    // Extract parameter examples
    examples['parameters'] =
        parameters.map((k, v) => MapEntry(k, _deriveValuePattern(v)));

    // Extract response body patterns
    if (responseBody is Map) {
      examples['response_patterns'] =
          responseBody.map((k, v) => MapEntry(k, _deriveValuePattern(v)));
    }
    return examples;
  }
  String _deriveValuePattern(dynamic value) {
    if (value is num) return "{number}";
    if (value is String) {
      if (DateTime.tryParse(value) != null) return "{datetime}";
      if (value.contains('@')) return "{email}";
      return "{string}";
    }
    return "{value}";
  }
}
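
For context on the helpers removed above, a standalone sketch of the same value-pattern idea; the function name and sample values here are illustrative, not taken from the repository:

// Illustrative only: mirrors the removed _deriveValuePattern logic.
String derivePattern(dynamic value) {
  if (value is num) return '{number}';
  if (value is String) {
    if (DateTime.tryParse(value) != null) return '{datetime}';
    if (value.contains('@')) return '{email}';
    return '{string}';
  }
  return '{value}';
}

void main() {
  print(derivePattern(42));                     // {number}
  print(derivePattern('2025-02-23T11:11:59'));  // {datetime}
  print(derivePattern('user@example.com'));     // {email}
  print(derivePattern('hello'));                // {string}
}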

View File

@@ -5,29 +5,28 @@ import 'package:flutter_markdown/flutter_markdown.dart';
class ChatbotWidget extends ConsumerStatefulWidget {
  const ChatbotWidget({Key? key}) : super(key: key);

  @override
  _ChatbotWidgetState createState() => _ChatbotWidgetState();
}

class _ChatbotWidgetState extends ConsumerState<ChatbotWidget> {
  final TextEditingController _controller = TextEditingController();
  final List<Map<String, dynamic>> _messages = [];
  bool _isLoading = false;
  void _sendMessage(String message) async {
    if (message.trim().isEmpty) return;
    final ollamaService = ref.read(ollamaServiceProvider);
    final requestModel = ref.read(selectedRequestModelProvider);
    final responseModel = requestModel?.httpResponseModel;

    setState(() {
      _messages.add({'role': 'user', 'message': message});
      _controller.clear();
      _isLoading = true;
    });

    try {
      String response;
@@ -36,26 +35,38 @@ setState(() {
          requestModel: requestModel,
          responseModel: responseModel,
        );
      } else if (message == "Debug API") {
        response = await ollamaService.debugApi(
          requestModel: requestModel,
          responseModel: responseModel,
        );
      } else if (message == "Generate Test Case") {
        response = await ollamaService.generateTestCases(
          requestModel: requestModel,
          responseModel: responseModel
        );
      } else {
        response = await ollamaService.generateResponse(message);
      }

      setState(() {
        _messages.add({'role': 'bot', 'message': response});
      });
    } catch (error) {
      setState(() {
        _messages.add({'role': 'bot', 'message': "Error: ${error.toString()}"});
      });
    } finally {
      setState(() => _isLoading = false);
    }
  }
  @override
  Widget build(BuildContext context) {
    final requestModel = ref.watch(selectedRequestModelProvider);
    final statusCode = requestModel?.httpResponseModel?.statusCode;
    final showDebugButton = statusCode != null && statusCode >= 400;

    return Container(
      height: 400,
      padding: const EdgeInsets.all(16),
@@ -75,7 +86,6 @@ setState(() {
              icon: const Icon(Icons.info_outline),
              label: const Text("Explain API"),
            ),
            if (showDebugButton) ...[
              const SizedBox(width: 8),
              ElevatedButton.icon(
@@ -90,6 +100,7 @@ setState(() {
                icon: const Icon(Icons.developer_mode),
                label: const Text("Test Case"),
              ),
              const Spacer(),
            ],
          ),
@@ -137,10 +148,10 @@ setState(() {
}
class ChatBubble extends StatelessWidget {
  final String message;
  final bool isUser;

  const ChatBubble({super.key, required this.message, this.isUser = false});
  @override
  Widget build(BuildContext context) {
@@ -162,4 +173,4 @@ const ChatBubble({super.key, required this.message, this.isUser = false});
      ),
    );
  }
}
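
A minimal sketch of how the _messages list might be rendered with the ChatBubble widget above, assuming the same {'role', 'message'} map shape used in _sendMessage and that the helper lives in the same file as ChatBubble; this function is illustrative and not part of the diff:

import 'package:flutter/material.dart';

// Illustrative only: builds a scrollable list of ChatBubble widgets
// from entries shaped like {'role': 'user' | 'bot', 'message': '...'}.
Widget buildMessageList(List<Map<String, dynamic>> messages) {
  return ListView.builder(
    itemCount: messages.length,
    itemBuilder: (context, index) {
      final entry = messages[index];
      return ChatBubble(
        message: entry['message'] as String,
        isUser: entry['role'] == 'user',
      );
    },
  );
}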