diff --git a/lib/services/ollama_service.dart b/lib/services/ollama_service.dart
index f6ae682f..69967ee3 100644
--- a/lib/services/ollama_service.dart
+++ b/lib/services/ollama_service.dart
@@ -2,41 +2,211 @@ import 'dart:convert';
 import 'package:ollama_dart/ollama_dart.dart';
 
 class OllamaService {
   final OllamaClient _client;
 
   OllamaService() : _client = OllamaClient(baseUrl: 'http://127.0.0.1:11434/api');
 
   // Generate response
-  Future<String> generateResponse(String prompt) async {
-    final response = await _client.generateCompletion(
-      request: GenerateCompletionRequest(
-        model: 'llama3.2:3b',
-        prompt: prompt
-      ),
+  Future<String> generateResponse(String prompt) async {
+    final response = await _client.generateCompletion(
+      request: GenerateCompletionRequest(
+        model: 'llama3.2:1b',
+        prompt: prompt,
+      ),
+    );
+    return response.response.toString();
+  }
+
+  // Explain latest API request & response
+  Future<String> explainLatestApi({required dynamic requestModel, required dynamic responseModel}) async {
+    if (requestModel == null || responseModel == null) {
+      return "No recent API requests found.";
+    }
+
+    // Extract request details
+    final method = requestModel.httpRequestModel?.method
+            .toString()
+            .split('.')
+            .last
+            .toUpperCase() ??
+        "GET";
+    final endpoint = requestModel.httpRequestModel?.url ?? "Unknown endpoint";
+    final headers = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
+    final parameters = requestModel.httpRequestModel?.enabledParamsMap ?? {};
+    final body = requestModel.httpRequestModel?.body;
+
+    // Process response
+    final rawResponse = responseModel.body;
+    final responseBody = rawResponse is String
+        ? jsonDecode(rawResponse)
+        : rawResponse as Map<String, dynamic>?;
+    final statusCode = responseModel.statusCode ?? 0;
+
+    final prompt = '''
+Analyze this API interaction.
+
+Current API Request:
+- Endpoint: $endpoint
+- Method: $method
+- Headers: ${headers.isNotEmpty ? jsonEncode(headers) : "None"}
+- Parameters: ${parameters.isNotEmpty ? jsonEncode(parameters) : "None"}
+- Body: ${body ?? "None"}
+
+Current Response:
+- Status Code: $statusCode
+- Response Body: ${responseBody != null ? jsonEncode(responseBody) : rawResponse}
+
+Required Analysis Format:
+1. Start with overall status assessment
+2. List validation/security issues
+3. Highlight request/response mismatches
+4. Suggest concrete improvements
+5. Use plain text formatting with clear section headers
+
+Response Structure:
+API Request: [request details]
+Response: [response details]
+Analysis: [structured analysis]''';
+
+    return generateResponse(prompt);
+  }
+
+  // Provide step-by-step debugging guidance for a failed request
+  Future<String> debugApi({required dynamic requestModel, required dynamic responseModel}) async {
+    if (requestModel == null || responseModel == null) {
+      return "There are no recent API requests to debug.";
+    }
+
+    final requestJson = jsonEncode(requestModel.toJson());
+    final responseJson = jsonEncode(responseModel.toJson());
+    final statusCode = responseModel.statusCode;
+
+    final prompt = '''
+Provide detailed debugging steps for this failed API request:
+
+**Status Code:** $statusCode
+**Request Details:**
+$requestJson
+
+**Response Details:**
+$responseJson
+
+Provide a step-by-step debugging guide including:
+1. Common causes for this status code
+2. Specific issues in the request
+3. Potential fixes
+4. Recommended next steps
+
+Format the response with clear headings and bullet points.
+''';
+
+    return generateResponse(prompt);
+  }
+
+  // Generate test cases for the latest API request
+  Future<String> generateTestCases({required dynamic requestModel, required dynamic responseModel}) async {
+    if (requestModel == null || responseModel == null) {
+      return "There are no recent API requests to generate test cases for.";
+    }
+
+    final endpoint = requestModel.httpRequestModel?.url ?? "Unknown endpoint";
+    final headers = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
+    final parameters = requestModel.httpRequestModel?.enabledParamsMap ?? {};
+    final body = requestModel.httpRequestModel?.body;
+    final method = requestModel.httpRequestModel?.method.toString().split('.').last.toUpperCase() ?? "GET";
+    // Process response
+    // final rawResponse = responseModel.body;
+    // final responseBody = rawResponse is String
+    //     ? jsonDecode(rawResponse)
+    //     : rawResponse as Map<String, dynamic>?;
+    final statusCode = responseModel.statusCode ?? 0;
+    final exampleParams = await generateExampleParams(
+      requestModel: requestModel,
+      responseModel: responseModel,
     );
-    return response.response.toString();
-  }
-
-  // Explain latest API request & response
-  Future<String> explainLatestApi({required dynamic requestModel, required dynamic responseModel}) async {
-    if (requestModel == null || responseModel == null) {
-      return "There are no recent API Requests.";
-    }
-
-    final requestJson = jsonEncode(requestModel.toJson());
-    final responseJson = jsonEncode(responseModel.toJson());
-    final prompt = '''
-    Explain the API request and response in a simple way:
-
-    **Request Details:**
-    $requestJson
-
-    **Response Details:**
-    $responseJson
-
-    Please provide a brief and clear explanation with key insights.
-    ''';
+    final prompt = '''
+Generate test cases for the following API:
+
+**API Request:**
+- **Endpoint:** `$endpoint`
+- **Method:** `$method`
+- **Headers:** ${headers.isNotEmpty ? jsonEncode(headers) : "None"}
+- **Parameters:** ${parameters.isNotEmpty ? jsonEncode(parameters) : "None"}
+
+**Test Case Requirements:**
+1. Normal case (valid input, expected success)
+2. Edge case (unexpected or boundary values)
+3. Missing required parameters
+4. Invalid authentication (if applicable)
+5. Error handling for different status codes
+
+**Example Test Case Format:**
+\`\`\`
+@Test
+void testValidRequest() {
+  final response = sendRequest("$endpoint", method: "$method", params: $exampleParams);
+  assert(response.status == 200);
+}
+\`\`\`
+
+Generate test cases covering all scenarios.
+''';
     return generateResponse(prompt);
   }
+
+  /// Generate example parameter values based on parameter names
+  Future<Map<String, dynamic>> generateExampleParams({
+    required dynamic requestModel,
+    required dynamic responseModel,
+  }) async {
+    final ollamaService = OllamaService();
+    final String apiEndpoint = requestModel.httpRequestModel?.url ?? "Unknown Endpoint";
+    final String apiMethod = requestModel.httpRequestModel?.method.name ?? "GET";
+    final Map<String, dynamic> apiHeaders = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
+    final Map<String, dynamic> apiParams = requestModel.httpRequestModel?.enabledParamsMap ?? {};
+    final String? apiBody = requestModel.httpRequestModel?.body;
+
+    final dynamic rawResponse = responseModel?.body;
+    final Map<String, dynamic>? apiResponse = (rawResponse is String)
+        ? jsonDecode(rawResponse)
+        : rawResponse is Map<String, dynamic>
+            ? rawResponse
+            : null;
+
+    // Construct LLM prompt to analyze and extract meaningful test cases
+    final String prompt = '''
+Analyze the following API request and generate structured example parameters.
+
+**API Request:**
+- **Endpoint:** `$apiEndpoint`
+- **Method:** `$apiMethod`
+- **Headers:** ${apiHeaders.isNotEmpty ? jsonEncode(apiHeaders) : "None"}
+- **Parameters:** ${apiParams.isNotEmpty ? jsonEncode(apiParams) : "None"}
+- **Body:** ${apiBody ?? "None"}
+
+**Response:**
+- **Status Code:** ${responseModel?.statusCode ?? "Unknown"}
+- **Response Body:** ${apiResponse != null ? jsonEncode(apiResponse) : rawResponse}
+
+### **Required Output Format**
+1. **Standard Example Values**: Assign the most appropriate example values for each parameter.
+2. **Edge Cases**: Provide at least 2 edge cases per parameter.
+3. **Invalid Cases**: Generate invalid inputs for error handling.
+4. **Output must be in valid JSON format.**
+''';
+
+    // Force LLM to return structured JSON output
+    final String response = await ollamaService.generateResponse(prompt);
+
+    try {
+      return jsonDecode(response) as Map<String, dynamic>;
+    } catch (e) {
+      return {"error": "Failed to parse response from LLM."};
+    }
+  }
+}
\ No newline at end of file
diff --git a/lib/widgets/chatbot_widget.dart b/lib/widgets/chatbot_widget.dart
index 56246980..ae41fc27 100644
--- a/lib/widgets/chatbot_widget.dart
+++ b/lib/widgets/chatbot_widget.dart
@@ -3,140 +3,181 @@ import 'package:flutter_riverpod/flutter_riverpod.dart';
 import 'package:apidash/providers/providers.dart';
 
 class ChatbotWidget extends ConsumerStatefulWidget {
   const ChatbotWidget({Key? key}) : super(key: key);
 
   @override
   _ChatbotWidgetState createState() => _ChatbotWidgetState();
 }
 
 class _ChatbotWidgetState extends ConsumerState<ChatbotWidget> {
   final TextEditingController _controller = TextEditingController();
   final List<Map<String, String>> _messages = [];
   bool _isLoading = false;
 
   void _sendMessage(String message) async {
     if (message.trim().isEmpty) return;
     final ollamaService = ref.read(ollamaServiceProvider);
     final requestModel = ref.read(selectedRequestModelProvider);
     final responseModel = requestModel?.httpResponseModel;
 
     setState(() {
       _messages.add({'role': 'user', 'message': message});
       _controller.clear();
       _isLoading = true;
     });
+
     try {
       String response;
       if (message == "Explain API") {
         response = await ollamaService.explainLatestApi(
           requestModel: requestModel,
           responseModel: responseModel,
         );
+      } else if (message == "Debug API") {
+        response = await ollamaService.debugApi(
+          requestModel: requestModel,
+          responseModel: responseModel,
+        );
+      } else if (message == "Generate Test Case") {
+        response = await ollamaService.generateTestCases(
+          requestModel: requestModel,
+          responseModel: responseModel,
+        );
       } else {
         response = await ollamaService.generateResponse(message);
       }
 
       setState(() {
         _messages.add({'role': 'bot', 'message': response});
       });
     } catch (error) {
       setState(() {
         _messages.add({'role': 'bot', 'message': "Error: ${error.toString()}"});
       });
     } finally {
       setState(() => _isLoading = false);
     }
   }
 
   @override
   Widget build(BuildContext context) {
+    final requestModel = ref.watch(selectedRequestModelProvider);
+    final statusCode = requestModel?.httpResponseModel?.statusCode;
+    // Only surface the Debug action when the latest response indicates an error.
+    final showDebugButton = statusCode != null && statusCode >= 400;
+
     return Container(
       height: 400,
       padding: const EdgeInsets.all(16),
       decoration: BoxDecoration(
         color: Theme.of(context).colorScheme.surface,
         borderRadius: BorderRadius.circular(12),
         boxShadow: const [
           BoxShadow(color: Colors.black12, blurRadius: 8, offset: Offset(0, 4)),
         ],
       ),
       child: Column(
         children: [
           Row(
             children: [
               ElevatedButton.icon(
                 onPressed: () => _sendMessage("Explain API"),
                 icon: const Icon(Icons.info_outline),
                 label: const Text("Explain API"),
               ),
+              if (showDebugButton) ...[
+                const SizedBox(width: 8),
+                ElevatedButton.icon(
+                  onPressed: () => _sendMessage("Debug API"),
+                  icon: const Icon(Icons.bug_report),
+                  label: const Text("Debug"),
+                  style: ElevatedButton.styleFrom(
+                    backgroundColor: Colors.redAccent,
+                  ),
+                ),
+              ],
+              const SizedBox(width: 8),
+              ElevatedButton.icon(
+                onPressed: () => _sendMessage("Generate Test Case"),
+                icon: const Icon(Icons.developer_mode),
+                label: const Text("Test Case"),
+                style: ElevatedButton.styleFrom(
+                  backgroundColor: Colors.blueAccent,
+                ),
+              ),
               const Spacer(),
             ],
           ),
           Expanded(
             child: ListView.builder(
               reverse: true,
               itemCount: _messages.length,
               itemBuilder: (context, index) {
                 final message = _messages.reversed.toList()[index];
                 return ChatBubble(
                   message: message['message'] ?? '',
                   isUser: message['role'] == 'user',
                 );
               },
             ),
           ),
           if (_isLoading)
             const Padding(
               padding: EdgeInsets.all(8.0),
               child: CircularProgressIndicator(),
             ),
           Row(
             children: [
               Expanded(
                 child: TextField(
                   controller: _controller,
                   decoration: InputDecoration(
                     hintText: 'Ask something...',
                     border: OutlineInputBorder(
                         borderRadius: BorderRadius.circular(8)),
                   ),
                   onSubmitted: _sendMessage,
                 ),
               ),
               IconButton(
                 icon: const Icon(Icons.send),
                 onPressed: () => _sendMessage(_controller.text),
               ),
             ],
           ),
         ],
       ),
     );
   }
 }
 
 class ChatBubble extends StatelessWidget {
   final String message;
   final bool isUser;
 
   const ChatBubble({super.key, required this.message, this.isUser = false});
 
   @override
   Widget build(BuildContext context) {
     return Align(
       alignment: isUser ? Alignment.centerRight : Alignment.centerLeft,
       child: Container(
         margin: const EdgeInsets.symmetric(vertical: 4),
         padding: const EdgeInsets.all(12),
         decoration: BoxDecoration(
           color: isUser
               ? Theme.of(context).colorScheme.primaryContainer
               : Theme.of(context).colorScheme.secondaryContainer,
           borderRadius: BorderRadius.circular(8),
         ),
         child: Text(message),
       ),
     );
   }
 }
\ No newline at end of file
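Reviewer note (not part of the diff): a minimal sketch of how the new `OllamaService` could be exercised in isolation, assuming a local Ollama daemon is listening on 127.0.0.1:11434 and the `llama3.2:1b` model has already been pulled. The relative import path and the standalone `main` below are illustrative only; the "Explain API", "Debug API", and "Generate Test Case" flows are driven by `ChatbotWidget` and require real request/response models.

```dart
import 'services/ollama_service.dart'; // hypothetical path for a standalone check

Future<void> main() async {
  // Uses the base URL and model hard-coded in OllamaService.
  final service = OllamaService();

  // Free-form prompt through the same method the chatbot uses for plain messages.
  final answer = await service.generateResponse(
    'In one sentence, what does HTTP status 404 mean?',
  );
  print(answer);
}
```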