Refactored the DashBot logic into the dashbot directory

This commit is contained in:
siddu015
2025-02-27 15:05:54 +05:30
parent 7b271a8ecd
commit 18a65bcf61
10 changed files with 135 additions and 366 deletions

View File

@@ -0,0 +1,61 @@
import 'dart:convert';

import '../services/dashbot_service.dart';
import 'package:apidash/models/request_model.dart';

class ExplainFeature {
  final DashBotService _service;

  ExplainFeature(this._service);

  Future<String> explainLatestApi({
    required RequestModel? requestModel,
    required dynamic responseModel,
  }) async {
    if (requestModel == null || responseModel == null) {
      return "No recent API requests found.";
    }

    if (requestModel.httpRequestModel?.url == null) {
      return "Error: Invalid API request (missing endpoint).";
    }

    final method = requestModel.httpRequestModel?.method.toString().split('.').last.toUpperCase() ?? "GET";
    final endpoint = requestModel.httpRequestModel!.url!;
    final headers = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
    final parameters = requestModel.httpRequestModel?.enabledParamsMap ?? {};
    final body = requestModel.httpRequestModel?.body;

    final rawResponse = responseModel.body;
    final responseBody = rawResponse is String ? rawResponse : jsonEncode(rawResponse);
    final statusCode = responseModel.statusCode ?? 0;

    final prompt = '''
Analyze this API interaction and **identify discrepancies**:

**API Request:**
- Endpoint: `$endpoint`
- Method: `$method`
- Headers: ${headers.isNotEmpty ? jsonEncode(headers) : "None"}
- Parameters: ${parameters.isNotEmpty ? jsonEncode(parameters) : "None"}
- Body: ${body ?? "None"}

**API Response:**
- Status Code: $statusCode
- Body:
\`\`\`json
$responseBody
\`\`\`

**Instructions:**
1. Start with a **summary** of the API interaction.
2. List **validation issues** (e.g., missing headers, invalid parameters).
3. Highlight **request/response mismatches** (e.g., unexpected data types, missing fields).
4. Suggest **concrete improvements** (e.g., fix parameters, add error handling).

**Format:**
- Use Markdown with headings (`##`, `###`).
- Include bullet points for clarity.
''';

    return _service.generateResponse(prompt);
  }
}

lib/dashbot/main.dart Normal file
View File

View File

@@ -1,6 +1,7 @@
import 'dart:convert';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:shared_preferences/shared_preferences.dart';
import '../services/dashbot_service.dart';
final chatMessagesProvider = StateNotifierProvider<ChatMessagesNotifier, List<Map<String, dynamic>>>(
(ref) => ChatMessagesNotifier(),
@@ -36,3 +37,7 @@ class ChatMessagesNotifier extends StateNotifier<List<Map<String, dynamic>>> {
_saveMessages();
}
}
final dashBotServiceProvider = Provider<DashBotService>((ref) {
return DashBotService();
});
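For orientation, here is a minimal sketch of how a widget could consume the new dashBotServiceProvider through Riverpod. The provider and the import path come from this commit; the widget name and button wiring below are illustrative only and not part of the change.

import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:apidash/dashbot/providers/dashbot_providers.dart';

// Hypothetical widget (not in this commit) that sends a free-form prompt to DashBot.
class AskDashBotButton extends ConsumerWidget {
  const AskDashBotButton({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context, WidgetRef ref) {
    return ElevatedButton(
      onPressed: () async {
        // Read the service once; the button does not need to rebuild when it changes.
        final service = ref.read(dashBotServiceProvider);
        final answer = await service.generateResponse("What does HTTP 404 mean?");
        debugPrint(answer);
      },
      child: const Text("Ask DashBot"),
    );
  }
}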

View File

@@ -0,0 +1,27 @@
import 'dart:convert';

import 'package:ollama_dart/ollama_dart.dart';
import '../features/explain.dart';
import 'package:apidash/models/request_model.dart';

class DashBotService {
  final OllamaClient _client;
  late final ExplainFeature _explainFeature;

  DashBotService() : _client = OllamaClient(baseUrl: 'http://127.0.0.1:11434/api') {
    _explainFeature = ExplainFeature(this);
  }

  Future<String> generateResponse(String prompt) async {
    final response = await _client.generateCompletion(
      request: GenerateCompletionRequest(model: 'llama3.2:3b', prompt: prompt),
    );
    return response.response.toString();
  }

  Future<String> handleRequest(String input, RequestModel? requestModel, dynamic responseModel) async {
    if (input == "Explain API") {
      return _explainFeature.explainLatestApi(requestModel: requestModel, responseModel: responseModel);
    }
    return generateResponse(input);
  }
}
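As a quick sanity check, a hedged usage sketch of DashBotService.handleRequest. The import path assumes the service lives at lib/dashbot/services/dashbot_service.dart (consistent with the relative imports above), and the second call presumes a local Ollama server on 127.0.0.1:11434 with the llama3.2:3b model pulled, as hard-coded in the service.

import 'package:apidash/dashbot/services/dashbot_service.dart';

Future<void> main() async {
  final service = DashBotService();

  // "Explain API" is routed to ExplainFeature; with no selected request it
  // short-circuits to the fallback text instead of calling the model.
  final explanation = await service.handleRequest("Explain API", null, null);
  print(explanation); // "No recent API requests found."

  // Any other input is forwarded to the LLM as a free-form prompt.
  final reply = await service.handleRequest("Suggest headers for a JSON POST request", null, null);
  print(reply);
}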

View File

@@ -1,116 +1,63 @@
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:apidash/providers/providers.dart';
import 'package:flutter_markdown/flutter_markdown.dart';
import 'package:apidash/dashbot/providers/dashbot_providers.dart';
import 'package:apidash/providers/providers.dart';
import '../../providers/collection_providers.dart';
class ChatbotWidget extends ConsumerStatefulWidget {
const ChatbotWidget({Key? key}) : super(key: key);
class DashBotWidget extends ConsumerStatefulWidget {
const DashBotWidget({Key? key}) : super(key: key);
@override
_ChatbotWidgetState createState() => _ChatbotWidgetState();
_DashBotWidgetState createState() => _DashBotWidgetState();
}
class _ChatbotWidgetState extends ConsumerState<ChatbotWidget> {
class _DashBotWidgetState extends ConsumerState<DashBotWidget> {
final TextEditingController _controller = TextEditingController();
bool _isLoading = false;
List<Map<String, dynamic>> get _messages => ref.watch(chatMessagesProvider);
Future<void> _handleCodeGeneration() async {
final language = await showDialog<String>(
context: context,
builder: (context) => AlertDialog(
title: const Text('Select Language'),
content: SingleChildScrollView(
child: Column(
mainAxisSize: MainAxisSize.min,
children: [
for (var lang in [
'Flutter (UI)',
'React (UI)',
'Dart (Console)',
'Python',
'JavaScript',
'Node.js',
'Java',
'C#'
])
ListTile(
title: Text(lang),
onTap: () => Navigator.pop(context, lang),
),
],
),
),
),
);
if (language != null) {
_sendMessage("Generate $language Code");
}
}
void _sendMessage(String message) async {
Future<void> _sendMessage(String message) async {
if (message.trim().isEmpty) return;
final ollamaService = ref.read(ollamaServiceProvider);
final dashBotService = ref.read(dashBotServiceProvider);
final requestModel = ref.read(selectedRequestModelProvider);
final responseModel = requestModel?.httpResponseModel;
setState(() {
_isLoading = true;
});
setState(() => _isLoading = true);
ref.read(chatMessagesProvider.notifier).addMessage({
'role': 'user',
'message': message
'message': message,
});
try {
String response;
final response = await dashBotService.handleRequest(message, requestModel, responseModel);
final formattedResponse = _formatMarkdown(response);
if (message == "Explain API") {
response = await ollamaService.explainLatestApi(
requestModel: requestModel,
responseModel: responseModel,
);
} else if (message == "Debug API") {
response = await ollamaService.debugApi(
requestModel: requestModel,
responseModel: responseModel,
);
} else if (message == "Generate Test Case") {
response = await ollamaService.generateTestCases(
requestModel: requestModel,
responseModel: responseModel,
);
} else if (message.startsWith("Generate ") && message.endsWith(" Code")) {
final language = message.replaceAll("Generate ", "").replaceAll(" Code", "");
response = await ollamaService.generateCode(
requestModel: requestModel,
responseModel: responseModel,
language: language,
);
} else {
response = await ollamaService.generateResponse(message);
}
setState(() {
_messages.add({
'role': 'bot',
'message': response.contains("```") ? response : "```\n$response\n```"
});
ref.read(chatMessagesProvider.notifier).addMessage({
'role': 'bot',
'message': formattedResponse,
});
} catch (error) {
setState(() {
_messages.add({'role': 'bot', 'message': "Error: ${error.toString()}"});
ref.read(chatMessagesProvider.notifier).addMessage({
'role': 'bot',
'message': "Error: ${error.toString()}",
});
} finally {
setState(() => _isLoading = false);
}
}
String _formatMarkdown(String text) {
if (!text.contains("```") && text.trim().isNotEmpty) {
text = "```\n$text\n```";
}
text = text.replaceAllMapped(RegExp(r'^\*\*(.*?)\*\*', multiLine: true),
(match) => '## ${match.group(1)}');
return text;
}
@override
Widget build(BuildContext context) {
final messages = ref.watch(chatMessagesProvider);
final requestModel = ref.watch(selectedRequestModelProvider);
final statusCode = requestModel?.httpResponseModel?.statusCode;
final showDebugButton = statusCode != null && statusCode >= 400;
@@ -121,9 +68,7 @@ class _ChatbotWidgetState extends ConsumerState<ChatbotWidget> {
decoration: BoxDecoration(
color: Theme.of(context).colorScheme.surface,
borderRadius: BorderRadius.circular(12),
boxShadow: const [
BoxShadow(color: Colors.black12, blurRadius: 8, offset: Offset(0, 4)),
],
boxShadow: const [BoxShadow(color: Colors.black12, blurRadius: 8, offset: Offset(0, 4))],
),
child: Column(
children: [
@@ -149,31 +94,15 @@ class _ChatbotWidgetState extends ConsumerState<ChatbotWidget> {
icon: const Icon(Icons.info_outline),
label: const Text("Explain API"),
),
if (showDebugButton)
ElevatedButton.icon(
onPressed: () => _sendMessage("Debug API"),
icon: const Icon(Icons.bug_report),
label: const Text("Debug"),
),
ElevatedButton.icon(
onPressed: () => _sendMessage("Generate Test Case"),
icon: const Icon(Icons.developer_mode),
label: const Text("Test Case"),
),
ElevatedButton.icon(
onPressed: _handleCodeGeneration,
icon: const Icon(Icons.code),
label: const Text("Generate Code"),
),
],
),
const SizedBox(height: 12),
Expanded(
child: ListView.builder(
reverse: true,
itemCount: _messages.length,
itemCount: messages.length,
itemBuilder: (context, index) {
final message = _messages.reversed.toList()[index];
final message = messages.reversed.toList()[index];
return ChatBubble(
message: message['message'],
isUser: message['role'] == 'user',
@@ -251,9 +180,7 @@ class ChatBubble extends StatelessWidget {
fontWeight: FontWeight.w600,
color: Theme.of(context).colorScheme.onSurface,
),
listBullet: TextStyle(
color: Theme.of(context).colorScheme.onSurface,
),
listBullet: TextStyle(color: Theme.of(context).colorScheme.onSurface),
),
),
),
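The _formatMarkdown helper added above wraps plain model output in a fenced block and promotes lines that start with **bold** text to level-2 headings. Below is a standalone reproduction of that transform, shown only to illustrate the expected output on a plain-text reply; it is not part of the commit.

String formatMarkdown(String text) {
  // Wrap unfenced, non-empty text in a code fence, as the widget does.
  if (!text.contains("```") && text.trim().isNotEmpty) {
    text = "```\n$text\n```";
  }
  // Turn lines that start with **Heading** into "## Heading".
  return text.replaceAllMapped(
    RegExp(r'^\*\*(.*?)\*\*', multiLine: true),
    (match) => '## ${match.group(1)}',
  );
}

void main() {
  const reply = "**Summary**\nThe request returned 200 OK.";
  print(formatMarkdown(reply));
  // Output:
  // ```
  // ## Summary
  // The request returned 200 OK.
  // ```
}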

View File

@@ -1,7 +0,0 @@
import 'package:apidash/services/ollama_service.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
// Add this to your existing providers
final ollamaServiceProvider = Provider<OllamaService>((ref) {
return OllamaService();
});

View File

@@ -3,6 +3,3 @@ export 'environment_providers.dart';
export 'history_providers.dart';
export 'settings_providers.dart';
export 'ui_providers.dart';
export 'ollama_providers.dart';
export 'dashbot_messages.dart';

View File

@@ -4,7 +4,7 @@ import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:apidash/providers/providers.dart';
import 'package:apidash/widgets/widgets.dart';
import 'package:apidash/consts.dart';
import '../widgets/chatbot_widget.dart';
import '../dashbot/widgets/dashbot_widget.dart';
import 'common_widgets/common_widgets.dart';
import 'envvar/environment_page.dart';
import 'home_page/home_page.dart';
@@ -132,7 +132,7 @@ class Dashboard extends ConsumerWidget {
isScrollControlled: true,
builder: (context) => const Padding(
padding: EdgeInsets.all(16.0),
child: ChatbotWidget(),
child: DashBotWidget(),
),
),
child: const Icon(Icons.help_outline),
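For context, the hunk above only shows the builder and isScrollControlled lines of the call that presents DashBotWidget. The FloatingActionButton and showModalBottomSheet call in the sketch below are assumptions about the unchanged surrounding code, included purely to show the full call site in one place.

import 'package:flutter/material.dart';
import 'package:apidash/dashbot/widgets/dashbot_widget.dart';

// Hedged sketch of the call site; unchanged lines are not part of this diff.
class DashBotFab extends StatelessWidget {
  const DashBotFab({Key? key}) : super(key: key);

  @override
  Widget build(BuildContext context) {
    return FloatingActionButton(
      onPressed: () => showModalBottomSheet(
        context: context,
        isScrollControlled: true,
        builder: (context) => const Padding(
          padding: EdgeInsets.all(16.0),
          child: DashBotWidget(),
        ),
      ),
      child: const Icon(Icons.help_outline),
    );
  }
}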

View File

@@ -1,249 +0,0 @@
import 'dart:convert';
import 'package:ollama_dart/ollama_dart.dart';
class OllamaService {
final OllamaClient _client;
OllamaService() : _client = OllamaClient(baseUrl: 'http://127.0.0.1:11434/api');
// Generate response
Future<String> generateResponse(String prompt) async {
final response = await _client.generateCompletion(
request: GenerateCompletionRequest(
model: 'llama3.2:3b',
prompt: prompt
),
);
return response.response.toString();
}
// Explain responses & identify any discrepancy
Future<String> explainLatestApi({required dynamic requestModel, required dynamic responseModel}) async {
if (requestModel == null || responseModel == null) {
return "No recent API requests found.";
}
// Validate critical fields
if (requestModel.httpRequestModel?.url == null) {
return "Error: Invalid API request (missing endpoint).";
}
// Extract request details
final method = requestModel.httpRequestModel?.method
.toString()
.split('.')
.last
.toUpperCase() ?? "GET";
final endpoint = requestModel.httpRequestModel!.url!;
final headers = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
final parameters = requestModel.httpRequestModel?.enabledParamsMap ?? {};
final body = requestModel.httpRequestModel?.body;
// Process response
final rawResponse = responseModel.body;
final responseBody = rawResponse is String ? rawResponse : jsonEncode(rawResponse);
final statusCode = responseModel.statusCode ?? 0;
final prompt = '''
Analyze this API interaction and **identify discrepancies**:
**API Request:**
- Endpoint: `$endpoint`
- Method: `$method`
- Headers: ${headers.isNotEmpty ? jsonEncode(headers) : "None"}
- Parameters: ${parameters.isNotEmpty ? jsonEncode(parameters) : "None"}
- Body: ${body ?? "None"}
**API Response:**
- Status Code: $statusCode
- Body:
\`\`\`json
$responseBody
\`\`\`
**Instructions:**
1. Start with a **summary** of the API interaction.
2. List **validation issues** (e.g., missing headers, invalid parameters).
3. Highlight **request/response mismatches** (e.g., unexpected data types, missing fields).
4. Suggest **concrete improvements** (e.g., fix parameters, add error handling).
**Format:**
- Use Markdown with headings (`##`, `###`).
- Include bullet points for clarity.
''';
return generateResponse(prompt);
}
// Debugging Failed API Requests
Future<String> debugApi({required dynamic requestModel, required dynamic responseModel}) async {
if (requestModel == null || responseModel == null) {
return "There are no recent API Requests to debug.";
}
final requestJson = jsonEncode(requestModel.toJson());
final responseJson = jsonEncode(responseModel.toJson());
final statusCode = responseModel.statusCode;
final prompt = '''
Provide detailed debugging steps for this failed API request:
**Status Code:** $statusCode
**Request Details:**
$requestJson
**Response Details:**
$responseJson
Provide a step-by-step debugging guide including:
1. Common causes for this status code
2. Specific issues in the request
3. Potential fixes
4. Recommended next steps
Format the response with clear headings and bullet points.
''';
return generateResponse(prompt);
}
// Generating test cases for API
Future<String> generateTestCases({required dynamic requestModel, required dynamic responseModel}) async {
final method = requestModel.httpRequestModel?.method
.toString()
.split('.')
.last
.toUpperCase()
?? "GET";
final endpoint = requestModel.httpRequestModel?.url ?? "Unknown endpoint";
final headers = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
final parameters = requestModel.httpRequestModel?.enabledParamsMap ?? {};
final body = requestModel.httpRequestModel?.body;
final responseBody = responseModel.body;
final exampleParams = await generateExampleParams(
requestModel: requestModel,
responseModel: responseModel,
);
final prompt = '''
**API Request:**
- **Endpoint:** `$endpoint`
- **Method:** `$method`
- **Headers:** ${headers.isNotEmpty ? jsonEncode(headers) : "None"}
- **Parameters:** ${parameters.isNotEmpty ? jsonEncode(parameters) : "None"}
-**body:** ${body ?? "None"}
here is an example test case for the given:$exampleParams
**Instructions:**
- Generate example parameter values for the request.
-Generate the url of as i provided in the api reuest
-generate same to same type of test case url for test purpose
''';
return generateResponse(prompt);
}
// Generating Example Programming on API for different languages
Future<Map<String, dynamic>> generateExampleParams({required dynamic requestModel, required dynamic responseModel,}) async {
final ollamaService = OllamaService();
final method = requestModel.httpRequestModel?.method
.toString()
.split('.')
.last
.toUpperCase()
?? "GET";
final endpoint = requestModel.httpRequestModel?.url ?? "Unknown endpoint";
final headers = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
final parameters = requestModel.httpRequestModel?.enabledParamsMap ?? {};
final body = requestModel.httpRequestModel?.body;
final dynamic rawResponse = responseModel?.body;
final Map<String, dynamic>? apiResponse =
(rawResponse is String) ? jsonDecode(rawResponse) : rawResponse is Map<String, dynamic> ? rawResponse : null;
// Construct LLM prompt to analyze and extract meaningful test cases
final String prompt = '''
Analyze the following API request and generate structured example parameters.
**API Request:**
- **Endpoint:** `$endpoint`
- **Method:** `$method`
- **Headers:** ${headers.isNotEmpty ? jsonEncode(headers) : "None"}
- **Parameters:** ${parameters.isNotEmpty ? jsonEncode(parameters) : "None"}
- **Body:** ${body ?? "None"}
**Instructions:**
- Generate example parameter values for the request.
- Generate the URL exactly as provided in the API request.
- Generate an equivalent test-case URL for testing purposes.
- Return the result strictly as a JSON object with no extra text.
''';
// Force LLM to return structured JSON output
final String response = await ollamaService.generateResponse(prompt);
try {
return jsonDecode(response) as Map<String, dynamic>;
} catch (e) {
return {"error": "Failed to parse response from LLM."};
}
}
Future<String> generateCode({required dynamic requestModel, required dynamic responseModel, required String language}) async {
final method = requestModel.httpRequestModel?.method
?.toString()
?.split('.')
?.last
?.toUpperCase() ?? "GET";
final endpoint = requestModel.httpRequestModel?.url ?? "Unknown endpoint";
final headers = requestModel.httpRequestModel?.enabledHeadersMap ?? {};
final params = requestModel.httpRequestModel?.enabledParamsMap ?? {};
final body = requestModel.httpRequestModel?.body;
final responseBody = responseModel.body;
final prompt = '''
Generate complete $language code for this API integration:
API Request:
- URL: $endpoint
- Method: $method
- Headers: ${headers.isEmpty ? 'None' : jsonEncode(headers)}
- Params: ${params.isEmpty ? 'None' : jsonEncode(params)}
- Body: ${body ?? 'None'}
Response Structure:
${_formatResponse(responseBody)}
Requirements:
1. Single-file solution with no external config
2. Direct API URL implementation
3. Error handling for network/status errors
4. UI components matching response data
5. Ready-to-run code with example data display
Generate complete implementation code only.
''';
return generateResponse(prompt);
}
String _formatResponse(dynamic response) {
if (response is Map) {
return response.entries
.map((e) => '${e.key}: ${_valueType(e.value)}')
.join('\n');
}
return response?.toString() ?? 'No response body';
}
String _valueType(dynamic value) {
if (value is List) return 'List[${value.isNotEmpty ? _valueType(value.first) : '?'}]';
if (value is Map) return 'Object';
return value.runtimeType.toString();
}
}

View File

@@ -700,6 +700,14 @@ packages:
      url: "https://pub.dev"
    source: hosted
    version: "2.3.2"
  highlight:
    dependency: "direct main"
    description:
      name: highlight
      sha256: "5353a83ffe3e3eca7df0abfb72dcf3fa66cc56b953728e7113ad4ad88497cf21"
      url: "https://pub.dev"
    source: hosted
    version: "0.7.0"
  highlighter:
    dependency: "direct main"
    description: