diff --git a/routes/config/controllers/geminiController.js b/routes/config/controllers/geminiController.js
new file mode 100644
index 0000000..b84f416
--- /dev/null
+++ b/routes/config/controllers/geminiController.js
@@ -0,0 +1,74 @@
+// (Handles AI logic & feedback processing)
+import fetch from "node-fetch";
+import { ConversationChain } from "langchain/chains";
+import { model, memory } from "../config/langchain.js";
+
+// Validate Gemini API Key
+export const validateGeminiKey = async (req, res) => {
+  const { geminiKey } = req.body;
+  if (!geminiKey) return res.status(400).json({ message: "Gemini API key is required." });
+
+  try {
+    const geminiUrl = `https://generativelanguage.googleapis.com/v1beta/models?key=${geminiKey}`;
+    const geminiRes = await fetch(geminiUrl);
+
+    if (!geminiRes.ok) {
+      const errorData = await geminiRes.json();
+      return res.status(400).json({ valid: false, message: errorData.error?.message || "Invalid API key" });
+    }
+
+    res.json({ valid: true });
+  } catch (err) {
+    console.error("Validation Error:", err);
+    res.status(500).json({ valid: false, message: "Failed to validate API key." });
+  }
+};
+
+// Get Feedback from Gemini API with Memory & Structured Categorization
+export const getGeminiFeedback = async (req, res) => {
+  const { pseudocode, geminiKey } = req.body;
+  if (!pseudocode || !geminiKey) return res.status(400).json({ message: "Pseudocode and Gemini API key are required." });
+
+  try {
+    // A ConversationChain wires the model and buffer memory together, so previous
+    // iterations are injected into the prompt automatically (replaces the original
+    // manual memory.saveContext / model.call usage, which did not match the LangChain.js API).
+    const chain = new ConversationChain({ llm: model, memory });
+
+    const response = await chain.call({
+      input: `Analyze this pseudocode and provide feedback in structured categories:
+      1. **Syntax Issues:** Highlight syntax errors.
+      2. **Logical Errors:** Identify mistakes in flow.
+      3. **Best Practices:** Improve code readability.
+      4. **Optimization Suggestions:** Enhance efficiency.
+
+      Consider previous iterations for detailed recommendations.
+
+      Pseudocode:
+      ${pseudocode}`,
+    });
+
+    const feedbackText = response?.response || "No feedback received.";
+
+    // Pull the first line of each category out of the model's answer, with fallbacks.
+    const formattedFeedback = {
+      syntaxIssues: feedbackText.match(/Syntax Issues:\s*(.*)/)?.[1] || "No syntax errors detected.",
+      logicalErrors: feedbackText.match(/Logical Errors:\s*(.*)/)?.[1] || "No logical errors found.",
+      bestPractices: feedbackText.match(/Best Practices:\s*(.*)/)?.[1] || "No best practices suggested.",
+      optimizationSuggestions: feedbackText.match(/Optimization Suggestions:\s*(.*)/)?.[1] || "No optimization recommendations.",
+    };
+
+    res.json({ feedback: formattedFeedback });
+  } catch (err) {
+    console.error("Gemini Feedback Error:", err);
+    res.status(500).json({ message: "Failed to get feedback from Gemini AI." });
+  }
+};
diff --git a/routes/config/controllers/sever.js b/routes/config/controllers/sever.js
new file mode 100644
index 0000000..5332181
--- /dev/null
+++ b/routes/config/controllers/sever.js
@@ -0,0 +1,16 @@
+// Main Express server setup
+import express from "express";
+import dotenv from "dotenv";
+import geminiRoutes from "./routes/gemini.js";
+
+dotenv.config();
+
+const app = express();
+app.use(express.json());
+
+// Register routes
+app.use("/api", geminiRoutes);
+
+// Start the server
+const PORT = process.env.PORT || 5000;
+app.listen(PORT, () => console.log(`Server running on port ${PORT}`));
diff --git a/routes/config/langchain.js b/routes/config/langchain.js
new file mode 100644
index 0000000..a6b871a
--- /dev/null
+++ b/routes/config/langchain.js
@@ -0,0 +1,16 @@
+// Sets up the LangChain model, memory, and configuration for structured AI interactions.
+// (Configures the LangChain chat model & conversation memory)
+
+// NOTE: uses LangChain's Google GenAI integration (@langchain/google-genai); the original
+// ChatOpenAI client cannot call Gemini, and ConversationBufferMemory is not exported by
+// LangChain.js (the JS equivalent is BufferMemory).
+import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
+import { BufferMemory } from "langchain/memory";
+import dotenv from "dotenv";
+
+dotenv.config();
+
+export const model = new ChatGoogleGenerativeAI({
+  modelName: "gemini-pro",
+  temperature: 0.7,
+  apiKey: process.env.GEMINI_API_KEY, // API key read from the environment
+});
+
+export const memory = new BufferMemory(); // Stores past iterations of the conversation
diff --git a/routes/gemini.js b/routes/gemini.js
new file mode 100644
index 0000000..95dfc49
--- /dev/null
+++ b/routes/gemini.js
@@ -0,0 +1,14 @@
+// Contains the Express.js routes for API key validation and feedback processing.
+
+import express from "express";
+import { validateGeminiKey, getGeminiFeedback } from "../controllers/geminiController.js";
+
+const router = express.Router();
+
+// Validate Gemini API Key
+router.post("/validate-gemini-key", validateGeminiKey);
+
+// Get feedback with memory & structured categorization
+router.post("/gemini-feedback", getGeminiFeedback);
+
+export default router;
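For reference, the snippet below is a minimal client-side sketch of how the two new endpoints could be exercised once the server is running. It assumes the default port 5000 from the server setup above and Node 18+ (built-in fetch, run as an ES module); the BASE_URL constant and the sample pseudocode string are illustrative and not part of the diff.

// Minimal usage sketch for the new endpoints (assumes the server runs locally on
// the default port 5000; BASE_URL and the sample pseudocode are illustrative only).
const BASE_URL = "http://localhost:5000/api";
const geminiKey = process.env.GEMINI_API_KEY;

// 1. Validate the key -> { valid: true } or { valid: false, message: ... }
const validation = await fetch(`${BASE_URL}/validate-gemini-key`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ geminiKey }),
}).then((r) => r.json());
console.log("validation:", validation);

// 2. Request structured feedback -> { feedback: { syntaxIssues, logicalErrors,
//    bestPractices, optimizationSuggestions } }
const feedback = await fetch(`${BASE_URL}/gemini-feedback`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    geminiKey,
    pseudocode: "FOR i = 1 TO 10\n  PRINT i\nENDFOR",
  }),
}).then((r) => r.json());
console.log("feedback:", feedback.feedback);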