diff --git a/.github/scripts/check-duplicates.js b/.github/scripts/check-duplicates.js
index 9683379..3ad0bf0 100644
--- a/.github/scripts/check-duplicates.js
+++ b/.github/scripts/check-duplicates.js
@@ -7,7 +7,7 @@ const OWNER = process.env.GITHUB_REPOSITORY.split("/")[0];
const REPO = process.env.GITHUB_REPOSITORY.split("/")[1];
const ISSUE_NUMBER = Number(process.env.ISSUE_NUMBER);
const SIMILARITY_THRESHOLD = parseFloat(
- process.env.SIMILARITY_THRESHOLD || "0.5"
+ process.env.SIMILARITY_THRESHOLD || "0.5",
);
// Initialize Pinecone client
@@ -26,7 +26,7 @@ async function retryApiCall(apiCall, maxRetries = 3, delay = 1000) {
if (i === maxRetries - 1) throw error;
if (error.status === 429 || error.status >= 500) {
console.log(
- `API call failed (attempt ${i + 1}), retrying in ${delay}ms...`
+ `API call failed (attempt ${i + 1}), retrying in ${delay}ms...`,
);
await new Promise((resolve) => setTimeout(resolve, delay));
delay *= 2;
@@ -113,7 +113,7 @@ async function run() {
model: "models/text-embedding-004",
content: { parts: [{ text: text }] },
}),
- }
+ },
);
const data = await response.json();
@@ -162,7 +162,7 @@ async function run() {
}
} else {
console.log(
- " ๐ Filter query returned no results, trying list approach..."
+ " ๐ Filter query returned no results, trying list approach...",
);
let paginationToken = null;
@@ -179,7 +179,7 @@ async function run() {
if (vector.metadata?.issue_number === ISSUE_NUMBER) {
existingVectorIds.push(vector.id);
console.log(
- ` ๐ Found existing vector via list: ${vector.id}`
+ ` ๐ Found existing vector via list: ${vector.id}`,
);
}
}
@@ -193,12 +193,12 @@ async function run() {
console.log(
`Issue exists in DB: ${isEditingExistingIssue ? "YES" : "NO"} (${
existingVectorIds.length
- } vectors found)`
+ } vectors found)`,
);
}, "Could not check for existing issue vectors in the database.");
} catch (error) {
console.error(
- "Vector database check failed, continuing with basic processing..."
+ "Vector database check failed, continuing with basic processing...",
);
}
@@ -220,11 +220,11 @@ async function run() {
console.log(`Found ${results.length} potential matches`);
filteredResults = results.filter(
- (r) => r.metadata?.issue_number !== ISSUE_NUMBER
+ (r) => r.metadata?.issue_number !== ISSUE_NUMBER,
);
console.log(
- `After filtering out current issue: ${filteredResults.length} matches`
+ `After filtering out current issue: ${filteredResults.length} matches`,
);
// Get all potential duplicates above 0.55 threshold for 3-tier system
@@ -238,7 +238,7 @@ async function run() {
.sort((a, b) => b.similarity - a.similarity); // Sort by highest similarity first
console.log(
- `Found ${duplicates.length} potential matches above 0.55 similarity threshold`
+ `Found ${duplicates.length} potential matches above 0.55 similarity threshold`,
);
filteredResults.forEach((result, index) => {
@@ -246,11 +246,11 @@ async function run() {
      let category = "✅ Below threshold";
if (score >= 0.85) category = "๐จ HIGH DUPLICATE";
else if (score >= 0.55) category = "๐ค POTENTIALLY RELATED";
-
+
console.log(
` ${index + 1}. Issue #${
result.metadata?.issue_number || "Unknown"
- } - Score: ${score.toFixed(4)} ${category}`
+ } - Score: ${score.toFixed(4)} ${category}`,
);
console.log(` Title: "${result.metadata?.title || "No title"}"`);
});
@@ -266,18 +266,22 @@ async function run() {
let duplicateAction = "none";
// Categorize duplicates by similarity score
- const highSimilarityDuplicates = duplicates.filter(d => d.similarity >= 0.85);
- const mediumSimilarityDuplicates = duplicates.filter(d => d.similarity >= 0.55 && d.similarity < 0.85);
-
+ const highSimilarityDuplicates = duplicates.filter(
+ (d) => d.similarity >= 0.85,
+ );
+ const mediumSimilarityDuplicates = duplicates.filter(
+ (d) => d.similarity >= 0.55 && d.similarity < 0.85,
+ );
+
if (highSimilarityDuplicates.length > 0) {
// TIER 1: High similarity (>= 0.85) - Auto-close as duplicate
duplicateAction = "auto-close";
shouldUpdateVector = false;
shouldAutoClose = !isEditingExistingIssue;
-
+
const topMatch = highSimilarityDuplicates[0];
const similarityPercent = (topMatch.similarity * 100).toFixed(1);
-
+
if (isEditingExistingIssue) {
commentBody = `๐จ **Warning: Edited Issue Now Appears as Duplicate** ๐จ\n\n`;
commentBody += `After your recent edit, this issue appears to be a duplicate of:\n\n`;
@@ -293,17 +297,18 @@ async function run() {
commentBody += `Please continue the discussion in the original issue above. If your problem is different, please open a new issue with more specific details.\n\n`;
}
- console.log(`๐จ HIGH SIMILARITY DUPLICATE detected! Similarity: ${similarityPercent}% with issue #${topMatch.number}`);
-
+ console.log(
+ `๐จ HIGH SIMILARITY DUPLICATE detected! Similarity: ${similarityPercent}% with issue #${topMatch.number}`,
+ );
} else if (mediumSimilarityDuplicates.length > 0) {
// TIER 2: Medium similarity (0.55-0.84) - Flag as potentially related
duplicateAction = "flag-related";
shouldUpdateVector = true; // Still add to vector DB for unique issues
shouldAutoClose = false;
-
+
const topMatch = mediumSimilarityDuplicates[0];
const similarityPercent = (topMatch.similarity * 100).toFixed(1);
-
+
if (isEditingExistingIssue) {
commentBody = `๐ค **Potentially Related Issue After Edit** ๐ค\n\n`;
commentBody += `After your recent edit, this issue seems related to:\n\n`;
@@ -311,13 +316,14 @@ async function run() {
commentBody = `๐ค **Potentially Related Issue Found** ๐ค\n\n`;
commentBody += `This issue seems related to:\n\n`;
}
-
+
commentBody += `- Issue #${topMatch.number}: "${topMatch.title}" (${similarityPercent}% similar)\n`;
commentBody += ` Link: https://github.com/${OWNER}/${REPO}/issues/${topMatch.number}\n\n`;
commentBody += `This issue is not identical but may be related. A maintainer will review to determine if they should be linked or if this is indeed a separate issue.\n\n`;
-
- console.log(`๐ค POTENTIALLY RELATED issue detected! Similarity: ${similarityPercent}% with issue #${topMatch.number}`);
-
+
+ console.log(
+ `๐ค POTENTIALLY RELATED issue detected! Similarity: ${similarityPercent}% with issue #${topMatch.number}`,
+ );
} else {
// TIER 3: Low similarity (< 0.55) - Treat as unique
duplicateAction = "unique";
@@ -333,7 +339,9 @@ async function run() {
commentBody += `Your contribution helps make this project better. We appreciate you taking the time to report this! ๐\n\n`;
}
-      console.log(`✅ UNIQUE issue confirmed. No similar issues found above 0.55 threshold.`);
+ console.log(
+        `✅ UNIQUE issue confirmed. No similar issues found above 0.55 threshold.`,
+ );
}
commentBody += `*This comment was generated automatically by Seroski-DupBot ๐ค*\n\nCheck out the developer: [Portfolio](https://portfolio.rosk.dev)`;
@@ -358,35 +366,39 @@ async function run() {
if (shouldAutoClose && duplicateAction === "auto-close") {
try {
console.log(`๐ Auto-closing issue #${ISSUE_NUMBER} as duplicate...`);
-
+
// First add the duplicate label
await retryApiCall(async () => {
return await octokit.issues.addLabels({
owner: OWNER,
repo: REPO,
issue_number: ISSUE_NUMBER,
- labels: ['duplicate']
+ labels: ["duplicate"],
});
});
-
+
console.log(`๐ท๏ธ Added 'duplicate' label to issue #${ISSUE_NUMBER}`);
-
+
// Then close the issue with 'not_planned' state reason
await retryApiCall(async () => {
return await octokit.issues.update({
owner: OWNER,
repo: REPO,
issue_number: ISSUE_NUMBER,
- state: 'closed',
- state_reason: 'duplicate'
+ state: "closed",
+ state_reason: "duplicate",
});
});
-
- console.log(`๐ Issue #${ISSUE_NUMBER} has been auto-closed as duplicate`);
-
+
+ console.log(
+ `๐ Issue #${ISSUE_NUMBER} has been auto-closed as duplicate`,
+ );
} catch (error) {
- console.error(`โ Failed to auto-close issue #${ISSUE_NUMBER}:`, error.message);
-
+ console.error(
+ `โ Failed to auto-close issue #${ISSUE_NUMBER}:`,
+ error.message,
+ );
+
// Post error comment if automatic closure fails
try {
await retryApiCall(async () => {
@@ -394,17 +406,23 @@ async function run() {
owner: OWNER,
repo: REPO,
issue_number: ISSUE_NUMBER,
- body: `โ ๏ธ **Auto-close Failed** โ ๏ธ\n\nThis issue was detected as a high-confidence duplicate but could not be automatically closed. A maintainer will review this manually.\n\n*Error: ${error.message}*`
+ body: `โ ๏ธ **Auto-close Failed** โ ๏ธ\n\nThis issue was detected as a high-confidence duplicate but could not be automatically closed. A maintainer will review this manually.\n\n*Error: ${error.message}*`,
});
});
} catch (commentError) {
- console.error(`โ Failed to post error comment: ${commentError.message}`);
+ console.error(
+ `โ Failed to post error comment: ${commentError.message}`,
+ );
}
}
} else if (duplicateAction === "flag-related") {
- console.log(`๐ค Issue #${ISSUE_NUMBER} flagged as potentially related - no auto-action taken`);
+ console.log(
+ `๐ค Issue #${ISSUE_NUMBER} flagged as potentially related - no auto-action taken`,
+ );
} else if (duplicateAction === "unique") {
-    console.log(`✅ Issue #${ISSUE_NUMBER} confirmed as unique - will process normally`);
+ console.log(
+      `✅ Issue #${ISSUE_NUMBER} confirmed as unique - will process normally`,
+ );
}
// Continue with vector database updates only for unique issues
@@ -417,7 +435,7 @@ async function run() {
if (existingVectorIds.length > 0) {
await index.deleteMany(existingVectorIds);
console.log(
- `๐๏ธ Deleted ${existingVectorIds.length} old vector(s)`
+ `๐๏ธ Deleted ${existingVectorIds.length} old vector(s)`,
);
}
@@ -438,7 +456,7 @@ async function run() {
]);
console.log(
-        "✅ Updated issue embedding in Pinecone with new content."
+        "✅ Updated issue embedding in Pinecone with new content.",
);
} else {
console.log("Adding new issue embedding to Pinecone...");
@@ -459,27 +477,33 @@ async function run() {
]);
console.log(
-        "✅ New issue embedding stored in Pinecone for future duplicate detection."
+        "✅ New issue embedding stored in Pinecone for future duplicate detection.",
);
}
}, "Could not update the vector database.");
} catch (error) {
console.error(
- "Failed to update vector database, but issue processing completed."
+ "Failed to update vector database, but issue processing completed.",
);
}
} else {
if (duplicateAction === "auto-close") {
- console.log("โญ๏ธ Skipped adding to Pinecone due to high-confidence duplicate detection and auto-closure.");
+ console.log(
+ "โญ๏ธ Skipped adding to Pinecone due to high-confidence duplicate detection and auto-closure.",
+ );
} else if (duplicateAction === "flag-related") {
-      console.log("✅ Added to Pinecone despite potential relation - issue treated as separate.");
+      console.log(
+        "✅ Added to Pinecone despite potential relation - issue treated as separate.",
+ );
} else if (isEditingExistingIssue) {
- console.log("โ ๏ธ Keeping existing vectors unchanged due to similarity detected after edit.");
+ console.log(
+ "โ ๏ธ Keeping existing vectors unchanged due to similarity detected after edit.",
+ );
}
}
console.log(
- `\n=== Duplicate check completed for issue #${ISSUE_NUMBER} ===\n`
+ `\n=== Duplicate check completed for issue #${ISSUE_NUMBER} ===\n`,
);
}
diff --git a/.github/scripts/cleanup-closed-issue.js b/.github/scripts/cleanup-closed-issue.js
index 6f378a8..fc044cd 100644
--- a/.github/scripts/cleanup-closed-issue.js
+++ b/.github/scripts/cleanup-closed-issue.js
@@ -24,7 +24,7 @@ async function retryApiCall(apiCall, maxRetries = 3, delay = 1000) {
if (i === maxRetries - 1) throw error;
if (error.status === 429 || error.status >= 500) {
console.log(
- `API call failed (attempt ${i + 1}), retrying in ${delay}ms...`
+ `API call failed (attempt ${i + 1}), retrying in ${delay}ms...`,
);
await new Promise((resolve) => setTimeout(resolve, delay));
delay *= 2; // Exponential backoff
@@ -37,21 +37,21 @@ async function retryApiCall(apiCall, maxRetries = 3, delay = 1000) {
async function cleanupClosedIssue() {
console.log(
- `\n=== Cleaning up closed issue #${ISSUE_NUMBER} from vector database ===`
+ `\n=== Cleaning up closed issue #${ISSUE_NUMBER} from vector database ===`,
);
console.log(`Repository: ${OWNER}/${REPO}`);
console.log(`Pinecone Index: ${indexName}`);
if (!OWNER || !REPO) {
console.error(
- "โ Repository owner and name must be specified via GITHUB_REPOSITORY or GITHUB_OWNER/GITHUB_REPO environment variables"
+ "โ Repository owner and name must be specified via GITHUB_REPOSITORY or GITHUB_OWNER/GITHUB_REPO environment variables",
);
process.exit(1);
}
if (!ISSUE_NUMBER) {
console.error(
- "โ Issue number must be specified via ISSUE_NUMBER environment variable"
+ "โ Issue number must be specified via ISSUE_NUMBER environment variable",
);
process.exit(1);
}
@@ -83,7 +83,7 @@ async function cleanupClosedIssue() {
// Query Pinecone to find vectors for this issue with retry logic
console.log(
- `๐ Searching for vectors related to issue #${ISSUE_NUMBER}...`
+ `๐ Searching for vectors related to issue #${ISSUE_NUMBER}...`,
);
const vectorsToDelete = [];
@@ -110,7 +110,7 @@ async function cleanupClosedIssue() {
} else {
// Fallback to listing all vectors (paginated approach)
console.log(
- " ๐ Filter query returned no results, trying list approach..."
+ " ๐ Filter query returned no results, trying list approach...",
);
let paginationToken = null;
@@ -138,7 +138,7 @@ async function cleanupClosedIssue() {
} catch (error) {
console.error(
"โ Failed to search vectors from Pinecone:",
- error.message
+ error.message,
);
throw error;
}
@@ -147,7 +147,7 @@ async function cleanupClosedIssue() {
if (vectorsToDelete.length === 0) {
console.log(
- `โน๏ธ No vectors found for issue #${ISSUE_NUMBER}. It may have been a duplicate issue that was never added to the vector database.`
+ `โน๏ธ No vectors found for issue #${ISSUE_NUMBER}. It may have been a duplicate issue that was never added to the vector database.`,
);
// Still post a cleanup confirmation comment with retry logic
@@ -171,7 +171,7 @@ async function cleanupClosedIssue() {
// Delete the vectors from Pinecone with retry logic
console.log(
- `๐๏ธ Deleting ${vectorsToDelete.length} vector(s) from Pinecone...`
+ `๐๏ธ Deleting ${vectorsToDelete.length} vector(s) from Pinecone...`,
);
try {
@@ -179,7 +179,7 @@ async function cleanupClosedIssue() {
return await index.deleteMany(vectorsToDelete);
});
console.log(
-        `✅ Successfully deleted ${vectorsToDelete.length} vector(s) from Pinecone`
+        `✅ Successfully deleted ${vectorsToDelete.length} vector(s) from Pinecone`,
);
} catch (deleteError) {
console.error(`โ Error deleting vectors:`, deleteError.message);
diff --git a/.github/scripts/cleanup-duplicates.js b/.github/scripts/cleanup-duplicates.js
index 8fc085b..fd9f580 100644
--- a/.github/scripts/cleanup-duplicates.js
+++ b/.github/scripts/cleanup-duplicates.js
@@ -12,7 +12,7 @@ const indexName = process.env.PINECONE_INDEX;
// Add delay to respect API rate limits
function delay(ms) {
- return new Promise(resolve => setTimeout(resolve, ms));
+ return new Promise((resolve) => setTimeout(resolve, ms));
}
async function cleanupDuplicates() {
@@ -29,7 +29,7 @@ async function cleanupDuplicates() {
vector: Array(1024).fill(0.1),
topK: 1000, // Should be enough for all vectors
includeMetadata: true,
- includeValues: false
+ includeValues: false,
});
if (!allVectors.matches || allVectors.matches.length === 0) {
@@ -41,7 +41,7 @@ async function cleanupDuplicates() {
// Group vectors by issue number
const vectorsByIssue = new Map();
-
+
for (const vector of allVectors.matches) {
const issueNumber = vector.metadata?.issue_number;
if (issueNumber) {
@@ -60,30 +60,32 @@ async function cleanupDuplicates() {
for (const [issueNumber, vectors] of vectorsByIssue) {
console.log(`\n๐ Issue #${issueNumber}: ${vectors.length} vector(s)`);
-
+
if (vectors.length === 1) {
      console.log(`   ✅ No duplicates for issue #${issueNumber}`);
vectorsToKeep.push(vectors[0]);
} else {
- console.log(` ๐ Found ${vectors.length} vectors, selecting which to keep...`);
-
+ console.log(
+ ` ๐ Found ${vectors.length} vectors, selecting which to keep...`,
+ );
+
// Sort vectors: prefer non-timestamped IDs (clean format)
vectors.sort((a, b) => {
const aHasTimestamp = /-\d{13}/.test(a.id);
const bHasTimestamp = /-\d{13}/.test(b.id);
-
+
if (!aHasTimestamp && bHasTimestamp) return -1; // a comes first (keep a)
- if (aHasTimestamp && !bHasTimestamp) return 1; // b comes first (keep b)
+ if (aHasTimestamp && !bHasTimestamp) return 1; // b comes first (keep b)
return a.id.localeCompare(b.id); // alphabetical if both same type
});
-
+
const toKeep = vectors[0];
const toDelete = vectors.slice(1);
-
+
      console.log(`   ✅ Keeping: ${toKeep.id}`);
vectorsToKeep.push(toKeep);
-
- toDelete.forEach(v => {
+
+ toDelete.forEach((v) => {
console.log(` ๐๏ธ Deleting: ${v.id}`);
vectorsToDelete.push(v.id);
});
@@ -100,10 +102,12 @@ async function cleanupDuplicates() {
}
// Confirm before deletion
- console.log(`\nโ ๏ธ About to delete ${vectorsToDelete.length} duplicate vectors.`);
+ console.log(
+ `\nโ ๏ธ About to delete ${vectorsToDelete.length} duplicate vectors.`,
+ );
console.log("๐ Vectors to delete:");
- vectorsToDelete.forEach(id => console.log(` - ${id}`));
-
+ vectorsToDelete.forEach((id) => console.log(` - ${id}`));
+
// Delete in batches
console.log("\n๐งน Starting cleanup...");
const batchSize = 100; // Pinecone delete limit
@@ -111,38 +115,45 @@ async function cleanupDuplicates() {
for (let i = 0; i < vectorsToDelete.length; i += batchSize) {
const batch = vectorsToDelete.slice(i, i + batchSize);
-
+
try {
await index.deleteMany(batch);
deleted += batch.length;
- console.log(` ๐๏ธ Deleted batch: ${batch.length} vectors (total: ${deleted}/${vectorsToDelete.length})`);
-
+ console.log(
+ ` ๐๏ธ Deleted batch: ${batch.length} vectors (total: ${deleted}/${vectorsToDelete.length})`,
+ );
+
// Add delay between batches
await delay(1000);
} catch (error) {
console.error(` โ Failed to delete batch:`, error.message);
- console.error(` Batch IDs: ${batch.join(', ')}`);
+ console.error(` Batch IDs: ${batch.join(", ")}`);
}
}
console.log(`\n๐ Cleanup completed!`);
-  console.log(`✅ Deleted: ${deleted}/${vectorsToDelete.length} duplicate vectors`);
- console.log(`๐ Remaining vectors: ${vectorsToKeep.length} (one per issue)`);
-
+ console.log(
+    `✅ Deleted: ${deleted}/${vectorsToDelete.length} duplicate vectors`,
+ );
+ console.log(
+ `๐ Remaining vectors: ${vectorsToKeep.length} (one per issue)`,
+ );
+
// Verify cleanup
console.log("\n๐ Verifying cleanup...");
await delay(2000); // Wait for Pinecone to sync
-
+
const finalStats = await index.describeIndexStats();
const finalCount = finalStats.totalRecordCount || 0;
console.log(`๐ Final vector count: ${finalCount}`);
-
+
if (finalCount === vectorsToKeep.length) {
      console.log("✅ Cleanup verification successful!");
} else {
- console.log(`โ ๏ธ Expected ${vectorsToKeep.length} vectors, but found ${finalCount}`);
+ console.log(
+ `โ ๏ธ Expected ${vectorsToKeep.length} vectors, but found ${finalCount}`,
+ );
}
-
} catch (error) {
console.error("โ Error during cleanup:", error);
process.exit(1);
@@ -151,7 +162,7 @@ async function cleanupDuplicates() {
// Handle command line arguments
const args = process.argv.slice(2);
-if (args.includes('--help') || args.includes('-h')) {
+if (args.includes("--help") || args.includes("-h")) {
console.log(`
๐ Usage: node scripts/cleanup-duplicates.js
@@ -171,7 +182,7 @@ if (args.includes('--help') || args.includes('-h')) {
}
// Confirmation prompt for safety
-if (!args.includes('--force')) {
+if (!args.includes("--force")) {
console.log(`
โ ๏ธ WARNING: This script will delete duplicate vectors from your Pinecone index!
@@ -189,7 +200,7 @@ To see help: node scripts/cleanup-duplicates.js --help
}
// Run the cleanup
-cleanupDuplicates().catch(error => {
+cleanupDuplicates().catch((error) => {
console.error("๐ฅ Script failed:", error);
process.exit(1);
-});
\ No newline at end of file
+});
diff --git a/.github/scripts/cleanup-specific-issue.js b/.github/scripts/cleanup-specific-issue.js
index 44b1ef3..1ea1c22 100644
--- a/.github/scripts/cleanup-specific-issue.js
+++ b/.github/scripts/cleanup-specific-issue.js
@@ -17,7 +17,9 @@ async function deleteIssueVectors() {
if (!ISSUE_TO_DELETE) {
console.error("โ Please provide an issue number:");
- console.error(" Usage: ISSUE_NUMBER=6 node scripts/cleanup-specific-issue.js");
+ console.error(
+ " Usage: ISSUE_NUMBER=6 node scripts/cleanup-specific-issue.js",
+ );
console.error(" Or: node scripts/cleanup-specific-issue.js 6");
process.exit(1);
}
@@ -27,10 +29,12 @@ async function deleteIssueVectors() {
    console.log("✅ Connected to Pinecone index");
// Find all vectors for this issue
- console.log(`๐ Searching for vectors related to issue #${ISSUE_TO_DELETE}...`);
-
+ console.log(
+ `๐ Searching for vectors related to issue #${ISSUE_TO_DELETE}...`,
+ );
+
const vectorsToDelete = [];
-
+
try {
// First, try using metadata filter
const queryResponse = await index.query({
@@ -39,40 +43,48 @@ async function deleteIssueVectors() {
includeValues: false,
includeMetadata: true,
filter: {
- issue_number: parseInt(ISSUE_TO_DELETE)
- }
+ issue_number: parseInt(ISSUE_TO_DELETE),
+ },
});
if (queryResponse.matches && queryResponse.matches.length > 0) {
for (const match of queryResponse.matches) {
vectorsToDelete.push(match.id);
console.log(` ๐ Found vector via filter: ${match.id}`);
- console.log(` Metadata:`, JSON.stringify(match.metadata, null, 2));
+ console.log(
+ ` Metadata:`,
+ JSON.stringify(match.metadata, null, 2),
+ );
}
} else {
- console.log(" ๐ Filter query returned no results, trying list approach...");
-
+ console.log(
+ " ๐ Filter query returned no results, trying list approach...",
+ );
+
// Fallback: List all vectors and filter
let paginationToken = null;
-
+
do {
const listOptions = { limit: 100 };
if (paginationToken) {
listOptions.paginationToken = paginationToken;
}
-
+
const listResponse = await index.listPaginated(listOptions);
-
+
if (listResponse.vectors) {
for (const vector of listResponse.vectors) {
if (vector.metadata?.issue_number === parseInt(ISSUE_TO_DELETE)) {
vectorsToDelete.push(vector.id);
console.log(` ๐ Found vector via list: ${vector.id}`);
- console.log(` Metadata:`, JSON.stringify(vector.metadata, null, 2));
+ console.log(
+ ` Metadata:`,
+ JSON.stringify(vector.metadata, null, 2),
+ );
}
}
}
-
+
paginationToken = listResponse.pagination?.next;
} while (paginationToken);
}
@@ -81,10 +93,14 @@ async function deleteIssueVectors() {
throw searchError;
}
- console.log(`\nFound ${vectorsToDelete.length} vector(s) to delete for Issue #${ISSUE_TO_DELETE}`);
+ console.log(
+ `\nFound ${vectorsToDelete.length} vector(s) to delete for Issue #${ISSUE_TO_DELETE}`,
+ );
if (vectorsToDelete.length === 0) {
- console.log(`โน๏ธ No vectors found for Issue #${ISSUE_TO_DELETE}. Nothing to delete.`);
+ console.log(
+ `โน๏ธ No vectors found for Issue #${ISSUE_TO_DELETE}. Nothing to delete.`,
+ );
return;
}
@@ -96,23 +112,28 @@ async function deleteIssueVectors() {
// Confirm deletion
console.log(`\nโ ๏ธ This action cannot be undone!`);
-
+
// Delete the vectors
console.log(`\n๐๏ธ Deleting ${vectorsToDelete.length} vector(s)...`);
-
+
try {
await index.deleteMany(vectorsToDelete);
-    console.log(`✅ Successfully deleted ${vectorsToDelete.length} vector(s) for Issue #${ISSUE_TO_DELETE}`);
+    console.log(
+      `✅ Successfully deleted ${vectorsToDelete.length} vector(s) for Issue #${ISSUE_TO_DELETE}`,
+ );
} catch (deleteError) {
console.error(`โ Error deleting vectors:`, deleteError.message);
throw deleteError;
}
console.log(`\n=== Cleanup Summary ===`);
- console.log(`๐ Issue #${ISSUE_TO_DELETE} vectors deleted: ${vectorsToDelete.length}`);
+ console.log(
+ `๐ Issue #${ISSUE_TO_DELETE} vectors deleted: ${vectorsToDelete.length}`,
+ );
    console.log(`✅ Database cleanup completed successfully`);
- console.log(`\n๐ฏ You can now edit Issue #${ISSUE_TO_DELETE} to test the update functionality!`);
-
+ console.log(
+ `\n๐ฏ You can now edit Issue #${ISSUE_TO_DELETE} to test the update functionality!`,
+ );
} catch (error) {
console.error("โ Error during cleanup:", error);
process.exit(1);
@@ -121,7 +142,7 @@ async function deleteIssueVectors() {
// Handle command line arguments
const args = process.argv.slice(2);
-if (args.includes('--help') || args.includes('-h')) {
+if (args.includes("--help") || args.includes("-h")) {
console.log(`
๐ Usage:
ISSUE_NUMBER=6 node scripts/cleanup-specific-issue.js
@@ -142,7 +163,7 @@ if (args.includes('--help') || args.includes('-h')) {
}
// Run the cleanup script
-deleteIssueVectors().catch(error => {
+deleteIssueVectors().catch((error) => {
console.error("๐ฅ Cleanup script failed:", error);
process.exit(1);
-});
\ No newline at end of file
+});
diff --git a/.github/scripts/clear-all-vectors.js b/.github/scripts/clear-all-vectors.js
index 52d2db4..ebb8889 100644
--- a/.github/scripts/clear-all-vectors.js
+++ b/.github/scripts/clear-all-vectors.js
@@ -12,7 +12,7 @@ const indexName = process.env.PINECONE_INDEX;
// Add delay to respect API rate limits
function delay(ms) {
- return new Promise(resolve => setTimeout(resolve, ms));
+ return new Promise((resolve) => setTimeout(resolve, ms));
}
async function clearAllVectors() {
@@ -28,7 +28,7 @@ async function clearAllVectors() {
console.log("๐ Getting current index statistics...");
const initialStats = await index.describeIndexStats();
const totalVectors = initialStats.totalRecordCount || 0;
-
+
console.log(`๐ Current state:`);
console.log(` - Total vectors: ${totalVectors}`);
console.log(` - Index dimension: ${initialStats.dimension}`);
@@ -48,70 +48,78 @@ async function clearAllVectors() {
console.log("\n๐งน Attempting to clear entire namespace...");
await index.deleteAll();
      console.log("✅ Successfully cleared entire namespace");
-
+
// Wait for operation to complete
await delay(5000);
-
} catch (deleteAllError) {
console.log("โ ๏ธ deleteAll() failed, trying alternative method...");
console.error("Error:", deleteAllError.message);
-
+
// Method 2: Get all vectors and delete them in batches
console.log("๐ Fetching all vectors for batch deletion...");
-
+
const allVectors = await index.query({
vector: Array(1024).fill(0.1),
topK: 10000, // Max limit
includeMetadata: false,
- includeValues: false
+ includeValues: false,
});
if (allVectors.matches && allVectors.matches.length > 0) {
console.log(`๐ Found ${allVectors.matches.length} vectors to delete`);
-
+
// Delete in batches
const batchSize = 1000;
let deleted = 0;
-
+
for (let i = 0; i < allVectors.matches.length; i += batchSize) {
const batch = allVectors.matches.slice(i, i + batchSize);
- const batchIds = batch.map(v => v.id);
-
+ const batchIds = batch.map((v) => v.id);
+
try {
await index.deleteMany(batchIds);
deleted += batch.length;
- console.log(` ๐๏ธ Deleted batch: ${batch.length} vectors (total: ${deleted}/${allVectors.matches.length})`);
-
+ console.log(
+ ` ๐๏ธ Deleted batch: ${batch.length} vectors (total: ${deleted}/${allVectors.matches.length})`,
+ );
+
await delay(1000);
} catch (batchError) {
console.error(` โ Failed to delete batch:`, batchError.message);
}
}
-
-    console.log(`✅ Batch deletion completed: ${deleted}/${allVectors.matches.length} vectors`);
+
+ console.log(
+      `✅ Batch deletion completed: ${deleted}/${allVectors.matches.length} vectors`,
+ );
}
}
// Verify the clearing
console.log("\n๐ Verifying index is cleared...");
await delay(3000); // Wait for Pinecone to sync
-
+
const finalStats = await index.describeIndexStats();
const remainingVectors = finalStats.totalRecordCount || 0;
-
+
console.log(`\n๐ Final Results:`);
console.log(` - Initial vectors: ${totalVectors}`);
console.log(` - Remaining vectors: ${remainingVectors}`);
console.log(` - Vectors cleared: ${totalVectors - remainingVectors}`);
-
+
if (remainingVectors === 0) {
console.log("๐ SUCCESS: All vectors have been cleared from the index!");
- console.log("๐ก You can now repopulate with fresh data using the populate script.");
+ console.log(
+ "๐ก You can now repopulate with fresh data using the populate script.",
+ );
} else {
- console.log(`โ ๏ธ WARNING: ${remainingVectors} vectors still remain in the index.`);
- console.log("This might be due to Pinecone sync delays. Check again in a few minutes.");
+ console.log(
+ `โ ๏ธ WARNING: ${remainingVectors} vectors still remain in the index.`,
+ );
+ console.log(
+ "This might be due to Pinecone sync delays. Check again in a few minutes.",
+ );
}
-
} catch (error) {
console.error("โ Error during clearing:", error);
process.exit(1);
@@ -121,7 +129,7 @@ async function clearAllVectors() {
// Handle command line arguments
const args = process.argv.slice(2);
-if (args.includes('--help') || args.includes('-h')) {
+if (args.includes("--help") || args.includes("-h")) {
console.log(`
๐ Usage: node scripts/clear-all-vectors.js --force
@@ -143,7 +151,7 @@ if (args.includes('--help') || args.includes('-h')) {
}
// Safety check - require --force flag
-if (!args.includes('--force')) {
+if (!args.includes("--force")) {
console.log(`
๐จ DANGER: This script will delete ALL vectors from your Pinecone index!
@@ -187,11 +195,11 @@ setTimeout(() => {
setTimeout(() => {
console.log("1...");
setTimeout(() => {
- clearAllVectors().catch(error => {
+ clearAllVectors().catch((error) => {
console.error("๐ฅ Script failed:", error);
process.exit(1);
});
}, 1000);
}, 1000);
}, 1000);
-}, 1000);
\ No newline at end of file
+}, 1000);
diff --git a/.github/scripts/debug-pinecone.js b/.github/scripts/debug-pinecone.js
index 9d13067..08b44c1 100644
--- a/.github/scripts/debug-pinecone.js
+++ b/.github/scripts/debug-pinecone.js
@@ -16,12 +16,12 @@ async function debugPinecone() {
try {
const index = pinecone.Index(indexName);
-
+
// Get index stats
console.log("\n1. Index Statistics:");
const stats = await index.describeIndexStats();
console.log("Full stats object:", JSON.stringify(stats, null, 2));
-
+
// Try to query some vectors
console.log("\n2. Sample Query (first 10 vectors):");
try {
@@ -29,9 +29,9 @@ async function debugPinecone() {
vector: Array(1024).fill(0.1),
topK: 10,
includeMetadata: true,
- includeValues: false
+ includeValues: false,
});
-
+
console.log(`Found ${queryResult.matches?.length || 0} vectors`);
if (queryResult.matches && queryResult.matches.length > 0) {
queryResult.matches.forEach((match, i) => {
@@ -44,28 +44,40 @@ async function debugPinecone() {
} catch (queryError) {
console.error("Query failed:", queryError.message);
}
-
+
// Try specific fetch for known IDs
console.log("\n3. Testing specific ID fetch:");
- const testIds = ['issue-1', 'issue-3', 'issue-4', 'issue-5', 'issue-6', 'issue-7', 'issue-8'];
-
+ const testIds = [
+ "issue-1",
+ "issue-3",
+ "issue-4",
+ "issue-5",
+ "issue-6",
+ "issue-7",
+ "issue-8",
+ ];
+
try {
const fetchResult = await index.fetch(testIds);
- console.log(`Fetch result keys: ${Object.keys(fetchResult.vectors || {}).join(', ')}`);
-
+ console.log(
+ `Fetch result keys: ${Object.keys(fetchResult.vectors || {}).join(", ")}`,
+ );
+
if (fetchResult.vectors) {
Object.entries(fetchResult.vectors).forEach(([id, vector]) => {
console.log(` Found: ${id}`);
if (vector.metadata) {
console.log(` Issue #: ${vector.metadata.issue_number}`);
- console.log(` Title: ${vector.metadata.title?.substring(0, 50)}...`);
+ console.log(
+ ` Title: ${vector.metadata.title?.substring(0, 50)}...`,
+ );
}
});
}
} catch (fetchError) {
console.error("Fetch failed:", fetchError.message);
}
-
+
// Try with different ID patterns (in case they have timestamps)
console.log("\n4. Checking for timestamped IDs:");
try {
@@ -73,13 +85,15 @@ async function debugPinecone() {
vector: Array(1024).fill(0.1),
topK: 100,
includeMetadata: true,
- includeValues: false
+ includeValues: false,
});
-
+
if (allQuery.matches && allQuery.matches.length > 0) {
console.log("All vector IDs found:");
- allQuery.matches.forEach(match => {
- console.log(` - ${match.id} (issue #${match.metadata?.issue_number || 'unknown'})`);
+ allQuery.matches.forEach((match) => {
+ console.log(
+ ` - ${match.id} (issue #${match.metadata?.issue_number || "unknown"})`,
+ );
});
} else {
console.log("No vectors found in query");
@@ -87,10 +101,9 @@ async function debugPinecone() {
} catch (allQueryError) {
console.error("All query failed:", allQueryError.message);
}
-
} catch (error) {
console.error("Debug failed:", error);
}
}
-debugPinecone().catch(console.error);
\ No newline at end of file
+debugPinecone().catch(console.error);
diff --git a/.github/scripts/populate-existing-issues.js b/.github/scripts/populate-existing-issues.js
index 281a028..0457e65 100644
--- a/.github/scripts/populate-existing-issues.js
+++ b/.github/scripts/populate-existing-issues.js
@@ -7,8 +7,10 @@ import dotenv from "dotenv";
dotenv.config();
const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
-const OWNER = process.env.GITHUB_REPOSITORY?.split("/")[0] || process.env.GITHUB_OWNER;
-const REPO = process.env.GITHUB_REPOSITORY?.split("/")[1] || process.env.GITHUB_REPO;
+const OWNER =
+ process.env.GITHUB_REPOSITORY?.split("/")[0] || process.env.GITHUB_OWNER;
+const REPO =
+ process.env.GITHUB_REPOSITORY?.split("/")[1] || process.env.GITHUB_REPO;
// Initialize Pinecone client
const pinecone = new Pinecone({
@@ -27,25 +29,25 @@ async function generateEmbedding(text) {
headers: {
"Content-Type": "application/json",
},
- body: JSON.stringify({
+ body: JSON.stringify({
model: "models/text-embedding-004",
- content: { parts: [{ text: text }] }
+ content: { parts: [{ text: text }] },
}),
- }
+ },
);
-
+
const data = await response.json();
-
+
if (data.error) {
console.error("Gemini API Error:", data.error);
return Array(1024).fill(0.01);
}
-
+
if (!data.embedding || !data.embedding.values) {
console.error("Invalid embedding response:", data);
return Array(1024).fill(0.01);
}
-
+
// Pad or truncate to match Pinecone index dimension (1024)
let embedding = data.embedding.values;
if (embedding.length < 1024) {
@@ -53,7 +55,7 @@ async function generateEmbedding(text) {
} else if (embedding.length > 1024) {
embedding = embedding.slice(0, 1024);
}
-
+
return embedding;
} catch (error) {
console.error("Error generating embedding:", error);
@@ -63,7 +65,7 @@ async function generateEmbedding(text) {
// Add delay to respect API rate limits
function delay(ms) {
- return new Promise(resolve => setTimeout(resolve, ms));
+ return new Promise((resolve) => setTimeout(resolve, ms));
}
async function populateExistingIssues() {
@@ -72,7 +74,9 @@ async function populateExistingIssues() {
console.log(`Pinecone Index: ${indexName}`);
if (!OWNER || !REPO) {
- console.error("โ Repository owner and name must be specified via GITHUB_REPOSITORY or GITHUB_OWNER/GITHUB_REPO environment variables");
+ console.error(
+ "โ Repository owner and name must be specified via GITHUB_REPOSITORY or GITHUB_OWNER/GITHUB_REPO environment variables",
+ );
process.exit(1);
}
@@ -83,29 +87,29 @@ async function populateExistingIssues() {
// Fetch all open issues from the repository
console.log("๐ฅ Fetching open issues from GitHub...");
-
+
let allIssues = [];
let page = 1;
const perPage = 100;
-
+
while (true) {
const { data: issues } = await octokit.issues.listForRepo({
owner: OWNER,
repo: REPO,
- state: 'open',
+ state: "open",
per_page: perPage,
page: page,
});
-
+
if (issues.length === 0) break;
-
+
// Filter out pull requests (they show up in issues API)
- const actualIssues = issues.filter(issue => !issue.pull_request);
+ const actualIssues = issues.filter((issue) => !issue.pull_request);
allIssues = allIssues.concat(actualIssues);
-
+
console.log(` ๐ Fetched page ${page} - ${actualIssues.length} issues`);
page++;
-
+
// Add delay to respect GitHub API rate limits
await delay(1000);
}
@@ -119,82 +123,94 @@ async function populateExistingIssues() {
// Check if issues already exist in Pinecone to avoid duplicates
console.log("๐ Checking for existing issues in Pinecone...");
-
+
const existingIssueNumbers = new Set();
-
+
try {
// Get index statistics first
const stats = await index.describeIndexStats();
const totalVectors = stats.totalRecordCount || 0;
console.log(` ๐ Index contains ${totalVectors} total vectors`);
-
+
if (totalVectors === 0) {
console.log(" โน๏ธ Index is empty, all issues will be processed");
} else {
// Use multiple approaches to check for existing vectors
console.log(" ๐ Checking for existing issue vectors...");
-
+
// Method 1: Try to query with a sample vector to get some existing vectors
try {
console.log(" ๐ Sampling existing vectors...");
const sampleQuery = await index.query({
vector: Array(1024).fill(0.1),
topK: Math.min(100, totalVectors),
- includeMetadata: true
+ includeMetadata: true,
});
-
+
if (sampleQuery.matches && sampleQuery.matches.length > 0) {
- console.log(` ๐ Found ${sampleQuery.matches.length} sample vectors`);
+ console.log(
+ ` ๐ Found ${sampleQuery.matches.length} sample vectors`,
+ );
for (const match of sampleQuery.matches) {
if (match.metadata?.issue_number) {
existingIssueNumbers.add(match.metadata.issue_number);
- console.log(` โ Found existing issue #${match.metadata.issue_number}`);
+ console.log(
+ ` โ Found existing issue #${match.metadata.issue_number}`,
+ );
}
}
}
} catch (sampleError) {
- console.log(" โ ๏ธ Sample query failed, trying direct fetch approach");
+ console.log(
+ " โ ๏ธ Sample query failed, trying direct fetch approach",
+ );
}
-
+
// Method 2: Try to fetch vectors by their expected IDs
console.log(" ๐ Checking by direct ID lookup...");
for (let i = 0; i < allIssues.length; i += 10) {
const batch = allIssues.slice(i, i + 10);
-
+
// Try to fetch vectors by their expected IDs
- const vectorIds = batch.map(issue => `issue-${issue.number}`);
-
+ const vectorIds = batch.map((issue) => `issue-${issue.number}`);
+
try {
const fetchResult = await index.fetch(vectorIds);
-
+
if (fetchResult.vectors) {
- Object.keys(fetchResult.vectors).forEach(vectorId => {
+ Object.keys(fetchResult.vectors).forEach((vectorId) => {
const match = vectorId.match(/issue-(\d+)/);
if (match) {
const issueNum = parseInt(match[1]);
if (!existingIssueNumbers.has(issueNum)) {
existingIssueNumbers.add(issueNum);
- console.log(` โ Found existing issue #${issueNum} by ID`);
+ console.log(
+ ` โ Found existing issue #${issueNum} by ID`,
+ );
}
}
});
}
} catch (fetchError) {
// If fetch fails, try metadata filter queries for this batch
- console.log(` โ ๏ธ Fetch failed for batch, trying metadata queries...`);
+ console.log(
+ ` โ ๏ธ Fetch failed for batch, trying metadata queries...`,
+ );
for (const issue of batch) {
try {
const queryResult = await index.query({
vector: Array(1024).fill(0.1),
filter: { issue_number: { $eq: issue.number } },
topK: 1,
- includeMetadata: true
+ includeMetadata: true,
});
-
+
if (queryResult.matches && queryResult.matches.length > 0) {
if (!existingIssueNumbers.has(issue.number)) {
existingIssueNumbers.add(issue.number);
- console.log(` โ Found existing issue #${issue.number} by query`);
+ console.log(
+ ` โ Found existing issue #${issue.number} by query`,
+ );
}
}
} catch (queryError) {
@@ -202,7 +218,7 @@ async function populateExistingIssues() {
}
}
}
-
+
// Small delay between batches
await delay(300);
}
@@ -211,22 +227,34 @@ async function populateExistingIssues() {
console.log(" โ ๏ธ Error checking existing issues:", error.message);
console.log(" ๐ Will process all issues to be safe");
}
-
- console.log(`Found ${existingIssueNumbers.size} existing issues in Pinecone`);
+
+ console.log(
+ `Found ${existingIssueNumbers.size} existing issues in Pinecone`,
+ );
// Filter out issues that already exist in Pinecone
- const newIssues = allIssues.filter(issue => !existingIssueNumbers.has(issue.number));
+ const newIssues = allIssues.filter(
+ (issue) => !existingIssueNumbers.has(issue.number),
+ );
const skippedCount = allIssues.length - newIssues.length;
-
+
console.log(`๐ ${newIssues.length} new issues to process`);
- console.log(`โญ๏ธ ${skippedCount} issues skipped (already exist in Pinecone)`);
-
+ console.log(
+ `โญ๏ธ ${skippedCount} issues skipped (already exist in Pinecone)`,
+ );
+
if (skippedCount > 0) {
- console.log(` Skipped issues: ${Array.from(existingIssueNumbers).sort((a, b) => a - b).join(', ')}`);
+ console.log(
+ ` Skipped issues: ${Array.from(existingIssueNumbers)
+ .sort((a, b) => a - b)
+ .join(", ")}`,
+ );
}
if (newIssues.length === 0) {
-    console.log("✅ All open issues are already in Pinecone. Nothing to add.");
+    console.log(
+      "✅ All open issues are already in Pinecone. Nothing to add.",
+ );
return;
}
@@ -238,20 +266,24 @@ async function populateExistingIssues() {
for (let i = 0; i < newIssues.length; i += batchSize) {
const batch = newIssues.slice(i, i + batchSize);
- console.log(`\n๐ฆ Processing batch ${Math.floor(i / batchSize) + 1}/${Math.ceil(newIssues.length / batchSize)}`);
+ console.log(
+ `\n๐ฆ Processing batch ${Math.floor(i / batchSize) + 1}/${Math.ceil(newIssues.length / batchSize)}`,
+ );
const vectors = [];
for (const issue of batch) {
try {
- console.log(` ๐ Processing issue #${issue.number}: "${issue.title.substring(0, 50)}..."`);
-
+ console.log(
+ ` ๐ Processing issue #${issue.number}: "${issue.title.substring(0, 50)}..."`,
+ );
+
// Combine title and body for embedding
const issueText = `${issue.title} ${issue.body || ""}`;
-
+
// Generate embedding
const embedding = await generateEmbedding(issueText);
-
+
// Prepare vector for Pinecone - use consistent ID format
const vectorId = `issue-${issue.number}`;
vectors.push({
@@ -265,19 +297,21 @@ async function populateExistingIssues() {
updated_at: issue.updated_at,
url: issue.html_url,
state: issue.state,
- labels: issue.labels?.map(label => label.name).join(', ') || '',
- author: issue.user?.login || 'unknown'
- }
+ labels: issue.labels?.map((label) => label.name).join(", ") || "",
+ author: issue.user?.login || "unknown",
+ },
});
processed++;
        console.log(`   ✅ Issue #${issue.number} prepared`);
-
+
// Add delay between API calls to respect rate limits
await delay(500);
-
} catch (error) {
- console.error(` โ Failed to process issue #${issue.number}:`, error.message);
+ console.error(
+ ` โ Failed to process issue #${issue.number}:`,
+ error.message,
+ );
failed++;
}
}
@@ -285,14 +319,23 @@ async function populateExistingIssues() {
// Upsert batch to Pinecone
if (vectors.length > 0) {
try {
- console.log(` ๐ Upserting ${vectors.length} vectors to Pinecone...`);
+ console.log(
+ ` ๐ Upserting ${vectors.length} vectors to Pinecone...`,
+ );
await index.upsert(vectors);
successful += vectors.length;
-        console.log(`   ✅ Batch upserted to Pinecone: ${vectors.length} vectors`);
+        console.log(
+          `   ✅ Batch upserted to Pinecone: ${vectors.length} vectors`,
+ );
} catch (error) {
- console.error(` โ Failed to upsert batch to Pinecone:`, error.message);
+ console.error(
+ ` โ Failed to upsert batch to Pinecone:`,
+ error.message,
+ );
// Log which specific issues failed
- console.error(` Failed issues: ${vectors.map(v => v.metadata.issue_number).join(', ')}`);
+ console.error(
+ ` Failed issues: ${vectors.map((v) => v.metadata.issue_number).join(", ")}`,
+ );
failed += vectors.length;
}
}
@@ -305,13 +348,18 @@ async function populateExistingIssues() {
console.log(`๐ Total issues processed: ${processed}`);
  console.log(`✅ Successfully added to Pinecone: ${successful}`);
console.log(`โ Failed: ${failed}`);
- console.log(`๐ Success rate: ${((successful / processed) * 100).toFixed(1)}%`);
-
+ console.log(
+ `๐ Success rate: ${((successful / processed) * 100).toFixed(1)}%`,
+ );
+
if (successful > 0) {
- console.log(`\n๐ Successfully populated Pinecone with ${successful} issue embeddings!`);
- console.log(`๐ค Your duplicate detection bot is now ready to work with existing issues.`);
+ console.log(
+ `\n๐ Successfully populated Pinecone with ${successful} issue embeddings!`,
+ );
+ console.log(
+ `๐ค Your duplicate detection bot is now ready to work with existing issues.`,
+ );
}
-
} catch (error) {
console.error("โ Error during population:", error);
process.exit(1);
@@ -320,7 +368,7 @@ async function populateExistingIssues() {
// Handle command line arguments
const args = process.argv.slice(2);
-if (args.includes('--help') || args.includes('-h')) {
+if (args.includes("--help") || args.includes("-h")) {
console.log(`
๐ Usage: node scripts/populate-existing-issues.js
@@ -343,7 +391,7 @@ if (args.includes('--help') || args.includes('-h')) {
}
// Run the population script
-populateExistingIssues().catch(error => {
+populateExistingIssues().catch((error) => {
console.error("๐ฅ Script failed:", error);
process.exit(1);
-});
\ No newline at end of file
+});
diff --git a/.github/scripts/updateLeaderboard.js b/.github/scripts/updateLeaderboard.js
index c45977b..c24b92e 100644
--- a/.github/scripts/updateLeaderboard.js
+++ b/.github/scripts/updateLeaderboard.js
@@ -16,8 +16,8 @@ const LEVEL_POINTS = {
function normalizeLabel(label) {
return label
.toLowerCase()
- .replace(/\s+/g, "") // remove spaces
- .replace(/-/g, ""); // remove dashes
+ .replace(/\s+/g, "") // remove spaces
+ .replace(/-/g, ""); // remove dashes
}
async function fetchAllPRs() {
@@ -30,11 +30,11 @@ async function fetchAllPRs() {
per_page: 100,
});
- prs.forEach(pr => {
+ prs.forEach((pr) => {
if (!pr.merged_at) return; // only merged PRs
- const labels = pr.labels.map(l => normalizeLabel(l.name));
- let level = labels.find(l => l.startsWith("level"));
+ const labels = pr.labels.map((l) => normalizeLabel(l.name));
+ let level = labels.find((l) => l.startsWith("level"));
if (!level) return;
const points = LEVEL_POINTS[level] || 0;
@@ -62,12 +62,7 @@ async function updateGoogleSheet(contributors) {
let rows = [["Username", "PR Numbers", "Levels", "Total Points"]];
for (let [user, data] of Object.entries(contributors)) {
- rows.push([
- user,
- data.prs.join(", "),
- data.levels.join(", "),
- data.total,
- ]);
+ rows.push([user, data.prs.join(", "), data.levels.join(", "), data.total]);
}
await sheets.spreadsheets.values.update({
diff --git a/.github/scripts/validate-apis.js b/.github/scripts/validate-apis.js
index 0dc1b5d..58f6a6d 100644
--- a/.github/scripts/validate-apis.js
+++ b/.github/scripts/validate-apis.js
@@ -18,17 +18,17 @@ async function validatePinecone() {
const pinecone = new Pinecone({ apiKey: process.env.PINECONE_API_KEY });
const index = pinecone.Index(process.env.PINECONE_INDEX);
-
+
const stats = await index.describeIndexStats();
-
-    console.log('✅ Pinecone connection successful');
+
+    console.log("✅ Pinecone connection successful");
console.log(`๐ Index: ${process.env.PINECONE_INDEX}`);
console.log(`๐ Total vectors: ${stats.totalRecordCount || 0}`);
console.log(`๐ Dimension: ${stats.dimension}`);
-
+
return { success: true, stats };
} catch (error) {
- console.error('โ Pinecone validation failed:', error.message);
+ console.error("โ Pinecone validation failed:", error.message);
return { success: false, error: error.message };
}
}
@@ -40,20 +40,28 @@ async function validateGitHub() {
}
const octokit = new Octokit({ auth: process.env.GITHUB_TOKEN });
-
+
// Test with current repository or fallback
- const owner = process.env.GITHUB_REPOSITORY?.split("/")[0] || process.env.GITHUB_OWNER || "seroski-ai";
- const repo = process.env.GITHUB_REPOSITORY?.split("/")[1] || process.env.GITHUB_REPO || "seroski-dupbot";
-
+ const owner =
+ process.env.GITHUB_REPOSITORY?.split("/")[0] ||
+ process.env.GITHUB_OWNER ||
+ "seroski-ai";
+ const repo =
+ process.env.GITHUB_REPOSITORY?.split("/")[1] ||
+ process.env.GITHUB_REPO ||
+ "seroski-dupbot";
+
const result = await octokit.repos.get({ owner, repo });
-
-    console.log('✅ GitHub connection successful');
+
+    console.log("✅ GitHub connection successful");
console.log(`๐ Repository: ${result.data.full_name}`);
- console.log(`๐ Access: ${result.data.permissions?.admin ? 'Admin' : result.data.permissions?.push ? 'Write' : 'Read'}`);
-
+ console.log(
+ `๐ Access: ${result.data.permissions?.admin ? "Admin" : result.data.permissions?.push ? "Write" : "Read"}`,
+ );
+
return { success: true, repo: result.data };
} catch (error) {
- console.error('โ GitHub validation failed:', error.message);
+ console.error("โ GitHub validation failed:", error.message);
return { success: false, error: error.message };
}
}
@@ -69,57 +77,61 @@ async function validateGemini() {
{
method: "POST",
headers: { "Content-Type": "application/json" },
- body: JSON.stringify({
+ body: JSON.stringify({
model: "models/text-embedding-004",
- content: { parts: [{ text: "connection test" }] }
+ content: { parts: [{ text: "connection test" }] },
}),
- }
+ },
);
if (!response.ok) {
const errorData = await response.text();
- throw new Error(`HTTP ${response.status}: ${response.statusText} - ${errorData}`);
+ throw new Error(
+ `HTTP ${response.status}: ${response.statusText} - ${errorData}`,
+ );
}
const data = await response.json();
-
+
if (data.error) {
- throw new Error(data.error.message || 'Unknown Gemini API error');
+ throw new Error(data.error.message || "Unknown Gemini API error");
}
-    console.log('✅ Gemini API connection successful');
- console.log('๐ง Model: text-embedding-004');
- console.log(`๐ Embedding dimension: ${data.embedding?.values?.length || 'unknown'}`);
-
+    console.log("✅ Gemini API connection successful");
+ console.log("๐ง Model: text-embedding-004");
+ console.log(
+ `๐ Embedding dimension: ${data.embedding?.values?.length || "unknown"}`,
+ );
+
return { success: true, embedding: data.embedding };
} catch (error) {
- console.error('โ Gemini validation failed:', error.message);
+ console.error("โ Gemini validation failed:", error.message);
return { success: false, error: error.message };
}
}
async function validateAllConnections() {
- console.log('๐ === API Connection Validation ===\n');
-
+ console.log("๐ === API Connection Validation ===\n");
+
const results = {
pinecone: await validatePinecone(),
github: await validateGitHub(),
- gemini: await validateGemini()
+ gemini: await validateGemini(),
};
-
- console.log('\n๐ === Validation Summary ===');
-
- const successful = Object.values(results).filter(r => r.success).length;
+
+ console.log("\n๐ === Validation Summary ===");
+
+ const successful = Object.values(results).filter((r) => r.success).length;
const total = Object.keys(results).length;
-
+
  console.log(`✅ Successful: ${successful}/${total}`);
console.log(`โ Failed: ${total - successful}/${total}`);
-
+
if (successful === total) {
- console.log('\n๐ All API connections are working correctly!');
+ console.log("\n๐ All API connections are working correctly!");
process.exit(0);
} else {
- console.log('\nโ ๏ธ Some API connections failed. Check the errors above.');
+ console.log("\nโ ๏ธ Some API connections failed. Check the errors above.");
process.exit(1);
}
}
@@ -128,7 +140,7 @@ async function validateAllConnections() {
const args = process.argv.slice(2);
const service = args[0];
-if (args.includes('--help') || args.includes('-h')) {
+if (args.includes("--help") || args.includes("-h")) {
console.log(`
๐ Usage: node scripts/validate-apis.js [service]
@@ -155,21 +167,21 @@ if (args.includes('--help') || args.includes('-h')) {
// Run specific service or all
switch (service) {
- case 'pinecone':
- validatePinecone().then(result => {
+ case "pinecone":
+ validatePinecone().then((result) => {
process.exit(result.success ? 0 : 1);
});
break;
- case 'github':
- validateGitHub().then(result => {
+ case "github":
+ validateGitHub().then((result) => {
process.exit(result.success ? 0 : 1);
});
break;
- case 'gemini':
- validateGemini().then(result => {
+ case "gemini":
+ validateGemini().then((result) => {
process.exit(result.success ? 0 : 1);
});
break;
default:
validateAllConnections();
-}
\ No newline at end of file
+}
diff --git a/.github/workflows/api-validation.yml b/.github/workflows/api-validation.yml
index 04ba934..6f6fa3f 100644
--- a/.github/workflows/api-validation.yml
+++ b/.github/workflows/api-validation.yml
@@ -4,15 +4,15 @@ on:
workflow_dispatch:
inputs:
validation_scope:
- description: 'Which APIs to validate'
+ description: "Which APIs to validate"
required: true
- default: 'all-apis'
+ default: "all-apis"
type: choice
options:
- - 'all-apis'
- - 'pinecone-only'
- - 'github-only'
- - 'gemini-only'
+ - "all-apis"
+ - "pinecone-only"
+ - "github-only"
+ - "gemini-only"
permissions:
issues: read
@@ -21,7 +21,7 @@ permissions:
jobs:
validate-apis:
runs-on: ubuntu-latest
-
+
steps:
- name: Checkout repository
uses: actions/checkout@v3
@@ -81,4 +81,4 @@ jobs:
echo "- **Triggered by:** @${{ github.actor }}" >> $GITHUB_STEP_SUMMARY
echo "- **Timestamp:** $(date -u)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
-          echo "✅ Run this before database operations to ensure API connectivity." >> $GITHUB_STEP_SUMMARY
\ No newline at end of file
+          echo "✅ Run this before database operations to ensure API connectivity." >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/database-operations.yml b/.github/workflows/database-operations.yml
index 901c0fc..5b41bf7 100644
--- a/.github/workflows/database-operations.yml
+++ b/.github/workflows/database-operations.yml
@@ -4,17 +4,17 @@ on:
workflow_dispatch:
inputs:
action:
- description: 'Database operation to perform'
+ description: "Database operation to perform"
required: true
- default: 'populate-issues'
+ default: "populate-issues"
type: choice
options:
- - 'populate-issues'
- - 'cleanup-duplicates'
- - 'debug-database'
- - 'clear-all-vectors'
+ - "populate-issues"
+ - "cleanup-duplicates"
+ - "debug-database"
+ - "clear-all-vectors"
force:
- description: 'Force action (required for destructive operations)'
+ description: "Force action (required for destructive operations)"
required: false
default: false
type: boolean
@@ -26,7 +26,7 @@ permissions:
jobs:
database-operation:
runs-on: ubuntu-latest
-
+
steps:
- name: Checkout repository
uses: actions/checkout@v3
@@ -101,4 +101,4 @@ jobs:
echo "- **Repository:** ${{ github.repository }}" >> $GITHUB_STEP_SUMMARY
echo "- **Timestamp:** $(date -u)" >> $GITHUB_STEP_SUMMARY
echo "" >> $GITHUB_STEP_SUMMARY
- echo "๐ Use 'API Validation' workflow to test connections before database operations." >> $GITHUB_STEP_SUMMARY
\ No newline at end of file
+ echo "๐ Use 'API Validation' workflow to test connections before database operations." >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/duplicate-issue.yml b/.github/workflows/duplicate-issue.yml
index a157bef..f68c970 100644
--- a/.github/workflows/duplicate-issue.yml
+++ b/.github/workflows/duplicate-issue.yml
@@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
issue_number:
- description: 'Issue number to manually check for duplicates'
+ description: "Issue number to manually check for duplicates"
required: true
type: number
diff --git a/.github/workflows/leaderboard.yml b/.github/workflows/leaderboard.yml
index 13b6395..3576107 100644
--- a/.github/workflows/leaderboard.yml
+++ b/.github/workflows/leaderboard.yml
@@ -30,7 +30,6 @@
# run: |
# echo '${{ secrets.GOOGLE_CREDENTIALS }}' > ${{ github.workspace }}/credentials.json
-
# - name: Run leaderboard script
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/ping-render.yml b/.github/workflows/ping-render.yml
index 33058f6..16a12b5 100644
--- a/.github/workflows/ping-render.yml
+++ b/.github/workflows/ping-render.yml
@@ -4,8 +4,8 @@ name: Ping Render App
on:
schedule:
# Every 10 minutes
- - cron: '*/5 * * * *'
- workflow_dispatch:
+ - cron: "*/5 * * * *"
+ workflow_dispatch:
jobs:
ping:
diff --git a/.github/workflows/post_PR_thankyou.yml b/.github/workflows/post_PR_thankyou.yml
index c638b9d..26e5257 100644
--- a/.github/workflows/post_PR_thankyou.yml
+++ b/.github/workflows/post_PR_thankyou.yml
@@ -2,7 +2,7 @@ name: Post-PR Merge Thank You
on:
pull_request_target:
- types: [closed] # Trigger when a PR is closed
+ types: [closed] # Trigger when a PR is closed
permissions:
issues: write
@@ -10,14 +10,14 @@ permissions:
jobs:
post_merge_message:
- if: github.event.pull_request.merged == true # Only run if the PR was merged
+ if: github.event.pull_request.merged == true # Only run if the PR was merged
runs-on: ubuntu-latest
steps:
- name: Post thank you message
uses: actions/github-script@v7
with:
- github-token: ${{ secrets.GITHUB_TOKEN }} # Ensure token is used
+ github-token: ${{ secrets.GITHUB_TOKEN }} # Ensure token is used
script: |
const prNumber = context.payload.pull_request.number;
const owner = context.repo.owner;
diff --git a/.github/workflows/pr_greetings.yml b/.github/workflows/pr_greetings.yml
index d9e5d62..523d928 100644
--- a/.github/workflows/pr_greetings.yml
+++ b/.github/workflows/pr_greetings.yml
@@ -22,4 +22,4 @@ jobs:
repo: context.repo.repo,
body: `๐ Thanks @${context.payload.pull_request.user.login} for raising this PR!
Our team will review it soon โณ โ meanwhile, please make sure your PR follows our contributing guidelines ๐ [Contributing Guidelines](https://github.com/DevSyncx/DevSync/blob/main/CONTRIBUTING.md) ๐`
- })
\ No newline at end of file
+ })
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 017b013..5522c17 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -51,7 +51,6 @@ This Code of Conduct applies within all project spaces, including:
If you observe or experience behavior that violates this Code of Conduct, please report it privately by contacting
๐ง [**Annanya Tiwary**](mailto:anonym.notifys@gmail.com)
-
All reports will be handled with discretion. We are committed to protecting reporters from retaliation.
---
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index a3e1dbc..d975069 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -17,7 +17,6 @@ We welcome all kinds of contributions โ from code improvements and documentati
- [Contributor Guidelines โ Issue & PR Management](#-Contributor-Guidelines)
- [Community Standards](#-Community-Standards)
-
---
## ๐ Getting Started
@@ -28,8 +27,11 @@ We welcome all kinds of contributions โ from code improvements and documentati
```bash
git clone https://github.com/your-username/DevSync.git
cd DevSync
+ ```
+
3. **Setup Frontend**
-```bash
+
+```bash
#move into the frontend directory
cd frontend
#install frontend dependencies
@@ -38,9 +40,9 @@ npm install
npm run dev # Starts frontend on http://localhost:5173
```
-
4. **Setup backend**
-```bash
+
+```bash
#move into the backend directory
cd backend
#install backend dependencies
@@ -49,7 +51,6 @@ npm install
npm run dev # Starts backend on http://localhost:5000
```
-
---
## โจ How to Contribute
@@ -71,6 +72,7 @@ Here are some ways you can contribute:
- Follow naming conventions already used in the codebase.
- For UI components, use the existing **Shadcn UI** structure.
- Keep commits **atomic** and **descriptive**.
+
---
## ๐ Submitting Changes
@@ -88,11 +90,13 @@ Here are some ways you can contribute:
git add .
git commit -m "Add: Meaningful description of your change"
```
+
3. **Push to your fork**:
- ```bash
+```bash
git push origin feature/your-feature-name
```
+
4. Open a Pull Request (PR) to the main branch.
๐ฌ If your PR fixes an issue, mention it with Fixes #issue-number in the PR description.
@@ -100,6 +104,7 @@ git push origin feature/your-feature-name
---
### ๐ Reporting Bugs
+
Please include the following in your bug reports:
- ✅ **Clear and descriptive title** of the bug.
@@ -110,6 +115,7 @@ Please include the following in your bug reports:
- ๐ Include **steps to reproduce** the issue.
### ๐ Requesting Features or Enhancements
+
Want to suggest something cool or improve the UX/UI? Here's what to include:
- ✅ A descriptive **title** and a clear explanation of the idea.
@@ -118,6 +124,7 @@ Want to suggest something cool or improve the UX/UI? Hereโs what to include:
- ๐ If related to backend/API, mention the **endpoints or data flow**.
### ๐ Improving Documentation
+
Spotted unclear, outdated, or missing documentation?
- โ๏ธ Point out the exact **section or file** to update.
@@ -125,23 +132,24 @@ Spotted unclear, outdated, or missing documentation?
- ๐ฌ Ask for clarification in case something is confusing.
### ๐ท๏ธ Labels and Levels
+
All issues are tagged to help contributors choose wisely:
-| Level | Suitable For | Points |
-|---------|---------------------|--------|
-| `L-1` | Beginners | 3 |
-| `L-2` | Intermediate tasks | 7 |
-| `L-3` | Advanced/complex | 10 |
+| Level | Suitable For | Points |
+| ----- | ------------------ | ------ |
+| `L-1` | Beginners | 3 |
+| `L-2` | Intermediate tasks | 7 |
+| `L-3` | Advanced/complex | 10 |
## ๐ Issue Completion Timeline
To ensure smooth progress and timely contributions during GSSoC, we've defined expected completion times for issues based on their difficulty levels:
-| Level | Difficulty |Expected Completion Time |
-|-------|---------------|-------------------------|
-| Level 1 | Beginner | **2-3 days** |
-| Level 2 | Intermediate| **4-6 days** |
-| Level 3 | Advanced | **7-10 days** |
+| Level | Difficulty | Expected Completion Time |
+| ------- | ------------ | ------------------------ |
+| Level 1 | Beginner | **2-3 days** |
+| Level 2 | Intermediate | **4-6 days** |
+| Level 3 | Advanced | **7-10 days** |
## ๐ Contributor Guidelines
diff --git a/README.md b/README.md
index 744653b..cc919e5 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
DevSync
-
+
@@ -30,13 +30,11 @@
# ๐ DevSync โ Developer Productivity Dashboard
-
From pull requests to pomodoros โ DevSync's got you covered.
Stay ahead. Stay synced. Stay **DevSynced**
**DevSync** is a unified productivity tracker for developers. It aggregates your coding activity, daily goals, and contribution metrics from various platforms into a single, elegant dashboard โ designed to help you track growth, stay consistent, and showcase your development journey.
-
## ๐ Table of Contents
- [๐ Overview](#-overview)
@@ -53,42 +51,35 @@ Stay ahead. Stay synced. stay **DevSynced**
- [๐ฉโ๐ป Maintainers](#-maintainers)
- [โญ Support This Project](#-support-this-project)
-
## ๐ Overview
In today's fragmented developer ecosystem, tracking your contributions across multiple platforms can be overwhelming.
**DevSync** simplifies this by:
-- Consolidating your stats, streaks, and growth in one place.
-- Offering visual productivity logs and heatmaps.
-- Letting you manage tasks and goals alongside coding activity.
-
-Whether you're preparing for internships, building a personal brand, or staying accountable โ **DevSync** empowers you with **data-driven insights** at a glance.
-
+- Consolidating your stats, streaks, and growth in one place.
+- Offering visual productivity logs and heatmaps.
+- Letting you manage tasks and goals alongside coding activity.
+Whether you're preparing for internships, building a personal brand, or staying accountable โ **DevSync** empowers you with **data-driven insights** at a glance.
## โจ Features
-- ๐ **Unified Developer Insights** โ Track problems solved, commits, issues, ratings, and more.
-- ๐ฅ **Cross-Platform Heatmaps** โ Visualize your coding streaks and consistency.
-- ๐งฉ **Modular Platform Support** โ Easily add integrations for new coding platforms.
-- ✅ **Task & Goal Tracker** โ Stay on top of daily, weekly, and long-term goals.
-- ๐งพ **Timeline Logs** โ Get auto-generated summaries (daily/weekly/monthly).
-- ๐ชช **Public Shareable Profile (Coming Soon)** โ Showcase your journey to recruiters or peers.
-
-
+- ๐ **Unified Developer Insights** โ Track problems solved, commits, issues, ratings, and more.
+- ๐ฅ **Cross-Platform Heatmaps** โ Visualize your coding streaks and consistency.
+- ๐งฉ **Modular Platform Support** โ Easily add integrations for new coding platforms.
+- ✅ **Task & Goal Tracker** โ Stay on top of daily, weekly, and long-term goals.
+- ๐งพ **Timeline Logs** โ Get auto-generated summaries (daily/weekly/monthly).
+- ๐ชช **Public Shareable Profile (Coming Soon)** โ Showcase your journey to recruiters or peers.
## ๐ Tech Stack
-| Layer | Technology |
-|-------------|-------------------------------------|
-| Frontend | React, Tailwind CSS, ShadCN UI |
-| Backend | Node.js, Express, REST API |
-| Database | MongoDB |
-| Auth | JWT / OAuth |
-| Deployment | Vercel / Render |
-
-
+| Layer | Technology |
+| ---------- | ------------------------------ |
+| Frontend | React, Tailwind CSS, ShadCN UI |
+| Backend | Node.js, Express, REST API |
+| Database | MongoDB |
+| Auth | JWT / OAuth |
+| Deployment | Vercel / Render |
## ๐ธ Screenshots
@@ -96,18 +87,18 @@ Whether you're preparing for internships, building a personal brand, or stayin


-
## ๐ ๏ธ Setup & Installation
-### 1. Fork the Repository
+### 1. Fork the Repository
+
Click the **Fork** button (top-right) to get your copy of the repo.
-### 2. Clone Your Fork
+### 2. Clone Your Fork
```bash
git clone https://github.com/your-username/DevSync.git
cd DevSync
-````
+```
### 3. Setup Frontend
@@ -129,8 +120,6 @@ npm run dev
Your backend runs at ๐ [http://localhost:5000](http://localhost:5000)
-
-
## ๐ Folder Structure
```
@@ -188,25 +177,20 @@ DevSync/
```
-
## ๐ค Contributing
We โค๏ธ contributions!
-* Read the [Contributing Guide](./CONTRIBUTING.md).
-* Check open issues or raise new ones.
-* Submit pull requests with clear descriptions.
+- Read the [Contributing Guide](./CONTRIBUTING.md).
+- Check open issues or raise new ones.
+- Submit pull requests with clear descriptions.
Every contribution counts โ from bug fixes to new features!
-
-
## ๐ License
This project is licensed under the [MIT License](./LICENSE).
-
-
## ๐ Code of Conduct
We enforce a [Code of Conduct](./CODE_OF_CONDUCT.md) to maintain a safe, inclusive, and welcoming environment. Please read it before contributing.
@@ -216,18 +200,16 @@ We enforce a [Code of Conduct](./CODE_OF_CONDUCT.md) to maintain a safe, inclusi
For detailed setup instructions, please follow the full documentation here:
[๐ DevSync Setup Docs](./docs)
-
## ๐ฉโ๐ป Maintainers
-* **Annanya Tiwary** โ [GitHub](https://github.com/Annanyatiwary4)
-
+- **Annanya Tiwary** โ [GitHub](https://github.com/Annanyatiwary4)
## โญ Support This Project
If **DevSync** inspired you:
-* Star โญ the repo on [GitHub](https://github.com/DevSyncx/DevSync)
-* Share it with your friends & community
-* Contribute by fixing issues or adding features
+- Star โญ the repo on [GitHub](https://github.com/DevSyncx/DevSync)
+- Share it with your friends & community
+- Contribute by fixing issues or adding features
Together, let's make developer productivity smarter ๐
diff --git a/backend/config/passport.js b/backend/config/passport.js
index c4c1149..133bff0 100644
--- a/backend/config/passport.js
+++ b/backend/config/passport.js
@@ -3,27 +3,32 @@ const GoogleStrategy = require("passport-google-oauth20").Strategy;
const GitHubStrategy = require("passport-github2").Strategy;
const User = require("../models/User");
-console.log('Initializing Google OAuth strategy...');
-console.log('Callback URL:', process.env.GOOGLE_CALLBACK_URL);
+console.log("Initializing Google OAuth strategy...");
+console.log("Callback URL:", process.env.GOOGLE_CALLBACK_URL);
passport.use(
new GoogleStrategy(
{
clientID: process.env.GOOGLE_CLIENT_ID,
clientSecret: process.env.GOOGLE_CLIENT_SECRET,
- callbackURL: process.env.GOOGLE_CALLBACK_URL || "http://localhost:5000/auth/callback",
+ callbackURL:
+ process.env.GOOGLE_CALLBACK_URL ||
+ "http://localhost:5000/auth/callback",
},
async (accessToken, refreshToken, profile, done) => {
- console.log('Google profile received:', profile);
+ console.log("Google profile received:", profile);
try {
let user = await User.findOne({ googleId: profile.id });
-
+
if (!user) {
- console.log('Creating new user from Google profile');
+ console.log("Creating new user from Google profile");
user = new User({
googleId: profile.id,
name: profile.displayName,
- email: profile.emails && profile.emails[0] ? profile.emails[0].value : null,
+ email:
+ profile.emails && profile.emails[0]
+ ? profile.emails[0].value
+ : null,
});
await user.save();
}
@@ -32,8 +37,8 @@ passport.use(
} catch (err) {
return done(err, null);
}
- }
- )
+ },
+ ),
);
// GitHub OAuth
@@ -43,14 +48,18 @@ if (process.env.GITHUB_CLIENT_ID && process.env.GITHUB_CLIENT_SECRET) {
{
clientID: process.env.GITHUB_CLIENT_ID,
clientSecret: process.env.GITHUB_CLIENT_SECRET,
- callbackURL: process.env.GITHUB_CALLBACK_URL || "http://localhost:5000/auth/github/callback",
+ callbackURL:
+ process.env.GITHUB_CALLBACK_URL ||
+ "http://localhost:5000/auth/github/callback",
scope: ["read:user", "user:email"],
},
async (accessToken, refreshToken, profile, done) => {
try {
let email = null;
if (Array.isArray(profile.emails) && profile.emails.length > 0) {
- email = profile.emails.find(e => e.verified)?.value || profile.emails[0].value;
+ email =
+ profile.emails.find((e) => e.verified)?.value ||
+ profile.emails[0].value;
}
let user = await User.findOne({ githubId: profile.id });
@@ -77,8 +86,8 @@ if (process.env.GITHUB_CLIENT_ID && process.env.GITHUB_CLIENT_SECRET) {
} catch (err) {
return done(err, null);
}
- }
- )
+ },
+ ),
);
}
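
The strategies above hand a user document to `done(...)`, and the OAuth callback routes later in this diff authenticate with `session: true`, which presumes session (de)serialization is registered somewhere. That wiring is not part of this hunk, so the sketch below is only an assumption about what it typically looks like, reusing the same relative `User` require as `config/passport.js`.

```js
// Minimal sketch (assumption, not in this diff): session (de)serialization
// that a Passport setup like the one above normally registers.
const passport = require("passport");
const User = require("../models/User");

// Store only the Mongo _id in the session.
passport.serializeUser((user, done) => done(null, user.id));

// Re-hydrate the full user document on each request.
passport.deserializeUser(async (id, done) => {
  try {
    const user = await User.findById(id);
    done(null, user);
  } catch (err) {
    done(err, null);
  }
});
```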
diff --git a/backend/db/connection.js b/backend/db/connection.js
index 8f6fc83..c9f4bb3 100644
--- a/backend/db/connection.js
+++ b/backend/db/connection.js
@@ -1,10 +1,13 @@
// Initiate connection to MongoDB
-require('dotenv').config();
-const mongoose = require('mongoose');
+require("dotenv").config();
+const mongoose = require("mongoose");
const dburl = process.env.MONGODB_URI;
-mongoose.connect(dburl).then(() => {
+mongoose
+ .connect(dburl)
+ .then(() => {
console.log("Connected to DB Successfully ");
-}).catch((err) => {
+ })
+ .catch((err) => {
console.log(err.message);
-});
\ No newline at end of file
+ });
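
The reformatted promise chain above passes `process.env.MONGODB_URI` straight into `mongoose.connect`, so a missing variable only surfaces as a connection error. A purely illustrative variant of the same module with an explicit guard (not part of the diff):

```js
// Illustrative only: same connection logic, but fail fast when the URI is unset.
require("dotenv").config();
const mongoose = require("mongoose");

const dburl = process.env.MONGODB_URI;
if (!dburl) {
  console.error("MONGODB_URI is not set; check your .env file.");
  process.exit(1);
}

mongoose
  .connect(dburl)
  .then(() => console.log("Connected to DB Successfully"))
  .catch((err) => console.log(err.message));
```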
diff --git a/backend/middleware/auth.js b/backend/middleware/auth.js
index db40e01..a9a8132 100644
--- a/backend/middleware/auth.js
+++ b/backend/middleware/auth.js
@@ -1,16 +1,19 @@
-const jwt = require('jsonwebtoken');
-require('dotenv').config();
+const jwt = require("jsonwebtoken");
+require("dotenv").config();
// Use a fallback JWT secret if env variable is missing
-const JWT_SECRET = process.env.JWT_SECRET || 'devsync_secure_jwt_secret_key_for_authentication';
+const JWT_SECRET =
+ process.env.JWT_SECRET || "devsync_secure_jwt_secret_key_for_authentication";
-module.exports = function(req, res, next) {
+module.exports = function (req, res, next) {
// Get token from header
- const token = req.header('x-auth-token');
+ const token = req.header("x-auth-token");
// Check if no token
if (!token) {
- return res.status(401).json({ errors: [{ msg: 'No token, authorization denied' }] });
+ return res
+ .status(401)
+ .json({ errors: [{ msg: "No token, authorization denied" }] });
}
// Verify token
@@ -19,7 +22,7 @@ module.exports = function(req, res, next) {
req.user = decoded.user;
next();
} catch (err) {
- console.error('Token verification error:', err.message);
- res.status(401).json({ errors: [{ msg: 'Token is not valid' }] });
+ console.error("Token verification error:", err.message);
+ res.status(401).json({ errors: [{ msg: "Token is not valid" }] });
}
-};
\ No newline at end of file
+};
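
For context, a minimal sketch of how this middleware is typically consumed: mounted in front of a protected route, with the client sending the JWT in the `x-auth-token` header it reads. The require path, port, and use of the global `fetch` (Node 18+ or a browser) are assumptions for the sketch.

```js
const express = require("express");
const auth = require("./backend/middleware/auth"); // path assumed for the sketch

const app = express();

// Any route behind `auth` sees the decoded payload on req.user.
app.get("/api/private", auth, (req, res) => {
  res.json({ userId: req.user.id });
});

// Client side: send the token in the header the middleware expects.
async function getPrivate(token) {
  const res = await fetch("http://localhost:5000/api/private", {
    headers: { "x-auth-token": token },
  });
  if (!res.ok) throw new Error(`Request failed: ${res.status}`);
  return res.json();
}
```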
diff --git a/backend/middleware/rateLimit/authLimiterMiddleware.js b/backend/middleware/rateLimit/authLimiterMiddleware.js
index db01791..2b1d3c9 100644
--- a/backend/middleware/rateLimit/authLimiterMiddleware.js
+++ b/backend/middleware/rateLimit/authLimiterMiddleware.js
@@ -1,7 +1,7 @@
-const { RateLimiterMemory } = require('rate-limiter-flexible');
+const { RateLimiterMemory } = require("rate-limiter-flexible");
exports.authLimiter = new RateLimiterMemory({
- points: 20, // Increased from 5 to 20 attempts
- duration: 60, // Per minute
- blockDuration: 60 * 2, // Reduced block time to 2 minutes
-})
\ No newline at end of file
+ points: 20, // Increased from 5 to 20 attempts
+ duration: 60, // Per minute
+ blockDuration: 60 * 2, // Reduced block time to 2 minutes
+});
diff --git a/backend/middleware/rateLimit/generalLimiterMiddleware.js b/backend/middleware/rateLimit/generalLimiterMiddleware.js
index d58dca0..53ef033 100644
--- a/backend/middleware/rateLimit/generalLimiterMiddleware.js
+++ b/backend/middleware/rateLimit/generalLimiterMiddleware.js
@@ -1,7 +1,7 @@
-const { RateLimiterMemory } = require('rate-limiter-flexible');
+const { RateLimiterMemory } = require("rate-limiter-flexible");
exports.generalLimiter = new RateLimiterMemory({
- points: 100,
- duration: 60,
- blockDuration: 60 * 2,
-})
\ No newline at end of file
+ points: 100,
+ duration: 60,
+ blockDuration: 60 * 2,
+});
diff --git a/backend/middleware/rateLimit/index.js b/backend/middleware/rateLimit/index.js
index 69f7eb5..7841151 100644
--- a/backend/middleware/rateLimit/index.js
+++ b/backend/middleware/rateLimit/index.js
@@ -1,15 +1,22 @@
-const { authLimiter } = require('./authLimiterMiddleware');
-const { generalLimiter } = require('./generalLimiterMiddleware');
-
+const { authLimiter } = require("./authLimiterMiddleware");
+const { generalLimiter } = require("./generalLimiterMiddleware");
exports.authMiddleware = (req, res, next) => {
- authLimiter.consume(req.ip).then(() => next()).catch(() => {
- res.status(429).json({ message: "Too many login/signup attempts, please try later." });
+ authLimiter
+ .consume(req.ip)
+ .then(() => next())
+ .catch(() => {
+ res
+ .status(429)
+ .json({ message: "Too many login/signup attempts, please try later." });
});
-}
+};
exports.generalMiddleware = (req, res, next) => {
- generalLimiter.consume(req.ip).then(() => next()).catch(() => {
- res.status(429).json({ message: "Too many requests, please slow down." });
- })
-}
\ No newline at end of file
+ generalLimiter
+ .consume(req.ip)
+ .then(() => next())
+ .catch(() => {
+ res.status(429).json({ message: "Too many requests, please slow down." });
+ });
+};
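
A short sketch of wiring the two exported limiters into an Express app, in the spirit of what `backend/server.js` does later in this diff; the require path and port here are illustrative assumptions.

```js
const express = require("express");
const {
  authMiddleware,
  generalMiddleware,
} = require("./backend/middleware/rateLimit"); // directory require resolves to index.js; path assumed

const app = express();
app.use(express.json());

// Stricter limiter (20 requests/min, 2-minute block) in front of login/signup.
app.post("/api/auth/login", authMiddleware, (req, res) => {
  res.json({ msg: "would authenticate here" });
});

// Looser limiter (100 requests/min) for ordinary API traffic.
app.get("/api/ping", generalMiddleware, (req, res) => {
  res.json({ ok: true });
});

app.listen(3000, () => console.log("sketch listening on 3000"));
```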
diff --git a/backend/models/ContactMessage.js b/backend/models/ContactMessage.js
index 9ea8c69..3b53385 100644
--- a/backend/models/ContactMessage.js
+++ b/backend/models/ContactMessage.js
@@ -19,7 +19,7 @@ const contactMessageSchema = new mongoose.Schema(
minlength: 10,
},
},
- { timestamps: true }
+ { timestamps: true },
);
const ContactMessage = mongoose.model("ContactMessage", contactMessageSchema);
diff --git a/backend/models/Feedback.js b/backend/models/Feedback.js
index 0faaa6b..017a305 100644
--- a/backend/models/Feedback.js
+++ b/backend/models/Feedback.js
@@ -1,35 +1,35 @@
-const mongoose = require('mongoose');
+const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const FeedbackSchema = new Schema({
userId: {
type: String,
- required: true
+ required: true,
},
rating: {
type: Number,
required: true,
min: 1,
- max: 5
+ max: 5,
},
comment: {
type: String,
required: true,
- minlength: 10
+ minlength: 10,
},
category: {
type: String,
- default: 'other',
- enum: ['ui', 'features', 'bugs', 'suggestions', 'other']
+ default: "other",
+ enum: ["ui", "features", "bugs", "suggestions", "other"],
},
isAnonymous: {
type: Boolean,
- default: false
+ default: false,
},
date: {
type: Date,
- default: Date.now
- }
+ default: Date.now,
+ },
});
-module.exports = mongoose.model('Feedback', FeedbackSchema);
\ No newline at end of file
+module.exports = mongoose.model("Feedback", FeedbackSchema);
diff --git a/backend/models/Leetcode.js b/backend/models/Leetcode.js
index 513fcf3..27a4dcc 100644
--- a/backend/models/Leetcode.js
+++ b/backend/models/Leetcode.js
@@ -56,8 +56,8 @@ const leetcodeSchema = new mongoose.Schema(
],
lastUpdated: { type: Date, default: Date.now },
},
-
- { timestamps: true }
+
+ { timestamps: true },
);
const LeetCode = mongoose.model("LeetCode", leetcodeSchema);
diff --git a/backend/models/Task.js b/backend/models/Task.js
index dfc56ba..602e776 100644
--- a/backend/models/Task.js
+++ b/backend/models/Task.js
@@ -1,16 +1,22 @@
-const mongoose = require('mongoose');
+const mongoose = require("mongoose");
const TaskSchema = new mongoose.Schema(
{
title: { type: String, required: true, trim: true },
- description: { type: String, default: '' },
- status: { type: String, enum: ['pending', 'completed'], default: 'pending' },
+ description: { type: String, default: "" },
+ status: {
+ type: String,
+ enum: ["pending", "completed"],
+ default: "pending",
+ },
deadline: { type: Date },
- userId: { type: mongoose.Schema.Types.ObjectId, ref: 'User', required: true },
+ userId: {
+ type: mongoose.Schema.Types.ObjectId,
+ ref: "User",
+ required: true,
+ },
},
- { timestamps: true }
+ { timestamps: true },
);
-module.exports = mongoose.model('Task', TaskSchema);
-
-
+module.exports = mongoose.model("Task", TaskSchema);
diff --git a/backend/models/User.js b/backend/models/User.js
index 1f4d540..cf12c53 100644
--- a/backend/models/User.js
+++ b/backend/models/User.js
@@ -1,4 +1,4 @@
-const mongoose = require('mongoose');
+const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const UserSchema = new Schema({
@@ -7,7 +7,7 @@ const UserSchema = new Schema({
unique: true,
sparse: true,
},
- googleId: {
+ googleId: {
type: String,
unique: true,
sparse: true, // multiple nulls allowed
@@ -19,12 +19,12 @@ const UserSchema = new Schema({
},
name: {
type: String,
- required: true,
+ required: true,
},
email: {
type: String,
required: true,
- unique: true
+ unique: true,
},
isEmailVerified: {
type: Boolean,
@@ -33,22 +33,22 @@ const UserSchema = new Schema({
emailVerificationExpires: Date,
password: {
type: String,
- required: function () {
+ required: function () {
return !this.googleId && !this.githubId;
- },
+ },
},
avatar: {
type: String,
- default: '/uploads/avatars/default-avatar.png'
+ default: "/uploads/avatars/default-avatar.png",
},
bio: {
- type: String
+ type: String,
},
location: {
- type: String
+ type: String,
},
skills: {
- type: [String]
+ type: [String],
},
socialLinks: {
github: String,
@@ -60,44 +60,44 @@ const UserSchema = new Schema({
hackerrank: String,
leetcode: String,
codeforces: String,
- hackerearth: String
+ hackerearth: String,
},
projects: [
{
name: {
type: String,
- required: true
+ required: true,
},
description: {
type: String,
- required: true
+ required: true,
},
link: {
- type: String
+ type: String,
},
date: {
type: Date,
- default: Date.now
- }
- }
+ default: Date.now,
+ },
+ },
],
// ✅ New fields for dashboard
streak: {
type: Number,
- default: 0
+ default: 0,
},
timeSpent: {
type: String,
- default: "0 minutes"
+ default: "0 minutes",
},
activity: {
type: [Object], // e.g. [{ date: '2025-08-27', count: 3 }]
- default: []
+ default: [],
},
goals: {
type: [String],
- default: []
+ default: [],
},
// ✅ Fields for forgot/reset password
@@ -106,8 +106,8 @@ const UserSchema = new Schema({
date: {
type: Date,
- default: Date.now
- }
+ default: Date.now,
+ },
});
-module.exports = mongoose.model('User', UserSchema);
+module.exports = mongoose.model("User", UserSchema);
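
The reformatted `password.required` function is the behavioural core of this schema: users with a `googleId` or `githubId` validate without a password, local users do not. A small illustrative check, assuming the require path below; the exact error message text may vary by Mongoose version.

```js
// Illustrative only: exercising the conditional `required` on `password`.
const User = require("./backend/models/User"); // path assumed for the sketch

async function demo() {
  // OAuth user: no password needed because googleId is set.
  const oauthUser = new User({
    googleId: "1234567890",
    name: "OAuth Person",
    email: "oauth@example.com",
  });
  await oauthUser.validate(); // resolves

  // Local user: validation fails without a password.
  const localUser = new User({ name: "Local Person", email: "local@example.com" });
  try {
    await localUser.validate();
  } catch (err) {
    console.log(err.errors.password.message); // e.g. "Path `password` is required."
  }
}
```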
diff --git a/backend/routes/auth.js b/backend/routes/auth.js
index 88cecae..4ad9f1e 100644
--- a/backend/routes/auth.js
+++ b/backend/routes/auth.js
@@ -9,7 +9,13 @@ const crypto = require("crypto");
require("dotenv").config();
const passport = require("passport");
const { sendVerificationEmail } = require("../services/emailService");
-const { generateVerificationCode, generateJWT, formatUserResponse, setVerificationToken, handleVerificationEmail } = require("../utils/emailVerificationHelpers")
+const {
+ generateVerificationCode,
+ generateJWT,
+ formatUserResponse,
+ setVerificationToken,
+ handleVerificationEmail,
+} = require("../utils/emailVerificationHelpers");
// Helper function to generate avatar URL from email or name
const generateAvatarUrl = (email, name) => {
@@ -23,7 +29,7 @@ const generateAvatarUrl = (email, name) => {
// DiceBear (modern styled avatars)
const diceBearStyle = "micah"; // Options: avataaars, bottts, initials, micah, miniavs, etc.
const diceBearUrl = `https://api.dicebear.com/6.x/${diceBearStyle}/svg?seed=${encodeURIComponent(
- identifier
+ identifier,
)}`;
return diceBearUrl;
@@ -35,47 +41,49 @@ const JWT_SECRET =
router.get(
"/google",
- passport.authenticate("google", { scope: ["profile", "email"] })
+ passport.authenticate("google", { scope: ["profile", "email"] }),
);
// Handle callback from Google
router.get(
"/callback",
(req, res, next) => {
- console.log('OAuth callback received. Starting authentication...');
- console.log('Query params:', req.query);
+ console.log("OAuth callback received. Starting authentication...");
+ console.log("Query params:", req.query);
next();
},
passport.authenticate("google", {
// Redirect failures to the frontend login for better UX
failureRedirect: `${process.env.CLIENT_URL}/login`,
- failureMessage: true, // Enable failure messages
+ failureMessage: true, // Enable failure messages
session: true,
}),
async (req, res, next) => {
try {
- console.log('OAuth successful, user:', req.user);
+ console.log("OAuth successful, user:", req.user);
// Issue JWT and redirect to frontend with token as query param
// Frontend reads ?token=... on /dashboard and stores it
const token = await generateJWT(req.user.id);
const redirectUrl = `${process.env.CLIENT_URL}/dashboard?token=${encodeURIComponent(token)}`;
return res.redirect(redirectUrl);
} catch (err) {
- console.error('JWT generation failed after OAuth:', err);
- return res.redirect(`${process.env.CLIENT_URL}/login?error=oauth_token_failed`);
+ console.error("JWT generation failed after OAuth:", err);
+ return res.redirect(
+ `${process.env.CLIENT_URL}/login?error=oauth_token_failed`,
+ );
}
},
// Error handler for the authentication
(err, req, res, next) => {
- console.error('OAuth error:', err);
+ console.error("OAuth error:", err);
res.redirect(`${process.env.CLIENT_URL}/login?error=oauth_failed`);
- }
+ },
);
// GitHub OAuth
router.get(
"/github",
- passport.authenticate("github", { scope: ["read:user", "user:email"] })
+ passport.authenticate("github", { scope: ["read:user", "user:email"] }),
);
router.get(
@@ -91,10 +99,12 @@ router.get(
const redirectUrl = `${process.env.CLIENT_URL}/dashboard?token=${encodeURIComponent(token)}`;
return res.redirect(redirectUrl);
} catch (err) {
- console.error('JWT generation failed after GitHub OAuth:', err);
- return res.redirect(`${process.env.CLIENT_URL}/login?error=github_oauth_token_failed`);
+ console.error("JWT generation failed after GitHub OAuth:", err);
+ return res.redirect(
+ `${process.env.CLIENT_URL}/login?error=github_oauth_token_failed`,
+ );
}
- }
+ },
);
// @route POST api/auth/register
@@ -133,7 +143,11 @@ router.post(
needsVerification: true,
});
} else {
- return res.status(500).json({ errors: [{ msg: "User already exists. Please Sign in!!" }] });
+ return res
+ .status(500)
+ .json({
+ errors: [{ msg: "User already exists. Please Sign in!!" }],
+ });
}
}
@@ -166,16 +180,14 @@ router.post(
needsVerification: true,
email: user.email,
});
-
} catch (err) {
console.error(err.message);
- if(err.message === 'Invalid Email ID')
- {
+ if (err.message === "Invalid Email ID") {
return res.status(400).json({ errors: [{ msg: "Invalid Email ID" }] });
}
return res.status(500).json({ errors: [{ msg: "Server error" }] });
}
- }
+ },
);
// @route POST api/auth/forgot-password
@@ -201,7 +213,7 @@ router.post("/forgot-password", async (req, res) => {
await sendVerificationEmail(
user.email,
"Password Reset Request",
- `You requested a password reset. Click here to reset your password.
`
+ `You requested a password reset. Click here to reset your password.
`,
);
res.json({ message: "Password reset link sent to your email." });
@@ -211,7 +223,6 @@ router.post("/forgot-password", async (req, res) => {
}
});
-
// @route POST api/auth/reset-password/:token
// @desc Reset password using token
// @access Public
@@ -247,7 +258,6 @@ router.post("/reset-password/:token", async (req, res) => {
}
});
-
// @route POST api/auth/verify-email
// @desc Verify user email with code
// @access Public
@@ -307,7 +317,6 @@ router.post("/verify-email", async (req, res) => {
console.error("JWT generation error:", jwtError);
res.status(500).json({ errors: [{ msg: "Error generating token" }] });
}
-
} catch (err) {
console.error("Email verification error:", err.message);
res
@@ -353,7 +362,6 @@ router.post("/resend-verification", async (req, res) => {
],
});
}
-
} catch (err) {
console.error("Resend verification error:", err.message);
res.status(500).json({ errors: [{ msg: "Server error during resend" }] });
@@ -383,8 +391,10 @@ router.post(
if (!user) {
return res
.status(400)
- .json({ errors: [{ msg: "User not found. Please Sign up first!!" }] });
- }
+ .json({
+ errors: [{ msg: "User not found. Please Sign up first!!" }],
+ });
+ }
// Check password
const isMatch = await bcrypt.compare(password, user.password);
@@ -419,7 +429,7 @@ router.post(
console.error(err.message);
res.status(500).json({ errors: [{ msg: "Server error" }] });
}
- }
+ },
);
// @route GET api/auth
@@ -446,4 +456,4 @@ router.get("/me", (req, res) => {
}
});
-module.exports = router;
\ No newline at end of file
+module.exports = router;
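
The callback handlers above redirect to `${CLIENT_URL}/dashboard?token=...`, and the comments say the frontend reads and stores that token. That client code is not part of this diff; the sketch below is one plausible browser-side counterpart, with the `localStorage` key name and URL cleanup as assumptions.

```js
// Browser-side sketch: capture ?token=... on /dashboard after the OAuth redirect.
function captureOAuthToken() {
  const params = new URLSearchParams(window.location.search);
  const token = params.get("token");
  if (!token) return null;

  localStorage.setItem("token", token); // key name is an assumption
  params.delete("token");
  const query = params.toString();
  // Remove the token from the visible URL without reloading the page.
  window.history.replaceState(
    {},
    "",
    window.location.pathname + (query ? `?${query}` : ""),
  );
  return token;
}
```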
diff --git a/backend/routes/feedback.js b/backend/routes/feedback.js
index 00f3121..3e0cd1a 100644
--- a/backend/routes/feedback.js
+++ b/backend/routes/feedback.js
@@ -1,106 +1,121 @@
-const express = require('express');
+const express = require("express");
const router = express.Router();
-const auth = require('../middleware/auth');
-const Feedback = require('../models/Feedback');
+const auth = require("../middleware/auth");
+const Feedback = require("../models/Feedback");
// @route POST api/feedback
// @desc Submit user feedback
// @access Private
-router.post('/', auth, async (req, res) => {
+router.post("/", auth, async (req, res) => {
try {
const { rating, comment, category, isAnonymous } = req.body;
-
+
// Validate the data
if (!rating || rating < 1 || rating > 5) {
- return res.status(400).json({ message: 'Please provide a valid rating between 1 and 5' });
+ return res
+ .status(400)
+ .json({ message: "Please provide a valid rating between 1 and 5" });
}
-
+
if (!comment || comment.trim().length < 10) {
- return res.status(400).json({ message: 'Please provide feedback with at least 10 characters' });
+ return res
+ .status(400)
+ .json({
+ message: "Please provide feedback with at least 10 characters",
+ });
}
-
+
// Create a new feedback instance
const newFeedback = new Feedback({
userId: req.user.id,
rating,
comment,
- category: category || 'other',
- isAnonymous: isAnonymous || false
+ category: category || "other",
+ isAnonymous: isAnonymous || false,
});
-
+
// Save the feedback to the database
await newFeedback.save();
-
- res.json({ success: true, message: 'Feedback submitted successfully' });
+
+ res.json({ success: true, message: "Feedback submitted successfully" });
} catch (err) {
- console.error('Error submitting feedback:', err.message);
- res.status(500).json({ message: 'Server error' });
+ console.error("Error submitting feedback:", err.message);
+ res.status(500).json({ message: "Server error" });
}
});
// @route POST api/feedback/guest
// @desc Submit guest (unauthenticated) feedback
// @access Public
-router.post('/guest', async (req, res) => {
+router.post("/guest", async (req, res) => {
try {
const { rating, comment, category, isAnonymous = true } = req.body;
-
+
// Validate the data
if (!rating || rating < 1 || rating > 5) {
- return res.status(400).json({ message: 'Please provide a valid rating between 1 and 5' });
+ return res
+ .status(400)
+ .json({ message: "Please provide a valid rating between 1 and 5" });
}
-
+
if (!comment || comment.trim().length < 10) {
- return res.status(400).json({ message: 'Please provide feedback with at least 10 characters' });
+ return res
+ .status(400)
+ .json({
+ message: "Please provide feedback with at least 10 characters",
+ });
}
-
+
// Create a new feedback instance for guest user
const newFeedback = new Feedback({
userId: "guest",
rating,
comment,
- category: category || 'other',
- isAnonymous: true // Always anonymous for guests
+ category: category || "other",
+ isAnonymous: true, // Always anonymous for guests
});
-
+
// Save the feedback to the database
await newFeedback.save();
-
- res.json({ success: true, message: 'Guest feedback submitted successfully' });
+
+ res.json({
+ success: true,
+ message: "Guest feedback submitted successfully",
+ });
} catch (err) {
- console.error('Error submitting guest feedback:', err.message);
- res.status(500).json({ message: 'Server error' });
+ console.error("Error submitting guest feedback:", err.message);
+ res.status(500).json({ message: "Server error" });
}
});
// @route GET api/feedback
// @desc Get all feedback (for admin/community page)
// @access Public
-router.get('/', async (req, res) => {
+router.get("/", async (req, res) => {
try {
console.log("Feedback GET request received");
-
+
// Get feedback sorted by date (newest first)
try {
const feedbackList = await Feedback.find()
.sort({ date: -1 })
.select(
// Don't include user ID if feedback is anonymous
- '-__v ' + (req.query.includePrivate === 'true' ? '' : '-userId')
+ "-__v " + (req.query.includePrivate === "true" ? "" : "-userId"),
);
-
+
// Process feedback for public display
- const processedFeedback = feedbackList.map(feedback => {
+ const processedFeedback = feedbackList.map((feedback) => {
const feedbackObj = feedback.toObject();
-
+
// If feedback is anonymous, remove any identifiable information
if (feedbackObj.isAnonymous && !req.query.includePrivate) {
- feedbackObj.userId = 'anonymous';
+ feedbackObj.userId = "anonymous";
}
-
+
return feedbackObj;
});
-
+
return res.json(processedFeedback);
} catch (dbError) {
console.error("Database error:", dbError);
@@ -108,9 +123,9 @@ router.get('/', async (req, res) => {
return res.json([]);
}
} catch (err) {
- console.error('Error getting feedback:', err.message);
- res.status(500).json({ message: 'Server error' });
+ console.error("Error getting feedback:", err.message);
+ res.status(500).json({ message: "Server error" });
}
});
-module.exports = router;
\ No newline at end of file
+module.exports = router;
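
A quick usage sketch for the public guest endpoint above (base URL and a `fetch`-capable runtime are assumptions); note the route rejects ratings outside 1-5 and comments shorter than 10 characters.

```js
// Sketch: submitting guest feedback to POST /api/feedback/guest.
async function sendGuestFeedback() {
  const res = await fetch("http://localhost:5000/api/feedback/guest", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      rating: 4,
      comment: "The dashboard heatmap is really handy for tracking streaks.",
      category: "features",
    }),
  });
  const data = await res.json();
  console.log(data); // { success: true, message: "Guest feedback submitted successfully" }
}
```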
diff --git a/backend/routes/github.route.js b/backend/routes/github.route.js
index 290dd7f..50e4ea8 100644
--- a/backend/routes/github.route.js
+++ b/backend/routes/github.route.js
@@ -1,7 +1,7 @@
// routes/github.route.js
const express = require("express");
const fetch = (...args) =>
-import("node-fetch").then(({ default: fetch }) => fetch(...args));
+ import("node-fetch").then(({ default: fetch }) => fetch(...args));
const router = express.Router();
// Helper to run GitHub GraphQL queries
@@ -139,4 +139,4 @@ router.get("/:username", async (req, res) => {
}
});
-module.exports = router;
\ No newline at end of file
+module.exports = router;
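
The re-indented wrapper at the top of this file is the usual trick for calling the ESM-only `node-fetch` v3 from CommonJS: a lazy dynamic `import()` per call, with Node caching the module after the first load. A standalone illustration of the same pattern:

```js
// Lazy ESM import from CommonJS: node-fetch v3 has no require()-able build.
const fetch = (...args) =>
  import("node-fetch").then(({ default: fetch }) => fetch(...args));

async function getRepo(owner, repo) {
  // The first call awaits the dynamic import; later calls reuse the cached module.
  const res = await fetch(`https://api.github.com/repos/${owner}/${repo}`);
  if (!res.ok) throw new Error(`GitHub API error: ${res.status}`);
  return res.json();
}
```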
diff --git a/backend/routes/profile.js b/backend/routes/profile.js
index 07ff0db..59a8179 100644
--- a/backend/routes/profile.js
+++ b/backend/routes/profile.js
@@ -1,349 +1,354 @@
-const express = require('express');
+const express = require("express");
const router = express.Router();
-const { check, validationResult } = require('express-validator');
-const auth = require('../middleware/auth');
-const multer = require('multer');
-const path = require('path');
-const User = require('../models/User');
-const fs = require('fs');
-const crypto = require('crypto');
-const LeetCode = require("../models/Leetcode")
+const { check, validationResult } = require("express-validator");
+const auth = require("../middleware/auth");
+const multer = require("multer");
+const path = require("path");
+const User = require("../models/User");
+const fs = require("fs");
+const crypto = require("crypto");
+const LeetCode = require("../models/Leetcode");
// Helper function to generate avatar URL from email or name
const generateAvatarUrl = (email, name) => {
// Use email for consistent avatar, or fallback to name
- const identifier = email || name || 'user';
- const md5Hash = crypto.createHash('md5').update(identifier.toLowerCase().trim()).digest('hex');
-
+ const identifier = email || name || "user";
+ const md5Hash = crypto
+ .createHash("md5")
+ .update(identifier.toLowerCase().trim())
+ .digest("hex");
+
// Choose one of these services:
// 1. Gravatar
// const gravatarUrl = `https://www.gravatar.com/avatar/${md5Hash}?d=identicon&s=400`;
-
+
// 2. DiceBear (more modern styled avatars)
- const diceBearStyle = 'micah'; // Options: avataaars, bottts, initials, micah, miniavs, etc.
+ const diceBearStyle = "micah"; // Options: avataaars, bottts, initials, micah, miniavs, etc.
const diceBearUrl = `https://api.dicebear.com/6.x/${diceBearStyle}/svg?seed=${encodeURIComponent(identifier)}`;
-
+
// 3. UI Avatars (text based)
- const uiAvatarsUrl = `https://ui-avatars.com/api/?name=${encodeURIComponent(name || 'User')}&background=random&size=128`;
-
+ const uiAvatarsUrl = `https://ui-avatars.com/api/?name=${encodeURIComponent(name || "User")}&background=random&size=128`;
+
// Return your preferred avatar service
return diceBearUrl;
};
// Set up multer for file uploads
const storage = multer.diskStorage({
- destination: function(req, file, cb) {
- const uploadDir = 'uploads/avatars';
+ destination: function (req, file, cb) {
+ const uploadDir = "uploads/avatars";
if (!fs.existsSync(uploadDir)) {
fs.mkdirSync(uploadDir, { recursive: true });
}
cb(null, uploadDir);
},
- filename: function(req, file, cb) {
+ filename: function (req, file, cb) {
cb(null, `${Date.now()}-${file.originalname}`);
- }
+ },
});
-const upload = multer({
+const upload = multer({
storage: storage,
limits: { fileSize: 2000000 }, // 2MB limit
- fileFilter: function(req, file, cb) {
+ fileFilter: function (req, file, cb) {
const filetypes = /jpeg|jpg|png/;
- const extname = filetypes.test(path.extname(file.originalname).toLowerCase());
+ const extname = filetypes.test(
+ path.extname(file.originalname).toLowerCase(),
+ );
const mimetype = filetypes.test(file.mimetype);
if (mimetype && extname) {
return cb(null, true);
} else {
- cb('Error: Images Only!');
+ cb("Error: Images Only!");
}
- }
+ },
});
// @route GET api/profile
// @desc Get current user's profile
// @access Private
-router.get('/', auth, async (req, res) => {
+router.get("/", auth, async (req, res) => {
try {
- const user = await User.findById(req.user.id).select('-password');
+ const user = await User.findById(req.user.id).select("-password");
if (!user) {
- return res.status(400).json({ errors: [{ msg: 'User not found' }] });
+ return res.status(400).json({ errors: [{ msg: "User not found" }] });
}
res.json(user);
} catch (err) {
console.error(err.message);
- res.status(500).json({ errors: [{ msg: 'Server Error' }] });
+ res.status(500).json({ errors: [{ msg: "Server Error" }] });
}
});
// @route POST api/profile/avatar
// @desc Upload user avatar
// @access Private
-router.post('/avatar', auth, upload.single('avatar'), async (req, res) => {
+router.post("/avatar", auth, upload.single("avatar"), async (req, res) => {
try {
- const user = await User.findById(req.user.id).select('-password');
+ const user = await User.findById(req.user.id).select("-password");
if (!user) {
- return res.status(404).json({ errors: [{ msg: 'User not found' }] });
+ return res.status(404).json({ errors: [{ msg: "User not found" }] });
}
// If a file was uploaded, use that
if (req.file) {
// Delete old avatar if it's stored locally and not the default
- if (user.avatar && user.avatar.startsWith('/uploads/')) {
- const oldAvatarPath = path.join(__dirname, '..', user.avatar);
+ if (user.avatar && user.avatar.startsWith("/uploads/")) {
+ const oldAvatarPath = path.join(__dirname, "..", user.avatar);
if (fs.existsSync(oldAvatarPath)) {
fs.unlinkSync(oldAvatarPath);
}
}
// Update user with new avatar path
- const avatarPath = `/${req.file.path.replace(/\\/g, '/')}`;
+ const avatarPath = `/${req.file.path.replace(/\\/g, "/")}`;
user.avatar = avatarPath;
- }
+ }
// Otherwise generate an avatar from dicebear or similar service
else {
const newAvatar = generateAvatarUrl(user.email, user.name);
user.avatar = newAvatar;
}
-
+
await user.save();
res.json({ avatar: user.avatar });
} catch (err) {
console.error(err.message);
- res.status(500).json({ errors: [{ msg: 'Server Error' }] });
+ res.status(500).json({ errors: [{ msg: "Server Error" }] });
}
});
// @route PUT api/profile
// @desc Update user profile
// @access Private
-router.put('/', [
- auth,
- [
- check('name', 'Name is required').not().isEmpty()
- ]
-], async (req, res) => {
- const errors = validationResult(req);
- if (!errors.isEmpty()) {
- return res.status(400).json({ errors: errors.array() });
- }
+router.put(
+ "/",
+ [auth, [check("name", "Name is required").not().isEmpty()]],
+ async (req, res) => {
+ const errors = validationResult(req);
+ if (!errors.isEmpty()) {
+ return res.status(400).json({ errors: errors.array() });
+ }
- const {
- name,
- bio,
- location,
- skills,
- github,
- gitlab,
- linkedin,
- twitter,
- website,
- // Competitive coding platforms
- codechef,
- hackerrank,
- leetcode,
- codeforces,
- hackerearth
- } = req.body;
-
- // Build profile object
- const profileFields = {};
- if (name) profileFields.name = name;
- if (bio) profileFields.bio = bio;
- if (location) profileFields.location = location;
- if (skills && Array.isArray(skills)) {
- profileFields.skills = skills;
- } else if (skills) {
- profileFields.skills = skills.split(',').map(skill => skill.trim());
- }
+ const {
+ name,
+ bio,
+ location,
+ skills,
+ github,
+ gitlab,
+ linkedin,
+ twitter,
+ website,
+ // Competitive coding platforms
+ codechef,
+ hackerrank,
+ leetcode,
+ codeforces,
+ hackerearth,
+ } = req.body;
+
+ // Build profile object
+ const profileFields = {};
+ if (name) profileFields.name = name;
+ if (bio) profileFields.bio = bio;
+ if (location) profileFields.location = location;
+ if (skills && Array.isArray(skills)) {
+ profileFields.skills = skills;
+ } else if (skills) {
+ profileFields.skills = skills.split(",").map((skill) => skill.trim());
+ }
- // Build social object
- profileFields.socialLinks = {};
- if (github) profileFields.socialLinks.github = github;
- if (gitlab) profileFields.socialLinks.gitlab = gitlab;
- if (linkedin) profileFields.socialLinks.linkedin = linkedin;
- if (twitter) profileFields.socialLinks.twitter = twitter;
- if (website) profileFields.socialLinks.website = website;
-
- // Add competitive coding platforms
- if (codechef) profileFields.socialLinks.codechef = codechef;
- if (hackerrank) profileFields.socialLinks.hackerrank = hackerrank;
- if (leetcode) profileFields.socialLinks.leetcode = leetcode;
- if (codeforces) profileFields.socialLinks.codeforces = codeforces;
- if (hackerearth) profileFields.socialLinks.hackerearth = hackerearth;
+ // Build social object
+ profileFields.socialLinks = {};
+ if (github) profileFields.socialLinks.github = github;
+ if (gitlab) profileFields.socialLinks.gitlab = gitlab;
+ if (linkedin) profileFields.socialLinks.linkedin = linkedin;
+ if (twitter) profileFields.socialLinks.twitter = twitter;
+ if (website) profileFields.socialLinks.website = website;
+
+ // Add competitive coding platforms
+ if (codechef) profileFields.socialLinks.codechef = codechef;
+ if (hackerrank) profileFields.socialLinks.hackerrank = hackerrank;
+ if (leetcode) profileFields.socialLinks.leetcode = leetcode;
+ if (codeforces) profileFields.socialLinks.codeforces = codeforces;
+ if (hackerearth) profileFields.socialLinks.hackerearth = hackerearth;
- try {
- let user = await User.findById(req.user.id);
-
- if (!user) {
- return res.status(404).json({ errors: [{ msg: 'User not found' }] });
- }
+ try {
+ let user = await User.findById(req.user.id);
- // Update
- user = await User.findByIdAndUpdate(
- req.user.id,
- { $set: profileFields },
- { new: true }
- ).select('-password');
+ if (!user) {
+ return res.status(404).json({ errors: [{ msg: "User not found" }] });
+ }
- return res.json(user);
- } catch (err) {
- console.error(err.message);
- res.status(500).json({ errors: [{ msg: 'Server Error' }] });
- }
-});
+ // Update
+ user = await User.findByIdAndUpdate(
+ req.user.id,
+ { $set: profileFields },
+ { new: true },
+ ).select("-password");
+
+ return res.json(user);
+ } catch (err) {
+ console.error(err.message);
+ res.status(500).json({ errors: [{ msg: "Server Error" }] });
+ }
+ },
+);
// Added a route to generate a new avatar from online services
-router.post('/generate-avatar', auth, async (req, res) => {
+router.post("/generate-avatar", auth, async (req, res) => {
try {
- const user = await User.findById(req.user.id).select('-password');
+ const user = await User.findById(req.user.id).select("-password");
if (!user) {
- return res.status(404).json({ errors: [{ msg: 'User not found' }] });
+ return res.status(404).json({ errors: [{ msg: "User not found" }] });
}
-
+
// Generate a new avatar using online service
const newAvatar = generateAvatarUrl(user.email, user.name);
user.avatar = newAvatar;
await user.save();
-
+
res.json({ avatar: newAvatar });
} catch (err) {
console.error(err.message);
- res.status(500).json({ errors: [{ msg: 'Server Error' }] });
+ res.status(500).json({ errors: [{ msg: "Server Error" }] });
}
});
// @route POST api/profile/projects
// @desc Add project to profile
// @access Private
-router.post('/projects', [
- auth,
+router.post(
+ "/projects",
[
- check('name', 'Project name is required').not().isEmpty(),
- check('description', 'Description is required').not().isEmpty()
- ]
-], async (req, res) => {
- const errors = validationResult(req);
- if (!errors.isEmpty()) {
- return res.status(400).json({ errors: errors.array() });
- }
+ auth,
+ [
+ check("name", "Project name is required").not().isEmpty(),
+ check("description", "Description is required").not().isEmpty(),
+ ],
+ ],
+ async (req, res) => {
+ const errors = validationResult(req);
+ if (!errors.isEmpty()) {
+ return res.status(400).json({ errors: errors.array() });
+ }
- const { name, description, link } = req.body;
+ const { name, description, link } = req.body;
- try {
- const user = await User.findById(req.user.id);
+ try {
+ const user = await User.findById(req.user.id);
- user.projects.unshift({
- name,
- description,
- link
- });
+ user.projects.unshift({
+ name,
+ description,
+ link,
+ });
- await user.save();
- res.json(user);
- } catch (err) {
- console.error(err.message);
- res.status(500).json({ errors: [{ msg: 'Server Error' }] });
- }
-});
+ await user.save();
+ res.json(user);
+ } catch (err) {
+ console.error(err.message);
+ res.status(500).json({ errors: [{ msg: "Server Error" }] });
+ }
+ },
+);
// @route DELETE api/profile/projects/:proj_id
// @desc Delete project from profile
// @access Private
-router.delete('/projects/:proj_id', auth, async (req, res) => {
+router.delete("/projects/:proj_id", auth, async (req, res) => {
try {
const user = await User.findById(req.user.id);
-
+
// Get remove index
const removeIndex = user.projects
- .map(item => item.id)
+ .map((item) => item.id)
.indexOf(req.params.proj_id);
if (removeIndex === -1) {
- return res.status(404).json({ errors: [{ msg: 'Project not found' }] });
+ return res.status(404).json({ errors: [{ msg: "Project not found" }] });
}
-
+
user.projects.splice(removeIndex, 1);
await user.save();
res.json(user);
} catch (err) {
console.error(err.message);
- res.status(500).json({ errors: [{ msg: 'Server Error' }] });
+ res.status(500).json({ errors: [{ msg: "Server Error" }] });
}
});
// @route PUT api/profile/goals
// @desc Update user goals
// @access Private
-router.put('/goals', auth, async (req, res) => {
+router.put("/goals", auth, async (req, res) => {
try {
const user = await User.findById(req.user.id);
- if (!user) return res.status(404).json({ msg: 'User not found' });
+ if (!user) return res.status(404).json({ msg: "User not found" });
user.goals = req.body.goals || [];
await user.save();
res.json(user.goals);
} catch (err) {
console.error(err.message);
- res.status(500).send('Server error');
+ res.status(500).send("Server error");
}
});
// @route PUT api/profile/notes
// @desc Update user notes
// @access Private
-router.put('/notes', auth, async (req, res) => {
+router.put("/notes", auth, async (req, res) => {
try {
const user = await User.findById(req.user.id);
- if (!user) return res.status(404).json({ msg: 'User not found' });
+ if (!user) return res.status(404).json({ msg: "User not found" });
user.notes = req.body.notes || "";
await user.save();
res.json(user.notes);
} catch (err) {
console.error(err.message);
- res.status(500).send('Server error');
+ res.status(500).send("Server error");
}
});
// @route PUT api/profile/activity
// @desc Update activity log (for heatmap)
// @access Private
-router.put('/activity', auth, async (req, res) => {
+router.put("/activity", auth, async (req, res) => {
try {
const { date } = req.body; // expects YYYY-MM-DD or timestamp
const user = await User.findById(req.user.id);
- if (!user) return res.status(404).json({ msg: 'User not found' });
+ if (!user) return res.status(404).json({ msg: "User not found" });
user.activity.push(date);
await user.save();
res.json(user.activity);
} catch (err) {
console.error(err.message);
- res.status(500).send('Server error');
+ res.status(500).send("Server error");
}
});
// @route PUT api/profile/time
// @desc Update time spent
// @access Private
-router.put('/time', auth, async (req, res) => {
+router.put("/time", auth, async (req, res) => {
try {
const { timeSpent } = req.body; // e.g. "2h 30m"
const user = await User.findById(req.user.id);
- if (!user) return res.status(404).json({ msg: 'User not found' });
+ if (!user) return res.status(404).json({ msg: "User not found" });
user.timeSpent = timeSpent;
await user.save();
res.json(user.timeSpent);
} catch (err) {
console.error(err.message);
- res.status(500).send('Server error');
+ res.status(500).send("Server error");
}
});
-
-
-
router.post("/leetcode/:username", async (req, res) => {
const { username } = req.params;
@@ -438,17 +443,20 @@ router.post("/leetcode/:username", async (req, res) => {
ranking: json.data.matchedUser.profile?.ranking,
avatar: json.data.matchedUser.profile?.userAvatar,
},
- submitStatsGlobal: json.data.matchedUser.submitStatsGlobal.acSubmissionNum.map(sub => ({
- difficulty: sub.difficulty,
- count: sub.count,
- })),
- badges: json.data.matchedUser.badges.map(badge => ({
+ submitStatsGlobal:
+ json.data.matchedUser.submitStatsGlobal.acSubmissionNum.map((sub) => ({
+ difficulty: sub.difficulty,
+ count: sub.count,
+ })),
+ badges: json.data.matchedUser.badges.map((badge) => ({
id: badge.id,
displayName: badge.displayName,
icon: badge.icon,
})),
- submissionCalendar: JSON.parse(json.data.matchedUser.submissionCalendar || "{}"),
- recentSubmissions: json.data.recentAcSubmissionList.map(sub => ({
+ submissionCalendar: JSON.parse(
+ json.data.matchedUser.submissionCalendar || "{}",
+ ),
+ recentSubmissions: json.data.recentAcSubmissionList.map((sub) => ({
id: sub.id,
title: sub.title,
titleSlug: sub.titleSlug,
@@ -466,7 +474,7 @@ router.post("/leetcode/:username", async (req, res) => {
expired: contestRanking.badge?.expired || false,
},
},
- contestHistory: contestHistory.map(contest => ({
+ contestHistory: contestHistory.map((contest) => ({
attended: contest.attended || false,
rating: contest.rating || 0,
contest: {
@@ -488,8 +496,6 @@ router.post("/leetcode/:username", async (req, res) => {
}
});
-
-
router.post("/leetcode/update/:username", async (req, res) => {
const { username } = req.params;
@@ -500,8 +506,9 @@ router.post("/leetcode/update/:username", async (req, res) => {
return res.status(404).json({ error: "User not found in database" });
}
- const timeDifference = Date.now() - new Date(existingUser.lastUpdated).getTime();
- const sixHoursInMillis = 6 * 60 * 60 * 1000;
+ const timeDifference =
+ Date.now() - new Date(existingUser.lastUpdated).getTime();
+ const sixHoursInMillis = 6 * 60 * 60 * 1000;
if (timeDifference < sixHoursInMillis) {
return res.json({
@@ -590,17 +597,20 @@ router.post("/leetcode/update/:username", async (req, res) => {
ranking: json.data.matchedUser.profile?.ranking,
avatar: json.data.matchedUser.profile?.userAvatar,
},
- submitStatsGlobal: json.data.matchedUser.submitStatsGlobal.acSubmissionNum.map(sub => ({
- difficulty: sub.difficulty,
- count: sub.count,
- })),
- badges: json.data.matchedUser.badges.map(badge => ({
+ submitStatsGlobal:
+ json.data.matchedUser.submitStatsGlobal.acSubmissionNum.map((sub) => ({
+ difficulty: sub.difficulty,
+ count: sub.count,
+ })),
+ badges: json.data.matchedUser.badges.map((badge) => ({
id: badge.id,
displayName: badge.displayName,
icon: badge.icon,
})),
- submissionCalendar: JSON.parse(json.data.matchedUser.submissionCalendar || "{}"),
- recentSubmissions: json.data.recentAcSubmissionList.map(sub => ({
+ submissionCalendar: JSON.parse(
+ json.data.matchedUser.submissionCalendar || "{}",
+ ),
+ recentSubmissions: json.data.recentAcSubmissionList.map((sub) => ({
id: sub.id,
title: sub.title,
titleSlug: sub.titleSlug,
@@ -618,7 +628,7 @@ router.post("/leetcode/update/:username", async (req, res) => {
expired: contestRanking.badge?.expired || false,
},
},
- contestHistory: contestHistory.map(contest => ({
+ contestHistory: contestHistory.map((contest) => ({
attended: contest.attended || false,
rating: contest.rating || 0,
contest: {
@@ -649,5 +659,4 @@ router.post("/leetcode/update/:username", async (req, res) => {
}
});
-
-module.exports = router;
\ No newline at end of file
+module.exports = router;
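
For the avatar endpoint above, the multipart field name must match `upload.single("avatar")`. A browser-side sketch; the base URL and token storage key are assumptions.

```js
// Sketch: upload an avatar file to POST /api/profile/avatar from a browser.
async function uploadAvatar(file) {
  const formData = new FormData();
  formData.append("avatar", file); // field name must match upload.single("avatar")

  const res = await fetch("http://localhost:5000/api/profile/avatar", {
    method: "POST",
    headers: { "x-auth-token": localStorage.getItem("token") }, // key name assumed
    body: formData, // the browser sets the multipart boundary automatically
  });
  return res.json(); // { avatar: "/uploads/avatars/..." }
}
```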
diff --git a/backend/routes/tasks.route.js b/backend/routes/tasks.route.js
index 9e49627..ccef556 100644
--- a/backend/routes/tasks.route.js
+++ b/backend/routes/tasks.route.js
@@ -1,87 +1,94 @@
-const express = require('express');
+const express = require("express");
const router = express.Router();
-const auth = require('../middleware/auth');
-const Task = require('../models/Task');
+const auth = require("../middleware/auth");
+const Task = require("../models/Task");
// POST /api/tasks - Create a new task
-router.post('/', auth, async (req, res) => {
+router.post("/", auth, async (req, res) => {
try {
const { title, description, status, deadline } = req.body;
if (!title || !title.trim()) {
- return res.status(400).json({ errors: [{ msg: 'Title is required' }] });
+ return res.status(400).json({ errors: [{ msg: "Title is required" }] });
}
const task = new Task({
title: title.trim(),
- description: description || '',
- status: status === 'completed' ? 'completed' : 'pending',
+ description: description || "",
+ status: status === "completed" ? "completed" : "pending",
deadline: deadline ? new Date(deadline) : undefined,
userId: req.user.id,
});
const saved = await task.save();
return res.status(201).json(saved);
} catch (err) {
- console.error('Create task error:', err);
- return res.status(500).json({ errors: [{ msg: 'Server error' }] });
+ console.error("Create task error:", err);
+ return res.status(500).json({ errors: [{ msg: "Server error" }] });
}
});
// GET /api/tasks - Get tasks for the authenticated user
-router.get('/', auth, async (req, res) => {
+router.get("/", auth, async (req, res) => {
try {
- const tasks = await Task.find({ userId: req.user.id }).sort({ createdAt: -1 });
+ const tasks = await Task.find({ userId: req.user.id }).sort({
+ createdAt: -1,
+ });
return res.json(tasks);
} catch (err) {
- console.error('Fetch tasks error:', err);
- return res.status(500).json({ errors: [{ msg: 'Server error' }] });
+ console.error("Fetch tasks error:", err);
+ return res.status(500).json({ errors: [{ msg: "Server error" }] });
}
});
// PUT /api/tasks/:id - Update a task (only owner)
-router.put('/:id', auth, async (req, res) => {
+router.put("/:id", auth, async (req, res) => {
try {
const { id } = req.params;
const updates = {};
- if (typeof req.body.title === 'string') updates.title = req.body.title.trim();
- if (typeof req.body.description === 'string') updates.description = req.body.description;
- if (typeof req.body.status === 'string' && ['pending','completed'].includes(req.body.status)) updates.status = req.body.status;
- if (typeof req.body.deadline !== 'undefined') updates.deadline = req.body.deadline ? new Date(req.body.deadline) : null;
+ if (typeof req.body.title === "string")
+ updates.title = req.body.title.trim();
+ if (typeof req.body.description === "string")
+ updates.description = req.body.description;
+ if (
+ typeof req.body.status === "string" &&
+ ["pending", "completed"].includes(req.body.status)
+ )
+ updates.status = req.body.status;
+ if (typeof req.body.deadline !== "undefined")
+ updates.deadline = req.body.deadline ? new Date(req.body.deadline) : null;
// Never allow changing userId via API
const task = await Task.findOne({ _id: id, userId: req.user.id });
if (!task) {
- return res.status(404).json({ errors: [{ msg: 'Task not found' }] });
+ return res.status(404).json({ errors: [{ msg: "Task not found" }] });
}
Object.assign(task, updates);
const saved = await task.save();
return res.json(saved);
} catch (err) {
- console.error('Update task error:', err);
- if (err.name === 'CastError') {
- return res.status(400).json({ errors: [{ msg: 'Invalid task id' }] });
+ console.error("Update task error:", err);
+ if (err.name === "CastError") {
+ return res.status(400).json({ errors: [{ msg: "Invalid task id" }] });
}
- return res.status(500).json({ errors: [{ msg: 'Server error' }] });
+ return res.status(500).json({ errors: [{ msg: "Server error" }] });
}
});
// DELETE /api/tasks/:id - Delete a task (only owner)
-router.delete('/:id', auth, async (req, res) => {
+router.delete("/:id", auth, async (req, res) => {
try {
const { id } = req.params;
const task = await Task.findOneAndDelete({ _id: id, userId: req.user.id });
if (!task) {
- return res.status(404).json({ errors: [{ msg: 'Task not found' }] });
+ return res.status(404).json({ errors: [{ msg: "Task not found" }] });
}
- return res.json({ msg: 'Task deleted' });
+ return res.json({ msg: "Task deleted" });
} catch (err) {
- console.error('Delete task error:', err);
- if (err.name === 'CastError') {
- return res.status(400).json({ errors: [{ msg: 'Invalid task id' }] });
+ console.error("Delete task error:", err);
+ if (err.name === "CastError") {
+ return res.status(400).json({ errors: [{ msg: "Invalid task id" }] });
}
- return res.status(500).json({ errors: [{ msg: 'Server error' }] });
+ return res.status(500).json({ errors: [{ msg: "Server error" }] });
}
});
module.exports = router;
-
-
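For readers following the tasks router above, a minimal client-side sketch of calling the update endpoint. It assumes the `auth` middleware accepts a JWT as a bearer token in the `Authorization` header and that the API is served from the same origin; neither detail appears in this patch, so treat both as assumptions.

```js
// Hypothetical client helper for PUT /api/tasks/:id.
// Assumption: `auth` reads a JWT from the Authorization header.
async function updateTask(taskId, token, changes) {
  const res = await fetch(`/api/tasks/${taskId}`, {
    method: "PUT",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify(changes), // e.g. { status: "completed" }
  });

  if (!res.ok) {
    // 400 = invalid id, 404 = not found or not owned by this user, 500 = server error
    const body = await res.json();
    throw new Error(body.errors?.[0]?.msg || `Request failed: ${res.status}`);
  }

  return res.json(); // the updated task document
}
```

Because every query in the router is scoped by `userId`, a valid id that belongs to another user comes back as 404 rather than 403, which avoids leaking which ids exist.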
diff --git a/backend/server.js b/backend/server.js
index 2c20dd3..b947644 100644
--- a/backend/server.js
+++ b/backend/server.js
@@ -8,7 +8,6 @@ require("./utils/leetcodeCron");
const passport = require("passport");
const githubRoute = require("./routes/github.route");
-
// Database connection
require("./db/connection");
@@ -16,14 +15,19 @@ require("./db/connection");
try {
require("./config/passport");
} catch (err) {
- console.warn("Google OAuth is not configured properly. Skipping Passport strategy.");
+ console.warn(
+ "Google OAuth is not configured properly. Skipping Passport strategy.",
+ );
}
// Import routes
const contactRouter = require("./routes/contact.route");
// Rate limiter middleware placeholders
-const { generalMiddleware, authMiddleware } = require("./middleware/rateLimit/index");
+const {
+ generalMiddleware,
+ authMiddleware,
+} = require("./middleware/rateLimit/index");
// Initialize Express
const app = express();
@@ -36,7 +40,7 @@ app.use(
cors({
origin: process.env.CLIENT_URL || "http://localhost:5173",
credentials: true,
- })
+ }),
);
// Session setup
@@ -46,7 +50,7 @@ app.use(
resave: false,
saveUninitialized: false,
cookie: { secure: false }, // set true if using HTTPS
- })
+ }),
);
// Initialize Passport
@@ -66,7 +70,6 @@ app.use("/api/contact", generalMiddleware, contactRouter);
app.use("/api/tasks", require("./routes/tasks.route"));
app.use("/api/feedback", require("./routes/feedback"));
-
// Default route
app.get("/", (req, res) => {
res.send("DEVSYNC BACKEND API ๐");
@@ -76,4 +79,4 @@ app.get("/", (req, res) => {
const PORT = process.env.PORT || 5000;
app.listen(PORT, () => {
console.log(`Server is up and running at http://localhost:${PORT} ๐`);
-});
\ No newline at end of file
+});
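The session block above hard-codes `cookie: { secure: false }` with a comment to flip it under HTTPS. One common pattern is to derive the flag from `NODE_ENV` instead of editing code per environment; a minimal sketch, assuming production sits behind a TLS-terminating proxy (an assumption this patch does not state):

```js
// Sketch: environment-driven cookie security for express-session.
const express = require("express");
const session = require("express-session");

const app = express();
const isProd = process.env.NODE_ENV === "production";

if (isProd) {
  app.set("trust proxy", 1); // trust X-Forwarded-Proto so secure cookies work behind a proxy
}

app.use(
  session({
    secret: process.env.SESSION_SECRET,
    resave: false,
    saveUninitialized: false,
    cookie: {
      secure: isProd, // only send the cookie over HTTPS in production
      httpOnly: true, // keep it out of reach of client-side scripts
      sameSite: "lax",
    },
  }),
);
```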
diff --git a/backend/services/sendResetEmail.js b/backend/services/sendResetEmail.js
index 65d9e5b..d01fe6b 100644
--- a/backend/services/sendResetEmail.js
+++ b/backend/services/sendResetEmail.js
@@ -1,24 +1,24 @@
-const nodemailer = require('nodemailer');
+const nodemailer = require("nodemailer");
const transporter = nodemailer.createTransport({
- service: 'gmail',
+ service: "gmail",
auth: {
user: process.env.EMAIL_USER,
- pass: process.env.EMAIL_PASSWORD
- }
+ pass: process.env.EMAIL_PASSWORD,
+ },
});
const sendResetEmail = async (email, resetLink) => {
const mailOptions = {
from: process.env.EMAIL_USER,
to: email,
- subject: 'Password Reset - DevSync',
+ subject: "Password Reset - DevSync",
html: `
Password Reset Request
Click the link below to reset your password:
${resetLink}
This link will expire in 1 hour.
- `
+ `,
};
await transporter.sendMail(mailOptions);
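A hedged usage sketch for this mailer, for example from a forgot-password handler. The module path, the exported name, and the shape of the reset link are assumptions; note also that with `service: "gmail"`, `EMAIL_PASSWORD` generally needs to be a Google app password rather than the normal account password.

```js
// Hypothetical caller; route wiring and token storage are assumed to live elsewhere.
const crypto = require("crypto");
const sendResetEmail = require("./services/sendResetEmail"); // assumed export

async function handleForgotPassword(email) {
  const token = crypto.randomBytes(32).toString("hex");
  // Persisting a hashed copy of the token with a 1-hour expiry is assumed to happen here.
  const resetLink = `${process.env.CLIENT_URL}/reset-password/${token}`;
  await sendResetEmail(email, resetLink);
}
```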
diff --git a/backend/utils/emailVerificationHelpers.js b/backend/utils/emailVerificationHelpers.js
index a6fefe1..aae7193 100644
--- a/backend/utils/emailVerificationHelpers.js
+++ b/backend/utils/emailVerificationHelpers.js
@@ -1,9 +1,10 @@
-const { sendVerificationEmail } = require('../services/emailService');
+const { sendVerificationEmail } = require("../services/emailService");
const crypto = require("crypto");
-const jwt = require('jsonwebtoken');
+const jwt = require("jsonwebtoken");
// Use a fallback JWT secret if env variable is missing
-const JWT_SECRET = process.env.JWT_SECRET || 'devsync_secure_jwt_secret_key_for_authentication';
+const JWT_SECRET =
+ process.env.JWT_SECRET || "devsync_secure_jwt_secret_key_for_authentication";
/**
* Generate a 6-digit verification code
@@ -60,7 +61,7 @@ const handleVerificationEmail = async (email, verificationCode) => {
}
try {
- console.log('Sending For Email Verifcation ...')
+ console.log("Sending For Email Verifcation ...");
await sendVerificationEmail(email, verificationCode);
console.log(`Verification code for ${email}: ${verificationCode}`);
} catch (emailError) {
@@ -74,5 +75,5 @@ module.exports = {
generateJWT,
formatUserResponse,
setVerificationToken,
- handleVerificationEmail
+ handleVerificationEmail,
};
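The hunk shows the docblock for the 6-digit verification code generator but not its body. Purely as an illustration (not necessarily what the helper actually does), one way to produce such a code with Node's `crypto` module:

```js
const crypto = require("crypto");

// Sketch: a uniformly random 6-digit code, returned as a string (100000-999999).
// crypto.randomInt avoids the modulo bias of Math.random()-based approaches.
function generateVerificationCode() {
  return crypto.randomInt(100000, 1000000).toString();
}

console.log(generateVerificationCode()); // e.g. "483920"
```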
diff --git a/backend/utils/leetcodeCron.js b/backend/utils/leetcodeCron.js
index dd6e147..2fb2c84 100644
--- a/backend/utils/leetcodeCron.js
+++ b/backend/utils/leetcodeCron.js
@@ -1,24 +1,24 @@
-const cron = require('node-cron');
-const LeetCode = require('../models/Leetcode');
-const axios = require('axios');
+const cron = require("node-cron");
+const LeetCode = require("../models/Leetcode");
+const axios = require("axios");
-console.log('โ leetcodeCron.js file loaded');
+console.log("โ leetcodeCron.js file loaded");
const batchLimit = 50; // number of users to update each hour
const runLeetCodeBatchUpdate = async () => {
try {
- console.log('Starting batch update for LeetCode users...');
+ console.log("Starting batch update for LeetCode users...");
const sixHoursAgo = new Date(Date.now() - 6 * 60 * 60 * 1000);
const usersToUpdate = await LeetCode.find({
- lastUpdated: { $lt: sixHoursAgo }
+ lastUpdated: { $lt: sixHoursAgo },
}).limit(batchLimit);
const totalUsers = usersToUpdate.length;
if (totalUsers === 0) {
- console.log('No users need updating at the moment.');
+ console.log("No users need updating at the moment.");
return;
}
@@ -37,20 +37,18 @@ const runLeetCodeBatchUpdate = async () => {
}
}
- console.log('Batch update completed.');
- console.log(`Summary: Total: ${totalUsers}, Successfully updated: ${successCount}, Failed: ${failCount}`);
+ console.log("Batch update completed.");
+ console.log(
+ `Summary: Total: ${totalUsers}, Successfully updated: ${successCount}, Failed: ${failCount}`,
+ );
} catch (err) {
- console.error('Error in batch update:', err);
+ console.error("Error in batch update:", err);
}
};
-
-
runLeetCodeBatchUpdate(); // comment out this call to stop the cron job from running on server start / restart
-cron.schedule('0 * * * *', runLeetCodeBatchUpdate); // execute on each one hour
-
-
+cron.schedule("0 * * * *", runLeetCodeBatchUpdate); // runs at the top of every hour
const updateUserLeetCodeProfile = async (username) => {
try {
@@ -110,10 +108,10 @@ const updateUserLeetCodeProfile = async (username) => {
headers: {
"Content-Type": "application/json",
},
- }
+ },
);
- const json = response.data;
+ const json = response.data;
if (!json.data?.matchedUser) {
console.error(`User ${username} not found.`);
@@ -129,17 +127,20 @@ const updateUserLeetCodeProfile = async (username) => {
ranking: json.data.matchedUser.profile?.ranking,
avatar: json.data.matchedUser.profile?.userAvatar,
},
- submitStatsGlobal: json.data.matchedUser.submitStatsGlobal.acSubmissionNum.map(sub => ({
- difficulty: sub.difficulty,
- count: sub.count,
- })),
- badges: json.data.matchedUser.badges.map(badge => ({
+ submitStatsGlobal:
+ json.data.matchedUser.submitStatsGlobal.acSubmissionNum.map((sub) => ({
+ difficulty: sub.difficulty,
+ count: sub.count,
+ })),
+ badges: json.data.matchedUser.badges.map((badge) => ({
id: badge.id,
displayName: badge.displayName,
icon: badge.icon,
})),
- submissionCalendar: JSON.parse(json.data.matchedUser.submissionCalendar || "{}"),
- recentSubmissions: json.data.recentAcSubmissionList.map(sub => ({
+ submissionCalendar: JSON.parse(
+ json.data.matchedUser.submissionCalendar || "{}",
+ ),
+ recentSubmissions: json.data.recentAcSubmissionList.map((sub) => ({
id: sub.id,
title: sub.title,
titleSlug: sub.titleSlug,
@@ -157,7 +158,7 @@ const updateUserLeetCodeProfile = async (username) => {
expired: contestRanking.badge?.expired || false,
},
},
- contestHistory: contestHistory.map(contest => ({
+ contestHistory: contestHistory.map((contest) => ({
attended: contest.attended || false,
rating: contest.rating || 0,
contest: {
@@ -170,11 +171,9 @@ const updateUserLeetCodeProfile = async (username) => {
const user = await LeetCode.findOneAndUpdate(
{ username },
{ ...result, lastUpdated: new Date() },
- { new: true }
+ { new: true },
);
-
} catch (err) {
console.error(`Error updating ${username}:`, err);
}
};
-
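On the scheduling above: `"0 * * * *"` fires at minute 0 of every hour, and each run refreshes at most `batchLimit` (50) profiles whose `lastUpdated` is older than six hours, so a large user base gets worked through gradually. If the cadence ever needs to change, node-cron can validate an expression before scheduling it; a small sketch (the `LEETCODE_CRON` variable and the alternative expression are only examples):

```js
const cron = require("node-cron");

// "0 */6 * * *" = minute 0 of every sixth hour; shown only as an example cadence.
const expression = process.env.LEETCODE_CRON || "0 */6 * * *";

if (!cron.validate(expression)) {
  throw new Error(`Invalid cron expression: ${expression}`);
}

cron.schedule(expression, () => {
  console.log(`LeetCode batch update triggered at ${new Date().toISOString()}`);
});
```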
diff --git a/docs/env_guide.md b/docs/env_guide.md
index 32a0fe9..644ef5a 100644
--- a/docs/env_guide.md
+++ b/docs/env_guide.md
@@ -1,75 +1,73 @@
-# ๐ Environment Variables Guide
-
-**Purpose:**
-This file lists all the environment variables required for the project.
-For detailed setup instructions, refer to the linked setup guides below.
-
----
-
-```bash
-# -----------------------------
-# Application Settings
-# -----------------------------
-
-# Port on which the application runs locally
-PORT=5000
-
-# URL of the frontend client
-CLIENT_URL=http://localhost:5173
-
-# -----------------------------
-# Database Settings
-# -----------------------------
-
-# MongoDB connection string
-MONGODB_URI=mongodb+srv://<username>:<password>@cluster0.mongodb.net/?retryWrites=true&w=majority
-
-# -----------------------------
-# Authentication & Security
-# -----------------------------
-
-# Secret key for signing JSON Web Tokens
-JWT_SECRET=
-
-# Secret key for session cookies
-SESSION_SECRET=
-
-# -----------------------------
-# Google OAuth Settings
-# -----------------------------
-
-# Google OAuth client ID
-GOOGLE_CLIENT_ID= # See [Google OAuth Setup](./google-auth-setup.md) for detailed instructions
-
-# Google OAuth client secret
-GOOGLE_CLIENT_SECRET= # See [Google OAuth Setup](./google-auth-setup.md)
-
-# Callback URL for Google OAuth redirects
-GOOGLE_CALLBACK_URL=http://localhost:5000/auth/callback # See [Google OAuth Setup](./google-auth-setup.md)
-
-# -----------------------------
-# Admin & Notifications
-# -----------------------------
-
-# Email of the admin account
-ADMIN_EMAIL=your-registered-email@example.com # Must match the email used for services like Resend
-
-# -----------------------------
-# Email Service (Resend) Settings
-# -----------------------------
-
-# API key for sending emails via Resend
-RESEND_API_KEY= # Must have full access permissions; see [Resend Setup](./resend-setup.md)
-
-```
-
-## โ ๏ธ Notes
-
-- **Do not commit `.env` files** to Git. Always add `.env` to `.gitignore`.
-- Ensure `ADMIN_EMAIL` matches the registered email for services like Resend.
-- After setting environment variables, test **Google OAuth login** and **email functionality**.
-- For detailed setup instructions, follow the guides below:
- - [Google OAuth Setup](./setup/google_auth_setup.md)
- - [Resend API Setup](./setup/resend_setup.md)
-
-
+# ๐ Environment Variables Guide
+
+**Purpose:**
+This file lists all the environment variables required for the project.
+For detailed setup instructions, refer to the linked setup guides below.
+
+---
+
+```bash
+# -----------------------------
+# Application Settings
+# -----------------------------
+
+# Port on which the application runs locally
+PORT=5000
+
+# URL of the frontend client
+CLIENT_URL=http://localhost:5173
+
+# -----------------------------
+# Database Settings
+# -----------------------------
+
+# MongoDB connection string
+MONGODB_URI=mongodb+srv://<username>:<password>@cluster0.mongodb.net/?retryWrites=true&w=majority
+
+# -----------------------------
+# Authentication & Security
+# -----------------------------
+
+# Secret key for signing JSON Web Tokens
+JWT_SECRET=
+
+# Secret key for session cookies
+SESSION_SECRET=
+
+# -----------------------------
+# Google OAuth Settings
+# -----------------------------
+
+# Google OAuth client ID
+GOOGLE_CLIENT_ID= # See [Google OAuth Setup](./setup/google_auth_setup.md) for detailed instructions
+
+# Google OAuth client secret
+GOOGLE_CLIENT_SECRET= # See [Google OAuth Setup](./setup/google_auth_setup.md)
+
+# Callback URL for Google OAuth redirects
+GOOGLE_CALLBACK_URL=http://localhost:5000/auth/callback # See [Google OAuth Setup](./setup/google_auth_setup.md)
+
+# -----------------------------
+# Admin & Notifications
+# -----------------------------
+
+# Email of the admin account
+ADMIN_EMAIL=your-registered-email@example.com # Must match the email used for services like Resend
+
+# -----------------------------
+# Email Service (Resend) Settings
+# -----------------------------
+
+# API key for sending emails via Resend
+RESEND_API_KEY= # Must have full access permissions; see [Resend Setup](./setup/resend_setup.md)
+
+```
+
+## โ ๏ธ Notes
+
+- **Do not commit `.env` files** to Git. Always add `.env` to `.gitignore`.
+- Ensure `ADMIN_EMAIL` matches the registered email for services like Resend.
+- After setting environment variables, test **Google OAuth login** and **email functionality**.
+- For detailed setup instructions, follow the guides below:
+ - [Google OAuth Setup](./setup/google_auth_setup.md)
+ - [Resend API Setup](./setup/resend_setup.md)
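To complement this guide, a minimal sketch of loading and sanity-checking these variables at startup with `dotenv`. The set of keys treated as required here is an assumption based on the list above:

```js
// Sketch: load .env and fail fast if critical settings are missing.
require("dotenv").config();

const required = ["MONGODB_URI", "JWT_SECRET", "SESSION_SECRET"];
const missing = required.filter((key) => !process.env[key]);

if (missing.length > 0) {
  console.error(`Missing required environment variables: ${missing.join(", ")}`);
  process.exit(1);
}

const PORT = process.env.PORT || 5000;
console.log(`Environment loaded; server will listen on port ${PORT}`);
```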
diff --git a/docs/setup/google_auth_setup.md b/docs/setup/google_auth_setup.md
index 17d84cc..4625133 100644
--- a/docs/setup/google_auth_setup.md
+++ b/docs/setup/google_auth_setup.md
@@ -1,46 +1,46 @@
-# ๐ Google OAuth Setup
-
-**Purpose:**
-Enable Google login for the project and allow access to Google APIs securely.
-
----
-
-## ๐ Steps
-
-1. **Create a Google Cloud Project**
- - Go to [Google Cloud Console](https://console.cloud.google.com/).
- - Click on **โCreate Projectโ** โ Give it a name (e.g., `Devsync`).
- 
-
-2. **Enable OAuth 2.0 API**
- - Navigate to **APIs & Services โ Library**.
- - Search for **โGoogle Identity Servicesโ** or **โOAuth 2.0 Client IDsโ**.
- - Click **Enable**.
-
-3. **Configure OAuth Consent Screen**
- - Go to **APIs & Services โ OAuth consent screen**.
- - Select **External** (if this is public) or **Internal** (for organization use).
- - Fill in:
- - App Name: `My Project`
- - User Support Email: `your-email@example.com`
- - Add Scopes: `email`, `profile`
-
-
-4. **Create OAuth Credentials**
- - Go to **APIs & Services โ Credentials โ Create Credentials โ OAuth Client ID**
- - Application type: **Web Application**
- 
- - Add **Redirect URIs**:
- - `http://localhost:5000/auth/callback` (for local dev)
- - `https://yourdomain.com/auth/callback` (for production)
- - Click **Create** โ Save your `Client ID` and `Client Secret`.
- 
-
-5. **Add Environment Variables**
- - Open `.env` (or create it by copying `.env.example`)
- - Add the following keys:
-
-```bash
-GOOGLE_CLIENT_ID=
-GOOGLE_CLIENT_SECRET=
-GOOGLE_REDIRECT_URI=http://localhost:5000/api/auth/callback
+# ๐ Google OAuth Setup
+
+**Purpose:**
+Enable Google login for the project and allow access to Google APIs securely.
+
+---
+
+## ๐ Steps
+
+1. **Create a Google Cloud Project**
+ - Go to [Google Cloud Console](https://console.cloud.google.com/).
+ - Click on **โCreate Projectโ** โ Give it a name (e.g., `Devsync`).
+ 
+
+2. **Enable OAuth 2.0 API**
+ - Navigate to **APIs & Services โ Library**.
+ - Search for **โGoogle Identity Servicesโ** or **โOAuth 2.0 Client IDsโ**.
+ - Click **Enable**.
+
+3. **Configure OAuth Consent Screen**
+ - Go to **APIs & Services โ OAuth consent screen**.
+ - Select **External** (if this is public) or **Internal** (for organization use).
+ - Fill in:
+ - App Name: `My Project`
+ - User Support Email: `your-email@example.com`
+ - Add Scopes: `email`, `profile`
+
+
+4. **Create OAuth Credentials**
+ - Go to **APIs & Services โ Credentials โ Create Credentials โ OAuth Client ID**
+ - Application type: **Web Application**
+ 
+ - Add **Redirect URIs**:
+ - `http://localhost:5000/auth/callback` (for local dev)
+ - `https://yourdomain.com/auth/callback` (for production)
+ - Click **Create** โ Save your `Client ID` and `Client Secret`.
+ 
+
+5. **Add Environment Variables**
+ - Open `.env` (or create it by copying `.env.example`)
+ - Add the following keys:
+
+```bash
+GOOGLE_CLIENT_ID=
+GOOGLE_CLIENT_SECRET=
+GOOGLE_REDIRECT_URI=http://localhost:5000/api/auth/callback
diff --git a/docs/setup/google_sheet_github_action.md b/docs/setup/google_sheet_github_action.md
index fe9aefd..d638ce7 100644
--- a/docs/setup/google_sheet_github_action.md
+++ b/docs/setup/google_sheet_github_action.md
@@ -1,40 +1,37 @@
-# ๐ GitHub โ Google Sheets Action Scripts Setup
-
-**Purpose:**
-Automate updating Google Sheets from GitHub Actions.
-This guide explains the recommended method for setting up credentials and secrets.
-
----
-
-## ๐ Steps
-
-1. **Create a Google Cloud Project**
- - Go to [Google Cloud Console](https://console.cloud.google.com/) and create a new project.
- - Enable the **Google Sheets API** and **Google Drive API**.
-
-2. **Create a Service Account**
- - Navigate to **IAM & Admin โ Service Accounts โ Create Service Account**.
- 
-
- - Assign **Editor** or appropriate permissions.
- 
-
-3. **Download the Full JSON Credentials**
- - Click **Keys โ Add Key โ Create New Key โ JSON**.
- - **Important:** Download the **entire JSON file**.
- > Partial hardcoding (e.g., only client ID or private key) **will not work**.
- 
-
-4. **Share Your Google Sheet with the Service Account**
- - Open the Google Sheet you want to automate.
- - Click **Share โ Enter the service account email โ Give Editor access โ Send**.
- - The **service account email** can be found in the downloaded JSON under `"client_email"`.
-
-5. **Add JSON as GitHub Secret**
- - Go to **Repository โ Settings โ Secrets and Variables โ Actions โ New Repository Secret**.
- - Upload the **full JSON content** as `GOOGLE_CREDENTIALS`.
- 
-
-6. **Reference Secret in GitHub Action**
- - Example workflow snippet (`.github/workflows/leaderboard.yml`):
-
+# ๐ GitHub โ Google Sheets Action Scripts Setup
+
+**Purpose:**
+Automate updating Google Sheets from GitHub Actions.
+This guide explains the recommended method for setting up credentials and secrets.
+
+---
+
+## ๐ Steps
+
+1. **Create a Google Cloud Project**
+ - Go to [Google Cloud Console](https://console.cloud.google.com/) and create a new project.
+ - Enable the **Google Sheets API** and **Google Drive API**.
+2. **Create a Service Account**
+ - Navigate to **IAM & Admin โ Service Accounts โ Create Service Account**.
+ 
+ - Assign **Editor** or appropriate permissions.
+ 
+
+3. **Download the Full JSON Credentials**
+ - Click **Keys โ Add Key โ Create New Key โ JSON**.
+ - **Important:** Download the **entire JSON file**.
+ > Partial hardcoding (e.g., only client ID or private key) **will not work**.
+ > 
+
+4. **Share Your Google Sheet with the Service Account**
+ - Open the Google Sheet you want to automate.
+ - Click **Share โ Enter the service account email โ Give Editor access โ Send**.
+ - The **service account email** can be found in the downloaded JSON under `"client_email"`.
+
+5. **Add JSON as GitHub Secret**
+ - Go to **Repository โ Settings โ Secrets and Variables โ Actions โ New Repository Secret**.
+ - Upload the **full JSON content** as `GOOGLE_CREDENTIALS`.
+ 
+
+6. **Reference Secret in GitHub Action**
+ - Example workflow snippet (`.github/workflows/leaderboard.yml`):
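The workflow snippet referenced here lives below this hunk and is not reproduced in the patch. As a rough companion only, here is a sketch of how a Node script run by such a workflow could consume the `GOOGLE_CREDENTIALS` secret via the `googleapis` package; the spreadsheet id, range, and values are placeholders, and the repository's actual `leaderboard.yml` may differ:

```js
// Sketch: authenticate with the service-account JSON from the GOOGLE_CREDENTIALS
// secret and append one row to a sheet. All identifiers below are placeholders.
const { google } = require("googleapis");

async function updateSheet() {
  const auth = new google.auth.GoogleAuth({
    credentials: JSON.parse(process.env.GOOGLE_CREDENTIALS),
    scopes: ["https://www.googleapis.com/auth/spreadsheets"],
  });

  const sheets = google.sheets({ version: "v4", auth });

  await sheets.spreadsheets.values.append({
    spreadsheetId: process.env.SHEET_ID, // hypothetical variable
    range: "Leaderboard!A1",
    valueInputOption: "RAW",
    requestBody: { values: [["example-user", 42]] },
  });
}

updateSheet().catch((err) => {
  console.error("Sheet update failed:", err);
  process.exit(1);
});
```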
diff --git a/docs/setup/resend_setup.md b/docs/setup/resend_setup.md
index 1fa9db2..423053b 100644
--- a/docs/setup/resend_setup.md
+++ b/docs/setup/resend_setup.md
@@ -1,27 +1,28 @@
-# ๐ง Resend API Setup
-
-**Purpose:**
-Enable email sending functionality using Resend API (emails, notifications, etc.) in the project.
-
----
-
-## ๐ Steps
-
-1. **Create a Resend Account**
- - Go to [Resend](https://resend.com/) and sign up.
- - Verify your email and log in to the dashboard.
- 
-
-2. **Create an API Key**
- - Navigate to **API Keys โ Create Key**.
- 
- - Give it a name (e.g., `project-dev-key`) and save it securely.
- 
-
-3. **Add Environment Variables**
- - Open `.env` (or create one if it doesnโt exist).
- - Add your Resend API key and sender email:
-
-```bash
-RESEND_API_KEY= # Must have full access permissions
-ADMIN_EMAIL=your-registered-email@example.com # Must be the same email you registered with on Resend
+# ๐ง Resend API Setup
+
+**Purpose:**
+Enable email sending functionality using Resend API (emails, notifications, etc.) in the project.
+
+---
+
+## ๐ Steps
+
+1. **Create a Resend Account**
+ - Go to [Resend](https://resend.com/) and sign up.
+ - Verify your email and log in to the dashboard.
+ 
+
+2. **Create an API Key**
+ - Navigate to **API Keys โ Create Key**.
+ 
+ - Give it a name (e.g., `project-dev-key`) and save it securely.
+ 
+
+3. **Add Environment Variables**
+ - Open `.env` (or create one if it doesnโt exist).
+ - Add your Resend API key and sender email:
+
+```bash
+RESEND_API_KEY= # Must have full access permissions
+ADMIN_EMAIL=your-registered-email@example.com # Must be the same email you registered with on Resend
+```
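For completeness, a minimal sketch of sending a message with the Resend Node SDK once these variables are in place. The sender address and the standalone layout are assumptions; the project's own email service may be structured differently:

```js
// Sketch: send a transactional email via Resend.
const { Resend } = require("resend");

const resend = new Resend(process.env.RESEND_API_KEY);

async function sendTestEmail() {
  const { data, error } = await resend.emails.send({
    from: `DevSync <${process.env.ADMIN_EMAIL}>`, // must be a sender Resend accepts (e.g. a verified domain)
    to: process.env.ADMIN_EMAIL,
    subject: "Resend setup check",
    html: "If you can read this, the API key works.",
  });

  if (error) {
    console.error("Resend error:", error);
    return;
  }
  console.log("Email queued with id:", data.id);
}

sendTestEmail();
```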
diff --git a/frontend/eslint.config.js b/frontend/eslint.config.js
index cee1e2c..777fe18 100644
--- a/frontend/eslint.config.js
+++ b/frontend/eslint.config.js
@@ -1,29 +1,29 @@
-import js from '@eslint/js'
-import globals from 'globals'
-import reactHooks from 'eslint-plugin-react-hooks'
-import reactRefresh from 'eslint-plugin-react-refresh'
-import { defineConfig, globalIgnores } from 'eslint/config'
+import js from "@eslint/js";
+import globals from "globals";
+import reactHooks from "eslint-plugin-react-hooks";
+import reactRefresh from "eslint-plugin-react-refresh";
+import { defineConfig, globalIgnores } from "eslint/config";
export default defineConfig([
- globalIgnores(['dist']),
+ globalIgnores(["dist"]),
{
- files: ['**/*.{js,jsx}'],
+ files: ["**/*.{js,jsx}"],
extends: [
js.configs.recommended,
- reactHooks.configs['recommended-latest'],
+ reactHooks.configs["recommended-latest"],
reactRefresh.configs.vite,
],
languageOptions: {
ecmaVersion: 2020,
globals: globals.browser,
parserOptions: {
- ecmaVersion: 'latest',
+ ecmaVersion: "latest",
ecmaFeatures: { jsx: true },
- sourceType: 'module',
+ sourceType: "module",
},
},
rules: {
- 'no-unused-vars': ['error', { varsIgnorePattern: '^[A-Z_]' }],
+ "no-unused-vars": ["error", { varsIgnorePattern: "^[A-Z_]" }],
},
},
-])
+]);
diff --git a/frontend/index.html b/frontend/index.html
index bc07f9e..97d1ccf 100644
--- a/frontend/index.html
+++ b/frontend/index.html
@@ -5,7 +5,10 @@
-
+
Devsync
diff --git a/frontend/jsconfig.json b/frontend/jsconfig.json
index f60767c..7cf9c96 100644
--- a/frontend/jsconfig.json
+++ b/frontend/jsconfig.json
@@ -6,4 +6,4 @@
"@/*": ["./src/*"]
}
}
-}
\ No newline at end of file
+}
diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx
index b14df54..b94ac05 100644
--- a/frontend/src/App.jsx
+++ b/frontend/src/App.jsx
@@ -143,4 +143,4 @@ function App() {
);
}
-export default App;
\ No newline at end of file
+export default App;
diff --git a/frontend/src/Components/About.jsx b/frontend/src/Components/About.jsx
index 054c234..b0736d2 100644
--- a/frontend/src/Components/About.jsx
+++ b/frontend/src/Components/About.jsx
@@ -6,53 +6,67 @@ import { Users2 } from "lucide-react";
import { twMerge } from "tailwind-merge";
import { TracingBeam } from "./ui/tracing-beam";
-
const aboutPoints = [
{
badge: "Unified Dashboard",
title: "Track GitHub, LeetCode, Codeforces, and more โ all from one place.",
- description: "No more switching tabs. DevSync aggregates your coding life and shows your productivity stats clearly and beautifully.",
+ description:
+ "No more switching tabs. DevSync aggregates your coding life and shows your productivity stats clearly and beautifully.",
},
{
badge: "Focus Tools",
title: "Built-in Pomodoro Timer and Focus Logs.",
- description: "Boost your deep work sessions with built-in timers and log how you spend your dev time efficiently.",
+ description:
+ "Boost your deep work sessions with built-in timers and log how you spend your dev time efficiently.",
},
{
badge: "Personal Insights",
title: "See your coding journey evolve in real time.",
- description: "Visualize your habits, identify patterns, and reflect on your growth as a developer.",
+ description:
+ "Visualize your habits, identify patterns, and reflect on your growth as a developer.",
},
{
badge: "Zero Context Switching",
title: "Everything you need, where you need it.",
- description: "From open-source activity to interview prep stats โ DevSync keeps you focused and synced.",
+ description:
+ "From open-source activity to interview prep stats โ DevSync keeps you focused and synced.",
},
];
const About = () => {
return (
-
+
{/* Section Header */}
-
-
About DevSync
+
+
+ About DevSync
+
{/* Tracing Points */}
{aboutPoints.map((item, index) => (
- {item.badge}
+ {item.badge}
-
+
{item.title}
-
+
{item.description}
diff --git a/frontend/src/Components/Ad.jsx b/frontend/src/Components/Ad.jsx
index 638ba92..d5bddab 100644
--- a/frontend/src/Components/Ad.jsx
+++ b/frontend/src/Components/Ad.jsx
@@ -40,33 +40,46 @@ const AdStrip = () => {
className="w-full py-16 px-6 mt-12"
>
-
{/* Top label */}
-
+
Built for Developers
{/* Heading */}
-
+
{/* Paragraph - FIXED: Changed from
to
*/}
-
- {/* Paragraph */}
-
+
{/* CTA Button */}
-
+
+ {" "}
+
diff --git a/frontend/src/Components/AllContributors.jsx b/frontend/src/Components/AllContributors.jsx
index 35ee03d..0391e86 100644
--- a/frontend/src/Components/AllContributors.jsx
+++ b/frontend/src/Components/AllContributors.jsx
@@ -13,7 +13,7 @@ const AllContributors = () => {
const fetchContributors = async () => {
try {
const response = await fetch(
- `https://api.github.com/repos/${OWNER}/${REPO}/contributors?per_page=100`
+ `https://api.github.com/repos/${OWNER}/${REPO}/contributors?per_page=100`,
);
if (!response.ok) {
throw new Error(`GitHub API error: ${response.status}`);
@@ -58,7 +58,6 @@ const AllContributors = () => {
alt={contributor.login}
className="w-20 h-20 rounded-full"
/>
-
{/* Username */}
diff --git a/frontend/src/Components/Contributors.jsx b/frontend/src/Components/Contributors.jsx
index 56f40dd..38a36a7 100644
--- a/frontend/src/Components/Contributors.jsx
+++ b/frontend/src/Components/Contributors.jsx
@@ -15,7 +15,7 @@ const ContributorsSection = () => {
const fetchContributors = async () => {
try {
const response = await fetch(
- `https://api.github.com/repos/${OWNER}/${REPO}/contributors?per_page=100`
+ `https://api.github.com/repos/${OWNER}/${REPO}/contributors?per_page=100`,
);
if (!response.ok) {
throw new Error(`GitHub API error: ${response.status}`);
diff --git a/frontend/src/Components/DashBoard/CardWrapper.jsx b/frontend/src/Components/DashBoard/CardWrapper.jsx
index 9bb397d..78475fd 100644
--- a/frontend/src/Components/DashBoard/CardWrapper.jsx
+++ b/frontend/src/Components/DashBoard/CardWrapper.jsx
@@ -1,7 +1,7 @@
// src/components/DashBoard/CardWrapper.jsx
export default function CardWrapper({ children, className = "" }) {
return (
-
+
{children}
);
diff --git a/frontend/src/Components/DashBoard/GithubRepoCard.jsx b/frontend/src/Components/DashBoard/GithubRepoCard.jsx
index ecfb7ef..cad0753 100644
--- a/frontend/src/Components/DashBoard/GithubRepoCard.jsx
+++ b/frontend/src/Components/DashBoard/GithubRepoCard.jsx
@@ -1,53 +1,53 @@
-import React from 'react';
-import { Github, Star, GitFork, Clock } from 'lucide-react';
-import CardWrapper from './CardWrapper';
+import React from "react";
+import { Github, Star, GitFork, Clock } from "lucide-react";
+import CardWrapper from "./CardWrapper";
/**
* Component to display a list of GitHub repositories
*/
-export default function GithubRepoCard({ repositories = [], className = '' }) {
+export default function GithubRepoCard({ repositories = [], className = "" }) {
// Format the update time to a readable string
const formatUpdateTime = (dateString) => {
- if (!dateString) return '';
-
+ if (!dateString) return "";
+
const date = new Date(dateString);
const now = new Date();
const diffMs = now - date;
const diffDays = Math.floor(diffMs / (1000 * 60 * 60 * 24));
-
+
if (diffDays === 0) {
- return 'Today';
+ return "Today";
} else if (diffDays === 1) {
- return 'Yesterday';
+ return "Yesterday";
} else if (diffDays < 7) {
return `${diffDays} days ago`;
} else if (diffDays < 30) {
const weeks = Math.floor(diffDays / 7);
- return `${weeks} ${weeks === 1 ? 'week' : 'weeks'} ago`;
+ return `${weeks} ${weeks === 1 ? "week" : "weeks"} ago`;
} else {
const months = Math.floor(diffDays / 30);
- return `${months} ${months === 1 ? 'month' : 'months'} ago`;
+ return `${months} ${months === 1 ? "month" : "months"} ago`;
}
};
// Language color mapping
const languageColors = {
- JavaScript: '#f1e05a',
- TypeScript: '#3178c6',
- HTML: '#e34c26',
- CSS: '#563d7c',
- Python: '#3572A5',
- Java: '#b07219',
- 'C#': '#178600',
- PHP: '#4F5D95',
- Ruby: '#701516',
- Go: '#00ADD8',
- Swift: '#F05138',
- Kotlin: '#A97BFF',
- Rust: '#dea584',
- Dart: '#00B4AB',
+ JavaScript: "#f1e05a",
+ TypeScript: "#3178c6",
+ HTML: "#e34c26",
+ CSS: "#563d7c",
+ Python: "#3572A5",
+ Java: "#b07219",
+ "C#": "#178600",
+ PHP: "#4F5D95",
+ Ruby: "#701516",
+ Go: "#00ADD8",
+ Swift: "#F05138",
+ Kotlin: "#A97BFF",
+ Rust: "#dea584",
+ Dart: "#00B4AB",
// Add more languages as needed
- default: '#cccccc'
+ default: "#cccccc",
};
return (
@@ -66,43 +66,50 @@ export default function GithubRepoCard({ repositories = [], className = '' }) {
) : (
{repositories.map((repo) => (
-
-
+
{repo.name}
-
+
{repo.description && (
{repo.description}
)}
-
+
{repo.language && (
-
{repo.language}
)}
-
+
{repo.stargazers_count}
-
+
{repo.forks_count}
-
+
{formatUpdateTime(repo.updated_at)}
@@ -114,4 +121,4 @@ export default function GithubRepoCard({ repositories = [], className = '' }) {
)}
);
-}
\ No newline at end of file
+}
diff --git a/frontend/src/Components/DashBoard/GoalsCard.jsx b/frontend/src/Components/DashBoard/GoalsCard.jsx
index 8da18e9..e6285de 100644
--- a/frontend/src/Components/DashBoard/GoalsCard.jsx
+++ b/frontend/src/Components/DashBoard/GoalsCard.jsx
@@ -74,8 +74,8 @@ export default function GoalsCard() {
completed: !goal.completed,
status: goal.completed ? "pending" : "completed",
}
- : g
- )
+ : g,
+ ),
);
} catch (err) {
console.error("Update error:", err);
@@ -98,8 +98,7 @@ export default function GoalsCard() {
const pendingGoals = goals.filter((g) => !g.completed);
const completedGoals = goals.filter((g) => g.completed);
- const listClass =
- "max-h-64 overflow-y-auto space-y-3 pr-2 scrollbar-hidden"; // hide scrollbar
+ const listClass = "max-h-64 overflow-y-auto space-y-3 pr-2 scrollbar-hidden"; // hide scrollbar
return (
diff --git a/frontend/src/Components/DashBoard/LeetCode.jsx b/frontend/src/Components/DashBoard/LeetCode.jsx
index 88911e2..637741e 100644
--- a/frontend/src/Components/DashBoard/LeetCode.jsx
+++ b/frontend/src/Components/DashBoard/LeetCode.jsx
@@ -16,7 +16,15 @@ import ReactCalendarHeatmap from "react-calendar-heatmap";
import "react-calendar-heatmap/dist/styles.css";
import BackButton from "../ui/backbutton";
-ChartJS.register(CategoryScale, LinearScale, PointElement, LineElement, Title, Tooltip, Legend);
+ChartJS.register(
+ CategoryScale,
+ LinearScale,
+ PointElement,
+ LineElement,
+ Title,
+ Tooltip,
+ Legend,
+);
export default function LeetCode({ platforms = {} }) {
const [stats, setStats] = useState(null);
@@ -26,10 +34,13 @@ export default function LeetCode({ platforms = {} }) {
const fetchStats = async () => {
if (!leetUser) return;
try {
- const res = await fetch(`${import.meta.env.VITE_API_URL}/api/profile/leetcode/${leetUser}`, {
- method: "POST",
- headers: { "Content-Type": "application/json" },
- });
+ const res = await fetch(
+ `${import.meta.env.VITE_API_URL}/api/profile/leetcode/${leetUser}`,
+ {
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ },
+ );
if (!res.ok) throw new Error(`API error: ${res.status}`);
const json = await res.json();
setStats(json.data);
@@ -53,10 +64,12 @@ export default function LeetCode({ platforms = {} }) {
submissionCalendar,
} = stats;
- const calendarData = Object.entries(submissionCalendar).map(([timestamp, count]) => ({
- date: new Date(Number(timestamp) * 1000).toISOString().split("T")[0],
- count: parseInt(count, 10),
- }));
+ const calendarData = Object.entries(submissionCalendar).map(
+ ([timestamp, count]) => ({
+ date: new Date(Number(timestamp) * 1000).toISOString().split("T")[0],
+ count: parseInt(count, 10),
+ }),
+ );
const startDate = new Date("2025-01-01");
const endDate = new Date();
@@ -64,7 +77,7 @@ export default function LeetCode({ platforms = {} }) {
const attendedContests = contestHistory.filter((c) => c.attended);
const labels = attendedContests.map((c) =>
- new Date(c.contest.startTime).toLocaleDateString()
+ new Date(c.contest.startTime).toLocaleDateString(),
);
const data = {
@@ -86,7 +99,10 @@ export default function LeetCode({ platforms = {} }) {
responsive: true,
maintainAspectRatio: false,
plugins: {
- legend: { position: "top", labels: { color: "#FFA116", font: { size: 12 } } },
+ legend: {
+ position: "top",
+ labels: { color: "#FFA116", font: { size: 12 } },
+ },
title: {
display: true,
text: "LeetCode Contest Rating Over Time",
@@ -123,7 +139,7 @@ export default function LeetCode({ platforms = {} }) {
return (
<>
-
+
@@ -138,7 +154,8 @@ export default function LeetCode({ platforms = {} }) {
{profile?.ranking && (
- Global Rank: #{profile.ranking}
+ Global Rank:{" "}
+ #{profile.ranking}
)}
{badges.length > 0 && (
@@ -146,7 +163,11 @@ export default function LeetCode({ platforms = {} }) {
{badges.map(({ id, icon, displayName }) => (
-
Problems Solved
+
+ Problems Solved
+
{stats.submitStatsGlobal?.map(({ difficulty, count }) => (
-
{difficulty}
-
{count}
+
+ {difficulty}
+
+
+ {count}
+
))}
-
Recent Submissions
+
+ Recent Submissions
+
{recentSubmissions.length > 0 ? (
- {recentSubmissions.slice(0, 3).map(({ id, title, timestamp }) => (
- -
- {title}
- {new Date(timestamp).toLocaleDateString()}
-
- ))}
+ {recentSubmissions
+ .slice(0, 3)
+ .map(({ id, title, timestamp }) => (
+ -
+ {title}
+
+ {new Date(timestamp).toLocaleDateString()}
+
+
+ ))}
) : (
-
No recent activity found.
+
+ No recent activity found.
+
)}
-
Submission Heatmap
+
+ Submission Heatmap
+
({
- "data-tip": value ? `Submissions: ${value.count}` : "No submissions",
+ "data-tip": value
+ ? `Submissions: ${value.count}`
+ : "No submissions",
})}
/>
-
Contest Stats
+
+ Contest Stats
+
{contestRating?.badge && (
-
- { contestRating.badge.icon != "/default_icon.png" &&

}
+ {contestRating.badge.icon != "/default_icon.png" && (
+

+ )}
{contestRating.badge.name}
- {contestRating.badge.expired &&
(Expired)}
+ {contestRating.badge.expired && (
+
(Expired)
+ )}
)}
Contests: {contestRating?.attendedContestsCount ?? "N/A"}
Rating: {contestRating?.rating ?? "N/A"}
-
Rank: {contestRating?.globalRanking ? `#${contestRating.globalRanking}` : "N/A"}
+
+ Rank:{" "}
+ {contestRating?.globalRanking
+ ? `#${contestRating.globalRanking}`
+ : "N/A"}
+
Top %:{" "}
- {contestRating?.topPercentage ? `${contestRating.topPercentage.toFixed(2)}%` : "N/A"}
+ {contestRating?.topPercentage
+ ? `${contestRating.topPercentage.toFixed(2)}%`
+ : "N/A"}
-
Contest Rating History
+
+ Contest Rating History
+
{attendedContests.length > 0 ? (