
Commit

fix: update rate limiter
AKharytonchyk committed Nov 7, 2024
1 parent 77f8a47 commit ca55a69
Showing 2 changed files with 31 additions and 40 deletions.
2 changes: 1 addition & 1 deletion src/App.tsx
@@ -55,7 +55,7 @@ function App() {
     if (localStorage.getItem("token")) {
       setOctokit(
         new GitService(
-          (import.meta as any).env.VITE_APP_GITHUB_API_URL || "https://api.github.com/",
+          (import.meta as any).env.VITE_GITHUB_API_URL || "https://api.github.com",
           localStorage.getItem("token") || ""
         )
       );
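For reference, the renamed variable would be read from Vite's env config; a minimal .env entry matching the new name might look like the line below (the value shown is an assumption for illustration, not part of this commit):

VITE_GITHUB_API_URL=https://api.github.com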
69 changes: 30 additions & 39 deletions src/utils/RateLimiterQueue.ts
@@ -1,74 +1,65 @@
 class RateLimiterQueue {
-  private queue: any[];
-  private delay: number;
-  private isProcessing: boolean;
-  private loggingInterval: NodeJS.Timeout | null;
+  private queue: Array<() => Promise<void>>;
+  private maxRequestsPerMinute: number;
+  private activeRequests: number;
+  private requestTimestamps: number[];
 
   constructor(maxRequestsPerMinute: number) {
     this.queue = [];
-    this.delay = 60000 / maxRequestsPerMinute;
-    this.isProcessing = false;
+    this.maxRequestsPerMinute = maxRequestsPerMinute;
+    this.activeRequests = 0;
+    this.requestTimestamps = [];
 
-    this.loggingInterval = setInterval(() => {
-      const statusMessage = this.isProcessing
-        ? "Reached rate limit. Waiting to process next request."
-        : "Currently processing requests.";
-      console.log(`Queue size: ${this.queue.length}. Rate limit: ${maxRequestsPerMinute}. Estimated time: ${this.queue.length / maxRequestsPerMinute} minutes. ${statusMessage}`);
+    setInterval(() => {
+      this.cleanupOldTimestamps();
     }, 60000);
   }
 
-  async enqueue<T>(requestFunction: () => Promise<T>, force = false): Promise<T> {
+  async enqueue<T>(requestFunction: () => Promise<T>): Promise<T> {
     return new Promise((resolve, reject) => {
-      const request = async () => {
+      this.queue.push(async () => {
         try {
+          this.activeRequests++;
           const result = await requestFunction();
           resolve(result);
         } catch (error) {
           reject(error);
+        } finally {
+          this.activeRequests--;
+          this.requestTimestamps.push(Date.now());
+          this.processQueue();
         }
-      }
-
-      if(force) this.queue.unshift(request);
-      else this.queue.push(request);
-
-      if (!this.isProcessing) {
-        this.processQueue();
-      }
+      });
+      this.processQueue();
     });
   }
 
   private async processQueue() {
-    this.isProcessing = true;
-    while (this.queue.length > 0) {
-      console.debug(`Processing request. Queue size before: ${this.queue.length}`);
+    this.cleanupOldTimestamps();
+
+    while (this.queue.length > 0 && this.canProcessMoreRequests()) {
       const requestFunction = this.queue.shift();
       if (requestFunction) {
-        try {
-          await Promise.all([requestFunction(), this.sleep(this.delay)]);
-          console.debug(`Request processed. Queue size after: ${this.queue.length}`);
-        } catch (error) {
+        requestFunction().catch((error) => {
           console.error("Error processing request:", error);
-        }
+        });
       }
     }
-    this.isProcessing = false;
   }
 
-  private sleep(ms: number) {
-    return new Promise((resolve) => setTimeout(resolve, ms));
+  private canProcessMoreRequests(): boolean {
+    return this.requestTimestamps.length < this.maxRequestsPerMinute;
   }
 
+  private cleanupOldTimestamps() {
+    const oneMinuteAgo = Date.now() - 60000;
+    this.requestTimestamps = this.requestTimestamps.filter(timestamp => timestamp > oneMinuteAgo);
+  }
+
   async processAll<T>(requestFunctions: Array<() => Promise<T>>): Promise<T[]> {
     const results = requestFunctions.map((requestFunction) => this.enqueue(requestFunction));
     return Promise.all(results);
   }
-
-  destroy() {
-    if (this.loggingInterval) {
-      clearInterval(this.loggingInterval);
-      this.loggingInterval = null;
-    }
-  }
 }
 
 const rateLimiter = new RateLimiterQueue(
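For illustration only, a usage sketch of the reworked queue (not part of the commit): the limit of 60 requests per minute, the token handling, and the endpoints below are assumptions, since the actual constructor argument is truncated above.

// Hypothetical usage of the updated RateLimiterQueue (sketch only; values are assumed).
const limiter = new RateLimiterQueue(60); // assumed limit; the real argument is cut off above

async function loadUser(token: string) {
  // enqueue defers the call until fewer than 60 requests have completed in the trailing minute.
  return limiter.enqueue(() =>
    fetch("https://api.github.com/user", {
      headers: { Authorization: `Bearer ${token}` },
    }).then((res) => res.json())
  );
}

async function loadRepos(owners: string[], token: string) {
  // processAll enqueues a batch and resolves once every request succeeds (rejects on the first failure).
  return limiter.processAll(
    owners.map((owner) => () =>
      fetch(`https://api.github.com/users/${owner}/repos`, {
        headers: { Authorization: `Bearer ${token}` },
      }).then((res) => res.json())
    )
  );
}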
