Don't shuffle proxies before checking
monosans committed Jan 25, 2024
1 parent 3059940 commit 890c000
Showing 2 changed files with 24 additions and 24 deletions.
23 changes: 11 additions & 12 deletions proxy_scraper_checker/checker.py
@@ -2,7 +2,6 @@
 
 import asyncio
 import logging
-from random import shuffle
 from typing import Mapping
 
 from aiohttp_socks import ProxyType
@@ -53,15 +52,15 @@ async def check_all(
         for proto in sort.PROTOCOL_ORDER
         if proto in storage.enabled_protocols
     }
-    coroutines = [
-        check_one(
-            progress=progress,
-            proxy=proxy,
-            settings=settings,
-            storage=storage,
-            task=tasks[proxy.protocol],
+    await asyncio.gather(
+        *(
+            check_one(
+                progress=progress,
+                proxy=proxy,
+                settings=settings,
+                storage=storage,
+                task=tasks[proxy.protocol],
+            )
+            for proxy in storage
         )
-        for proxy in storage
-    ]
-    shuffle(coroutines)
-    await asyncio.gather(*coroutines)
+    )
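
For reference, a minimal standalone sketch of the pattern change in checker.py, assuming a hypothetical check() coroutine and a plain list of proxy addresses in place of the project's check_one() and storage objects: before this commit the coroutines were collected into a list, shuffled, and then awaited; afterwards an unpacked generator expression is passed straight to asyncio.gather, so the coroutines are submitted in storage order and the random import is no longer needed.

import asyncio
import random


async def check(proxy: str) -> None:
    """Hypothetical stand-in for the project's check_one() coroutine."""
    await asyncio.sleep(0)


async def main() -> None:
    proxies = ["1.2.3.4:80", "5.6.7.8:1080"]  # hypothetical stand-in for storage

    # Old approach: materialize a list of coroutines, shuffle it, then gather.
    coroutines = [check(proxy) for proxy in proxies]
    random.shuffle(coroutines)
    await asyncio.gather(*coroutines)

    # New approach: gather an unpacked generator directly, keeping the
    # original iteration order and avoiding the extra list and shuffle.
    await asyncio.gather(*(check(proxy) for proxy in proxies))


asyncio.run(main())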
25 changes: 13 additions & 12 deletions proxy_scraper_checker/scraper.py
@@ -94,17 +94,18 @@ async def scrape_all(
         for proto, sources in settings.sources.items()
     }
     timeout = ClientTimeout(total=settings.source_timeout)
-    coroutines = (
-        scrape_one(
-            progress=progress,
-            proto=proto,
-            session=session,
-            source=source,
-            storage=storage,
-            task=tasks[proto],
-            timeout=timeout,
+    await asyncio.gather(
+        *(
+            scrape_one(
+                progress=progress,
+                proto=proto,
+                session=session,
+                source=source,
+                storage=storage,
+                task=tasks[proto],
+                timeout=timeout,
+            )
+            for proto, sources in settings.sources.items()
+            for source in sources
         )
-        for proto, sources in settings.sources.items()
-        for source in sources
     )
-    await asyncio.gather(*coroutines)
