150 changes: 150 additions & 0 deletions .gitignore
@@ -0,0 +1,150 @@
# Created by .ignore support plugin (hsz.mobi)
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other info into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IPython Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# dotenv
.env

# virtualenv
venv/
ENV/

# Spyder project settings
.spyderproject

# Rope project settings
.ropeproject
### VirtualEnv template
# Virtualenv
# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
[Bb]in
[Ii]nclude
[Ll]ib
[Ll]ib64
[Ll]ocal
[Ss]cripts
pyvenv.cfg
.venv
pip-selfcheck.json
### JetBrains template
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839

# User-specific stuff:
.idea/workspace.xml
.idea/tasks.xml
.idea/dictionaries
.idea/vcs.xml
.idea/jsLibraryMappings.xml

# Sensitive or high-churn files:
.idea/dataSources.ids
.idea/dataSources.xml
.idea/dataSources.local.xml
.idea/sqlDataSources.xml
.idea/dynamic.xml
.idea/uiDesigner.xml

# Gradle:
.idea/gradle.xml
.idea/libraries

# Mongo Explorer plugin:
.idea/mongoSettings.xml

.idea/

## File-based project format:
*.iws

## Plugin-specific files:

# IntelliJ
/out/

# mpeltonen/sbt-idea plugin
.idea_modules/

# JIRA plugin
atlassian-ide-plugin.xml

# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
25 changes: 25 additions & 0 deletions cpuasinh.py
@@ -0,0 +1,25 @@
from hashlib import md5
from random import choice
import concurrent.futures
import time


def miner(n):
    # n is unused; it only lets executor.map hand one task to each worker.
    # Keep hashing random 50-digit strings until the md5 digest ends in "00000".
    while True:
        s = "".join(choice("0123456789") for _ in range(50))
        h = md5(s.encode('utf8')).hexdigest()
        if h.endswith("00000"):
            return s + ' ' + h


def main():
    # Run four independent searches in a pool of 10 worker processes.
    with concurrent.futures.ProcessPoolExecutor(10) as executor:
        for chain in executor.map(miner, range(4)):
            print(chain)


if __name__ == '__main__':
    start_time = time.time()
    main()
    end_time = time.time()
    print(end_time - start_time)
11 changes: 11 additions & 0 deletions cpusinh.py
@@ -0,0 +1,11 @@
from hashlib import md5
from random import choice


# Synchronous baseline: hash random 50-digit strings until the md5 digest ends in "00000".
while True:
    s = "".join(choice("0123456789") for _ in range(50))
    h = md5(s.encode('utf8')).hexdigest()

    if h.endswith("00000"):
        print(s, h)
        break
Binary file added img.png
Binary file added img_1.png
Binary file added img_2.png
Binary file added img_3.png
Binary file added img_4.png
Binary file added img_5.png
Binary file added img_6.png
Binary file added img_7.png
25 changes: 25 additions & 0 deletions ioasinh.py
@@ -0,0 +1,25 @@
from urllib.request import Request, urlopen
import concurrent.futures


urls = open('res.txt', encoding='utf8').read().split('\n')


def viewer(url):
    # Request the page with a browser-like User-Agent and print the HTTP status code.
    try:
        request = Request(
            url,
            headers={'User-Agent': 'Mozilla/5.0 (Windows NT 9.0; Win65; x64; rv:97.0) Gecko/20105107 Firefox/92.0'},
        )
        resp = urlopen(request, timeout=5)
        code = resp.code
        print(code)
        resp.close()
    except Exception as e:
        print(url, e)


# Fetch every URL concurrently with 50 worker threads.
with concurrent.futures.ThreadPoolExecutor(50) as executor:
    future_to_url = {executor.submit(viewer, url=url): url for url in urls}
    for future in concurrent.futures.as_completed(future_to_url):
        future.result(60)
17 changes: 17 additions & 0 deletions iosinh.py
@@ -0,0 +1,17 @@
from urllib.request import Request, urlopen

links = open('res.txt', encoding='utf8').read().split('\n')[0:101]

# Synchronous baseline: request each URL in turn and print its HTTP status code.
for url in links:
    try:
        request = Request(
            url,
            headers={'User-Agent': 'Mozilla/5.0 (Windows NT 9.0; Win65; x64; rv:97.0) Gecko/20105107 Firefox/92.0'},
        )
        resp = urlopen(request, timeout=5)
        code = resp.code
        print(code)
        resp.close()
    except Exception as e:
        print(url, e)
18 changes: 18 additions & 0 deletions links_parser.py
@@ -0,0 +1,18 @@
from urllib.request import urlopen
from bs4 import BeautifulSoup
from tqdm import tqdm

# Russian Wikipedia's random-article page (Служебная:Случайная_страница); each request returns a different article.
url = 'https://ru.wikipedia.org/wiki/%D0%A1%D0%BB%D1%83%D0%B6%D0%B5%D0%B1%D0%BD%D0%B0%D1%8F:%D0%A1%D0%BB%D1%83%D1%87%D0%B0%D0%B9%D0%BD%D0%B0%D1%8F_%D1%81%D1%82%D1%80%D0%B0%D0%BD%D0%B8%D1%86%D0%B0'

res = open('res.txt', 'w', encoding='utf8')

for i in tqdm(range(100)):
    html = urlopen(url).read().decode('utf8')
    soup = BeautifulSoup(html, 'html.parser')
    links = soup.find_all('a')

    # Keep only external (non-wiki) absolute links.
    for l in links:
        href = l.get('href')
        if href and href.startswith('http') and 'wiki' not in href:
            print(href, file=res)

res.close()
18 changes: 18 additions & 0 deletions report.md
@@ -0,0 +1,18 @@
io: synchronous pass
![img.png](img.png)
asynchronous, 5 workers
![img_5.png](img_5.png)
asynchronous, 10 workers
![img_6.png](img_6.png)
asynchronous, 50 workers
![img_7.png](img_7.png)
As the number of workers increases, the program's running time decreases (see the timing sketch below).
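A minimal sketch of how these IO timings could be reproduced, assuming `res.txt` produced by `links_parser.py` is present; `fetch` is a hypothetical stand-in for `viewer()` from `ioasinh.py`, and the loop over pool sizes mirrors the worker counts in the screenshots rather than anything in the submitted scripts:

```python
import time
import concurrent.futures
from urllib.request import Request, urlopen


def fetch(url):
    # Hypothetical helper mirroring viewer() from ioasinh.py: return the status code or the exception.
    try:
        req = Request(url, headers={'User-Agent': 'Mozilla/5.0'})
        with urlopen(req, timeout=5) as resp:
            return resp.code
    except Exception as exc:
        return exc


urls = [u for u in open('res.txt', encoding='utf8').read().split('\n') if u]

# Time the same workload with each pool size used in the report.
for workers in (5, 10, 50):
    start = time.time()
    with concurrent.futures.ThreadPoolExecutor(workers) as executor:
        list(executor.map(fetch, urls))
    print(workers, 'workers:', round(time.time() - start, 2), 's')
```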
cpu: synchronous search
![img_1.png](img_1.png)
asynchronous, 5 workers
![img_2.png](img_2.png)
asynchronous, 10 workers
![img_4.png](img_4.png)
asynchronous, 50 workers
![img_3.png](img_3.png)
At first I noticed that increasing the number of workers made the program run longer. After further trials, however, I concluded that the running time sometimes depends on the random generation itself rather than on the worker count.
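Because each search stops after a random number of hash attempts, a single run is a noisy measurement. A minimal sketch of averaging several runs per pool size; `timed_run`, the repetition count, and the worker counts are illustrative additions, not part of the submitted scripts:

```python
import time
import statistics
import concurrent.futures
from hashlib import md5
from random import choice


def miner(_):
    # Same search as cpuasinh.py: find a 50-digit string whose md5 digest ends in "00000".
    while True:
        s = ''.join(choice('0123456789') for _ in range(50))
        if md5(s.encode('utf8')).hexdigest().endswith('00000'):
            return s


def timed_run(workers, tasks=4):
    # Run the same four searches in a pool of the given size and return the elapsed time.
    start = time.time()
    with concurrent.futures.ProcessPoolExecutor(workers) as executor:
        list(executor.map(miner, range(tasks)))
    return time.time() - start


if __name__ == '__main__':
    for workers in (5, 10, 50):
        runs = [timed_run(workers) for _ in range(5)]
        print(workers, 'workers: mean', round(statistics.mean(runs), 2), 's,',
              'stdev', round(statistics.stdev(runs), 2), 's')
```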