26 changes: 26 additions & 0 deletions CPU-bound.py
@@ -0,0 +1,26 @@
import concurrent.futures
from hashlib import md5
from random import choice
from time import time


def mine_coin(_):
    # Brute-force a "coin": a random 50-digit string whose MD5 hash ends in
    # five zeros. The argument is unused; it only lets the function be mapped
    # over a range of task indices.
    while True:
        s = "".join(choice("0123456789") for _ in range(50))
        h = md5(s.encode('utf8')).hexdigest()

        if h.endswith("00000"):
            return s + ',' + h


def main():
    # Mine three coins across a pool of worker processes.
    with concurrent.futures.ProcessPoolExecutor(max_workers=5) as executor:
        for answer in executor.map(mine_coin, range(3)):
            print(answer)


if __name__ == '__main__':
    time1 = time()
    main()
    time2 = time()
    print(time2 - time1)
115 changes: 115 additions & 0 deletions REPORT.md
@@ -0,0 +1,115 @@
# Parallelism and Asynchrony
## IO-bound: Checking links on Wikipedia pages
### Synchronously, in a single thread:

Execution time:

![image](/images/1.bmp)
Took about 22 minutes.

Task Manager:

![image](/images/2.bmp)
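
The single-threaded baseline itself does not appear in this diff; a minimal sketch of what it could have looked like, assuming the same `res.txt` file (one URL per line) that `wiki.py` reads, is:

```python
import urllib.request
from time import time

# Assumed input: res.txt with one URL per line, as in wiki.py.
with open('res.txt', encoding='utf8') as f:
    links = [line.strip() for line in f if line.strip()]

start = time()
for url in links:
    try:
        # Fetch each page one after another and print its HTTP status code.
        with urllib.request.urlopen(url, timeout=5) as conn:
            print(conn.status)
    except Exception as e:
        print('%r exception: %s' % (url, e))
print(time() - start)
```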


### With ThreadPoolExecutor:
* 5 workers

Execution time:

![image](/images/3.bmp)
About 5 minutes.

Task Manager:

![image](/images/4.bmp)

* 10 workers:

Execution time:

![image](/images/5.bmp)
About 4 minutes.

Task Manager:

![image](/images/6.bmp)

* 100 workers:

Execution time:

![image](/images/7.bmp)
About 3 minutes.

Task Manager:

![image](/images/8.bmp)

### Conclusion:
Memory usage differs noticeably from the single-threaded run, while between the different worker counts (5, 10, 100) the difference is barely visible. Using ThreadPoolExecutor reduced the execution time from roughly 22 minutes to 3-5 minutes.
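
The 5-, 10- and 100-worker runs differ only in `max_workers`. A hedged sketch of how such runs could be timed, reusing the same `ThreadPoolExecutor` pattern as `wiki.py` and taking the worker count as a hypothetical command-line argument, is:

```python
import sys
import urllib.request
import concurrent.futures
from time import time


def load_url(link, timeout=5):
    # Fetch the page and return its HTTP status code.
    with urllib.request.urlopen(link, timeout=timeout) as conn:
        return conn.status


if __name__ == '__main__':
    # Hypothetical convention: worker count as the first CLI argument, default 5.
    workers = int(sys.argv[1]) if len(sys.argv) > 1 else 5
    with open('res.txt', encoding='utf8') as f:
        links = [line.strip() for line in f if line.strip()]

    start = time()
    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
        future_to_url = {executor.submit(load_url, url): url for url in links}
        for future in concurrent.futures.as_completed(future_to_url):
            try:
                print(future.result())
            except Exception as e:
                print('%r exception: %s' % (future_to_url[future], e))
    print(time() - start)
```

Running it as, say, `python wiki_timed.py 10` (the file name is only illustrative) would reproduce the 10-worker measurement.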

## CPU-bound: Generating coins

* On 1 core:

Execution time:

![image](/images/9.bmp)
About 2 minutes.

Task Manager:

![image](/images/10.bmp)

* 2 workers:

Execution time:

![image](/images/11.bmp)
About 2 minutes.

Task Manager:

![image](/images/12.bmp)

* 4 workers:

Execution time:

![image](/images/13.bmp)
About 2 minutes.

Task Manager:

![image](/images/14.bmp)

* 5 workers:

Execution time:

![image](/images/15.bmp)
Less than a minute.

Task Manager:

![image](/images/16.bmp)

* 10 workers:

Execution time:

![image](/images/17.bmp)
About a minute.

Task Manager:

![image](/images/18.bmp)

* 100 workers:
* Failed with an error:

![image](/images/19.bmp)

### Conclusion:
On a single core the CPU load was light. Using ProcessPoolExecutor reduced the execution time (from about 2 minutes down to roughly a minute with 5-10 workers). As the number of workers grew, the CPU load increased slightly. The measurements carry some variance, since the system load was not consistent between runs.
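
The single-core baseline is likewise not included in this diff; a minimal sketch of a sequential equivalent of `CPU-bound.py`, assuming the same three-coin workload, is:

```python
from hashlib import md5
from random import choice
from time import time


def mine_coin():
    # Same search as CPU-bound.py: a random 50-digit string whose MD5 hash
    # ends in five zeros.
    while True:
        s = "".join(choice("0123456789") for _ in range(50))
        h = md5(s.encode('utf8')).hexdigest()
        if h.endswith("00000"):
            return s + ',' + h


if __name__ == '__main__':
    start = time()
    for _ in range(3):
        # Mine the coins one after another on a single core.
        print(mine_coin())
    print(time() - start)
```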
Binary file added images/1.bmp
Binary file not shown.
Binary file added images/10.bmp
Binary file not shown.
Binary file added images/11.bmp
Binary file not shown.
Binary file added images/12.bmp
Binary file not shown.
Binary file added images/13.bmp
Binary file not shown.
Binary file added images/14.bmp
Binary file not shown.
Binary file added images/15.bmp
Binary file not shown.
Binary file added images/16.bmp
Binary file not shown.
Binary file added images/17.bmp
Binary file not shown.
Binary file added images/18.bmp
Binary file not shown.
Binary file added images/19.bmp
Binary file not shown.
Binary file added images/2.bmp
Binary file not shown.
Binary file added images/3.bmp
Binary file not shown.
Binary file added images/4.bmp
Binary file not shown.
Binary file added images/5.bmp
Binary file not shown.
Binary file added images/6.bmp
Binary file not shown.
Binary file added images/7.bmp
Binary file not shown.
Binary file added images/8.bmp
Binary file not shown.
Binary file added images/9.bmp
Binary file not shown.
22 changes: 22 additions & 0 deletions wiki.py
@@ -0,0 +1,22 @@
import urllib.request
import concurrent.futures

# One URL per line, collected earlier; skip empty lines.
with open('res.txt', encoding='utf8') as f:
    links = [line.strip() for line in f if line.strip()]


def load_url(link, timeout):
    # Fetch the page and return its HTTP status code.
    with urllib.request.urlopen(link, timeout=timeout) as conn:
        return conn.status


with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
    # Submit every link, then print results as the downloads complete.
    future_to_url = {executor.submit(load_url, url, 5): url for url in links}
    for future in concurrent.futures.as_completed(future_to_url):
        url = future_to_url[future]
        try:
            data = future.result()
        except Exception as e:
            print('%r exception: %s' % (url, e))
        else:
            print(data)