app.py

from flask import Flask, request, jsonify
from src.utils.parse_pgn import parse_pgn
from src.utils.generate_report import generate_report
from src.utils.stockfish_pool import StockfishPool
import os
import asyncio
import concurrent.futures
import time

app = Flask(__name__)

THREADS = os.cpu_count() or 4  # Use number of CPU cores
DEPTH = 17  # Stockfish search depth per position

# Initialize the Stockfish pool shared by all worker threads
stockfish_pool = StockfishPool(
    pool_size=THREADS,
)
def process_fens_for_thread(start_idx, end_idx, thread_id, fen_list):
    """Analyze fen_list[start_idx:end_idx] on a single worker thread."""
    results = []
    try:
        for index in range(start_idx, end_idx):
            fen = fen_list[index]
            # Run a depth-DEPTH analysis of this position via the shared Stockfish pool
            move_analysis = asyncio.run(stockfish_pool.analyze_position(fen, DEPTH))
            print(move_analysis)
            results.append(move_analysis)
        return results
    except Exception as e:
        print(f"Thread {thread_id}: Error processing FENs: {e}")
        return []
@app.route('/review_game', methods=['POST'])
def review_game():
    pgn_data = (request.get_json(silent=True) or {}).get('pgn')
    if not pgn_data:
        return jsonify({"error": "No PGN data provided"}), 400

    try:
        # Step 1: Parse the PGN into a list of FEN strings, one per position
        fen_list = parse_pgn(pgn_data)

        # Single-threaded approach, kept for reference:
        # analysis_results = []
        # for fen in fen_list:
        #     engine_instance = asyncio.run(stockfish_pool.get_stockfish())
        #     analysis = asyncio.run(analyze_position(engine_instance, fen))
        #     analysis_results.append(analysis)

        # Step 2: Analyze the positions in parallel, one chunk of FENs per thread
        total_fens = len(fen_list)

        # Split the FEN indices into THREADS contiguous chunks
        chunk_size = total_fens // THREADS
        chunks = [(i * chunk_size, (i + 1) * chunk_size) for i in range(THREADS)]
        chunks[-1] = (chunks[-1][0], total_fens)  # Last chunk picks up the remainder

        start_time = time.time()

        # Run one worker per chunk. Results are keyed by chunk index so they can
        # be reassembled in board order, since as_completed() yields futures in
        # completion order, not submission order.
        with concurrent.futures.ThreadPoolExecutor(max_workers=THREADS) as executor:
            future_to_idx = {
                executor.submit(process_fens_for_thread, start, end, idx, fen_list): idx
                for idx, (start, end) in enumerate(chunks)
            }
            chunk_results = {}
            for future in concurrent.futures.as_completed(future_to_idx):
                chunk_results[future_to_idx[future]] = future.result()
        results = [analysis for idx in sorted(chunk_results) for analysis in chunk_results[idx]]

        print(f"Analyzed {len(results)} positions in {time.time() - start_time:.2f} seconds")

        # Step 3: Generate the report
        report = generate_report(pgn_data, results)

        # Combine the per-position analyses with the report
        result = {
            "positions": [
                {"fen": fen, "analysis": analysis}
                for fen, analysis in zip(fen_list, results)
            ],
            "report": report
        }
        return jsonify(result)
    except Exception as e:
        return jsonify({"error": str(e)}), 400

if __name__ == '__main__':
app.run(debug=True)
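
Example client call (a minimal sketch, not part of app.py: it assumes the server is running locally on Flask's default port 5000, that the requests package is installed, and that parse_pgn accepts a standard PGN string):

import requests

# Hypothetical PGN used only for illustration
pgn = '[Event "Casual Game"]\n\n1. e4 e5 2. Nf3 Nc6 3. Bb5 a6 *'

# POST the PGN to the /review_game endpoint defined above
response = requests.post(
    "http://127.0.0.1:5000/review_game",
    json={"pgn": pgn},
)
data = response.json()
print(data["report"])          # Output of generate_report
print(len(data["positions"]))  # One entry per analyzed FEN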