main_cluster_minimalist.py
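# Minimalist cluster driver: read (or generate) a species interaction
# network, run the spatial ecosystem simulation for ITERATIONS steps,
# and write the network, spatial state, per-species summary and
# population time series for a single PBS array-job replicate.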
import os
from datetime import datetime
import shutil
import networkx as nx
import csv
import math
import numpy as np
from copy import copy
from web import Network
from network_creator import obtain_interactions_network
from ecosystem import Ecosystem
from utilities import get_out_row, get_eco_state_row, NetStats, EcosystemStats, write_spatial_analysis, write_spatial_state, write_adjacency_matrix
from configure import ITERATIONS, HABITAT_LOSS, HABITAT_LOSS_ITER, INVASION, INVASION_ITER, NETWORK_RESET, SPATIAL_VARIATION
from configure import REFRESH_RATE, REMOVAL_LEVEL, REMOVAL_FRACTION, EXTINCTION_EVENT, TIME_WINDOW
from configure import SRC_NET_FILE, READ_FILE_NETWORK, NETWORK_RECORD, ITERATIONS_TO_RECORD, INT_STRENGTHS, RECORD_SPATIAL_VAR
if __name__ == '__main__':
    start_sim = datetime.now()

    # These modifications are performed so that different replicates can
    # be run on the cluster. On an SGE cluster use the equivalent
    # environment variables instead:
    # job = os.environ['JOB_ID']
    # task = os.environ['SGE_TASK_ID']
    job = os.environ['PBS_JOBID'][0:7]
    task = os.environ['PBS_ARRAYID']
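    # Example (hypothetical) array-job submission for ten replicates on a
    # Torque/PBS cluster; the submission script name is an assumption:
    #   qsub -t 1-10 run_simulation.sh
    # PBS_JOBID typically looks like '1234567.server', so its first seven
    # characters identify the job; PBS_ARRAYID is the replicate index.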
    # Each replicate writes into its own job/task-specific directory.
    output_dir = './' + job + '_' + task
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    if os.path.isfile('./output/' + SRC_NET_FILE):
        shutil.copy('./output/' + SRC_NET_FILE, output_dir)
    # Alternative: network_file = output_dir + '/' + SRC_NET_FILE
    network_file = output_dir + '/new_network_%d.graphml' % (int(task) - 1)

    if READ_FILE_NETWORK:
        graph = nx.read_graphml(network_file)
        net = Network(graph)
        print('connectance =', net.connectance())
        tls = net.get_trophic_levels()
        top, top_preds = net.top_predators()
        basal, basal_sps = net.basal()
        # Remove direct links from basal species to top predators sitting
        # at trophic level 3, then report the new connectance.
        for u, v in list(net.edges()):
            if u in basal_sps and v in top_preds and tls[v] == 3:
                net.remove_edge(u, v)
        print('new connectance =', net.connectance())
    else:
        net = obtain_interactions_network()
        net_to_save = net.copy()
        nx.write_graphml(net_to_save, network_file)
    ecosystem = Ecosystem(net, drawing=False)
    ecosystem.initialise_world(True)
    # here it works out the initial populations
    series_counts = dict()
    dict_stats = get_eco_state_row(0, ecosystem)
    series_counts[0] = ecosystem.populations
    # we don't use this to store data, just for its keys!
    cumulative_sps_stats = dict.fromkeys(net.nodes(), None)
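    # Main simulation loop: advance the world one step per iteration,
    # record the population counts, and every 1000 iterations (and at the
    # end) dump the realised adjacency matrix and the spatial state.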
    for i in range(1, ITERATIONS + 1):
        print(i)
        ecosystem.update_world()
        dict_stats = get_eco_state_row(i, ecosystem)
        series_counts[i] = ecosystem.populations
        if HABITAT_LOSS and i == HABITAT_LOSS_ITER:
            ecosystem.apply_habitat_loss()
        if i % 1000 == 0 or i == ITERATIONS:
            net_temp = ecosystem.realised_net.copy()
            series = copy(series_counts)
            write_adjacency_matrix(i, NETWORK_RECORD, series, net_temp, output_dir)
            write_spatial_state(ecosystem, i, output_dir)
    ### OUTPUT:
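    # Per-species summary CSV: initial trophic level, mutualism flags, and
    # initial/final abundances. 'immigrants', 'born' and 'dead' are never
    # filled in here; csv.DictWriter writes missing keys as '' (its
    # restval default), so those columns are left empty.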
    header_names = ['species', 'init_tl', 'mutualist', 'mutualistic_producer',
                    'individuals_init', 'immigrants', 'born', 'dead',
                    'individuals_final']
    file_species = open(output_dir + '/output_species.csv', 'w')
    out_species = csv.DictWriter(file_species, header_names)
    out_species.writeheader()
    out_row_species = dict()
    init_tls = net.get_trophic_levels()  # computed once, not per species
    for sp in sorted(cumulative_sps_stats.keys()):
        out_row_species['species'] = sp
        out_row_species['init_tl'] = init_tls[sp]
        out_row_species['mutualist'] = net.node[sp]['mut']
        out_row_species['mutualistic_producer'] = net.node[sp]['mut_prod']
        out_row_species['individuals_init'] = series_counts[0][sp]
        out_row_species['individuals_final'] = series_counts[ITERATIONS][sp]
        out_species.writerow(out_row_species)
    file_species.close()
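    # output_pops.csv: one row per iteration (including t = 0) and one
    # column per species, assuming species are labelled '1'..'S' so that
    # int(sp) - 1 is a valid column index.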
    pops = np.zeros((ITERATIONS + 1, net.number_of_nodes()))
    for i in range(0, ITERATIONS + 1):
        for sp in sorted(cumulative_sps_stats.keys()):
            pops[i, int(sp) - 1] = series_counts[i][sp]
    np.savetxt(output_dir + '/output_pops.csv', pops, delimiter=',')
    stop_sim = datetime.now()
    elapsed_sim = stop_sim - start_sim
    print('time for simulation:', elapsed_sim)