-
Notifications
You must be signed in to change notification settings - Fork 0
/
injector.py
58 lines (47 loc) · 1.59 KB
/
injector.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
from config.log_config import create_log
from dotenv import load_dotenv
# import yaml
import json
import os
from service.job_service import JobHandler
load_dotenv()  # load environment variables from a local .env before anything reads them
# Initialize & Inject with only one instance
logger = create_log()  # single shared logger instance used by this module and injected below
# read host file to make a dict in memory
def read_hosts(server_file):
    """Parse a '|'-delimited hosts file into an in-memory dict.

    Each non-comment, non-blank line is split on '|'; the first field
    becomes the key and the value is the full field list with the third
    field (index 2) lowercased. Any line containing '#' is treated as a
    comment and skipped (original semantics preserved).

    :param server_file: path to the hosts file
    :return: dict mapping first field -> list of all fields on that line
    """
    server_list_dict = {}
    with open(server_file) as data_file:
        for raw in data_file:
            # NOTE: original behavior — '#' anywhere on the line marks a comment.
            if '#' in raw:
                continue
            fields = raw.strip().split("|")
            # Robustness fix: skip blank lines, which previously produced
            # a bogus {'': ['']} entry in the result.
            if fields == ['']:
                continue
            # Normalize only the third field (index 2) to lowercase.
            fields = [
                meta.lower() if index == 2 else meta
                for index, meta in enumerate(fields)
            ]
            server_list_dict[fields[0]] = fields
    return server_list_dict
# read sparkjob file to make a list in memory
def read_sparkjob(server_file):
    """Parse a '|'-delimited spark-job file into a list of field lists.

    Any line containing '#' is treated as a comment and skipped; every
    other line is stripped and split on '|'.

    :param server_file: path to the spark-job definition file
    :return: list of field lists, in file order
    """
    with open(server_file) as job_file:
        return [
            raw.strip().split("|")
            for raw in job_file
            if '#' not in raw
        ]
''' get all hosts '''
# hosts is a dict: {first-field: [all fields], ...} — see read_hosts().
hosts = read_hosts("./repository/hosts")
''' hosts keys e.g. {'localhost': [...], 'dev': [...], ...} '''
logger.info(list(hosts.keys()))
# es_hosts_enum_list =list(hosts.keys())
''' get sparkjob list '''
# BUG FIX: this originally called read_hosts() on the sparkjob file, which
# left read_sparkjob() dead code and bound sparkjob_list to a dict instead
# of the list of job records its name (and read_sparkjob) implies.
sparkjob_list = read_sparkjob("./repository/sparkjob_process")
logger.info(sparkjob_list)
# Single shared JobHandler instance, injected with the logger and parsed config.
JobHandlerInject = JobHandler(logger, hosts, sparkjob_list)