Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- from concurrent.futures import ThreadPoolExecutor
- from concurrent.futures import as_completed
- from multiprocessing import Pool
- import multiprocessing
- import json
- """
- DO NOT USE THIS CODE IT DOES NOT WORK
- In my testing I was not able to consistently return data
- from threaded child processes. If you have information that
- might help to make this work please comment!
- """
def metric_1_cmd_runner(ssh_conn_object, commands):
    """Run *commands* on an open SSH connection and return the raw output.

    Intended to be submitted to a ThreadPoolExecutor, one call per
    connection. Output parsing is still a TODO.
    """
    raw_output = ssh_conn_object.send_command(commands)
    # TODO: parse raw_output into structured metric-1 data before returning.
    return raw_output
def metric_2_cmd_runner(ssh_conn_object, commands):
    """Run *commands* on an open SSH connection and return the raw output.

    Thread-pool worker for the second metric; parsing of the device
    output is still a TODO.
    """
    raw_output = ssh_conn_object.send_command(commands)
    # TODO: parse raw_output into structured metric-2 data before returning.
    return raw_output
def get_first_metric(ipaddress, commands):
    """Fan out metric-1 commands across threads and return their outputs.

    Args:
        ipaddress: device address used to build the SSH session(s).
        commands: commands to run (currently unused until the SSH
            session-building TODO below is filled in).

    Returns:
        list: one raw-output entry per submitted future, in completion
        order. Empty while ``executor_data`` is still the placeholder.
    """
    # TODO: create the SSH session(s) for *ipaddress* here and populate
    # executor_data as {ssh_conn_object: commands_to_run}.
    executor_data = {}
    results = []
    with ThreadPoolExecutor(max_workers=100) as executor:
        futures = [
            executor.submit(metric_1_cmd_runner, conn, conn_commands)
            for conn, conn_commands in executor_data.items()
        ]
        # Bug fix: the original dropped the futures and returned None, so
        # no data ever came back from the threads. Collect and return them.
        for future in as_completed(futures):
            results.append(future.result())
    return results
def get_second_metric(ipaddress, commands):
    """Fan out metric-2 commands across threads and return their outputs.

    Args:
        ipaddress: device address used to build the SSH session(s).
        commands: commands to run (currently unused until the SSH
            session-building TODO below is filled in).

    Returns:
        list: one raw-output entry per submitted future, in completion
        order. Empty while ``executor_data`` is still the placeholder.
    """
    # TODO: create the SSH session(s) for *ipaddress* here and populate
    # executor_data as {ssh_conn_object: commands_to_run}.
    executor_data = {}
    results = []
    with ThreadPoolExecutor(max_workers=100) as executor:
        futures = [
            executor.submit(metric_2_cmd_runner, conn, conn_commands)
            for conn, conn_commands in executor_data.items()
        ]
        # Bug fix: the original dropped the futures and returned None, so
        # no data ever came back from the threads. Collect and return them.
        for future in as_completed(futures):
            results.append(future.result())
    return results
def worker(ipaddress, command_data, MAINPROC_QUEUE):
    """Collect both metrics for one device and push them onto the queue.

    Runs in its own process (spawned by main()); fans out to a 2-process
    pool so both metric collectors run in parallel.

    Args:
        ipaddress: device address handed to the metric collectors.
        command_data: per-device command tree (currently unused — the
            show-command lists below are placeholders; TODO confirm).
        MAINPROC_QUEUE: multiprocessing.Queue the parent drains; receives
            a single ``[mt1_results, mt2_results]`` entry.
    """
    first_metric_show_commands = []
    second_metric_show_commands = []
    # Bug fix: use the pool as a context manager so it is always cleaned
    # up even if apply_async()/get() raises (the original leaked the pool
    # on any exception). Also fixes the 'restults' typo.
    with Pool(processes=2) as metric_pool:
        first_metric_results = metric_pool.apply_async(
            get_first_metric,
            args=(ipaddress, first_metric_show_commands),
        )
        second_metric_results = metric_pool.apply_async(
            get_second_metric,
            args=(ipaddress, second_metric_show_commands),
        )
        # .get() blocks until each task finishes, re-raising any exception
        # from the child in this process.
        mt1_results = first_metric_results.get()
        mt2_results = second_metric_results.get()
    MAINPROC_QUEUE.put([mt1_results, mt2_results])
def main():
    """Spawn one worker process per device, gather results, print them.

    Results arrive on a shared queue as ``[mt1_results, mt2_results]``
    lists, one per device, and are dumped as indented JSON.
    """
    MAINPROC_QUEUE = multiprocessing.Queue()
    # TODO: open the data file and build {ipaddress: command_tree} here.
    ip_node_association = {}
    main_proc = []
    QUEUE_RESULTS = []
    for ipaddress, tree in ip_node_association.items():
        proc = multiprocessing.Process(
            target=worker,
            name=str(ipaddress),
            args=(ipaddress, tree, MAINPROC_QUEUE),
        )
        # Must be non-daemonic: daemonic processes cannot spawn the Pool
        # children that worker() creates.
        proc.daemon = False
        proc.start()
        main_proc.append(proc)
    # Drain one queue entry per started process *before* joining; a child
    # blocked on a full queue would otherwise deadlock the join.
    for _ in main_proc:
        QUEUE_RESULTS.append(MAINPROC_QUEUE.get())
    # Bug fix: the original joined CMTS_MAIN_PROCESSES (the *last* process
    # created) on every iteration instead of joining each process.
    for proc in main_proc:
        proc.join()
    for proc in main_proc:
        print(proc.name, proc.exitcode)
    for result in QUEUE_RESULTS:
        print(json.dumps(result, indent=2))
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement