#!/usr/bin/env python
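"""
Prototype/benchmark sample: fan tasks out to a pool of worker processes,
each with its own input and result queue, and funnel every result back
through a single collector process.

Written against the Python 2, pre-release Ansible v2 module layout (hence
the old-style imports); expects an inventory file at /tmp/med_inventory.
"""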

import sys
import time
import Queue        # Python 2 stdlib name; used only for the Queue.Empty exception
import traceback
import multiprocessing

from ansible.inventory import Inventory
from ansible.inventory.host import Host
from ansible.playbook.play import Play
from ansible.playbook.play_context import PlayContext
from ansible.playbook.task import Task
from ansible.executor.task_executor import TaskExecutor
from ansible.executor.task_result import TaskResult
from ansible.parsing.dataloader import DataLoader
from ansible.vars import VariableManager

from ansible.utils.debug import debug

NUM_WORKERS = 20     # size of the worker process pool
NUM_HOSTS   = 1778   # used only by the commented-out synthetic-host loop below
NUM_TASKS   = 1      # tasks queued per host

def results(final_q, workers):
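   """Collector process: round-robin over every worker's result queue and
   funnel whatever turns up into final_q.
   """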
   cur_worker = 0
   def _read_worker_result(cur_worker):
      result = None
      starting_point = cur_worker
      while True:
         # remember which worker we are polling; cur_worker is advanced to
         # the next slot before we log, so logging it directly misreported
         # the worker index
         worker_idx = cur_worker
         (worker_prc, main_q, res_q) = workers[worker_idx]
         cur_worker += 1
         if cur_worker >= len(workers):
            cur_worker = 0

         try:
            if not res_q.empty():
               debug("worker %d has data to read" % worker_idx)
               result = res_q.get()
               debug("got a result from worker %d: %s" % (worker_idx, result))
               break
         except (IOError, EOFError):
            # the queue may be closed or broken mid-poll; skip this worker
            # and keep polling the rest
            pass

         if cur_worker == starting_point:
            break

      return (result, cur_worker)

   while True:
      result = None
      try:
         (result, cur_worker) = _read_worker_result(cur_worker)
         if result is None:
            time.sleep(0.01)
            continue
         final_q.put(result, block=False)
      except (IOError, EOFError, KeyboardInterrupt) as e:
         debug("got a breaking error: %s" % e)
         break
      except Exception as e:
         debug("EXCEPTION DURING RESULTS PROCESSING: %s" % e)
         traceback.print_exc()
         break

def worker(main_q, res_q, loader):
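   """Worker process: pull (host, task, task_vars, conn_info) tuples off
   main_q, run each through TaskExecutor, and push a TaskResult onto res_q.
   """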
   while True:
      task = None
      try:
         if not main_q.empty():
            (host, task, task_vars, conn_info) = main_q.get(block=False)
            executor_result = TaskExecutor(host, task, task_vars, conn_info, loader).run()
            debug("executor result: %s" % executor_result)
            task_result = TaskResult(host, task, executor_result)
            res_q.put(task_result)
         else:
            time.sleep(0.01)
      except Queue.Empty:
         pass
      except (IOError, EOFError, KeyboardInterrupt) as e:
         debug("got a breaking error: %s" % e)
         break
      except Exception as e:
         debug("EXCEPTION DURING WORKER PROCESSING: %s" % e)
         traceback.print_exc()
         break

loader = DataLoader()

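# spin up the worker pool; each worker gets its own input (main_q) and
# result (res_q) queue pair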
workers = []
for i in range(NUM_WORKERS):
   main_q = multiprocessing.Queue()
   res_q  = multiprocessing.Queue()
   worker_p = multiprocessing.Process(target=worker, args=(main_q, res_q, loader))
   worker_p.start()
   workers.append((worker_p, main_q, res_q))

# a single collector process funnels every worker's results into final_q
# (renamed from res_q to avoid confusion with the per-worker result queues)
final_q = multiprocessing.Queue()
res_p = multiprocessing.Process(target=results, args=(final_q, workers))
res_p.start()

def send_data(obj):
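   """Round-robin the given (host, task, task_vars, play_context) tuple onto
   the next worker's input queue and bump the pending-results counter.
   """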
   global cur_worker
   global workers
   global pending_results

   (w_proc, main_q, wrkr_q) = workers[cur_worker]
   cur_worker += 1
   if cur_worker >= len(workers):
      cur_worker = 0

   pending_results += 1
   main_q.put(obj, block=False)
 
def _process_pending_results():
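   """Drain any results waiting on the final queue, decrementing the
   pending-results counter for each one collected.
   """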
   global final_q
   global pending_results

   while not final_q.empty():
      try:
         result = final_q.get(block=False)
         debug("got final result: %s" % (result,))
         pending_results -= 1
      except Queue.Empty:
         pass

def _wait_on_pending_results():
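   """Block (polling) until every dispatched task has produced a result."""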
   global pending_results
   while pending_results > 0:
      debug("waiting for pending results (%d left)" % pending_results)
      _process_pending_results()
      time.sleep(0.01)


debug("starting")
cur_worker      = 0   # round-robin index of the next worker to receive work
pending_results = 0   # tasks dispatched but not yet collected


var_manager = VariableManager()

debug("loading inventory")
inventory = Inventory(host_list='/tmp/med_inventory', loader=loader, variable_manager=var_manager)
hosts = inventory.get_hosts()[:]
debug("done loading inventory")

play_context = PlayContext()
play_context.connection = 'local'

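# main dispatch loop: queue one debug task per host per pass, draining
# results opportunistically as we go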
for i in range(NUM_TASKS):
   #for j in range(NUM_HOSTS):
   for h in hosts:
      debug("queuing %s %d" % (h, i))
      #h = Host(name="host%06d" % j)
      t = Task().load(dict(name="task %d" % (i,), debug="msg='hello from %s, %d'" % (h, i)))
      #t = Task().load(dict(name="task %d" % (i,), ping=""))
      #task_vars = var_manager.get_vars(loader=loader, host=h, task=t)
      task_vars = dict()
      new_t = t.copy()
      new_t.post_validate(task_vars)
      # send the post-validated copy, not the original task
      send_data((h, new_t, task_vars, play_context))
      debug("done queuing %s %d" % (h, i))
      _process_pending_results()
   debug("waiting for the results to drain...")
   _wait_on_pending_results()

# tear down the collector and the worker pool
final_q.close()
res_p.terminate()

for (w_p, main_q, wrkr_q) in workers:
   main_q.close()
   wrkr_q.close()
   w_p.terminate()

debug("done")