Does the following code gracefully kill a list of processes?

import multiprocessing
import schedule

def worker():
    # do some stuff
    pass

def sched(argv):
    schedule.every(0.01).minutes.do(worker)
    while True:
        schedule.run_pending()

processes = []
..
..
p = multiprocessing.Process(target=sched, args=(argv,))
..
..
processes.append(p)

for p in processes:
    p.terminate()
If not, what is the simplest way to do it?
The goal is to reload the configuration file into memory, so I would like to kill all child processes and create new ones in their place; the new processes will read the new config file.
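A quick way to see that terminate() is not graceful: it kills the child outright (with SIGTERM on Unix), so finally clauses and exit handlers in the child never run. A small self-contained demonstration (the worker body here is made up for illustration):

import multiprocessing
import time

def worker():
    try:
        while True:
            time.sleep(1)
    finally:
        # Never reached: terminate() kills the process outright,
        # so finally clauses and exit handlers do not run.
        print("cleaning up")

if __name__ == '__main__':
    p = multiprocessing.Process(target=worker)
    p.start()
    time.sleep(1)
    p.terminate()
    p.join()
    print("exit code: %s" % p.exitcode)  # -15 on Unix: killed by SIGTERM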
Edit : Added more code to show that I am running a while True loop.
Edit : This is the new code, after @dano's suggestion:
def get_config(self):
    from ConfigParser import SafeConfigParser
    ..
    return argv

def sched(self, args, event):
    # schedule instruction
    schedule.every(0.01).minutes.do(self.worker, args)
    while not event.is_set():
        schedule.run_pending()

def dispatch_processs(self, conf):
    processs = []
    event = multiprocessing.Event()
    for conf in self.get_config():
        process = multiprocessing.Process(target=self.sched,
                                          args=tuple(conf),
                                          kwargs={'event': event})
        processs.append((process, event))
    return processs

def start_process(self, process):
    process.start()

def gracefull_process(self, process):
    process.join()

def main(self):
    while True:
        processs = self.dispatch_processs(self.get_config())
        print("%s processes running" % len(processs))
        for process, event in processs:
            self.start_process(process)
            time.sleep(1)
            event.set()
            self.gracefull_process(process)
The good thing about this code is that I can edit the config file and the processes will reload their config as well. The problem is that only the first process runs; the others are ignored.
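As far as I can tell, the reason is that dispatch_processs creates a single Event outside the loop, so every process shares the same one; as soon as main sets it for the first process, every subsequent process sees it already set and exits immediately. A sketch of one possible fix, keeping the same method names: create one Event per process, start them all, and only then signal and join them:

def dispatch_processs(self, conf):
    processs = []
    for conf in self.get_config():
        event = multiprocessing.Event()  # one Event per process
        process = multiprocessing.Process(target=self.sched,
                                          args=tuple(conf),
                                          kwargs={'event': event})
        processs.append((process, event))
    return processs

def main(self):
    while True:
        processs = self.dispatch_processs(self.get_config())
        for process, _ in processs:   # start everything first
            process.start()
        time.sleep(1)
        for _, event in processs:     # then tell everything to stop
            event.set()
        for process, _ in processs:   # and wait for them to exit
            process.join()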
Edit : This saved my life. Working with a while True loop in sched() is not a good idea, so I set up a refresh_time instead:
def sched(self, args, event):
    schedule.every(0.01).minutes.do(self.worker, args)
    for i in range(refresh_time):
        schedule.run_pending()
        time.sleep(1)

def start_processs(self, processs):
    for p, event in processs:
        if not p.is_alive():
            p.start()
    time.sleep(1)
    event.set()
    self.gracefull_processs(processs)

def gracefull_processs(self, processs):
    for p, event in processs:
        p.join()
    processs = self.dispatch_processs(self.get_config())
    self.start_processs(processs)

def main(self):
    while True:
        processs = self.dispatch_processs(self.get_config())
        self.start_processs(processs)
        break
    print("Reloading function main")
    self.main()
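Note that start_processs and gracefull_processs call each other on every reload, and main also calls itself, so each reload deepens the call stack and will eventually hit Python's recursion limit. Since sched() now exits on its own after refresh_time seconds, a plain loop avoids this; a sketch keeping the same method names:

def main(self):
    while True:
        processs = self.dispatch_processs(self.get_config())
        for p, _ in processs:
            p.start()
        # Each sched() returns by itself after refresh_time seconds,
        # so joining is all that's needed; no recursion required.
        for p, _ in processs:
            p.join()
        print("Reloading function main")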
If you don't mind only aborting after worker has completed all of its work, it's very simple to add a multiprocessing.Event to handle exiting gracefully:
import multiprocessing
import schedule

def worker():
    # do some stuff
    pass

def sched(argv, event=None):
    schedule.every(0.01).minutes.do(worker)
    while not event.is_set():  # Run until we're told to shut down.
        schedule.run_pending()

processes = []
..
..
event = multiprocessing.Event()
p = multiprocessing.Process(target=sched, args=(argv,), kwargs={'event': event})
..
..
processes.append((p, event))

# Tell all processes to shut down
for _, event in processes:
    event.set()

# Now actually wait for them to shut down
for p, _ in processes:
    p.join()
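For reference, here is a minimal self-contained version of this pattern; the worker body, job names, and timings are made up for illustration:

import multiprocessing
import time

import schedule

def worker(name):
    # Hypothetical job body: just report that we ran.
    print("worker %s ran" % name)

def sched(name, event=None):
    schedule.every(0.01).minutes.do(worker, name)
    while not event.is_set():  # Run until we're told to shut down.
        schedule.run_pending()
        time.sleep(0.1)        # Avoid spinning at 100% CPU.

if __name__ == '__main__':
    processes = []
    for name in ('a', 'b', 'c'):  # hypothetical list of jobs
        event = multiprocessing.Event()
        p = multiprocessing.Process(target=sched, args=(name,),
                                    kwargs={'event': event})
        p.start()
        processes.append((p, event))

    time.sleep(3)  # Let the workers run for a bit.

    # Tell all processes to shut down
    for _, event in processes:
        event.set()

    # Now actually wait for them to shut down
    for p, _ in processes:
        p.join()

The short sleep between run_pending() calls also keeps each scheduler loop from pinning a CPU core, which is the same issue the refresh_time edit above works around.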