I am using the Python multiprocessing library and I would like to reload a list of processes every x seconds.
In other words, when the configuration is modified, all processes should read it again.

        def get_config(self):
                # Re-read the configuration and return one argument list per
                # worker process.
                # NOTE(review): the body was elided in the original post ("..");
                # presumably it parses an INI file via SafeConfigParser and
                # builds `argv` from its sections — confirm against the full file.
                from ConfigParser import SafeConfigParser
        ..
                return argv

        def sched(self, args, event):
                """Run the scheduler loop until *event* is set.

                args  -- arguments forwarded to self.worker on every run.
                event -- multiprocessing.Event used to request shutdown.
                """
                # Schedule the worker every 0.01 minutes (~0.6 s).
                job = schedule.every(0.01).minutes.do(self.worker, args)
                while not event.is_set():
                        schedule.run_pending()
                        # Sleep briefly between polls; the original loop
                        # busy-spun at 100% CPU.
                        time.sleep(0.1)
                # Unregister the job so a reused scheduler does not keep it.
                schedule.cancel_job(job)

        def dispatch_processs(self, conf):
                """Build one Process per configuration entry.

                Returns a list of (process, event) pairs.  Each process gets
                its OWN Event so it can be stopped independently; the original
                shared a single Event, so setting it stopped every worker.

                Fixes vs. the posted version: the missing closing parenthesis
                on the append call, the mis-indented ``return`` (it sat outside
                the method), and the generator that was unpacked into ``args``
                (``sched`` expects a single ``args`` value plus ``event``).
                """
                processs = []
                # NOTE(review): the ``conf`` parameter was shadowed and unused
                # in the original; kept for interface compatibility.
                for args in self.get_config():
                        event = multiprocessing.Event()
                        process = multiprocessing.Process(
                                target=self.sched,
                                args=(args,),
                                kwargs={'event': event},
                        )
                        processs.append((process, event))
                return processs

        def start_process(self, process):
                """Launch *process* — thin wrapper around ``Process.start()``."""
                process.start()

        def gracefull_process(self, process):
                """Wait for *process* to finish — thin wrapper around ``join()``."""
                process.join()

        def main(self):
                """Repeatedly (re)build workers from the current configuration.

                Fix for the reported symptom ("only the first process is
                launched"): the original started, stopped and joined each
                process sequentially inside ONE loop, so a worker had to finish
                before the next one was even started.  Start them all first,
                then signal and join them all.
                """
                while True:
                        processs = self.dispatch_processs(self.get_config())
                        print ("%s processes running " % len(processs))

                        # Phase 1: start every worker so they run concurrently.
                        for process, event in processs:
                                self.start_process(process)

                        # Let the workers run before reloading the config.
                        time.sleep(1)

                        # Phase 2: ask every worker to stop, then wait for each.
                        for process, event in processs:
                                event.set()
                                self.gracefull_process(process)
This code runs, but only the first launched process behaves as expected (it can detect a new configuration); the other processes are never launched.

Recommended Answers

All 2 Replies

I would use a Manager dictionary which would contain
1. a counter or other method that can be easily compared to the original to show that there is a change
2. the configuration

See the Managing Shared State heading on Doug Hellmann's site Click Here

This simple example shows that changes made to the Manager dictionary, whether inside or outside of a process, can be seen by all processes:

import time
from multiprocessing import Process, Manager

def test_f(test_d):
   """  frist process to run
        exit this process when dictionary's 'QUIT' == True
   """
   test_d['2'] = 2     ## add as a test
   while not test_d["QUIT"]:
      print "P1 test_f", test_d["QUIT"]
      test_d["ctr"] += 1
      time.sleep(1.0)

def test_f2(test_d):
    """Second process to run; exits when its ``for`` loop finishes.

    Prints the shared dictionary on every pass to show that changes made
    anywhere (by the parent or by the sibling process) are visible here.

    Fixes vs. the posted version: Python-2-only ``print`` statements and
    the inconsistent 3- vs 4-space indentation.
    """
    for j in range(10):
        print("     P2", j, test_d)
        time.sleep(0.5)

    print("second process finished")

if __name__ == '__main__':
    # --- create a dictionary shared between processes via a Manager.
    manager = Manager()
    test_d = manager.dict()
    test_d["ctr"] = 0
    test_d["QUIT"] = False

    # --- start the first process and hand it the shared dictionary.
    p = Process(target=test_f, args=(test_d,))
    p.start()

    # --- start the second process with the same dictionary.
    p2 = Process(target=test_f2, args=(test_d,))
    p2.start()

    # --- sleep 2 seconds, then flip the flag so the first process exits.
    time.sleep(2.0)
    print("\nterminate first process")
    test_d["QUIT"] = True
    print("test_d changed")
    print("data from first process", test_d)

    # --- give both processes time to finish on their own, then make sure
    #     no child outlives the parent.  join() before terminate() lets a
    #     cleanly-exited process be reaped instead of killed.
    time.sleep(5.0)
    p.join(1.0)
    p2.join(1.0)
    p.terminate()
    p2.terminate()
Be a part of the DaniWeb community

We're a friendly, industry-focused community of developers, IT pros, digital marketers, and technology enthusiasts meeting, networking, learning, and sharing knowledge.