I'm quite new to multiprocessing. I have code that runs two processes: one continuously receives data blocks from the server and puts them in a queue, and the other removes the data blocks from the queue and processes them.
Below is my client code:
import socket
import turtle
import multiprocessing
from multiprocessing import Process, Queue
from tkinter import *
class GUI:
    def __init__(self, master):
        rec_data = recv_data()
        self.master = master
        master.title("Collision Detection")
        self.input_label = Label(root, text="Input all the gratings set straight wavelength values in nm")
        self.input_label.grid(row=0)
        self.core_string = "Core "
        self.entries = []
        self.label_col_inc = 0
        self.entry_col_inc = 1
        self.core_range = range(1, 5)
        for y in self.core_range:
            self.core_text = self.core_string + str(y) + '_' + '25'
            self.core_label = Label(root, text=self.core_text)
            self.entry = Entry(root)
            self.core_label.grid(row=1, column=self.label_col_inc, sticky=E)
            self.entry.grid(row=1, column=self.entry_col_inc)
            self.entries.append(self.entry)
            self.label_col_inc += 2
            self.entry_col_inc += 2
        self.threshold_label = Label(root, text="Threshold in nm")
        self.entry_threshold = Entry(root)
        self.threshold_label.grid(row=2, sticky=E)
        self.entry_threshold.grid(row=2, column=1)
        self.light_label = Label(root, text='Status')
        self.light_label.grid(row=3, column=3)
        self.canvas = Canvas(root, width=150, height=50)
        self.canvas.grid(row=4, column=3)
        # Green light
        self.green_light = turtle.RawTurtle(self.canvas)
        self.green_light.shape('circle')
        self.green_light.color('grey')
        self.green_light.penup()
        self.green_light.goto(0, 0)
        # Red light
        self.red_light = turtle.RawTurtle(self.canvas)
        self.red_light.shape('circle')
        self.red_light.color('grey')
        self.red_light.penup()
        self.red_light.goto(40, 0)
        self.data_button = Button(root, text="Get data above threshold", command=rec_data.getData)
        self.data_button.grid(row=5, column=0)
class recv_data:
    def __init__(self):
        self.buff_data = multiprocessing.Queue()
        self.p1 = multiprocessing.Process(target=self.recvData)
        self.p2 = multiprocessing.Process(target=self.calculate_threshold)
        self.host = '127.0.0.1'
        self.port = 5001
        self.s = socket.socket()
        self.s.connect((self.host, self.port))

    # function to receive TCP data blocks
    def getData(self):
        len_message = self.s.recv(4)
        bytes_length = int(len_message.decode('utf-8'))  # for the self-made server
        recvd_data = self.s.recv(bytes_length)
        self.buff_data.put(recvd_data)
        self.p1.start()
        self.p2.start()
        self.p1.join()
        self.p2.join()

    def recvData(self):
        len_message = self.s.recv(4)
        while len_message:
            bytes_length = int(len_message.decode('utf-8'))  # for the self-made server
            recvd_data = self.s.recv(bytes_length)
            self.buff_data.put(recvd_data)
            len_message = self.s.recv(4)
        else:
            print('out of loop')
        self.s.close()

    def calculate_threshold(self):
        rmv_data = self.buff_data.get()
        stringdata = rmv_data.decode('utf-8')
        rep_str = stringdata.replace(",", ".")
        splitstr = rep_str.split()
        # received wavelength values
        inc = 34
        wav_threshold = []
        for y in gui.entries:
            straight_wav = float(y.get())
            wav = float(splitstr[inc])
            wav_diff = wav - straight_wav
            if wav_diff < 0:
                wav_diff = wav_diff * (-1)
            wav_threshold.append(wav_diff)
            inc += 56
        threshold = float(gui.entry_threshold.get())
        for x in wav_threshold:
            if (x > threshold):
                gui.red_light.color('red')
                gui.green_light.color('grey')
            else:
                gui.red_light.color('grey')
                gui.green_light.color('green')

    # function to write into the file
    def write_file(self, data):
        with open("Output.txt", "a") as text_file:
            text_file.write('\t'.join(data[0:]))
            text_file.write('\n')
if __name__ == '__main__':
    root = Tk()
    gui1 = GUI(root)
    root.mainloop()
The error I get is shown below:
Exception in Tkinter callback
Traceback (most recent call last):
  File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\tkinter\__init__.py", line 1883, in __call__
    return self.func(*args)
  File "C:/Users/PycharmProjects/GUI/GUI_multiprocess.py", line 85, in getData
    self.p2.start()
  File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\multiprocessing\process.py", line 121, in start
    self._popen = self._Popen(self)
  File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\multiprocessing\context.py", line 224, in _Popen
    return _default_context.get_context().Process._Popen(process_obj)
  File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\multiprocessing\context.py", line 326, in _Popen
    return Popen(process_obj)
  File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\multiprocessing\popen_spawn_win32.py", line 93, in __init__
    reduction.dump(process_obj, to_child)
  File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\multiprocessing\reduction.py", line 60, in dump
    ForkingPickler(file, protocol).dump(obj)
TypeError: cannot pickle 'weakref' object
Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\multiprocessing\spawn.py", line 116, in spawn_main
    exitcode = _main(fd, parent_sentinel)
  File "C:\Users\AppData\Local\Programs\Python\Python38-32\lib\multiprocessing\spawn.py", line 126, in _main
    self = reduction.pickle.load(from_parent)
EOFError: Ran out of input
What am I doing wrong here and how can I fix it? Any help is appreciated. Thank you!
I ran into the same traceback and managed to solve it. The cause was that an object held an already-started (running or exited) Process as an attribute, and another Process was then started from that same object.
Problem
Here is minimal code that reproduces your error:
import multiprocessing

class Foo:
    def __init__(self):
        self.process_1 = multiprocessing.Process(target=self.do_stuff1)
        self.process_2 = multiprocessing.Process(target=self.do_stuff2)

    def do_multiprocessing(self):
        self.process_1.start()
        self.process_2.start()

    def do_stuff1(self):
        print("Doing 1")

    def do_stuff2(self):
        print("Doing 2")

if __name__ == '__main__':
    foo = Foo()
    foo.do_multiprocessing()
[out]:
Traceback (most recent call last):
  File "myfile.py", line 21, in <module>
    foo.do_multiprocessing()
  File "myfile.py", line 11, in do_multiprocessing
    self.process_2.start()
  File "...\lib\multiprocessing\process.py", line 121, in start
    self._popen = self._Popen(self)
  File "...\lib\multiprocessing\context.py", line 224, in _Popen
    return _default_context.get_context().Process._Popen(process_obj)
  File "...\lib\multiprocessing\context.py", line 327, in _Popen
    return Popen(process_obj)
  File "...\lib\multiprocessing\popen_spawn_win32.py", line 93, in __init__
    reduction.dump(process_obj, to_child)
  File "...\lib\multiprocessing\reduction.py", line 60, in dump
    ForkingPickler(file, protocol).dump(obj)
TypeError: cannot pickle 'weakref' object
Doing 1
Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "...\lib\multiprocessing\spawn.py", line 116, in spawn_main
    exitcode = _main(fd, parent_sentinel)
  File "...\lib\multiprocessing\spawn.py", line 126, in _main
    self = reduction.pickle.load(from_parent)
EOFError: Ran out of input
So the issue is that foo still holds the running/exited process foo.process_1 when it starts foo.process_2, and that started Process object cannot be pickled for the new child.
Solution 1
Set foo.process_1 to None (or anything else that pickles) before starting process_2, or store the Process objects somewhere other than on foo so they are not pickled along with it. The snippet below shows the first option; a sketch of the second follows it.
...
    def do_multiprocessing(self):
        self.process_1.start()
        self.process_1 = None  # Drop the started process so it is not pickled when process_2 starts
        self.process_2.start()
...
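For the second option, one way (a minimal sketch of my own, not from the original post) is to keep the Process objects in local variables instead of attributes of foo, so pickling foo for the second child never touches the first Process:

import multiprocessing

class Foo:
    def do_stuff1(self):
        print("Doing 1")

    def do_stuff2(self):
        print("Doing 2")

if __name__ == '__main__':
    foo = Foo()
    # The Process objects live in local variables, not on foo, so when the
    # second child pickles foo it never sees the already-started first Process.
    process_1 = multiprocessing.Process(target=foo.do_stuff1)
    process_2 = multiprocessing.Process(target=foo.do_stuff2)
    process_1.start()
    process_2.start()
    process_1.join()
    process_2.join()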
Solution 2
Exclude the problematic attribute (process_1) from pickling by overriding __getstate__:
class Foo:
    def __getstate__(self):
        # capture what is normally pickled
        state = self.__dict__.copy()
        # remove unpicklable/problematic variables
        state['process_1'] = None
        return state
    ...
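For completeness, here is a minimal runnable sketch of Solution 2 applied to the Foo example above; the __getstate__ hook is the only addition, and the joins are just for tidiness:

import multiprocessing

class Foo:
    def __init__(self):
        self.process_1 = multiprocessing.Process(target=self.do_stuff1)
        self.process_2 = multiprocessing.Process(target=self.do_stuff2)

    def __getstate__(self):
        # capture what is normally pickled
        state = self.__dict__.copy()
        # drop process_1 so the already-started process is never pickled
        # when process_2 is spawned
        state['process_1'] = None
        return state

    def do_multiprocessing(self):
        self.process_1.start()
        self.process_2.start()
        self.process_1.join()
        self.process_2.join()

    def do_stuff1(self):
        print("Doing 1")

    def do_stuff2(self):
        print("Doing 2")

if __name__ == '__main__':
    foo = Foo()
    foo.do_multiprocessing()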
This seems to be a problem in newer Python versions: my own code worked fine on 3.7 but failed with this issue on 3.9.
I tested your code (the recv_data part). Since you join the processes and therefore need to keep references to them, you should go with Solution 2 or store the processes somewhere other than on recv_data. I'm not sure what other problems your code has.
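As a rough sketch (only the addition is shown, following the same pattern as Solution 2; it does not address other issues in your code, such as the module-level gui reference and the Tkinter widgets being used from a child process), it could look like this:

class recv_data:
    # ... __init__ and the other methods stay as they are ...

    def __getstate__(self):
        # capture what is normally pickled
        state = self.__dict__.copy()
        # drop the Process objects so the already-started p1 is never
        # pickled when p2 is started
        state['p1'] = None
        state['p2'] = None
        return state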