Why can't multiprocessing.Process call a getattr method? - python

I am trying to call the two methods say_hello and say_world via getattr() from multiprocessing.Process, but say_world is never executed. How can I make it work? Thanks.
# -*- coding: utf-8 -*-
from multiprocessing import Process
import time

class Hello:
    def say_hello(self):
        print('Hello')

    def say_world(self):
        print('World')

class MultiprocessingTest:
    def say_process(self, say_type):
        h = Hello()
        while True:
            if hasattr(h, say_type):
                result = getattr(h, say_type)()
                print(result)
            time.sleep(1)

    def report(self):
        Process(target=self.say_process('say_hello')).start()
        Process(target=self.say_process('say_world')).start()  # This line is never executed.

if __name__ == '__main__':
    t = MultiprocessingTest()
    t.report()

The target parameter expects a reference to a callable, but your code calls self.say_process('say_hello') immediately in the parent process instead of passing one. Because say_process contains an infinite loop, that call never returns, so the second Process line is never reached (and even without the loop, the call would evaluate to None rather than a function). These are the necessary parts to change:
class Hello:
    def say_hello(self):
        while True:
            print('Hello')
            time.sleep(1)

    def say_world(self):
        while True:
            print('World')
            time.sleep(1)

class MultiprocessingTest:
    def say_process(self, say_type):
        h = Hello()
        if hasattr(h, say_type):
            return getattr(h, say_type)  # Return the bound method instead of calling it
        else:
            return None
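With that change, report() from the question can stay exactly as written: say_process() now returns a bound method immediately, and each Process calls it in its own child process. A minimal sketch of the unchanged caller, assuming the corrected classes above:
class MultiprocessingTest:
    ...  # say_process as corrected above

    def report(self):
        Process(target=self.say_process('say_hello')).start()
        Process(target=self.say_process('say_world')).start()  # now reached, because say_process returns

if __name__ == '__main__':
    MultiprocessingTest().report()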

Related

Using string/data from another function

I'm trying to use the data returned from one function in multiple other functions, but I don't want the first function to run every time it is referenced, which is what happens in my case.
# Function lab
def func_a():
    print('running function a')
    data = 'test'
    return data

def func_b():
    print(func_a())

def func_c():
    print(func_a())

def func_d():
    print(func_a())

if __name__ == '__main__':
    func_a()
    func_b()
    func_c()
    func_d()
Each time, the whole of func_a runs. But I just want the returned data from func_a in the other functions.
IIUC, you could handle this with a simple class.
The result of running func_a is held in an instance variable called output. Once run_function has finished, that output can be referenced as often as you like from all other functions without having to re-run func_a.
Hope this helps!
class FunctionA:
    def __init__(self):
        self.output = None

    def run_function(self):
        print('running function a')
        data = 'test'
        self.output = data

def func_b():
    print(func_a.output)

def func_c():
    print(func_a.output)

def func_d():
    print(func_a.output)

if __name__ == '__main__':
    func_a = FunctionA()
    func_a.run_function()
    func_b()
    func_c()
    func_d()

>>> running function a
>>> test
>>> test
>>> test
Your func_a does two things. To make this clear, let's call it print_and_return_data.
There are several ways to break apart the two things print_and_return_data does. One way is to split up the two behaviors into smaller sub-functions:
def print_and_return_data():
    print('running function a')  # keeping the old print behavior
    data = 'test'
    return data
into:
def print_run():
    print('running function a')  # keeping the old print behavior

def return_data():
    return 'test'

def print_and_return_data():
    print_run()
    return return_data()
So that other functions only use what they need:
def func_b():
    print(return_data())
Another way is to make print_and_return_data behave differently the first time it's called than on subsequent calls (I don't recommend this, because a function whose behavior depends on how many times it has been called can be confusing):
context = {'has_printed_before': False}

def print_and_return_data():
    if not context['has_printed_before']:
        print('running function a')
        context['has_printed_before'] = True
    data = 'test'
    return data

def func_b():
    print(print_and_return_data())

if __name__ == '__main__':
    print_and_return_data()  # prints
    func_b()  # won't print
One way to avoid "functions behaving differently when they're called" is to pass the variation (the "context") in as an argument:
def return_data(also_print=False):
    if also_print:
        print('running function a')
    data = 'test'
    return data

def func_b():
    print(return_data())

if __name__ == '__main__':
    return_data(also_print=True)  # prints
    func_b()  # won't print
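A further option, not covered in the answers above, would be to cache the result so that func_a's body only ever runs once. A minimal sketch using functools.lru_cache from the standard library, reusing the names from the question:
from functools import lru_cache

@lru_cache(maxsize=None)
def func_a():
    print('running function a')  # printed only on the first call
    return 'test'

def func_b():
    print(func_a())  # later calls reuse the cached 'test' without printing

if __name__ == '__main__':
    func_a()
    func_b()
    func_b()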

threading args: too many arguments given error?

I am trying to use threading to call a function with args, but the syntax I use says I am passing too many args... but 2 are needed and 2 are given... so why?
import threading
import time

class Baab():
    def finc(phrase):
        time.sleep(3)
        print(phrase)

    def fenc():
        time.sleep("last")

    def fanc(ophrase):
        print(ophrase)

def func(phrase, ophrase):
    b = Baab()
    b.fanc(ophrase)
    b.finc(phrase)
    b.fenc()

th = threading.Thread(target=func, args=("baba", "lol"))
th.start()
time.sleep(1)
print("second")
The methods of your class need an explicit self parameter as their first argument.
Try changing your class methods as below:
class Baab():
    def finc(self, phrase):
        time.sleep(3)
        print(phrase)

    def fenc(self):
        time.sleep("last")  # note: time.sleep() expects a number; a string will raise a TypeError

    def fanc(self, ophrase):
        print(ophrase)
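For completeness, a minimal runnable sketch combining the corrected class with the thread call from the question; fenc() is left out here because of the time.sleep("last") issue noted above:
import threading
import time

class Baab():
    def finc(self, phrase):
        time.sleep(3)
        print(phrase)

    def fanc(self, ophrase):
        print(ophrase)

def func(phrase, ophrase):
    b = Baab()
    b.fanc(ophrase)  # prints "lol" immediately
    b.finc(phrase)   # prints "baba" after 3 seconds

th = threading.Thread(target=func, args=("baba", "lol"))
th.start()
time.sleep(1)
print("second")      # expected order: lol, second, baba
th.join()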

Passing string into Python's threading.Thread as an arg

I am trying to figure out how to pass a string as an argument in Python threading.Thread. This problem has been encountered before: Python Threading String Arguments
Is there a better way to pass in a string? There has to be a more obvious way, and I am just too new to coding to figure it out.
Code Block A
import threading

def start_my_thread():
    my_thread = threading.Thread(target=my_func, args="string")
    my_thread.start()

def my_func(input):
    print(input)
Result:
TypeError: my_func() takes 1 positional argument but 6 were given
Code Block B
import threading

def start_my_thread():
    my_thread = threading.Thread(target=my_func, args=("string",))
    my_thread.start()

def my_func(input):
    print(input)
Result: string
You can inherit from Thread, define my_func as its run method, and create a new instance.
import threading

class MyThread(threading.Thread):
    def __init__(self, string):
        super().__init__()
        self.string = string

    def run(self):
        print(self.string)

# def start_my_thread():
#     my_thread = threading.Thread(target=my_func, args=("string",))
#     my_thread.start()

# def my_func(input):
#     print(input)

if __name__ == "__main__":
    MyThread("hello").start()

Send file pointer to python thread and update file pointer

I have a Python program with a thread that should write to a file. I spawn the thread from the main program. On a new-day trigger I change the file pointer in the main program, and I want the thread to pick up the new file and write its data there.
I have code that does this with a global variable, but is there a better way of doing it?
#!/usr/bin/env python
import sys
import threading
import time

filePtr = None

def fileWriteTh():
    global filePtr
    time.sleep(2)
    filePtr.write("from the thrread this should in file 2")

def main():
    global filePtr
    filePtr = open("test1.txt", "ab")
    fileThread = threading.Thread(target=fileWriteTh)
    fileThread.start()
    if new_day_trigger:
        filePtr.close()
        filePtr = open("test2.txt", "ab")
    fileThread.join()

if __name__ == "__main__":
    main()
This is the new code that is written:
#!/usr/bin/env python
import sys
import threading
import time

class SendPacket(object):
    fileDesc = None

    def __init__(self, fd):
        super(SendPacket, self).__init__()
        SendPacket.fileDesc = fd

    def printFromInstance(self, var):
        print var
        SendPacket.fileDesc.write(var)
        time.sleep(3)
        print var
        SendPacket.fileDesc.write(var)

    def startabc(self, someVar):
        self.printFromInstance(someVar)

    @classmethod
    def printVar(cls, printStr):
        print printStr
        cls.fileDesc.write(printStr)

    @classmethod
    def changeClsFile(cls, newFd):
        cls.fileDesc = newFd

def main():
    filePtr = open("test1.txt", "ab")
    sendPack_inst = SendPacket(filePtr)
    fileThread = threading.Thread(target=sendPack_inst.startabc, args=("test1",))
    fileThread.start()
    time.sleep(2)
    filePtr.close()
    filePtr = open("test2.txt", "ab")
    SendPacket.changeClsFile(filePtr)
    fileThread.join()
    filePtr.close()

if __name__ == "__main__":
    main()
Like this:
#!/usr/bin/env python
import sys
import thread
import time

class _fileACT:
    def __init__(self):
        self.trigger = 0
        self.flag = True
        self.msg = ""
        self.files = (open("test1.txt", "ab"), open("test2.txt", "ab"))

    def run(self, pssrg):
        while self.flag:
            if self.msg != "":
                self.files[self.trigger].write(self.msg)
                self.msg = ""

    def test(self, pssrg):
        for i in range(20):
            time.sleep(1)
            if i % 2 != 0:
                self.trigger = 0
            elif i % 2 != 1:
                self.trigger = 1
            self.msg = "%0.3d test-1,asdasdasd\n" % i
            time.sleep(0.5)
            print "wait..."
        self.flag = False
        for e in self.files:
            e.close()
        print "can exit !"

if __name__ == "__main__":
    fileACT = _fileACT()
    thread.start_new_thread(fileACT.run, (None,))
    thread.start_new_thread(fileACT.test, (None,))
The class keeps three pieces of state: the trigger (which file is currently selected), a running flag, and the message to be written. With only two files, False and True would be sufficient for the trigger (of course you can use an index for multiple files). The test function is written into the class because we don't want the main cycle to freeze. File selection is done through trigger, and because the previous and the next file are different objects, the previous one can simply be closed when writing is finished.
The important point is that the writer thread's loop contains no time delay; the delays are always applied around changing the trigger and setting the message, never inside the writer loop itself. Accessing the shared instance from outside the class works the same way. I hope it helps.
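If you would rather stay with the higher-level threading module, the same idea (the main thread swaps the file object that the worker writes through) can be sketched roughly as below; the class and method names are made up for illustration, and a lock guards the swap:
import threading
import time

class FileWriter:
    def __init__(self, path):
        self._lock = threading.Lock()
        self._file = open(path, "ab")
        self.running = True

    def swap_file(self, new_path):
        # Called from the main thread, e.g. on a new-day trigger.
        with self._lock:
            self._file.close()
            self._file = open(new_path, "ab")

    def write(self, data):
        with self._lock:
            self._file.write(data)

    def run(self):
        while self.running:
            self.write(b"tick\n")
            time.sleep(1)

writer = FileWriter("test1.txt")
worker = threading.Thread(target=writer.run)
worker.start()
time.sleep(2)
writer.swap_file("test2.txt")  # the worker now writes to the new file
time.sleep(2)
writer.running = False
worker.join()  # (closing the final file is left out for brevity)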

Using variables in signal handler - require global?

I have a signal handler to handle ctrl-c interrupt. If in the signal handler I want to read a variable set in my main script, is there an alternative to using a "global" statement when setting the variable?
I don't mind doing this, but read this post (Do you use the "global" statement in Python?) in which someone commented that there should be no reason to ever use global.
What is the alternative in this case?
My code looks like this:
import signal
import time

def signal_handler(signal, frame):
    print "in sig handler - g_var=%s" % g_var

def main():
    global g_var
    g_var = "test"
    time.sleep(120)

if __name__ == '__main__':
    signal.signal(signal.SIGINT, signal_handler)
    main()
You can use a closure as the signal handler that acquires its state from the main script:
import signal
import sys
import time

def main_function():
    data_for_signal_handler = 10

    def signal_handler(*args):
        print data_for_signal_handler
        sys.exit()

    signal.signal(signal.SIGINT, signal_handler)  # Or whatever signal

    while True:
        data_for_signal_handler += 1
        time.sleep(0.5)

if __name__ == '__main__':
    main_function()
You can use partial to create a "closure".
import signal
import time
from functools import partial

def signal_handler(g_var, signal, frame):
    print "in sig handler - g_var=%s" % g_var

def main():
    g_var = "test"
    signal.signal(signal.SIGINT, partial(signal_handler, g_var))
    time.sleep(120)

if __name__ == '__main__':
    main()
Within the object-oriented paradigm (OOP) it's quite convenient to use lambdas for that purpose. Using lambdas you could pass some additional context (like a self reference) and/or get rid of the unused arguments (signal, frame).
import time
import signal

class Application:
    def __init__(self):
        signal.signal(signal.SIGINT, lambda signal, frame: self._signal_handler())
        self.terminated = False

    def _signal_handler(self):
        self.terminated = True

    def MainLoop(self):
        while not self.terminated:
            print("I'm just doing my job like everyone else")
            time.sleep(3)

app = Application()
app.MainLoop()
print("The app is terminated, exiting ...")
If you're just reading the variable, there should be no need to make the variable "global"
def foo():
    print a

a = 3
foo()  # 3
global is necessary to allow you to change the variable and have that change propagate into the module namespace.
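For contrast, a quick sketch of the rebinding case: without global, an assignment inside the function just creates a new local name, and the module-level value is untouched.
a = 3

def set_without_global():
    a = 5  # creates a new local 'a'; the module-level 'a' stays 3

def set_with_global():
    global a
    a = 5  # rebinds the module-level 'a'

set_without_global()  # a is still 3
set_with_global()     # a is now 5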
If you want to pass some state to your callback without using global, the typical way to do this is to use an instance method as the callback:
class foo(object):
    def __init__(self, arg):
        self.arg = arg

    def callback_print_arg(self):
        print self.arg

def call_callback(callback):
    callback()

a = foo(42)
call_callback(a.callback_print_arg)  # 42
You can access outer-scope variables from within an inline-defined function, like so:
import signal

my_values = {'foo': 'bar'}

def handler(signum, frame):
    for key, val in my_values.items():
        print key, val

my_values['bat'] = 'baz'
# remember to use mutable types, like dicts or lists

signal.signal(signal.SIGINT, handler)
