Python: use a virtual class to apply a generic "pipe" pattern

I am trying to find out whether it is possible to take the following code and use the magic of Python to simplify it.
Right now I have a command interface that sits on top of a bunch of Python subprocesses. When I need to communicate with the subprocesses, I pipe commands to them. Basically it comes down to a string command and a dictionary of arguments.
Here is the pattern that gets repeated (I show one for simplicity's sake, but in reality it is repeated 7 times for different processes).
Create the processes:
class MasterProcess(object):
    def __init__(self):
        self.stop = multiprocessing.Event()
        (self.event_generator_remote,
         self.event_generator_in) = multiprocessing.Pipe(duplex=True)
        self.event_generator = Process(target=self.create_event_generator,
                                       kwargs={'in': self.event_generator_remote})
        self.event_generator.start()

    def create_event_generator(self, **kwargs):
        eg = EventGenerator()
        in_pipe = kwargs['in']
        while not self.stop.is_set():
            self.stop.wait(1)
            if in_pipe.poll():
                msg = in_pipe.recv()
                cmd = msg[0]
                args = msg[1]
                if cmd == 'create_something':
                    in_pipe.send(eg.create(args))
                else:
                    raise NotImplementedError(cmd)
And then the command interface just pumps commands to the process:
mp = MasterProcess()
pipe = mp.event_generator_remote

>> cmd: create_something args
# I process the above and then do something like the below
cmd = "create_something"
args = {
    # bah
}
pipe.send([cmd, args])
attempt = 0
while not pipe.poll():
    time.sleep(1)
    attempt += 1
    if attempt > 20:
        return None
return pipe.recv()
What I want to move to is more of a remote-facade-type deal, where the client just calls a method as it normally would, and I translate that call into the above.
For example the new command would look like:
mp = MasterProcess()
mp_proxy = MasterProcessProxy(mp.event_generator_remote)
mp_proxy.create_something(args)
So my virtual class would be MasterProcessProxy; there would really be no methods. Could it, behind the scenes, somehow take the method name and the provided args and pipe them to the process?
Does that make sense? Would it be possible to do the same on the other side? Just assume whatever comes down the pipe will be in the form cmd, args, where cmd is a local method, and just do a self.<cmd>()?
As I am typing this up I understand it is probably confusing, so please let me know what needs clarification.
Thanks.

You can use __getattr__ to create proxy methods for your stub class:
class MasterProcessProxy(object):
    def __init__(self, pipe):
        self.pipe = pipe

    # This is called when normal attribute lookup fails on the object.
    def __getattr__(self, name):
        # Create a dynamic function that sends a command through the pipe.
        # Keyword arguments are sent as command arguments.
        def proxy(**kwargs):
            self.pipe.send([name, kwargs])
        return proxy
Now you can use it as you wanted:
mp = MasterProcess()
mp_proxy = MasterProcessProxy(mp.event_generator_remote)
mp_proxy.create_something(spam="eggs", bacon="baked beans")
# Will call pipe.send(["create_something", {"spam": "eggs", "bacon": "baked beans"}])
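On the other side you can dispatch the same way with getattr(). A minimal sketch of the receiving end (PipeDispatcher and poll_once are illustrative names, not from the original answer), assuming the worker loop owns an EventGenerator-style handler:

class PipeDispatcher(object):
    """Receives [name, kwargs] messages and calls the matching handler method."""
    def __init__(self, pipe, handler):
        self.pipe = pipe
        self.handler = handler  # e.g. an EventGenerator instance

    def poll_once(self):
        if self.pipe.poll():
            name, kwargs = self.pipe.recv()
            # getattr raises AttributeError for unknown commands,
            # replacing the explicit if/else chain in the question
            method = getattr(self.handler, name)
            self.pipe.send(method(**kwargs))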

You might want to check out the Twisted framework. It won't beat figuring out how to do it yourself, but it will make writing this style of application a lot easier.

Related

Display full help text in Python Click

I am having the following problem, and I fear there isn't a straightforward way to solve it, so I am asking here. I am using Click to implement a CLI, and I have created several grouped commands under the main command. This is the code:
@click.group()
def main():
    pass

@main.command()
def getq():
    '''Parameters: --questionnaire_id, --question_id, --session_id, --option_id'''
    click.echo('Question Answers')
When I type the main command alone in my terminal, it lists all the subcommands with the help text next to each one. However, the text is not displayed fully in the case of getq. Instead, it displays only "Parameters: --questionnaire_id, --question_id,...".
Is there a way to display it all?
Thank You
The easiest way to do this is to use the command's short_help argument:
@click.group()
def main():
    pass

@main.command(short_help='Parameters: --questionnaire_id, --question_id, --session_id, --option_id')
def getq():
    click.echo('Question Answers')
If you insist on using the docstring for this and want to override its automatic shortening, you could use a custom Group class that overrides the format_commands method to use cmd.help directly instead of the get_short_help_str method:
import click
from gettext import gettext as _

class FullHelpGroup(click.Group):
    def format_commands(self, ctx: click.Context, formatter: click.HelpFormatter) -> None:
        """Extra format methods for multi methods that adds all the commands
        after the options.
        """
        commands = []
        for subcommand in self.list_commands(ctx):
            cmd = self.get_command(ctx, subcommand)
            # What is this, the tool lied about a command. Ignore it
            if cmd is None:
                continue
            if cmd.hidden:
                continue
            commands.append((subcommand, cmd))

        # allow for 3 times the default spacing
        if len(commands):
            limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands)

            rows = []
            for subcommand, cmd in commands:
                help = cmd.help if cmd.help is not None else ""
                rows.append((subcommand, help))

            if rows:
                with formatter.section(_("Commands")):
                    formatter.write_dl(rows)

@click.group(cls=FullHelpGroup)
def main():
    pass

@main.command()
def getq():
    '''Parameters: --questionnaire_id, --question_id, --session_id, --option_id'''
    click.echo('Question Answers')

if __name__ == "__main__":
    main()
You most probably also want to override max_content_width (at most 80 columns by default). You can do this through the context settings:
import shutil

@click.group(cls=FullHelpGroup,
             context_settings={'max_content_width': shutil.get_terminal_size().columns - 10})
def main():
    pass

Can I tell Python multiprocessing.Process not to serialize something?

I essentially have a class like this:
class ConnectionClass:
    def __init__(self, addr: str) -> None:
        self.addr = addr
        self._connection = None

    def connection(self):
        if self._connection is None:
            self._connection = magic_create_socket(self.addr)
        return self._connection

    def do_something(self):
        self.connection().send_message("something")
I will be passing it via something like:
def do_processing(connection):
    # this will run many times:
    connection.do_something()

# There will be more processes or maybe a process pool. I want to avoid repeating myself
my_connection = ConnectionClass("some address")
my_proc = multiprocessing.Process(target=do_processing, args=(my_connection,))
Now clearly, each process should have its own connection sockets, file descriptors, and so on. So while I want to pass along any props that describe the connection, like addr in this simplified example, I want ConnectionClass._connection to be None when it is copied to the other process, so that it gets lazily initialized again.
I COULD make the connection description and the actual wrapper for the socket/fd separate, but that means extra classes, extra code to pass the description from one to the other, and so on.
Is it possible to use some annotation to tell Python's multiprocessing library to ignore certain values when serializing the data for the other process?
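Not an annotation, but the standard pickle hooks can express exactly this. A minimal sketch (not from the original post): defining __getstate__ lets the class drop the live socket when multiprocessing serializes it, so each child process lazily re-creates its own. magic_create_socket is the hypothetical factory from the question.

class ConnectionClass:
    def __init__(self, addr: str) -> None:
        self.addr = addr
        self._connection = None

    def __getstate__(self):
        # Called by pickle when multiprocessing sends the object to a child;
        # copy everything except the live socket.
        state = self.__dict__.copy()
        state['_connection'] = None
        return state

    def connection(self):
        if self._connection is None:
            self._connection = magic_create_socket(self.addr)
        return self._connection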

Inherit class Worker in Odoo 15

In one of my Odoo installations I need to set the socket_timeout variable of the WorkerHTTP class directly from Python code, bypassing the environment variable ODOO_HTTP_SOCKET_TIMEOUT.
If you never read about it, you can check here for more info: https://github.com/odoo/odoo/commit/49e3fd102f11408df00f2c3f6360f52143911d74#diff-b4207a4658979fdb11f2f2fa0277f483b4e81ba59ed67a5e84ee260d5837ef6d
In Odoo 15, which I'm using, the worker classes are located in odoo/service/server.py.
My idea was to inherit the constructor of the Worker class and simply set self.sock_timeout = 10 or another value, but I can't make it work with inheritance.
EDIT: I almost managed to make it work, but I have problems with static methods.
STEP 1:
Inherit the WorkerHTTP constructor and add self.sock_timeout = 10.
Then I also have to inherit PreforkServer and override the process_spawn() method, so I can pass WorkerHttpExtend instead of WorkerHTTP as the argument to the worker_spawn() method.
class WorkerHttpExtend(WorkerHTTP):
    """Set up the sock_timeout class variable when a WorkerHTTP object gets initialized."""
    def __init__(self, multi):
        super(WorkerHttpExtend, self).__init__(multi)
        self.sock_timeout = 10
        logging.info(f'SOCKET TIMEOUT: {self.sock_timeout}')

class PreforkServerExtend(PreforkServer):
    """I have to inherit PreforkServer and override the process_spawn()
    method so I can pass WorkerHttpExtend instead of WorkerHTTP
    as the argument to the worker_spawn() method.
    """
    def process_spawn(self):
        if config['http_enable']:
            while len(self.workers_http) < self.population:
                self.worker_spawn(WorkerHttpExtend, self.workers_http)
            if not self.long_polling_pid:
                self.long_polling_spawn()
        while len(self.workers_cron) < config['max_cron_threads']:
            self.worker_spawn(WorkerCron, self.workers_cron)
STEP 2:
The static method start() should initialize PreforkServer with PreforkServerExtend, not with PreforkServer (last line in the code below). This is where I start to have problems.
def start(preload=None, stop=False):
    """Start the odoo http server and cron processor."""
    global server
    load_server_wide_modules()
    if odoo.evented:
        server = GeventServer(odoo.service.wsgi_server.application)
    elif config['workers']:
        if config['test_enable'] or config['test_file']:
            _logger.warning("Unit testing in workers mode could fail; use --workers 0.")
        server = PreforkServer(odoo.service.wsgi_server.application)
STEP 3:
At this point, if I want to go further (which I did), I have to copy the whole start() method and import every package needed to make it work:
import odoo
from odoo.service.server import WorkerHTTP, WorkerCron, PreforkServer, load_server_wide_modules, \
GeventServer, _logger, ThreadedServer, inotify, FSWatcherInotify, watchdog, FSWatcherWatchdog, _reexec
from odoo.tools import config
I did that, and then in my custom start() method I wrote the line
server = PreforkServerExtend(odoo.service.wsgi_server.application)
but even then, how do I tell Odoo to execute my start() method instead of the original one?
I'm sure this would eventually work (maybe not safely, but it would work), because at some point, not being 100% sure of what I was doing, I put my inherited classes WorkerHttpExtend and PreforkServerExtend into the original odoo/service/server.py and initialized the server object with PreforkServerExtend instead of PreforkServer:
server = PreforkServerExtend(odoo.service.wsgi_server.application)
It worked then: I got the custom socket timeout value, plus the print and logging info when the Odoo service started, because PreforkServerExtend calls my custom classes in cascade at that point; otherwise my inherited classes are there but never get called.
So I guess if I could tell the system to run my start() method, I would be done.
STEP 4 (not reached yet):
I'm pretty sure that the start() method is called in odoo/cli/server.py, in the main() method:
rc = odoo.service.server.start(preload=preload, stop=stop)
I could go deeper, but I don't think the effort is worth it for what I need.
So technically, if I were able to tell the system which start() method to choose, I would be done. I'm still not sure it's a safe procedure (probably not, actually, but at this point I was just experimenting), but I wonder if there is an easier way to set the socket timeout without using the environment variable ODOO_HTTP_SOCKET_TIMEOUT.
I'm pretty sure there is an easier method than what I'm doing, with low-level Python or maybe even with a class in odoo/service/server.py, but I can't figure it out for now. If someone has an idea, let me know!
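(A minimal sketch of one direct way, in the same monkeypatching spirit as the working solution below: odoo/cli/server.py resolves odoo.service.server.start at call time, so rebinding that attribute before the CLI invokes it redirects the call. start_extended is a hypothetical name for the copied method.)

import odoo.service.server as srv

def start_extended(preload=None, stop=False):
    # ... full copy of the original start(), but creating
    # PreforkServerExtend instead of PreforkServer ...
    pass

# Rebind the module-level name before odoo/cli/server.py calls it
srv.start = start_extended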
Working solution: I was introduced to monkeypatching in this post:
Possible for a class to look down at subclass constructor?
This solved my problem; now I'm able to patch the process_request method of the WorkerHTTP class:
import errno
import fcntl
import socket
import odoo
import odoo.service.server as srv

class WorkerHttpProcessRequestPatch(srv.WorkerHTTP):
    def process_request(self, client, addr):
        client.setblocking(1)
        # client.settimeout(self.sock_timeout)
        client.settimeout(10)  # patching timeout setup to a needed value
        client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        flags = fcntl.fcntl(client, fcntl.F_GETFD) | fcntl.FD_CLOEXEC
        fcntl.fcntl(client, fcntl.F_SETFD, flags)
        self.server.socket = client
        try:
            self.server.process_request(client, addr)
        except IOError as e:
            if e.errno != errno.EPIPE:
                raise
        self.request_count += 1

# Switch the process_request class attribute - this is what I needed to make it work
odoo.service.server.WorkerHTTP.process_request = WorkerHttpProcessRequestPatch.process_request

Using a coroutine for Python's Cmd class input stream

The problem I am facing is that:
I have an asynchronous method
Calling plain-old Python code I cannot change
Which calls back a plain-old Python method
Which needs to call async code using await
I have a custom command interpreter built on top of Python's Cmd class. I provide it with custom stdin and stdout. For the purpose of this question, it looks like this:
import cmd
import sys

class CustomStream(object):
    def readline(self):
        return sys.stdin.readline()

    def write(self, msg):
        sys.stdout.write(msg)

    def flush(self):
        pass

class MyShell(cmd.Cmd):
    def do_stuff(self, args):
        print("Getting things done...")

    def do_exit(self, args):
        return True

stream = CustomStream()
shell = MyShell(stdin=stream, stdout=stream)
shell.use_rawinput = False
shell.cmdloop()
When Cmd needs to read from the user, it will do this:
line = self.stdin.readline()
I want to provide an SSH interface to my custom interpreter using the AsyncSSH library, which is based on asyncio. My SSH code is much like the Simple Server sample, which reads from a stdin-like interface like this (notice the await keyword):
line_from_client = (await self._process.stdin.readline()).rstrip('\n')
I tried a number of things, but I can't duck-type the SSH code to Cmd's expectation of stdin. What must I do to have my CustomStream object use asyncio/coroutines on the inside while providing an old-school, single-threaded interface to MyShell?
The solution was to patch the cmdloop method to make it asyncio-aware.
This code is a copy of the cmdloop function from Python 3.7.2's Cmd class that you get if you:
set use_rawinput to False
put await in front of readline()
It results in this code (aiocmd.py, available as a gist):
async def adapter_cmdloop(self, intro=None):
    """Repeatedly issue a prompt, accept input, parse an initial prefix
    off the received input, and dispatch to action methods, passing them
    the remainder of the line as argument.
    """
    self.preloop()
    # This is the same code as the Python 3.7.2 Cmd class, with the
    # following changes:
    #  - Removed dead code caused by forcing use_rawinput=False.
    #  - Added an await in front of readline().
    if intro is not None:
        self.intro = intro
    if self.intro:
        self.stdout.write(str(self.intro)+"\n")
    stop = None
    while not stop:
        if self.cmdqueue:
            line = self.cmdqueue.pop(0)
        else:
            self.stdout.write(self.prompt)
            self.stdout.flush()
            line = await self.stdin.readline()
            if not len(line):
                line = 'EOF'
            else:
                line = line.rstrip('\r\n')
        line = self.precmd(line)
        stop = self.onecmd(line)
        stop = self.postcmd(stop, line)
    self.postloop()
Where you need to use your Cmd-derived class, like MyShell, create a new class called MyAsyncShell at runtime:
# Patch the shell with an async-aware cmdloop
MyAsyncShell = type('MyAsyncShell', (MyShell,), {
    'cmdloop': aiocmd.adapter_cmdloop,
    'use_rawinput': False,
})
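Since cmdloop is now a coroutine, the patched shell has to be driven from async code. A minimal usage sketch (run_shell is a hypothetical driver, not from the original answer):

async def run_shell(stream):
    # stream must provide an async readline() plus plain write()/flush()
    shell = MyAsyncShell(stdin=stream, stdout=stream)
    await shell.cmdloop()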
Implement write and flush as you see fit, but your readline should look like this:
async def readline(self):
    return await my_implementation_of_readline()
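For the AsyncSSH case in the question, the stream can wrap the SSH process directly. A sketch (AsyncSSHStream is an illustrative name), assuming a process object like the one in the Simple Server sample:

class AsyncSSHStream(object):
    """Adapts an asyncssh process to the stream duck type MyAsyncShell expects."""
    def __init__(self, process):
        self._process = process

    async def readline(self):
        # Awaits the SSH client's next line instead of blocking on sys.stdin
        return await self._process.stdin.readline()

    def write(self, msg):
        self._process.stdout.write(msg)

    def flush(self):
        pass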

Save a command line option's value in an object with Python's Click library

I want to parse some command line arguments with Python's Click library and save the provided values in an object.
My first guess would be to do it like this:
import click

class Configuration(object):
    def __init__(self):
        # configuration variables
        self.MyOption = None
        # method call
        self.parseCommandlineArguments()

    @click.command()
    @click.option('--myoption', type=click.INT, default=5)
    def parseCommandlineArguments(self, myoption):
        # save option's value in the object
        self.MyOption = myoption

# create an instance
configuration = Configuration()
print(configuration.MyOption)
However, this does not work; instead I get:
TypeError: parseCommandlineArguments() takes exactly 2 arguments (1 given)
Apparently, passing self to the decorated function is not the correct way to do it. If I remove self from the method's arguments, then I can e.g. do print(myoption) and it will print 5 on the screen, but the value will not be known to any instance of my Configuration class.
What is the correct way to handle this? I assume it has something to do with context handling in Click but I cannot get it working based on the provided examples.
If I'm understanding you correctly, you want a command-line tool that takes configuration options and then does something with them. If that is your objective, have a look at the example below. It uses command groups and passes a context object through each command. Click has awesome documentation; be sure to read it.
import click
import json

class Configuration(object):
    """
    Having a custom context class is usually not needed.
    See the complex application documentation:
    http://click.pocoo.org/5/complex/
    """
    my_option = None
    number = None
    is_awesome = False
    uber_var = 900

    def make_conf(self):
        self.uber_var = self.my_option * self.number

pass_context = click.make_pass_decorator(Configuration, ensure=True)

@click.group(chain=True)
@click.option('-m', '--myoption', type=click.INT, default=5)
@click.option('-n', '--number', type=click.INT, default=0)
@click.option('-a', '--is-awesome', is_flag=True)
@pass_context
def cli(ctx, myoption, number, is_awesome):
    """
    this is where I will save the configuration
    and do whatever processing that is required
    """
    ctx.my_option = myoption
    ctx.number = number
    ctx.is_awesome = is_awesome
    ctx.make_conf()

@click.command('save')
@click.argument('output', type=click.File('w'))  # text mode so json.dump can write str
@pass_context
def save(ctx, output):
    """save the configuration to a file"""
    json.dump(ctx.__dict__, output, indent=4, sort_keys=True)
    return click.secho('configuration saved', fg='green')

@click.command('show')
@pass_context
def show(ctx):
    """print the configuration to stdout"""
    return click.echo(json.dumps(ctx.__dict__, indent=4, sort_keys=True))

cli.add_command(save)
cli.add_command(show)
After this is installed you can run commands like this:
mycli -m 30 -n 40 -a show
mycli -m 30 -n 40 -a save foo.json
mycli -m 30 -n 40 -a show save foo.json
The complex example is an excellent demo for developing a highly configurable, chainable multi-command tool.
