Override click.Option to prompt for value only if it is not present in a config file - python

I am trying to write a custom class that reads from a JSON config file and prompts for a value only if the value is not present in the file, using Python click's option decorator.
This is my CLIConfig class, which fetches the required data from the config.json file. How can it be enhanced to integrate with click.Option?
import json
import pathlib

class CLIConfig:
    def __init__(self, source_app_name):
        self.source_app_name = source_app_name

    def gitlab_access_token(self):
        if 'scm' in self.config_dict and 'accessToken' in self.config_dict['scm']:
            return self.config_dict['scm']['accessToken']
        return None

    def group_id(self):
        if 'scm' in self.config_dict and 'groupId' in self.config_dict['scm']:
            return self.config_dict['scm']['groupId']
        return None

    def microservice_path(self):
        if 'scm' in self.config_dict and 'microservice' in self.config_dict['scm']:
            return self.config_dict['scm']['microservice'].get('localPath', None)
        return None

    def terraform_path(self):
        if 'scm' in self.config_dict and 'terraform' in self.config_dict['scm']:
            return self.config_dict['scm']['terraform'].get('localPath', None)
        return None

    def __enter__(self):
        self.f = open(pathlib.Path.home().joinpath(f".{self.source_app_name}", "config.json"), mode='r')
        try:
            self.config_dict = json.load(self.f)
        except json.decoder.JSONDecodeError:
            self.config_dict = {}
        return self

    def __exit__(self, exc_type, exc_value, tb):
        self.f.close()
On the first run, it should write the option values to the config file; on subsequent runs, it should not prompt for them anymore.
This is the structure of my CLI:
import pathlib
import sys

import click

@click.group()
@click.pass_context
def cli(ctx):
    """Root level of CLI
    """
    pass

@click.command(name='setup', help='Setup CLI')
@click.argument('source_app_name', default=pathlib.Path(sys.argv[0]).name)
@click.option('--microservice_path', prompt='Enter the local repository location for microservices', type=click.Path())
@click.option('--terraform_path', prompt='Enter the local repository location for terraform', type=click.Path())
@click.option('--private_token', hide_input=True, prompt='Provide GitLab API Token')
@click.pass_context
def setup(ctx, source_app_name, microservice_path, terraform_path, private_token):
    ...  # some logic here
Requirement is almost identical to this
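For reference, here is a minimal sketch of one way the two pieces could be wired together (my own illustration, not a tested solution): subclass click.Option, look the value up in the config file before prompting, and write a freshly prompted value back so later runs skip the question. The dotted config_key parameter and the hard-coded app_name default are assumptions, not part of the original code.

import json
import pathlib
import click

class ConfigOption(click.Option):
    def __init__(self, *args, config_key=None, app_name='mycli', **kwargs):
        # config_key is a hypothetical dotted path into config.json, e.g. 'scm.accessToken'
        self.config_key = config_key
        self.config_path = pathlib.Path.home().joinpath(f".{app_name}", "config.json")
        super().__init__(*args, **kwargs)

    def _read_config(self):
        try:
            return json.loads(self.config_path.read_text())
        except (OSError, json.JSONDecodeError):
            return {}

    def prompt_for_value(self, ctx):
        # Prompt only when the key is missing from the config file.
        config = self._read_config()
        node = config
        for part in self.config_key.split('.'):
            node = node.get(part) if isinstance(node, dict) else None
        if node is not None:
            return node
        value = super().prompt_for_value(ctx)
        # Persist the prompted value so subsequent runs do not ask again.
        parent = config
        *parts, last = self.config_key.split('.')
        for part in parts:
            parent = parent.setdefault(part, {})
        parent[last] = value
        self.config_path.parent.mkdir(parents=True, exist_ok=True)
        self.config_path.write_text(json.dumps(config, indent=2))
        return value

@click.command()
@click.option('--private_token', cls=ConfigOption, config_key='scm.accessToken',
              prompt='Provide GitLab API Token', hide_input=True)
def setup(private_token):
    click.echo('token loaded')

click only calls prompt_for_value when the option was not supplied on the command line, so values already present in config.json short-circuit the prompt.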

Related

How can I send a kivy.properties.ObjectProperty between sockets?

I need to send this object over sockets and can't find a way to do it (pickle doesn't work here, and I can't convert this object to bytes either):
ObjectProperty(FileSystemLocal(), baseclass=FileSystemAbstract)
This is FileSystemLocal:
class FileSystemLocal(FileSystemAbstract):
    '''Implementation of :class:`FileSystemAbstract` for local files.

    .. versionadded:: 1.8.0
    '''

    def listdir(self, fn):
        return listdir(fn)

    def getsize(self, fn):
        return getsize(fn)

    def is_hidden(self, fn):
        if platform == 'win':
            if not _have_win32file:
                return False
            try:
                return GetFileAttributesExW(fn)[0] & FILE_ATTRIBUTE_HIDDEN
            except error:
                # This error can occur when the file is already being accessed by
                # someone else, so don't return True: there is a good chance we
                # won't be able to do anything with it anyway.
                Logger.exception('unable to access to <%s>' % fn)
                return True
        return basename(fn).startswith('.')

    def is_dir(self, fn):
        return isdir(fn)
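One possible direction (my own sketch, not a confirmed answer, and it assumes the receiving side also has Kivy installed): since FileSystemLocal carries no real state, send a small serializable description of which filesystem class to use and rebuild the object on the other end, instead of pickling the ObjectProperty itself.

import json

from kivy.uix.filechooser import FileSystemLocal

REGISTRY = {'FileSystemLocal': FileSystemLocal}  # class name -> constructor

def send_fs(sock, fs_obj):
    # Send a length-prefixed JSON description instead of the object itself.
    payload = json.dumps({'fs_class': type(fs_obj).__name__}).encode('utf-8')
    sock.sendall(len(payload).to_bytes(4, 'big') + payload)

def recv_fs(sock):
    length = int.from_bytes(sock.recv(4), 'big')
    spec = json.loads(sock.recv(length).decode('utf-8'))
    return REGISTRY[spec['fs_class']]()  # reconstruct a fresh instance locally

On the receiving side, the rebuilt instance can be assigned to the ObjectProperty as usual; only the small description travels over the socket.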

How do I persist a parent config object across all instances of a child object in Python?

I'm new to OOP in Python and keep stumbling on this issue. Here are my classes:
import pandas as pd
import pysftp

class SFTP:

    """ Just a wrapper around pysftp module with extra functionality """

    def __init__(self,
                 host,
                 username,
                 password):
        self.host = host
        self.username = username
        self.password = password
        self.cnopts = pysftp.CnOpts()
        self.cnopts.hostkeys = None
        self.latest_file = None
        self.sftp_obj = pysftp.Connection(host=self.host,
                                          username=self.username,
                                          password=self.password,
                                          cnopts=self.cnopts)

    def _latest_file(self, remote_path, file_name) -> str:
        for server_file in self.sftp_obj.listdir_attr(remotepath=remote_path):
            if server_file.filename.startswith(file_name) and server_file.st_mtime > 0:
                return server_file.filename

    def to_df(self, remote_path, file_name, sep) -> pd.DataFrame:
        self.latest_file = self._latest_file(remote_path, file_name)
        if self.latest_file is not None:
            return pd.read_csv(self.sftp_obj.open(remote_path + self.latest_file), sep=sep)


class Inputs(SFTP):
    def __init__(self,
                 filename,
                 sep,
                 default_path,
                 backup_path):
        super().__init__(host=ENV_HOST,
                         username=ENV_USERNAME,
                         password=ENV_PASSWORD)
        self.filename = filename
        self.sep = sep
        self.default_path = default_path
        self.backup_path = backup_path

    def _data(self) -> pd.DataFrame:
        if self.filename == 'Report_1':
            data = self.to_df(remote_path=self.default_path, file_name=self.filename, sep=self.sep)
            return data
        elif self.filename == 'Report_2':
            data = self.to_df(remote_path=self.default_path, file_name=self.filename, sep=self.sep)
            return data
So here, every time I call self.to_df I open a connection to the SFTP server and pull the file. However, I will more than likely need all the files, so is there a way to pass in the SFTP object, open one connection, and use it across all instances?
I realize that it's this line in the constructor:
self.sftp_obj = pysftp.Connection(host=self.host,
                                  username=self.username,
                                  password=self.password,
                                  cnopts=self.cnopts)
Is this where I would use a class method, or maybe a property annotation?
At the moment I have to use this Inputs class like this:
report1 = Inputs(filename='Report_1', sep='|', default_path='/PATH/TO/Report_1/')._data()
report2 = Inputs(filename='Report_2', sep='|', default_path='/PATH/TO/Report_2/')._data()
Each time opening a connection to the SFTP server...
Use composition/delegation instead of inheritance, and explicitly pass your SFTP instance to Inputs instances. A simplistic example of composition/delegation:
class Delegatee(object):
    def __init__(self, arg1, arg2):
        self.arg1 = arg1
        self.arg2 = arg2

    def do_this(self, arg):
        return (arg * self.arg1) + self.arg2


class Delegator(object):
    def __init__(self, arg, delegatee):
        self.arg = arg
        self.delegatee = delegatee

    def do_this(self, arg):
        return self.delegatee.do_this(self.arg + arg)


delegatee = Delegatee(1, 42)
delegator = Delegator(0, delegatee)
print(delegator.do_this(0))
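Applied to the classes in the question, the same pattern might look like the sketch below (my illustration, assuming the SFTP class and the ENV_* constants from the question are in scope): the connection is created once and shared by every Inputs instance.

class Inputs:
    def __init__(self, sftp, filename, sep, default_path):
        self.sftp = sftp  # shared, already-connected SFTP wrapper
        self.filename = filename
        self.sep = sep
        self.default_path = default_path

    def data(self):
        return self.sftp.to_df(remote_path=self.default_path,
                               file_name=self.filename, sep=self.sep)

sftp = SFTP(host=ENV_HOST, username=ENV_USERNAME, password=ENV_PASSWORD)  # one connection
report1 = Inputs(sftp, filename='Report_1', sep='|', default_path='/PATH/TO/Report_1/').data()
report2 = Inputs(sftp, filename='Report_2', sep='|', default_path='/PATH/TO/Report_2/').data()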

python - remain in base class when calling parent method from child

I have the following base class:
class ClientRepo(Repository):
    def __init__(self) -> None:
        self.__clientList = []

    def hasClientWithId(self, clientId):
        for client in self.__clientList:
            if client.getId() == clientId:
                return True
        return False

    def addClient(self, client):
        if type(client).__name__ == 'ClientDAO':
            if not self.hasClientWithId(client.getId()):
                client.setClientId(self.__maximumIndexInClientList() + 1)
                self.__clientList.append(client)
            else:
                raise ObjectAlreadyInCollectionException
        else:
            raise TypeError
which basically only holds a list and can add a ClientDAO to it.
And the following, which derives from it:
class ClientFileRepository(ClientRepo):
    def __init__(self, fileName) -> None:
        super().__init__()
        self.__fileName = fileName
        self.__file = None

    def hasClientWithId(self, clientId):
        self.__loadRepo()
        hasClientWithId = super().hasClientWithId(clientId)
        super().clean()
        return hasClientWithId

    def addClient(self, client):
        self.__loadRepo()
        super().addClient(client)
        self.__storeRepo()
        super().clean()

    def __loadFileReadMode(self):
        self.__file = open(self.__fileName, "r")

    def __loadFileWriteMode(self):
        self.__file = open(self.__fileName, "w")

    def __closeFile(self):
        self.__file.close()

    def __loadRepo(self):
        self.__loadFileReadMode()
        for line in self.__file:
            splitLine = line.split()
            clientToAdd = ClientDAO(splitLine[1])
            clientToAdd.setClientId(int(splitLine[0]))
            super().addClientWithId(clientToAdd)
        self.__closeFile()

    def __storeRepo(self):
        self.__loadFileWriteMode()
        self.__file.write("")
        for client in super().getList():
            self.__file.write(self.clientToString(client))
        self.__closeFile()

    def clientToString(self, clientDAO):
        return str(clientDAO.getId()) + " " + clientDAO.getName() + "\n"
a class which should load the list from a file, call addClient from the parent, and store the updated list in the file. The problem is that after the child class loads the file in addClient, it calls the method in the parent, which in turn calls hasClientWithId from the child again. But I want it to call hasClientWithId from the parent, that is, the class context it is in. Can I achieve that?
I can think of several ways to achieve your goal. I ranked them from worst to best.
1. Exactly what you asked for
You want ClientRepo.addClient to call ClientRepo.hasClientWithId instead of ClientFileRepository.hasClientWithId. It is possible to enforce that:
class ClientRepo(Repository):
    def addClient(self, client):
        if type(client).__name__ == 'ClientDAO':
            if not ClientRepo.hasClientWithId(self, client.getId()):
                client.setClientId(self.__maximumIndexInClientList() + 1)
                self.__clientList.append(client)
            else:
                raise ObjectAlreadyInCollectionException
        else:
            raise TypeError
This is not a good approach, because it is unintuitive and breaks the principles of OOP. Any other programmer writing a subclass of ClientRepo that overrides hasClientWithId would expect the override to take effect for every call to hasClientWithId, even inside addClient.
2. Let ClientFileRepository decide which function to use
Add a variable
self.__isFileOpen = False
in ClientFileRepository.__init__, set it to True when you open the file and to False when you close the file. Then change the hasClientWithId within ClientFileRepository to
def hasClientWithId(self, clientId):
    if not self.__isFileOpen:
        self.__loadRepo()
        result = super().hasClientWithId(clientId)
        super().clean()
        return result
    else:
        return super().hasClientWithId(clientId)
to avoid opening the same file again. This works, but it is pretty difficult to write new functions for this class, because you always need to be aware of whether a call comes from within your class or from somewhere else. Also, this seems pretty inefficient, because you read and write the entire file even when you only add one client.
3. Read the file only once and modify the underlying ClientRepo
class ClientFileRepository(ClientRepo):
    def __init__(self, fileName) -> None:
        super().__init__()
        self.__fileName = fileName
        self.__loadRepo()

    # No hasClientWithId needed

    def addClient(self, client):
        super().addClient(client)
        self.__storeRepo()

    def __loadRepo(self):
        with open(self.__fileName) as file:
            for line in file:
                splitLine = line.split()
                clientToAdd = ClientDAO(splitLine[1])
                clientToAdd.setClientId(int(splitLine[0]))
                super().addClientWithId(clientToAdd)

    def __storeRepo(self):
        with open(self.__fileName, "w") as file:
            file.write("")
            for client in super().getList():
                file.write(self.clientToString(client))
This obviously assumes that the file is not changed by someone else between calls to addClient, and the program still overwrites the entire file for every addClient. If this is a problem for you, it is best to be explicit and make loadRepo and storeRepo public. Then the programmer using this class can decide when loading and saving are necessary and useful. You can use context managers for this.
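For illustration, here is a hypothetical sketch (not part of the original answer) of what explicit, context-managed loading and storing could look like; loadRepo and storeRepo stand for public versions of the private __loadRepo and __storeRepo shown above:

from contextlib import contextmanager

class ClientFileRepository(ClientRepo):
    def __init__(self, fileName) -> None:
        super().__init__()
        self._fileName = fileName

    @contextmanager
    def persisted(self):
        # One file read and one file write for any number of operations inside the block.
        self.loadRepo()
        try:
            yield self
        finally:
            self.storeRepo()

# usage:
# with ClientFileRepository("clients.txt").persisted() as repo:
#     repo.addClient(client1)
#     repo.addClient(client2)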
Extra: Read and save the file for every method
You can use function decorators to use solution 2 without writing the same code for every function:
import functools

def loadAndStore(function):
    @functools.wraps(function)
    def wrappedFunction(self, *args, **kwargs):
        if self._isFileOpen:
            return function(self, *args, **kwargs)
        else:
            self._isFileOpen = True
            self._loadRepo()
            try:
                return function(self, *args, **kwargs)
            except Exception as e:  # Only catch expected exceptions
                raise
            finally:
                self._storeRepo()
                self.clean()  # some cleanup
                self._isFileOpen = False
    return wrappedFunction

class ClientFileRepository(ClientRepo):
    def __init__(self, fileName) -> None:
        super().__init__()
        self._fileName = fileName
        self._isFileOpen = False

    @loadAndStore
    def hasClientWithId(self, clientId):
        return super().hasClientWithId(clientId)

    @loadAndStore
    def addClient(self, client):
        super().addClient(client)

    def _loadRepo(self):
        with open(self._fileName) as file:
            for line in file:
                splitLine = line.split()
                clientToAdd = ClientDAO(splitLine[1])
                clientToAdd.setClientId(int(splitLine[0]))
                super().addClientWithId(clientToAdd)

    def _storeRepo(self):
        with open(self._fileName, "w") as file:
            file.write("")
            for client in super().getList():
                file.write(self.clientToString(client))

Be careful here, using this is not very intuitive. For example, self._isFileOpen is defined in __init__, but none of the methods below directly use it. Instead its use is hidden in the loadAndStore decorator. Note also that the helper attributes use a single leading underscore; with double underscores, Python's name mangling would stop the module-level decorator from reaching attributes defined inside the class.
Some quick hints at the end:
type(client).__name__ == 'ClientDAO' is bad practice. Use isinstance(client, ClientDAO) to fully adopt OOP.
If this is not part of a bigger project with given naming conventions, use the Python style guide (PEP 8).
Using private variables like __fileName is generally considered unnecessary; just prefix the variable with one underscore to indicate "internal use". The same is true for functions.

Flask (using watchdog) and uWSGI - no events from file system

I am using watchdog to reload Python modules while my Flask server is running. Everything works when I run the debug Flask server, but when I start the Flask app under uWSGI, no notifications from the Linux file system reach watchdog, so the modules are not reloaded.
MasterService is initialized when the first request is accepted.
Note: I have tried waitress as well, and there everything works fine, but I would prefer to use uWSGI. Thanks for any advice.
'''
Created on 10 Oct 2014

@author: ttrval
'''
import os
import datetime
import pkgutil
import logging
from threading import BoundedSemaphore
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler, EVENT_TYPE_MOVED, EVENT_TYPE_MODIFIED, EVENT_TYPE_CREATED, EVENT_TYPE_DELETED
class Context(object):
    '''Holds parameters passed into math services by ServiceManager
    '''
    logger = None
    serviceManager = None


class Service(object):
    '''Container for python module imported by math_server on run.
    '''
    __slots__ = 'module', 'modifyDate', 'name'

    def __init__(self, name, module, modifyDate):
        self.module = module
        self.modifyDate = modifyDate
        self.name = name

    def update(self, otherService):
        self.module = otherService.module
        self.modifyDate = otherService.modifyDate

    def __repr__(self):
        return "<{typ}|{name}:{module}({date})>".format(
            typ=type(self), module=self.module, date=self.modifyDate, name=self.name)

    def __str__(self):
        return "Service {name}:{module} was last updated {date}".format(
            module=self.module, date=self.modifyDate, name=self.name)
class ServicesFilesEventHandler(FileSystemEventHandler):
    '''Handles changes in the file system of services loaded by math_server
    '''
    def __init__(self, master, logger=logging.getLogger('werkzeug'), supported_types=(".py")):
        self.logger = logger
        self.supported_types = supported_types
        self.master = master

    def dispatch(self, event):
        '''Dispatches events to the appropriate methods.

        :param event:
            The event object representing the file system event.
        :type event:
            :class:`FileSystemEvent`
        '''
        print "event caught {}".format(str(event))
        if event.is_directory:
            return
        path = event.src_path
        if EVENT_TYPE_MOVED is event.event_type:
            path = event.dest_path
        if path[-3:] in self.supported_types:
            _method_map = {
                EVENT_TYPE_MODIFIED: self.on_modified,
                EVENT_TYPE_MOVED: self.on_moved,
                EVENT_TYPE_CREATED: self.on_created,
                EVENT_TYPE_DELETED: self.on_deleted,
            }
            event_type = event.event_type
            _method_map[event_type](event)

    def on_moved(self, event):
        """Called when a file or a directory is moved or renamed.

        :param event:
            Event representing file/directory movement.
        :type event:
            :class:`DirMovedEvent` or :class:`FileMovedEvent`
        """
        path = event.dest_path
        self.logger.info("File moved: {}".format(path))
        self.master.sync_modify_service(path)
        self.master.sync_modify_service(event.src_path, unload=True)

    def on_created(self, event):
        """Called when a file or directory is created.

        :param event:
            Event representing file/directory creation.
        :type event:
            :class:`DirCreatedEvent` or :class:`FileCreatedEvent`
        """
        path = event.src_path
        logging.getLogger('werkzeug').info("File created: {}".format(path))
        self.master.sync_modify_service(path)

    def on_deleted(self, event):
        """Called when a file or directory is deleted.

        :param event:
            Event representing file/directory deletion.
        :type event:
            :class:`DirDeletedEvent` or :class:`FileDeletedEvent`
        """
        path = event.src_path
        self.logger.info("File deleted: {}".format(path))
        self.master.sync_modify_service(path, unload=True)

    def on_modified(self, event):
        """Called when a file or directory is modified.

        :param event:
            Event representing file/directory modification.
        :type event:
            :class:`DirModifiedEvent` or :class:`FileModifiedEvent`
        """
        path = event.src_path
        self.logger.info("File modified: {}".format(path))
        self.master.semaphore.acquire()
        try:
            self.master.unloadService(path)
            self.master.loadService(path)
        finally:
            self.master.semaphore.release()
class Singleton(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):  # #NoSelf
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]
class ServicesMaster(object):
    '''Singleton class, provides access to Services. It also handles service loading and unloading.

    Uses :class:`ServicesFilesEventHandler` and the watchdog package.'''
    # __metaclass__ = Singleton
    services = None
    dirname = None
    observer = None
    logger = None
    semaphore = BoundedSemaphore(1)

    def __init__(self, logger=logging.getLogger('werkzeug'), dirname="./services"):
        Context.logger = logger
        Context.serviceManager = self
        self.__class__.dirname = os.path.abspath(dirname)
        self.__class__.logger = logger
        self._closeObserver()
        self.loadServices()
        self._initObserver()

    def __del__(self):
        self.dirname = None
        self._closeObserver()
        del self.services
        del self.observer

    @classmethod
    def _initObserver(cls):
        '''Creates observer of the module folder (not recursive)
        '''
        event_handler = ServicesFilesEventHandler(cls, cls.logger)
        print "event_handler init {}".format(str(event_handler))
        if cls.observer is None:
            cls.observer = Observer()
            cls.observer.schedule(event_handler, cls.dirname, recursive=False)
            cls.observer.start()

    @classmethod
    def _closeObserver(cls):
        '''Deactivates observer of the module folder (not recursive)'''
        if cls.observer is not None:
            cls.observer.stop()
            cls.observer.join()
            cls.observer = None

    @classmethod
    def sync_modify_service(cls, path, unload=False):
        '''
        Synchronized modification of a service.
        if unload == True: unloads the service
        else: loads the service
        '''
        cls.semaphore.acquire()
        try:
            if unload:
                cls.unloadService(path)
            else:
                cls.loadService(path)
        finally:
            cls.semaphore.release()

    @classmethod
    def loadServices(cls):
        '''
        Loads services from the module directory. Consider using
        'sync_modify_service' so that only one method (loadServices xor unloadServices) can be executed at a time.
        '''
        if cls.services is None:
            cls.services = {}
        # remove the current directory and replace the file system address with the python dot convention
        importer = pkgutil.ImpImporter(path=cls.dirname)
        cls.semaphore.acquire()
        for name, ispkg in importer.iter_modules():
            if not ispkg:
                loader = importer.find_module(name)
                if '.py' == loader.etc[0]:
                    new_service = Service(
                        name=name,
                        module=loader.load_module(loader.fullname),
                        modifyDate=cls.modification_date(loader.filename)
                    )
                    cls.services[name] = new_service
                    new_service.module.activate(Context)
        cls.semaphore.release()
        cls.logger.info("Loaded Services: {}".format(cls.services.keys()))
        print "check after services loaded"
    @classmethod
    def loadService(cls, path):
        fullpath = os.path.abspath(path)
        directory = os.path.dirname(fullpath)
        if directory != cls.dirname:
            raise Exception("Directory '{}' of new service is not the module directory ('{}')".
                            format(directory, cls.dirname))
        new_service = Service(
            name=os.path.basename(fullpath).split('.')[0],
            module=cls._loadModule(fullpath),
            modifyDate=cls.modification_date(fullpath)
        )
        if new_service.name in cls.services:  # an older version of the new service is loaded already
            # deactivate the old module instance
            cls.services[new_service.name].module.deactivate(Context)
            # activate the new module instance
            cls.services[new_service.name].update(new_service)
        else:
            cls.services[new_service.name] = new_service
        # activate the new service
        cls.services[new_service.name].module.activate(Context)
        cls.logger.info("Loaded Service: {}\nLoaded Services: {}"
                        .format(new_service.name, cls.services.keys()))

    @classmethod
    def unloadService(cls, path):
        fullpath = os.path.abspath(path)
        directory = os.path.dirname(fullpath)
        # check if the file is (was) in the directory of services
        if directory != cls.dirname:
            return
        # the file is (was) in the observed directory of services
        name = os.path.basename(fullpath).split('.')[0]
        if name in cls.services:
            # first deactivate the old module
            cls.services[name].module.deactivate(Context)
            # remove the old module
            del cls.services[name]
            # remove the old compiled module
            try:
                os.remove(fullpath.split('.')[0] + ".pyc")
            except Exception:
                # the file does not exist any more
                cls.logger.info("Found that file {} was removed already.".format(fullpath.split('.')[0] + ".pyc"))
        else:
            raise KeyError("Service {} not found in loadedServices".format(name))
        cls.logger.info("Unloaded Service: {}\nLoaded Services: {}"
                        .format(name, cls.services.keys()))
        return
    @classmethod
    def _loadModule(cls, path):
        '''
        Loads a single python module from a file path.

        :param path: path to the module
        :type path: String, e.g. './services/game_math.py'
        '''
        fullpath = os.path.abspath(path)
        name = os.path.basename(fullpath).split('.')[0]  # extracts the file name without extension
        folder = path[:-(len(os.path.basename(path)))]  # extracts the path to the folder
        importer = pkgutil.ImpImporter(path=folder)
        loader = importer.find_module(name)
        return loader.load_module(loader.fullname)

    @staticmethod
    def runService(name, args):
        '''Returns the result from a math service for the given arguments.

        :raises Exception: Exception("Service '{}' not found on MathServer".format(name))
        '''
        if name in ServicesMaster.services:
            return ServicesMaster.services[name].module.run(args)
        else:
            raise Exception("Service '{}' not found on MathServer".format(name))

    @staticmethod
    def modification_date(filename):
        '''Returns the modification date of a file as a datetime.'''
        t = os.path.getmtime(filename)
        return datetime.datetime.fromtimestamp(t)
The solution is to enable threads in the uWSGI configuration and set the number of threads to 2 or more.
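For example, the relevant settings in a uwsgi.ini might look like this (my sketch; the module line is a hypothetical Flask entry point, not taken from the question):

[uwsgi]
# hypothetical Flask entry point
module = myapp:app
master = true
processes = 1
# without enable-threads the watchdog observer thread never runs
enable-threads = true
# give each worker at least two threads
threads = 2

Setting threads to 2 or more enables threading implicitly as well; the key point is that the worker must allow Python threads so watchdog's Observer thread can receive and dispatch events.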

Why does this validator print None while Django's returns a useful message?

Below is the code for the validator. Why is 'None None' printed? The same code is in Django 1.6.
In [1]: %paste
class SVNPathValidator(object):
    message = 'Enter a valid value.'
    code = 'invalid'

    def __init__(self, verify_existed=True, message=None, code=None, path_type=0, rev=None):
        '''
        file_type:
            0 file
            1 dir
        '''
        if message is None:
            self.message = message
        if code is None:
            self.code = code
        print self.message, self.code
        self.path_type = path_type
        self.rev = rev

    def __call__(self, value):
        print self.message, self.code

## -- End pasted text --

In [2]: validator=SVNPathValidator()
None None

In [3]: validator('svn://10.10.10.10/repo')
None None
I'm not sure why you think it would do anything else. You don't pass either the message or the code when you instantiate the object, and you don't set them in the __call__ method.
The code is not the same as the Django code you link to. On the contrary, all the Django versions have the opposite of your code: if message is not None, etc.
The effect of your code is to set those two variables to None, though I assume that wasn't the intent:
def __init__(self, verify_existed=True, message=None, code=None, path_type=0, rev=None):
    if message is None:
        self.message = message
    if code is None:
        self.code = code
First it checks whether the arguments are None, and if they are, it assigns them (that is, None) as instance attributes, shadowing the class-level defaults. Nowhere does self.message or self.code get assigned anything else.
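A minimal fix (my sketch, mirroring Django's own pattern) is to flip the conditions so that the class-level defaults are only overridden when arguments are actually supplied:

def __init__(self, verify_existed=True, message=None, code=None, path_type=0, rev=None):
    if message is not None:
        self.message = message  # override the class default only when a message is given
    if code is not None:
        self.code = code
    self.path_type = path_type
    self.rev = rev

With this change, SVNPathValidator() prints 'Enter a valid value. invalid', because the class attributes are no longer shadowed by None.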
