I'm trying to subclass str, but having some difficulties due to its immutability.
class DerivedClass(str):
    """str subclass used to demonstrate the problem: only explicitly
    overridden methods return DerivedClass; inherited str methods
    still return plain str.  (Python 2 code.)"""

    def __new__(cls, string):
        # str is immutable, so the value must be fixed in __new__, not __init__.
        ob = super(DerivedClass, cls).__new__(cls, string)
        return ob

    def upper(self):
        #overridden, new functionality. Return ob of type DerivedClass. Great.
        caps = super(DerivedClass, self).upper()
        return DerivedClass(caps + '123')

derived = DerivedClass('a')
print derived.upper() #'A123'
print type(derived.upper()) #<class '__main__.DerivedClass'>
print derived.lower() #'a'  -- lower() is plain str.lower, hence the next line
print type(derived.lower()) #<type 'str'>
For inherited methods that don't require any new functionality, such as derived.lower(), is there a simple, pythonic way to return an object of type DerivedClass (instead of str)? Or am I stuck manually overriding each str.method(), as I did with derived.upper()?
Edit:
#Any massive flaws in the following?
class DerivedClass(str):
    """Proposed solution: intercept every attribute access and wrap any
    string result of a callable back into DerivedClass."""

    def __new__(cls, string):
        # Immutable base type: construct the value in __new__.
        ob = super(DerivedClass, cls).__new__(cls, string)
        return ob

    def upper(self):
        caps = super(DerivedClass, self).upper()
        return DerivedClass(caps + '123')

    def __getattribute__(self, name):
        # Runs for EVERY attribute lookup on the instance.
        att = super(DerivedClass, self).__getattribute__(name)
        if not callable(att):
            return att

        def call_me_later(*args, **kwargs):
            result = att(*args, **kwargs)
            # Python 2: basestring covers both str and unicode results.
            if isinstance(result, basestring):
                return DerivedClass(result)
            return result
        return call_me_later
Good use for a class decorator -- roughly (untested code):
@do_overrides
class Myst(str):
def upper(self):
...&c...
and
def do_overrides(cls):
    """Class decorator: wrap every public callable inherited from cls's
    first base so that results of the base's type are re-wrapped as cls
    instances.

    Fixes over the original "untested" sketch:
    - `done` is built from cls.__dict__ (names defined on cls itself);
      dir(cls) also lists inherited names, so nothing would ever be wrapped.
    - callable() is tested on the attribute, not on its name string
      (a str is never callable, so the loop was a no-op).
    - wrap() now returns the wrapper (it previously fell off the end and
      setattr stored None).
    - the decorator returns cls, so `@do_overrides` does not rebind the
      class name to None.
    """
    done = set(cls.__dict__)
    base = cls.__bases__[0]

    def wrap(f):
        def wrapper(*a, **k):
            r = f(*a, **k)
            # Promote plain base-type results to the derived type.
            if isinstance(r, base) and not isinstance(r, cls):
                r = cls(r)
            return r
        return wrapper

    for m in dir(base):
        # Skip names overridden on cls itself and dunder slots.
        if m in done or m.startswith('__'):
            continue
        attr = getattr(base, m)
        if not callable(attr):
            continue
        setattr(cls, m, wrap(attr))
    return cls
You can do this by overriding __getattribute__ as Zr40 suggests, but you will need to have getattribute return a callable function. The sample below should give you what you want; it uses the functools.partial wrapper to make life easier, though you could implement it without partial if you like:
from functools import partial

class DerivedClass(str):
    """str subclass whose inherited methods re-wrap string results as
    DerivedClass on the fly via __getattribute__.  (Python 2 code:
    uses `basestring`.)"""

    def __new__(cls, string):
        # str is immutable: the value must be set in __new__.
        ob = super(DerivedClass, cls).__new__(cls, string)
        return ob

    def upper(self):
        #overridden, new functionality. Return ob of type DerivedClass. Great.
        caps = super(DerivedClass, self).upper()
        return DerivedClass(caps + '123')

    def __getattribute__(self, name):
        func = str.__getattribute__(self, name)
        if name == 'upper':
            # Already overridden above -- hand the bound method back untouched.
            return func
        if not callable(func):
            return func

        def call_me_later(*args, **kwargs):
            result = func(*args, **kwargs)
            # Some str functions return lists, ints, etc
            # FIX: the original was missing the closing parenthesis in
            # this isinstance() call (`isinstance(result, basestring:`).
            if isinstance(result, basestring):
                return DerivedClass(result)
            return result
        return partial(call_me_later)
You're both close, but checking for each doesn't extend well to overriding many methods.
from functools import partial

class DerivedClass(str):
    """Combined version: overrides upper() explicitly and generically
    re-wraps string results of all other str methods.  (Python 2.)"""

    def __new__(cls, string):
        ob = super(DerivedClass, cls).__new__(cls, string)
        return ob

    def upper(self):
        caps = super(DerivedClass, self).upper()
        return DerivedClass(caps + '123')

    def __getattribute__(self, name):
        if name in ['__dict__', '__members__', '__methods__', '__class__']:
            # Introspection attributes pass through object's machinery
            # untouched, avoiding recursion problems.
            return object.__getattribute__(self, name)
        func = str.__getattribute__(self, name)
        if name in self.__dict__.keys() or not callable(func):
            # Instance attributes and non-callables are returned as-is.
            return func

        def call_me_later(*args, **kwargs):
            result = func(*args, **kwargs)
            # Some str functions return lists, ints, etc
            if isinstance(result, basestring):
                return DerivedClass(result)
            return result
        return partial(call_me_later)
(Improvements suggested by jarret hardie in comments.)
You might be able to do this by overriding __getattribute__.
def __getattribute__(self, name):
    # Simple hardcoded check for upper.
    # I'm sure there are better ways to get the list of defined methods in
    # your class and see if name is contained in it.
    if name == 'upper':
        return object.__getattribute__(self, name)
    # NOTE(review): illustrative only -- this wraps every OTHER attribute,
    # including non-string values and the method objects themselves, in
    # DerivedClass, which would fail for anything str() can't accept.
    return DerivedClass(object.__getattribute__(self, name))
Related
I'm trying to decorate all methods in class and i succeded with this code, but i'm also trying to log calls to operators like * + - / , is there any way to decorate them or something like getattr(self,"*") to log the calls ?
class Logger(object):
    """Class decorator (question code): wraps a class so its method calls
    are logged.  As explained in the answer below, patching happens on the
    instance, so operator syntax like `a * a` bypasses the logged methods:
    the interpreter looks magic methods up on the type, not the instance."""

    def __init__(self, bool):
        # `bool` shadows the builtin; kept as posted.
        self.bool = bool

    def __call__(self, cls):
        class DecoratedClass(cls):
            def __init__(cls, *args, **kwargs):
                # NOTE(review): `cls` here is really the instance (`self`
                # by convention) -- the "false friend" called out below.
                super().__init__(*args, **kwargs)
                if not(self.bool):
                    return
                methods = [func for func in dir(cls)
                           if callable(getattr(cls, func))
                           and not func.startswith("__class")]
                for func in methods:
                    old_func = getattr(cls, func)
                    def decorated_function(fname, fn):
                        # Factory binds fname/fn per method (avoids late binding).
                        def loggedFunction(*args, **kwargs):
                            print("Calling {0} from {3} with params {1} and kwargs {2}".format(fname.upper(), args, kwargs, cls))
                            return fn(*args, **kwargs)
                        return loggedFunction
                    setattr(cls, func, decorated_function(func, old_func))
        return DecoratedClass
#Logger(True)  # NB: '@Logger(True)' in the original post; the '@' was mangled to '#'
class DummyClass():
    def __init__(self,foo):
        self.foo = foo

    def bar(self):
        print(self.foo)

    def __mul__(self,other):
        print("Hello",other)

if __name__ == '__main__':
    a = DummyClass('hola')
    # NOTE(review): DummyClass defines no 'method'; presumably a.bar() was meant.
    a.method()
    a.__mul__(a) #this is logged
    print(a*a) #this is not logged by decorator
Thanks to Łukasz, here is a working script.
A difficulty I encountered is to handle multiple instances and avoid to decorate multiple times the same class methods. To handle this problem, I keep track of the decorated class methods (cls.__logged).
Another difficulty is to deal with the magic methods like __setattr__, __getattribute__, __repr__, ... My solution is to ignore them, except for a list that you must define at start (loggable_magic_methods).
from functools import wraps

# Magic methods that should still be logged despite being dunders.
loggable_magic_methods = ['__mul__',]

def is_magic_method(method):
    # Dunder detection by prefix only (as posted).
    return method.startswith('__')

class Logger(object):
    """Class decorator: logs calls to instance methods and to the magic
    methods listed in loggable_magic_methods.

    cls.__logged (name-mangled onto the original class) records which
    class-level magic methods are already wrapped, so that creating
    several instances does not wrap them a second time."""

    def __init__(self, bool):
        # `bool` shadows the builtin; kept as posted.
        self.bool = bool

    def __call__(self, cls):
        class LoggedClass(cls):
            cls.__logged = []
            def __init__(instance, *args, **kwargs):
                super().__init__(*args, **kwargs)
                if not(self.bool):
                    return
                methods = [funcname for funcname in dir(instance)
                           if callable(getattr(instance, funcname))
                           and (funcname in loggable_magic_methods or not is_magic_method(funcname))]
                def logged(method):
                    #wraps(method)   ('@wraps(method)' in the original post)
                    def wrapper(*args, **kwargs):
                        print (method.__name__, args, kwargs, cls)
                        return method(*args, **kwargs)
                    return wrapper
                for funcname in methods:
                    if funcname in cls.__logged:
                        continue  # already patched by a previous instance
                    if is_magic_method(funcname):
                        # Magic methods must be patched on the CLASS for
                        # operator syntax (a * b) to see them.
                        setattr(cls, funcname, logged(getattr(cls, funcname)))
                        cls.__logged.append(funcname)
                    else:
                        # Regular methods are patched per instance.
                        setattr(instance, funcname, logged(getattr(instance, funcname)))
        return LoggedClass
#Logger(True)  # NB: '@Logger(True)' in the original post; the '@' was mangled to '#'
class DummyClass():
    def __init__(self, foo, coef):
        self.foo = foo
        self.coef = coef

    def bar(self):
        print(self.foo)

    def __mul__(self, other):
        print(self.foo)
        print(other.foo)
        return self.coef * other.coef

if __name__ == '__main__':
    # Two instances: exercises both instance-level and class-level patching.
    a = DummyClass('hola', 1)
    a.bar()
    print()
    print(a.__mul__(a))
    print()
    print(a*a)
    print()
    b = DummyClass('gracias', 2)
    b.bar()
    print()
    print(b.__mul__(a))
    print()
    print(b*a)
Currently you are patching values on the instance. Your usage of cls in the __init__ signature is a false friend — actually it's plain old self in this case.
If you want to override magic methods, interpreter looks for them on class objects, not on instances.
Minimal example:
class DummyClass:
    """Toy class: multiplication multiplies the operands' foo attributes."""

    def __init__(self, foo):
        self.foo = foo

    def __mul__(self, other):
        return self.foo * other.foo


def logged(method):
    """Wrap *method* so every invocation is echoed before delegating."""
    def _echo_then_call(*args, **kwargs):
        print (method.__name__, args, kwargs)
        result = method(*args, **kwargs)
        return result
    return _echo_then_call


# Patch the magic method on the CLASS: the interpreter looks magic
# methods up on the type, never on the instance.
DummyClass.__mul__ = logged(DummyClass.__mul__)

a = DummyClass(1)
b = DummyClass(2)
assert a * a == 1
assert a * b == 2
assert b * b == 4
Each call is logged.
>>> a = DummyClass(1)
>>> b = DummyClass(2)
>>> assert a * a == 1
__mul__ (<__main__.DummyClass object at 0x00000000011BFEB8>, <__main__.DummyClass object at 0x00000000011BFEB8>) {}
>>> assert a * b == 2
__mul__ (<__main__.DummyClass object at 0x00000000011BFEB8>, <__main__.DummyClass object at 0x00000000011BF080>) {}
>>> assert b * b == 4
__mul__ (<__main__.DummyClass object at 0x00000000011BF080>, <__main__.DummyClass object at 0x00000000011BF080>) {}
I'll leave a task of rewriting monkey-patching approach to you.
I have a class that I want to share in a read-only fashion with children processes in a pool, so I prepared a proxy of a class but it didn't work. The following is a simplified example of my problem.
from multiprocessing.managers import BaseManager

class TestClass:
    """Toy class shared through a manager (Python 2: print statement)."""
    def __init__(self, a):
        self.a = a
    def b(self):
        print self.a

class MyManager(BaseManager): pass

MyManager.register('test', TestClass)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    t = TestClass(1)
    print t.a
    mt = manager.test(2)
    mt.b()
    # The default AutoProxy exposes only methods, not attributes --
    # this line raises AttributeError (the point of the question).
    mt.a
When I run this code I get:
1
2
Traceback (most recent call last):
File "multiprocess_example_stackexchange.py", line 20, in <module>
mt.a
AttributeError: 'AutoProxy[test]' object has no attribute 'a'
It seems that I cannot access the attribute of a shared object directly via a proxy. Is the only way using a method that gets the attribute, or am I doing something wrong?
The Proxy objects used by multiprocessing.BaseManager and its sub-classes normally only expose methods from the objects they're referring to, not attributes. Now, there is multiprocessing.Manager().Namespace, which provides a Proxy sub-class that does provide access to attributes, rather than methods. We can create our own Proxy type which inherits from that, which enables access to all our attributes, as well as access to our b function:
from multiprocessing.managers import BaseManager, NamespaceProxy

class TestClass(object):
    def __init__(self, a):
        self.a = a
    def b(self):
        print self.a

class MyManager(BaseManager): pass

class TestProxy(NamespaceProxy):
    # We need to expose the same __dunder__ methods as NamespaceProxy,
    # in addition to the b method.
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__', 'b')

    def b(self):
        # Route the method call through the manager connection.
        callmethod = object.__getattribute__(self, '_callmethod')
        return callmethod('b')

MyManager.register('test', TestClass, TestProxy)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    t = TestClass(1)
    print t.a
    mt = manager.test(2)
    print mt.a
    mt.a = 5
    mt.b()
Output:
1
2
5
Edit:
If you want to be able to dynamically add methods from your original class to a Proxy class, you can do something like this:
from multiprocessing.managers import BaseManager, NamespaceProxy
import inspect

class TestClass(object):
    def __init__(self, a):
        self.a = a
    def b(self):
        print self.a

class AnotherClass(object):
    def __init__(self, a):
        self.a = a
    def c(self):
        print self.a

class MyManager(BaseManager): pass

class ProxyBase(NamespaceProxy):
    # The attribute-access dunders NamespaceProxy relies on; concrete
    # method names are appended per class by register_proxy() below.
    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__')

class TestProxy(ProxyBase): pass
class AnotherProxy(ProxyBase): pass
def register_proxy(name, cls, proxy):
    """Expose every public method of `cls` on the proxy type, then
    register the (name, cls, proxy) triple with MyManager.

    Fixes over the original:
    - the generated stub is built by a factory so each stub captures its
      own method name; the original lambda closed over the loop variable
      `attr` (late binding), so every stub dispatched to the LAST name.
    - stubs now forward *args/**kwargs to _callmethod; the original
      lambda accepted no arguments beyond self.
    - `inspect.ismethod` is replaced by callable(): on Python 3,
      functions looked up on a class are plain functions, so ismethod()
      returned False and nothing was exposed at all.
    """
    def make_stub(method_name):
        # One function per method; binds method_name at definition time.
        def stub(self, *args, **kwargs):
            return object.__getattribute__(self, '_callmethod')(
                method_name, args, kwargs)
        return stub

    for attr in dir(cls):
        if callable(getattr(cls, attr)) and not attr.startswith("__"):
            proxy._exposed_ += (attr,)
            setattr(proxy, attr, make_stub(attr))
    MyManager.register(name, cls, proxy)
register_proxy('test', TestClass, TestProxy)
register_proxy('another', AnotherClass, AnotherProxy)

if __name__ == '__main__':
    manager = MyManager()
    manager.start()
    # Both proxied objects live in the manager process; attribute reads,
    # attribute writes and method calls all go over the connection.
    mt = manager.test(2)
    ma = manager.another(3)
    mt.b()
    ma.c()
    mt.a = 5
    ma.a = 6
    mt.b()
    ma.c()
After spending few hours to reading the source codes, here is the simplest ways to implement the proxy class to expose all attributes and methods:
class TestProxy(NamespaceProxy):
    # Expose everything the target class defines.
    _exposed_ = tuple(dir(Test))

    def __getattr__(self, name):
        result = super().__getattr__(name)
        if isinstance(result, types.MethodType):
            def wrapper(*args, **kwargs):
                # NOTE(review): as pointed out further down this page, this
                # drops the return value and the kwargs; it should read
                # `return self._callmethod(name, args, kwargs)`.
                self._callmethod(name, args)
            return wrapper
        return result

BaseManager.register('Test', Test, TestProxy)
manager = BaseManager()
test = manager.Test()
Also, here is an auto proxy method:
def Proxy(target):
    """Build a NamespaceProxy subclass for `target` that exposes all of
    its attributes, routing method results through _callmethod.

    NOTE(review): same flaw as the class above -- the generated wrapper
    neither returns the _callmethod result nor forwards kwargs; corrected
    versions appear later on this page.
    """
    dic = {'types': types}
    # __getattr__ is built via exec so it can live in the dynamic type.
    exec('''def __getattr__(self, key):
    result = self._callmethod('__getattribute__', (key,))
    if isinstance(result, types.MethodType):
        def wrapper(*args, **kwargs):
            self._callmethod(key, args)
        return wrapper
    return result''', dic)
    proxyName = target.__name__ + "Proxy"
    ProxyType = type(proxyName, (NamespaceProxy,), dic)
    ProxyType._exposed_ = tuple(dir(target))
    return ProxyType

TestProxy = Proxy(Test)
BaseManager.register('Test', Test, TestProxy)
manager = BaseManager()
test = manager.Test()
Since I do not have enough reputation to comment, I am posting an answer. The otherwise excellent answer by @shtse8 has a bug. I wanted to point this out since this page is one of the top hits when you search for queries regarding NamespaceProxy and the said answer has been used by others as well.
The problem lies in the below code:
# (Quoted from the answer above to exhibit the bug being discussed.)
class TestProxy(NamespaceProxy):
    _exposed_ = tuple(dir(Test))
    def __getattr__(self, name):
        result = super().__getattr__(name)
        if isinstance(result, types.MethodType):
            def wrapper(*args, **kwargs):
                self._callmethod(name, args) # Result not returned; kwargs dropped
            return wrapper
        return result
If you use this class (or the equivalent 'auto proxy' method mentioned in the answer) to create proxy objects, then any function that is programmed to return a value will always return NoneType instead (if you access the function through the proxy). Moreover, keyword arguments will not be passed on either. This is because the wrapper neither passes kwargs on to the method call nor returns the result of the call itself. Therefore, we need to pass kwargs and put a return in the line self._callmethod(name, args).
The TestProxy class should then become:
class TestProxy(NamespaceProxy):
    """Corrected proxy: method results are returned and kwargs forwarded."""
    _exposed_ = tuple(dir(Test))

    def __getattr__(self, name):
        result = super().__getattr__(name)
        if isinstance(result, types.MethodType):
            def wrapper(*args, **kwargs):
                return self._callmethod(name, args, kwargs) # Note the return here
            return wrapper
        return result
The 'auto proxy' function would then become:
def Proxy(target):
    """Corrected 'auto proxy' factory: the generated wrapper returns the
    _callmethod result and forwards both args and kwargs."""
    dic = {'types': types}
    exec('''def __getattr__(self, key):
    result = self._callmethod('__getattribute__', (key,))
    if isinstance(result, types.MethodType):
        def wrapper(*args, **kwargs):
            return self._callmethod(key, args, kwargs)
        return wrapper
    return result''', dic)
    proxyName = target.__name__ + "Proxy"
    ProxyType = type(proxyName, (NamespaceProxy,), dic)
    ProxyType._exposed_ = tuple(dir(target))
    return ProxyType
Update: Edited code and explanation to pass kwargs as well. Check Viktor's answer below
Here's a less verbose alternative that I found to work well in practice. Not sure if there are any disadvantages.
class TestClass:
    def __init__(self, a):
        self.a = a
    def b(self):
        print self.a

def wrap_test_class(*args, **kwargs):
    """Factory registered in place of the class: builds a TestClass and
    bolts on a getter, since proxies expose methods but not attributes."""
    obj = TestClass(*args, **kwargs)
    obj.get_a = lambda: obj.a
    return obj

class MyManager(BaseManager): pass

MyManager.register('test', wrap_test_class)
This allows you to access a by calling proxy_object.get_a()
This is an example of passing parameters (example: __getitem__) or not (example: __len__):
class TestProxy(NamespaceProxy):
    """Proxy forwarding __len__ (no arguments) and __getitem__ (one
    positional argument) to the referent via _callmethod."""

    _exposed_ = ('__getattribute__', '__setattr__', '__delattr__','__len__','__getitem__')

    def __len__(self):
        # Zero-argument forwarding.
        return object.__getattribute__(self, '_callmethod')('__len__')

    def __getitem__(self, index):
        # Forwarding with a positional argument tuple.
        forward = object.__getattribute__(self, '_callmethod')
        return forward('__getitem__', (index,))
Charcit's solution was working for me except I made a small completion/bugfixing. There kwargs cannot be passed to the called methods. So the fixed version:
class TestProxy(NamespaceProxy):
    """Fixed proxy: kwargs are forwarded to _callmethod as well."""
    _exposed_ = tuple(dir(Test))

    def __getattr__(self, name):
        result = super().__getattr__(name)
        if isinstance(result, types.MethodType):
            def wrapper(*args, **kwargs):
                return self._callmethod(name, args, kwargs) # args and kwargs!
            return wrapper
        return result
Didn't test the 'autoproxy' method but this fix should apply there too:
def Proxy(target):
    """'Auto proxy' factory with the same kwargs/return fix applied to the
    exec-generated __getattr__."""
    dic = {'types': types}
    exec('''def __getattr__(self, key):
    result = self._callmethod('__getattribute__', (key,))
    if isinstance(result, types.MethodType):
        def wrapper(*args, **kwargs):
            return self._callmethod(key, args, kwargs)
        return wrapper
    return result''', dic)
    proxyName = target.__name__ + "Proxy"
    ProxyType = type(proxyName, (NamespaceProxy,), dic)
    ProxyType._exposed_ = tuple(dir(target))
    return ProxyType
I was originally using the answer by @shtse8 (as modified by @Charchit-Agarwal and @Viktor), which mostly worked for me, but it was trying to pickle the methods returned by the __getattr__ function, which didn't work in some use cases.
So here is another solution which is closer to the implementation of NamespaceProxy, but adds in the public methods the same way AutoProxy does.
def MakeProxyWithAttrs(target):
    """ Create a derived NamespaceProxy class for `target`. """
    # This bit follows what multiprocessing.managers.MakeProxy normally does.
    dic = {}
    public_methods = [m for m in dir(target) if m[0] != '_']
    for meth in public_methods:
        # Generate one real forwarding method per public name, so nothing
        # needs to be pickled out of a __getattr__ closure.
        exec('''def %s(self, *args, **kwds):
    return self._callmethod(%r, args, kwds)
'''%(meth,meth), dic)
    # NamespaceProxy starts with __getattribute__ defined, so subclass from that
    # rather than BaseProxy, as MakeProxy normally does.
    proxy_name = target.__name__ + "_Proxy"
    ProxyType = type(proxy_name, (NamespaceProxy,), dic)
    # Expose all the public methods and also __getattribute__ and __setattr__.
    ProxyType._exposed_ = tuple(public_methods + ['__getattribute__', '__setattr__'])
    return ProxyType

class Manager(multiprocessing.managers.BaseManager): pass

test_proxy = MakeProxyWithAttrs(test_func)
Manager.register('test', test_func, test_proxy)
manager = Manager()
manager.start()
test = manager.test()
This question already has answers here:
How can I decorate an instance method with a decorator class?
(2 answers)
Closed 4 years ago.
I'm trying to memoize using a decorator with the decorator being a class not a function, but I'm getting the error
TypeError: seqLength() takes exactly 2 arguments (1 given)
I'm guessing this has something to do with the classes, but not sure what's wrong from there.
The code:
import sys

class memoize(object):
    '''memoize decorator'''
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        try:
            return self.cache[args]
        except KeyError:
            # NOTE(review): passing `self` (the memoize instance) as the
            # first argument is the bug the question is about -- the
            # decorated method's `self` ends up being this decorator,
            # not the collatz instance.
            value = self.func(self, *args)
            self.cache[args] = value
            return value
class collatz(object):
    """Question code (Python 2): searches for the longest Collatz
    sequence below n."""

    def __init__(self, n):
        self.max = 1
        self.n = n

    #memoize   (rendered '#'; this is the decorator '@memoize' in the original post)
    def seqLength(self, n):
        # Recursive Collatz sequence length.
        if n>1:
            if n%2 == 0:
                return 1+self.seqLength(n/2)
            else:
                return 1+self.seqLength(3*n+1)
        else:
            return 1

    def maxLength(self):
        # NOTE(review): compares the LENGTH l against self.max but then
        # stores the ARGUMENT n in self.max -- kept as posted.
        for n in xrange(1, self.n):
            l = self.seqLength(n)
            if l > self.max:
                self.max = n
        return self.max

n = int(sys.argv[1])
c = collatz(n)
print c.maxLength()
This is confusing, syntactically. It's not clear if self.func is part of your memoize or a separate function that's part of some other object of some other class. (You mean the latter, BTW)
value = self.func(self, *args)
Do this to make it clear that the_func is just a function, not a member of the memoize class.
the_func= self.func
value= the_func( *args )
That kind of thing prevents confusion over the class to which self. is bound.
Also, please spell it Memoize. With a leading capital letter. It is a class definition, after all.
Using a class as a decorator is tricky, because you have to implement the descriptor protocol correctly (the currently accepted answer doesn't.) A much, much easier solution is to use a wrapper function, because they automatically implement the descriptor protocol correctly. The wrapper equivalent of your class would be:
import functools

def memoize(func):
    """Memoizing decorator implemented as a closure.

    Results are cached by positional-argument tuple, so all arguments
    must be hashable; keyword arguments are not supported (matching the
    class-based decorator it replaces).
    """
    cache = {}

    # FIX: restored the '@' -- the decorator was mangled to the comment
    # '#functools.wraps(func)' in the post, so wrapper never inherited
    # func's name/docstring.
    @functools.wraps(func)
    def wrapper(*args):
        try:
            # EAFP: a cache hit is the common case.
            return cache[args]
        except KeyError:
            value = func(*args)
            cache[args] = value
            return value
    return wrapper
When have so much state you want to encapsulate it in a class anyway, you can still use a wrapper function, for example like so:
import functools

class _Memoize(object):
    '''memoize decorator helper class: holds the wrapped function and
    its args-keyed result cache.'''
    def __init__(self, func):
        self.func = func
        self.cache = {}

    def __call__(self, *args):
        try:
            return self.cache[args]
        except KeyError:
            value = self.func(*args)
            self.cache[args] = value
            return value


def memoize(func):
    """Decorator: memoize func via a _Memoize instance, exposed through a
    plain wrapper function so the descriptor protocol (binding of self on
    methods) keeps working."""
    o = _Memoize(func)

    # FIX: restored the '@' -- the decorator was mangled to the comment
    # '#functools.wraps(func)' in the post.
    @functools.wraps(func)
    def wrapper(*args):
        return o(*args)
    return wrapper
A decorator is just syntactic sugar for foo = decorator(foo), so in this case you're ending up making the self of seqLength be memoize instead of collatz. You need to use descriptors. This code works for me:
class memoize(object):
    '''Memoizing method descriptor: binds the wrapped function to the
    accessing instance and caches results per (instance, args).

    FIX over the original: the cache dict used to live on the
    memoize_inst created freshly by EVERY attribute access, so it was
    always empty and nothing was ever actually memoized.  The cache now
    lives on the descriptor and is shared, keyed by (instance, args)
    (instances must therefore be hashable).
    '''
    def __init__(self, func):
        self.func = func
        self.cache = {}  # shared across accesses; keyed by (instance, args)

    def __get__(self, obj, type=None):
        # A lightweight bound callable is still built per access, but it
        # reuses the descriptor-level cache.
        return self.memoize_inst(obj, self.func, self.cache)

    class memoize_inst(object):
        def __init__(self, inst, fget, cache):
            self.inst = inst
            self.fget = fget
            self.cache = cache

        def __call__(self, *args):
            key = (self.inst, args)
            # if cache hit, done; otherwise populate cache and return
            if key not in self.cache:
                self.cache[key] = self.fget(self.inst, *args)
            return self.cache[key]
More on descriptors:
http://docs.python.org/howto/descriptor.html#descriptor-example
I am looking for a way to intercept instance method calls in class MyWrapper below:
class SomeClass1:
    """Base class whose results the wrapper should accumulate."""
    def a1(self):
        self.internal_z()
        return "a1"
    def a2(self):
        return "a2"
    def internal_z(self):
        return "z"

class SomeClass2(SomeClass1):
    pass

class MyWrapper(SomeClass2):
    # Sketch of the desired behaviour (question pseudocode):
    # def INTERCEPT_ALL_FUNCTION_CALLS():
    #     result = Call_Original_Function()
    #     self.str += result
    #     return result
    def __init__(self):
        # `str` shadows the builtin; kept as posted.
        self.str = ''
    def getFinalResult(self):
        return self.str

x = MyWrapper()
x.a1()
x.a2()
Some quick and dirty code:
class Wrapper:
    """Delegating wrapper that records the result of every method call
    made on the wrapped object."""

    def __init__(self, obj):
        self.obj = obj
        self.callable_results = []

    def __getattr__(self, name):
        # Only reached for names NOT found on the Wrapper itself.
        print("Getting {0}.{1}".format(type(self.obj).__name__, name))
        target = getattr(self.obj, name)
        if not hasattr(target, "__call__"):
            return target
        return self.FunctionWrapper(self, target)

    class FunctionWrapper:
        """Callable shim: logs the call and stores its result on the parent."""

        def __init__(self, parent, func):
            self.parent = parent
            self.callable = func

        def __call__(self, *args, **kwargs):
            print("Calling {0}.{1}".format(
                type(self.parent.obj).__name__, self.callable.__name__))
            outcome = self.callable(*args, **kwargs)
            self.parent.callable_results.append(outcome)
            return outcome
class A:
    """Demo target for Wrapper."""
    def __init__(self, val): self.val = val
    def getval(self): return self.val

w = Wrapper(A(10))
print(w.val)            # attribute access: passed through, not recorded
w.getval()              # method call: result recorded
print(w.callable_results)
Might not be thorough, but could be a decent starting point, I guess.
You could wrap your methods with decorators at instantiation time:
#!/usr/bin/env python
import inspect

def log(func):
    """Decorator (Python 2): print arguments before and the result after
    each call to func."""
    def _logged(*args, **kw):
        print "[LD] func", func.__name__, "called with:", args, kw
        result = func(*args, **kw)
        print "[LD] func", func.__name__, "returned:", result
        return result
    return _logged

class A(object):
    def __init__(self):
        # Rebind every public method on the INSTANCE as a logged version
        # of the already-bound method.
        for x in inspect.getmembers(self, (inspect.ismethod)):
            if not x[0].startswith('__'):
                setattr(self, x[0], log(getattr(self, x[0])))
    def hello(self):
        print "Hello"
    def bye(self):
        print "Bye"
        return 0
Now if you call hello or bye, the call goes through log first:
a = A()
a.hello()
a.bye()
# [LD] func hello called with: () {}
# Hello
# [LD] func hello returned: None
# [LD] func bye called with: () {}
# Bye
# [LD] func bye returned: 0
What you want to do is quite similar to this question.
You should take your example code in the reverse order, i mean creating a class to record return values of method calls, and make the classes you want to watch inherit from it.
Which would give something like this
class RetValWatcher(object):
    """Base class that records the return value of every method call made
    on its instances.  (Python 2 code.)"""

    def __init__(self):
        self.retvals = []

    def __getattribute__(self, name):
        # Intercepts every attribute access; callables get a recording shim.
        attr = super(RetValWatcher, self).__getattribute__(name)
        if callable(attr):
            def wrapped(*args, **kwargs):
                retval = attr(*args, **kwargs)
                self.retvals.append(retval)
                return retval
            return wrapped
        else:
            return attr

    def getFinalResult(self):
        return ''.join(self.retvals)

class MyClass(RetValWatcher):
    def a(self):
        self.internal_z()
        return 'a1'
    def b(self):
        return 'b1'
    def internal_z(self):
        return 'z'

x = MyClass()
x.a()
x.b()
# internal_z's result is recorded too, hence the leading 'z'.
print x.getFinalResult()
#'za1b1'
With some minor changes, this method would also allow you to record return values across all RetValWatcher instances.
Edit: added changes suggested by singularity's comment
Edit2: forgot to handle the case where attr is not a method (thx singularity again)
Edit3: fixed typo
Is there any way to avoid calling __init__ on a class while initializing it, such as from a class method?
I am trying to create a case and punctuation insensitive string class in Python used for efficient comparison purposes but am having trouble creating a new instance without calling __init__.
>>> class String:
def __init__(self, string):
self.__string = tuple(string.split())
self.__simple = tuple(self.__simple())
def __simple(self):
letter = lambda s: ''.join(filter(lambda s: 'a' <= s <= 'z', s))
return filter(bool, map(letter, map(str.lower, self.__string)))
def __eq__(self, other):
assert isinstance(other, String)
return self.__simple == other.__simple
def __getitem__(self, key):
assert isinstance(key, slice)
string = String()
string.__string = self.__string[key]
string.__simple = self.__simple[key]
return string
def __iter__(self):
return iter(self.__string)
>>> String('Hello, world!')[1:]
Traceback (most recent call last):
File "<pyshell#2>", line 1, in <module>
String('Hello, world!')[1:]
File "<pyshell#1>", line 17, in __getitem__
string = String()
TypeError: __init__() takes exactly 2 positional arguments (1 given)
>>>
What should I replace string = String(); string.__string = self.__string[key]; string.__simple = self.__simple[key] with to initialize the new object with the slices?
EDIT:
As inspired by the answer written below, the initializer has been edited to quickly check for no arguments.
def __init__(self, string=None):
    # Calling String() with no argument builds an "empty" instance that
    # __getitem__ can then populate with pre-sliced tuples.
    if string is None:
        self.__string = self.__simple = ()
    else:
        self.__string = tuple(string.split())
        self.__simple = tuple(self.__simple())
When feasible, letting __init__ get called (and make the call innocuous by suitable arguments) is preferable. However, should that require too much of a contortion, you do have an alternative, as long as you avoid the disastrous choice of using old-style classes (there is no good reason to use old-style classes in new code, and several good reasons not to)...:
class String(object):
...
bare_s = String.__new__(String)
This idiom is generally used in classmethods which are meant to work as "alternative constructors", so you'll usually see it used in ways such as...:
# FIX: restored the '@' -- the decorator was rendered as the comment
# '#classmethod' in the post.
@classmethod
def makeit(cls):
    """Alternative constructor: build an instance without running __init__.

    Defined as a classmethod so it is inherited properly and produces
    subclass instances when called on a subclass.
    """
    self = cls.__new__(cls)
    # etc etc (populate the instance's attributes here), then
    return self
(this way the classmethod will properly be inherited and generate subclass instances when called on a subclass rather than on the base class).
A trick the standard pickle and copy modules use is to create an empty class, instantiate the object using that, and then assign that instance's __class__ to the "real" class. e.g.
>>> class MyClass(object):
... init = False
... def __init__(self):
... print 'init called!'
... self.init = True
... def hello(self):
... print 'hello world!'
...
>>> class Empty(object):
... pass
...
>>> a = MyClass()
init called!
>>> a.hello()
hello world!
>>> print a.init
True
>>> b = Empty()
>>> b.__class__ = MyClass
>>> b.hello()
hello world!
>>> print b.init
False
But note, this approach is very rarely necessary. Bypassing the __init__ can have some unexpected side effects, especially if you're not familiar with the original class, so make sure you know what you're doing.
Using a metaclass provides a nice solution in this example. The metaclass has limited use but works fine.
>>> class MetaInit(type):
def __call__(cls, *args, **kwargs):
if args or kwargs:
return super().__call__(*args, **kwargs)
return cls.__new__(cls)
>>> class String(metaclass=MetaInit):
def __init__(self, string):
self.__string = tuple(string.split())
self.__simple = tuple(self.__simple())
def __simple(self):
letter = lambda s: ''.join(filter(lambda s: 'a' <= s <= 'z', s))
return filter(bool, map(letter, map(str.lower, self.__string)))
def __eq__(self, other):
assert isinstance(other, String)
return self.__simple == other.__simple
def __getitem__(self, key):
assert isinstance(key, slice)
string = String()
string.__string = self.__string[key]
string.__simple = self.__simple[key]
return string
def __iter__(self):
return iter(self.__string)
>>> String('Hello, world!')[1:]
<__main__.String object at 0x02E78830>
>>> _._String__string, _._String__simple
(('world!',), ('world',))
>>>
Addendum:
After six years, my opinion favors Alex Martelli's answer more than my own approach. With meta-classes still on the mind, the following answer shows how the problem can be solved both with and without them:
#! /usr/bin/env python3
# Selects which of the three implementations below is active:
# 'metaclass', 'classmethod', or 'inline'.
METHOD = 'metaclass'

class NoInitMeta(type):
    """Metaclass adding `new`: construct an instance bypassing __init__."""
    def new(cls):
        return cls.__new__(cls)

class String(metaclass=NoInitMeta if METHOD == 'metaclass' else type):
    """Case- and punctuation-insensitive string.  METHOD picks how
    __getitem__ creates a new instance without running __init__."""

    def __init__(self, value):
        self.__value = tuple(value.split())
        # Lower-cased, letters-only words; empty words filtered out.
        self.__alpha = tuple(filter(None, (
            ''.join(c for c in word.casefold() if 'a' <= c <= 'z') for word in
            self.__value)))

    def __str__(self):
        return ' '.join(self.__value)

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return NotImplemented
        return self.__alpha == other.__alpha

    if METHOD == 'metaclass':
        def __getitem__(self, key):
            if not isinstance(key, slice):
                raise NotImplementedError
            # new() comes from the NoInitMeta metaclass.
            instance = type(self).new()
            instance.__value = self.__value[key]
            instance.__alpha = self.__alpha[key]
            return instance
    elif METHOD == 'classmethod':
        def __getitem__(self, key):
            if not isinstance(key, slice):
                raise NotImplementedError
            instance = self.new()
            instance.__value = self.__value[key]
            instance.__alpha = self.__alpha[key]
            return instance

        #classmethod   ('@classmethod' in the original post)
        def new(cls):
            return cls.__new__(cls)
    elif METHOD == 'inline':
        def __getitem__(self, key):
            if not isinstance(key, slice):
                raise NotImplementedError
            cls = type(self)
            instance = cls.__new__(cls)
            instance.__value = self.__value[key]
            instance.__alpha = self.__alpha[key]
            return instance
    else:
        raise ValueError('METHOD did not have an appropriate value')

    def __iter__(self):
        return iter(self.__value)
def main():
    """Demo: slicing a String yields a String without calling __init__."""
    x = String('Hello, world!')
    y = x[1:]
    print(y)

if __name__ == '__main__':
    main()
Pass another argument to the constructor, like so:
def __init__(self, string, simple = None):
    # When `simple` is supplied (by __getitem__), both tuples arrive
    # pre-built; otherwise they are derived from the raw string.
    if simple is None:
        self.__string = tuple(string.split())
        self.__simple = tuple(self.__simple())
    else:
        self.__string = string
        self.__simple = simple
You can then call it like this:
def __getitem__(self, key):
    # Slice both tuples and hand them to the two-argument constructor.
    assert isinstance(key, slice)
    return String(self.__string[key], self.__simple[key])
Also, I'm not sure it's allowed to name both the field and the method __simple. If only for readability, you should change that.