Can you yield from a lambda function? - python

I have a generator function in a class:
self.s = [['a',1],['b',2],['c',3]]
def generator(self):
    for r in self.s:
        yield r
In another function I initialize it as a variable:
var = self.generator()
And it's yielded as necessary:
>>> next(var) # returns ['a',1]
>>> next(var) # returns ['b',2]
>>> next(var) # returns ['c',3]
Can defining the generator be done in one line, however? I've considered the below:
var = lambda: [(yield r) for r in self.s]
>>> next(var) # SyntaxError: 'yield' inside list comprehension
Here's a minimal version of the code I'm working with:
class Foo():
    def __init__(self):
        self.s = {}

    def generator(self):
        for r in self.s:
            yield r

    def fetch(self):
        if not self.s:
            self.fetch_gen = self.generator()
            self.s['a'] = 1
            self.s['b'] = 2
            self.s['c'] = 3
        try:
            var = self.s.get(next(self.fetch_gen))
        except StopIteration:
            return None
        return var

BAR = Foo()
while True:
    OUT = BAR.fetch()
    if OUT is None:
        break
    print(OUT)
Output is below:
1
2
3
I just wanted to see if I could get rid of Foo.generator and instead declare the generator in one line.

You are returning a list comprehension. You can just do:
var = (r for r in self.s)
That creates a generator over the values you want; you then consume it with next(var), as in your code.
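
Applied to the minimal Foo example, the one-liner works as long as the generator expression is created after self.s is populated: a generator expression evaluates its outermost iterable immediately, whereas the original generator method didn't touch self.s until the first next(). A minimal sketch of that rearrangement:

class Foo():
    def __init__(self):
        self.s = {}

    def fetch(self):
        if not self.s:
            self.s['a'] = 1
            self.s['b'] = 2
            self.s['c'] = 3
            # Build the generator only after self.s is filled; building it
            # over the empty dict and then adding keys would raise
            # "dictionary changed size during iteration" on the first next().
            self.fetch_gen = (r for r in self.s)
        try:
            return self.s.get(next(self.fetch_gen))
        except StopIteration:
            return None

BAR = Foo()
while True:
    OUT = BAR.fetch()
    if OUT is None:
        break
    print(OUT)  # prints 1, 2, 3 on separate lines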

Related

How do I hard-code variables in a dynamically created function in python?

I have the following Python 3 code:
class Test:
    pos = [0,0]
    actions = []

    def bar(self, target):
        for i in target:
            def _():
                print(i, end="")
            self.actions.append(_)

foo = Test()
foo.bar("abcd")
for i in foo.actions:
    i()
Which is meant to output:
abcd
but instead it outputs:
dddd
I'm pretty sure the function is using the value of i at the time it executes (the last value i had) and not i's value at the time the function _ is declared, which is what I want.
The general solution to this is to store the value as a default parameter value, like this:
class Test:
    pos = [0,0]
    actions = []

    def bar(self, target):
        for i in target:
            def _(i=i):  # This is the only changed line
                print(i, end="")
            self.actions.append(_)

foo = Test()
foo.bar("abcd")
for i in foo.actions:
    i()
>>> abcd
Let's output some things:
class Test:
    pos = [0, 0]
    actions = []

    def bar(self, target):
        for i in target:
            print(f"i={i} id={id(i)}")
            def _():
                print(f"_i={i} _id={id(i)}")
                print(i, end="")
            self.actions.append(_)
Output:
i=a id=2590411675120
i=b id=2590411458416
i=c id=2590411377456
i=d id=2590411377200
_i=d _id=2590411377200
d_i=d _id=2590411377200
d_i=d _id=2590411377200
d_i=d _id=2590411377200
As you can see, the i captured by _ is rebound on every iteration of the for loop, so by the time the stored functions run they all see the last value.
How to solve this? Pass i as an argument:
from functools import partial

class Test:
    pos = [0, 0]
    actions = []

    def bar(self, target):
        for i in target:
            print(f"i={i} id={id(i)}")
            def _(i):
                print(f"_i={i} _id={id(i)}")
                print(i, end="")
            self.actions.append(partial(_, i))
Output:
i=a id=2618064721392
i=b id=2618064504688
i=c id=2618064423728
i=d id=2618064423472
_i=a _id=2618064721392
a_i=b _id=2618064504688
b_i=c _id=2618064423728
Let's remove print statements now:
from functools import partial

class Test:
    pos = [0, 0]
    actions = []

    def bar(self, target):
        for i in target:
            def _(i):
                print(i, end="")
            self.actions.append(partial(_, i))

foo = Test()
foo.bar("abcd")
for i in foo.actions:
    i()
# Output: abcd
The reason you are getting the last value of i is that the inner function holds a reference to i rather than its value at definition time, and since i is reassigned in the loop, every call sees the last value i had. A workaround, as Mandera said, is to use a default parameter; this works because the value of i is stored in the function's __defaults__ attribute, which holds the default parameter values.
So the final code would be:
class Test:
    pos = [0,0]
    actions = []

    def bar(self, target):
        for i in target:
            def foo(p=i):
                print(p, end="")
            self.actions.append(foo)

if __name__ == "__main__":
    foo = Test()
    foo.bar("abcd")
    for i in foo.actions:
        i()
Note: the inner function foo stored in actions is not overwritten on later iterations; each pass through the loop creates a new function object with its own default value for p.
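
A quick way to see where each captured value ends up: every function created in the loop keeps its own copy of i in its __defaults__ attribute. A small check, assuming a fresh interpreter with no prior calls (remember actions is a class attribute, so it is shared between instances):

t = Test()
t.bar("abcd")
for f in t.actions:
    print(f.__defaults__)
# ('a',)
# ('b',)
# ('c',)
# ('d',)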

Trying to create a function that repeats a function

I have a helping function:
def incr(x):
return x+1
I want to create a function named "repeated" that uses the "incr" function n times on a certain parameter.
In the end I want to use the "repeated" function in this manner only:
repeated(incr, 4)(2)
That, for example, will output 6.
So far I tried to do this:
def repeated(f, n):
    func, x = f
    for i in range(n):
        func(x)
But it gave me an error saying I can't unpack a non-tuple function object.
It doesn't seem like I have access to the "(2)" inside the function.
I do not recommend using such a syntax construct for such a task:
repeated(incr, 4)(2)
Your repeated function must return another function, which will then be called with (2).
This should work in your requested manner:
def incr(x):
    return x+1

def repeated(f, x):
    # function foo will be returned by repeated and called by (2)
    def foo(n):
        res = x
        for i in range(n):
            res = f(res)
        return res
    return foo

print(repeated(incr, 4)(2))
I think you may want something in a functional-programming style.
Adding *args lets it deal with the different kinds of functions you want to repeat.
I can't tell what result you'd expect when there are extra positional arguments, so I haven't treated them specially.
code:
import functools

def incr(x):
    return x + 1

def incx(x, y=0):
    return x + y + 1

def repeated_inner(func, args, times):
    head, *rest = args
    for _ in range(times):
        head = func(head, *rest)
    return head

def repeated(func, *args):
    return functools.partial(repeated_inner, func, args)

print(repeated(incr, 4)(2))
print(repeated(incx, 4)(2))
print(repeated(incx, 4, 3)(2))
result
6
6
12
The repeated function must return a function:
def repeated(func, n):
    def repeatedfunc(x):
        rsl = x
        for i in range(n):
            rsl = func(rsl)
        return rsl
    return repeatedfunc

def incr(x):
    return x+1

rslt = repeated(incr, 4)(2)
print(rslt)
output
6
You should write something like this:
def repeated(f, arg_0, n):
    arg = arg_0
    for i in range(n):
        arg = f(arg)
    return arg
In a more general situation:
def repeated(f, arg):
    def n_f(n):
        result = arg
        for i in range(n):
            result = f(result)
        return result
    return n_f
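
For completeness, a compact variant built on functools.reduce is also possible; this is only a sketch, but it keeps the repeated(f, n)(x) calling convention from the question:

from functools import reduce

def incr(x):
    return x + 1

def repeated(f, n):
    # Apply f n times, threading the running value through reduce.
    return lambda x: reduce(lambda acc, _: f(acc), range(n), x)

print(repeated(incr, 4)(2))  # 6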

Python - next method not working properly with generator

I made a class in Python that splits a stream of code into tokens and advances token by token so I can work with them.
import re

class Tokenizer:
    def __init__(self, input_file):
        self.in_file = input_file
        self.tokens = []
        self.current_token = None
        self.next_token = None
        self.line = 1

    def split_tokens(self):
        ''' Create a list with all the tokens of the input file '''
        self.tokens = re.findall(r"\w+|[{}()\[\].;,+\-*/&|<>=~\n]", self.in_file)

    def __iter__(self):
        for token in self.tokens:
            if token != '\n':
                yield token
            else:
                self.line += 1

    def advance(self):
        self.current_token = self.next_token
        self.next_token = next(self.__iter__())
After initialization:
text = 'constructor SquareGame03 new()\n\
{let square=square;\n\
let direction=direction;\n\
return square;\n\
}'
t = Tokenizer(text)
t.split_tokens()
t.advance()
It seems to work if I print the tokens:
print(t.current_token, t.next_token)
None constructor
but every subsequent call of the advance method gives these results:
t.advance()
print(t.current_token, t.next_token)
constructor constructor
t.advance()
print(t.current_token, t.next_token)
constructor constructor
So it's not advancing, and I can't understand why.
Here, .__iter__ is implemented as a generator function (not a generator iterator), so each call to it returns a brand-new generator iterator.
Every time Tokenizer.advance is called, a fresh iterator is therefore created by .__iter__ and only its first item is ever consumed. Instead, a single iterator should be stored on the Tokenizer object at the initialization stage and reused for all subsequent calls.
For example:
import re

class Tokenizer:
    def __init__(self, input_file):
        self.in_file = input_file
        self.tokens = []
        self.current_token = None
        self.next_token = None
        self.line = 1

    def split_tokens(self):
        ''' Create a list with all the tokens of the input file '''
        self.tokens = re.findall(r"\w+|[{}()\[\].;,+\-*/&|<>=~\n]", self.in_file)
        self.iterator = self.__iter__()

    def __iter__(self):
        for token in self.tokens:
            if token != '\n':
                yield token
            else:
                self.line += 1

    def advance(self):
        self.current_token = self.next_token
        self.next_token = next(self.iterator)
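
With that change, repeated calls to advance should walk through the token stream (reusing the sample text from the question):

t = Tokenizer(text)
t.split_tokens()
t.advance()
t.advance()
print(t.current_token, t.next_token)  # constructor SquareGame03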
Another minimal example that may explain:
def fib():
    a = 0
    b = 1
    while True:
        yield b
        a, b = b, a + b

# 1, 1, 2, ...
fibs = fib()
next(fibs)
next(fibs)
next(fibs)

# 1, 1, 1, ...
next(fib())
next(fib())
next(fib())
By the way, I cannot see a reason to mix the .__iter__ magic method with a separate .advance method; it might introduce some confusion.
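
For example, a sketch that keeps the same behaviour but stores a single private generator instead of defining __iter__ at all (the _token_stream and _stream names are made up for this sketch):

import re

class Tokenizer:
    def __init__(self, input_file):
        self.in_file = input_file
        self.tokens = []
        self.current_token = None
        self.next_token = None
        self.line = 1

    def split_tokens(self):
        ''' Create a list with all the tokens of the input file '''
        self.tokens = re.findall(r"\w+|[{}()\[\].;,+\-*/&|<>=~\n]", self.in_file)
        self._stream = self._token_stream()  # created once, reused by advance()

    def _token_stream(self):
        # Plain private generator method; callers are never tempted to
        # create a second iterator by accident.
        for token in self.tokens:
            if token != '\n':
                yield token
            else:
                self.line += 1

    def advance(self):
        self.current_token = self.next_token
        self.next_token = next(self._stream, None)  # None once exhausted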

Current value of generator

In Python I can build a generator like so:
def gen():
    x = range(0, 100)
    for i in x:
        yield i
I can now define an instance of the generator using:
a = gen()
And pull new values from the generator using
a.next()
But is there a way—a.current()—to get the current value of the generator?
There isn't such a method, and you cannot add attributes to a generator. A workaround is to create an iterator object that wraps your generator and keeps a 'current' attribute. Taking it a step further, you can use that class as a decorator on the generator function.
Here's a utility decorator class which does that:
class with_current(object):
    def __init__(self, generator):
        self.__gen = generator()

    def __iter__(self):
        return self

    def __next__(self):
        self.current = next(self.__gen)
        return self.current

    def __call__(self):
        return self
You can then use it like this:
@with_current
def gen():
    x = range(0, 100)
    for i in x:
        yield i

a = gen()
print(next(a))
print(next(a))
print(a.current)
Outputs:
0
1
1
You can simply store the value in a variable:
current_value = a.next()
then use current_value for all it's worth.
Python does this all the time in for statements:
a = xrange(10)
for x in a:
    print(x)
Here you are defining x as the current value of a.
Using another global variable to store the current value might be feasible.
# Variable that stores the current value of the generator
generator_current_val = None

def generator():
    global generator_current_val  # to set the current value
    for i in range(10):
        generator_current_val = i
        yield i

a = generator()
print(next(a))  # 0
print(next(a))  # 1
print(next(a))  # 2
print(generator_current_val)  # 2
print(next(a))  # 3
You can use a sliding window generator, something like
from typing import Iterator, Optional, Tuple, TypeVar

T = TypeVar('T')

def _window(i: Iterator[T]) -> Iterator[Tuple[T, Optional[T]]]:
    prev = None
    for x in i:
        if prev is not None:
            yield prev, x
        prev = x
    yield prev, None

i = _window(iter([1, 2, 3, 4]))
print(next(i))  # (1, 2)
print(next(i))  # (2, 3)
print(next(i))  # (3, 4)
print(next(i))  # (4, None)
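
A shorter sliding-window variant can also be put together from itertools (a sketch, not part of the answer above): tee duplicates the iterator and zip_longest pads the final pair with None.

from itertools import tee, zip_longest

def window(it):
    a, b = tee(it)
    next(b, None)             # advance the second copy by one element
    return zip_longest(a, b)  # yields (current, next); the last pair is (value, None)

print(list(window(iter([1, 2, 3, 4]))))  # [(1, 2), (2, 3), (3, 4), (4, None)]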

updating a python generator after it has been created

Is there any way to do something like this in python 2.7?
def scaleit(g, k):
    for item in g:
        yield item*k

promise = ??????
# defines a generator for reference but not use:
# other functions can make use of it,
# but it requires a call to promise.fulfill() to
# define what the generator is going to yield;
# promise raises an error if next() is called
# before the promise is fulfilled

f = scaleit(promise, 3)
promise.fulfill(range(10))
for item in f:
    print item
Yes; generators don't run until they're actually iterated, so you can just defer iterating the fulfilled promise's value until requested:
class Promise(object):
    def fulfill(self, result):
        self.result = result

    def __iter__(self):
        return iter(self.result)

def scaleit(g, k):
    for item in g:
        yield item*k

promise = Promise()
f = scaleit(promise, 3)
promise.fulfill(range(10))
print list(f)
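
If you also need the part of the question where next() raises an error before the promise is fulfilled, a small variant of the Promise above could use a sentinel (just a sketch):

class Promise(object):
    _UNSET = object()  # sentinel: distinguishes "not fulfilled" from a result of None

    def __init__(self):
        self.result = self._UNSET

    def fulfill(self, result):
        self.result = result

    def __iter__(self):
        if self.result is self._UNSET:
            raise RuntimeError("promise not fulfilled")
        return iter(self.result)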
Is this what you want?
def scaleit(g, k):
    for item in g:
        yield item * k

class Promise(object):
    def __init__(self):
        self.g = None

    def fulfill(self, g):
        self.g = iter(g)

    def __iter__(self):
        return self

    def next(self):
        return next(self.g)

promise = Promise()
f = scaleit(promise, 3)
promise.fulfill(range(10))
for item in f:
    print item
I think you want the send() method on generators:
def gen():
    reply = yield None
    if not reply:  # reply will be None if send() wasn't called
        raise ValueError("promise not fulfilled")
    yield 5

g1 = gen()
next(g1)              # advance to the first yield
print(g1.send(True))  # fulfil the promise; prints 5

g2 = gen()
next(g2)
# forget to send
print(next(g2))  # raises ValueError
