I am having trouble getting a VPC ID from my shared infrastructure stack into another stack that creates an EC2 instance. Specifically, my error is:
AttributeError: type object 'property' has no attribute '__jsii_type__'
when calling ec2.Instance
Example code
app.py
app = cdk.App()
vpc_stack = VpcStack(app, "VpcStack")
ec2_stack = EC2Stack(app, "EC2Stack", vpc=vpc_stack.vpc)
ec2_stack.py
class EC2Stack(Stack):
def __init__(self, scope: Construct, construct_id: str, *, vpc=ec2.Vpc, **kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
ec2.Instance(self, "Instance",
vpc=vpc.vpc_id,
instance_type=ec2.InstanceType("t3.nano"),
machine_image=ec2.MachineImage.latest_amazon_linux()
)
vpc_stack.py
class VpcStack(Stack):
vpc = ec2.Vpc
def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
vpcName = "MAIN"
vpcCidr = "10.0.0.0/16"
natAMI = ec2.GenericLinuxImage({'us-east-2': 'ami-0f9c61b5a562a16af'})
natInstance = ec2.NatProvider.instance(instance_type=ec2.InstanceType("t3.micro"),machine_image=natAMI)
vpc = ec2.Vpc(self, str(vpcName), cidr=str(vpcCidr), nat_gateway_provider=natInstance, nat_gateways=1)
I would like to be able to pull the VPC ID into my EC2 stack, to start with, although the ability to share resources across stacks in general is the end goal.
I have tried following the AWS docs here, as well as this otherwise very helpful blog post here.
Try adjusting your vpc_stack.py file to look like
class VpcStack(Stack):
def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
vpcName = "MAIN"
vpcCidr = "10.0.0.0/16"
natAMI = ec2.GenericLinuxImage({'us-east-2': 'ami-0f9c61b5a562a16af'})
natInstance = ec2.NatProvider.instance(instance_type=ec2.InstanceType("t3.micro"),machine_image=natAMI)
self.vpc = ec2.Vpc(self, str(vpcName), cidr=str(vpcCidr), nat_gateway_provider=natInstance, nat_gateways=1)
Then make sure you're passing the Vpc object itself to your EC2 instance. In the original code, VpcStack never assigned the construct to self.vpc, so vpc_stack.vpc was the ec2.Vpc class itself, and vpc.vpc_id evaluated to an unbound property object, which is what jsii choked on. Also annotate the parameter as vpc: ec2.Vpc rather than giving the class as a default value:
class EC2Stack(Stack):
    def __init__(self, scope: Construct, construct_id: str, *, vpc: ec2.Vpc, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        ec2.Instance(self, "Instance",
            vpc=vpc,  # <- the Vpc object instead of vpc.vpc_id
            instance_type=ec2.InstanceType("t3.nano"),
            machine_image=ec2.MachineImage.latest_amazon_linux()
        )
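Passing the construct like this also lets CDK wire up the cross-stack reference (a CloudFormation Output/Fn::ImportValue pair) for you. If the stacks can't share a construct reference at all (say, separate CDK apps) and you genuinely only have a VPC ID, you can import the VPC instead. A minimal sketch, assuming CDK v2 and a stack whose env has an explicit account/region; the VPC ID here is a placeholder:
class EC2Stack(Stack):
    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)
        # from_lookup resolves the VPC at synth time via a context lookup,
        # which is why the stack needs a concrete account/region.
        vpc = ec2.Vpc.from_lookup(self, "ImportedVpc",
            vpc_id="vpc-0123456789abcdef0")  # placeholder ID
        ec2.Instance(self, "Instance",
            vpc=vpc,
            instance_type=ec2.InstanceType("t3.nano"),
            machine_image=ec2.MachineImage.latest_amazon_linux()
        )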
I think in reality I'm going to use a different design, where _attrib is set in the constructor and therefore can't be None; however, I'm fascinated to see if there's a way to make MyPy happy with this approach.
I have a situation where an attribute (in this instance _attrib) is set after the construction of the Thing, and there are a number of methods which require it to be set. It seemed reasonable, therefore, to spin up a teeny decorator to validate that _attrib is set and to chuck an exception if it's not. The code below, however, still causes MyPy to complain, although the error is Item "None" of "Optional[Any]" has no attribute "upper", so I think the type of self is getting completely lost. I'm also getting Incompatible return value type (got "Callable[[Any, VarArg(Any), KwArg(Any)], Any]", expected "F") on return inner.
from typing import Any, Callable, TypeVar

class AttribIsNoneException(Exception):
    ...

F = TypeVar("F", bound=Callable[..., Any])

def guard(func: F) -> F:
    def inner(self, *args, **kwargs):
        if self._attrib is None:
            raise AttribIsNoneException()
        return func(self, *args, **kwargs)
    return inner

class Thing:
    _attrib: str | None

    @guard
    def guarded_function(self) -> str:
        return self._attrib.upper()
Ideally I think I'd bind F to something like Callable[[Thing,...], Any] but that's not a valid thing to do right now.
Similarly I tried creating a TypeVar for the return value:
R = TypeVar("R")
F = TypeVar("F", bound=Callable[..., R])
def guard(func: F) -> F:
def inner(self, *args, **kwargs) -> R:
...
However this is not allowed either.
PEP 612 offers ParamSpec, but I can't seem to work out how to construct one, and I'm not even sure it would help much if I did!
Yes, you can do what you're looking for in the static typing system (whether it's a good idea in practice depends on your actual use case). Here's an example of how to make it work.
First, Thing needs to be a generic with respect to its _attrib type:
_AttribT = TypeVar("_AttribT", str, None)
class Thing(Generic[_AttribT]):
_attrib: _AttribT
Then, disallow @guard from accepting self: Thing[None]:
from __future__ import annotations
from typing import *
class AttribIsNoneException(Exception):
...
S = TypeVar("S", bound="Thing[Any]")
P = ParamSpec("P")
R = TypeVar("R")
_AttribT = TypeVar("_AttribT", str, None)
def guard(
func: Callable[Concatenate[Thing[str], P], R]
) -> Callable[Concatenate[Thing[str], P], R]:
def inner(self: S, /, *args: P.args, **kwargs: P.kwargs) -> R:
if self._attrib is None:
raise AttribIsNoneException()
return func(self, *args, **kwargs)
return inner
Instance methods of Thing will then be forced to be annotated with self: Thing[str] if decorated with @guard:
class Thing(Generic[_AttribT]):
    _attrib: _AttribT

    @guard  # Static typing errors
    def bad_guarded_function(self) -> str:
        return self._attrib.upper()  # Static typing errors

    @guard
    def guarded_function(self: Thing[str]) -> str:
        return self._attrib.upper()
In practice, you will need to ensure that you're accessing guarded_function through a Thing[str] instance and not a Thing[None] instance. I don't really know what your code looks like, but here's one possible implementation:
class Thing(Generic[_AttribT]):
_attrib: _AttribT
def __new__(cls, attrib: _AttribT) -> Thing[_AttribT]:
thing: Thing[_AttribT] = super().__new__(cls)
thing._attrib = attrib
return thing
    @guard
def guarded_function(self: Thing[str]) -> str:
return self._attrib.upper()
>>> string_thing = Thing("asdfjkl")
>>> string_thing.guarded_function()
'ASDFJKL'
>>>
>>> none_thing = Thing(None)
>>> none_thing.guarded_function()  # mypy: Invalid self argument "Thing[None]" to attribute function "guarded_function" with type "Callable[[Thing[str]], str]" [misc]
(The mypy diagnostic is purely static; at runtime the call on none_thing would still go through the guard and raise AttribIsNoneException.)
I have a class which is used to send requests to a server.
There's a ROUTES dictionary which maps class method names to the server endpoints used in the request function.
These methods are added to the class in __init__ with setattr. I could've just defined these methods on the class directly, but there are a lot of them and I hate code duplication.
Class looks like this:
from typing import Optional, TypedDict

ROUTES = {
"send_test": ["GET", "/test/"],
"send_start": ["GET", "/start/"]
}
class Response(TypedDict):
...
class Client(object):
def __init__(self) -> None:
for mname, endpoint in ROUTES.items():
setattr(self, mname, self.make_request_func(endpoint[0], endpoint[1]))
def make_request_func(self, method, path):
def _func(*args, **kwargs):
return self.request(method, path, *args, **kwargs)
return _func
def request(self, method, path, data: dict = {}, files: Optional[dict] = None) -> Response:
...
I basically need these send_test and send_start methods to be aliases of the request function, but without the method and path arguments, only data and files.
I have a very basic knowledge of Python's typing, so I'd appreciate any additional explanation of the typing here!
If you are willing to be a little less DRY and define instance methods explicitly, the following should work. (Let's face it, the repeated calls to makerequest aren't really any more repetitive than the original definition of ROUTES.)
from __future__ import annotations
from typing import Optional, Callable

def makerequest(method: str, path: str) -> Callable[[Client, Optional[dict], Optional[dict]], Response]:
    def wrapper(self: Client, data: Optional[dict] = None, files: Optional[dict] = None) -> Response:
        return self.request(method, path, data, files)
    return wrapper
class Client(object):
def __init__(self) -> None:
...
send_test = makerequest("GET", "/test/")
send_start = makerequest("GET", "/start/")
def request(self, method, path, data: Optional[dict] = None, files: Optional[dict] = None) -> Response:
...
(I took the liberty of replacing your default dict value with None, to avoid problems with mutable default values, and of adding from __future__ import annotations so the forward reference to Client resolves at runtime.)
With this, mypy reports the revealed type of Client.send_test as
tmp.py:32: note: Revealed type is "def (tmp.Client, Union[builtins.dict[Any, Any], None], Union[builtins.dict[Any, Any], None]) -> TypedDict('tmp.Response', {})"
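(That note comes from adding a reveal_type call, which only the type checker understands, and running mypy over the module:)
reveal_type(Client.send_test)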
Adapting this to allow defining the aliases by iterating over ROUTES will be, I think, a mess at best, and at worst not possible, since the class attributes send_test etc. wouldn't be statically defined.
If typing weren't an issue, I would recommend a class decorator to define the methods.
ROUTES = {...}

def add_aliases(d: dict):
    def _(cls):
        for name, args in d.items():
            setattr(cls, name, makerequest(*args))
        return cls
    return _

def makerequest(method, path):
    ...

@add_aliases(ROUTES)
class Client:
    def request(self, method, path, data=None, files=None):
        ...
The problem is annotating add_aliases and _ in a way that captures the idea of the class that _ returns having methods of the desired type, since nothing can be inferred from the body of add_aliases alone.
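If you want the decorator itself to pass type checking, one option is to annotate it as an identity transform on the class. A minimal sketch; note this only types the decorator, it still doesn't make send_test and friends visible to the checker:
from typing import Callable, TypeVar

T = TypeVar("T")

def add_aliases(d: dict) -> Callable[[type[T]], type[T]]:
    def _(cls: type[T]) -> type[T]:
        # Runtime behavior is unchanged; the annotations just promise
        # that the decorated class comes back as the same type.
        for name, args in d.items():
            setattr(cls, name, makerequest(*args))
        return cls
    return _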
I'm trying to declare a specific structure annotation for kwargs:
from typing import Any, Dict, TypedDict

class MyType(TypedDict):
    request: PydanticPayload
    args: Dict[str, Any]

def handle_request(self, **kwargs: MyType) -> PydanticResponse:
    ...
But Pycharm expects that kwargs will be Dict[str, MyType] instead of MyType.
Is there any way to make Pycharm identify kwargs as MyType?
Since I couldn't find a way to give kwargs a specific type, I made sure I send only the specific arguments.
On the class that calls handle_request I did:
self.function = handle_request # (HAPPENS DYNAMICALLY)
args = flask_request.view_args
query = flask_request.args
flask_request_parameters = dict(request=request,
args=args,
query=query)
function_params = list(inspect.signature(self.function).parameters)
expected = {key: flask_request_parameters[key] for key in function_params}
result = self.function(**expected)  # pass the filtered dict, not the parameter names
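Worth noting: PEP 692 later added first-class support for exactly this, typing **kwargs via Unpack of a TypedDict. Whether a given PyCharm version honors it is another question, but a minimal sketch, reusing the question's MyType, PydanticPayload and PydanticResponse, looks like:
from typing_extensions import Unpack  # or typing.Unpack on newer Pythons

def handle_request(self, **kwargs: Unpack[MyType]) -> PydanticResponse:
    request = kwargs["request"]  # the checker knows this is a PydanticPayload
    ...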
I wrote a function-based decorator to handle authentication to my company's web services:
from functools import update_wrapper

def company_auth(path=None, method=None):
"""
Company Webservices authentication decorator
:param path: relative url of the endpoint;
:param method: http method used for the call
usage example:
    @company_auth(path='/api/MO/GetDetails', method='GET')
def mo_get_details(**kwargs):
path = kwargs.pop('path')
headers = kwargs.pop('headers')
...
retrieve a valid token, compute the signature and pass to the inner function
the headers dict and the path parameter as kw arguments.
"""
def _signature_wrapper(fn, path, method):
def wrapper(*args, **kwargs):
token = get_valid_token()
signature_dict = {
'url': path,
'token': token.token,
'secret': token.secret,
'method': method
}
make_signature_results = make_signature(**signature_dict)
headers = {
'comp-header-date': make_signature_results['date'],
'comp-header-session-token': signature_dict['token'],
'comp-header-signature': make_signature_results['signature'],
'Content-Type': CONTENT_TYPE,
}
kwargs['headers'] = headers
kwargs['path'] = path
results = fn(*args, **kwargs)
return results
return wrapper
def decorating_fn(fn):
wrapper = _signature_wrapper(fn, path, method)
return update_wrapper(wrapper, fn)
return decorating_fn
def get_valid_token():
"""
get a valid token from db or generate a new one from company webservices.
"""
if not APIToken.objects.exists(): # this is a Django model
return set_new_token()
api_token = APIToken.objects.first()
if not api_token.is_valid:
return set_new_token()
else:
return api_token
It works fine, although I save the generated token/secret values in the database through the Django ORM.
I'd like to avoid this by turning it into a class-based decorator, instantiating it as a singleton and importing it in different modules. I'd like to keep the token/secret pair in memory (as attributes of that instance) rather than in the database, something like:
# Wanted class-based decorator
class CompanyAuth:
    """
    Class Based Company Webservices authentication decorator
    """
    def __init__(self, *args, **kwargs):
        ...
    def __get__(self, *args, **kwargs):
        ...
    def __call__(self, *args, **kwargs):
        ...
comp_auth = CompanyAuth()
# the singleton can then be imported by other modules and used
Is it doable, and how can I achieve it?
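It is doable. Here is a minimal sketch of one way to structure it, reusing make_signature, set_new_token, CONTENT_TYPE and update_wrapper from the question; the in-memory caching details are illustrative rather than a drop-in implementation:
from functools import update_wrapper

class CompanyAuth:
    """Class-based auth decorator that caches the token/secret pair in memory."""

    def __init__(self):
        self._token = None  # cached token object, instead of the APIToken model

    def _get_valid_token(self):
        # Refresh only when missing or invalid; no database round-trip.
        if self._token is None or not self._token.is_valid:
            self._token = set_new_token()  # helper assumed from the question
        return self._token

    def __call__(self, path=None, method=None):
        def decorating_fn(fn):
            def wrapper(*args, **kwargs):
                token = self._get_valid_token()
                results = make_signature(url=path, token=token.token,
                                         secret=token.secret, method=method)
                kwargs['headers'] = {
                    'comp-header-date': results['date'],
                    'comp-header-session-token': token.token,
                    'comp-header-signature': results['signature'],
                    'Content-Type': CONTENT_TYPE,
                }
                kwargs['path'] = path
                return fn(*args, **kwargs)
            return update_wrapper(wrapper, fn)
        return decorating_fn

comp_auth = CompanyAuth()  # module-level singleton; import this elsewhere

Usage then stays the same as the function-based version:
@comp_auth(path='/api/MO/GetDetails', method='GET')
def mo_get_details(**kwargs):
    ...
One caveat: module-level state like this is per-process, so under multiple workers each process fetches and caches its own token.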
I want to proxy an API over a network. I have the API in a dictionary. I'd like to create a class with the API methods from the dictionary so I can use the API as if it were local. The trouble is finding the name of my dynamically created method. (My approach is based on Adding a Method to an Existing Object and Python dynamic class methods.)
class MainClass(object):
def build_API(self):
methods = dict(meth1='arg1', meth2='arg2')
for key in methods.iterkeys():
setattr(self, key, MethodType(self.default_API, self))
    def default_API(self, *args, **kwargs):
called_as_name = ????
self.send_message(called_as_name, args, kwargs)
    def send_message(self, called_as_name, *args, **kwargs):
...
# Send API command over network
....
To use this:
api = MainClass()
api.build_API()
api.meth1()
However, everything I try for "called_as_name" always returns "default_API" and never "meth1". How can I get "called_as_name = meth1" when I type "api.meth1()" and "called_as_name = meth2" when I type "api.meth2()"?
I have tried:
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
called_as_name = calframe[1][3]
from Python: How to get the caller's method name in the called method?
called_as_name = inspect.stack()[1][5]
from Getting the caller function name inside another function in Python?
called_as_name = sys._getframe(1).f_code.co_name
from Getting the caller function name inside another function in Python?
Trying to do this with actual methods and grabbing the names from the stack frame with that sort of introspection trickery is a recipe for disaster. Instead, make the "methods" be custom callable objects that know their names. Here's a sketch:
class FakeMethod(object):
def __init__(self, name, parent):
self.name = name
self.parent = parent
def __call__(self, *args, **kwargs):
self.parent.send_message(self.name, args, kwargs)
class MainClass(object):
def build_API(self):
methods = dict(meth1='arg1', meth2='arg2')
        for key in methods:
setattr(self, key, FakeMethod(key, self))
def send_message(self, called_as_name, *args, **kwargs):
print("Sending message:", called_as_name, args, kwargs)
Then:
>>> api = MainClass()
>>> api.build_API()
>>> api.meth1()
Sending message: meth1 ((), {}) {}
>>> api.meth2()
Sending message: meth2 ((), {}) {}
In theory you could even use __getattr__ on the MainClass to dynamically generate a FakeMethod every time an attribute name is accessed that is not defined but is listed in some list of API method names.
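A minimal sketch of that __getattr__ variant, reusing the FakeMethod class above; the API_METHODS set stands in for whatever list of API method names you have:
class MainClass(object):
    API_METHODS = {'meth1', 'meth2'}

    def __getattr__(self, name):
        # Only invoked when normal attribute lookup fails, so regular
        # attributes and real methods are unaffected.
        if name in self.API_METHODS:
            return FakeMethod(name, self)
        raise AttributeError(name)

    def send_message(self, called_as_name, *args, **kwargs):
        print("Sending message:", called_as_name, args, kwargs)
With this there's no build_API() step at all: api = MainClass() followed by api.meth1() works immediately.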