Add to Moto's default AWS SSM parameters - Python

I am trying to run a set of tests where calls to boto3.client('ssm') are mocked using moto.
Moto provides a set of default AWS parameters (https://github.com/spulec/moto/blob/master/moto/ssm/models.py#L59) but prevents adding more:
https://github.com/spulec/moto/blob/master/moto/ssm/models.py#L858. Trying to add any parameter with the aws prefix returns an error, as per the tests in https://github.com/spulec/moto/blob/master/tests/test_ssm/test_ssm_boto3.py#L397
As my Lambda relies on the following parameter being present, my test fails: /aws/service/ecs/optimized-ami/amazon-linux-2/recommended
I was thinking of trying to monkey patch the mocked SSM client, but I have very little understanding of Moto's internals.
I have been following this example but modifying it for my needs (calling SSM instead of SQS and S3). For reference, my code looks like this, as I have attempted to monkey patch the get_parameter method without success.
app.py
import boto3
from loguru import logger


@logger.catch()
def lambda_handler(event, context):
    ssm_client = boto3.client("ssm", "eu-west-1")
    ami_param_name = "/aws/service/ami-amazon-linux-latest/amzn2-ami-hvm-x86_64-ebs"
    ami_param_value = ssm_client.get_parameter(Name=ami_param_name)
    ecs_param_name = "/aws/service/ecs/optimized-ami/amazon-linux-2/recommended"
    ecs_param_value = ssm_client.get_parameter(Name=ecs_param_name)
    return [ami_param_value, ecs_param_value]
test.py
import os
from unittest import mock

import boto3
import pytest
from moto import mock_ssm

from src.app import lambda_handler

AWS_REGION = 'eu-west-1'


@pytest.fixture(scope="function")
def aws_credentials():
    """Mocked AWS Credentials for moto."""
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
    os.environ["AWS_SECURITY_TOKEN"] = "testing"
    os.environ["AWS_SESSION_TOKEN"] = "testing"


@pytest.fixture(scope="function")
def mock_ssm_client(aws_credentials):
    with mock_ssm():
        client = boto3.client("ssm", region_name=AWS_REGION)
        # already present in moto
        # client.put_parameter(
        #     Name='/aws/service/ami-amazon-linux-latest/amzn2-ami-hvm-x86_64-ebs',
        #     Type='String',
        #     Value='ami-stdparam'
        # )
        # What the lambda requires
        # client.put_parameter(
        #     Name='/aws/service/ecs/optimized-ami/amazon-linux-2/recommended',
        #     Type='String',
        #     Value='{"image_id": "ami-ecsparam"}'
        # )

        def side_effect(path):
            if path == "/aws/service/ecs/optimized-ami/amazon-linux-2/recommended":
                return_value = {
                    "Parameter": {
                        "Name": "/aws/service/ecs/optimized-ami/amazon-linux-2/recommended",
                        "Type": "String",
                        "Value": "{\"ecs_agent_version\":\"1.63.1\",\"ecs_runtime_version\":\"Docker version 20.10.13\",\"image_id\":\"ami-002e2fef4b94f8fd0\",\"image_name\":\"amzn2-ami-ecs-hvm-2.0.20220921-x86_64-ebs\",\"image_version\":\"2.0.20220921\",\"os\":\"Amazon Linux 2\",\"schema_version\":1,\"source_image_name\":\"amzn2-ami-minimal-hvm-2.0.20220912.1-x86_64-ebs\"}",
                        "Version": 94,
                        "LastModifiedDate": 1664230158.399,
                        "ARN": "arn:aws:ssm:eu-west-1::parameter/aws/service/ecs/optimized-ami/amazon-linux-2/recommended",
                        "DataType": "text"
                    }
                }
                return return_value
            else:
                return client.get_parameter(path)

        client.get_parameter = mock.patch(
            'boto3.client.get_parameter',
            side_effect=side_effect
        )
        yield client


def test_lambda_handler(mock_ssm_client):
    # Arrange
    # Act
    results = lambda_handler('', 'test')
    # Assert
    assert len(results) == 2

You could use Moto's internal API to store the parameter, as a workaround to mocking/patching Moto.
See the following code to add a custom parameter called /aws/test:
import boto3
from moto import mock_ssm


@mock_ssm
def test_default_param():
    client = boto3.client("ssm", region_name="us-east-1")

    from moto.ssm.models import ssm_backends, Parameter
    ssm_backends["123456789012"]["us-east-1"]._parameters["/aws/test"].append(Parameter(
        account_id="123456789012",
        name="/aws/test",
        value="val",
        parameter_type="String",
        description="...",
        allowed_pattern=None,
        keyid=None,
        last_modified_date=1664230158.399,
        version=None,
        tags=[],
        data_type="text",
    ))

    response = client.get_parameters(Names=["/aws/test"])
    print(response)
Note that this works in the latest version of Moto (4.0.6), but as it's an internal API, it is liable to change.
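Applied to the question above, the same internal API can be used in a pytest fixture to seed the ECS parameter the Lambda needs before calling the handler. This is only a sketch: it assumes Moto's default account id 123456789012 and the eu-west-1 region from the question, the Parameter arguments mirror the snippet above (being internal, they may change between Moto releases), and aws_credentials is the fixture from the question.
import boto3
import pytest
from moto import mock_ssm


@pytest.fixture(scope="function")
def mock_ssm_client(aws_credentials):
    with mock_ssm():
        from moto.ssm.models import ssm_backends, Parameter

        # Seed the parameter that Moto does not provide by default.
        ssm_backends["123456789012"]["eu-west-1"]._parameters[
            "/aws/service/ecs/optimized-ami/amazon-linux-2/recommended"
        ].append(Parameter(
            account_id="123456789012",
            name="/aws/service/ecs/optimized-ami/amazon-linux-2/recommended",
            value='{"image_id": "ami-ecsparam"}',
            parameter_type="String",
            description="seeded for tests",
            allowed_pattern=None,
            keyid=None,
            last_modified_date=1664230158.399,
            version=None,
            tags=[],
            data_type="text",
        ))
        yield boto3.client("ssm", region_name="eu-west-1")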

Related

FastAPI read configuration before specifying dependencies

I'm using fastapi-azure-auth to make calls to my API impossible if the user is not logged in (doesn't pass a valid token in the API call from the UI, to be precise).
My question doesn't have anything to do with this particular library; it's about FastAPI in general.
I use a class (SingleTenantAzureAuthorizationCodeBearer) which is callable. It is used in two cases:
in the api.on_event("startup") handler - to connect to Azure
as a dependency in routes that should require authentication
To initialize it, it requires some things like Azure IDs etc. I provide those via a config file.
The problem is, this class is created when the modules get evaluated, so the values from the config file would have to be already present.
So, I have this:
dependencies.py
azure_scheme = SingleTenantAzureAuthorizationCodeBearer(
    app_client_id=settings.APP_CLIENT_ID,
    tenant_id=settings.TENANT_ID,
    scopes={
        f'api://{settings.APP_CLIENT_ID}/user_impersonation': 'user_impersonation',
    }
)
api.py
from .dependencies import azure_scheme

api = FastAPI(
    title="foo"
)


def init_api() -> FastAPI:
    # I want to read configuration here
    api.swagger_ui.init_oauth = {"clientID": config.CLIENT_ID}
    return api


@api.on_event('startup')
async def load_config() -> None:
    """
    Load OpenID config on startup.
    """
    await azure_scheme.openid_config.load_config()


@api.get("/", dependencies=[Depends(azure_scheme)])
def test():
    return {"hello": "world"}
Then I'd run the app with gunicorn -k uvicorn.workers.UvicornWorker foo:init_api().
So, for example, the Depends part will get evaluated before init_api, i.e. before reading the config. I would have to read the config file before that happens, and I don't want to do that; I'd like to control when the config reading happens (that's why I have the init_api function, where I initialize logging and other things).
My question would be: is there a way to first read the config then initialize a dependency like SingleTenantAzureAuthorizationCodeBearer so I can use the values from config for this initialization?
Edit:
api.py:
from fastapi import Depends, FastAPI, Response
from fastapi.middleware.cors import CORSMiddleware

from .config import get_config
from .dependencies import get_azure_scheme

api = FastAPI(
    title="Foo",
    swagger_ui_oauth2_redirect_url="/oauth2-redirect",
)
api.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)


def init_api() -> FastAPI:
    api.swagger_ui_init_oauth = {
        "usePkceWithAuthorizationCodeGrant": True,
        "clientId": get_config().client_id,
    }
    return api


@api.get("/test", dependencies=[Depends(get_azure_scheme)])
def test():
    return Response(status_code=200)
config.py:
import os
from functools import lru_cache

import toml
from pydantic import BaseSettings


class Settings(BaseSettings):
    client_id: str
    tenant_id: str


@lru_cache
def get_config():
    with open(os.getenv("CONFIG_PATH", ""), mode="r") as config_file:
        config_data = toml.load(config_file)
    return Settings(
        client_id=config_data["azure"]["CLIENT_ID"],
        tenant_id=config_data["azure"]["TENANT_ID"],
    )
dependencies.py:
from fastapi import Depends
from fastapi_azure_auth import SingleTenantAzureAuthorizationCodeBearer

from .config import Settings, get_config


def get_azure_scheme(config: Settings = Depends(get_config)):
    return SingleTenantAzureAuthorizationCodeBearer(
        app_client_id=config.client_id,
        tenant_id=config.tenant_id,
        scopes={
            f"api://{config.client_id}/user": "user",
        },
    )
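One thing to note about this setup is that get_azure_scheme builds a new SingleTenantAzureAuthorizationCodeBearer on every request. If that becomes a concern, the factory can be cached so the scheme is constructed only once, after the config has been read. Below is a possible variant of dependencies.py; it is a sketch, not part of the original setup:
from functools import lru_cache

from fastapi_azure_auth import SingleTenantAzureAuthorizationCodeBearer

from .config import get_config


@lru_cache
def get_azure_scheme() -> SingleTenantAzureAuthorizationCodeBearer:
    # get_config() is itself lru_cached, so the config file is read once,
    # and the scheme is built once, on the first request that needs it.
    config = get_config()
    return SingleTenantAzureAuthorizationCodeBearer(
        app_client_id=config.client_id,
        tenant_id=config.tenant_id,
        scopes={f"api://{config.client_id}/user": "user"},
    )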

How to write/use K8 Python client to create a new role, sa & role binding

I am currently figuring out the best way to programmatically manage a Kubernetes cluster (EKS). I have come across the Python Kubernetes client, with which I was able to load the local config and then create a namespace.
I am running a Jenkins job where I would like it to create a namespace, role, role binding, and service account. I have managed to create the namespace, however I am having trouble understanding how to call the functions that create a new role and role binding.
Here is the snippet to create namespaces using the Kubernetes Python client:
from kubernetes import dynamic, config
from kubernetes import client as k8s_client
from kubernetes.client import api_client
import time, sys


def create_namespace(namespace_api, name):
    namespace_manifest = {
        "apiVersion": "v1",
        "kind": "Namespace",
        "metadata": {"name": name, "resourceversion": "v1"},
    }
    namespace_api.create(body=namespace_manifest)


def delete_namespace(namespace_api, name):
    namespace_api.delete(name=name)


def main():
    # Load local config
    client = dynamic.DynamicClient(
        api_client.ApiClient(configuration=config.load_incluster_config())
    )
    namespace_api = client.resources.get(api_version="v1", kind="Namespace")

    # Creating a namespace
    namespace_name = sys.argv[1]
    create_namespace(namespace_api, namespace_name)
    time.sleep(4)
    print("\n[INFO] namespace: " + namespace_name + " created")


if __name__ == '__main__':
    main()
I would appreciate any support.
You'll most likely want to use the RbacAuthorizationV1Api. Afterwards you can call create_namespaced_role and create_namespaced_role_binding to create what you need.
A snippet might look like this:
from kubernetes import client, config

config.load_incluster_config()
policy_api = client.RbacAuthorizationV1Api()

role = client.V1Role(
    metadata=client.V1ObjectMeta(name="my-role"),
    rules=[client.V1PolicyRule([""], resources=["pods"], verbs=["get", "list"])],
)
policy_api.create_namespaced_role(namespace="my-namespace", body=role)

role_binding = client.V1RoleBinding(
    metadata=client.V1ObjectMeta(namespace="my-namespace", name="my-role-binding"),
    subjects=[
        client.V1Subject(
            name="user", kind="User", api_group="rbac.authorization.k8s.io"
        )
    ],
    role_ref=client.V1RoleRef(
        api_group="rbac.authorization.k8s.io", kind="Role", name="my-role"
    ),
)
policy_api.create_namespaced_role_binding(namespace="my-namespace", body=role_binding)
Some more useful examples here.
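The question also mentions a service account; that is created through CoreV1Api rather than RbacAuthorizationV1Api. A minimal sketch follows (the namespace and names are placeholders; to bind the role to this service account, the subject in the role binding above would use kind="ServiceAccount" with a namespace instead of kind="User"):
from kubernetes import client, config

config.load_incluster_config()
core_api = client.CoreV1Api()

service_account = client.V1ServiceAccount(
    metadata=client.V1ObjectMeta(namespace="my-namespace", name="my-sa")
)
core_api.create_namespaced_service_account(
    namespace="my-namespace", body=service_account
)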

Cannot monkeypatch imported module

I have a very simple Google Cloud Function written in Python that references Google's Secret Manager via their Python library.
The code is very simple and looks like this:
import os
import logging

from google.cloud import secretmanager

client = secretmanager.SecretManagerServiceClient()
secret_name = "my-secret"
project_id = os.environ.get('GCP_PROJECT')
resource_name = "projects/{}/secrets/{}/versions/latest".format(project_id, secret_name)
response = client.access_secret_version(resource_name)
secret_string = response.payload.data.decode('UTF-8')


def new_measures_handler(data, context):
    logging.info(secret_string)
    print('File: {}.'.format(data['name']))
and then I have my simple unit test which is trying to take advantage of monkey patching:
from google.cloud import secretmanager

import main


def test_print(capsys, monkeypatch):
    # arrange
    monkeypatch.setenv("GCP_PROJECT", "TestingUser")
    monkeypatch.setattr(secretmanager, "SecretManagerServiceClient", lambda: 1)
    name = 'test'
    data = {'name': name}
    # act
    main.new_measures_handler(data, None)
    out, err = capsys.readouterr()
    # assert
    assert out == 'File: {}.\n'.format(name)
Everything goes well with the mock for the environment variable but I can not mock secretmanager. It keeps on trying to call the actual API. My ultimate goal is to mock secretmanager.SecretManagerServiceClient() and make it return an object which later on can be used by: client.access_secret_version(resource_name) (which I will need to mock as well, I think)
See my answer to this question for a working example of using unittest patching and mocking to mock Google API calls and return mock results:
How to Mock a Google API Library with Python 3.7 for Unit Testing
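As a rough sketch of how that patching approach could be applied here (an assumption on my part, not verified against the asker's project layout: it relies on main not having been imported by anything else first, since main.py builds the client and reads the secret at import time):
from unittest import mock


def test_print(capsys, monkeypatch):
    # arrange
    monkeypatch.setenv("GCP_PROJECT", "TestingUser")
    fake_client = mock.MagicMock()
    # access_secret_version(...) returns a response whose payload.data decodes to a string
    fake_client.access_secret_version.return_value.payload.data = b"fake-secret"

    # main.py creates the client and fetches the secret at import time,
    # so the patch must be active before the module is first imported.
    with mock.patch(
        "google.cloud.secretmanager.SecretManagerServiceClient",
        return_value=fake_client,
    ):
        import main

    name = "test"
    # act
    main.new_measures_handler({"name": name}, None)
    out, err = capsys.readouterr()
    # assert
    assert out == "File: {}.\n".format(name)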

mock boto3 response for downloading file from S3

I've got code that downloads a file from an S3 bucket using boto3.
# foo.py
import boto3


def dl(src_f, dest_f):
    s3 = boto3.resource('s3')
    s3.Bucket('mybucket').download_file(src_f, dest_f)
I'd now like to write a unit test for dl() using pytest, mocking the interaction with AWS with the Stubber available in botocore.
import os

import boto3
import pytest
from botocore.stub import Stubber, ANY

from foo import dl


@pytest.fixture
def s3_client():
    yield boto3.client("s3")


def test_dl(s3_client):
    with Stubber(s3_client) as stubber:
        params = {"Bucket": ANY, "Key": ANY}
        response = {"Body": "lorem"}
        stubber.add_response(SOME_OBJ, response, params)
        dl('bucket_file.txt', 'tmp/bucket_file.txt')
        assert os.path.isfile('tmp/bucket_file.txt')
I'm not sure about the right approach for this. How do I add bucket_file.txt to the stubbed response? What object do I need to call add_response() on (shown as SOME_OBJ)?
Have you considered using moto?
Your code could look the same way as it is right now:
# foo.py
import boto3


def dl(src_f, dest_f):
    s3 = boto3.resource('s3')
    s3.Bucket('mybucket').download_file(src_f, dest_f)
and the test:
import os

import boto3
from moto import mock_s3

from foo import dl


@mock_s3
def test_dl():
    s3 = boto3.client('s3', region_name='us-east-1')
    # We need to create the bucket since this is all in Moto's 'virtual' AWS account
    s3.create_bucket(Bucket='mybucket')
    s3.put_object(Bucket='mybucket', Key='bucket_file.txt', Body='')

    dl('bucket_file.txt', 'bucket_file.txt')
    assert os.path.isfile('bucket_file.txt')
The intention of the code becomes a bit more obvious since you simply work with S3 as usual, except that there is no real S3 behind the method calls.
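A small variant, not from the original answer: the download target can point at pytest's built-in tmp_path fixture so the test doesn't leave files in the working directory.
import boto3
from moto import mock_s3

from foo import dl


@mock_s3
def test_dl(tmp_path):
    s3 = boto3.client('s3', region_name='us-east-1')
    s3.create_bucket(Bucket='mybucket')
    s3.put_object(Bucket='mybucket', Key='bucket_file.txt', Body='')

    # download into a per-test temporary directory provided by pytest
    dest = tmp_path / 'bucket_file.txt'
    dl('bucket_file.txt', str(dest))
    assert dest.is_file()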

how do I test methods using boto3 with moto

I am writing test cases for a quick class to find / fetch keys from s3, using boto3. I have used moto in the past to test boto (not 3) code but am trying to move to boto3 with this project, and running into an issue:
class TestS3Actor(unittest.TestCase):
    @mock_s3
    def setUp(self):
        self.bucket_name = 'test_bucket_01'
        self.key_name = 'stats_com/fake_fake/test.json'
        self.key_contents = 'This is test data.'
        s3 = boto3.session.Session().resource('s3')
        s3.create_bucket(Bucket=self.bucket_name)
        s3.Object(self.bucket_name, self.key_name).put(Body=self.key_contents)
error:
...
File "/Library/Python/2.7/site-packages/botocore/vendored/requests/packages/urllib3/connectionpool.py", line 344, in _make_request
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
File "/Library/Python/2.7/site-packages/botocore/vendored/requests/packages/urllib3/connectionpool.py", line 314, in _raise_timeout
if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
TypeError: __str__ returned non-string (type WantWriteError)
botocore.hooks: DEBUG: Event needs-retry.s3.CreateBucket: calling handler <botocore.retryhandler.RetryHandler object at 0x10ce75310>
It looks like moto is not mocking out the boto3 call correctly - how do I make that work?
What worked for me is setting up the environment with boto before running my mocked tests with boto3.
Here's a working snippet:
import unittest

import boto
from boto.s3.key import Key
from moto import mock_s3
import boto3


class TestS3Actor(unittest.TestCase):
    mock_s3 = mock_s3()

    def setUp(self):
        self.mock_s3.start()
        self.location = "eu-west-1"
        self.bucket_name = 'test_bucket_01'
        self.key_name = 'stats_com/fake_fake/test.json'
        self.key_contents = 'This is test data.'
        s3 = boto.connect_s3()
        bucket = s3.create_bucket(self.bucket_name, location=self.location)
        k = Key(bucket)
        k.key = self.key_name
        k.set_contents_from_string(self.key_contents)

    def tearDown(self):
        self.mock_s3.stop()

    def test_s3_boto3(self):
        s3 = boto3.resource('s3', region_name=self.location)
        bucket = s3.Bucket(self.bucket_name)
        assert bucket.name == self.bucket_name

        # retrieve already setup keys
        keys = list(bucket.objects.filter(Prefix=self.key_name))
        assert len(keys) == 1
        assert keys[0].key == self.key_name

        # update key
        s3.Object(self.bucket_name, self.key_name).put(Body='new')
        key = s3.Object(self.bucket_name, self.key_name).get()
        assert 'new' == key['Body'].read()
When run with py.test test.py you get the following output:
collected 1 items
test.py .
========================================================================================= 1 passed in 2.22 seconds =========================================================================================
According to this information, it looks like streaming upload to s3 using Boto3 S3 Put is not yet supported.
In my case, I used the following to successfully upload an object to a bucket:
s3.Object(self.s3_bucket_name, self.s3_key).put(Body=open("file_to_upload", 'rb'))
where "file_to_upload" is your local file to be uploaded to the S3 bucket. For your test case, you can just create a temporary file to check this functionality:
test_file = open("test_file.json", "w")
test_file.write("some test contents")
test_file.close()

s3.Object(self.s3_bucket_name, self.s3_key).put(Body=open("test_file.json", 'rb'))
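As a side note, with more recent versions of Moto the boto (2) setup step from the first answer is generally no longer necessary. A boto3-only sketch, not from the original answers, assuming a current Moto release (where buckets outside us-east-1 need an explicit location constraint):
import boto3
from moto import mock_s3


@mock_s3
def test_s3_boto3_only():
    s3 = boto3.resource('s3', region_name='eu-west-1')
    # non-us-east-1 buckets need an explicit location constraint
    s3.create_bucket(
        Bucket='test_bucket_01',
        CreateBucketConfiguration={'LocationConstraint': 'eu-west-1'},
    )
    s3.Object('test_bucket_01', 'stats_com/fake_fake/test.json').put(Body='This is test data.')

    obj = s3.Object('test_bucket_01', 'stats_com/fake_fake/test.json').get()
    assert obj['Body'].read() == b'This is test data.'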
