I wrote some code to invoke an AWS Lambda function that was created somewhere else.
I would like to use moto for testing it, but I don't really understand how to do it and I continue to obtain errors.
This is a simple example of the main code:
import boto3
import json
class SimpleLambda:
    """Thin wrapper around a boto3 Lambda client that invokes ``test-lambda``."""

    def __init__(self):
        # Client is created eagerly; region is fixed to eu-west-2.
        self.aws_lambda = boto3.client("lambda", region_name="eu-west-2")

    def __call__(self):
        """Invoke the remote function and return its raw payload bytes.

        Returns:
            bytes from the response payload, or None (after logging) if the
            invocation or payload read fails.
        """
        payload = json.dumps(
            {
                "Records": [
                    {
                        "Source": "test_source",
                        "Version": "test_version",
                    }
                ]
            }
        )
        try:
            lambda_response = self.aws_lambda.invoke(
                FunctionName="test-lambda",
                Payload=payload,
            )
            return lambda_response["Payload"].read()
        except Exception as err:
            # Best-effort: swallow the error and signal failure with None.
            print(f"Could not invoke simple lambda: {err}")
            return None
and the test:
import os
import pytest
import unittest.mock as mock
import boto3
from moto import mock_lambda
from aws_lambda import SimpleLambda
@pytest.fixture
def aws_credentials():
    """Provide dummy AWS credentials so no real account is ever touched."""
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
    os.environ["AWS_SECURITY_TOKEN"] = "testing"
    os.environ["AWS_SESSION_TOKEN"] = "testing"
@pytest.fixture
def lambda_client(aws_credentials):
    """Yield a moto-mocked Lambda client; the credentials fixture runs first."""
    with mock_lambda():
        yield boto3.client("lambda", region_name="eu-west-2")
@pytest.fixture
def lambda_test(lambda_client):
    """Create the 'test-lambda' function inside the mocked Lambda backend.

    NOTE(review): moto validates the definition much like AWS does -- the IAM
    role must actually exist (create it under mock_iam first), and recent moto
    versions also require Runtime/Handler; TODO confirm against the moto
    version in use.
    """
    lambda_client.create_function(
        FunctionName="test-lambda",
        Runtime="python3.9",
        Handler="lambda_function.lambda_handler",
        Role="arn:aws:iam::123456789012:role/doesnotexist",
        Code={"ZipFile": b"test"},
    )
    yield
def test_simple_lambda_call(lambda_client, lambda_test):
    """Exercise SimpleLambda end-to-end against the mocked backend."""
    simple_lambda = SimpleLambda()
    # No assertion in the original either; this only checks the call completes.
    test = simple_lambda()
I obtain the error:
botocore.errorfactory.InvalidParameterValueException: An error occurred (InvalidParameterValueException) when calling the CreateFunction operation: The role defined for the function cannot be assumed by Lambda.
I found several examples of how to use moto with an S3 bucket, but nothing with Lambda.
Running the code in this other question, I obtain the same error.
Any advice?
Moto also validates whether the IAM role exists, just like AWS does.
So make sure that the IAM role is created first:
# Create the IAM role first: moto validates, as AWS does, that the role
# referenced by CreateFunction actually exists.
with mock_iam():
    iam = boto3.client("iam", region_name="eu-west-2")
    iam_role = iam.create_role(
        RoleName="my-role",
        AssumeRolePolicyDocument="some policy",
        Path="/my-path/",
    )["Role"]["Arn"]
Related
I was mocking a function that is used to read a k8s secret to fetch a secret token. But running the unittest produces the error - AttributeError: <module 'kubernetes.client' from '/usr/lib/python3.6/site-packages/kubernetes/client/__init__.py'> does not have the attribute 'read_namespaced_secret()' I have gone through How do you mock Python Kubernetes client CoreV1Api, but it's also not helping my case. Can anyone point out what I am doing wrong here?
My script - read_sec.py
import base64
from kubernetes import client, config
from logger import logger
class kubernetesServices():
    """Helpers for reading Kubernetes secrets via the CoreV1 API."""

    def __init__(self):
        pass

    def get_secret_vault_token(self):
        """Return the decoded 'token' entry of the random-sec secret.

        Logs and implicitly returns None on any failure.
        """
        try:
            config.load_kube_config()
            api_instance = client.CoreV1Api()
            sec = api_instance.read_namespaced_secret("random-sec", "random-ns").data
            token = base64.b64decode(sec['token']).decode("utf-8")
            return token
        except Exception as e:
            logger.error("got error at get_secret_vault_token: {}".format(str(e)))
Unittest - test_read_sec.py
import unittest
from unittest.mock import patch
from read_sec import *
class MockKubernetes():
    """Placeholder stand-in used as a side_effect for patched k8s modules."""

    def __init__(self):
        pass
def mocker_read_namespaced_secret(*args, **kwargs):
    """Return a stub read_namespaced_secret() response carrying a fake token."""

    class MockReadns():
        """Minimal response object exposing the payload via .json()."""

        def __init__(self, json_data):
            self.json_data = json_data

        def json(self):
            return self.json_data

    return MockReadns({"data": {"token": "abc123"}})
class TestkubernetesServices(unittest.TestCase):
    """Tests for kubernetesServices.get_secret_vault_token (question's code)."""

    # NOTE(review): patch decorators apply bottom-up, so the first mock
    # argument corresponds to the *last* decorator listed.  Also,
    # "kubernetes.client.read_namespaced_secret()" is not a valid patch
    # target -- the method lives on CoreV1Api (patch
    # "kubernetes.client.CoreV1Api" instead).
    @patch("kubernetes.client", side_effect=MockKubernetes)
    @patch("kubernetes.config", side_effect=MockKubernetes)
    @patch("kubernetes.client.read_namespaced_secret()", side_effect=mocker_read_namespaced_secret)
    def test_get_secret_vault_token(self, mock_client, mock_config, mock_read):
        k8s = kubernetesServices()
        token = k8s.get_secret_vault_token()
You need to mock kubernetes.client.CoreV1Api instead of kubernetes.client. Here is an example:
import base64
import unittest
from unittest.mock import patch, Mock
import requests
from kubernetes import client, config
class kubernetesServices():
    """Reads the vault token out of a Kubernetes secret (answer's version)."""

    def get_secret_vault_token(self):
        # Load kubeconfig, then fetch and base64-decode the token field.
        config.load_kube_config()
        api_instance = client.CoreV1Api()
        sec = api_instance.read_namespaced_secret('random-sec', 'random-ns').data
        token = base64.b64decode(sec['token']).decode('utf-8')
        return token
class TestkubernetesServices(unittest.TestCase):
    """Demonstrates patching CoreV1Api rather than the whole client module."""

    @patch(
        'kubernetes.client.CoreV1Api',
        return_value=Mock(read_namespaced_secret=Mock(return_value=Mock(data={'token': b'YWJjMTIz'})))
    )
    @patch('kubernetes.config.load_kube_config', return_value=Mock())
    def test_get_secret_vault_token(self, mock_config, mock_client):
        # Decorators apply bottom-up: the load_kube_config mock arrives first,
        # so the parameter names are ordered accordingly (the original had
        # them swapped, though both were unused).
        k8s = kubernetesServices()
        token = k8s.get_secret_vault_token()
        self.assertEqual(token, 'abc123')
Result:
---------------------------------------------------------------------
Ran 1 tests in 0.071s
PASSED (successes=1)
JFYI: side_effect is better used when you need multiple results, one per call. Example:
class TestRequest(unittest.TestCase):
    """Shows side_effect yielding successive values, one per call."""

    def test_side_effect(self):
        with patch('requests.get', side_effect=[1, 2, 3]):
            print(requests.get('url1'))  # 1
            print(requests.get('url2'))  # 2
            print(requests.get('url3'))  # 3
I am very, very new to FastAPI testing, so any guidance in the right direction would be appreciated.
So what I have right now is as follows:
A very simple routes file: datapoint_routes.py
from fastapi import APIRouter, Depends
datapoint_router = APIRouter()


def some_function_is():
    """Dependency whose real implementation reports 'Actual'."""
    return "Actual"


@datapoint_router.get('/{datapoint_name}')
def get_db(
    datapoint_name: str,
    some_function_output=Depends(some_function_is)
) -> dict:
    """Echo the path parameter together with the dependency's output."""
    return {
        'datapoint_name': datapoint_name,
        'state': some_function_output
    }
I want to be able to test this. I checked out FastAPI Testing Dependencies guide here. But this did not help at all, because it didn't work for me.
For my tests, what I have right now is something like this:
File: test_datapoint_router.py
from typing import Union
from fastapi import FastAPI
from fastapi.testclient import TestClient
from datapoint_routes import datapoint_router, some_function_is
# Fixed path parameter used by every request in this test module.
DATAPOINT_NAME = 'abcdef'
# NOTE(review): this app never receives the router below, and the client is
# built from the bare router instead of the app -- so dependency overrides
# registered on `app` never apply to requests made through `client`.
app = FastAPI()
client = TestClient(datapoint_router)
def override_dep(q: Union[str, None] = None):
    """Test-time replacement dependency: ignores `q` and reports 'Test'."""
    return "Test"
# Registered on `app`, but requests go through the router-backed client above.
app.dependency_overrides[some_function_is] = override_dep


def test_read_main():
    """Expect the overridden dependency value in the response body."""
    response = client.get(f"/{DATAPOINT_NAME}")
    assert response.status_code == 200
    assert response.json() == {
        'datapoint_name': DATAPOINT_NAME,
        'state': "Test"
    }
I would hope in the test, the response = client.get() would be based on the overriding function override_dep, which would replace some_function_is.
I thought the response.json() would be:
{
'datapoint_name': 'abcdef',
'state': 'Test'
}
instead, it is:
{
'datapoint_name': 'abcdef',
'state': 'Actual'
}
This means that the override_dep function in the test is useless.
I even checked out the value of app.dependency_overrides, and it shows a correct map:
(Pdb) app.dependency_overrides
{<function some_function_is at 0x102b3d1b0>: <function override_dep at 0x102b3e0e0>}
Where the memory values of functions do match:
(Pdb) some_function_is
<function some_function_is at 0x102b3d1b0>
(Pdb) override_dep
<function override_dep at 0x102b3e0e0>
What am I doing wrong?
You're creating the FastAPI app object in your test, but you're using a defined router with your TestClient. Since this router is never registered with the app, overriding a dependency with the app won't do anything useful.
The TestClient is usually used with the root app (so that the tests run against the app itself):
from fastapi import APIRouter, Depends, FastAPI
app = FastAPI()
datapoint_router = APIRouter()


def some_function_is():
    """Real dependency implementation."""
    return "Actual"


@datapoint_router.get('/{datapoint_name}')
def get_db(
    datapoint_name: str,
    some_function_output=Depends(some_function_is)
) -> dict:
    """Return the path parameter and the dependency's result."""
    return {
        'datapoint_name': datapoint_name,
        'state': some_function_output
    }


# Registering the router on the app is what makes app-level dependency
# overrides effective for these routes.
app.include_router(datapoint_router)
And then the test:
from typing import Union
from fastapi.testclient import TestClient
from datapoint_routes import app, datapoint_router, some_function_is
# Client is built from the root app, so app.dependency_overrides applies.
DATAPOINT_NAME = 'abcdef'
client = TestClient(app)
def override_dep(q: Union[str, None] = None):
    """Test-time replacement dependency: ignores `q` and reports 'Test'."""
    return "Test"
# Override the dependency for the whole module (no per-test cleanup).
app.dependency_overrides[some_function_is] = override_dep


def test_read_main():
    """The overridden dependency value should appear in the response."""
    expected = {
        'datapoint_name': DATAPOINT_NAME,
        'state': "Test"
    }
    response = client.get(f"/{DATAPOINT_NAME}")
    assert response.status_code == 200
    assert response.json() == expected
This passes as expected, since you're now testing against the app (TestClient(app)) - the location where you overrode the dependency.
MatsLindh's answer does solve the problem, and I would like to suggest another improvement.
Overriding the depends function at the root of the test file, introduces a risk of interfering with the following tests, due to a lack of cleanup.
Instead, I suggest using a fixture, which would ensure the isolation of your tests. I wrote a simple pytest plugin to integrate with the dependency system of FastAPI to simplify the syntax as well.
Install it via: pip install pytest-fastapi-deps and then use it like so:
from typing import Union
from fastapi.testclient import TestClient
from datapoint_routes import app, datapoint_router, some_function_is
# Shared test client over the root app.
DATAPOINT_NAME = 'abcdef'
client = TestClient(app)
def override_dep(q: Union[str, None] = None):
    """Test-time replacement dependency: ignores `q` and reports 'Test'."""
    return "Test"
def test_read_main_context_manager(fastapi_dep):
    """Override the dependency only for the duration of the with-block."""
    with fastapi_dep(app).override({some_function_is: override_dep}):
        response = client.get(f"/{DATAPOINT_NAME}")
        assert response.status_code == 200
        assert response.json() == {
            'datapoint_name': DATAPOINT_NAME,
            'state': "Test"
        }
I have a class used to authenticate with google sheet API and retrieve data from some spreadsheets.
Here a part of it:
spreadsheet.py
from typing import Optional
from google.oauth2.credentials import Credentials
from google.auth.transport.requests import Request
import gspread
class GoogleSheet:
    """Authenticates against the Google Sheets API and opens spreadsheets."""

    def __init__(self, token_file: str):
        # Path to the authorized-user token JSON used for authentication.
        self.token_file: str = token_file
        self.google_client: Optional[gspread.Client] = None
        self.gsheet: Optional[gspread.Spreadsheet] = None

    def setup_connection(self) -> None:
        """Load credentials (refreshing if expired) and authorize gspread."""
        credentials: Credentials = Credentials.from_authorized_user_file(self.token_file)
        if credentials.expired:
            credentials.refresh(Request())
        self.google_client = gspread.authorize(credentials)

    def open_gsheet_by_url(self, gsheet_url: str) -> None:
        """Open a spreadsheet by URL; requires setup_connection() first."""
        self.gsheet = self.google_client.open_by_url(gsheet_url)
I wanted to create some tests for the previous code.
Here is what I ended to:
test_spreadsheet.py
import pytest
from spreadsheet import GoogleSheet
from unittest.mock import patch
class TestSpreadSheetData:
    """Unit tests for GoogleSheet.setup_connection."""

    # patch decorators apply bottom-up: the first mock argument after `self`
    # belongs to the *innermost* decorator ('spreadsheet.gspread').  The
    # original named the arguments the other way round, which is why
    # `mocked_creds.from_authorized_user_file` was never called (0 == 1).
    @patch('spreadsheet.Credentials')
    @patch('spreadsheet.gspread')
    def test_setup_connection(self, mocked_gspread, mocked_creds):
        google_sheet = GoogleSheet('api_token.json')
        google_sheet.setup_connection()
        assert mocked_creds.from_authorized_user_file.call_count == 1
        assert mocked_gspread.authorize.call_count == 1
I tried the code above but it didn't work although I had similar approach with different project and packages.
So whenever I run the test, I get the following error:
AssertionError
Assert 0 == 1
Can anyone help me fix this?
I am having trouble mocking an attribute of a property of a class.
I have the following class:
import boto3
class QueryFetcher:
    """Runs Athena queries via a boto3 client exposed as a read-only property."""

    def __init__(self, query: str):
        self._query = query
        self._athena = boto3.client("athena")

    @property
    def athena(self):
        """The underlying Athena client."""
        return self._athena

    def query_athena(self):
        """Start the query and poll until completion (illustrative sketch;
        the `...` marks logic elided in the question)."""
        # need to mock start_query_execution function
        execution = self._athena.start_query_execution(...)
        execution_id = execution["QueryExecutionId"]
        while True:
            # need to mock get_query_execution function
            stats = self._athena.get_query_execution(QueryExecutionId=execution_id)
            ...  # NOTE(review): `status` is presumably set/break'd in the elided code
        return status, execution_id
I have to mock the functions start_query_execution and get_query_execution which are part of the property athena in the class.
I am trying to unit test the function query_athena. This is a sample file for unit testing:
import pytest
from query_fetcher import QueryFetcher
from unittest.mock import patch, PropertyMock
@pytest.fixture
def fetcher_user():
    """A QueryFetcher built with an empty query string."""
    return QueryFetcher(query="")
# The file imports `patch`/`PropertyMock` directly, so use them here (the
# original referenced an undefined `mock` module).
@patch(
    "query_fetcher.QueryFetcher.athena",
    new_callable=PropertyMock,
)
def test_query_athena(mock_athena, fetcher_user):
    # NOTE(review): query_athena reads self._athena directly, so patching the
    # `athena` property never intercepts those calls.
    mock_athena.return_value.get_query_execution.return_value = {
        "QueryExecution": {"Status": {"State": "SUCCEEDED"}}
    }
    mock_athena.return_value.start_query_execution.return_value = {
        "QueryExecutionId": "69320478-2452-465e-bc0d-89cdc8bd4428"
    }
    fetcher_user.query_athena()
However, this does not work and I assumed it's because I have to create a new instance of the class. So then I tried the following:
@patch(
    "query_fetcher.QueryFetcher.athena",
    new_callable=PropertyMock,
)
def test_query_athena(mock_athena):
    # NOTE(review): query_athena reads self._athena directly, so mocking the
    # `athena` property never intercepts those calls -- patch the attribute
    # (or boto3.client) instead.
    mock_athena.return_value.get_query_execution.return_value = {
        "QueryExecution": {"Status": {"State": "SUCCEEDED"}}
    }
    mock_athena.return_value.start_query_execution.return_value = {
        "QueryExecutionId": "69320478-2452-465e-bc0d-89cdc8bd4428"
    }
    # create a new class instance
    fetcher_user = QueryFetcher(query="")
    fetcher_user.query_athena()
But this doesn't work either. What am I missing here?
Ideally, I'd like to mock the property athena on my fixture fetcher_user instead of creating a new instance entirely. I also do not want to modify my fixture to mock the functionality inside the fixture. Is that even possible?
I'd appreciate any help on this issue, thanks!
I resolved this issue by doing the following:
import mock
def test_query_athena(fetcher_user):
    """Swap the private client for a MagicMock and drive query_athena."""
    fetcher_user._athena = mock.MagicMock()
    fetcher_user._athena.get_query_execution.return_value = {"QueryExecution": {"Status": {"State": "SUCCEEDED"}}}
    fetcher_user._athena.start_query_execution.return_value = {"QueryExecutionId": "69320478-2452-465e-bc0d-89cdc8bd4428"}
    fetcher_user.query_athena()
This also makes use of the fixture as I wanted and keeps things simple :)
Background
I want to limit the number of times that AWS Parameter Store is called in my AWS Lambda. Using a global variable, I'm caching a Parameter Store value on the first call to Parameter Store.
main.py
import os
import boto3
# Module-level cache so warm Lambda invocations skip the SSM call.
redis_password = None


def get_redis_password():
    """Fetch the Redis password from SSM Parameter Store, caching the response.

    Only the first call hits SSM; later calls reuse the cached response dict.
    """
    global redis_password
    if not redis_password:
        client = boto3.client("ssm")
        # Fix: the original f-string reused double quotes inside a
        # double-quoted literal (os.environ["ENV"]) -- a SyntaxError on
        # Python < 3.12.
        redis_password = client.get_parameter(
            Name=f"{os.environ['ENV']}.redis-cache.password",
            WithDecryption=True
        )
    return redis_password["Parameter"]["Value"]
def lambda_handler(event, context):
    # Warm the cache (returned value is unused here).
    get_redis_password()
However, if I want to cache multiple Parameter Store values, I must create multiple global variables and if not [INSERT_GLOBAL_VARIABLE] checks. For example:
main.py
import os
import boto3
# Module-level caches reused across warm Lambda invocations.
redis_password = None
another_parameter_store_value = None


def get_redis_password():
    """Fetch and cache the Redis password from SSM Parameter Store."""
    global redis_password
    if not redis_password:
        client = boto3.client("ssm")
        # Quote fix: os.environ['ENV'] -- nested double quotes were a
        # SyntaxError on Python < 3.12.
        redis_password = client.get_parameter(
            Name=f"{os.environ['ENV']}.redis-cache.password",
            WithDecryption=True
        )
    return redis_password["Parameter"]["Value"]
def get_another_parameter_store_value():
    """Fetch and cache the second Parameter Store value."""
    global another_parameter_store_value
    if not another_parameter_store_value:
        client = boto3.client("ssm")
        another_parameter_store_value = client.get_parameter(
            Name=f"{os.environ['ENV']}.another.parameter.store.key",
            WithDecryption=True
        )
    # Bug fix: the original returned redis_password here (copy-paste slip);
    # also fixed the nested double quotes in the f-string above.
    return another_parameter_store_value["Parameter"]["Value"]
def lambda_handler(event, context):
    # Prime both cached parameters on a (possibly cold) invocation.
    get_redis_password()
    get_another_parameter_store_value()
Attempted Solution
In an attempt to solve this issue, I've created a Parameter Store utility.
parameter_util.py
import os
import boto3
class ParameterUtil:
    """Small wrapper around SSM get_parameter with an env-prefixed name."""

    def __init__(self):
        # One client per instance; reusing the instance reuses the client.
        self.boto_client = boto3.client("ssm")

    def get_parameter(self, parameter_path):
        """Return the decrypted value of ``{ENV}.{parameter_path}``."""
        response = self.boto_client.get_parameter(
            Name=f"{os.environ['ENV']}.{parameter_path}", WithDecryption=True
        )
        return response["Parameter"]["Value"]
My theory is that by instantiating the AWS Boto client as an instance variable, it will cache the entire Boto client object. Then get_parameter will be called using the cached Boto client. For example:
main.py
import os
import boto3
from parameter_util import ParameterUtil
# Module-level cache so warm Lambda invocations skip the SSM call.
redis_password = None


def get_redis_password():
    """Fetch and cache the Redis password from SSM Parameter Store."""
    global redis_password
    if not redis_password:
        client = boto3.client("ssm")
        # Quote fix: os.environ['ENV'] -- nested double quotes were a
        # SyntaxError on Python < 3.12.
        redis_password = client.get_parameter(
            Name=f"{os.environ['ENV']}.redis-cache.password",
            WithDecryption=True
        )
    return redis_password["Parameter"]["Value"]
def lambda_handler(event, context):
    # ParameterUtil prefixes names with f"{ENV}.", so paths must not start
    # with a dot (".redis-cache.password" would become "{ENV}..redis-cache...").
    param_util = ParameterUtil()
    param_util.get_parameter("redis-cache.password")
    param_util.get_parameter("another.parameter.store.key")
However, I'm not really sure if this solves the issue.
Questions
Does caching the Boto client result in only one call per parameter to the Parameter Store when get_parameter is called? Or am I optimizing in the wrong place?
Your original code won't work because param_util is a local variable that will go out of scope for every Lambda call.
You can use the built-in #functools.lru_cache to create a simple function that handles any parameter. It will cache the return values for you based on the input of the function (Python 3.2+).
Decorator to wrap a function with a memoizing callable that saves up to the maxsize most recent calls. It can save time when an expensive or I/O bound function is periodically called with the same arguments.
Example:
# Module-level client: created once per Lambda container and reused.
ssm_client = boto3.client("ssm")


@lru_cache(maxsize=None)
def get_param(name):
    """Fetch a decrypted, env-prefixed parameter; results are memoized."""
    return ssm_client.get_parameter(
        Name=f"{os.environ['ENV']}.{name}",
        WithDecryption=True
    )["Parameter"]["Value"]


def lambda_handler(event, context):
    redis_password = get_param("redis-cache.password")
    another_parameter_store_key = get_param("another.parameter.store.key")
I like this approach. I might suggest abstracting it a little bit to something like this:
main.py
# Cache of parameter values, keyed by full parameter name.
parameter_store_values = {}
client = boto3.client("ssm")


def lookup_function(key):
    """Return the decrypted parameter `key`, fetching it from SSM at most once."""
    global parameter_store_values
    global client
    if parameter_store_values.get(key) is None:
        parameter_store_values[key] = client.get_parameter(
            Name=key,
            WithDecryption=True)["Parameter"]["Value"]
    # Bug fix: returning from the cache also covers the cache-hit case (the
    # original only returned inside the `if`, yielding None on a hit).
    return parameter_store_values[key]


def lambda_handler(event, context):
    # Quote fix: os.environ['ENV'] -- nested double quotes were a SyntaxError
    # on Python < 3.12.
    redis_password = lookup_function(f"{os.environ['ENV']}.redis-cache.password")
    another_parameter_store_key = lookup_function(f"{os.environ['ENV']}.another.parameter.store.key")