I am trying to mock the result of an API call made to Compute Engine to list VMs, but unfortunately I couldn't mock the exact function.
I've tried using patch and Mock to mock the specific calls made, still unsuccessfully.
My code.py file looks like this:
import googleapiclient.discovery
import logging
class Service:
def __init__(self, project, event):
self.project_id = project
self.compute = googleapiclient.discovery.build('compute', 'v1',
cache_discovery=False)
self.event = event
self.zones = self._validate_event()
def _validate_event(self):
if "jsonPayload" not in self.event:
zones = self.compute.zones().list(
project=self.project_id).execute()['items']
else:
zones = self.compute.zones().get(project=self.project_id,
zone=self.event["jsonPayload"]
["resource"]["zone"]).execute()
logging.debug(f"Identified Zones are {zones}")
return [zone["name"] for zone in zones]
My test file looks like this
# in-built
from unittest import TestCase
from unittest.mock import patch
# custom
import code
class TestServiceModule(TestCase):
def setUp(self):
self.project_id = "sample-project-id"
@patch('code.googleapiclient.discovery')
def test__validate_event_with_empty_inputs(self, mock_discovery):
mock_discovery.build.zones.list.execute.return_value = {"items": [
{
"name": "eu-west-1"
}
]}
obj = code.Service(event={}, project=self.project_id)
print(obj.zones)
In the above test case, I expected to see "eu-west-1" as the value when I print obj.zones.
You didn't mock the googleapiclient.discovery.build method correctly: each call in the chain (build(), zones(), list()) returns a new object, so every step has to be configured through return_value on the mock. Here is the unit test solution:
E.g.
code.py:
import googleapiclient.discovery
import logging
class Service:
def __init__(self, project, event):
self.project_id = project
self.compute = googleapiclient.discovery.build('compute', 'v1', cache_discovery=False)
self.event = event
self.zones = self._validate_event()
def _validate_event(self):
if "jsonPayload" not in self.event:
zones = self.compute.zones().list(project=self.project_id).execute()['items']
else:
zones = self.compute.zones().get(project=self.project_id,
zone=self.event["jsonPayload"]["resource"]["zone"]).execute()
logging.debug(f"Identified Zones are {zones}")
return [zone["name"] for zone in zones]
test_code.py:
from unittest import TestCase, main
from unittest.mock import patch
import code
class TestService(TestCase):
def setUp(self):
self.project_id = "sample-project-id"
@patch('code.googleapiclient.discovery')
def test__validate_event_with_empty_inputs(self, mock_discovery):
# Arrange
mock_discovery.build.return_value.zones.return_value.list.return_value.execute.return_value = {
"items": [{"name": "eu-west-1"}]}
# Act
obj = code.Service(event={}, project=self.project_id)
# Assert
mock_discovery.build.assert_called_once_with('compute', 'v1', cache_discovery=False)
mock_discovery.build.return_value.zones.assert_called_once()
mock_discovery.build.return_value.zones.return_value.list.assert_called_once_with(project='sample-project-id')
mock_discovery.build.return_value.zones.return_value.list.return_value.execute.assert_called_once()
self.assertEqual(obj.zones, ["eu-west-1"])
if __name__ == '__main__':
main()
unit test result with coverage report:
.
----------------------------------------------------------------------
Ran 1 test in 0.002s
OK
Name                                      Stmts   Miss  Cover   Missing
-------------------------------------------------------------------------
src/stackoverflow/56794377/code.py           14      1    93%   16
src/stackoverflow/56794377/test_code.py      16      0   100%
-------------------------------------------------------------------------
TOTAL                                        30      1    97%
Versions:
google-api-python-client==1.12.3
Python 3.7.5
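A shorter variant is to patch build directly instead of the whole discovery module, which removes one level of return_value from the chain. A minimal sketch, assuming the same code.py as above (the test class name here is just for illustration):
from unittest import TestCase
from unittest.mock import patch

import code


class TestServiceBuildPatch(TestCase):
    @patch('code.googleapiclient.discovery.build')
    def test__validate_event_with_empty_inputs(self, mock_build):
        # build() now returns a MagicMock standing in for the compute client
        mock_client = mock_build.return_value
        mock_client.zones.return_value.list.return_value.execute.return_value = {
            "items": [{"name": "eu-west-1"}]
        }

        obj = code.Service(event={}, project="sample-project-id")

        mock_build.assert_called_once_with('compute', 'v1', cache_discovery=False)
        self.assertEqual(obj.zones, ["eu-west-1"])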
Related
I have been trying to patch the list_blobs() function of ContainerClient but have not been able to do this successfully. The code below outputs a MagicMock, so the function isn't patched as I would expect it to be (I'm trying to patch it to return the list ['Blob1', 'Blob2']).
#################Script File
import sys
from datetime import datetime, timedelta
import pyspark
import pytz
import yaml
# from azure.storage.blob import BlobServiceClient, ContainerClient
from pyspark.dbutils import DBUtils as dbutils
import azure.storage.blob
# Open Config
def main():
spark_context = pyspark.SparkContext.getOrCreate()
spark_context.addFile(sys.argv[1])
stream = None
stream = open(sys.argv[1], "r")
config = yaml.load(stream, Loader=yaml.FullLoader)
stream.close()
account_key = dbutils.secrets.get(scope=config["Secrets"]["Scope"], key=config["Secrets"]["Key Name"])
target_container = config["Storage Configuration"]["Container"]
target_account = config["Storage Configuration"]["Account"]
days_history_to_keep = config["Storage Configuration"]["Days History To Keep"]
connection_string = (
"DefaultEndpointsProtocol=https;AccountName="
+ target_account
+ ";AccountKey="
+ account_key
+ ";EndpointSuffix=core.windows.net"
)
blob_service_client: azure.storage.blob.BlobServiceClient = (
azure.storage.blob.BlobServiceClient.from_connection_string(connection_string)
)
container_client: azure.storage.blob.ContainerClient = (
blob_service_client.get_container_client(target_container)
)
blobs = container_client.list_blobs()
print(blobs)
print(blobs)
utc = pytz.UTC
delete_before_date = utc.localize(
datetime.today() - timedelta(days=days_history_to_keep)
)
for blob in blobs:
if blob.creation_time < delete_before_date:
print("Deleting Blob: " + blob.name)
container_client.delete_blob(blob, delete_snapshots="include")
if __name__ == "__main__":
main()
#################Test File
import unittest
from unittest import mock
import DeleteOldBlobs
class DeleteBlobsTest(unittest.TestCase):
def setUp(self):
pass
#mock.patch("DeleteOldBlobs.azure.storage.blob.ContainerClient")
#mock.patch("DeleteOldBlobs.azure.storage.blob.BlobServiceClient")
#mock.patch("DeleteOldBlobs.dbutils")
#mock.patch("DeleteOldBlobs.sys")
#mock.patch('DeleteOldBlobs.pyspark')
def test_main(self, mock_pyspark, mock_sys, mock_dbutils, mock_blobserviceclient, mock_containerclient):
# mock setup
config_file = "Delete_Old_Blobs_UnitTest.yml"
mock_sys.argv = ["unused_arg", config_file]
mock_dbutils.secrets.get.return_value = "A Secret"
mock_containerclient.list_blobs.return_value = ["ablob1", "ablob2"]
# execute test
DeleteOldBlobs.main()
# TODO assert actions taken
# mock_sys.argv.__get__.assert_called_with()
# dbutils.secrets.get(scope=config['Secrets']['Scope'], key=config['Secrets']['Key Name'])
if __name__ == "__main__":
unittest.main()
Output:
<MagicMock name='BlobServiceClient.from_connection_string().get_container_client().list_blobs()' id='1143355577232'>
What am I doing incorrectly here?
I'm not able to execute your code at the moment, but I have tried to simulate it. To do this I created the following three files in the path /<path-to>/pkg/sub_pkg1 (where pkg and sub_pkg1 are packages).
File ContainerClient.py
def list_blobs(self):
return "blob1"
File DeleteOldBlobs.py
from pkg.sub_pkg1 import ContainerClient
# Open Config
def main():
blobs = ContainerClient.list_blobs()
print(blobs)
print(blobs)
File DeleteBlobsTest.py
import unittest
from unittest import mock
from pkg.sub_pkg1 import DeleteOldBlobs
class DeleteBlobsTest(unittest.TestCase):
def setUp(self):
pass
def test_main(self):
mock_containerclient = mock.MagicMock()
with mock.patch("DeleteOldBlobs.ContainerClient.list_blobs", mock_containerclient.list_blobs):
mock_containerclient.list_blobs.return_value = ["ablob1", "ablob2"]
DeleteOldBlobs.main()
if __name__ == '__main__':
unittest.main()
If you execute the test code you obtain the output:
['ablob1', 'ablob2']
['ablob1', 'ablob2']
This output means that the list_blobs() function is successfully mocked by mock_containerclient.list_blobs.
I don't know whether this is directly useful for you, since I can't simulate your code more closely at the moment, but I hope it gives you a starting point towards your real solution.
The structure of the answer didn't match my solution; perhaps both will work, but it was important for me to patch pyspark even though I never call it, otherwise exceptions would get thrown when my code tried to interact with Spark.
Perhaps this will be useful to someone:
#mock.patch("DeleteOldBlobs.azure.storage.blob.BlobServiceClient")
#mock.patch("DeleteOldBlobs.dbutils")
#mock.patch("DeleteOldBlobs.sys")
#mock.patch('DeleteOldBlobs.pyspark')
def test_list_blobs_called_once(self, mock_pyspark, mock_sys, mock_dbutils, mock_blobserviceclient):
# mock setup
config_file = "Delete_Old_Blobs_UnitTest.yml"
mock_sys.argv = ["unused_arg", config_file]
account_key = 'Secret Key'
mock_dbutils.secrets.get.return_value = account_key
bsc_mock: mock.Mock = mock.Mock()
container_client_mock = mock.Mock()
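# Note: Blob is assumed to be a small helper (e.g. a dataclass) exposing .name and .creation_time; a sketch of it follows after this snippet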
blob1 = Blob('newblob', datetime.today())
blob2 = Blob('oldfile', datetime.today() - timedelta(days=20))
container_client_mock.list_blobs.return_value = [blob1, blob2]
bsc_mock.get_container_client.return_value = container_client_mock
mock_blobserviceclient.from_connection_string.return_value = bsc_mock
# execute test
DeleteOldBlobs.main()
#Assert Results
container_client_mock.list_blobs.assert_called_once()
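The Blob objects used above aren't defined in the snippet. A minimal stand-in, assuming the script only needs the name and creation_time attributes that main() reads, could look like this:
from dataclasses import dataclass
from datetime import datetime


@dataclass
class Blob:
    """Minimal stand-in for the blob properties main() inspects."""
    name: str
    creation_time: datetime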
I would like to serve both gRPC and HTTP in my Flow, but the Flow description only allows a single value in the protocol parameter. Is it possible to add both? If not, do I have to deploy two Flows, or is there a better workaround?
From what I can see, the documentation doesn't mention whether I can have two gateways.
f = Flow(protocol='grpc', port=12345).add(uses=FooExecutor)
with f:
client = Client(port=12345)
docs = client.post(on='/')
print(docs.texts)
Unfortunately by default, no.
But you can develop your own custom gateway that enables both protocols at the same time.
A sample custom gateway looks like the following (borrowed from here)
import grpc
from grpc_health.v1 import health, health_pb2, health_pb2_grpc
from grpc_reflection.v1alpha import reflection
from pydantic import BaseModel
from uvicorn import Config, Server
from jina import Gateway, __default_host__
from jina.proto import jina_pb2, jina_pb2_grpc
class DummyResponseModel(BaseModel):
protocol: str
class MultiProtocolGateway(Gateway):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.http_port = self.ports[0]
self.grpc_port = self.ports[1]
self.health_servicer = health.HealthServicer(experimental_non_blocking=True)
async def _setup_http_server(self):
from fastapi import FastAPI
app = FastAPI(
title='HTTP Server',
)
@app.get(path='/', response_model=DummyResponseModel)
def _get_response():
return {'protocol': 'http'}
self.http_server = Server(
Config(app, host=__default_host__, port=self.http_port)
)
async def _setup_grpc_server(self):
self.grpc_server = grpc.aio.server()
jina_pb2_grpc.add_JinaRPCServicer_to_server(
self.streamer._streamer, self.grpc_server
)
service_names = (
jina_pb2.DESCRIPTOR.services_by_name['JinaRPC'].full_name,
reflection.SERVICE_NAME,
)
# Mark all services as healthy.
health_pb2_grpc.add_HealthServicer_to_server(
self.health_servicer, self.grpc_server
)
for service in service_names:
self.health_servicer.set(service, health_pb2.HealthCheckResponse.SERVING)
reflection.enable_server_reflection(service_names, self.grpc_server)
self.grpc_server.add_insecure_port(f'{__default_host__}:{self.grpc_port}')
await self.grpc_server.start()
async def setup_server(self):
await self._setup_http_server()
await self._setup_grpc_server()
async def run_server(self):
await self.http_server.serve()
await self.grpc_server.wait_for_termination()
async def shutdown(self):
self.http_server.should_exit = True
await self.grpc_server.stop(0)
await self.http_server.shutdown()
self.health_servicer.enter_graceful_shutdown()
@property
def _should_exit(self) -> bool:
return self.http_server.should_exit
And you can access it in the following way:
from xxx import MultiProtocolGateway
from xxx import MyExecutor
from jina import Flow, Client, DocumentArray
http_port = 51000
grpc_port = 52000
flow = Flow().config_gateway(
uses=MultiProtocolGateway,
port=[http_port, grpc_port],
protocol=['http', 'grpc'],
).add(MyExecutor)
with flow:
c1 = Client(host='http://0.0.0.0:51000')
c1.post(on='/', inputs=DocumentArray().empty(5))
c2 = Client(host='grpc://0.0.0.0:52000')
c2.post(on='/', inputs=DocumentArray().empty(5))
I'm using the nornir (3.3.0) automation framework with Python 3.8. I'd like to mock the SSH access to the devices in order to do testing without having some real or virtual network equipment online. How would I use patch or Mock/MagicMock from unittest.mock to mock netmiko_send_command (ssh interaction with device)?
I have the following nornir task function:
# dbb_automation/tasks.py
from nornir.core.task import Task, Result, MultiResult
from nornir_netmiko.tasks import netmiko_send_command
from nornir_utils.plugins.tasks.files import write_file
def get_interfaces_with_ip(task: Task):
log.debug(f"{task.name}: Getting result on host {task.host}")
result: MultiResult = task.run(name="show ip int br | e unass", task=netmiko_send_command,
command_string="show ip int br | e unass")
content_str = result[0].result
task.run(
task=write_file,
filename=f"outputs/{task.host}-{purpose}.{ending}",
content=content_str
)
return Result(
host=task.host,
result=f"{task.host.name} got ip result"
)
and the following test case (work in progress):
# tests/test_tasks.py
from dbb_automation.tasks import get_interfaces_with_ip
from nornir import InitNornir
from nornir.core.filter import F
from tests.settings import *
def test_get_interfaces_with_ip():
# [x] init nornir with fake host
# [ ] patch/mock netmiko_send_command
# [ ] check file contents with patched return string of netmiko_send_command
nr = InitNornir(
core={
"raise_on_error": True
},
runner={
"plugin": "threaded",
"options": {
"num_workers": 1,
}
},
inventory={
"plugin": "SimpleInventory",
"options": {
"host_file": DNAC_HOSTS_YAML,
"group_file": DNAC_GROUPS_YAML,
"defaults_file": DNAC_DEFAULT_YAML
}
},
logging={
"log_file": "logs/nornir.log"
}
)
result = nr.filter(F(has_parent_group="Borders")).run(name="get_interfaces_with_ip", task=get_interfaces_with_ip)
# todo: test code
assert False
Regards,
Gérard
I think I found the solution. The key was to patch where the imported function is used, not where it is defined, and to set the return value on the mock object.
#patch("dbb_automation.tasks.netmiko_send_command")
def test_get_interfaces_with_ip(mock_netmiko_send_command, nr):
...
mock_netmiko_send_command.return_value = """Interface IP-Address OK? Method Status Protocol
GigabitEthernet22 10.1.54.146 YES TFTP up up
Loopback0 10.150.32.2 YES other up up
Port-channel1.2 10.150.33.65 YES manual up up
...
"""
import pytest
import os
import shutil
from unittest.mock import patch
from dbb_automation.tasks import get_interfaces_with_ip
from nornir import InitNornir
from nornir.core.filter import F
from tests.settings import *
@pytest.fixture()
def nr():
nr = InitNornir(
core={
"raise_on_error": True
},
runner={
"plugin": "threaded",
"options": {
"num_workers": 1,
}
},
inventory={
"plugin": "SimpleInventory",
"options": {
"host_file": DNAC_HOSTS_YAML,
"group_file": DNAC_GROUPS_YAML,
"defaults_file": DNAC_DEFAULT_YAML
}
},
logging={
"log_file": "logs/nornir.log"
}
)
return nr
#patch("dbb_automation.tasks.netmiko_send_command")
def test_get_interfaces_with_ip(mock_netmiko_send_command, nr):
output_folder_name = "outputs"
shutil.rmtree(output_folder_name)
os.mkdir(output_folder_name)
mock_netmiko_send_command.return_value = """Interface IP-Address OK? Method Status Protocol
GigabitEthernet22 10.1.54.146 YES TFTP up up
Loopback0 10.150.32.2 YES other up up
Port-channel1.2 10.150.33.65 YES manual up up
"""
nr.filter(F(has_parent_group="Borders")).run(name="get_interfaces_with_ip", task=get_interfaces_with_ip)
# test code
count = 0
files_found = None
for root_dir, cur_dir, files in os.walk(output_folder_name):
count += len(files)
assert files_found is None # make sure there are no subdirectories
files_found = files
assert count == 4 # we expect a file for each host
for file_name in files_found:
with open(f"{output_folder_name}/{file_name}") as f:
assert f.read() == mock_netmiko_send_command.return_value
The use case is that I want to mock the opening of two files, ~/.myconf and ./.myconf, but not any others.
I'm testing the setup of a complex object which reads multiple files in its __init__, so I'd like to mock some data for some of them and not mock at all for the others.
As an example, here is how I mock the conditional opening of those two files, but it feels complex, and I find it odd that there's no easy built-in way that I'm missing.
import builtins
import configparser
import unittest
from textwrap import dedent
from pathlib import Path
from unittest.mock import mock_open, patch
OPEN = builtins.open
def get_hierarchical_config():
cwd = Path.cwd()
global_config = configparser.ConfigParser()
local_config = configparser.ConfigParser()
full_config = configparser.ConfigParser()
global_config.read(Path("~/.myconf").expanduser().resolve())
local_config.read((cwd / ".myconf").expanduser().resolve())
full_config.read_dict(global_config)
full_config.read_dict(local_config)
return full_config["mysection"]
def get_custom_mock_open(global_conf_str, local_conf_str) -> callable:
def mocked_open():
def conditional_open_func(path, *args, **kwargs):
p = Path(path).expanduser().resolve()
if p.name == ".myconfig":
if p.parent == Path.home():
return mock_open(read_data=global_conf_str)()
return mock_open(read_data=local_conf_str)()
return OPEN(path, *args, **kwargs)
return conditional_open_func
return mocked_open
[...]
class TestConfig(unittest.TestCase):
def test_read_confs(self):
global_conf = dedent(
"""\
[mysection]
no_overwrite=path/to/somewhere
local_overwrite=ERROR:not overwritten
syntax_test_key= no/space= problem2
"""
)
local_conf = dedent(
"""\
[mysection]
local_overwrite=SUCCESS:overwritten
local_new_key=cool value
"""
)
with patch(
"builtins.open",
new_callable=get_custom_mock_open(global_conf, local_conf),
):
conf = dict(get_hierarchical_config()) # reads the config files
target = {
"no_overwrite": "path/to/somewhere",
"local_overwrite": "SUCCESS:overwritten",
"syntax_test_key": "no/space= problem2",
"local_new_key": "cool value",
}
self.assertDictEqual(conf, target)
I am trying to patch the fun_1 function from the worker_functions dictionary and I seem to be struggling:
cli.py:
import sys
from worker_functions import (
fun_1,
fun_2,
fun_3,
)
FUNCTION_MAP = {
'run_1': fun_1,
'run_2': fun_2,
'run_3': fun_3,
}
def main():
command = sys.argv[1]
tag = sys.argv[2]
action = FUNCTION_MAP[command]
action(tag)
I've tried mocking cli.fun_1, cli.main.action, and cli.action, but each of these leads to failure.
test_cli.py:
from mock import patch
from cli import main
def make_test_args(tup):
sample_args = ['cli.py']
sample_args.extend(tup)
return sample_args
def test_fun_1_command():
test_args = make_test_args(['run_1', 'fake_tag'])
with patch('sys.argv', test_args),\
patch('cli.fun_1') as mock_action:
main()
mock_action.assert_called_once()
Do I seem to be missing something?
You'll need to patch the references in the FUNCTION_MAP dictionary itself. Use the patch.dict() callable to do so:
from unittest.mock import patch, MagicMock
mock_action = MagicMock()
with patch('sys.argv', test_args),\
patch.dict('cli.FUNCTION_MAP', {'run_1': mock_action}):
# ...
That's because the FUNCTION_MAP dictionary is the location where the function reference is looked up when main() runs.
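Putting it together with the question's test_cli.py, a complete test might look like this (a sketch along those lines):
from unittest.mock import patch, MagicMock

from cli import main


def test_fun_1_command():
    test_args = ['cli.py', 'run_1', 'fake_tag']
    mock_action = MagicMock()
    # Swap the 'run_1' entry inside cli.FUNCTION_MAP for the duration of the test
    with patch('sys.argv', test_args), \
            patch.dict('cli.FUNCTION_MAP', {'run_1': mock_action}):
        main()
    mock_action.assert_called_once_with('fake_tag')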