How can I reference a variable in a Lambda AWS-RunShellScript command - Python

I am trying to pass a variable `account_id` to a shell script inside a Lambda function; this Lambda will run that script on the instances for that account.
import sys
import logging
import datetime
import boto3
import botocore
import time
import json
from os import getenv

region = 'ap-southeast-2'
boto3.setup_default_session(region_name=region)

# Account whose instances the shell command should target.
account_id = 123454


def handler(event, context):
    """Lambda entry point: run a shell command on this account's instances via SSM.

    Uses the AWS-RunShellScript document.  The account id is interpolated
    into the command with an f-string — the original used the plain string
    'echo {account_ID}', so the literal placeholder (with mismatched case)
    was sent instead of the value.

    Returns the SSM send_command response, or raises ClientError on failure.
    """
    client = boto3.client('ssm')
    # SSM requires instance ids as strings, not integers.
    instance_name_list = ['12345678890', '0000000000']
    if instance_name_list:
        try:
            response = client.send_command(
                Targets=[
                    {
                        'Key': 'InstanceIds',
                        'Values': instance_name_list,
                    },
                ],
                DocumentName='AWS-RunShellScript',
                Parameters={
                    'commands': [
                        # f-string interpolates the module-level account_id
                        f'echo {account_id}',
                    ]
                },
            )
            return response
        except botocore.exceptions.ClientError as err:
            # Original snippet had a bare `try` with no handler (SyntaxError).
            logging.error("send_command failed: %s", err)
            raise
What is the correct way of achieving this?

You just need to use string interpolation:
f'echo {account_id}'

Related

How to mock environment variables and packages before importing Python script using pytest tests?

I am using pytest to test out an AWS Lambda function I am writing. The function has some initialization code outside of any function. It looks similar to this:
catalog_service/lambda_v1.py
# NOTE(review): all of this is module-level initialization — it runs at
# import time, which is why tests fail before any fixture can patch os.environ.
import boto3
import botocore
import json
import os
from aws_lambda_powertools import Logger
from elastic_enterprise_search import AppSearch
# Structured logger from aws_lambda_powertools.
LOGGER = Logger()
# Read at import time: this is the line that raises KeyError under pytest
# when ENVIRONMENT is not set before the module is imported.
ENV = os.environ['ENVIRONMENT']
ssm = boto3.client('ssm')
# NOTE(review): `key` is not defined anywhere in this snippet — presumably
# the name of an SSM parameter; confirm against the full module.
key_response = ssm.get_parameter(
Name=key,
WithDecryption=True
)
# App Search client: host from the environment, bearer token from the
# decrypted SSM parameter fetched above.
APP_SEARCH = AppSearch(
os.environ['APP_SEARCH_API_HOST'],
bearer_auth=key_response['Parameter']['Value'],
)
# Additional functions
# Additional functions
Whenever I run my test, it always fails with a KeyError saying that ENVIRONMENT is not in os.environ which makes sense since I believe it's importing the Lambda script before running any of the other mocking code. Whenever I try to mock it with monkeypatch it seems like I am not doing it in the right spot. My test looks like this:
import pytest
import os
from . import fixtures
from catalog_service import lambda_v1 as v1


# The '@' of each decorator was mangled to '#' in the original post; without
# '@pytest.fixture' the fixture is never registered and the parametrize
# decorator was missing its decorator line entirely.
@pytest.fixture
def mock_set_environment(monkeypatch):
    """Set the ENVIRONMENT variable that lambda_v1 reads."""
    monkeypatch.setenv('ENVIRONMENT', 'qa')


class TestLambdaV1:
    @pytest.mark.parametrize('data, expected', [
        (
            {'a': 1},
            {
                'statusCode': '200',
                'headers': {
                    'content-type': 'application/json'
                },
                'body': '{"a": 1}'
            },
        ),
    ])
    def test_format_good_response(self, mock_set_environment, data, expected):
        # NOTE(review): lambda_v1 reads ENVIRONMENT at import time (top of this
        # test file), so this fixture runs too late to prevent the KeyError —
        # the env var must be set before the import, e.g. in conftest.py.
        assert os.getenv('ENVIRONMENT') == 'qa'
        result = v1.format_good_response(data)
        assert result == expected
I think it would mock the environment variable if it were being used within the function but it is used during the import. How do I mock the os.environ variable as well as the AppSearch and boto3 packages before they're imported / used?

Mocking AWS lambda with Moto

I am trying to mock an AWS lambda function, below is my sample code
def get_lambda_resp(arn: str, input: str) -> str:
    """Invoke the Lambda at *arn* with {"param": input} and return its 'value' field."""
    request_payload = json.dumps({"param": input}).encode("utf-8")
    client = boto3.client("lambda")
    raw_response = client.invoke(
        FunctionName=arn,
        LogType="None",
        Payload=request_payload,
    )
    body = raw_response["Payload"].read().decode("utf-8")
    return json.loads(body)["value"]
and below is my test case
import io
import zipfile
import boto3
from moto import mock_lambda


# The '@' of the decorator was mangled to '#' in the original post; without
# '@mock_lambda' the test would hit real AWS instead of moto.
@mock_lambda
def test():
    """Create a stub Lambda under moto and exercise the code under test."""
    conn = boto3.client('lambda', 'us-east-1')

    def get_test_zip_file():
        # Minimal deployment package: one module with a handler that returns
        # a fixed JSON body.
        pfunc = '''
import json
def lambda_handler(event, context):
    resp = {"value": "input_str"}
    return json.dumps(resp)
'''
        zip_output = io.BytesIO()
        zip_file = zipfile.ZipFile(zip_output, 'w', zipfile.ZIP_DEFLATED)
        zip_file.writestr('lambda_function.py', pfunc)
        zip_file.close()
        zip_output.seek(0)
        return zip_output.read()

    conn.create_function(
        FunctionName='lambda-function-name',
        Runtime='python3.8',
        Role='test-iam-role',
        Handler='lambda_function.lambda_handler',
        Code={
            'ZipFile': get_test_zip_file(),
        },
        Description='test lambda function',
        Timeout=3,
        MemorySize=128,
        Publish=True
    )
    # NOTE(review): presumably this should call get_lambda_resp (the function
    # under test) with the created function's name/ARN — confirm; `get_auth`
    # is not defined in the post.
    resp = get_auth("arn", "input_str")
    assert resp is not None
While running the test case, I am getting below error
E ModuleNotFoundError: No module named 'docker'
I already have my Docker running, What else should I do to run it?
That message refers to the pip-module called docker.
Assuming you use Moto >=2.x, make sure you install it correctly to get all required dependencies:
pip install moto[awslambda,s3,service1,etc]
Or if you use many services, install all dependencies without having to list all services:
pip install moto[all]
This will install all required Pip modules, including Docker.
Source: https://github.com/spulec/moto/issues/3722

Passing AWS Credentials in Python Script

I have a python script that gets called by a PHP. The user that invokes this php script is apache and hence, the python file also gets invoked by apache. So, it gives "Unable to locate credentials ". I've set the default credentials via awscli and when I invoke the python script as root, it works.
This is my line of code :
client = boto3.client('ses', region_name=awsregion, aws_access_key_id='AJHHJHJHJ', aws_secret_access_key='asdasd/asdasd/asd')
But, this gives "Invalid Syntax" Error. So, I tried this :
client = boto3.Session(aws_access_key_id='ASDASD', aws_secret_access_key='asd/asdasd/asdasd')
client = boto3.client('ses', region_name=awsregion, aws_access_key_id='ASDASD', aws_secret_access_key='asd/asdasd/asdasd')
Gives the same error as above. Weird thing is that this same thing is mentioned in the documentation. Even though it's not recommended, it should work.
Can somebody help me in fixing this?
Did you ever get this resolved? Here is how I connect to boto3 in my Python scripts:
import boto3
from botocore.exceptions import ClientError
import re
from io import BytesIO
import gzip
import datetime
import dateutil.parser as dparser
from datetime import datetime
import tarfile
import requests
import sys
from awsglue.transforms import *
from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job

## Needed glue stuff
sc = SparkContext()
glueContext = GlueContext(sc)
spark = glueContext.spark_session
job = Job(glueContext)
##

## currently this will run for everything that is in the staging directory of omniture
# set needed parms
myProfileName = 'MyDataLake'
dhiBucket = 'data-lake'

# create boto3 session
try:
    # The original had stray keyword arguments pasted after the closing
    # parenthesis (`...)aws_session_token=None, region_name=None, ...`),
    # which is a syntax error; they duplicated region_name and are dropped.
    session = boto3.Session(
        aws_access_key_id='aaaaaaaaaaaa',
        aws_secret_access_key='abcdefghijklmnopqrstuvwxyz',
        region_name='us-east-1',
    )
    s3 = session.resource('s3')  # establish connection to s3
except Exception as conne:
    print("Unable to connect: " + str(conne))
    errtxt = requests.post(
        "https://errorcapturesite",
        data={'message': 'Unable to connect to : ' + myProfileName,
              'notify': True, 'color': 'red'},
    )
    print(errtxt.text)
    exit()

Appengine logservice with remote_api

I am trying to get my appengine application logs from remote.
I am using remote_api, I tried with appcfg but I discarded it because it has a limit on the download/buffer so I can't download all the logs.
Now I am using the logservice, but if I use it in my code it doesn't return anything.
Here is my code:
import time
import urllib2
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.api.logservice import logservice
import getpass
import base64
import os
from appcfg import *
import getpass
import subprocess

# Fake a GAE runtime environment so remote_api / logservice accept the calls.
os.environ['HTTP_X_APPENGINE_TASKRETRYCOUNT'] = '1'
os.environ["SERVER_SOFTWARE"] = "Developement"
os.environ['HTTP_HOST'] = 'unitTest'
os.environ['CURRENT_MODULE_ID'] = 'default'
os.environ['CURRENT_VERSION_ID'] = '1.0'

email_address = "iacopo#indiegala.com"
application_url = "store-indiegala.appspot.com"


def aut():
    """Authenticate against the app's remote_api endpoint, then fetch logs."""
    app_name = "store-indiegala.appspot.com"
    # Original defined an unused `f = lambda: (email, password)` here but
    # passed auth_func below — the dead lambda is removed.
    remote_api_stub.ConfigureRemoteApi(None, '/_ah/remote_api', auth_func, app_name)
    print("successfully authenticated")
    fetch_logs()


def fetch_logs():
    """Print the source IP of every request log up to now (INFO and above)."""
    end_time = time.time()
    print("starting")
    for req_log in logservice.fetch(end_time=end_time, offset=None,
                                    minimum_log_level=logservice.LOG_LEVEL_INFO,
                                    include_app_logs=True, include_incomplete=True):
        # Parenthesized print works identically for one argument under
        # Python 2 (this is legacy GAE Python 2 code).
        print(req_log.ip)


def auth_func():
    """Prompt for the password of the configured account."""
    global email_address
    return (email_address, getpass.getpass('Password:'))


aut()
It successfully connects to my app and it makes the logservice.fetch() call, but it returns an empty object... why?
Go to your logs in the App Engine admin and make sure you have the right module and version. They can be found in each log entry, for example:
2015-01-24 21:58:43.425 / active start=2015-01-24,13:57:36.773 AppEngine-Google; (+http://code.google.com/appengine) module=default version=baseline
Becomes:
import os

# Match the module/version shown in the admin-console log entry so that
# logservice.fetch() looks at the right logs.  (The original snippet had a
# stray trailing backtick after the last line — a syntax error.)
os.environ["CURRENT_MODULE_ID"] = "default"
os.environ['CURRENT_VERSION_ID'] = "baseline"

Get App Engine logs with the help of remote_api

I am trying to get my appengine application logs from remote. I am using remote_api, I tried with appcfg but I discarded it because it has a limit on the download/buffer so I can't download all the logs.
Now I am using the logservice, but if I use it in my code it doesn't return anything. Here is my code:
import time
import urllib2
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.api.logservice import logservice
import getpass
import base64
import os
from appcfg import *
import getpass
import subprocess

# Fake a GAE runtime environment so remote_api / logservice accept the calls.
os.environ['HTTP_X_APPENGINE_TASKRETRYCOUNT'] = '1'
os.environ["SERVER_SOFTWARE"] = "Developement"
os.environ['HTTP_HOST'] = 'unitTest'
os.environ['CURRENT_MODULE_ID'] = 'default'
os.environ['CURRENT_VERSION_ID'] = '1.0'

email_address = "********"
application_url = "myappid.appspot.com"


def aut():
    """Authenticate against the app's remote_api endpoint, then fetch logs."""
    app_name = "myappid.appspot.com"
    # Original defined an unused `f = lambda: (email, password)` here but
    # passed auth_func below — the dead lambda is removed.
    remote_api_stub.ConfigureRemoteApi(None, '/_ah/remote_api', auth_func, app_name)
    print("successfully authenticated")
    fetch_logs()


def fetch_logs():
    """Print the source IP of every request log up to now (INFO and above)."""
    end_time = time.time()
    print("starting")
    for req_log in logservice.fetch(end_time=end_time, offset=None,
                                    minimum_log_level=logservice.LOG_LEVEL_INFO,
                                    include_app_logs=True, include_incomplete=True):
        # Parenthesized print works identically for one argument under
        # Python 2 (this is legacy GAE Python 2 code).
        print(req_log.ip)


def auth_func():
    """Prompt for the password of the configured account."""
    global email_address
    return (email_address, getpass.getpass('Password:'))


aut()
It successfully connects to my app and it makes the logservice.fetch() call, but it returns an empty object... why?

Categories