I tried the basic example from the AWS documentation, starting DynamoDB Local as follows:
java -Djava.library.path=./DynamoDBLocal_lib -jar DynamoDBLocal.jar -sharedDb
It produces the following output:
Initializing DynamoDB Local with the following configuration:
Port: 8000
InMemory: false
DbPath: null
SharedDb: true
shouldDelayTransientStatuses: false
CorsParams: *
The example Python code is as follows:
from __future__ import print_function  # Python 2/3 compatibility
import boto3

dynamodb = boto3.resource('dynamodb', region_name='us-west-2',
                          endpoint_url='http://localhost:8000')

table = dynamodb.create_table(
    TableName='Movies',
    KeySchema=[
        {
            'AttributeName': 'year',
            'KeyType': 'HASH'  # Partition key
        },
        {
            'AttributeName': 'title',
            'KeyType': 'RANGE'  # Sort key
        }
    ],
    AttributeDefinitions=[
        {
            'AttributeName': 'year',
            'AttributeType': 'N'
        },
        {
            'AttributeName': 'title',
            'AttributeType': 'S'
        },
    ],
    ProvisionedThroughput={
        'ReadCapacityUnits': 10,
        'WriteCapacityUnits': 10
    }
)

print("Table status:", table.table_status)
But I get this output:
Traceback (most recent call last):
  File "test.py", line 32, in <module>
    'WriteCapacityUnits': 10
  File "/home/janga/anaconda2/lib/python2.7/site-packages/boto3/resources/factory.py", line 520, in do_action
    response = action(self, *args, **kwargs)
  File "/home/janga/anaconda2/lib/python2.7/site-packages/boto3/resources/action.py", line 83, in __call__
    response = getattr(parent.meta.client, operation_name)(**params)
  File "/home/janga/.local/lib/python2.7/site-packages/botocore/client.py", line 251, in _api_call
    return self._make_api_call(operation_name, kwargs)
  File "/home/janga/.local/lib/python2.7/site-packages/botocore/client.py", line 537, in _make_api_call
    raise ClientError(parsed_response, operation_name)
botocore.exceptions.ClientError: An error occurred (503) when calling the CreateTable operation (reached max retries: 9): <HTML><TITLE>503 Service Unavailable</TITLE>
<H1>503 Service Unavailable</H1>
Failed to connect to server <B>localhost</B></HTML>
I couldn't find a solution anywhere for this issue.
The only cause I can think of is that I am on a university network that sits behind a proxy server, but I can't find a fix for that either.
Please help.
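If the proxy is indeed the cause: botocore honors the HTTP_PROXY/HTTPS_PROXY environment variables, so even requests to localhost can get routed through the university proxy, which then fails to connect back to your machine. A minimal sketch of a workaround, assuming those variables are set in your environment, is to exempt local addresses via NO_PROXY before creating the resource:

import os
import boto3

# exempt local endpoints from any proxy configured in the environment,
# so boto3 connects to DynamoDB Local directly
os.environ['NO_PROXY'] = 'localhost,127.0.0.1'

dynamodb = boto3.resource('dynamodb', region_name='us-west-2',
                          endpoint_url='http://localhost:8000')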
Here is the schema of my DynamoDB table.
Here is how I am calling get_item:
if event['hcpcs_codes'] is not None:
    # codes must be in numerical, alphabetical order
    # multiple codes should be separated by a comma with no spaces
    client = boto3.client('dynamodb')
    response = None
    try:
        response = client.get_item(TableName=os.environ['TABLE'],
                                   Key={'antecedents': {'S': str(event['hcpcs_codes'])}})
    except ValidationError as e:
        print(e)
        print('here comes the tacos brah')
    print(response)
but I get the error:
[ERROR] ClientError: An error occurred (ValidationException) when calling the GetItem operation: The provided key element does not match the schema
Traceback (most recent call last):
  File "/var/task/lambdafile.py", line 19, in lambda_handler
    Key={'antecedents':{'S': str(event['hcpcs_codes'])}})
  File "/var/task/botocore/client.py", line 415, in _api_call
    return self._make_api_call(operation_name, kwargs)
  File "/var/task/botocore/client.py", line 745, in _make_api_call
    raise error_class(parsed_response, operation_name)
And according to the docs, it looks like I'm doing this right:
https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/dynamodb.html#DynamoDB.Client.get_item
response = client.get_item(
    TableName='string',
    Key={
        'string': {
            'S': 'string',
            'N': 'string',
            'B': b'bytes',
            'SS': [
                'string',
            ],
            'NS': [
                'string',
            ],
            'BS': [
                b'bytes',
            ],
            'M': {
                'string': {'... recursive ...'}
            },
            'L': [
                {'... recursive ...'},
            ],
            'NULL': True|False,
            'BOOL': True|False
        }
    },
    AttributesToGet=[
        'string',
    ],
    ConsistentRead=True|False,
    ReturnConsumedCapacity='INDEXES'|'TOTAL'|'NONE',
    ProjectionExpression='string',
    ExpressionAttributeNames={
        'string': 'string'
    }
)
You must supply both the partition key and the sort key when using GetItem if the table has a sort key set.
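A minimal sketch of a GetItem call that satisfies a two-attribute key schema, assuming the partition key is antecedents and using a hypothetical sort key named consequents (the table name and values here are placeholders):

import boto3

client = boto3.client('dynamodb')

# GetItem must name every attribute in the table's key schema:
# the partition (HASH) key and, if the table defines one, the sort (RANGE) key
response = client.get_item(
    TableName='my-table',                         # placeholder table name
    Key={
        'antecedents': {'S': '12345,67890'},      # partition key from the question
        'consequents': {'S': 'some-sort-value'},  # hypothetical sort key
    }
)
print(response.get('Item'))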
I have set up an AWS Pinpoint project and I'm trying to test it by sending an event from a Lambda function:
import boto3
import datetime
import time

client = boto3.client('pinpoint')

app_id = '1234abcd'
endpoint_id = 'test_endpoint'
address = 'test@test.com'

def lambda_handler(event, context):
    response = client.put_events(
        ApplicationId=app_id,
        EventsRequest={
            'BatchItem': {
                endpoint_id: {
                    'Endpoint': {
                        'ChannelType': 'EMAIL',
                        'Address': address,
                        'Attributes': {
                            'Cart': ['Hat'],
                            'Purchased': ['No']
                        }
                    },
                    'Events': {
                        'cart-event-2': {
                            'Attributes': {
                                'AddedToCart': 'Hat'
                            },
                            'EventType': 'AddToCartEvent',
                            'Metrics': {
                                'price': 29.95
                            },
                            'Timestamp': datetime.datetime.fromtimestamp(time.time()).isoformat()
                        }
                    }
                }
            }
        }
    )
    return response
But I am receiving an error that the resource cannot be found, even though I can see it in the Pinpoint console:
{
  "errorMessage": "An error occurred (NotFoundException) when calling the PutEvents operation: Resource not found",
  "errorType": "NotFoundException",
  "requestId": "xxxxx-xxxxx-xxxx-xxxx-xxxxxxxxx",
  "stackTrace": [
    "  File \"/var/task/lambda_function.py\", line 12, in lambda_handler\n    response = client.put_events(\n",
    "  File \"/var/runtime/botocore/client.py\", line 391, in _api_call\n    return self._make_api_call(operation_name, kwargs)\n",
    "  File \"/var/runtime/botocore/client.py\", line 719, in _make_api_call\n    raise error_class(parsed_response, operation_name)\n"
  ]
}
Turns out I was just in the wrong region on my AWS account. 🧠
I created the Pinpoint project in one region but was trying to send events to it from another AWS region, which is why I was getting the NotFoundException.
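A minimal sketch of the fix, pinning the client to the project's region instead of relying on the Lambda function's default region (us-east-1 here is an assumption; use whatever region the Pinpoint console shows for your project):

import boto3

# create the client in the region where the Pinpoint project actually lives
client = boto3.client('pinpoint', region_name='us-east-1')  # assumed region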
I'm trying to implement the object lock feature, but the functions (get/put_object_lock_configuration) are not available:
>>> import boto3
>>> boto3.__version__
'1.17.64'
>>> client = boto3.client('s3')
>>> client.get_object_lock_configuration
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/usr/lib/python3.6/site-packages/botocore/client.py", line 553, in __getattr__
    self.__class__.__name__, item)
AttributeError: 'S3' object has no attribute 'get_object_lock_configuration'
>>> client.get_object_lock_configuration(Bucket='tst', ExpectedBucketOwner='tst')
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
  File "/usr/lib/python3.6/site-packages/botocore/client.py", line 553, in __getattr__
    self.__class__.__name__, item)
AttributeError: 'S3' object has no attribute 'get_object_lock_configuration'
Edit:
The object lock functions are not showing up in Python tab completion either:
>>> client.get_object_
client.get_object_acl( client.get_object_tagging( client.get_object_torrent(
>>> client.put_object
client.put_object( client.put_object_acl( client.put_object_tagging(
get_object_lock_configuration is a method, not a property.
You need to call it like this:
response = client.get_object_lock_configuration(
    Bucket='string',
    ExpectedBucketOwner='string'
)
The syntax for calling client.get_object_lock_configuration:
response = client.get_object_lock_configuration(
    Bucket='string',
    ExpectedBucketOwner='string'
)
The syntax for calling client.put_object_lock_configuration:
response = client.put_object_lock_configuration(
    Bucket='string',
    ObjectLockConfiguration={
        'ObjectLockEnabled': 'Enabled',
        'Rule': {
            'DefaultRetention': {
                'Mode': 'GOVERNANCE'|'COMPLIANCE',
                'Days': 123,
                'Years': 123
            }
        }
    },
    RequestPayer='requester',
    Token='string',
    ContentMD5='string',
    ExpectedBucketOwner='string'
)
To learn more, please refer to: https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/s3.html#S3.Client.put_object_lock_configuration
Edit:
Example Code:
import json
import boto3

client = boto3.client('s3')

response = client.get_object_lock_configuration(
    Bucket='anynewname')
print(response)
Output syntax:
{
    'ObjectLockConfiguration': {
        'ObjectLockEnabled': 'Enabled',
        'Rule': {
            'DefaultRetention': {
                'Mode': 'GOVERNANCE'|'COMPLIANCE',
                'Days': 123,
                'Years': 123
            }
        }
    }
}
Note: it will throw an error if an object lock configuration is not set on the bucket.
{
  "errorMessage": "An error occurred (ObjectLockConfigurationNotFoundError) when calling the GetObjectLockConfiguration operation: Object Lock configuration does not exist for this bucket",
  "errorType": "ClientError",
  "stackTrace": [
    "  File \"/var/task/lambda_function.py\", line 7, in lambda_handler\n    response = client.get_object_lock_configuration(\n",
    "  File \"/var/runtime/botocore/client.py\", line 357, in _api_call\n    return self._make_api_call(operation_name, kwargs)\n",
    "  File \"/var/runtime/botocore/client.py\", line 676, in _make_api_call\n    raise error_class(parsed_response, operation_name)\n"
  ]
}
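A minimal sketch of handling that case, assuming you want to treat a missing configuration as "not locked" rather than as a failure (the bucket name is a placeholder):

import boto3
from botocore.exceptions import ClientError

client = boto3.client('s3')

try:
    response = client.get_object_lock_configuration(Bucket='anynewname')
    print(response['ObjectLockConfiguration'])
except ClientError as e:
    # the error code from the output above identifies the missing-config case
    if e.response['Error']['Code'] == 'ObjectLockConfigurationNotFoundError':
        print('No object lock configuration set on this bucket')
    else:
        raise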
I have spent the last 12 hours scouring the web. I am completely lost, please help.
I am trying to pull data from an API endpoint and put it into MongoDB. The data looks like this:
{
    "_links": {
        "self": {
            "href": "https://us.api.battle.net/data/sc2/ladder/271302?namespace=prod"
        }
    },
    "league": {
        "league_key": {
            "league_id": 5,
            "season_id": 37,
            "queue_id": 201,
            "team_type": 0
        },
        "key": {
            "href": "https://us.api.battle.net/data/sc2/league/37/201/0/5?namespace=prod"
        }
    },
    "team": [
        {
            "id": 6956151645604413000,
            "rating": 5321,
            "wins": 131,
            "losses": 64,
            "ties": 0,
            "points": 1601,
            "longest_win_streak": 15,
            "current_win_streak": 4,
            "current_rank": 1,
            "highest_rank": 10,
            "previous_rank": 1,
            "join_time_stamp": 1534903699,
            "last_played_time_stamp": 1537822019,
            "member": [
                {
                    "legacy_link": {
                        "id": 9964871,
                        "realm": 1,
                        "name": "mTOR#378",
                        "path": "/profile/9964871/1/mTOR"
                    },
                    "played_race_count": [
                        {
                            "race": "Zerg",
                            "count": 195
                        }
                    ],
                    "character_link": {
                        "id": 9964871,
                        "battle_tag": "Hellghost#11903",
                        "key": {
                            "href": "https://us.api.battle.net/data/sc2/character/Hellghost-11903/9964871?namespace=prod"
                        }
                    }
                }
            ]
        },
        {
            "id": 11611747760398664000, .....
            ....
Here's the code:
for ladder_number in ladder_array:
    ladder_call_url = ladder_call + slash + str(ladder_number) + eng_locale + access_token
    url = str(ladder_call_url)
    response = requests.get(url)
    print('trying ladder number ' + str(ladder_number))
    print('calling :' + url)
    if response.status_code == 200:
        print('status: ' + str(response))
        mmr_db.ladders.insert_one(response.json())
I get an error:
OverflowError: MongoDB can only handle up to 8-byte ints
Is this because the data I am trying to load is too large? Are the "ID" integers too large?
Oh man, any help would be sincerely appreciated.
_______ EDIT ____________
Edited to include the Traceback:
Traceback (most recent call last):
  File "C:\scripts\mmr_from_ladders.py", line 96, in <module>
    mmr_db.ladders.insert_one(response.json(), bypass_document_validation=True)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\collection.py", line 693, in insert_one
    session=session),
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\collection.py", line 607, in _insert
    bypass_doc_val, session)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\collection.py", line 595, in _insert_one
    acknowledged, _insert_command, session)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\mongo_client.py", line 1243, in _retryable_write
    return self._retry_with_session(retryable, func, s, None)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\mongo_client.py", line 1196, in _retry_with_session
    return func(session, sock_info, retryable)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\collection.py", line 590, in _insert_command
    retryable_write=retryable_write)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\pool.py", line 584, in command
    self._raise_connection_failure(error)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\pool.py", line 745, in _raise_connection_failure
    raise error
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\pool.py", line 579, in command
    unacknowledged=unacknowledged)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\network.py", line 114, in command
    codec_options, ctx=compression_ctx)
  File "C:\Users\me\AppData\Local\Programs\Python\Python37-32\lib\site-packages\pymongo\message.py", line 679, in _op_msg
    flags, command, identifier, docs, check_keys, opts)
OverflowError: MongoDB can only handle up to 8-byte ints
The BSON spec, MongoDB's native binary serialization format, only supports signed 32-bit and signed 64-bit integers; 8 bytes is 64 bits.
The maximum integer value that can be stored in a 64 bit int is:
9,223,372,036,854,775,807
In your example you appear to have larger ids, for example:
11,611,747,760,398,664,000
I'm guessing that the app generating this data is using uint64 types (an unsigned 64-bit integer can hold values up to 2^64 - 1, roughly twice the signed maximum).
I would start by looking at either of these potential solutions, if possible:
Changing the other side to use signed int64 types for the IDs.
Replacing the incoming IDs with ObjectId() values, which gives you a 12-byte GUID-like unique identifier.
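If you cannot change the producing side, one workaround on the ingest side is to coerce any integer that falls outside the signed 64-bit range into a string before inserting. A minimal sketch (sanitize, MAX_INT64, and MIN_INT64 are names introduced here for illustration):

MAX_INT64 = 2**63 - 1
MIN_INT64 = -2**63

def sanitize(value):
    # bool is a subclass of int in Python, so check it first
    if isinstance(value, bool):
        return value
    if isinstance(value, int) and not (MIN_INT64 <= value <= MAX_INT64):
        return str(value)  # store oversized IDs as strings
    if isinstance(value, dict):
        return {k: sanitize(v) for k, v in value.items()}
    if isinstance(value, list):
        return [sanitize(v) for v in value]
    return value

mmr_db.ladders.insert_one(sanitize(response.json()))

Note that string IDs no longer compare or sort numerically, so this is only reasonable if the IDs are opaque identifiers.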
I have been able to successfully authenticate, and list transfers and transfer runs. But I keep running into the issue of not being able to create a transfer because the transfer config is incorrect.
Here's the Transfer Config I have tried:
transferConfig = {
    'data_refresh_window_days': 1,
    'data_source_id': "adwords",
    'destination_dataset_id': "AdwordsMCC",
    'disabled': False,
    'display_name': "TestR",
    'name': "TestR",
    'schedule': "every day 07:00",
    'params': {
        "customer_id": "999999999"  # number changed
    }
}

response = client.create_transfer_config(parent, transferConfig)
print(response)
And this is the error I get:
Traceback (most recent call last):
  File "./create_transfer.py", line 84, in <module>
    main()
  File "./create_transfer.py", line 61, in main
    response = client.create_transfer_config(parent, transferConfig)
  File "/usr/local/Cellar/python3/3.6.2/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/google/cloud/bigquery_datatransfer_v1/gapic/data_transfer_service_client.py", line 438, in create_transfer_config
    authorization_code=authorization_code)
ValueError: Protocol message Struct has no "customer_id" field.
I managed to set up a data transfer through the API by defining the params as a google.protobuf.struct_pb2.Struct.
Try whether adding the following works for you:
from google.protobuf.struct_pb2 import Struct
params = Struct()
params["customer_id"] = "999999999"
And then changing your transferConfig to:
transferConfig = {
    'data_refresh_window_days': 1,
    'data_source_id': "adwords",
    'destination_dataset_id': "AdwordsMCC",
    'disabled': False,
    'display_name': "TestR",
    'name': "TestR",
    'schedule': "every day 07:00",
    'params': params
}
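For completeness, a minimal end-to-end sketch of the same approach, assuming default application credentials, the gapic client from the traceback above, and a hypothetical project ID ('my-project'); the parent string follows the projects/{project_id} convention:

from google.cloud import bigquery_datatransfer_v1
from google.protobuf.struct_pb2 import Struct

client = bigquery_datatransfer_v1.DataTransferServiceClient()
parent = 'projects/my-project'  # hypothetical project ID

# data-source parameters must be a protobuf Struct, not a plain dict
params = Struct()
params['customer_id'] = '999999999'

transferConfig = {
    'data_refresh_window_days': 1,
    'data_source_id': 'adwords',
    'destination_dataset_id': 'AdwordsMCC',
    'display_name': 'TestR',
    'schedule': 'every day 07:00',
    'params': params,
}

response = client.create_transfer_config(parent, transferConfig)
print(response)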