I am now able to upload a file and create an initial version for it, but I am unable to update the version. The code in Step 6 of this tutorial does not work, even after replacing versions:autodesk.core:File with versions:autodesk.bim360:File. From the error message, "Request contains 0 includes instead of 1", it seems I need part of the included block from the example in Step 5, but I am not sure what it should look like; since the outer relationships blocks differ in format between Steps 5 and 6, I assume the inner content varies as well.
def create_version_for_file(self, project_name, file_name, folder_id, object_id, storage_id):
    project_id = self.get_project_id_by_name(project_name)
    existing_object_id = self.get_version_id_for_file_in_folder(project_name, folder_id, file_name)
    url = '{}data/v1/projects/{}/items'.format(self.DOMAIN, project_id)
    logger.info('Starting version create at %s for file_name %s, folder %s, object %s',
                url, file_name, folder_id, object_id)
    if existing_object_id:
        logger.info('Creating version for existing object')
        data = self._get_version_data_for_existing_file(file_name, object_id, storage_id)
    else:
        logger.info('Creating version for new object')
        data = self._get_version_json_for_new_file(file_name, folder_id, object_id)
    response = self.session.post(url, json=data, headers={
        'content-type': 'application/vnd.api+json',
        'accept': 'application/vnd.api+json'
    })
    if response.status_code != status.HTTP_201_CREATED:
        logger.warning('Version create for %s failed with status %s: %s', file_name, response.status_code,
                       response.content)
        return None
    return json.loads(response.content)
def _get_version_json_for_new_file(self, file_name, folder_id, object_id):
    return {
        "jsonapi": {"version": "1.0"},
        "data": {
            "type": "items",
            "attributes": {
                "displayName": file_name,
                "extension": {
                    "type": "items:autodesk.bim360:File",
                    "version": "1.0"
                }
            },
            "relationships": {
                "tip": {
                    "data": {
                        "type": "versions",
                        "id": "1"
                    }
                },
                "parent": {
                    "data": {
                        "type": "folders",
                        "id": folder_id
                    }
                }
            }
        },
        "included": [
            {
                "type": "versions",
                "id": "1",
                "attributes": {
                    "name": file_name,
                    "extension": {
                        "type": "versions:autodesk.bim360:File",
                        "version": "1.0"
                    }
                },
                "relationships": {
                    "storage": {
                        "data": {
                            "type": "objects",
                            "id": object_id
                        }
                    }
                }
            }
        ]
    }
def _get_version_data_for_existing_file(self, file_name, object_id, storage_id):
    return {
        "jsonapi": {
            "version": "1.0"
        },
        "data": {
            "type": "versions",
            "attributes": {
                "name": file_name,
                "extension": {
                    "type": "versions:autodesk.bim360:File",
                    "version": "1.0"
                }
            },
            "relationships": {
                "item": {
                    "data": {
                        "type": "items",
                        "id": object_id
                    }
                },
                "storage": {
                    "data": {
                        "type": "objects",
                        "id": storage_id
                    }
                }
            }
        }
    }
Your payloads are correct. To create the second version, you have to send your request to the CreateVersion endpoint.
url = '{}data/v1/projects/{}/versions'.format(self.DOMAIN, project_id)
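In other words, the method in the question always posts to the items endpoint. A minimal sketch of how the branch inside create_version_for_file could pick the endpoint per case, keeping the question's helper names (an assumption, not the tutorial's exact code):

if existing_object_id:
    # Existing item: new versions are POSTed to the versions endpoint.
    url = '{}data/v1/projects/{}/versions'.format(self.DOMAIN, project_id)
    data = self._get_version_data_for_existing_file(file_name, object_id, storage_id)
else:
    # New item: the first version is created through the items endpoint.
    url = '{}data/v1/projects/{}/items'.format(self.DOMAIN, project_id)
    data = self._get_version_json_for_new_file(file_name, folder_id, object_id)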
You must be missing a small detail that keeps it from working, but with so little description nobody can tell what it is. In order to create a new version, you first need to create a new storage location, upload the file to that location, and post a new version referencing the existing item id.
Alternatively, take a look at my Node.js sample that uploads a file and creates a new item, or a new version if the item already exists.
The data you are sending to update the version seems incorrect to me. Try the following steps.
1. Create a new storage location in the same folder where V1 of your file is located.
2. Upload V2 of your file to the new storage location created in the step above.
3. Get the item ID of the resource from the manifest result of your successful upload of V1, or call the following endpoint to obtain the ID: https://developer.autodesk.com/en/docs/data/v2/reference/http/projects-project_id-folders-folder_id-contents-GET
Once you have all this information, the data you pass in will look like this.
{
    "jsonapi": { "version": "1.0" },
    "data": {
        "type": "versions",
        "attributes": {
            "name": "v2File.rvt",
            "extension": { "type": "versions:autodesk.bim360:File", "version": "1.0" }
        },
        "relationships": {
            "item": { "data": { "type": "items", "id": "urn:adsk.wipprod:dm.lineage:8jehs1iSQGu_wXpZ977bjA" } },
            "storage": { "data": { "type": "objects", "id": "urn:adsk.objects:os.object:wip.dm.prod/a7fde689-62cf-49c1-9273-8df976568996.rvt" } }
        }
    }
}
The only difference I can think of is that your objectId is not correct. I have uploaded both versions with the API to confirm this payload works.
Related
I am trying to construct a Python request based on a swagger.json schema. It mentions multipart/form-data, and I did some research. The remaining issue is the "array" type; I am not sure how to handle it. Below is the swagger.json schema.
"requestBody": {
"required": true,
"content": {
"multipart/form-data": {
"schema": {
"type": "object",
"properties": {
"id": {
"type": "string"
},
"name": {
"type": "string"
},
"file": {
"items": {
"type": "string",
"format": "binary"
},
"type": "array"
}
},
"required": [
"id",
"name",
"file"
]
}
}
}
}
I found that the files parameter of the Python requests module can handle multipart forms (How to send a "multipart/form-data" with requests in python?), but I don't know how to handle the 'file' part, which is an array here. If it were not an array but just one object, I would go with 'file': ('testfile', open('testfile', 'rb')).
Currently the UI side has not been deployed, so I cannot test. Could anyone help here? Thanks.
data = {
    'id': test_id,
    'name': test_name,
    'file': []
}
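For what it's worth, requests also accepts files as a list of (field_name, file_tuple) pairs, so an array field can be sent by repeating the same field name. A minimal sketch, with a hypothetical URL and file names:

import requests

url = 'http://localhost:8000/upload'  # hypothetical endpoint

# requests sends one multipart part per (field_name, file_tuple) pair,
# so repeating 'file' produces an array of files on the server side.
files = [
    ('file', ('test1.bin', open('test1.bin', 'rb'), 'application/octet-stream')),
    ('file', ('test2.bin', open('test2.bin', 'rb'), 'application/octet-stream')),
]
data = {'id': 'test_id', 'name': 'test_name'}

response = requests.post(url, data=data, files=files)
print(response.status_code, response.text)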
I have this document in MongoDB:
{
    "_id": {
        "$oid": "62644af0368cb0a46d7c2a95"
    },
    "insertionData": "23/04/2022 19:50:50",
    "ipfsMetadata": {
        "Name": "data.json",
        "Hash": "Qmb3FWgyJHzJA7WCBX1phgkV93GiEQ9UDWUYffDqUCbe7E",
        "Size": "431"
    },
    "metadata": {
        "sessionDate": "20220415 17:42:55",
        "dataSender": "user345",
        "data": {
            "height": "180",
            "weight": "80"
        },
        "addtionalInformation": [
            {
                "name": "poolsize",
                "value": "30m"
            },
            {
                "name": "swimStyle",
                "value": "mariposa"
            },
            {
                "name": "modality",
                "value": "swim"
            },
            {
                "name": "gender-title",
                "value": "schoolA"
            }
        ]
    },
    "fileId": {
        "$numberLong": "4"
    }
}
I want to update a document in the nested array, for instance the entry whose name is gender-title. It currently has the value schoolA and I want to change it to adultos, as in the body below. I pass the fileId number as a parameter in the PUT request:
PUT request: localhost/sessionUpdate/4
and body:
{
    "name": "gender-title",
    "value": "adultos"
}
Flask:
@app.route('/sessionUpdate/<string:a>', methods=['PUT'])
def sessionUpdate(a):
    datas = request.json
    r = str(datas['name'])
    r2 = str(datas['value'])
    print(r, r2)
    r3 = collection.update_one({'fileId': a, 'metadata.addtionalInformation': r},
                               {'$set': {'metadata.addtionalInformation.$.value': r2}})
    return str(r3), 200
I'm getting the 200, but the document doesn't update with the new value.
As you are using the positional operator $ to work with your array, make sure your selection query targets an array element. You can see in the query below that it targets the metadata.addtionalInformation array with the condition name: "gender-title".
db.collection.update({
    "fileId": 4,
    "metadata.addtionalInformation.name": "gender-title"
},
{
    "$set": {
        "metadata.addtionalInformation.$.value": "junior"
    }
})
Here is the Mongo playground for your reference.
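Translated back to the question's Flask handler, a minimal sketch of the same fix in pymongo, assuming the same collection and app objects as in the question. Note that fileId is stored as a number, so the URL parameter should be converted to an int, and the filter must target the element's name field:

@app.route('/sessionUpdate/<int:a>', methods=['PUT'])
def sessionUpdate(a):
    datas = request.json
    # Match the array element by its "name" so the positional $ operator
    # knows which element to update.
    r3 = collection.update_one(
        {'fileId': a, 'metadata.addtionalInformation.name': datas['name']},
        {'$set': {'metadata.addtionalInformation.$.value': datas['value']}}
    )
    return str(r3.modified_count), 200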
I need to parse a Terraform state file, written in JSON format. I have to extract two pieces of data, the resource name and its id. This is an example file:
{
    "version": 1,
    "serial": 1,
    "modules": [
        {
            "path": [
                "root"
            ],
            "outputs": {},
            "resources": {
                "aws_security_group.vpc-xxxxxxx-test-1": {
                    "type": "aws_security_group",
                    "primary": {
                        "id": "sg-xxxxxxxxxxxxxx",
                        "attributes": {
                            "description": "test-1",
                            "name": "test-1"
                        }
                    }
                },
                "aws_security_group.vpc-xxxxxxx-test-2": {
                    "type": "aws_security_group",
                    "primary": {
                        "id": "sg-yyyyyyyyyyyy",
                        "attributes": {
                            "description": "test-2",
                            "name": "test-2"
                        }
                    }
                }
            }
        }
    ]
}
For each resource, I need to extract the key and its id value; in this case, aws_security_group.vpc-xxxxxxx-test-1 sg-xxxxxxxxxxxxxx and aws_security_group.vpc-xxxxxxx-test-2 sg-yyyyyyyyyyyy.
I have tried to write this in Python:
#!/usr/bin/python3.6
import json
import objectpath

with open('file.json') as json_file:
    data = json.load(json_file)
    json_tree = objectpath.Tree(data['modules'])
    result = tuple(json_tree.execute('$..resources[0]'))
The result is:
('aws_security_group.vpc-xxxxxxx-test-1', 'aws_security_group.vpc-xxxxxxx-test-2')
That's OK, but I can't extract the id. Any help is appreciated; other approaches are welcome too.
Thanks
I don't know objectpath, but I think you need:
tree.execute('$..resources[0]..primary.id')
or even just
tree.execute('$..resources[0]..id')
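Since the question allows other methods, this can also be done with plain json and a couple of loops, with no objectpath at all. A minimal sketch, assuming the structure shown in the example file:

import json

with open('file.json') as json_file:
    data = json.load(json_file)

# Each module holds a resources dict; pair every resource key with the
# id stored under its "primary" block.
for module in data['modules']:
    for name, resource in module['resources'].items():
        print(name, resource['primary']['id'])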
I have got this response from the Facebook Graph API:
{
    "taggable_friends": {
        "data": [
            {
                "name": "Friend1 Name",
                "picture": {
                    "data": {
                        "url": "https://fb-s-c-a.akamaihd.net/h-ak-fbx/v/t1.0-1/p200x200/completeUrl1"
                    }
                },
                "id": "response1d"
            },
            {
                "name": "Friend2 name",
                "picture": {
                    "data": {
                        "url": "https://fb-s-a-a.akamaihd.net/h-ak-fbx/v/t1.0-1/p200x200/completeURL2"
                    }
                },
                "id": "responseid2"
            }
        ],
        "paging": {
            "cursors": {
                "before": "xyz",
                "after": "abc"
            },
            "next": "NextpageURl"
        }
    },
    "id": "xxxxxxxxx"
}
I want to extract the URL part of the Graph API response under the taggable_friends field.
I have tried something like this:
for friends in data_json_liked_pages['taggable_friends']['data']:
    friend_url = friends['picture']['data']['url']
    print friend_url
I am getting the following error:
Exception Type: TypeError
Exception Value: list indices must be integers, not str
What can I do to improve this?
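The TypeError suggests data_json_liked_pages holds a list at some level rather than the dict shown above (an assumption based only on the error message). A minimal, self-contained sketch that unwraps a list before indexing by key:

import json

# Tiny inline sample mirroring the shape of the response in the question.
raw_response = '''{"taggable_friends": {"data": [
    {"name": "Friend1 Name",
     "picture": {"data": {"url": "https://example.invalid/p200x200/url1"}},
     "id": "response1d"}]}}'''

data_json_liked_pages = json.loads(raw_response)

# If the variable actually holds a list (which would raise exactly this
# TypeError), unwrap it before indexing with a string key.
if isinstance(data_json_liked_pages, list):
    data_json_liked_pages = data_json_liked_pages[0]

for friend in data_json_liked_pages['taggable_friends']['data']:
    print(friend['picture']['data']['url'])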
I'm using Python to add entries to a local Elasticsearch instance (localhost:9200).
Currently, I use this method:
def insertintoes(data):
    """
    Insert data into Elasticsearch
    :param data: dict
    :return:
    """
    timestamp = data.get('@timestamp')
    logstashIndex = 'logstash-' + timestamp.strftime("%Y.%m.%d")
    es = Elasticsearch()
    if not es.indices.exists(logstashIndex):
        # Setting mappings for index
        mapping = '''
        {
          "mappings": {
            "_default_": {
              "_all": {
                "enabled": true,
                "norms": false
              },
              "dynamic_templates": [
                {
                  "message_field": {
                    "path_match": "message",
                    "match_mapping_type": "string",
                    "mapping": {
                      "norms": false,
                      "type": "text"
                    }
                  }
                },
                {
                  "string_fields": {
                    "match": "*",
                    "match_mapping_type": "string",
                    "mapping": {
                      "fields": {
                        "keyword": {
                          "type": "keyword"
                        }
                      },
                      "norms": false,
                      "type": "text"
                    }
                  }
                }
              ],
              "properties": {
                "@timestamp": {
                  "type": "date",
                  "include_in_all": true
                },
                "@version": {
                  "type": "keyword",
                  "include_in_all": true
                }
              }
            }
          }
        }
        '''
        es.indices.create(logstashIndex, ignore=400, body=mapping)
    es.index(index=logstashIndex, doc_type='system', timestamp=timestamp, body=data)
data is a dict structure with a valid @timestamp defined like this: data['@timestamp'] = datetime.datetime.now()
The problem is, even though there is a timestamp value in my data, Kibana doesn't show the entry in the «Discover» tab. :(
Here is an example of a full entry in Elasticsearch:
{
    "_index": "logstash-2017.06.25",
    "_type": "system",
    "_id": "AVzf3QX3iazKBndbIkg4",
    "_score": 1,
    "_source": {
        "priority": 6,
        "uid": 0,
        "gid": 0,
        "systemd_slice": "system.slice",
        "cap_effective": "1fffffffff",
        "exe": "/usr/bin/bash",
        "hostname": "ns3003395",
        "syslog_facility": 9,
        "comm": "crond",
        "systemd_cgroup": "/system.slice/cronie.service",
        "systemd_unit": "cronie.service",
        "syslog_identifier": "CROND",
        "message": "(root) CMD (/usr/local/rtm/bin/rtm 14 > /dev/null 2> /dev/null)",
        "systemd_invocation_id": "9228b6c72e6a4624a1806e4c59af8d04",
        "syslog_pid": 26652,
        "pid": 26652,
        "@timestamp": "2017-06-25T17:27:01.734453"
    }
}
As you can see, there IS a @timestamp field, but it doesn't seem to be what Kibana expects.
And I don't know what to do to make my entries visible in Kibana.
Any idea?
Elasticsearch is not recognizing @timestamp as a date, but as a string. If your data['@timestamp'] is a datetime object, you can try converting it to an ISO string, which is automatically recognized. Try:
timestamp = data.get('@timestamp').isoformat()
timestamp will now be a string in ISO format.
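Put together, a minimal sketch of the fix (assuming the same indexing flow as insertintoes() above; the index naming and doc_type are kept from the question's older client, as doc_type was removed in newer Elasticsearch versions):

import datetime
from elasticsearch import Elasticsearch

# With @timestamp serialized as an ISO 8601 string, Elasticsearch's date
# detection maps it as a date and Kibana can use it as the time field.
data = {'message': 'hello', '@timestamp': datetime.datetime.now().isoformat()}

es = Elasticsearch()
logstashIndex = 'logstash-' + datetime.date.today().strftime('%Y.%m.%d')
es.index(index=logstashIndex, doc_type='system', body=data)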