While trying to convert the JSON output below to CSV, I am getting an error.
Here is the JSON output
{
"data": [
{
"id": "-1000100591151294842",
"type": "fres",
"attributes": {
"operationState": "In Service",
"deploymentState": "discovered",
"displayData": {
"operationState": "Up",
"adminState": "Enabled",
"displayTopologySource": "Protocol,Derived",
"displayPhotonicSpectrumData": [
{
"frequency": "194.950000",
"wavelength": "1537.79",
"channel": "CH-20"
}
],
"displayDeploymentState": "Discovered",
"displayName": "J-BBEG-CHLC-P109"
},
"utilizationData": {
"totalCapacity": "100.0",
"usedCapacity": "100.0",
"utilizationPercent": "100",
"capacityUnits": "Gbps"
},
"resourceState": "discovered",
"serviceClass": "OTU",
"linkLabel": "BBEG-ROADM-0101:5-4-1,CHLC-ROADM-0401:7-35-1",
"lastUpdatedAdminStateTimeStamp": "2021-05-03T00:29:24.444Z",
"lastUpdatedOperationalStateTimeStamp": "2022-12-08T22:42:21.567Z",
"userLabel": "J-BBEG-CHLC-P109",
"mgmtName": "",
"nativeName": "",
"awarenessTime": "2022-12-08T22:42:22.123Z",
"layerRate": "OTU4",
"layerRateQualifier": "OTU4",
"supportedByLayerRatePackageList": [
{
"layerRate": "OTSi",
"layerRateQualifier": "100G"
}
],
"networkRole": "FREAP",
"directionality": "bidirectional",
"topologySources": [
"adjacency",
"stitched"
],
"adminState": "In Service",
"photonicSpectrumPackageList": [
{
"frequency": "194.950000",
"width": "37.5"
}
],
"active": true,
"additionalAttributes": {
"isActual": "true",
"hasLowerTopology": "true"
},
"reliability": "auto",
"resilienceLevel": "unprotected"
},
"relationships": {
"freDiscovered": {
"data": {
"type": "freDiscovered",
"id": "-1000100591151294842"
}
},
"supportedByServices": {
"data": [
{
"type": "fres",
"id": "6765278351459212874"
}
]
},
"endPoints": {
"data": [
{
"type": "endPoints",
"id": "-1000100591151294842:1"
},
{
"type": "endPoints",
"id": "-1000100591151294842:2"
}
]
},
"partitionFres": {
"data": [
{
"type": "fres",
"id": "7147507956181395827"
}
]
}
}
},
{
"id": "-1013895107051577774",
"type": "fres",
"attributes": {
"operationState": "In Service",
"deploymentState": "discovered",
"displayData": {
"operationState": "Up",
"adminState": "Enabled",
"displayTopologySource": "Protocol,Derived",
"displayPhotonicSpectrumData": [
{
"frequency": "191.600000",
"wavelength": "1564.68",
"channel": "CH-87"
}
],
"displayDeploymentState": "Discovered",
"displayName": "J-KFF9-PNTH-P101"
},
"utilizationData": {
"totalCapacity": "100.0",
"usedCapacity": "90.0",
"utilizationPercent": "90",
"capacityUnits": "Gbps"
},
"resourceState": "discovered",
"serviceClass": "OTU",
"tags": [
"J-KFF9-PNTH-P101"
],
"linkLabel": "KFF9-ROADM-0301:1-1-1,PNTH-ROADM-0101:1-1-1",
"lastUpdatedAdminStateTimeStamp": "2021-09-12T20:22:59.334Z",
"lastUpdatedOperationalStateTimeStamp": "2022-10-12T14:20:44.779Z",
"userLabel": "J-KFF9-PNTH-P101",
"mgmtName": "",
"nativeName": "",
"awarenessTime": "2022-10-12T14:20:45.417Z",
"layerRate": "OTU4",
"layerRateQualifier": "OTU4",
"supportedByLayerRatePackageList": [
{
"layerRate": "OTSi",
"layerRateQualifier": "100G"
}
],
"networkRole": "FREAP",
"directionality": "bidirectional",
"topologySources": [
"adjacency",
"stitched"
],
"adminState": "In Service",
"photonicSpectrumPackageList": [
{
"frequency": "191.600000",
"width": "37.5"
}
],
"active": true,
"additionalAttributes": {
"isActual": "true",
"hasLowerTopology": "true"
},
"reliability": "auto",
"resilienceLevel": "unprotected"
},
"relationships": {
"freDiscovered": {
"data": {
"type": "freDiscovered",
"id": "-1013895107051577774"
}
},
"supportedByServices": {
"data": [
{
"type": "fres",
"id": "6055685088078365419"
}
]
},
"endPoints": {
"data": [
{
"type": "endPoints",
"id": "-1013895107051577774:1"
},
{
"type": "endPoints",
"id": "-1013895107051577774:2"
}
]
},
"partitionFres": {
"data": [
{
"type": "fres",
"id": "-6727082893715936342"
}
]
}
}
}
] }
I am getting the error below and am not sure what is missing.
Here is the Python script I used. I have been trying different variations, but with no luck; I get different errors in every other case.
import json
from pathlib import Path

filename = Path('fre.json')
data = []
with open(filename, 'r') as json_file:
    data_str = json_file.read()
    data_str = data_str.split('[', 1)[-1]
    data_str = data_str.rsplit(']', 1)[0]
    data_str = data_str.split('][')
    for jsonStr in data_str:
        jsonStr = '[' + jsonStr + ']'
        temp_data = json.loads(jsonStr)
        for each in temp_data:
            data.append(each)
What is wrong?
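For comparison, the file above is already one valid JSON document, so it can be parsed directly and flattened to CSV without any string splitting. A minimal sketch, assuming pandas is acceptable (json_normalize and the output filename are my additions, not part of the original script):
import json
import pandas as pd

# Parse the file directly -- it is already a single valid JSON document.
with open('fre.json', 'r') as json_file:
    payload = json.load(json_file)

# Flatten the records under "data" into columns; nested lists stay as
# Python objects in their cells. The output filename is arbitrary.
df = pd.json_normalize(payload['data'])
df.to_csv('fre_output.csv', index=False)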
Related
I have a JSON file named input, which is as follows:
{
"abc": {
"dbc": {
"type": "string",
"metadata": {
"description": "Name of the namespace"
}
},
"fgh": {
"type": "string",
"metadata": {
"description": "Name of the Topic"
}
}
},
"resources": [
{
"sku": {
"name": "[parameters('sku')]"
},
"properties": {},
"resources": [
{
"resources": [
{
"resources": [
{
"properties": {
"filterType": "SqlFilter",
"sqlFilter": {
"sqlExpression": "HAI"
}
}
}
]
}
]
}
]
}
]
}
I want the "sqlExpression": "HAI" value to be replaced with "BYE", as below:
"sqlExpression": "BYE"
I want Python code to do it. I tried the code below, but it is not working:
input['resources'][0]['resources'][0]['resources'][0]['resources'][0][properties][0][sqlFilter][0][sqlExpression][0]='BYE'
inp = {
"abc": {
"dbc": {
"type": "string",
"metadata": {
"description": "Name of the namespace"
}
},
"fgh": {
"type": "string",
"metadata": {
"description": "Name of the Topic"
}
}
},
"resources": [
{
"sku": {
"name": "[parameters('sku')]"
},
"properties": {},
"resources": [
{
"resources": [
{
"resources": [
{
"properties": {
"filterType": "SqlFilter",
"sqlFilter": {
"sqlExpression": "HAI"
}
}
}
]
}
]
}
]
}
]
}
inp['resources'][0]['resources'][0]['resources'][0]['resources'][0]['properties']['sqlFilter']['sqlExpression']='BYE'
print(inp)
Result
{'abc': {'dbc': ...truncated... {'sqlExpression': 'BYE'}}}]}]}]}]}
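If the nesting depth is not fixed, a small recursive helper is an alternative to hard-coding the index path. This is only a sketch; the function name replace_sql_expression is mine, not from the question:
def replace_sql_expression(node, new_value):
    # Walk dicts and lists recursively and overwrite every
    # "sqlExpression" key that is found.
    if isinstance(node, dict):
        for key, value in node.items():
            if key == 'sqlExpression':
                node[key] = new_value
            else:
                replace_sql_expression(value, new_value)
    elif isinstance(node, list):
        for item in node:
            replace_sql_expression(item, new_value)

replace_sql_expression(inp, 'BYE')
print(inp)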
I have two files:
kube-apiserver.json
{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
[...]
},
"spec": {
"containers": [
{
"command": [
"kube-apiserver",
"--advertise-address=192.168.49.2",
"--allow-privileged=true",
"--authorization-mode=Node,RBAC",
"--client-ca-file=/var/lib/minikube/certs/ca.crt",
"--enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota",
"--enable-bootstrap-token-auth=true",
"--etcd-cafile=/var/lib/minikube/certs/etcd/ca.crt",
"--etcd-certfile=/var/lib/minikube/certs/apiserver-etcd-client.crt",
"--etcd-keyfile=/var/lib/minikube/certs/apiserver-etcd-client.key",
"--etcd-servers=https://127.0.0.1:2379",
"--kubelet-client-certificate=/var/lib/minikube/certs/apiserver-kubelet-client.crt",
"--kubelet-client-key=/var/lib/minikube/certs/apiserver-kubelet-client.key",
"--kubelet-preferred-address-types=InternalIP,ExternalIP,Hostname",
"--proxy-client-cert-file=/var/lib/minikube/certs/front-proxy-client.crt",
"--proxy-client-key-file=/var/lib/minikube/certs/front-proxy-client.key",
"--requestheader-allowed-names=front-proxy-client",
"--requestheader-client-ca-file=/var/lib/minikube/certs/front-proxy-ca.crt",
"--requestheader-extra-headers-prefix=X-Remote-Extra-",
"--requestheader-group-headers=X-Remote-Group",
"--requestheader-username-headers=X-Remote-User",
"--secure-port=8443",
"--service-account-issuer=https://kubernetes.default.svc.cluster.local",
"--service-account-key-file=/var/lib/minikube/certs/sa.pub",
"--service-account-signing-key-file=/var/lib/minikube/certs/sa.key",
"--service-cluster-ip-range=10.96.0.0/12",
"--tls-cert-file=/var/lib/minikube/certs/apiserver.crt",
"--tls-private-key-file=/var/lib/minikube/certs/apiserver.key"
],
[...]
"volumeMounts": [
{
"mountPath": "/etc/ssl/certs",
"name": "ca-certs",
"readOnly": true
},
{
"mountPath": "/etc/ca-certificates",
"name": "etc-ca-certificates",
"readOnly": true
},
{
"mountPath": "/var/lib/minikube/certs",
"name": "k8s-certs",
"readOnly": true
},
{
"mountPath": "/usr/local/share/ca-certificates",
"name": "usr-local-share-ca-certificates",
"readOnly": true
},
{
"mountPath": "/usr/share/ca-certificates",
"name": "usr-share-ca-certificates",
"readOnly": true
}
]
}
],
[...]
"volumes": [
{
"hostPath": {
"path": "/etc/ssl/certs",
"type": "DirectoryOrCreate"
},
"name": "ca-certs"
},
{
"hostPath": {
"path": "/etc/ca-certificates",
"type": "DirectoryOrCreate"
},
"name": "etc-ca-certificates"
},
{
"hostPath": {
"path": "/var/lib/minikube/certs",
"type": "DirectoryOrCreate"
},
"name": "k8s-certs"
},
{
"hostPath": {
"path": "/usr/local/share/ca-certificates",
"type": "DirectoryOrCreate"
},
"name": "usr-local-share-ca-certificates"
},
{
"hostPath": {
"path": "/usr/share/ca-certificates",
"type": "DirectoryOrCreate"
},
"name": "usr-share-ca-certificates"
}
]
},
"status": {
[...]
}
}
and patch.json
{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
},
"spec": {
"containers": [
{
"command": [
"--audit-policy-file=/etc/kubernetes/audit-policy.yaml",
"--audit-log-path=/var/log/kubernetes/audit/audit.log"
],
"volumeMounts": [
{
"mountPath": "/etc/kubernetes/audit-policy.yaml",
"name": "audit",
"readOnly": true
},
{
"mountPath": "/var/log/kubernetes/audit/",
"name": "audit-log",
"readOnly": true
}
]
}
],
"volumes": [
{
"hostPath": {
"path": "/etc/kubernetes/audit-policy.yaml",
"type": "FileOrCreate"
},
"name": "audit"
},
{
"hostPath": {
"path": "/var/log/kubernetes/audit/",
"type": "DirectoryOrCreate"
},
"name": "audit-log"
}
]
},
"status": {
}
}
When I try to do jq -s '.[0] * .[1]' kube-apiserver.json patch.json > patched-apiserver.json, items that are in patch.json override the items from kube-apiserver.json, so it looks like this:
{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
[...]
"spec": {
"containers": [
{
"command": [
"--audit-policy-file=/etc/kubernetes/audit-policy.yaml",
"--audit-log-path=/var/log/kubernetes/audit/audit.log"
],
"volumeMounts": [
{
"mountPath": "/etc/kubernetes/audit-policy.yaml",
"name": "audit",
"readOnly": true
},
{
"mountPath": "/var/log/kubernetes/audit/",
"name": "audit-log",
"readOnly": true
}
]
}
],
[..].
"volumes": [
{
"hostPath": {
"path": "/etc/kubernetes/audit-policy.yaml",
"type": "FileOrCreate"
},
"name": "audit"
},
{
"hostPath": {
"path": "/var/log/kubernetes/audit/",
"type": "DirectoryOrCreate"
},
"name": "audit-log"
}
]
},
"status": {
[...]
}
}
and I would like my file to look like this:
{
"apiVersion": "v1",
"kind": "Pod",
"metadata": {
[...]
},
"spec": {
"containers": [
{
"command": [
"kube-apiserver",
"--advertise-address=192.168.49.2",
"--allow-privileged=true",
"--authorization-mode=Node,RBAC",
"--client-ca-file=/var/lib/minikube/certs/ca.crt",
"--enable-admission-plugins=NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota",
"--enable-bootstrap-token-auth=true",
"--etcd-cafile=/var/lib/minikube/certs/etcd/ca.crt",
"--etcd-certfile=/var/lib/minikube/certs/apiserver-etcd-client.crt",
"--etcd-keyfile=/var/lib/minikube/certs/apiserver-etcd-client.key",
"--etcd-servers=https://127.0.0.1:2379",
"--kubelet-client-certificate=/var/lib/minikube/certs/apiserver-kubelet-client.crt",
"--kubelet-client-key=/var/lib/minikube/certs/apiserver-kubelet-client.key",
"--kubelet-preferred-address-types=InternalIP,ExternalIP,Hostname",
"--proxy-client-cert-file=/var/lib/minikube/certs/front-proxy-client.crt",
"--proxy-client-key-file=/var/lib/minikube/certs/front-proxy-client.key",
"--requestheader-allowed-names=front-proxy-client",
"--requestheader-client-ca-file=/var/lib/minikube/certs/front-proxy-ca.crt",
"--requestheader-extra-headers-prefix=X-Remote-Extra-",
"--requestheader-group-headers=X-Remote-Group",
"--requestheader-username-headers=X-Remote-User",
"--secure-port=8443",
"--service-account-issuer=https://kubernetes.default.svc.cluster.local",
"--service-account-key-file=/var/lib/minikube/certs/sa.pub",
"--service-account-signing-key-file=/var/lib/minikube/certs/sa.key",
"--service-cluster-ip-range=10.96.0.0/12",
"--tls-cert-file=/var/lib/minikube/certs/apiserver.crt",
"--tls-private-key-file=/var/lib/minikube/certs/apiserver.key",
"--audit-policy-file=/etc/kubernetes/audit-policy.yaml",
"--audit-log-path=/var/log/kubernetes/audit/audit.log"
],
[...]
"volumeMounts": [
{
"mountPath": "/etc/ssl/certs",
"name": "ca-certs",
"readOnly": true
},
{
"mountPath": "/etc/ca-certificates",
"name": "etc-ca-certificates",
"readOnly": true
},
{
"mountPath": "/var/lib/minikube/certs",
"name": "k8s-certs",
"readOnly": true
},
{
"mountPath": "/usr/local/share/ca-certificates",
"name": "usr-local-share-ca-certificates",
"readOnly": true
},
{
"mountPath": "/usr/share/ca-certificates",
"name": "usr-share-ca-certificates",
"readOnly": true
},
{
"mountPath": "/etc/kubernetes/audit-policy.yaml",
"name": "audit",
"readOnly": true
},
{
"mountPath": "/var/log/kubernetes/audit/",
"name": "audit-log",
"readOnly": true
}
]
}
],
[...]
"volumes": [
{
"hostPath": {
"path": "/etc/ssl/certs",
"type": "DirectoryOrCreate"
},
"name": "ca-certs"
},
{
"hostPath": {
"path": "/etc/ca-certificates",
"type": "DirectoryOrCreate"
},
"name": "etc-ca-certificates"
},
{
"hostPath": {
"path": "/var/lib/minikube/certs",
"type": "DirectoryOrCreate"
},
"name": "k8s-certs"
},
{
"hostPath": {
"path": "/usr/local/share/ca-certificates",
"type": "DirectoryOrCreate"
},
"name": "usr-local-share-ca-certificates"
},
{
"hostPath": {
"path": "/usr/share/ca-certificates",
"type": "DirectoryOrCreate"
},
"name": "usr-share-ca-certificates"
},
{
"hostPath": {
"path": "/etc/kubernetes/audit-policy.yaml",
"type": "FileOrCreate"
},
"name": "audit"
},
{
"hostPath": {
"path": "/var/log/kubernetes/audit/",
"type": "DirectoryOrCreate"
},
"name": "audit-log"
}
]
},
"status": {
[...]
}
}
Does anyone know how to solve it with jq/python/bash/whatever?
jq --slurpfile patch patch.json '
(.spec.containers |= map(.command |= (. + $patch[].spec.containers[0].command | unique) |
.volumeMounts |= (. + $patch[].spec.containers[0].volumeMounts | unique))) |
(.spec.volumes |= (. + $patch[].spec.volumes | unique))
' kube-apiserver.json
I have added unique to ensure that commands, volumeMounts and volumes only appear once. This has the side effect of sorting the arrays.
You can remove unique if you do not want it.
There is a problem in your question: spec.containers is an array. In your example, this array contains only one element and my code adds the first element from the patch to each container.
Or would you like the patch to be merged based on position? In that case this solution does not work.
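If a Python version is easier to adapt, here is a rough sketch of the same idea (no unique deduplication; like the jq answer, it assumes the patch holds a single container whose arrays should be appended to every container in the base file):
import json

with open('kube-apiserver.json') as f:
    base = json.load(f)
with open('patch.json') as f:
    patch = json.load(f)

# Append the patch container's command and volumeMounts to every base
# container, then append the patch volumes at the pod level.
patch_container = patch['spec']['containers'][0]
for container in base['spec']['containers']:
    container['command'] += patch_container['command']
    container['volumeMounts'] += patch_container['volumeMounts']
base['spec']['volumes'] += patch['spec']['volumes']

with open('patched-apiserver.json', 'w') as out:
    json.dump(base, out, indent=2)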
I currently have two JSON files that I want to merge into a single JSON file; additionally, I want to make a slight change.
First, here are the two JSON files in question.
An intents JSON:
[
{
"ID": "G1",
"intent": "password_reset",
"examples": [
{
"text": "I forgot my password"
},
{
"text": "I can't log in"
},
{
"text": "I can't access the site"
},
{
"text": "My log in is failing"
},
{
"text": "I need to reset my password"
}
]
},
{
"ID": "G2",
"intent": "account_closure",
"examples": [
{
"text": "I want to close my account"
},
{
"text": "I want to terminate my account"
}
]
},
{
"ID": "G3",
"intent": "account_creation",
"examples": [
{
"text": "I want to open an account"
},
{
"text": "Create account"
}
]
},
{
"ID": "G4",
"intent": "complaint",
"examples": [
{
"text": "A member of staff was being rude"
},
{
"text": "I have a complaint"
}
]
}
]
and an entities JSON:
[
{
"ID": "K1",
"entity": "account_type",
"values": [
{
"type": "synonyms",
"value": "business",
"synonyms": [
"corporate"
]
},
{
"type": "synonyms",
"value": "personal",
"synonyms": [
"vanguard",
"student"
]
}
]
},
{
"ID": "K2",
"entity": "beverage",
"values": [
{
"type": "synonyms",
"value": "hot",
"synonyms": [
"heated",
"warm"
]
},
{
"type": "synonyms",
"value": "cold",
"synonyms": [
"ice",
"freezing"
]
}
]
}
]
The expected outcome is to create a JSON file that mimics this structure:
{
"intents": [
{
"intent": "password_reset",
"examples": [
{
"text": "I forgot my password"
},
{
"text": "I want to reset my password"
}
],
"description": "Reset a user password"
}
],
"entities": [
{
"entity": "account_type",
"values": [
{
"type": "synonyms",
"value": "business",
"synonyms": [
"company",
"corporate",
"enterprise"
]
},
{
"type": "synonyms",
"value": "personal",
"synonyms": []
}
],
"fuzzy_match": true
}
],
"metadata": {
"api_version": {
"major_version": "v2",
"minor_version": "2018-11-08"
}
},
"dialog_nodes": [
{
"type": "standard",
"title": "anything_else",
"output": {
"generic": [
{
"values": [
{
"text": "I didn't understand. You can try rephrasing."
},
{
"text": "Can you reword your statement? I'm not understanding."
},
{
"text": "I didn't get your meaning."
}
],
"response_type": "text",
"selection_policy": "sequential"
}
]
},
"conditions": "anything_else",
"dialog_node": "Anything else",
"previous_sibling": "node_4_1655399659061",
"disambiguation_opt_out": true
},
{
"type": "event_handler",
"output": {
"generic": [
{
"title": "What type of account do you hold with us?",
"options": [
{
"label": "Personal",
"value": {
"input": {
"text": "personal"
}
}
},
{
"label": "Business",
"value": {
"input": {
"text": "business"
}
}
}
],
"response_type": "option"
}
]
},
"parent": "slot_9_1655398217028",
"event_name": "focus",
"dialog_node": "handler_6_1655398217052",
"previous_sibling": "handler_7_1655398217052"
},
{
"type": "event_handler",
"output": {},
"parent": "slot_9_1655398217028",
"context": {
"account_type": "#account_type"
},
"conditions": "#account_type",
"event_name": "input",
"dialog_node": "handler_7_1655398217052"
},
{
"type": "standard",
"title": "business_account",
"output": {
"generic": [
{
"values": [
{
"text": "We have notified your corporate security team, they will be in touch to reset your password."
}
],
"response_type": "text",
"selection_policy": "sequential"
}
]
},
"parent": "node_3_1655397279884",
"next_step": {
"behavior": "jump_to",
"selector": "body",
"dialog_node": "node_4_1655399659061"
},
"conditions": "#account_type:business",
"dialog_node": "node_1_1655399028379",
"previous_sibling": "node_3_1655399027429"
},
{
"type": "standard",
"title": "intent_collection",
"output": {
"generic": [
{
"values": [
{
"text": "Thank you for confirming that you want to reset your password."
}
],
"response_type": "text",
"selection_policy": "sequential"
}
]
},
"next_step": {
"behavior": "jump_to",
"selector": "body",
"dialog_node": "node_3_1655397279884"
},
"conditions": "#password_reset",
"dialog_node": "node_3_1655396920143",
"previous_sibling": "Welcome"
},
{
"type": "frame",
"title": "account_type_confirmation",
"output": {
"generic": [
{
"values": [
{
"text": "Thank you"
}
],
"response_type": "text",
"selection_policy": "sequential"
}
]
},
"parent": "node_3_1655396920143",
"context": {},
"next_step": {
"behavior": "skip_user_input"
},
"conditions": "#password_reset",
"dialog_node": "node_3_1655397279884"
},
{
"type": "standard",
"title": "personal_account",
"output": {
"generic": [
{
"values": [
{
"text": "We have sent you an email with a password reset link."
}
],
"response_type": "text",
"selection_policy": "sequential"
}
]
},
"parent": "node_3_1655397279884",
"next_step": {
"behavior": "jump_to",
"selector": "body",
"dialog_node": "node_4_1655399659061"
},
"conditions": "#account_type:personal",
"dialog_node": "node_3_1655399027429"
},
{
"type": "standard",
"title": "reset_confirmation",
"output": {
"generic": [
{
"values": [
{
"text": "Do you need assistance with anything else today?"
}
],
"response_type": "text",
"selection_policy": "sequential"
}
]
},
"digress_in": "does_not_return",
"dialog_node": "node_4_1655399659061",
"previous_sibling": "node_3_1655396920143"
},
{
"type": "slot",
"output": {},
"parent": "node_3_1655397279884",
"variable": "$account_type",
"dialog_node": "slot_9_1655398217028",
"previous_sibling": "node_1_1655399028379"
},
{
"type": "standard",
"title": "welcome",
"output": {
"generic": [
{
"values": [
{
"text": "Hello. How can I help you?"
}
],
"response_type": "text",
"selection_policy": "sequential"
}
]
},
"conditions": "welcome",
"dialog_node": "Welcome"
}
],
"counterexamples": [],
"system_settings": {
"off_topic": {
"enabled": true
},
"disambiguation": {
"prompt": "Did you mean:",
"enabled": true,
"randomize": true,
"max_suggestions": 5,
"suggestion_text_policy": "title",
"none_of_the_above_prompt": "None of the above"
},
"human_agent_assist": {
"prompt": "Did you mean:"
},
"intent_classification": {
"training_backend_version": "v2"
},
"spelling_auto_correct": true
},
"learning_opt_out": false,
"name": "Reset Password",
"language": "en",
"description": "Basic Password Reset Request"
}
So what I am missing in my original files is essentially:
"intents":
and for the entities file:
"entities"
at the start of each list of dictionaries.
Additionally, I would need to wrap the whole thing in curly braces to comply with JSON formatting.
As seen, the final goal is not just to append these two files to one another; the file technically continues with some other JSON that I have yet to write and deal with.
My question is: by what method can I add these keys and the braces to the individual files and then combine them into a single JSON file, or alternatively, how can I read the files in and combine them with the changes all in one go?
For now, an output file that closes with a curly brace after the entities list of dicts is an acceptable outcome, so that I can continue to make changes and hopefully learn how to make similar changes in the future.
TIA
JSON is only a string format: you can load it into a language structure (in Python, that is lists and dicts), do what you need, then dump it back. So you don't "add strings" or "add brackets"; you modify the structure.
import json

# load the two lists
with open('intents.txt') as f:
    intents = json.load(f)
with open('entities.txt') as f:
    entities = json.load(f)

# create a dict wrapping both lists
content = {
    "intents": intents,
    "entities": entities
}

# write the combined structure to a new file (the output filename is arbitrary)
with open('combined.json', 'w') as f:
    json.dump(content, f)
If you're reading all the JSON in as a string, you can just prepend '{"intents":' to the start and append a closing '}'.
myJson = "your json string"
myWrappedJson = '{"intents":' + myJson + "}"
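Extending that idea to both files at once (just a sketch; it assumes the two raw strings are read from the same intents.txt and entities.txt used above):
with open('intents.txt') as f:
    intents_str = f.read()
with open('entities.txt') as f:
    entities_str = f.read()

# Wrap both arrays in a single top-level object.
combined = '{"intents":' + intents_str + ',"entities":' + entities_str + '}'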
I'm loading a JSON payload from a stream using json.loads(). I can easily access objects using syntax such as myVar = AgentEvent["EventType"] and I get the expected values. However, I'm having problems accessing the object named "Contacts" and its properties. For example, how can I access myVar = AgentEvent["CurrentAgentSnapshot"]["Contacts"]["Status"]? When I tried, I got an error. Sample JSON below. Any help from Python rock-stars would be much appreciated.
{
"AWSAccountId": "12345678",
"AgentARN": "arn:aws:connect:",
"CurrentAgentSnapshot": {
"AgentStatus": {
"ARN": "arn:aws:connect:",
"Name": "Available",
"StartTimestamp": "2022-03-05T20:35:30.836Z",
"Type": "ROUTABLE"
},
"Configuration": {
"AgentHierarchyGroups": null,
"FirstName": "test",
"LastName": "test",
"RoutingProfile": {
"ARN": "arn:aws:connect:",
"Concurrency": [
{
"AvailableSlots": 0,
"Channel": "CHAT",
"MaximumSlots": 2
},
{
"AvailableSlots": 0,
"Channel": "TASK",
"MaximumSlots": 1
},
{
"AvailableSlots": 0,
"Channel": "VOICE",
"MaximumSlots": 1
}
],
"DefaultOutboundQueue": {
"ARN": "arn:aws:connect:",
"Channels": [
"VOICE"
],
"Name": "BasicQueue"
},
"InboundQueues": [
{
"ARN": "arn:aws:connect:",
"Channels": [
"CHAT",
"TASK",
"VOICE"
],
"Name": "BasicQueue"
},
{
"ARN": "arn:aws:connect:",
"Channels": [
"CHAT",
"TASK",
"VOICE"
],
"Name": null
}
],
"Name": "Basic Routing Profile"
},
"Username": "test"
},
"Contacts": [
{
"Channel": "VOICE",
"ConnectedToAgentTimestamp": "2022-03-05T20:42:14.109Z",
"ContactId": "0a2b3c34-1b7d-4a94-8ff2-e9857d3eac8f",
"InitialContactId": "0a2b3c34-1b7d-4a94-8ff2-e9857d3eac8f",
"InitiationMethod": "INBOUND",
"Queue": {
"ARN": "arn:aws:connect:",
"Name": "BasicQueue"
},
"QueueTimestamp": "2022-03-05T20:42:08.078Z",
"State": "CONNECTED",
"StateStartTimestamp": "2022-03-05T20:51:11.819Z"
}
],
"NextAgentStatus": null
},
"EventId": "ca232cf3-3510-415b-8ff1-8ca89b59194f",
"EventTimestamp": "2022-03-05T21:11:56.315Z",
"EventType": "HEART_BEAT",
"InstanceARN": "arn:aws:connect:",
"PreviousAgentSnapshot": {
"AgentStatus": {
"ARN": "arn:aws:connect:",
"Name": "Available",
"StartTimestamp": "2022-03-05T20:35:30.836Z",
"Type": "ROUTABLE"
},
"Configuration": {
"AgentHierarchyGroups": null,
"FirstName": "test",
"LastName": "test",
"RoutingProfile": {
"ARN": "arn:aws:connect:",
"Concurrency": [
{
"AvailableSlots": 0,
"Channel": "CHAT",
"MaximumSlots": 2
},
{
"AvailableSlots": 0,
"Channel": "TASK",
"MaximumSlots": 1
},
{
"AvailableSlots": 0,
"Channel": "VOICE",
"MaximumSlots": 1
}
],
"DefaultOutboundQueue": {
"ARN": "arn:aws:connect:",
"Channels": [
"VOICE"
],
"Name": "BasicQueue"
},
"InboundQueues": [
{
"ARN": "arn:aws:connect:",
"Channels": [
"CHAT",
"TASK",
"VOICE"
],
"Name": "BasicQueue"
},
{
"ARN": "arn:aws:connect",
"Channels": [
"CHAT",
"TASK",
"VOICE"
],
"Name": null
}
],
"Name": "Basic Routing Profile"
},
"Username": "test"
},
"Contacts": [
{
"Channel": "VOICE",
"ConnectedToAgentTimestamp": "2022-03-05T20:42:14.109Z",
"ContactId": "0a2b3c34-1b7d-4a94-8ff2-e9857d3eac8f",
"InitialContactId": "0a2b3c34-1b7d-4a94-8ff2-e9857d3eac8f",
"InitiationMethod": "INBOUND",
"Queue": {
"ARN": "arn:aws:connect:",
"Name": "BasicQueue"
},
"QueueTimestamp": "2022-03-05T20:42:08.078Z",
"State": "CONNECTED",
"StateStartTimestamp": "2022-03-05T20:51:11.819Z"
}
],
"NextAgentStatus": null
},
"Version": "2017-10-01"}
There are two problems with this line:
AgentEvent["CurrentAgentSnapshot"]["Contacts"]["Status"]
1. The contacts have a State, not a Status.
2. Contacts is a list, not an object, so you need to address its items by index or iterate over them.
AgentEvent["CurrentAgentSnapshot"]["Contacts"][0]["State"]
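For example, to read the state of every contact rather than only the first one, you can iterate over the list (a short sketch based on the payload above):
for contact in AgentEvent["CurrentAgentSnapshot"]["Contacts"]:
    print(contact["ContactId"], contact["Channel"], contact["State"])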
Converting a CSV into a nested JSON file using Pandas
This is the sample CSV for one row:
name,type,aitm,alitm,aaitm,adsc1
specs,glass,70072187,ESA65Z45,ESA 65Z45,CUT TIP FG 1808-40
I'm trying to achieve the nested JSON structure below for every row:
import pandas as pd
import json

df = pd.DataFrame([['specs', 'glass', '70072187', 'ESA65Z45', 'ESA 65Z45', 'CUT TIP FG 1808-40'],
                   ['specs', 'glass', '666', 'ESA6665', 'ESB 666', 'CUT TIP FG 66-40']],
                  columns=['name', 'type', 'aitm', 'alitm', 'aaitm', 'adsc1'])

data = {'entities': []}
for key, grp in df.groupby('name'):
    for idx, row in grp.iterrows():
        # one entity per row, with every non-name/type column as an attribute
        temp_dict_alpha = {'name': key, 'type': row['type'], 'data': {'attributes': {}}}
        attr_row = row[~row.index.isin(['name', 'type'])]
        for idx2, row2 in attr_row.iteritems():
            dict_temp = {}
            dict_temp[idx2] = {'values': []}
            dict_temp[idx2]['values'].append({'value': row2, 'source': 'internal', 'locale': 'en_US'})
            temp_dict_alpha['data']['attributes'].update(dict_temp)
        data['entities'].append(temp_dict_alpha)

print(json.dumps(data, indent=4))
Output:
{
"entities": [
{
"name": "specs",
"type": "glass",
"data": {
"attributes": {
"aitm": {
"values": [
{
"value": "70072187",
"source": "internal",
"locale": "en_US"
}
]
},
"alitm": {
"values": [
{
"value": "ESA65Z45",
"source": "internal",
"locale": "en_US"
}
]
},
"aaitm": {
"values": [
{
"value": "ESA 65Z45",
"source": "internal",
"locale": "en_US"
}
]
},
"adsc1": {
"values": [
{
"value": "CUT TIP FG 1808-40",
"source": "internal",
"locale": "en_US"
}
]
}
}
}
},
{
"name": "specs",
"type": "glass",
"data": {
"attributes": {
"aitm": {
"values": [
{
"value": "666",
"source": "internal",
"locale": "en_US"
}
]
},
"alitm": {
"values": [
{
"value": "ESA6665",
"source": "internal",
"locale": "en_US"
}
]
},
"aaitm": {
"values": [
{
"value": "ESB 666",
"source": "internal",
"locale": "en_US"
}
]
},
"adsc1": {
"values": [
{
"value": "CUT TIP FG 66-40",
"source": "internal",
"locale": "en_US"
}
]
}
}
}
}
]
}
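To write the nested structure to a file instead of printing it (the output filename is arbitrary):
with open('output.json', 'w') as f:
    json.dump(data, f, indent=4)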