I am trying to read Log Analytics data in Python.
Here is my code:
AZURE_CLIENT_ID = ''
AZURE_CLIENT_SECRET = ''
AZURE_TENANT_ID = ''
workspace_id = ''

from azure.identity import ClientSecretCredential
from datetime import datetime
from azure.monitor.query import LogsQueryClient, LogsQueryStatus

start_time = datetime(2022, 1, 1)
end_time = datetime(2023, 1, 2)

credential = ClientSecretCredential(
    client_id=AZURE_CLIENT_ID,
    client_secret=AZURE_CLIENT_SECRET,
    tenant_id=AZURE_TENANT_ID
)

client = LogsQueryClient(credential)

query = "ContainerLog"

response = client.query_workspace(workspace_id=workspace_id,
                                  query=query, timespan=(start_time, end_time - start_time))

if response.status == LogsQueryStatus.PARTIAL:
    error = response.partial_error
    print('Results are partial', error.message)
elif response.status == LogsQueryStatus.SUCCESS:
    results = []
    for table in response.tables:
        for row in table.rows:
            results.append(dict(zip(table.columns, row)))
    print(results)
and it is failing:
Traceback (most recent call last):
File "c:\temp\x.py", line 24, in <module>
response = client.query_workspace(workspace_id=workspace_id,
File "C:\kourosh\venv\lib\site-packages\azure\core\tracing\decorator.py", line 78, in wrapper_use_tracer
return func(*args, **kwargs)
File "C:\kourosh\venv\lib\site-packages\azure\monitor\query\_logs_query_client.py", line 136, in query_workspace
process_error(err, LogsQueryError)
File "C:\kourosh\venv\lib\site-packages\azure\monitor\query\_helpers.py", line 141, in process_error
raise HttpResponseError(message=error.message, response=error.response, model=model)
azure.core.exceptions.HttpResponseError: (InsufficientAccessError) The provided credentials have insufficient access to perform the requested operation
Code: InsufficientAccessError
Message: The provided credentials have insufficient access to perform the requested operation
I have added Log Analytics API -> Data.Read permission to the registered app that I'm using.
Any idea what is causing this?
Data.Read only grants your registered app permission to call the Log Analytics API. To actually read data inside the Log Analytics workspace, the service principal also needs access to that data itself, typically by being assigned an Azure role such as Log Analytics Reader on the workspace (or on the specific resources/tables you need).
References:
https://learn.microsoft.com/en-us/azure/azure-monitor/logs/api/access-api
https://learn.microsoft.com/en-us/azure/azure-monitor/logs/manage-access?tabs=portal
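As a rough sketch of granting that access programmatically (this part is an assumption on my side, not from the question: it uses the azure-mgmt-authorization package, assumes a recent "track 2" version of it, and uses placeholder subscription/resource-group/workspace names; verify the role GUID against the Azure built-in roles documentation):

import uuid

from azure.identity import ClientSecretCredential
from azure.mgmt.authorization import AuthorizationManagementClient
from azure.mgmt.authorization.models import RoleAssignmentCreateParameters

subscription_id = "<subscription-id>"

# Scope of the assignment: the Log Analytics workspace itself (placeholders throughout).
workspace_scope = (
    f"/subscriptions/{subscription_id}"
    "/resourceGroups/<resource-group>"
    "/providers/Microsoft.OperationalInsights/workspaces/<workspace-name>"
)

# Built-in "Log Analytics Reader" role definition (check the GUID in the Azure docs).
role_definition_id = (
    f"/subscriptions/{subscription_id}"
    "/providers/Microsoft.Authorization/roleDefinitions/73c42c96-874c-492b-b04d-ab87d138a893"
)

# Credentials of an identity that is allowed to create role assignments (e.g. an Owner).
credential = ClientSecretCredential(
    client_id="<admin-client-id>",
    client_secret="<admin-client-secret>",
    tenant_id="<tenant-id>",
)
auth_client = AuthorizationManagementClient(credential, subscription_id)

auth_client.role_assignments.create(
    scope=workspace_scope,
    role_assignment_name=str(uuid.uuid4()),  # a fresh GUID naming this assignment
    parameters=RoleAssignmentCreateParameters(
        role_definition_id=role_definition_id,
        principal_id="<object-id-of-your-registered-app>",  # the service principal's object id
    ),
)

In practice most people do this assignment once in the portal (Access control (IAM) on the workspace); after that, the query code in the question should succeed.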
Thanks to another user, I have managed to connect, through Python, an API that serves me data to Google Sheets, using the JSON formatting that user taught me (for which I am very grateful). However, it is not writing my data; I understand the data format is not designed for it. Could someone tell me how to change the way the JSON is processed so that the Sheets API can interpret it?
from __future__ import print_function

import os.path

from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from google.oauth2 import service_account
import requests
import json

# -------------------------------------------------------------------
BASE_URL = "https://apiserver.com/api"
token = 'xxxxxxxxxxx'
# headers = {'Content-Type': 'application/x-www-form-urlencoded', 'Authorization': "Bearer {}".format(token)}
PARAMS = {
    # "employee_ids": "xxxx",
    # "employee_ids": "xxxx",
    "business_unit_ids": "1",
    "start_date": "09/01/2022",
    "end_date": "09/14/2022"
}
headers = {"Content-Type": "application/json", 'Authorization': "Bearer {}".format(token), "Api-version": "1.0"}

response = requests.get(BASE_URL, params=PARAMS, headers=headers)
obj = response.json()
result = [response.content]

keys = obj[0].keys()
ar = []
for o in obj:
    temp = []
    for k in keys:
        temp.append(o[k] if k in o else "")
    ar.append(temp)
# print(response.content)

# --------------------------------------------------------------------------
SERVICE_ACCOUNT_FILE = 'keys.json'
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
creds = None
creds = service_account.Credentials.from_service_account_file(
    SERVICE_ACCOUNT_FILE, scopes=SCOPES)

# --------------------------------------------------------------------------
SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
SAMPLE_SPREADSHEET_ID = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'

service = build('sheets', 'v4', credentials=creds)
sheet = service.spreadsheets()
result = sheet.values().get(spreadsheetId=SAMPLE_SPREADSHEET_ID,
                            range="them!A1:J20").execute()
print(result)
values = result.get('values', [])

request = sheet.values().update(spreadsheetId=SAMPLE_SPREADSHEET_ID,
                                range="sheet!a1:xa", valueInputOption="USER_ENTERED",
                                body={"values": [list(keys), *ar]}).execute()
print(request)
Here is a sample of the response that works and of the response that does not work.
This one works:
[{"shift_id":2400298,"business_unit_id":10,"business_unit":"XXXXXXXX","employee_id":XXXXXXXX,"employee_code":"XXXXXXXX","entry":"2022-08-07T11:30:00","exit":"2022-08-07T15:30:00"},{"shift_id":2400299,"business_unit_id":10,"business_unit":"XXXXXXXX","employee_id":XXXXXXXX,"employee_code":"XXXXXXXX","entry":"2022-08-07T19:00:00","exit":"2022-08-07T23:00:00"},{"shift_id":2402414,"business_unit_id":10,"business_unit":"XXXXXXXX","employee_id":XXXXXXXX,"employee_code":"XXXXXXXX","entry":"2022-08-08T11:30:00","exit":"2022-08-08T16:00:00"},{"shift_id":2402415,"business_unit_id":10,"business_unit":"XXXXXXXX","employee_id":XXXXXXXX,"employee_code":"XXXXXXXX","entry":"2022-08-08T19:00:00","exit":"2022-08-08T23:30:00"},}]
This one does not:
[{"employee_id":XXXXXX,"employee_code":"XXXXXX","name":"XXXXXX","last_name":"XXXXXX","gender":"Undefined","marital_status":"Not_Defined","document_id":"XXXXXX","document_type":"Select From Below","authorization_type":"Select From Below","registration_time":"2022-09-14T00:00:00","address_road_type":"Select From Below","email":"XXXXXX","telephone1":"XXXXXX","bank_account":"XXXXXX","disability_level":"Select From Below","academic_education":"0","status":"Select From Below","level":"Select From Below","scale":"Select From Below","temporary_contract_reason":"Select From Below","additional_information":"","custom_field_collection":[{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":"UNDEFINED","value_id":"0"},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"Undefined","value":""},{"name":"","value":""},{"name":"","value":""},{"name":"","value":""},{"name":"","value":""},{"name":"","value":""},{"name":"","value":""},{"name":"","value":"UNDEFINED","value_id":"0"},{"name":"","value":"UNDEFINED","value_id":"0"},{"name":"","value":"UNDEFINED","value_id":"0"},{"name":"","value":"UNDEFINED","value_id":"0"},{"name":"","value":"UNDEFINED","value_id":"0"},{"name":"","value":"UNDEFINED","value_id":"0"}]}]
I would appreciate any help.
The terminal output only lets me see up to a certain point, and the sheet is not updated. I understand that when the JSON response is formatted to be sent to the Google Sheets API, it is not passed in a form the API can interpret.
Instead of getting this output, which commits the write to the sheet:
{'spreadsheetId': 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', 'updatedRange': 'Empleados!A1:O528', 'updatedRows': 528, 'updatedColumns': 15, 'updatedCells': 7920}
I get this error:
Traceback (most recent call last):
File "C:\Proyectos Python\Empleados-detalle-fconnect.py", line 74, in <module>
range="Empleados-detalle!a1:xa", valueInputOption="USER_ENTERED", body={"values": [list(keys), *ar]}).execute()
File "C:\Users\xxxxx\AppData\Local\Programs\Python\Python310\lib\site-packages\googleapiclient\_helpers.py", line 130, in positional_wrapper
return wrapped(*args, **kwargs)
File "C:\Users\fabra\AppData\Local\Programs\Python\Python310\lib\site-packages\googleapiclient\http.py", line 938, in execute
raise HttpError(resp, content, uri=self.uri)
googleapiclient.errors.HttpError: <HttpError 400 when requesting https://sheets.googleapis.com/v4/spreadsheets/xxxxxxxxxxxxxxxxxxxxxxxx/values/Empleados-detalle%21a1%3Axa?valueInputOption=USER_ENTERED&alt=json returned "Invalid values[1][21]: list_value { values {
struct_value {
fields {
key: "name"
value {
string_value: "Undefined"
}
}
fields {
key: "value"
value {
string_value: "UNDEFINED"
}
}
fields {
key: "value_id"
value {
string_value: "0"
}
}
I am also attaching 3 images of how the written result should look on the sheet. Thanks again for your patience and help.
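The error says the cell value at values[1][21] is a list, which matches the custom_field_collection field in the second response: Sheets cells only accept scalar values (strings, numbers, booleans), not nested lists or dicts. A minimal sketch of one way around that (serialize any nested value to a JSON string before building the rows; this adapts the row-building loop from the code above and has not been tested against your API) would be:

import json

def flatten_cell(value):
    # Nested lists/dicts (e.g. custom_field_collection) become a JSON string;
    # None becomes an empty cell; everything else is passed through unchanged.
    if isinstance(value, (list, dict)):
        return json.dumps(value, ensure_ascii=False)
    return "" if value is None else value

keys = list(obj[0].keys())
ar = []
for o in obj:
    ar.append([flatten_cell(o.get(k, "")) for k in keys])

The values().update(...) call can then stay the same, since every cell is now a plain scalar.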
The .deploy() function works for the FundMe.sol contract but not for the MockV3Interface.sol contract.
Here's my deploy.py code below:
from brownie import FundMe, MockV3Aggregator, accounts, config, network
from scripts.helpful_scripts import get_account


def deploy_fund_me():
    account = get_account()
    # Passing Price Feed to our Solidity contract.
    # If we are on a persistent network like rinkeby, use its price feed address.
    # Otherwise use Mocks.
    # print(account)
    if network.show_active() != "development":
        price_feed_address = config["networks"][network.show_active()][
            "eth_usd_price_feed"
        ]
    else:
        print(f"The current Network is: {network.show_active()}")
        print("Deploying Mock....")
        mock_aggregator = MockV3Aggregator.deploy(
            18, 2000000000000000000, {"from": accounts}
        )
        price_feed_address = mock_aggregator.address
        print("Mock Deployed!!")

    fund_me = FundMe.deploy(
        price_feed_address,
        {"from": account},
        publish_source=True,
    )
    print(f"It is deployed to {fund_me.address}")


def main():
    deploy_fund_me()
And here's the error output:
Running '\Users\HP\Development\demos\brownie_fund_me\scripts\deploy.py::main'...
The current Network is: development
Deploying Mock....
File "c:\users\hp\development\demos\brownie_simple_storage\venv\lib\site-packages\brownie\_cli\run.py", line 50, in main
return_value, frame = run(
File "c:\users\hp\development\demos\brownie_simple_storage\venv\lib\site-packages\brownie\project\scripts.py", line 103, in run
return_value = f_locals[method_name](*args, **kwargs)
File "\Users\HP\Development\demos\brownie_fund_me\scripts\deploy.py", line 34, in main
deploy_fund_me()
File "\Users\HP\Development\demos\brownie_fund_me\scripts\deploy.py", line 19, in deploy_fund_me
mock_aggregator = MockV3Aggregator.deploy(
File "c:\users\hp\development\demos\brownie_simple_storage\venv\lib\site-packages\brownie\network\contract.py", line 528, in __call__
return tx["from"].deploy(
AttributeError: 'Accounts' object has no attribute 'deploy'
Terminating local RPC client...
Everyone's help would be appreciated.
Thanks in advance.
This is a solution to your problem: use account instead of accounts.
from brownie import accounts, config, MyContract


def deploy_my_contract():
    account = accounts[0]
    my_contract = MyContract.deploy({"from": account})
    some_return_value = my_contract.some_func()
    print(some_return_value)
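Applied to the deploy script in the question, that means passing a single account rather than the whole accounts container (a sketch based on the code above):

mock_aggregator = MockV3Aggregator.deploy(
    18, 2000000000000000000, {"from": account}  # account, not the Accounts container
)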
I'm doing the same course.
My problem was that accounts[0] was inside a function that wasn't returning anything because of a missing () in a call.
This is now working for me:
def get_account():
    if network.show_active() == "development":
        return accounts[0]
    else:
        return accounts.add(config["wallets"]["from_key"])


def deploy_fund_me():
    account = get_account()
    fund_me = FundMe.deploy({"from": account})
The error below occurs whenever a message is sent in the Discuss module, which we configured to send notifications to the user's firebase_id.
File "/odoo/odoo-server/addons/mail/models/mail_channel.py", line 368, in message_post
message = super(Channel, self.with_context(mail_create_nosubscribe=True)).message_post(message_type=message_type, moderation_status=moderation_status, **kwargs)**
File "/odoo/custom/addons/elite_event_management_api/controllers/message.py", line 34, in message_post
registration_id = channel_partner_id.partner_id.user_ids[0].firebase_id
File "/odoo/odoo-server/odoo/models.py", line 5624, in _getitem_
return self.browse((self._ids[key],))
IndexError: tuple index out of range
Here is the code. All users are unable to send messages in the Discuss module.
import logging

from odoo import models, api
from odoo.exceptions import AccessDenied, UserError

_logger = logging.getLogger(__name__)


class MailThred(models.AbstractModel):
    _inherit = "mail.thread"

    @api.returns('mail.message', lambda value: value.id)
    def message_post(self, *,
                     body='', subject=None, message_type='notification',
                     email_from=None, author_id=None, parent_id=False,
                     subtype_id=False, subtype=None, partner_ids=None, channel_ids=None,
                     attachments=None, attachment_ids=None,
                     add_sign=True, record_name=False,
                     **kwargs):
        res = super(MailThred, self).message_post(body=body, subject=subject, message_type=message_type,
                                                  email_from=email_from, author_id=author_id, parent_id=parent_id,
                                                  subtype_id=subtype_id, subtype=subtype, partner_ids=partner_ids,
                                                  channel_ids=channel_ids,
                                                  attachments=attachments, attachment_ids=attachment_ids,
                                                  add_sign=add_sign, record_name=record_name,
                                                  **kwargs)
        message_subtype_id = self.env['mail.message.subtype'].sudo().search([('name', 'ilike', 'Discussions')])
        if res.message_type == 'comment' and res.subtype_id.id == message_subtype_id.id:
            for each in res.channel_ids:
                for channel_partner_id in each.channel_last_seen_partner_ids:
                    if channel_partner_id.partner_id.id != res.author_id.id:
                        from . import FCMManager as fcm
                        registration_id = channel_partner_id.partner_id.user_ids[0].firebase_id
                        if registration_id:
                            try:
                                tokens = [str(registration_id)]
                                message_title = "ControlE#ERP - CHAT"
                                message_body = res.body
                                fcm.sendPush(message_title, message_body, tokens)
                                _logger.info('ControlE#ERP Alert- NEW CHAT MESSAGE SENT')
                            except Exception as e:
                                _logger.info('not sent')
        return res
Some partners have no users in channel_partner_id.partner_id.user_ids, so you are trying to take the 0th element of an empty recordset.
Check your code and guard against that case.
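As a minimal sketch (adapted from the inner loop of the code above, not tested against your Odoo setup), you could skip partners that have no linked user before reading firebase_id:

for channel_partner_id in each.channel_last_seen_partner_ids:
    if channel_partner_id.partner_id.id == res.author_id.id:
        continue
    users = channel_partner_id.partner_id.user_ids
    if not users:  # partner has no linked user, nothing to notify
        continue
    registration_id = users[0].firebase_id
    if registration_id:
        fcm.sendPush("ControlE#ERP - CHAT", res.body, [str(registration_id)])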
I'm trying to fetch all issues in JIRA for all projects. When doing the calls one at a time, it works perfectly. When trying to run it in a for loop, I get a 400 Client Error.
The way that works:
results = jira_instance.jql("project = FJA", limit = 100, fields=["issuetype", "status", "summary"])
The way that does not work:
projects = ["ADV", "WS", "FJA", "FOIJ", "QW", "UOI"]

for key in projects:
    results = jira_instance.jql(f"project = {key})", limit=100, fields=["issuetype", "status", "summary"])
The error:
Traceback (most recent call last):
File "C:\jira-api-python\jira.py", line 24, in <module>
results = jira_instance.jql("project = {key}", limit = 100, fields=["issuetype", "status", "summary"])
File "C:\.virtualenvs\jira-api-python-rouJrYa4\lib\site-packages\atlassian\jira.py", line 2271, in jql
return self.get("rest/api/2/search", params=params)
File "C:\.virtualenvs\jira-api-python-rouJrYa4\lib\site-packages\atlassian\rest_client.py", line 264, in get
response = self.request(
File "C:\.virtualenvs\jira-api-python-rouJrYa4\lib\site-packages\atlassian\rest_client.py", line 236, in request
response.raise_for_status()
File "C:\.virtualenvs\jira-api-python-rouJrYa4\lib\site-packages\requests\models.py", line 943, in raise_for_status
raise HTTPError(http_error_msg, response=self)
requests.exceptions.HTTPError: 400 Client Error: Bad Request for url: https://stuff.atlassian.net/rest/api/2/search?startAt=0&maxResults=100&fields=issuetype%2Cstatus%2Csummary&jql=project+%3D+%7Bkey%7D
My guess is that I'm not using the f-string correctly. But when I print the value of {key} it is correct.
Any pointers would be greatly appreciated.
Thank you for your time.
Edit:
Added the full traceback; I only removed the path on my machine and changed the URL of the endpoint. Below is the full file with credentials and endpoint redacted. The idea is to create a CSV for each project.
The full code:
from atlassian import Jira
import pandas as pd
import time

jira_instance = Jira(
    url="https://stuff.atlassian.net/",
    username="user",
    password="pass",
)

projects = ["ADV", "WS", "FJA", "FOIJ", "QW", "UOI"]
FIELDS_OF_INTEREST = ["key", "fields.summary", "fields.status.name"]

timestamp = time.strftime("%Y%m%d-%H%M%S")
file_ending = ".csv"

for key in projects:
    print(f"stuff = {key}")
    results = jira_instance.jql(f"project = {key})", limit=1000, fields=["issuetype", "status", "summary"])
I found a very simple solution.
In this snippet: results = jira_instance.jql(f"project = {key})", limit = 1000, fields=["issuetype", "status", "summary"])
The ) after {key} was not supposed to be there.
Thank you for the help
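For reference, the corrected loop from the question (without the stray closing parenthesis) looks like this:

for key in projects:
    results = jira_instance.jql(
        f"project = {key}",  # no ")" after {key}
        limit=1000,
        fields=["issuetype", "status", "summary"],
    )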
I am trying to create an instance from a bootable volume in OpenStack using python-novaclient.
The steps I am taking are the following:
Step 1: create a 100 GB volume from a "CentOS" image.
Step 2: create an instance from the volume created in step 1.
However, I must be doing something wrong or missing some information, because it is not able to complete the task.
Here are my commands in the Python shell:
import time, getpass
from cinderclient import client
from novaclient.client import Client
project_name = 'project'
region_name = 'region'
keystone_link = 'https://keystone.net:5000/v2.0'
network_zone = "Public"
key_name = 'key_pair'
user = 'user'
pswd = getpass.getpass('Password: ')
# create a connection
cinder = client.Client('1', user, pswd, project_name, keystone_link, region_name = region_name)
# get the volume id that we will attach
print(cinder.volumes.list())
[<Volume: 1d36203e-b90d-458f-99db-8690148b9600>, <Volume: d734f5fc-87f2-41dd-887e-c586bf76d116>]
vol1 = cinder.volumes.list()[1]
vol1.id
block_device_mapping = {'device_name': vol1.id, 'mapping': '/dev/vda'}
### +++++++++++++++++++++++++++++++++++++++++++++++++++++ ###
# now create a connection with nova and create then instance object
nova = Client(2, user, pswd, project_name, keystone_link, region_name = region_name)
# find the image
image = nova.images.find(name="NETO CentOS 6.4 x86_64 v2.2")
# get the flavor
flavor = nova.flavors.find(name="m1.large")
#get the network and attach
network = nova.networks.find(label=network_zone)
nics = [{'net-id': network.id}]
# get the keyname and attach
key_pair = nova.keypairs.get(key_name)
s1 = 'nova-vol-test'
server = nova.servers.create(name = s1, image = image.id, block_device_mapping = block_device_mapping, flavor = flavor.id, nics = nics, key_name = key_pair.name)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/lib/python2.6/site-packages/novaclient/v1_1/servers.py", line 902, in create
**boot_kwargs)
File "/usr/lib/python2.6/site-packages/novaclient/v1_1/servers.py", line 554, in _boot
return_raw=return_raw, **kwargs)
File "/usr/lib/python2.6/site-packages/novaclient/base.py", line 100, in _create
_resp, body = self.api.client.post(url, body=body)
File "/usr/lib/python2.6/site-packages/novaclient/client.py", line 490, in post
return self._cs_request(url, 'POST', **kwargs)
File "/usr/lib/python2.6/site-packages/novaclient/client.py", line 465, in _cs_request
resp, body = self._time_request(url, method, **kwargs)
File "/usr/lib/python2.6/site-packages/novaclient/client.py", line 439, in _time_request
resp, body = self.request(url, method, **kwargs)
File "/usr/lib/python2.6/site-packages/novaclient/client.py", line 433, in request
raise exceptions.from_response(resp, body, url, method)
novaclient.exceptions.BadRequest: Block Device Mapping is Invalid: failed to get volume /dev/vda. (HTTP 400) (Request-ID: req-2b9db4e1-f24f-48c6-8660-822741ca52ad)
>>>
I tried to find documentation so that I could solve this on my own, but I was not able to.
If anyone has tried this before, I would appreciate their help with this.
Thanks,
Murtaza
I was able to get it to work by using this dictionary:
block_dev_mapping = {'vda':'uuid of the volume you want to use'}
I then called it in the create method like this:
instance = nova.servers.create(name="python-test3", image='', block_device_mapping=block_dev_mapping,
                               flavor=flavor, key_name="my-keypair", nics=nics)
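Putting this answer together with the objects from the question, a minimal boot-from-volume call might look like the sketch below (vol1, flavor, nics, and key_pair come from the question's shell session; I have not run this against your cloud):

block_dev_mapping = {'vda': vol1.id}  # device name -> volume UUID
server = nova.servers.create(
    name='nova-vol-test',
    image='',  # no image: the root disk comes from the mapped volume
    block_device_mapping=block_dev_mapping,
    flavor=flavor.id,
    nics=nics,
    key_name=key_pair.name,
)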