I'm trying to upload a file using a Python script. When I run the code it gives me no error, but the file is not uploaded to my SharePoint folder.
import requests
from shareplum import Office365
from config import config
# get data from configuration
username = config['sp_user']
password = config['sp_password']
site_name = config['sp_site_name']
base_path = config['sp_base_path']
doc_library = config['sp_doc_library']
file_name = "cat_pic.jpg"
# Obtain auth cookie
authcookie = Office365(base_path, username=username, password=password).GetCookies()
session = requests.Session()
session.cookies = authcookie
session.headers.update({'user-agent': 'python_bite/v1'})
session.headers.update({'accept': 'application/json;odata=verbose'})
# perform the actual upload
with open(file_name, 'rb') as file_input:
    try:
        response = session.post(
            url=base_path + "/sites/" + site_name + "/Shared%20Documents/Forms/AllItems.aspx/_api/web/GetFolderByServerRelativeUrl('" + doc_library + "')/Files/add(url='"
            + file_name + "',overwrite=true)",
            data=file_input)
    except Exception as err:
        print("Some error occurred: " + str(err))
config.py
config = dict()
config['sp_user'] = 'email'
config['sp_password'] = 'pass'
config['sp_base_path'] = 'https://bboxxeng.sharepoint.com'
config['sp_site_name'] = 'TESTIAN'
config['sp_doc_library'] = 'Test'
This is the URL of my SharePoint: https://bboxxeng.sharepoint.com/sites/TESTIAN/Shared%20Documents/Forms/AllItems.aspx and I've already created a folder in it named Test.
Thank you for answering my question.
Modify the code as below: build the REST endpoint from the site URL directly (not from the AllItems.aspx page), and make the server-relative folder path include the document library, i.e. 'Shared Documents/Test'.
response = session.post(
    url=base_path + "/sites/" + site_name + "/_api/web/GetFolderByServerRelativeUrl('Shared%20Documents/" + doc_library + "')/Files/add(url='"
    + file_name + "',overwrite=true)",
    data=file_input)
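For the site and library named in the question, the assembled endpoint would look roughly like this (a sketch built from the question's own values; cat_pic.jpg is the file name used above):

# Sketch: the URL the modified code builds for the question's site, library and file.
upload_url = (
    "https://bboxxeng.sharepoint.com/sites/TESTIAN"
    "/_api/web/GetFolderByServerRelativeUrl('Shared%20Documents/Test')"
    "/Files/add(url='cat_pic.jpg',overwrite=true)"
)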
Related
I am new to Python and want to know how to upload an image from the Pixabay API (or another source) to WordPress using the REST API and Python.
When I use this:
url = url_image = "https://pixabay.com/api/?key={API_KEY}&q={keyword}.jpg"
it shows this message:
{"code":"rest_upload_unknown_error","message":"Sorry, you are not allowed to upload this file type.","data":{"status":500}}
import base64, requests
from tempfile import NamedTemporaryFile

# keyword = input('Enter Your name')
keyword = 'flower'

def header(user, password):
    credentials = user + ':' + password
    token = base64.b64encode(credentials.encode())
    header_json = {'Authorization': 'Basic ' + token.decode('utf-8'),
                   'Content-Disposition': 'attachment; filename=%s' % "test1.jpg"}
    return header_json

def upload_image_to_wordpress(file_path, header_json):
    media = {'file': file_path, 'caption': f'{keyword}'}
    responce = requests.post("https://yourwebsite.com/wp-json/wp/v2/media", headers=header_json, files=media)
    print(responce.text)

heder = header("username", "password")  # username, application password

url_image = "https://pixabay.com/api/?key={API_KEY}&q={keyword}.jpg"
# url = "https://cdn.pixabay.com/photo/2021/11/30/08/24/strawberries-6834750_1280.jpg"
raw = requests.get(f'{url_image}').content

with NamedTemporaryFile(delete=False, mode="wb", suffix=".jpg") as img:
    img.write(raw)
    # print(f.file())

c = open(img.name, "rb")
upload_image_to_wordpress(c, heder)
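One hedged reading of the error: the Pixabay search URL above returns JSON, not image bytes, so the temp file contains JSON and WordPress rejects it as an unknown file type. A minimal sketch that instead downloads the direct CDN image commented out above and posts real JPEG bytes to the media endpoint (the site URL and credentials are placeholders from the question):

import base64
import requests

# Placeholders taken from the question; replace with real values.
WP_MEDIA_URL = "https://yourwebsite.com/wp-json/wp/v2/media"
USER, APP_PASSWORD = "username", "password"
# Direct image URL (the commented-out CDN link above), which returns actual JPEG bytes.
IMAGE_URL = "https://cdn.pixabay.com/photo/2021/11/30/08/24/strawberries-6834750_1280.jpg"

raw = requests.get(IMAGE_URL).content  # real JPEG bytes, not the API's JSON

token = base64.b64encode(f"{USER}:{APP_PASSWORD}".encode()).decode("utf-8")
headers = {"Authorization": "Basic " + token}

# A (filename, bytes, mime type) tuple lets WordPress see a proper image upload.
files = {"file": ("flower.jpg", raw, "image/jpeg")}
resp = requests.post(WP_MEDIA_URL, headers=headers, files=files)
print(resp.status_code, resp.text)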
I am using this much-shared code to try to upload a file to SharePoint using Shareplum, into the Shared Documents folder.
import requests
from shareplum import Office365
# Set Login Info
username = 'my.email#address.com'
password = 'myverifiedapppassword'
site_name = 'mysite'
base_path = 'https://xxxxxxxx.sharepoint.com'
doc_library = 'Shared%20Documents'
file_name = "hellotest.txt" #when your file in the same directory
# Obtain auth cookie
authcookie = Office365(base_path, username=username, password=password).GetCookies()
session = requests.Session()
session.cookies = authcookie
session.headers.update({'user-agent': 'python_bite/v1'})
session.headers.update({'accept': 'application/json;odata=verbose'})
session.headers.update({'X-RequestDigest': 'FormDigestValue'})
response = session.post(url=base_path + "/sites/" + site_name + "/_api/web/GetFolderByServerRelativeUrl('" + doc_library + "')/Files/add(url='a.txt',overwrite=true)",
                        data="")
session.headers.update({'X-RequestDigest': response.headers['X-RequestDigest']})
# Upload file
with open(file_name, 'rb') as file_input:
    try:
        response = session.post(
            url=base_path + "/sites/" + site_name + "/_api/web/GetFolderByServerRelativeUrl('" + doc_library + "')/Files/add(url='"
            + file_name + "',overwrite=true)",
            data=file_input)
        print("response: ", response.status_code)  # it returns 200
        if response.status_code == '200':
            print("File uploaded successfully")
    except Exception as err:
        print("Something went wrong: " + str(err))

print('File Uploaded Successfully')
The problem occurs when running the code: I always get a traceback and a KeyError, as follows:
Traceback (most recent call last):
  File "S:\upload.py", line 22, in <module>
    session.headers.update({'X-RequestDigest': response.headers['X-RequestDigest']})
  File "C:\Python39\lib\site-packages\requests\structures.py", line 54, in __getitem__
    return self._store[key.lower()][1]
KeyError: 'x-requestdigest'
Something to do with X-RequestDigest isn't working properly in line 22, but I cannot figure out what.
Any tips would be greatly appreciated!!!
thanks
I have tried the below code and it is working.
from shareplum import Office365
from shareplum import Site
from shareplum.site import Version
#Logging info
server_url = "https://example.sharepoint.com/"
site_url = server_url + "sites/my_site_name"
Username = 'myusername'
Password = 'mypassword'
Sharepoint_folder = 'Shared Documents'
fileName = 'myfilename'
def file_upload_to_sharepoint(**context):
    authcookie = Office365(server_url, username=Username, password=Password).GetCookies()
    site = Site(site_url, version=Version.v365, authcookie=authcookie)
    folder = site.Folder(Sharepoint_folder)
    with open(fileName, mode='rb') as file:
        fileContent = file.read()
    folder.upload_file(fileContent, "filename.bin")

file_upload_to_sharepoint()
Let me know if this works for you as well.
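If you would rather keep the raw REST calls from the question, the KeyError happens because the Files/add response does not carry an X-RequestDigest header. A hedged sketch of fetching the form digest explicitly from the standard /_api/contextinfo endpoint instead, reusing the question's session and variables (the endpoint and JSON shape come from SharePoint's REST API, not from the answer above):

# Sketch: obtain a fresh form digest before the upload, reusing the question's session.
digest_resp = session.post(url=base_path + "/sites/" + site_name + "/_api/contextinfo", data="")
digest = digest_resp.json()['d']['GetContextWebInformation']['FormDigestValue']
session.headers.update({'X-RequestDigest': digest})
# ...then perform the Files/add POST exactly as in the question.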
I have a Google Cloud Function which needs to connect to a URL, get data in the form of CSV files, and store them in a bucket. The Python code is below.
When I test the function it compiles successfully, but it is not working at all. When I checked the log it gave the below-mentioned error:
favt_LnT_acn_blackline_data_pull_func43jttmffma0g Invalid constructor input for AccessSecretVersionRequest: 'projects/gcp-favt-acn-rpt-dev/secrets/blackline_api_key/versions/latest'
Please find the code below and suggest a fix.
Thanks,
Vithal
import base64
import logging
import requests
#import pandas as pd
#from pandas import json_normalize
import json
import os
import datetime
from datetime import datetime as dt
import pytz
from google.cloud import storage
from google.cloud import secretmanager

def delete_and_upload_blob(landing_bucket_name,
                           source_file_name,
                           landing_blob_name,
                           retention_bucket_name,
                           file_retention_flag,
                           retn_file_suffix,
                           rpt_last_run_file):
    storage_client = storage.Client()
    bucket = storage_client.bucket(landing_bucket_name)
    blob = bucket.blob(landing_blob_name)
    rpt_last_run_blob = bucket.blob('some.csv')
    retention_bucket = storage_client.bucket(retention_bucket_name)

    if blob.exists(storage_client):
        #Delete the old file
        blob.delete()
        print('File {} is deleted from Cloud Storage before Upload'.format(landing_blob_name))
    else:
        print('No Such File Exists in Storage Bucket to Delete. So, proceeding with Upload')

    #Upload new one
    blob.upload_from_filename(source_file_name)
    print("File {} uploaded to Bucket {} With Name {}.".format(source_file_name, bucket, landing_blob_name))

    if file_retention_flag == 'Y':
        #Copy the last file of the day to retention bucket
        new_file_name = retn_file_suffix + '_' + landing_blob_name
        blob_copy = bucket.copy_blob(blob, retention_bucket, new_file_name)
        print('File {} is copied to Retention Bucket {}'.format(new_file_name, retention_bucket))

    if rpt_last_run_blob.exists(storage_client):
        #Delete the old file
        rpt_last_run_blob.delete()
        print('File {} is deleted from Cloud Storage before Upload'.format(rpt_last_run_blob))
    else:
        print('No Such File Exists in Storage Bucket to Delete. So, proceeding with Upload')

    #Upload new one
    rpt_last_run_blob.upload_from_filename(rpt_last_run_file)
    print("File {} uploaded to Bucket {} With Name {}.".format(rpt_last_run_file, bucket, 'Reports_Latest_Run_time.csv'))

def api_request():
    et = pytz.timezone("US/Eastern")
    current_et_time = dt.now().astimezone(et)
    print('Current ET Time:', current_et_time)
    pt = pytz.timezone("US/Pacific")
    ut = pytz.timezone("UTC")
    blackline_base_url = "https://....com"
    blackline_sts_url = blackline_base_url + "/authorize/connect/token"
    project_id = 'gcp-favt-acn-dev'
    secret_id = '###_api_key'
    secret_client = secretmanager.SecretManagerServiceClient()
    secret_name = secret_client.secret_version_path(project_id, secret_id, 'latest')
    secret_resp = secret_client.access_secret_version(secret_name)
    api_key = secret_resp.payload.data.decode('UTF-8')
    grant_type = 'password'
    scope = '####'
    username = '####'
    payload = 'grant_type=' + grant_type + '&scope=' + scope + '&username=' + username + '&password=' + api_key
    sts_headers = {'Authorization': 'Basic dXBzOk5KXXx2VENsSiEtRw==',
                   'Content-Type': 'application/x-www-form-urlencoded',
                   'Cookie': 'BLSIAPPEN=!bpJj4AOTHPcaqipWtDI6FrozN629M9xYLA/sbM1DWVH+jjuY5fgHVMACha2rIapXRoB7CcqnlaHgBw=='}
    response = requests.request("POST", ###_sts_url, headers=sts_headers, data=payload)
    if response.ok:
        sts_response = response.json()
        access_token = sts_response['access_token']
        print(access_token)
        blackline_rpt_submit_url = ##_base_url + '/api/queryruns'
        rpt_payload = ''
        blackline_rpt_api_headers = {'Authorization': 'Bearer {}'.format(access_token), 'Content-Type': 'text/plain'}
        rpt_resp = requests.request("GET", blackline_rpt_submit_url, headers=blackline_rpt_api_headers, data=rpt_payload)
        print(rpt_resp.text)
        jl = json.loads(rpt_resp.text)
        reports_list = []
        rprts_filename = "tmp_rprts.csv"
        rprts_full_path = os.path.join("/tmp", rprts_filename)
        with open(rprts_full_path, 'w') as f:
            f.write('ReportName,ReportLastRunTime' + '\n')
        hrs = -2
        hrs_to_subtract = datetime.timedelta(hours=hrs)
        two_hrs_ago_time = current_et_time + hrs_to_subtract
        #print(two_hrs_ago_time)  #latest_rpt_check_time)
        frmtd_curr_time = two_hrs_ago_time.strftime('%Y-%m-%d %H:%M:%S')
        latest_rpt_check_time = dt.strptime(frmtd_curr_time, '%Y-%m-%d %H:%M:%S')
        print("Latest Report Check Time:", latest_rpt_check_time)
        for each in jl:
            strpd_time = dt.strptime(each['endTime'][0:19], '%Y-%m-%dT%H:%M:%S')
            #print(strpd_time)
            pt_localize = pt.localize(strpd_time)
            #print(pt_localize)
            et_time = pt_localize.astimezone(et)
            #print(et_time)
            frmtd_et_time = et_time.strftime('%Y-%m-%d %H:%M:%S')
            #print(frmtd_et_time)
            cnvrted_endTime = dt.strptime(frmtd_et_time, '%Y-%m-%d %H:%M:%S')
            #print("Report LastRun EndTime:", cnvrted_endTime)
            ut_time = pt_localize.astimezone(ut)
            frmtd_ut_time = ut_time.strftime('%Y-%m-%d %H:%M:%S')
            if cnvrted_endTime > latest_rpt_check_time:
                reports_list.append({each['name']: each['exportUrls'][0]["url"]})
                rpt_last_run = each['name'] + ',' + frmtd_ut_time
                print(rpt_last_run)
                with open(rprts_full_path, 'a') as f:
                    f.write(rpt_last_run + '\n')
                retn_file_suffix = each['endTime'][0:10]
                #print(retn_file_suffix)
                rpt_run_hr = cnvrted_endTime.hour
                #print(rpt_run_hr)
        #############
        print(reports_list)
        for report in reports_list:
            for k in report:
                print(report[k])
                report_fetch_url = blackline_base_url + '/' + report[k]
                print('Report Fetch URL: {}'.format(report_fetch_url))
                filename = "temp_file.csv"
                full_path = os.path.join("/tmp", filename)
                rpt_data = requests.request("GET", report_fetch_url, headers=blackline_rpt_api_headers)
                print(rpt_data.text)
                with open(full_path, 'wb') as tmp_file:
                    tmp_file.write(rpt_data.content)
                #Upload it to Cloud Storage
                landing_bucket_name = "####_dev_landing_bkt"  #CHANGE ME
                source_file_name = os.path.join(full_path)
                rpt_last_run_file = os.path.join(rprts_full_path)
                landing_blob_name = '##.csv'  #CHANGE ME
                retention_bucket_name = '####_dev_retention_bkt'
                print('file retention check')
                if (rpt_run_hr >= 22):
                    file_retention_flag = 'Y'
                else:
                    file_retention_flag = 'N'
                print(file_retention_flag)
                delete_and_upload_blob(landing_bucket_name,
                                       source_file_name,
                                       landing_blob_name,
                                       retention_bucket_name,
                                       file_retention_flag,
                                       retn_file_suffix,
                                       rpt_last_run_file)
                #Remove the temp file after it is uploaded to Cloud Storage to
                #avoid OOM issues with the Cloud Function.
                os.remove(full_path)
        #Remove the tmp file after upload
        os.remove(rprts_full_path)

#def pacific_to_eastern_conversion(pacific_time, eastern_time):
def main(event, context):
    try:
        if 'data' in event:
            name = base64.b64decode(event['data']).decode('utf-8')
        else:
            name = 'World'
        print('Hello {}'.format(name))
        api_request()
    except Exception as e:
        logging.error(e)
The approach you are using will work for Cloud Run but won't work for Cloud Functions.
To make use of secrets in Google Cloud Functions, follow these steps:
Make sure that the function's runtime service account is granted access to the secret. To use Secret Manager with Cloud Functions, assign the roles/secretmanager.secretAccessor role to the service account associated with your function.
Make the secret accessible to the function. This can be done using either the Google Cloud Console or the gcloud command-line tool.
I exposed the secret as an environment variable (with the name set to "api_key") and accessed it in the code as shown below:
import os
api_key = os.environ.get('api_key')
I hope this answers your question.
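In the function above, that would replace the Secret Manager lookup inside api_request(); a minimal sketch, assuming the secret was exposed to the function as an environment variable named api_key (the failure check is an added assumption so a missing variable fails loudly):

import os

def get_api_key():
    # "api_key" is the environment variable name used in the answer above.
    api_key = os.environ.get('api_key')
    if not api_key:
        raise RuntimeError('api_key environment variable is not set for this function')
    return api_key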
Your Cloud Functions service account doesn't have access to Secret Manager. Grant your Cloud Functions service account access on the secret, or on the project (not recommended).
If you don't set a custom service account on your Cloud Function (which is also not a good practice), the App Engine default service account is used. It follows the pattern <ProjectID>@appspot.gserviceaccount.com
I'm trying to develop a module to upload video files via the MinIO Python API.
The file can be uploaded to MinIO, but it cannot be viewed via a URL like http://localhost:9000/lms-videos/video/output.mp4. Also, the file should be 19.39 MB when uploaded via MinIO, but the one uploaded via the API turns out to be 25+ MB; I don't know what causes this.
Following is part of my code :
# minio client
@api.model
def _get_minio_client(self):
    host = '192.168.1.102:9000'
    access_key = 'minioadmin'
    secret_key = 'minioadmin'
    if not all((host, access_key, secret_key)):
        raise exceptions.UserError('Incorrect configuration of MinIO')
    return Minio(
        host,
        access_key=access_key,
        secret_key=secret_key,
        secure=False
    )

# upload
@api.model
def _store_file_write(self):
    client = self._get_minio_client()
    bin_data = self.datas_minio
    fname = "output_test"
    #client.put_object('lms-videos', 'videos/' + fname + '.mp4', io.BytesIO(self.datas_minio), len(bin_data), 'video/mp4')
    with io.BytesIO(self.datas_minio) as bin_data_io:
        client.put_object('lms-videos',
                          'videos/' + fname + '.mp4',
                          bin_data_io,
                          len(bin_data),
                          'video/mp4')

@api.depends('document_id', 'slide_type', 'mime_type', 'external_url')
def _compute_embed_code(self):
    res = super(Slide, self)._compute_embed_code()
    for record in self:
        if record.slide_type == 'miniovideo':
            self._store_file_write()
            content_url = 'http://localhost:9000/lms-videos/videos/' + record.name + '.mp4'
            record.embed_code = '<video class="miniovideo" controls controlsList="nodownload"><source src="' + content_url + '" type=MPEG-4/></video>'

@api.onchange('datas_minio')
def _on_change_datas(self):
    res = super(Slide, self)._on_change_datas()
    if self.datas_minio:
        #fname = self.datas_minio.decode("utf-8")
        #bin_data = self.datas_minio
        self._store_file_write()
        #self._get_minio_client().put_object('lms-videos', '/videos/' + fname + '.mp4', io.BytesIO(bin_data), len(bin_data), 'video/mp4')
    return res
Problem fixed by adding b64decode. Odoo stores binary fields like datas_minio as base64 text, which likely also explains the size difference: base64 inflates data by about a third (19.39 MB × 4/3 ≈ 25.8 MB).
@api.model
def _store_file_write(self):
    client = self._get_minio_client()
    bin_data = base64.b64decode(self.datas_minio)
    fsize = len(bin_data)
    fname = "output_test"
    with io.BytesIO(bin_data) as bin_data_io:
        client.put_object('lms-videos',
                          #'videos/' + fname + '.mp4',
                          'videos/output_test.mp4',
                          bin_data_io,
                          fsize,
                          'video/mp4')
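A small hedged check of the fix, outside Odoo: decoding a base64 dump of the field recovers the real MP4 size, and MinIO should then report that size for the stored object (host, credentials, and bucket/object names are the ones from the question; the dump file name is hypothetical):

import base64
import io
from minio import Minio

client = Minio('192.168.1.102:9000', access_key='minioadmin',
               secret_key='minioadmin', secure=False)

# Hypothetical dump of the datas_minio field (base64 text, ~25 MB on disk).
encoded = open('datas_minio.b64', 'rb').read()
bin_data = base64.b64decode(encoded)  # real MP4 bytes, ~19.39 MB

client.put_object('lms-videos', 'videos/output_test.mp4',
                  io.BytesIO(bin_data), len(bin_data), 'video/mp4')
# The stored object size should now match the real file, not the base64 text.
print(client.stat_object('lms-videos', 'videos/output_test.mp4').size)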
I am able to upload an image successfully.
My code is:
pyrax.set_setting("identity_type", "rackspace")
pyrax.set_credentials("myuser_name", "my_api_key", region="ORD")
cf = pyrax.cloudfiles
cont = cf.create_container("media")
cont.make_public()
print "Beginning upload..."
obj = cont.upload_file(app.config['UPLOAD_FOLDER'] + "/" + str(filename))
print obj.get_temp_url
print cont
You can acquire a publicly accessible URI for an object by joining its name with the cdn_uri or cdn_ssl_uri of its container:
# Public URI
cont.cdn_uri + '/' + obj.name
# Public SSL URI
cont.cdn_ssl_uri + '/' + obj.name
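Tying this to the upload code above, a short sketch (the names cont and obj are the ones from the question's snippet):

# After cont.make_public() and obj = cont.upload_file(...):
public_url = cont.cdn_uri + '/' + obj.name          # plain HTTP CDN URL
public_ssl_url = cont.cdn_ssl_uri + '/' + obj.name  # HTTPS CDN URL
print(public_url)
print(public_ssl_url)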