How to implement refresh token for the YouTube API - Python

I want to upload a video with the YouTube API, but every time I run it Google prints a message in the terminal telling me to open a link to validate the application. I read that I can avoid this with a refresh token, and I found a helper function on the internet, but it is not working and I do not know why. I have done all the setup steps before this, so I will be happy for any help. Thanks.
function:
import pickle
import os
import datetime

from google_auth_oauthlib.flow import Flow, InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
from google.auth.transport.requests import Request


def Create_Service(client_secret_file, api_name, api_version, *scopes):
    print(client_secret_file, api_name, api_version, scopes, sep='-')
    CLIENT_SECRET_FILE = client_secret_file
    API_SERVICE_NAME = api_name
    API_VERSION = api_version
    SCOPES = [scope for scope in scopes[0]]
    print(SCOPES)

    cred = None
    pickle_file = f'token_{API_SERVICE_NAME}_{API_VERSION}.pickle'
    # print(pickle_file)

    if os.path.exists(pickle_file):
        with open(pickle_file, 'rb') as token:
            cred = pickle.load(token)

    if not cred or not cred.valid:
        if cred and cred.expired and cred.refresh_token:
            cred.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRET_FILE, SCOPES)
            cred = flow.run_console()

        with open(pickle_file, 'wb') as token:
            pickle.dump(cred, token)

    try:
        service = build(API_SERVICE_NAME, API_VERSION, credentials=cred)
        print(API_SERVICE_NAME, 'service created successfully')
        return service
    except Exception as e:
        print('Unable to connect.')
        print(e)
        return None


def convert_to_RFC_datetime(year=1900, month=1, day=1, hour=0, minute=0):
    dt = datetime.datetime(year, month, day, hour, minute, 0).isoformat() + 'Z'
    return dt
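For context, a helper like the one above is usually wired in as follows (a sketch only; it assumes the function lives in a Google.py module and reuses the client-secret file and scopes from the upload code below):

from Google import Create_Service  # assuming the Create_Service above is saved as Google.py

CLIENT_SECRET_FILE = 'youtube_client.json'
API_NAME = 'youtube'
API_VERSION = 'v3'
SCOPES = ['https://www.googleapis.com/auth/youtube.upload']

# After the first consent the credentials are cached in a pickle file and
# refreshed automatically, so the terminal prompt should not reappear.
service = Create_Service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)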
and this is my code:
import argparse
import http.client
import httplib2
import os
import random
import time
import datetime

import google.oauth2.credentials
import google_auth_oauthlib.flow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
from googleapiclient.http import MediaFileUpload
from google_auth_oauthlib.flow import InstalledAppFlow

httplib2.RETRIES = 1
MAX_RETRIES = 10

RETRIABLE_EXCEPTIONS = (httplib2.HttpLib2Error, IOError, http.client.NotConnected,
                        http.client.IncompleteRead, http.client.ImproperConnectionState,
                        http.client.CannotSendRequest, http.client.CannotSendHeader,
                        http.client.ResponseNotReady, http.client.BadStatusLine)
RETRIABLE_STATUS_CODES = [500, 502, 503, 504]

CLIENT_SECRETS_FILE = 'youtube_client.json'
SCOPES = ['https://www.googleapis.com/auth/youtube.upload']
API_SERVICE_NAME = 'youtube'
API_VERSION = 'v3'
VALID_PRIVACY_STATUSES = ('public', 'private', 'unlisted')

upload_date_time = datetime.datetime(2022, 12, 25, 12, 30, 0).isoformat() + '.000Z'

request_body = {
    'snippet': {
        'categoryId': 10,  # was 'categoryI', which the API does not recognize
        'title': 'best music on the youtube | happy mood mix | AMP',
        'description': "test",
        'tags': ['Travel', 'video test', 'Travel Tips']
    },
    'status': {
        'privacyStatus': 'private',
        'publishAt': upload_date_time,
        'selfDeclaredMadeForKids': False,
    },
    'notifySubscribers': False  # note: notifySubscribers is normally a parameter of videos().insert(), not part of the body
}


def get_authenticated_service():
    flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRETS_FILE, SCOPES)
    credentials = flow.run_console()
    return build(API_SERVICE_NAME, API_VERSION, credentials=credentials)


def initialize_upload(youtube, body, file):
    insert_request = youtube.videos().insert(
        part='snippet,status',
        body=body,
        media_body=MediaFileUpload(file, chunksize=-1, resumable=True))
    response = resumable_upload(insert_request)
    return response


def resumable_upload(request):
    response = None
    error = None
    retry = 0
    while response is None:
        try:
            print('Uploading file...')
            status, response = request.next_chunk()
            if response is not None:
                if 'id' in response:
                    print('Video id "%s" was successfully uploaded.' % response['id'])
                else:
                    exit('The upload failed with an unexpected response: %s' % response)
        except HttpError as e:
            if e.resp.status in RETRIABLE_STATUS_CODES:
                error = 'A retriable HTTP error %d occurred:\n%s' % (e.resp.status, e.content)
            else:
                raise
        except RETRIABLE_EXCEPTIONS as e:
            error = 'A retriable error occurred: %s' % e

        if error is not None:
            print(error)
            retry += 1
            if retry > MAX_RETRIES:
                exit('No longer attempting to retry.')
            max_sleep = 2 ** retry
            sleep_seconds = random.random() * max_sleep
            print('Sleeping %f seconds and then retrying...' % sleep_seconds)
            time.sleep(sleep_seconds)
            error = None  # reset so a successful chunk is not counted as another failure

    # return the full response so callers can use response.get('id')
    return response


if __name__ == '__main__':
    youtube = get_authenticated_service()
    try:
        response = initialize_upload(youtube, request_body, "output.mp4")
    except HttpError as e:
        print('An HTTP error %d occurred:\n%s' % (e.resp.status, e.content))

    youtube.thumbnails().set(
        videoId=response.get('id'),
        media_body=MediaFileUpload('thumbnail.png')
    ).execute()

The trick is to let the client library do most of the work for you.
In the code below, the first time it runs a token.json file is created. This file will contain:
the access token for the user
the refresh token
the client id used to create it
the client secret used to create it
the scopes it authorizes.
token.json
{
  "token": "[redacted]",
  "refresh_token": "[redacted]",
  "token_uri": "https://oauth2.googleapis.com/token",
  "client_id": "[redacted]",
  "client_secret": "[redacted]",
  "scopes": [
    "https://www.googleapis.com/auth/youtube"
  ],
  "expiry": "2022-08-15T13:05:53.162649Z"
}
Every time the script runs it checks whether this file exists. If it does, it attempts to load the credentials with from_authorized_user_file; if it does not, the user is asked to authorize access.
code
# To install the Google client library for Python, run the following command:
# pip install --upgrade google-api-python-client google-auth-httplib2 google-auth-oauthlib

from __future__ import print_function

import os.path

import google.auth.exceptions
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError

# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/youtube']


def main():
    """Shows basic usage of the YouTube v3 API.
    Prints the ids and titles of the most popular videos.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        try:
            creds = Credentials.from_authorized_user_file('token.json', SCOPES)
            creds.refresh(Request())
        except google.auth.exceptions.RefreshError as error:
            # If the refresh fails, reset creds to None.
            creds = None
            print(f'Refresh token expired, requesting authorization again: {error}')

    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                r'C:\YouTube\dev\credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    try:
        service = build('youtube', 'v3', credentials=creds)

        # Call the YouTube v3 API
        results = service.videos().list(
            part='snippet',
            chart='mostPopular',
            fields="nextPageToken, items").execute()
        items = results.get('items', [])

        if not items:
            print('No videos found.')
            return
        print('Videos:')
        for item in items:
            print(u'{0} ({1})'.format(item['id'], item['snippet']['title']))
    except HttpError as error:
        # TODO(developer) - Handle errors from the YouTube API.
        print(f'An error occurred: {error}')


if __name__ == '__main__':
    main()
note
Access tokens are valid for one hour; the code will refresh them as needed.
Refresh tokens for applications still in the testing publishing status are good for seven days, at which point you will need to delete the token.json file and authorize the application again.
Once your application is in production, the refresh tokens will no longer expire.
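If you want to automate the "delete token.json and authorize again" step described above, a minimal sketch could look like this (reusing the token.json and SCOPES names from the example; 'credentials.json' stands in for your client-secret file):

import os
import google.auth.exceptions
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow

SCOPES = ['https://www.googleapis.com/auth/youtube']


def load_or_reauthorize(token_path='token.json', secrets_path='credentials.json'):
    """Return valid credentials, falling back to a fresh consent flow."""
    creds = None
    if os.path.exists(token_path):
        creds = Credentials.from_authorized_user_file(token_path, SCOPES)
        try:
            if creds.expired and creds.refresh_token:
                creds.refresh(Request())
        except google.auth.exceptions.RefreshError:
            # Refresh token revoked or expired (e.g. a testing app after 7 days):
            # drop the cached file and ask the user to authorize again.
            os.remove(token_path)
            creds = None
    if not creds or not creds.valid:
        flow = InstalledAppFlow.from_client_secrets_file(secrets_path, SCOPES)
        creds = flow.run_local_server(port=0)
        with open(token_path, 'w') as token:
            token.write(creds.to_json())
    return creds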

Related

How to upload files into google drive via api using Python?

I have an Excel file on my local computer that I would like to upload to Google Drive using Python via the API.
The following is my code:
import pickle
import os
import datetime

from google_auth_oauthlib.flow import Flow, InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
from google.auth.transport.requests import Request


def Create_Service(client_secret_file, api_name, api_version, *scopes):
    print(client_secret_file, api_name, api_version, scopes, sep='-')
    CLIENT_SECRET_FILE = client_secret_file
    API_SERVICE_NAME = api_name
    API_VERSION = api_version
    SCOPES = [scope for scope in scopes[0]]
    print(SCOPES)

    cred = None
    pickle_file = f'token_{API_SERVICE_NAME}_{API_VERSION}.pickle'

    if os.path.exists(pickle_file):
        with open(pickle_file, 'rb') as token:
            cred = pickle.load(token)

    if not cred or not cred.valid:
        if cred and cred.expired and cred.refresh_token:
            cred.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRET_FILE, SCOPES)
            cred = flow.run_local_server()

        with open(pickle_file, 'wb') as token:
            pickle.dump(cred, token)

    try:
        service = build(API_SERVICE_NAME, API_VERSION, credentials=cred)
        print(API_SERVICE_NAME, 'service created successfully')
        return service
    except Exception as e:
        print('Unable to connect.')
        print(e)
        return None


def convert_to_RFC_datetime(year=1900, month=1, day=1, hour=0, minute=0):
    dt = datetime.datetime(year, month, day, hour, minute, 0).isoformat() + 'Z'
    return dt


CLIENT_SECRET_FILE = '/Users/shruthiravishankar/Downloads/client_secret_316665721335-819139d5ea0aeet1ddshhk6p0mpl8mv2.apps.googleusercontent.com.json'
API_NAME = 'Desktop client 1'
API_VERSION = 'v3'
SCOPES = ['https://www.googleapis.com/auth/drive']
service = Create_Service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)

folder_id = '10Xct2T1vBpqW3-3Ud6mjPuf_lKCN1bUL'
file_names = ['Manual_SIC_MVP (9).xlsx']
mime_types = ['application/vnd.openxmlformats-officedocument.spreadsheetml.sheet']

for file_name, mime_type in zip(file_names, mime_types):
    file_metadata = {
        'name': file_name,
        'parents': [folder_id]
    }
    media = MediaFileUpload('/Users/shruthiravishankar/Downloads/{0}'.format(file_name), mimetype=mime_type)
    print(media)
    service.files().create(
        supportsTeamDrives=True,
        body=file_metadata,
        media_body=media,
        fields='id'
    ).execute()
The error that I am getting is "Unable to connect." and the service comes back as None.
Is there an issue with the api_version and scope? I am unable to figure out the issue. This is my first time dealing with an API.
The issue is with this statement actually:
API_NAME = 'Desktop client 1'
That's not a valid Google service name.
This particular library needs to know which API service it needs to build, not how you want to call it.
Change that line to the following:
API_NAME = 'drive'
See the GitHub line in question here and the general quickstart docs here.
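For illustration, the corrected setup would look like this (reusing CLIENT_SECRET_FILE and Create_Service from the question; only the service name changes):

API_NAME = 'drive'      # the Google service name, not the OAuth client's display name
API_VERSION = 'v3'
SCOPES = ['https://www.googleapis.com/auth/drive']

# Create_Service should now print "drive service created successfully"
# instead of printing "Unable to connect." and returning None.
service = Create_Service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)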

How can I upload A file to shared folder in my google drive? {python}

I have tried almost everything in the Google documentation and almost all the possibilities I could explore by myself, and I still can't find a viable solution.
I just need to create a program which uploads a given file, for example "test.zip", from my working directory to Google Drive.
I have a client_secret.json, but none of the solutions online actually help, as I am having issues with authentication.
from Google import Create_Service
from googleapiclient.http import MediaFileUpload

CLIENT_SECRET_FILE = "client_secret.json"
API_NAME = "drive"
API_VERSION = "v3"
SCOPES = ["https://www.googleapis.com/auth/drive"]

service = Create_Service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)

folder_id = "1QpsQB_R7JyqxueQwIe8_AvKGm7a25IoJ"
file_names = ["my_file.zip"]
mime_types = ['application/zip']

for file_name, mime_type in zip(file_names, mime_types):
    file_metadata = {
        "name": file_name,
        "parents": [folder_id]
    }
    media = MediaFileUpload('./Uploads/{0}'.format(file_name), mimetype=mime_type)
    service.files().create(
        body=file_metadata,
        media_body=media,
        fields="id"
    ).execute()
This is the code I am using right now.
Create_Service is imported from Google.py:
import pickle
import os
import datetime

from google_auth_oauthlib.flow import Flow, InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
from google.auth.transport.requests import Request


def Create_Service(client_secret_file, api_name, api_version, *scopes):
    print(client_secret_file, api_name, api_version, scopes, sep='-')
    CLIENT_SECRET_FILE = client_secret_file
    API_SERVICE_NAME = api_name
    API_VERSION = api_version
    SCOPES = [scope for scope in scopes[0]]
    print(SCOPES)

    cred = None
    pickle_file = f'token_{API_SERVICE_NAME}_{API_VERSION}.pickle'
    # print(pickle_file)

    if os.path.exists(pickle_file):
        with open(pickle_file, 'rb') as token:
            cred = pickle.load(token)

    if not cred or not cred.valid:
        if cred and cred.expired and cred.refresh_token:
            cred.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRET_FILE, SCOPES)
            cred = flow.run_local_server()

        with open(pickle_file, 'wb') as token:
            pickle.dump(cred, token)

    try:
        service = build(API_SERVICE_NAME, API_VERSION, credentials=cred)
        print(API_SERVICE_NAME, 'service created successfully')
        return service
    except Exception as e:
        print('Unable to connect.')
        print(e)
        return None


def convert_to_RFC_datetime(year=1900, month=1, day=1, hour=0, minute=0):
    dt = datetime.datetime(year, month, day, hour, minute, 0).isoformat() + 'Z'
    return dt
But even after allowing authentication it still shows an error.
Any help will be appreciated :)

Code not creating access token when authenticating account for Google API

When I run my code, I am able to log in, but as soon as I try to authenticate, the server crashes, and I assume that's because I am not getting any access tokens. I am not sure where to go next. I have downloaded the .json file for my credentials and it's called 'credentials.json'.
This is my Google.py
import pickle
import os
import datetime

from google_auth_oauthlib.flow import Flow, InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload, MediaIoBaseDownload
from google.auth.transport.requests import Request


def Create_Service(client_secret_file, api_name, api_version, *scopes, prefix=''):
    CLIENT_SECRET_FILE = client_secret_file
    API_SERVICE_NAME = api_name
    API_VERSION = api_version
    SCOPES = [scope for scope in scopes[0]]

    cred = None
    working_dir = os.getcwd()
    token_dir = 'token files'
    pickle_file = f'token_{API_SERVICE_NAME}_{API_VERSION}{prefix}.pickle'

    # Check if the token dir exists first; if not, create the folder
    if not os.path.exists(os.path.join(working_dir, token_dir)):
        os.mkdir(os.path.join(working_dir, token_dir))

    if os.path.exists(os.path.join(working_dir, token_dir, pickle_file)):
        with open(os.path.join(working_dir, token_dir, pickle_file), 'rb') as token:
            cred = pickle.load(token)

    if not cred or not cred.valid:
        if cred and cred.expired and cred.refresh_token:
            cred.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(CLIENT_SECRET_FILE, SCOPES)
            cred = flow.run_local_server()

        with open(os.path.join(working_dir, token_dir, pickle_file), 'wb') as token:
            pickle.dump(cred, token)

    try:
        service = build(API_SERVICE_NAME, API_VERSION, credentials=cred)
        print(API_SERVICE_NAME, API_VERSION, 'service created successfully')
        return service
    except Exception as e:
        print(e)
        print(f'Failed to create service instance for {API_SERVICE_NAME}')
        os.remove(os.path.join(working_dir, token_dir, pickle_file))
        return None


def convert_to_RFC_datetime(year=1900, month=1, day=1, hour=0, minute=0):
    dt = datetime.datetime(year, month, day, hour, minute, 0).isoformat() + 'Z'
    return dt


if __name__ == '__main__':
    API_NAME = 'calendar'
    API_VERSION = 'v3'
    SCOPES = ['https://www.googleapis.com/auth/calendar']
    CLIENT_FILE = 'client-secret.json'
    # pass the pickle-file suffix as the keyword it was defined for
    service = Create_Service(CLIENT_FILE, API_NAME, API_VERSION, SCOPES, prefix='x')
This is my code to run
from pprint import pprint
from Google import Create_Service
CLIENT_SECRET_FILE = 'credentials.json'
API_NAME = 'calendar'
API_VERSION = "v3"
SCOPES = ['https://www.googleapis.com/auth/calendar']
service = Create_Service(CLIENT_SECRET_FILE, API_NAME, API_VERSION, SCOPES)

Authorize google api requests to list users

I want to get the list of users via the Google API but I am facing an issue.
My steps are:
Created a service account with domain-wide delegation and listed the scopes (the same as in the script)
Downloaded the json file with the private key
On execution of the next script
import google.auth
import google.auth.transport.requests
from google.oauth2 import service_account
import requests

credentials = service_account.Credentials.from_service_account_file('key.json', scopes=[
    'https://www.googleapis.com/auth/admin.directory.user',
    'https://www.googleapis.com/auth/admin.directory.group',
    'https://www.googleapis.com/auth/admin.directory.group.member',
    'https://www.googleapis.com/auth/admin.directory.user.security',
    'https://www.googleapis.com/auth/admin.directory.user.readonly'
])

auth_req = google.auth.transport.requests.Request()
credentials.refresh(auth_req)

response = requests.get('https://www.googleapis.com/admin/directory/v1/users?domain=domain.com',
                        headers={'Authorization': f'Bearer {credentials.token}'})
Response is:
{
  "error": {
    "code": 403,
    "message": "Not Authorized to access this resource/api",
    "errors": [
      {
        "message": "Not Authorized to access this resource/api",
        "domain": "global",
        "reason": "forbidden"
      }
    ]
  }
}
I ended up with the following solution:
console.cloud.google.com => APIs & Services > Credentials
Create Credentials > OAuth Client ID > Desktop App
Saved the key as 'credentials.json'
The rest is described here
from __future__ import print_function

import os.path

from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials

# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/admin.directory.user']


def main():
    """Shows basic usage of the Admin SDK Directory API.
    Prints the emails and names of the first 10 users in the domain.
    """
    creds = None
    # The file token.json stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.json'):
        creds = Credentials.from_authorized_user_file('token.json', SCOPES)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            print('Refreshing token')
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'python_admin.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.json', 'w') as token:
            token.write(creds.to_json())

    service = build('admin', 'directory_v1', credentials=creds)

    # Call the Admin SDK Directory API
    print('Getting the first 10 users in the domain')
    service_users = service.users()
    results = service_users.list(customer='my_customer', maxResults=10, orderBy='email').execute()
    users = results.get('users', [])
    # service_users.insert()  # leftover call with no arguments; not needed for listing users

    if not users:
        print('No users in the domain.')
    else:
        print('Users:')
        for user in users:
            print(u'{0} ({1})'.format(user['primaryEmail'],
                                      user['name']['fullName']))


if __name__ == '__main__':
    main()
The first time I had to authorize the request in the browser; after that, the access and refresh tokens could be used for subsequent requests. Apart from the manual authorization the first time, it works fine.
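For reference, the original service-account approach usually returns the 403 above when the credentials do not impersonate a Workspace admin. A minimal sketch of that alternative, assuming domain-wide delegation is actually configured for the listed scopes and using a hypothetical admin address:

from google.oauth2 import service_account
from googleapiclient.discovery import build

SCOPES = ['https://www.googleapis.com/auth/admin.directory.user.readonly']

base_creds = service_account.Credentials.from_service_account_file('key.json', scopes=SCOPES)
# Impersonate a domain admin; 'admin@domain.com' is a placeholder.
delegated_creds = base_creds.with_subject('admin@domain.com')

service = build('admin', 'directory_v1', credentials=delegated_creds)
users = service.users().list(customer='my_customer', maxResults=10).execute().get('users', [])
for user in users:
    print(user['primaryEmail'])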

Is there any specific way to filter Google Drive changes?

I started working with the Google Drive Changes API (https://developers.google.com/drive/api/v3/reference/changes). While my code can track all the changes happening in My Drive, I cannot seem to filter out only specific changes like the creation of new files. Whenever I upload a file to a folder, two changes occur: one for the change to the file and one for the change to the folder.
My code so far is as follows:
from __future__ import print_function

import pickle
import os.path
import time

from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import schedule

# If modifying these scopes, delete the file token.pickle.
SCOPES = [
    'https://www.googleapis.com/auth/drive',
    'https://www.googleapis.com/auth/drive.file',
    'https://www.googleapis.com/auth/drive.readonly',
    'https://www.googleapis.com/auth/drive.metadata.readonly',
    'https://www.googleapis.com/auth/drive.appdata',
    'https://www.googleapis.com/auth/drive.metadata',
    'https://www.googleapis.com/auth/drive.photos.readonly',
]


def job():
    print("Inside job()")
    global saved_start_page_token
    page_token = saved_start_page_token
    while page_token is not None:
        response = service.changes().list(pageToken=page_token, spaces='drive', restrictToMyDrive=True).execute()
        for change in response.get('changes'):
            # Process change
            file_id = change.get('fileId')
            print('Change found for file: %s' % file_id)
            print('Change type: %s' % change.get('changeType'))
            print('Filename: %s' % change.get('file').get('name'))
        if 'newStartPageToken' in response:
            # Last page, save this token for the next polling interval
            saved_start_page_token = response.get('newStartPageToken')
        page_token = response.get('nextPageToken')


def main():
    creds = None
    pickle_file = 'token.pickle'
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists(pickle_file):
        with open(pickle_file, 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open(pickle_file, 'wb') as token:
            pickle.dump(creds, token)

    global service
    service = build('drive', 'v3', credentials=creds)

    # Call the Drive v3 API
    response = service.changes().getStartPageToken().execute()
    global saved_start_page_token
    saved_start_page_token = response.get('startPageToken')

    schedule.every().minute.do(job)
    while True:
        schedule.run_pending()
        time.sleep(1)


if __name__ == '__main__':
    main()
Can I filter out changes to only show file/folder creation changes?
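The changes feed itself does not distinguish "created" from "modified", so one possible workaround is to fetch a little file metadata with each change and filter client-side. The sketch below skips folder entries and treats a file as newly created when its createdTime falls after the previous poll; is_new_file and last_poll_time are hypothetical names, and the time-comparison heuristic is an assumption, not an official filter:

import datetime


def is_new_file(change, last_poll_time):
    """Rough heuristic: keep non-folder changes whose file was created after last_poll_time (a naive UTC datetime)."""
    file = change.get('file') or {}
    # Skip folder entries so an upload does not also report its parent folder.
    if file.get('mimeType') == 'application/vnd.google-apps.folder':
        return False
    created = file.get('createdTime')  # RFC 3339, e.g. '2022-08-15T13:05:53.162Z'
    if not created:
        return False
    created_dt = datetime.datetime.strptime(created, '%Y-%m-%dT%H:%M:%S.%fZ')
    return created_dt >= last_poll_time

For createdTime to be present you would need to request it explicitly, for example by adding fields='newStartPageToken, nextPageToken, changes(fileId, file(name, mimeType, createdTime))' to the changes().list() call in job().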
