Python, GraphQL Mutation and Django API

I am working on a project that takes signups from a Django form and transfers them to a website. The info is mainly personal data (name, surname, email, ...) plus some extra information (tags).
One of the scripts gives me the following error in the cronjob_logs:
Traceback (most recent call last):
  File "/usr/local/lib/python2.7/dist-packages/django_cron/management/commands/runcrons.py", line 71, in run_cron_with_cache_check
    manager.run(force)
  File "/usr/local/lib/python2.7/dist-packages/django_cron/__init__.py", line 215, in run
    self.msg = self.cron_job.do()
  File "/home/django/django_project/ogx/cron.py", line 31, in do
    ep_id = get_ep_id(ep.email)
  File "/home/django/django_project/ogx/graph_helper.py", line 75, in get_ep_id
    ''', {"query": email})
  File "/usr/local/lib/python2.7/dist-packages/graphqlclient/client.py", line 11, in execute
    return self._send(query, variables)
  File "/usr/local/lib/python2.7/dist-packages/graphqlclient/client.py", line 34, in _send
    raise e
HTTPError: HTTP Error 500: Internal Server Error
The script was working normally some time ago. As for graph_helper.py, it is as follows:
def get_ep_id(email):
    client = GraphQLClient(
        '*this part I took off for confidentiality*')
    result = client.execute('''
        query Myquery($query: String!){
          allPeople(q: $query)
          {
            data
            {
              id
              full_name
            }
          }
        }
    ''', {"query": email})
    data = json.loads(result)
    if len(data['data']['allPeople']['data']) > 0:
        return data['data']['allPeople']['data'][0]['id']
    else:
        return None
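Since graphqlclient re-raises the bare HTTPError, the server's actual GraphQL error message stays hidden behind "HTTP Error 500". A minimal debugging sketch (my addition, not part of the original script) that reads the error body via the .read() method urllib/urllib2 HTTPError objects expose:

import json
import logging

def get_ep_id_debug(email):
    # Same query as get_ep_id, but the server's error body is logged on failure.
    client = GraphQLClient('*endpoint kept confidential*')
    try:
        result = client.execute('''
            query Myquery($query: String!){
              allPeople(q: $query) { data { id full_name } }
            }
        ''', {"query": email})
    except Exception as e:
        # HTTPError instances carry the response body, which usually contains
        # the GraphQL error explaining the 500.
        body = e.read() if hasattr(e, "read") else str(e)
        logging.error("GraphQL request failed for %s: %s", email, body)
        return None
    people = json.loads(result)['data']['allPeople']['data']
    return people[0]['id'] if people else None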
The cron.py in question is the following:
class FetchEPsIDs(CronJobBase):
    RUN_EVERY_MINS = 30
    schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
    code = 'ogx.FetchEPsIDs'  # a unique code

    def do(self):
        eps_query = mc_ogx_app.objects.filter(ep_id__isnull=True)
        for ep in eps_query:
            ep_id = get_ep_id(ep.email)
            ep.ep_id = ep_id
            ep.save()
As for the second script: it is meant to update data called tags, taken from the form and sent to the website through the API. The script itself executes with no errors, but it does not do what it is supposed to. Here is the cron:
class UpdateEpsTags(CronJobBase):
    RUN_EVERY_MINS = 30
    schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
    code = 'ogx.UpdateEpsTags'  # a unique code

    def do(self):
        access_token = 'taken out for confidentiality'
        eps_query = mc_ogx_app.objects.filter(ep_id__isnull=False, tags_uploaded=False)
        for ep in eps_query:
            if len(str(ep.ep_id)) >= 2:
                tags_list = []
                if ep.country_pref is not None:
                    tags_list.append(ep.country_pref.tag_id)
                if ep.career_pref is not None:
                    tags_list.append(ep.career_pref.tag_id)
                first_tags_list = return_user_tag_list(ep)
                tags_list = tags_list + first_tags_list
                if ep.product_ogv:
                    tags_list.append([7])
                if ep.product_oge:
                    tags_list.append([9])
                if ep.product_ogt:
                    tags_list.append([8])
                try:
                    update_ep_tags(int(ep.ep_id), tags_list, access_token, ep.chosen_ref)
                    ep.tags_uploaded = True
                    ep.save()
                except:
                    ep.save()
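One side note: the bare except swallows any API failure, so the cron can appear to "execute properly" while nothing is updated. A hedged sketch of the same logic with logging added (treat it as a suggestion, not the project's actual code; update_ep_tags is the function from the question):

import logging

def upload_tags_for(ep, tags_list, access_token):
    """Same calls as the cron's try block, but failures are logged."""
    try:
        update_ep_tags(int(ep.ep_id), tags_list, access_token, ep.chosen_ref)
        ep.tags_uploaded = True
    except Exception:
        # Record in the cron logs why the tags were not uploaded
        # instead of failing silently.
        logging.exception("update_ep_tags failed for ep_id=%s", ep.ep_id)
    finally:
        ep.save()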
Now for the GraphQL query in the script; it goes as follows:
def update_ep_tags(person_id, tags_list, token, referral):
    client = GraphQLClient(
        'taken out for confidentiality')
    result = client.execute('''
        mutation Mymutation($persons: [Int]!, $tags: [Int]!, $id: ID!, $referral: String!){
          bulkTagUpdateForPeople(person_ids: $persons, tag_list_ids: $tags)
          {
            id
            full_name
            tag_lists
            {
              id
              name
            }
          }
          updatePerson(id: $id, person:
          {
            referral_type: $referral
          })
          {
            full_name
            referral_type
            programmes
            {
              short_name
            }
          }
        }
    ''', {"persons": [person_id], "tags": tags_list, "id": person_id, "referral": referral})
    return result
Now, executing the query on GraphQL, I get: Nullability mismatch on variable $id and argument id (ID / ID!).

I tried checking the data types and also ran the query in Insomnia to check whether it works; it works just fine there, so I do not see where the error is coming from.
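For what it is worth, that message usually points at the variable declaration and the schema's argument type disagreeing on nullability. A hedged thing to try (purely a guess, since the schema is not shown here) is declaring $id with the same nullability the schema uses for updatePerson's id argument, isolating that mutation to test it:

# Hypothetical: if the schema declares updatePerson(id: ID), drop the "!" from
# the variable declaration; the reverse applies if the argument is ID!.
# client, person_id and referral are assumed to be the ones from update_ep_tags.
result = client.execute('''
    mutation UpdateReferral($id: ID, $referral: String!){
      updatePerson(id: $id, person: { referral_type: $referral })
      {
        full_name
        referral_type
      }
    }
''', {"id": person_id, "referral": referral})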

Solved:
What I am basically trying to do is look the person up by email; the email is no longer supported by the API, and therefore the query just gives a runtime error.

ValueError: not enough values to unpack while running unit tests Django ModelViewSet

I am testing an endpoint that retrieves data using a ModelViewSet, passing a param via the URL to get data, but I am getting this error when I run the unit tests:
File "/Users/lutaayaidris/Documents/workspace/project_sample/project_sample/financing_settings/tests.py", line 195, in test_get_blocks
self.block_get_data), content_type='application/json')
File "/Users/lutaayaidris/Documents/workspace/project_sample/lib/python3.6/site-packages/rest_framework/test.py", line 286, in get
response = super().get(path, data=data, **extra)
File "/Users/lutaayaidris/Documents/workspace/project_sample/lib/python3.6/site-packages/rest_framework/test.py", line 194, in get
'QUERY_STRING': urlencode(data or {}, doseq=True),
File "/Users/lutaayaidris/Documents/workspace/project_sample/lib/python3.6/site-packages/django/utils/http.py", line 93, in urlencode
for key, value in query:
ValueError: not enough values to unpack (expected 2, got 1)
This is how I have structured my tests, plus some dummy data for testing:
class TemplateData:
    """Template Mock data."""

    step_get_data = {
        "param": "step"
    }
    block_get_data = {
        "param": "block"
    }
    get_no_data = {
        "param_": "block"
    }


class TemplateViewTests(TestCase, TemplateData):
    """Template Tests (Block & Step)."""

    def setUp(self):
        """Initialize client, Step and Block id and data created."""
        self.client = APIClient()
        self.block_id = 0
        self.step_id = 0
        self.create_block_step_data()

    def create_block_step_data(self):
        """Create ProcessVersion, Step, & Block mock data."""
        self.process_version = ProcessVersion.objects.create(
            tag="TESTING_TAG",
            is_process_template=False,
            status="IN EDITING",
            attr_map="TESTING_ATTR",
            loan_options=None
        )
        self.step = Step.objects.create(
            version=self.process_version,
            is_process_template=True,
            title="TESTING",
            help_text="TESTING",
            order=1,
            slug="slug",
            can_be_duplicated=False,
            max_duplicated_number=2,
        )
        self.step_id = self.step.pk
        self.block_id = Block.objects.create(
            step=self.step,
            is_process_template=True,
            title="TESTING",
            information_text="This is testing information",
            order=1,
            depending_field="depending_field",
            visibility_value="visibility_value",
            slug="slug",
            can_be_duplicated=False,
            max_duplicated_number=2,
        ).pk
        self.process_version_1 = ProcessVersion.objects.create(
            tag="TESTING_TAG",
            is_process_template=False,
            status="IN EDITING",
            attr_map="TESTING_ATTR",
            loan_options=None
        )
        self.step_1 = Step.objects.create(
            version=self.process_version_1,
            is_process_template=True,
            title="TESTING",
            help_text="TESTING",
            order=1,
            slug="slug",
            can_be_duplicated=False,
            max_duplicated_number=2,
        )
        self.block_1 = Block.objects.create(
            step=self.step,
            is_process_template=True,
            title="TESTING",
            information_text="This is testing information",
            order=1,
            depending_field="depending_field",
            visibility_value="visibility_value",
            slug="slug",
            can_be_duplicated=False,
            max_duplicated_number=2,
        ).pk

    def test_get_blocks(self):
        """Test get list of Block."""
        response = self.client.get(
            "/api/v1/financing-settings/template/",
            data=json.dumps(self.block_get_data),
            content_type='application/json')
        self.assertEqual(response.status_code, 200)

    def test_get_steps(self):
        """Test get list of Step."""
        response = self.client.get(
            "/api/v1/financing-settings/template/",
            data=json.dumps(self.block_get_data),
            content_type='application/json')
        self.assertEqual(response.status_code, 200)

    def test_no_step_or_block(self):
        """Test get no list of Step or Block."""
        response = self.client.get(
            "/api/v1/financing-settings/template/",
            data=json.dumps(self.block_get_data),
            content_type='application/json')
        self.assertEqual(response.status_code, 204)
As you can see above, those are my tests. I have already set up the data; now I want to retrieve it back, but because of the exception above I can't.
Lastly, in my endpoint implementation I used a ViewSet to handle this; below is the code:
class TemplateView(ModelViewSet):
    """ViewSet for Saving Block/ Step template."""

    def list(self, request, *args, **kwargs):
        """Get list of Block/Steps with is_process_template is equal to True."""
        param = request.data['param']
        if param == "block":
            _block = Block.objects.filter(is_process_template=True).values()
            return JsonResponse({"data": list(_block)}, safe=False, status=200)
        elif param == "step":
            _step = Step.objects.filter(is_process_template=True)
            return JsonResponse({"data": list(_step)}, safe=False, status=200)
        return Response(status=status.HTTP_204_NO_CONTENT)
What is causing this? In my understanding, everything should work.
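For completeness, a minimal sketch of the same list view reading the parameter from the query string instead of the request body (GET requests normally carry no body; the names Block, Step, and the imports are assumed to be the ones from the question's module). Note that .values() is used for the step branch as well, since a queryset of model instances is not JSON-serializable on its own:

class TemplateView(ModelViewSet):
    """ViewSet for saving Block/Step templates (query-string variant)."""

    def list(self, request, *args, **kwargs):
        param = request.query_params.get("param")
        if param == "block":
            blocks = Block.objects.filter(is_process_template=True).values()
            return JsonResponse({"data": list(blocks)}, safe=False, status=200)
        elif param == "step":
            steps = Step.objects.filter(is_process_template=True).values()
            return JsonResponse({"data": list(steps)}, safe=False, status=200)
        return Response(status=status.HTTP_204_NO_CONTENT)

With this shape, the tests can simply call self.client.get("/api/v1/financing-settings/template/", {"param": "block"}) and drop json.dumps and the content_type argument.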
The function Client.get expects a dictionary as its data argument and tries to encode it into the URL using the function urlencode. You could do something like this:
from django.test import Client

c = Client()
block_get_data = {
    "param": "block"
}
c.get('path', block_get_data)
block_get_data will be sent in the URL as 'param=block'.
If you want to send JSON-formatted data in a GET request, you can use the Client.generic function as follows:
from django.test import Client
import json

c = Client()
block_get_data = {
    "param": "block"
}
c.generic('GET', 'path', json.dumps(block_get_data), 'application/json')
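The same applies to rest_framework.test.APIClient used in the question's tests (this snippet is my addition, reusing the endpoint path from the question): pass the dict directly and it is urlencoded into the query string, so json.dumps and content_type are not needed.

from rest_framework.test import APIClient

client = APIClient()
response = client.get(
    "/api/v1/financing-settings/template/",  # path taken from the question
    data={"param": "block"},
)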
You are facing this error because this dict
block_get_data = {
    "param": "block"
}
is being used like this:
for key, val in block_get_data
which produces an error like:
for key, val in block_get_data:
ValueError: too many values to unpack (expected 2)
It is solved if you loop through the dict using the .items() method:
for key, val in block_get_data.items():
So passing the parameter as self.block_get_data.items() may solve your problem.
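For reference, a minimal repro of the exact message in the question's traceback (standard library only, no Django needed): json.dumps turns the dict into a string before it reaches urlencode, and iterating a string yields single characters, which cannot be unpacked into a (key, value) pair.

import json

query = json.dumps({"param": "block"})  # '{"param": "block"}' -- a str, not a dict
for key, value in query:                # ValueError: not enough values to unpack (expected 2, got 1)
    pass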

raise NotImplementedError NotImplementedError

I am using PyCharm to write a Python 3 web app project with the Tornado web framework.
The listing service has been built already. I need to build the remaining two components: the user service and the public API layer. The implementation of the listing service can serve as a good starting point for learning how to structure a web application using the Tornado web framework.
I am required to use Tornado's built-in framework for HTTP requests.
The error occurs at listening (app.listen(options.port)) when I try to run the program:
Traceback (most recent call last):
  File "D:/Bill/python/Tornado/99-python-exercise-master/listing_service.py", line 203, in <module>
    app.listen(options.port)
  File "C:\Program Files\Python38\lib\site-packages\tornado\web.py", line 2116, in listen
    server.listen(port, address)
  File "C:\Program Files\Python38\lib\site-packages\tornado\tcpserver.py", line 152, in listen
    self.add_sockets(sockets)
  File "C:\Program Files\Python38\lib\site-packages\tornado\tcpserver.py", line 165, in add_sockets
    self._handlers[sock.fileno()] = add_accept_handler(
  File "C:\Program Files\Python38\lib\site-packages\tornado\netutil.py", line 279, in add_accept_handler
    io_loop.add_handler(sock, accept_handler, IOLoop.READ)
  File "C:\Program Files\Python38\lib\site-packages\tornado\platform\asyncio.py", line 100, in add_handler
    self.asyncio_loop.add_reader(fd, self._handle_events, fd, IOLoop.READ)
  File "C:\Program Files\Python38\lib\asyncio\events.py", line 501, in add_reader
    raise NotImplementedError
NotImplementedError
code:
import tornado.ioloop
import tornado.web
import tornado.log
import tornado.options
import sqlite3
import logging
import json
import time


class App(tornado.web.Application):
    def __init__(self, handlers, **kwargs):
        super().__init__(handlers, **kwargs)
        # Initialising db connection
        self.db = sqlite3.connect("listings.db")
        self.db.row_factory = sqlite3.Row
        self.init_db()

    def init_db(self):
        cursor = self.db.cursor()
        # Create table
        cursor.execute(
            "CREATE TABLE IF NOT EXISTS 'listings' ("
            + "id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
            + "user_id INTEGER NOT NULL,"
            + "listing_type TEXT NOT NULL,"
            + "price INTEGER NOT NULL,"
            + "created_at INTEGER NOT NULL,"
            + "updated_at INTEGER NOT NULL"
            + ");"
        )
        self.db.commit()


class BaseHandler(tornado.web.RequestHandler):
    def write_json(self, obj, status_code=200):
        self.set_header("Content-Type", "application/json")
        self.set_status(status_code)
        self.write(json.dumps(obj))


# /listings
class ListingsHandler(BaseHandler):
    #tornado.gen.coroutine
    def get(self):
        # Parsing pagination params
        page_num = self.get_argument("page_num", 1)
        page_size = self.get_argument("page_size", 10)
        try:
            page_num = int(page_num)
        except:
            logging.exception("Error while parsing page_num: {}".format(page_num))
            self.write_json({"result": False, "errors": "invalid page_num"}, status_code=400)
            return

        try:
            page_size = int(page_size)
        except:
            logging.exception("Error while parsing page_size: {}".format(page_size))
            self.write_json({"result": False, "errors": "invalid page_size"}, status_code=400)
            return

        # Parsing user_id param
        user_id = self.get_argument("user_id", None)
        if user_id is not None:
            try:
                user_id = int(user_id)
            except:
                self.write_json({"result": False, "errors": "invalid user_id"}, status_code=400)
                return

        # Building select statement
        select_stmt = "SELECT * FROM listings"
        # Adding user_id filter clause if param is specified
        if user_id is not None:
            select_stmt += " WHERE user_id=?"

        # Order by and pagination
        limit = page_size
        offset = (page_num - 1) * page_size
        select_stmt += " ORDER BY created_at DESC LIMIT ? OFFSET ?"

        # Fetching listings from db
        if user_id is not None:
            args = (user_id, limit, offset)
        else:
            args = (limit, offset)
        cursor = self.application.db.cursor()
        results = cursor.execute(select_stmt, args)

        listings = []
        for row in results:
            fields = ["id", "user_id", "listing_type", "price", "created_at", "updated_at"]
            listing = {
                field: row[field] for field in fields
            }
            listings.append(listing)

        self.write_json({"result": True, "listings": listings})

    #tornado.gen.coroutine
    def post(self):
        # Collecting required params
        user_id = self.get_argument("user_id")
        listing_type = self.get_argument("listing_type")
        price = self.get_argument("price")

        # Validating inputs
        errors = []
        user_id_val = self._validate_user_id(user_id, errors)
        listing_type_val = self._validate_listing_type(listing_type, errors)
        price_val = self._validate_price(price, errors)
        time_now = int(time.time() * 1e6)  # Converting current time to microseconds

        # End if we have any validation errors
        if len(errors) > 0:
            self.write_json({"result": False, "errors": errors}, status_code=400)
            return

        # Proceed to store the listing in our db
        cursor = self.application.db.cursor()
        cursor.execute(
            "INSERT INTO 'listings' "
            + "('user_id', 'listing_type', 'price', 'created_at', 'updated_at') "
            + "VALUES (?, ?, ?, ?, ?)",
            (user_id_val, listing_type_val, price_val, time_now, time_now)
        )
        self.application.db.commit()

        # Error out if we fail to retrieve the newly created listing
        if cursor.lastrowid is None:
            self.write_json({"result": False, "errors": ["Error while adding listing to db"]}, status_code=500)
            return

        listing = dict(
            id=cursor.lastrowid,
            user_id=user_id_val,
            listing_type=listing_type_val,
            price=price_val,
            created_at=time_now,
            updated_at=time_now
        )
        self.write_json({"result": True, "listing": listing})

    def _validate_user_id(self, user_id, errors):
        try:
            user_id = int(user_id)
            return user_id
        except Exception as e:
            logging.exception("Error while converting user_id to int: {}".format(user_id))
            errors.append("invalid user_id")
            return None

    def _validate_listing_type(self, listing_type, errors):
        if listing_type not in {"rent", "sale"}:
            errors.append("invalid listing_type. Supported values: 'rent', 'sale'")
            return None
        else:
            return listing_type

    def _validate_price(self, price, errors):
        # Convert string to int
        try:
            price = int(price)
        except Exception as e:
            logging.exception("Error while converting price to int: {}".format(price))
            errors.append("invalid price. Must be an integer")
            return None

        if price < 1:
            errors.append("price must be greater than 0")
            return None
        else:
            return price


# /listings/ping
class PingHandler(tornado.web.RequestHandler):
    #tornado.gen.coroutine
    def get(self):
        self.write("pong!")


def make_app(options):
    return App([
        (r"/listings/ping", PingHandler),
        (r"/listings", ListingsHandler),
    ], debug=options.debug)


if __name__ == "__main__":
    # Define settings/options for the web app
    # Specify the port number to start the web app on (default value is port 6000)
    tornado.options.define("port", default=6000)
    # Specify whether the app should run in debug mode
    # Debug mode restarts the app automatically on file changes
    tornado.options.define("debug", default=True)

    # Read settings/options from command line
    tornado.options.parse_command_line()

    # Access the settings defined
    options = tornado.options.options

    # Create web app
    app = make_app(options)
    app.listen(options.port)
    logging.info("Starting listing service. PORT: {}, DEBUG: {}".format(options.port, options.debug))

    # Start event loop
    tornado.ioloop.IOLoop.instance().start()
How to fix this problem?
Python 3.8 made a backwards-incompatible change to the asyncio package used by Tornado. Applications that use Tornado on Windows with Python 3.8 must call asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) at the beginning of their main file/function (as documented on the home page of tornadoweb.org).
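A minimal sketch of how that call slots into the script's __main__ block (make_app and the option definitions are the ones from the question; the sys.platform guard is my addition so the script still runs unchanged on other platforms):

import asyncio
import sys

if __name__ == "__main__":
    # Python 3.8+ on Windows defaults to the ProactorEventLoop, which does not
    # implement add_reader(); switch to the selector-based loop before Tornado
    # touches the IOLoop.
    if sys.platform == "win32":
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

    tornado.options.define("port", default=6000)
    tornado.options.define("debug", default=True)
    tornado.options.parse_command_line()
    options = tornado.options.options

    app = make_app(options)
    app.listen(options.port)
    tornado.ioloop.IOLoop.instance().start()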

Initiate and send a message to a Microsoft Teams channel using Bot Framework SDK v4 for Python

I was trying to initiate and send a proactive message to a Microsoft Teams channel with the help of the example below:
https://github.com/microsoft/BotBuilder-Samples/tree/master/samples/python/16.proactive-messages
I added this code to the example in order to initiate a message:
connectorClient = await ADAPTER.create_connector_client(service_url=SERVICE_URL)
parameters = ConversationParameters(
    is_group=True,
    channel_data=CHANNEL_ID,
    activity=Activity(type=ActivityTypes.message,
                      text='Hello World!'),
    bot=ChannelAccount(id=BOT_ID),
    tenant_id=TENANT_ID)
response = await connectorClient.conversations.create_conversation(parameters)
response.send()
But it didn't work; I tried many different ways and none of them worked either. The error is always:
Traceback (most recent call last):
  File "/home/farid/works/16.proactive-messages/venv/lib/python3.7/site-packages/aiohttp/web_protocol.py", line 418, in start
    resp = await task
  File "/home/farid/works/16.proactive-messages/venv/lib/python3.7/site-packages/aiohttp/web_app.py", line 458, in _handle
    resp = await handler(request)
  File "/home/farid/works/16.proactive-messages/app.py", line 103, in notify
    raise exception
  File "/home/farid/works/16.proactive-messages/app.py", line 100, in notify
    await _send_proactive_message()
  File "/home/farid/works/16.proactive-messages/app.py", line 152, in _send_proactive_message
    response = await connectorClient.conversations.create_conversation(parameters)
  File "/home/farid/works/16.proactive-messages/venv/lib/python3.7/site-packages/botframework/connector/aio/operations_async/_conversations_operations_async.py", line 176, in create_conversation
    raise models.ErrorResponseException(self._deserialize, response)
botbuilder.schema._models_py3.ErrorResponseException: (BadSyntax) Incorrect conversation creation parameters
I don't know what my problem is here!
OK, last night Microsoft added a new Python sample which solved this issue:
https://github.com/microsoft/BotBuilder-Samples/tree/master/samples/python/58.teams-start-thread-in-channel
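For context, a hedged sketch loosely following that sample (the exact shape of channel_data is my reading of the sample and should be treated as an assumption; adapter.create_connector_client and conversations.create_conversation are the calls already used in the question):

from botbuilder.schema import Activity, ActivityTypes, ConversationParameters

async def start_thread_in_channel(adapter, service_url, teams_channel_id, text):
    connector_client = await adapter.create_connector_client(service_url)
    params = ConversationParameters(
        is_group=True,
        channel_data={"channel": {"id": teams_channel_id}},
        activity=Activity(type=ActivityTypes.message, text=text),
    )
    # Creating the conversation with an activity posts the first message into
    # the new thread; the response carries the new conversation id, so no
    # separate send() call is needed.
    return await connector_client.conversations.create_conversation(params)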
There's a very good chance I'm totally off base, as I've never tried to read Python before now (I'm a C#/Node guy), but it looks like your ConversationParameters is missing the "Recipient" details (you have the "From", i.e. your bot, specified), which one usually needs to specify for this.
On the off-chance this helps...
Here is sample code in C# using SDK v3:
var userId = userOrChannelId.Trim();
var botId = context.Activity.Recipient.Id;
var botName = context.Activity.Recipient.Name;
var channelData = context.Activity.GetChannelData<TeamsChannelData>();
var connectorClient = new ConnectorClient(new Uri(context.Activity.ServiceUrl));

var parameters = new ConversationParameters
{
    Bot = new ChannelAccount(botId, botName),
    Members = !isChannelMessage ? new ChannelAccount[] { new ChannelAccount(userId) } : null,
    ChannelData = new TeamsChannelData
    {
        Tenant = channelData.Tenant,
        Channel = isChannelMessage ? new ChannelInfo(userId) : null,
        Notification = new NotificationInfo() { Alert = true }
    },
    IsGroup = isChannelMessage
};

var conversationResource = await connectorClient.Conversations.CreateConversationAsync(parameters);

var replyMessage = Activity.CreateMessageActivity();
replyMessage.From = new ChannelAccount(botId, botName);
replyMessage.Conversation = new ConversationAccount(id: conversationResource.Id.ToString());
replyMessage.ChannelData = new TeamsChannelData() { Notification = new NotificationInfo(true) };
replyMessage.Text = messageText;
if (attachment != null)
    replyMessage.Attachments.Add(attachment);

var resourceResponse = await connectorClient.Conversations.SendToConversationAsync(conversationResource.Id, (Activity)replyMessage);

Spotipy User Playlist Remove Tracks Issue

I am trying to use the Spotipy method to delete repeat occurrences of a track (so delete duplicates). But the function doesn't seem to work; the Spotify API call is returning an error that there is no authorization token.
Spotify API Return Error:
{
  "error": {
    "status": 401,
    "message": "No token provided"
  }
}
Python's Errors:
File "C:\Users\Dylan\Documents\PythonProjects\PlaylistTransfer\Spotify.py", line 87, in remove_all_duplicate_tracks
sp.user_playlist_remove_specific_occurrences_of_tracks(username, playlist_id, tracks)
File "C:\Users\Dylan\Documents\PythonProjects\PlaylistTransfer\venv\lib\site-packages\spotipy\client.py", line 539, in user_playlist_remove_specific_occurrences_of_tracks
payload=payload)
File "C:\Users\Dylan\Documents\PythonProjects\PlaylistTransfer\venv\lib\site-packages\spotipy\client.py", line 183, in _delete
return self._internal_call('DELETE', url, payload, kwargs)
File "C:\Users\Dylan\Documents\PythonProjects\PlaylistTransfer\venv\lib\site-packages\spotipy\client.py", line 124, in _internal_call
headers=r.headers)
spotipy.client.SpotifyException: http status: 400, code:-1 - https://api.spotify.com/v1/___________________________/tracks:
Could not remove tracks, please check parameters.
Here is my code:
def remove_all_duplicate_tracks(playlist_id, token):
    sp = spotipy.Spotify(token)
    username = get_username(token)
    existing_tracks = get_track_uris_for_playlist(playlist_id, token)
    duplicate_counter = Counter(existing_tracks)
    tracks = []
    for uri, count in duplicate_counter.items():
        count = count - 1
        if count > 0:
            # hard coded position as 1 for testing...
            positions = [1]
            # positions = [x for x in range(1, count+1)]
            track_dict = {"uri": uri, "positions": positions}
            tracks.append(track_dict)
    sp.user_playlist_remove_specific_occurrences_of_tracks(username, playlist_id, tracks)
This is what "tracks" contains:
[{'uri': '6jq6rcOikCZAmjliAgAmfT', 'positions': [1]}, {'uri': '3tSmXSxaAnU1EPGKa6NytH', 'positions': [1]}, {'uri': '7jeI6EdY0elPSNz80mAKS8', 'positions': [1]}]
I tested the other methods get_username() and get_track_uris_for_playlist and they return what you'd expect and are working.
Although this answer comes quite late, it is needed because 1) the question is not solved and 2) I believe that it will be helpful to people with a similar problem.
First of all, you should restrict your question to the specific problem, which is the authorization error produced by calling the sp.user_playlist_remove_specific_occurrences_of_tracks() function. This would make the problem more clear. (In the way it is put, one has to dig up the code to find the "hot" spot! Also the details about the tracks just add to the confusion.)
So, I will limit my answer to just the problem and suggest using the following code as a basis:
# Data
username = (your username)
playlist_id = (playlist id)                # The ID of the playlist containing the tracks to be deleted
track_ids = [(track_id), (track_id), ...]  # List of track IDs to delete

# Authorization process
scope = "playlist-read-private"
token = spotipy.util.prompt_for_user_token(username, scope=scope)
sp = spotipy.Spotify(auth=token)

# Call the track deletion function
sp.user_playlist_remove_all_occurrences_of_tracks(username, playlist_id, track_ids)
I am using this process myself. I have just tried the above code with data of mine and it should also work for you.
You are trying to change user data:
Could not remove tracks, please check parameters.
Pass a valid scope, such as:
playlist-modify-public
playlist-modify-private
More on scopes: https://developer.spotify.com/documentation/general/guides/scopes/
import spotipy
import spotipy.util as util

scope = 'playlist-modify-public playlist-modify-private'
token = util.prompt_for_user_token(username, scope)

if token:
    sp = spotipy.Spotify(auth=token)
    results = sp.current_user_saved_tracks()
    for item in results['items']:
        track = item['track']
        print(track['name'] + ' - ' + track['artists'][0]['name'])
else:
    print("Can't get token for", username)

Python UrBackup script failing, attempted various other methods

I don't want to be a bother, and I probably shouldn't be trying to manipulate a programming language I know nothing about, but the deployment method this presents is too irresistible not to use. Basically, UrBackup has a script designed to automatically pull a downloader for a specific computer using its WMI %ComputerName% property. The issue I seem to be having is related to the JSON handling no longer being able to authenticate with the server after Python 3.4. I honestly know very little about Python, and if it is too complex for me to fix, or would require more work than necessary, I understand; I just figured it might be a simpler error that someone with a bit of know-how can correct.
import http.client as http
import json
from urllib.parse import urlparse
from urllib.parse import urlencode
from base64 import b64encode
import hashlib
import socket
import shutil
import os

#############################
# Settings. Please edit.
#############################

# Your server URL
server_url = 'intentionallyremoved.com'
server_basic_username = 'intentionallyremoved'
server_basic_password = 'intentionallyremoved'

# user needs following rights
# "settings": "all"
# "status": "some"
# "add_client": "all"
server_username = 'intentionallyremoved'
server_password = 'intentionallyremoved'

#############################
# Global script variables.
# Please do not modify.
#############################

session = ""

def get_response(action, params):
    global server_url
    global server_basic_username
    global server_basic_password
    global session

    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json; charset=UTF-8'
    }

    if ('server_basic_username' in globals() and len(server_basic_username) > 0):
        userAndPass = b64encode(str.encode(server_basic_username + ":" + server_basic_password)).decode("ascii")
        headers['Authorization'] = 'Basic %s' % userAndPass

    curr_server_url = server_url + "?" + urlencode({"a": action})

    if (len(session) > 0):
        params["ses"] = session

    curr_server_url += "&" + urlencode(params)

    target = urlparse(curr_server_url)
    method = 'GET'
    body = ''

    if (target.scheme == 'http'):
        h = http.HTTPConnection(target.hostname, target.port)
    elif (target.scheme == 'https'):
        h = http.HTTPSConnection(target.hostname, target.port)
    else:
        print('Unkown scheme: ' + target.scheme)
        raise Exception("Unkown scheme: " + target.scheme)

    h.request(
        method,
        target.path + "?" + target.query,
        body,
        headers)

    return h.getresponse()

def get_json(action, params={}):
    response = get_response(action, params)

    if (response.status != 200):
        return ""

    data = response.readall()
    response.close()

    return json.loads(data.decode('utf8'))

def download_file(action, outputfn, params):
    response = get_response(action, params)

    if (response.status != 200):
        return False

    with open(outputfn, 'wb') as outputf:
        shutil.copyfileobj(response, outputf)

    return True

def md5(s):
    return hashlib.md5(s.encode()).hexdigest()

print("Logging in...")

salt = get_json("salt", {"username": server_username})

if (not ('ses' in salt)):
    print('Username does not exist')
    exit(1)

session = salt["ses"]

if ('salt' in salt):
    password_md5 = md5(salt["rnd"] + md5(salt["salt"] + server_password))
    login = get_json("login", {"username": server_username,
                               "password": password_md5})

    if ('success' not in login or not login['success']):
        print('Error during login. Password wrong?')
        exit(1)

print("Creating client " + socket.gethostname() + "...")

status = get_json("status", {"clientname": socket.gethostname()})

for client in status["client_downloads"]:
    if (client["name"] == socket.gethostname()):
        print("Downloading Installer...")

        if not download_file("download_client", "Client Installer.exe", {"clientid": client["id"]}):
            print("Downloading client failed")
            exit(1)

        print("Sucessfully downloaded client")
        os.startfile("Client Installer.exe")
        exit(0)

print("Could not find client for download. No permission?")
exit(1)
The error it is outputting looks like this (I've removed applicable server names).
Logging in...
Traceback (most recent call last):
  File "\\server\Share\Shortcuts\BackupScript\Script.py", line 110, in <module>
    salt = get_json("salt", {"username": server_username})
  File "\\server\Share\Shortcuts\BackupScript\Script.py", line 89, in get_json
    return json.loads(data.decode('utf8'))
  File "C:\Anaconda3\lib\json\__init__.py", line 318, in loads
    return _default_decoder.decode(s)
  File "C:\Anaconda3\lib\json\decoder.py", line 343, in decode
    obj, end = self.raw_decode(s, idx=_w(s, 0).end())
  File "C:\Anaconda3\lib\json\decoder.py", line 361, in raw_decode
    raise ValueError(errmsg("Expecting value", s, err.value)) from None
ValueError: Expecting value: line 1 column 1 (char 0)
I've tried various suggestions about using different JSON methods that don't involve the decode call, and using json.dump instead, but they all lead to different errors because I don't know which parts of the code correspond to what I'd be changing. This is really just a quality-of-life tool and isn't absolutely necessary, but it would be comforting to simply turn this into an MSI I could deploy instead of having to go to each of our (over 100) computers one by one and do this manually. Any help would be appreciated :).
Here is the login JavaScript from the back end of the server:
g.login1 = function ()
{
    var username = I('username').value;
    var password = I('password').value;

    if (username.length == 0)
    {
        alert(trans("username_empty"));
        I('username').focus();
        return false;
    }
    if (password.length == 0)
    {
        alert(trans("password_empty"));
        I('password').focus();
        return false;
    }

    if (!startLoading()) return false;

    new getJSON("salt", "username=" + username, login2);
    return false;
}

function login2(data)
{
    if (data.error == 0)
    {
        alert(trans("user_n_exist"));
        stopLoading();
        I('username').focus();
        return;
    }

    if (data.ses)
        g.session = data.ses;

    var username = I('username').value;
    var password = I('password').value;
    var pwmd5 = calcMD5(data.rnd + calcMD5(data.salt + password));

    new getJSON("login", "username=" + username + "&password=" + pwmd5, login3);
}

function login3(data)
{
    stopLoading();

    if (data.error == 2)
    {
        alert(trans("password_wrong"));
        I('password').focus();
        return;
    }

    g.allowed_nav_items = [];

    if (data.status != "none")
    {
        g.allowed_nav_items.push(6);
    }
    if (data.progress != "none")
    {
        g.allowed_nav_items.push(5);
    }
    if (data.browse_backups != "none")
    {
        g.allowed_nav_items.push(4);
    }
    if (data.logs != "none")
    {
        g.allowed_nav_items.push(3);
    }
    if (data.graph != "none")
    {
        g.allowed_nav_items.push(2);
    }
    if (data.settings != "none")
    {
        g.allowed_nav_items.push(1);
    }

    build_main_nav();
    show_status1();
}
Could you try changing line 86 to read
data = response.read()
I don't know what readall() is, but it's not listed in the docs for the HTTPResponse object.
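For reference, a sketch of get_json with that change applied; everything else in the script (get_response, the server settings) is assumed to stay exactly as in the question:

def get_json(action, params={}):
    response = get_response(action, params)
    if response.status != 200:
        return ""
    data = response.read()   # HTTPResponse.read() instead of readall()
    response.close()
    return json.loads(data.decode('utf8'))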
