I am trying to create a custom management command that fetches data from an API. I wrote this code:
from django.core.management.base import BaseCommand, CommandError
from data.models import Country
import requests
import json

def extracting():
    country_req = requests.get("https://api-football-v1.p.rapidapi.com/countries", headers={"X-RapidAPI-Key": "my_token"})
    parsed_string = json.loads(country_req.text)

class Command(BaseCommand):
    def handle(self, **options):
        print(extracting())
But when I execute it with python manage.py extract in my console, I see None, whereas when I run the same code in a plain Python console I see the data I'm trying to fetch.
Any ideas?
You do not return anything from the extracting() function. Depending on your interactive console, you might see the values of variables anyway. But what you probably want to do is:
def extracting():
    country_req = requests.get("https://api-football-v1.p.rapidapi.com/countries", headers={"X-RapidAPI-Key": "my_token"})
    return json.loads(country_req.text)
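As a side note, requests can decode JSON responses itself, which makes the json import unnecessary. A minimal sketch of the same function using that shortcut:

def extracting():
    country_req = requests.get(
        "https://api-football-v1.p.rapidapi.com/countries",
        headers={"X-RapidAPI-Key": "my_token"})
    # .json() parses the response body for us
    return country_req.json()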
So I have this code, which works great for reading messages out of predefined topics and printing them to the screen. The rosbags come with a rosbag_name.db3 (sqlite) database and a metadata.yaml file.
from rosbags.rosbag2 import Reader as ROS2Reader
import sqlite3
from rosbags.serde import deserialize_cdr
import matplotlib.pyplot as plt
import os
import collections
import argparse

parser = argparse.ArgumentParser(description='Extract images from rosbag.')
# input will be the folder containing the .db3 and metadata.yaml file
parser.add_argument('--input', '-i', type=str, help='rosbag input location')
# run with python filename.py -i rosbag_dir/
args = parser.parse_args()

rosbag_dir = args.input
topic = "/topic/name"
frame_counter = 0

with ROS2Reader(rosbag_dir) as ros2_reader:
    ros2_conns = [x for x in ros2_reader.connections]
    # This prints a list of all topic names for sanity
    print([x.topic for x in ros2_conns])
    ros2_messages = ros2_reader.messages(connections=ros2_conns)

    for m, msg in enumerate(ros2_messages):
        (connection, timestamp, rawdata) = msg
        if (connection.topic == topic):
            print(connection.topic)          # shows topic
            print(connection.msgtype)        # shows message type
            print(type(connection.msgtype))  # shows it's of type string
            # TODO
            # this is where things crash when it's a custom message type
            data = deserialize_cdr(rawdata, connection.msgtype)
            print(data)
The issue is that I can't seem to figure out how to read in custom message types. deserialize_cdr takes a string for the message type field, but it's not clear to me how to replace this with a path or how to otherwise pass in a custom message.
Thanks
One approach is to declare the message definition as a string and register it with the type system:
from rosbags.typesys import get_types_from_msg, register_types

MY_CUSTOM_MSG = """
std_msgs/Header header
string foo
"""

register_types(get_types_from_msg(
    MY_CUSTOM_MSG, 'my_custom_msgs/msg/MyCustomMsg'))

from rosbags.typesys.types import my_custom_msgs__msg__MyCustomMsg as MyCustomMsg
Next, using:
msg_type = MyCustomMsg.__msgtype__
you can get the message type that you can pass to deserialize_cdr.
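Putting it together with the read loop from the question, the call would look like this (a minimal sketch, assuming the registered definition matches what was recorded in the bag):

# after register_types(...), the custom type can be resolved by name
data = deserialize_cdr(rawdata, MyCustomMsg.__msgtype__)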
Also, see here for a quick example.
Another approach is to load it directly from the message definition file.
Essentially, you would need to read the definition
from pathlib import Path
custom_msg_path = Path('/path/to/my_custom_msgs/msg/MyCustomMsg.msg')
msg_def = custom_msg_path.read_text(encoding='utf-8')
and then follow the same steps as above starting with get_types_from_msg().
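In other words (a short sketch, reusing the my_custom_msgs naming from above):

register_types(get_types_from_msg(
    msg_def, 'my_custom_msgs/msg/MyCustomMsg'))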
A more detailed example of this approach is given here.
I'm using Google Cloud Composer as my Airflow environment. When I try to use Jinja in my HQL code, it is not translated correctly.
I know that the HiveOperator has a Jinja translator, as I'm used to it, but the DataProcHiveOperator doesn't.
I've tried to use HiveConf directly in my HQL files, but when setting those values in my partition (i.e. INSERT INTO TABLE abc PARTITION (ds = ${hiveconf:ds})), it doesn't work.
I have also added the following to my HQL file:
SET ds=to_date(current_timestamp());
SET hive.exec.dynamic.partition=true;
SET hive.exec.dynamic.partition.mode=nonstrict;
But it didn't work, as Hive turns the expression above into a STRING.
So my idea was to combine both operators to get the Jinja translator working, but when I do that, I get the following error: ERROR - submit() takes from 3 to 4 positional arguments but 5 were given.
I'm not very familiar with Python, so any help would be great. See the code below for the operator I'm trying to build.
Header of the Python file (please note that the file contains other operators not mentioned in this question):
import ntpath
import os
import re
import time
import uuid
from datetime import timedelta
from airflow.contrib.hooks.gcp_dataproc_hook import DataProcHook
from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.version import version
from googleapiclient.errors import HttpError
from airflow.utils import timezone
from airflow.utils.operator_helpers import context_to_airflow_vars
Modified DataProcHiveOperator:
class DataProcHiveOperator(BaseOperator):
    template_fields = ['query', 'variables', 'job_name', 'cluster_name', 'dataproc_jars']
    template_ext = ('.q',)
    ui_color = '#0273d4'

    @apply_defaults
    def __init__(
            self,
            query=None,
            query_uri=None,
            hiveconfs=None,
            hiveconf_jinja_translate=False,
            variables=None,
            job_name='{{task.task_id}}_{{ds_nodash}}',
            cluster_name='cluster-1',
            dataproc_hive_properties=None,
            dataproc_hive_jars=None,
            gcp_conn_id='google_cloud_default',
            delegate_to=None,
            region='global',
            job_error_states=['ERROR'],
            *args,
            **kwargs):
        super(DataProcHiveOperator, self).__init__(*args, **kwargs)
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.query = query
        self.query_uri = query_uri
        self.hiveconfs = hiveconfs or {}
        self.hiveconf_jinja_translate = hiveconf_jinja_translate
        self.variables = variables
        self.job_name = job_name
        self.cluster_name = cluster_name
        self.dataproc_properties = dataproc_hive_properties
        self.dataproc_jars = dataproc_hive_jars
        self.region = region
        self.job_error_states = job_error_states

    def prepare_template(self):
        if self.hiveconf_jinja_translate:
            self.query_uri = re.sub(
                r"(\$\{(hiveconf:)?([ a-zA-Z0-9_]*)\})", r"{{ \g<3> }}", self.query_uri)

    def execute(self, context):
        hook = DataProcHook(gcp_conn_id=self.gcp_conn_id,
                            delegate_to=self.delegate_to)
        job = hook.create_job_template(self.task_id, self.cluster_name, "hiveJob",
                                       self.dataproc_properties)

        if self.query is None:
            job.add_query_uri(self.query_uri)
        else:
            job.add_query(self.query)

        if self.hiveconf_jinja_translate:
            self.hiveconfs = context_to_airflow_vars(context)
        else:
            self.hiveconfs.update(context_to_airflow_vars(context))

        job.add_variables(self.variables)
        job.add_jar_file_uris(self.dataproc_jars)
        job.set_job_name(self.job_name)

        job_to_submit = job.build()
        self.dataproc_job_id = job_to_submit["job"]["reference"]["jobId"]

        hook.submit(hook.project_id, job_to_submit, self.region, self.job_error_states)
I would like to be able to use Jinja templating inside my HQL code to allow partition automation on my data pipeline.
P.S.: I'll use the Jinja templating mostly for the partition datestamp.
Does anyone know what this error message means and how I can solve it?
ERROR - submit() takes from 3 to 4 positional arguments but 5 were given
Thank you!
It is because of the 5th argument, job_error_states, which only exists in master and not in the current stable release (1.10.1).
Source Code for 1.10.1 -> https://github.com/apache/incubator-airflow/blob/76a5fc4d2eb3c214ca25406f03b4a0c5d7250f71/airflow/contrib/hooks/gcp_dataproc_hook.py#L219
So remove that parameter and it should work.
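Concretely, the last line of execute() in your operator would become (everything else unchanged; in 1.10.1, submit() takes no error-states argument):

hook.submit(hook.project_id, job_to_submit, self.region)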
I created the following custom management command following this tutorial.
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from topspots.models import Notification

class Command(BaseCommand):
    help = 'Sends message to all users'

    def add_arguments(self, parser):
        parser.add_argument('message', nargs='?')

    def handle(self, *args, **options):
        message = options['message']
        users = User.objects.all()
        for user in users:
            Notification.objects.create(message=message, recipient=user)
        self.stdout.write(
            self.style.SUCCESS(
                'Message:\n\n%s\n\nsent to %d users' % (message, len(users))
            )
        )
It works exactly as I want it to, but I would like to add a confirmation step so that before the for user in users: loop you are asked if you really want to send message X to N users, and the command is aborted if you choose "no".
I assume this can be easily done, because some of the built-in management commands do it, but the tutorial doesn't seem to cover this, and even after some searching and looking at the source of the built-in management commands, I have not been able to figure it out on my own.
You can use Python's raw_input/input function. Here's an example method from Django's source code:
from django.utils.six.moves import input

def boolean_input(question, default=None):
    result = input("%s " % question)
    if not result and default is not None:
        return default
    while len(result) < 1 or result[0].lower() not in "yn":
        result = input("Please answer yes or no: ")
    return result[0].lower() == "y"
Be sure to use the import from django.utils.six.moves if your code should be compatible with both Python 2 and 3, or use raw_input() if you're on Python 2; input() on Python 2 evaluates the input rather than converting it to a string.
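A minimal sketch of how this could be wired into the handle() method from the question (the prompt wording and the early return are my own additions, not from Django):

def handle(self, *args, **options):
    message = options['message']
    users = User.objects.all()
    # confirm before sending anything
    question = 'Send this message to %d users? (y/n)' % len(users)
    if not boolean_input(question, default=False):
        self.stdout.write('Aborted, no messages sent.')
        return
    for user in users:
        Notification.objects.create(message=message, recipient=user)
    self.stdout.write(self.style.SUCCESS('Message sent to %d users' % len(users)))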
I'm building a bottle.py app that grabs some data from MongoDB and renders it into a web page using pygal.
The code produces an Error: 500 Internal Server Error in my browser.
On the server, I see: Exception: TypeError('serve_static() takes exactly 1 argument (0 given)',).
My question: how do I correct the code to render the .svg file?
The code:
import sys
import bottle
from bottle import get, post, request, route, run, static_file
import pymongo
import json
import pygal

connection = pymongo.MongoClient("mongodb://localhost", safe=True)

@get('/chart')
def serve_static(chart):
    db = connection.control
    chart = db.chart
    cursor = chart.find({}, {"num": 1, "x": 1, "_id": 0})
    data = []
    for doc in cursor:
        data.append(doc)
    list = [int(i.get('x')) for i in data]

    line = pygal.Line()
    line.title = 'widget quality'
    line.x_labels = map(str, range(1, 20))
    line.add('quality measure', list)
    line.render_to_file('chart.svg')

    try:
        return static_file(chart.svg, root='/home/johnk/Desktop/chart/', mimetype='image/svg+xml')
    except:
        return "<p>Yikes! Somethin' wrong!</p>"

bottle.debug(True)
bottle.run(host='localhost', port=8080)
You didn't give a parameter to the route, so the function doesn't get any.
What you probably want to do is either:

@get('/<chart>')
def serve_static(chart):
    ...

if you want /myfile.svg to work, or:

@get('/chart/<chart>')
def serve_static(chart):
    ...

if you want /chart/myfile.svg to work.
If you just want to serve the same SVG file every time, you can leave the parameter off entirely:

@get('/chart')
def serve_static():
    ...
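For that last case, a complete handler might look like this (a minimal sketch, assuming the root directory from the question; note the quoted 'chart.svg' filename, where the original code passed the unquoted chart.svg):

@get('/chart')
def serve_static():
    # serve the pre-rendered SVG from disk
    return static_file('chart.svg', root='/home/johnk/Desktop/chart/',
                       mimetype='image/svg+xml')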
I want to make a search box on my website, where the output for a given search input is generated by a particular file mapped on the server.
For example: fib 10
should run fib.py, and the response should be the result of fib.py.
You could use the subprocess module to run the command and capture its output.
Update 1
Example view:
import subprocess
import sys

from django.http import HttpResponse

def runCmd(request):
    cmd = request.POST.get('cmd')
    param = request.POST.get('param')
    codeDir = '/path/to/py/file/'
    absoluteCodePath = codeDir + cmd + '.py'
    # run the script with the current interpreter and capture its stdout
    result = subprocess.check_output([sys.executable, absoluteCodePath, param])
    return HttpResponse(result)
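One caveat worth adding: cmd comes straight from user input and is interpolated into a filesystem path, so it's safer to check it against a fixed whitelist before running anything. A hypothetical guard to add at the top of the view, before building the path:

ALLOWED_CMDS = {'fib'}  # hypothetical whitelist of runnable scripts

if cmd not in ALLOWED_CMDS:
    return HttpResponse('unknown command', status=400)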
Why would you want to do that? Just import your file/module and use it; this is the correct way.
Let's say you have this code in fib.py:
def calculate(n):
    # for example: compute the n-th Fibonacci number iteratively
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
Now in you other file, lets say it's views.py just do:
import fib
# this is the search view
def search(request):
if request.method == "POST":
# other code
fib.calculate(param) # where param is the value of the search field
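With the example calculate() above, fib.calculate(10) would return 55.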
I'm not sure if I understood you correctly, but no, there's no reason to use subprocess to execute a file and get the value returned.