attendance csv file upload django multiple employees - python

views.py
def Add_Atten(request):
    data = {}
    attendance = Attendance.objects.all()
    if "GET" == request.method:
        return render(request, 'hr/attendance.html', {'attendance': attendance})
    # if not GET, then proceed
    # try:
    #     csv_file = request.FILES["csv_file"]
    #     if not csv_file.name.endswith('.csv'):
    #         messages.error(request, 'File is not CSV type')
    #         return render(request, 'hr/attendance.html', {'attendance': attendance})
    #     # if file is too large, return
    #     if csv_file.multiple_chunks():
    #         messages.error(request, "Uploaded file is too big (%.2f MB)." % (csv_file.size / (1000 * 1000),))
    #         return render(request, 'hr/attendance.html', {'attendance': attendance})
    else:
        added_by = request.POST.get('added_by', '')
        month = request.POST.get('month', '')
        year = request.POST.get('year', '')
        att = request.FILES['csv_file']
        a1 = add_attendace(added_by=added_by, month=month, year=year, file=att)
        a1.save()
        csv_file = request.FILES["csv_file"]
        file_data = csv_file.read().decode("utf-8")
        lines = file_data.split("\n")
        # loop over the lines and save them in db. If error, store as string and then display
        for line in lines:
            fields = line.split(",")
            data_dict = {}
            data_dict["department"] = fields[1]
            data_dict["role"] = fields[2]
            data_dict["one"] = fields[3]
            data_dict["two"] = fields[4]
            data_dict["three"] = fields[5]
            data_dict["four"] = fields[6]
            data_dict["five"] = fields[7]
            data_dict["six"] = fields[8]
            data_dict["seven"] = fields[9]
            data_dict["eight"] = fields[10]
            data_dict["nine"] = fields[11]
            data_dict["ten"] = fields[12]
            data_dict["eleven"] = fields[13]
            data_dict["twelve"] = fields[14]
            data_dict["thirteen"] = fields[15]
            data_dict["fourteen"] = fields[16]
            data_dict["fifteen"] = fields[17]
            data_dict["sixteen"] = fields[18]
            data_dict["seventeen"] = fields[19]
            data_dict["eighteen"] = fields[20]
            data_dict["nineteen"] = fields[21]
            data_dict["twenty"] = fields[22]
            data_dict["twentyone"] = fields[23]
            data_dict["twentytwo"] = fields[24]
            data_dict["twentythree"] = fields[25]
            data_dict["twentyfour"] = fields[26]
            data_dict["twentyfive"] = fields[27]
            data_dict["twentysix"] = fields[28]
            data_dict["twentyseven"] = fields[29]
            data_dict["twentyeight"] = fields[30]
            data_dict["twentynine"] = fields[31]
            data_dict["thirty"] = fields[32]
            data_dict["thirtyone"] = fields[33]
            data_dict["total"] = fields[35]
            data_dict["leaves"] = fields[36]
            data_dict["month"] = fields[37]
            data_dict["employee_name"] = fields[34]
            return HttpResponse(data_dict['total'])
            form = Attendance_form(data_dict)
            if form.is_valid():
                form.save()
    # return render(request, 'hr/index.html', {'attendance': attendance})
    return render(request, 'hr/index.html', {'attendance': attendance})
The above code is a view function that takes a CSV file as input, converts each row into a Python dictionary and saves it to the database. The code was working properly until I added a month field to the model; after adding the month field I updated forms.py, models.py and the CSV file with the month detail.
Now when I upload the CSV file I am getting an error.
file.csv
1,it,manager,0.75,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,NULL,NULL,sunny,august
2,accounts,manager,1,0.5,0.75,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4.25,NULL,abdul,august
3,it,developer,1,0.75,0.5,NULL,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,NULL,0,1,NULL,NULL,mahesh,august
4,it,developer,1,0.75,0.5,NULL,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,NULL,0,1,NULL,NULL,firoz,august
5,it,developer,1,0.5,0.75,NULL,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,NULL,0,0.5,NULL,NULL,narayana,august
Here is the CSV file which I have uploaded. As I said, at first it imported the data, but now I am getting this error.
output:
IndexError at /hrmsapp/Add_Attendance
list index out of range
Request Method: POST
Request URL: http://127.0.0.1:8000/hrmsapp/Add_Attendance
Django Version: 2.0.7
Exception Type: IndexError
Exception Value:
list index out of range
Exception Location: /home/admin1/Desktop/nar-backup/dd/django-ubuntu/hrmsprojects/hrmsapp/views.py in Add_Atten, line 102
Python Executable: /usr/bin/python3
Python Version: 3.5.2
Python Path:
['/home/admin1/Desktop/nar-backup/dd/django-ubuntu/hrmsprojects',
'/usr/lib/python35.zip',
'/usr/lib/python3.5',
'/usr/lib/python3.5/plat-x86_64-linux-gnu',
'/usr/lib/python3.5/lib-dynload',
'/home/admin1/.local/lib/python3.5/site-packages',
'/usr/local/lib/python3.5/dist-packages',
'/usr/lib/python3/dist-packages']
Server time: Fri, 3 Aug 2018 11:00:59 +0000
This is the error output I am getting after altering the code.
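For reference, the IndexError almost always means one of the strings produced by file_data.split("\n") does not contain all of the expected comma-separated fields; the trailing empty string after the final newline and any header row are the usual culprits, and the new month column widens each row to 38 fields. A minimal, defensive parsing sketch (it reuses the column order from the view above; parse_attendance_rows is a made-up helper name, not part of the original code):

import csv
import io

# Column names in the same order as the fields[...] lookups above;
# index 0 of each CSV row is the running row number and is skipped.
COLUMNS = (
    ["department", "role"]
    + ["one", "two", "three", "four", "five", "six", "seven", "eight", "nine",
       "ten", "eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen",
       "seventeen", "eighteen", "nineteen", "twenty", "twentyone", "twentytwo",
       "twentythree", "twentyfour", "twentyfive", "twentysix", "twentyseven",
       "twentyeight", "twentynine", "thirty", "thirtyone"]
    + ["employee_name", "total", "leaves", "month"]
)

def parse_attendance_rows(file_data):
    """Yield one dict per data row, skipping blank or short rows instead of raising IndexError."""
    for fields in csv.reader(io.StringIO(file_data)):
        if len(fields) < len(COLUMNS) + 1:  # +1 for the leading row number
            continue                        # trailing blank line, header, or malformed row
        yield dict(zip(COLUMNS, fields[1:]))

In the view you would then loop over parse_attendance_rows(file_data) and build Attendance_form(row) for each dict. Note also that the early return HttpResponse(data_dict['total']) in the posted loop stops processing after the first row, so only one employee would ever be saved.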

Related

Parsing logs to json Python

Folks,
I am trying to parse a log file into JSON format.
I have a lot of logs; here is one of them.
How can I parse it?
03:02:03.113 [info] ext_ref = BANK24AOS_cl_reqmarketcreditorderstate_6M8I1NT8JKYD_1591844522410384_4SGA08M8KIXQ reqid = 1253166 type = INREQ channel = BANK24AOS sid = msid_1591844511335516_KRRNBSLH2FS duration = 703.991 req_uri = marketcredit/order/state login = 77012221122 req_type = cl_req req_headers = {"accept-encoding":"gzip","connection":"close","host":"test-mobileapp-api.bank.kz","user-agent":"okhttp/4.4.1","x-forwarded-for":"212.154.169.134","x-real-ip":"212.154.169.134"} req_body = {"$sid":"msid_1591844511335516_KRRNBSLH2FS","$sid":"msid_1591844511335516_KRRNBSLH2FS","app":"bank","app_version":"2.3.2","channel":"aos","colvir_token":"GExPR0lOX1BBU1NXT1JEX0NMRUFSVEVYVFNzrzh4Thk1+MjDKWl/dDu1fQPsJ6gGLSanBp41yLRv","colvir_commercial_id":"-1","colvir_id":"000120.335980","openway_commercial_id":"6247520","openway_id":"6196360","$lang":"ru","ekb_id":"923243","inn":"990830221722","login":"77012221122","bank24_id":"262"} resp_body = {"task_id":"","status":"success","data":{"state":"init","applications":[{"status":"init","id":"123db561-34a3-4a8d-9fa7-03ed6377b44f","name":"Sulpak","amount":101000,"items":[{"name":"Switch CISCO x24","price":100000,"count":1,"amount":100000}]}],"segment":{"range":{"min":6,"max":36,"step":1},"payment_day":{"max":28,"min":1}}}}
Into this type of JSON, or any other format (but I guess JSON is the best one):
{
    "time":"03:02:03.113",
    "class_req":"info",
    "ext_ref":"BANK24AOS_cl_reqmarketcreditorderstate_6M8I1NT8JKYD_1591844522410384_4SGA08M8KIXQ",
    "reqid":"1253166",
    "type":"INREQ",
    "channel":"BANK24AOS",
    "sid":"msid_1591844511335516_KRRNBSLH2FS",
    "duration":"703.991",
    "req_uri":"marketcredit/order/state",
    "login":"77012221122",
    "req_type":"cl_req",
    "req_headers":{
        "accept-encoding":"gzip",
        "connection":"close",
        "host":"test-mobileapp-api.bank.kz",
        "user-agent":"okhttp/4.4.1",
        "x-forwarded-for":"212.154.169.134",
        "x-real-ip":"212.154.169.134"
    },
    "req_body":{
        "$sid":"msid_1591844511335516_KRRNBSLH2FS",
        "$sid":"msid_1591844511335516_KRRNBSLH2FS",
        "app":"bank",
        "app_version":"2.3.2",
        "channel":"aos",
        "colvir_token":"GExPR0lOX1BBU1NXT1JEX0NMRUFSVEVYVFNzrzh4Thk1+MjDKWl/dDu1fQPsJ6gGLSanBp41yLRv",
        "colvir_commercial_id":"-1",
        "colvir_id":"000120.335980",
        "openway_commercial_id":"6247520",
        "openway_id":"6196360",
        "$lang":"ru",
        "ekb_id":"923243",
        "inn":"990830221722",
        "login":"77012221122",
        "bank24_id":"262"
    },
    "resp_body":{
        "task_id":"",
        "status":"success",
        "data":{
            "state":"init",
            "applications":[
                {
                    "status":"init",
                    "id":"123db561-34a3-4a8d-9fa7-03ed6377b44f",
                    "name":"Sulpak",
                    "amount":101000,
                    "items":[
                        {
                            "name":"Switch CISCO x24",
                            "price":100000,
                            "count":1,
                            "amount":100000
                        }
                    ]
                }
            ],
            "segment":{
                "range":{
                    "min":6,
                    "max":36,
                    "step":1
                },
                "payment_day":{
                    "max":28,
                    "min":1
                }
            }
        }
    }
}
I am trying to split the whole text first, but then I hit another problem: matching keys to values based on the '=' sign. Also, some keys may have empty values. For example:
type = INREQ channel = sid = duration = 1.333 (to spot an empty value you have to look at the number of spaces; usually there is one space between the previous value and the next key). So this example should look like this:
{
    "type":"INREQ",
    "channel":"",
    "sid":"",
    "duration":"1.333"
}
Thanks ahead!
Here is one approach. One caveat: a dict cannot hold duplicate keys, so only one of the duplicate "$sid":"msid_1591844511335516_KRRNBSLH2FS" entries survives.
import re
import json
text = """03:02:03.113 [info] ext_ref = reqid = 1253166 type = INREQ channel = BANK24AOS sid = msid_1591844511335516_KRRNBSLH2FS duration = 703.991 req_uri = marketcredit/order/state login = 77012221122 req_type = cl_req req_headers = {"accept-encoding":"gzip","connection":"close","host":"test-mobileapp-api.bank.kz","user-agent":"okhttp/4.4.1","x-forwarded-for":"212.154.169.134","x-real-ip":"212.154.169.134"} req_body = {"$sid":"msid_1591844511335516_KRRNBSLH2FS","$sid":"msid_1591844511335516_KRRNBSLH2FS","app":"bank","app_version":"2.3.2","channel":"aos","colvir_token":"GExPR0lOX1BBU1NXT1JEX0NMRUFSVEVYVFNzrzh4Thk1+MjDKWl/dDu1fQPsJ6gGLSanBp41yLRv","colvir_commercial_id":"-1","colvir_id":"000120.335980","openway_commercial_id":"6247520","openway_id":"6196360","$lang":"ru","ekb_id":"923243","inn":"990830221722","login":"77012221122","bank24_id":"262"} resp_body = {"task_id":"","status":"success","data":{"state":"init","applications":[{"status":"init","id":"123db561-34a3-4a8d-9fa7-03ed6377b44f","name":"Sulpak","amount":101000,"items":[{"name":"Switch CISCO x24","price":100000,"count":1,"amount":100000}]}],"segment":{"range":{"min":6,"max":36,"step":1},"payment_day":{"max":28,"min":1}}}}"""
index1 = text.index('[')
index2 = text.index(']')
new_text = 'time = '+ text[:index1-1] + ' class_req = ' + text[index1+1:index2] + text[index2+2:]
lst = re.findall(r'\S+? = |\S+? = \{.*?\} |\S+? = \{.*?\}$|\S+? = \S+? ', new_text)
res = {}
for item in lst:
    key, equal, value = item.partition('=')
    key, value = key.strip(), value.strip()
    if value.startswith('{'):
        try:
            value = json.loads(value)
        except:
            print(value)
    res[key] = value
You can try regular expressions in Python.
Here is what I wrote; it works for your problem.
For convenience I removed the text before "ext_ref..."; you can truncate the raw string directly.
import re
import json
string = 'ext_ref = BANK24AOS_cl_reqmarketcreditorderstate_6M8I1NT8JKYD_1591844522410384_4SGA08M8KIXQ reqid = 1253166 type = INREQ channel = BANK24AOS sid = msid_1591844511335516_KRRNBSLH2FS duration = 703.991 req_uri = marketcredit/order/state login = 77012221122 req_type = cl_req req_headers = {"accept-encoding":"gzip","connection":"close","host":"test-mobileapp-api.bank.kz","user-agent":"okhttp/4.4.1","x-forwarded-for":"212.154.169.134","x-real-ip":"212.154.169.134"} req_body = {"$sid":"msid_1591844511335516_KRRNBSLH2FS","$sid":"msid_1591844511335516_KRRNBSLH2FS","app":"bank","app_version":"2.3.2","channel":"aos","colvir_token":"GExPR0lOX1BBU1NXT1JEX0NMRUFSVEVYVFNzrzh4Thk1+MjDKWl/dDu1fQPsJ6gGLSanBp41yLRv","colvir_commercial_id":"-1","colvir_id":"000120.335980","openway_commercial_id":"6247520","openway_id":"6196360","$lang":"ru","ekb_id":"923243","inn":"990830221722","login":"77012221122","bank24_id":"262"} resp_body = {"task_id":"","status":"success","data":{"state":"init","applications":[{"status":"init","id":"123db561-34a3-4a8d-9fa7-03ed6377b44f","name":"Sulpak","amount":101000,"items":[{"name":"Switch CISCO x24","price":100000,"count":1,"amount":100000}]}],"segment":{"range":{"min":6,"max":36,"step":1},"payment_day":{"max":28,"min":1}}}}'
position = re.search("req_headers",string) # position of req_headers
resp_body_pos = re.search("resp_body",string)
resp_body = string[resp_body_pos.span()[0]:]
res1 = {}
res1.setdefault(resp_body.split("=")[0],resp_body.split("=")[1])
print(res1)
before = string[:position.span()[0]]
after = string[position.span()[0]:resp_body_pos.span()[0]]  # handle req_headers and req_body separately
res2 = re.findall(r"(\S+) = (\S+)", before)
print(res2)
res3 = re.findall(r"(\S+) = ({.*?})", after)
print(res3)
# res1 type: dict {'resp_body': '...'} - the resp_body content
# res2 type: list [(), ()...] - the content before req_headers
# res3 type: list [(), ()...] - the rest of the content
Now you can do what you want with the data (e.g. transform each piece into JSON).
Hope this is helpful.
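If the end goal is the single JSON document from the question, a small follow-up sketch (building on the res2, res3 and resp_body variables above; merged is a name I made up) could stitch the pieces together:

# Merge the partial results above into one dict and dump it as JSON.
merged = dict(res2)                                # plain key/value pairs before req_headers
for key, raw in res3:                              # req_headers and req_body arrive as JSON strings
    merged[key] = json.loads(raw)
resp_key, _, resp_raw = resp_body.partition('=')   # 'resp_body = {...}'
merged[resp_key.strip()] = json.loads(resp_raw.strip())
print(json.dumps(merged, indent=2, ensure_ascii=False))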

[Odoo][v10] IndexError in cron; in form view everything is OK

I have two nearly identical methods: one works in the form view for a single record, the second one is for a cron job.
When I run the action in the form view everything is OK and I can get the value:
self.deadline = deadlines[0][0]
but when I run it from the cron:
emp.debug = deadlines[0][0]
I get IndexError: list index out of range,
although emp.debug = deadlines works.
Full code:
class UserProfile(models.Model):
    _name = 'users.profile'

    user_id = fields.Many2one(related='project_id.user_id', string='User')
    partner_id = fields.Many2one('res.partner', 'Partner')
    follower_id = fields.Many2one('mail.followers', 'Follower')
    project_id = fields.Many2one('project.project', 'Project')
    # project_start_date  # TODO max date from deadlines
    project_active = fields.Boolean(related='project_id.active', string='Project active')
    project_percent = fields.Float(related='project_id.x_project_percent', string='Project percent')
    project_money = fields.Float(related='project_id.x_project_money_share')
    # project_money_paid = fields.Char(related='project_id.x_paid_debug')
    project_sale = fields.Many2one(related='project_id.x_sales_id', string='Sales')
    deadline = fields.Many2one('project.project.deadlines', 'Deadline')
    deadline_date = fields.Datetime(related='deadline.end_date')
    debug = fields.Text()

    def get_closest_date(self):  # In form view
        find_deadlines = self.env["project.project.deadlines"].search([('project_id', '=', self.project_id.id)])
        deadlines = []
        for record in find_deadlines:
            datetime_without_tz = datetime.datetime.strptime(record.end_date, "%Y-%m-%d %H:%M:%S")
            record_id = record.id
            delta = datetime_without_tz - datetime.datetime.now()
            delta_in_seconds = int(delta.total_seconds())
            if delta_in_seconds > 0:
                deadlines.append((record_id, delta_in_seconds))
        deadlines.sort(key=itemgetter(1))
        self.deadline = deadlines[0][0]  # No IndexError, I can get the value
        self.debug = self.env["users.profile"].search([])[0].project_id.id

    @api.model
    def get_closest_date2(self):  # For cron
        emp_details_all = self.env["users.profile"].search([])
        for emp in emp_details_all:
            find_deadlines = self.env["project.project.deadlines"].search([('project_id', '=', emp.project_id.id)])
            deadlines = []
            for record in find_deadlines:
                datetime_without_tz = datetime.datetime.strptime(record.end_date, "%Y-%m-%d %H:%M:%S")
                record_id = record.id
                delta = datetime_without_tz - datetime.datetime.now()
                delta_in_seconds = int(delta.total_seconds())
                if delta_in_seconds > 0:
                    deadlines.append((record_id, delta_in_seconds))
            deadlines.sort(key=itemgetter(1))
            emp.debug = deadlines[0][0]  # IndexError
In the cron method you are looping over all user profiles, so the IndexError is most likely raised for a different record than the one you tested in the form view.
Check the deadlines variable in the get_closest_date2 method before trying to index into it.
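A minimal guard, as a sketch (it keeps the variable names from the question and simply skips profiles with no upcoming deadlines):

deadlines.sort(key=itemgetter(1))
if deadlines:  # some profiles have no future deadlines
    emp.debug = deadlines[0][0]
# else: leave emp.debug unchanged, or log the profile - whichever fits the workflow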

Save an object during a Celery task

I would like to save an object in my database during a Celery task. This object is an export file; the Celery task creates it and I would like to store it in my table.
This is my models.py :
class CeleryExportFile(models.Model):
    name = models.CharField(max_length=100, verbose_name=_('Name of export file'), default='')
    file = models.FileField(upload_to='media/', default='')
    creation_date = models.DateTimeField(verbose_name=_('Creation date'), auto_now_add=True)
    expiration_date = models.DateTimeField(verbose_name=_('Expiration date'))

    def __str__(self):
        return self.file

    class Meta:
        verbose_name = _('Celery Export File')
        verbose_name_plural = _('Celery Export Files')
I have a Celery tasks.py file :
def get_xls_export(self, model="", search_info="", query_params=None):
    app_label = 'app'
    its_fp_or_up_product = False
    query_params = query_params or {}
    obsolete = False
    if query_params.get('obsolete', '') == 'True':
        obsolete = True
    default_sorting_key = ''
    show_date_format = settings.USER_DATE_FORMAT
    if model == "finalproduct" or model == "upstreamproduct":
        its_fp_or_up_product = True
        default_sorting_key = 'manufacturer_name' if model == "finalproduct" else 'releasing_body__short_name'
    if model == "releasebodyinstitution":
        default_sorting_key = 'name'
    model = apps.get_model(app_label=app_label, model_name=model)
    # create a workbook in memory
    output = io.BytesIO()
    book = Workbook(output, {'constant_memory': True})
    sheet = book.add_worksheet('Page 1')
    # Sheet header, first row
    row_num = 0
    columns = model.export_field_excel()
    rows_width_max = {}
    bold_format = book.add_format({'bold': True})
    max_col_width = []
    for col_num in range(len(columns)):
        rows_width_max[col_num] = columns[col_num].__len__()
        sheet.write(row_num, col_num, columns[col_num], bold_format)
        max_col_width.append(len(columns[col_num]) if len(columns[col_num]) > 10 else 10)
    default_sorting = True
    sorting_key = ''
    # Define search: get all objects or sorted values.
    if search_info != '':
        create_filters = search_info.split(';')
        if create_filters.__len__() == 1:
            if 'sorting' in create_filters[0]:
                default_sorting = False
                sorting_key = create_filters[0].split('=')[1].replace('~', '-')
                search_info = ''
        else:
            for criter in create_filters:
                if 'sorting' in criter:
                    default_sorting = False
                    sorting_key = criter.split('=')[1].replace('~', '-')
                    search_info = search_info.replace(criter, "")
                    search_info = search_info[:-1]
    objects = model.objects.all()
    if not its_fp_or_up_product:
        if obsolete:
            objects = objects.obsolete()
        else:
            objects = objects.active()
    if sorting_key:
        objects = objects.order_by(sorting_key, 'pk')
    if default_sorting:
        objects = objects.order_by(default_sorting_key, 'pk')
    if search_info != '':
        create_filters = search_info.split(';')
        for search_filter in create_filters:
            search_filter = search_filter.split('=')
            try:
                if search_filter[1]:
                    objects = objects.filter(**{search_filter[0]: search_filter[1]})
            except:
                # Crud patch search
                if search_filter[0] == 'q':
                    search_info = search_info.replace('q=', '')
                    objects = objects.filter(get_query(search_info, model.get_xls_values_list()))
    rows = objects.values_list(*model.get_xls_values_list())
    for row in rows:
        row_num += 1
        for col_num in range(len(row)):
            # Patch True/False for boolean fields
            is_bool = False
            if type(row[col_num]) is bool:
                is_bool = True
            if col_num in model.get_date_field_number():
                if row[col_num]:
                    sheet.write(row_num, col_num, row[col_num].strftime(show_date_format))
            else:
                if is_bool:
                    sheet.write(row_num, col_num, 'True' if row[col_num] else 'False')
                else:
                    sheet.write(row_num, col_num, row[col_num])
            if len(str(row[col_num])) > max_col_width[col_num]:
                max_col_width[col_num] = len(str(row[col_num]))
    # AutoFit columns
    for col_num in range(len(columns)):
        sheet.set_column(col_num, col_num, max_col_width[col_num] + 1)
    book.close()
    output.seek(0)
    name = str(name + "_" + str(datetime.datetime.now().strftime("%Y_%m_%d_%H_%M_%s")) + '.xlsx')
    CeleryExportFile.save(name=name, file=output, expiration_date=datetime.datetime.now())
    # default_storage.save(name, output)
    try:
        self.send_email(name=name)
    except ConnectionRefusedError as e:
        return ['error_message', _('Error for sending email')]
    return ['success_message', _('Generation of export file is done')]
I'm getting this issue :
[2019-02-14 12:23:56,151: ERROR/ForkPoolWorker-4] Task app.tasks.get_xls_export[80e7ea2e-4192-4df7-ba05-83a14805225f] raised unexpected: TypeError("save() got an unexpected keyword argument 'name'",)
Traceback (most recent call last):
File "/home/.pyenv/versions/3.6.2/envs/app/lib/python3.6/site-packages/celery/app/trace.py", line 382, in trace_task
R = retval = fun(*args, **kwargs)
File "/home/.pyenv/versions/3.6.2/envs/app/lib/python3.6/site-packages/celery/app/trace.py", line 641, in __protected_call__
return self.run(*args, **kwargs)
File "/home/Bureau/Projets/app/src/app/tasks.py", line 151, in get_xls_export
CeleryExportFile.save(name='test', file=book, expiration_date=datetime.datetime.now())
TypeError: save() got an unexpected keyword argument 'name'
How can I save my file into my database?
Do I have to pass name, file, etc. as kwargs and create a save() method in my models.py with kwargs.pop?
Just do it as follows:
celery_export_file = CeleryExportFile(name=name, file=output, expiration_date=datetime.datetime.now())
celery_export_file.save()
or you can call the create() method:
CeleryExportFile.objects.create(name=name, file=output, expiration_date=datetime.datetime.now())
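One additional note, not from the original answer, so treat it as an assumption: a raw io.BytesIO generally cannot be assigned directly to a FileField; wrapping its bytes in Django's ContentFile makes the file storage explicit. A sketch reusing the name, output and CeleryExportFile names from the question:

from django.core.files.base import ContentFile

export = CeleryExportFile(name=name, expiration_date=datetime.datetime.now())
# Wrap the in-memory workbook bytes so the FileField knows how to store them under MEDIA_ROOT.
export.file.save(name, ContentFile(output.getvalue()), save=True)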

Proper way to format date for Fedex API XML

I have a Django application where I am trying to call FedEx's API to send out a shipping label for people wanting to send in a product for cash. When I make the call, though, it reports a data validation issue with the ExpirationDate field in the XML I am filling out. I swear this worked in the past when I formatted the date as "YYYY-MM-DD", but now it does not. I read that with FedEx you need to format the date as ISO, but that is also not passing validation. I am using a Python package created to help with calling FedEx's API.
Django view function for sending API Call
def Fedex(request, quote):
    label_link = ''
    expiration_date = datetime.datetime.now() + datetime.timedelta(days=10)
    # formatted_date = "%s-%s-%s" % (expiration_date.year, expiration_date.month, expiration_date.day)
    formatted_date = expiration_date.replace(microsecond=0).isoformat()
    if quote.device_type != 'laptop':
        box_length = 9
        box_width = 12
        box_height = 3
    else:
        box_length = 12
        box_width = 14
        box_height = 3
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)

    ## Page 411 of FedEx Dev Guide - 20.14 Email Labels
    CONFIG_OBJ = FedexConfig(key=settings.FEDEX_KEY, password=settings.FEDEX_PASSWORD, account_number=settings.FEDEX_ACCOUNT,
                             meter_number=settings.FEDEX_METER, use_test_server=settings.USE_FEDEX_TEST)
    fxreq = FedexCreatePendingShipRequestEmail(CONFIG_OBJ, customer_transaction_id='xxxxxx id:01')
    fxreq.RequestedShipment.ServiceType = 'FEDEX_GROUND'
    fxreq.RequestedShipment.PackagingType = 'YOUR_PACKAGING'
    fxreq.RequestedShipment.DropoffType = 'REGULAR_PICKUP'
    fxreq.RequestedShipment.ShipTimestamp = datetime.datetime.now()

    # Special fields for the email label
    fxreq.RequestedShipment.SpecialServicesRequested.SpecialServiceTypes = ('RETURN_SHIPMENT', 'PENDING_SHIPMENT')
    fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.Type = 'EMAIL'
    fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.ExpirationDate = formatted_date
    email_address = fxreq.create_wsdl_object_of_type('EMailRecipient')
    email_address.EmailAddress = quote.email
    email_address.Role = 'SHIPMENT_COMPLETOR'

    # RETURN SHIPMENT DETAIL
    fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnType = ('PENDING')
    fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnEMailDetail = fxreq.create_wsdl_object_of_type(
        'ReturnEMailDetail')
    fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnEMailDetail.MerchantPhoneNumber = 'x-xxx-xxx-xxxx'
    fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.EmailLabelDetail.Recipients = [email_address]
    fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.EmailLabelDetail.Message = "Xxxxxx Xxxxxx"
    fxreq.RequestedShipment.LabelSpecification = {'LabelFormatType': 'COMMON2D', 'ImageType': 'PDF'}
    fxreq.RequestedShipment.Shipper.Contact.PersonName = quote.first_name + ' ' + quote.last_name
    fxreq.RequestedShipment.Shipper.Contact.CompanyName = ""
    fxreq.RequestedShipment.Shipper.Contact.PhoneNumber = quote.phone
    fxreq.RequestedShipment.Shipper.Address.StreetLines.append(quote.address)
    fxreq.RequestedShipment.Shipper.Address.City = quote.city
    fxreq.RequestedShipment.Shipper.Address.StateOrProvinceCode = quote.state
    fxreq.RequestedShipment.Shipper.Address.PostalCode = quote.zip
    fxreq.RequestedShipment.Shipper.Address.CountryCode = settings.FEDEX_COUNTRY_CODE
    fxreq.RequestedShipment.Recipient.Contact.PhoneNumber = settings.FEDEX_PHONE_NUMBER
    fxreq.RequestedShipment.Recipient.Address.StreetLines = settings.FEDEX_STREET_LINES
    fxreq.RequestedShipment.Recipient.Address.City = settings.FEDEX_CITY
    fxreq.RequestedShipment.Recipient.Address.StateOrProvinceCode = settings.FEDEX_STATE_OR_PROVINCE_CODE
    fxreq.RequestedShipment.Recipient.Address.PostalCode = settings.FEDEX_POSTAL_CODE
    fxreq.RequestedShipment.Recipient.Address.CountryCode = settings.FEDEX_COUNTRY_CODE
    fxreq.RequestedShipment.Recipient.AccountNumber = settings.FEDEX_ACCOUNT
    fxreq.RequestedShipment.Recipient.Contact.PersonName = ''
    fxreq.RequestedShipment.Recipient.Contact.CompanyName = 'Xxxxxx Xxxxxx'
    fxreq.RequestedShipment.Recipient.Contact.EMailAddress = 'xxxxxx#xxxxxxxxx'

    # Details of Person Who is Paying for the Shipping
    fxreq.RequestedShipment.ShippingChargesPayment.PaymentType = 'SENDER'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.AccountNumber = settings.FEDEX_ACCOUNT
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.PersonName = 'Xxxxx Xxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.CompanyName = 'Xxxxx Xxxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.PhoneNumber = 'x-xxx-xxx-xxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.EMailAddress = 'xxxxxxx#xxxxxxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.StreetLines = 'Xxxxx N. xXxxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.City = 'Xxxxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.StateOrProvinceCode = 'XX'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.PostalCode = 'xxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.CountryCode = 'US'

    # Package Info
    package1 = fxreq.create_wsdl_object_of_type('RequestedPackageLineItem')
    package1.SequenceNumber = '1'
    package1.Weight.Value = 1
    package1.Weight.Units = "LB"
    package1.Dimensions.Length = box_length
    package1.Dimensions.Width = box_width
    package1.Dimensions.Height = box_height
    package1.Dimensions.Units = "IN"
    package1.ItemDescription = 'Phone'
    fxreq.RequestedShipment.RequestedPackageLineItems.append(package1)
    fxreq.RequestedShipment.PackageCount = '1'

    try:
        fxreq.send_request()
        label_link = str(fxreq.response.CompletedShipmentDetail.AccessDetail.AccessorDetails[0].EmailLabelUrl)
    except Exception as exc:
        print('Fedex Error')
        print('===========')
        print(exc)
        print('==========')
    return label_link
Error Log
Error: cvc-datatype-valid.1.2.1: '2017-11-3' is not a valid value for 'date'. cvc-type.3.1.3: The value '2017-11-3' of element 'ns0:ExpirationDate' is not valid. (Error code: -1)
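The schema error itself points at the likely fix: ExpirationDate is validated as an XML xs:date, so it must be a zero-padded YYYY-MM-DD string; '2017-11-3' fails, and a full ISO datetime with a time component is also not an xs:date. A minimal sketch of the formatting change (assuming the rest of the view stays as posted):

import datetime

expiration_date = datetime.datetime.now() + datetime.timedelta(days=10)
# strftime zero-pads the month and day, producing e.g. '2017-11-03' rather than '2017-11-3'
formatted_date = expiration_date.strftime('%Y-%m-%d')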

writing data to excel

I have data that I need to export to Excel, and I just don't know how to go about it. Here's the view I'm using; I've commented out my attempts. A push in the right direction will be greatly appreciated.
def month_end(request):
    """
    A simple view that will generate a month end report as a PDF response.
    """
    current_date = datetime.now()
    context = {}
    context['month'] = current_date.month
    context['year'] = current_date.year
    context['company'] = 3
    if request.method == 'POST':
        context['form'] = MonthEndForm(user=request.user, data=request.POST)
        if context['form'].is_valid():
            #from reportlab.pdfgen import canvas
            #import ho.pisa as pisa
            context['month_no'] = int(context['form'].cleaned_data['month'])
            context['company'] = context['form'].cleaned_data['company']
            context['year'] = context['form'].cleaned_data['year']
            context['month'] = datetime(context['year'], context['month_no'], 1).strftime('%B')
            sql = '''SELECT "campaign_provider"."originator" as originator, "campaign_provider"."cost",
                            "campaign_receivedmessage"."network_id",
                            COUNT("campaign_provider"."originator") AS "originator_count",
                            "shortcode_network"."network"
                     FROM "campaign_receivedmessage"
                     LEFT OUTER JOIN "shortcode_network" ON ("shortcode_network"."id" = "campaign_receivedmessage"."network_id")
                     LEFT OUTER JOIN "campaign_provider" ON ("campaign_receivedmessage"."provider_id" = "campaign_provider"."id")
                     WHERE ("campaign_provider"."company_id" = %s
                            AND EXTRACT('month' FROM "campaign_receivedmessage"."date_received") = %s)
                     GROUP BY "campaign_provider"."originator", "campaign_provider"."cost", "campaign_receivedmessage"."network_id", "shortcode_network"."network"
                     ORDER BY "campaign_provider"."originator", "campaign_receivedmessage"."network_id" ASC
                  ''' % (context['company'].id, context['month_no'])
            context['rec_messages'] = []
            cursor = connection.cursor()
            cursor.execute(sql)
            data = cursor.fetchall()
            for row in data:
                dict = {}
                desc = cursor.description
                for (name, value) in zip(desc, row):
                    dict[name[0]] = value
                try:
                    dict['share'] = RevenueShare.objects.get(company=context['company'], priceband=dict['cost'], network=dict['network_id']).customer_share
                    dict['revenue'] = dict['originator_count'] * dict['share']
                except:
                    dict['share'] = 0
                    dict['revenue'] = 0
                context['rec_messages'].append(dict)
            #context['rec_messages'] = ReceivedMessage.objects.filter(provider__company__id=context['company'].id, date_received__month=context['month_no'], date_received__year=context['year']).values('provider__originator', 'provider__cost', 'network').annotate(originator_count=Count('provider__originator')).order_by('provider__originator')
            context['ret_messages'] = SentMessage.objects.filter(campaign__providers__company__id=context['company'].id, date_sent__month=context['month_no'], date_sent__year=context['year']).values('campaign__title').annotate(campaign_count=Count('campaign__title')).order_by('campaign__title')
            context['revenue_share'] = RevenueShare.objects.filter(company=context['company'].id)
            context['total_rec'] = 0
            context['total_ret'] = 0
            context['total_value'] = 0
            context['total_cost'] = 0
            context['queries'] = connection.queries
            for message in context['rec_messages']:
                context['total_rec'] += message['originator_count']
                context['total_value'] += message['revenue']
            for message in context['ret_messages']:
                message['price'] = 0.175
                message['cost'] = message['campaign_count'] * message['price']
                context['total_ret'] += message['campaign_count']
                context['total_cost'] += message['cost']
            context['total'] = context['total_value'] - context['total_cost']
            context['loaded_report'] = "yes"
            data.append((context['data']))
            data.append(('Orginator', 'cost', 'network_id', 'originator_count', 'network'))
            file_name = '%s' % ('reports')
            return generate_csv(file_name, data)
            #template_data = render_to_string('reports/month_end_pdf.html', RequestContext(request, context))
            #csv_data = StringIO.StringIO()
            #csv_data.seek()
            #simple_report = ExcelReport()
            #simple_report.addSheet("TestSimple")
            #simple_report.writeReport(csv_data)
            #response = HttpResponse(simple_report.writeReport(), mimetype='application/ms-excel')
            #response['Content-Disposition'] = 'attachment; filename=simple_test.xls'
            #return response
        return render_to_response('reports/month_end.html', RequestContext(request, context))
        #return render_to_response('reports/rfm_models.html', RequestContext(request, context))
        #template_data = render_to_string('reports/month_end_pdf.html', RequestContext(request, context))
        #pdf_data = StringIO.StringIO()
        #pisa.CreatePDF(template_data, pdf_data, link_callback=fetch_resources)
        #pdf_data.seek(0)
        #response = HttpResponse(pdf_data, mimetype='application/pdf')
        #response['Content-Disposition'] = 'attachment; filename=%s_%s_%s.pdf' % (context['company'].name.lower().replace(' ', '_'), context['month'].lower()[:3], context['year'])
    if 'form' not in context.keys():
        context['form'] = MonthEndForm(user=request.user, data=context)
    return render_to_response('reports/month_end.html', RequestContext(request, context))
Have a look at xlwt: http://pypi.python.org/pypi/xlwt
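A minimal sketch of what that could look like for the rows fetched above (xlwt is the real library; rows_to_xls_response, the header tuple and the filename are just stand-in names, not part of the question's code):

import io
import xlwt
from django.http import HttpResponse

def rows_to_xls_response(rows, header, filename='month_end.xls'):
    """Write a header row plus data rows into an .xls workbook and return it as a download."""
    book = xlwt.Workbook()
    sheet = book.add_sheet('Report')
    for col, title in enumerate(header):
        sheet.write(0, col, title)
    for row_idx, row in enumerate(rows, start=1):
        for col, value in enumerate(row):
            sheet.write(row_idx, col, value)
    buf = io.BytesIO()
    book.save(buf)  # xlwt can save to a file-like object
    response = HttpResponse(buf.getvalue(), content_type='application/ms-excel')
    response['Content-Disposition'] = 'attachment; filename=%s' % filename
    return response

Called with data = cursor.fetchall() and the ('Orginator', 'cost', ...) tuple from the view, this would replace the commented-out ExcelReport block.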
You could directly write CSV from MySQL records,
import csv
csv_writer = csv.writer(open(FILENAME,'w'), delimiter=',',quotechar="'")
data = cursor.fetchall()
for row in data:
    csv_writer.writerow(row)
Full example at
http://snipplr.com/view/11970/simple-csv-dump-script/
SELECTQ="SELECT * FROM category"
FILENAME="dump.csv"
import MySQLdb
import csv
db = MySQLdb.connect(host="localhost", user="root", passwd="", db="sakila")
dump_writer = csv.writer(open(FILENAME,'w'), delimiter=',',quotechar="'")
cursor = db.cursor()
cursor.execute(SELECTQ)
result = cursor.fetchall()
for record in result:
    dump_writer.writerow(record)
db.close()
