A strange Python "if" syntax error - python
I get this error: "invalid syntax" in my "if" statement and I really can't figure out why — can any of you guys help me? I'm using Python 3.2
Here is the part of my code with the error:
L = list()
LT = list()
tn = 0
players = 0
newplayer = 0
newplayerip = ""
gt = "start"
demsg = "start"
time = 1
status = 0
day = 1
conclient = 1
print("DONE! The UDP Server is now started and Waiting for client's on port 5000")
while 1:
try:
data, address = server_socket.recvfrom(1024)
if not data: break
################### reciving data! ###################
UPData = pickle.loads(data)
status = UPData[0][[0][0]
if status > 998: ##### it is here the error are given####
try:
e = len(L)
ori11 = UPData[0][1][0]
ori12 = UPData[0][1][1]
ori13 = UPData[0][1][2]
ori14 = UPData[0][1][3]
ori21 = UPData[0][1][4]
ori22 = UPData[0][1][5]
ori23 = UPData[0][1][6]
ori24 = UPData[0][1][7]
ori31 = UPData[0][2][0]
ori32 = UPData[0][2][1]
ori33 = UPData[0][2][2]
ori34 = UPData[0][2][3]
ori41 = UPData[0][2][4]
ori42 = UPData[0][2][5]
ori43 = UPData[0][2][6]
ori44 = UPData[0][2][7]
ori51 = UPData[0][3][0]
ori52 = UPData[0][3][1]
ori53 = UPData[0][3][2]
ori54 = UPData[0][3][3]
ori61 = UPData[0][3][4]
ori62 = UPData[0][3][5]
ori63 = UPData[0][3][6]
ori64 = UPData[0][3][7]
ori71 = UPData[0][4][0]
ori72 = UPData[0][4][1]
ori73 = UPData[0][4][2]
ori74 = UPData[0][4][3]
ori81 = UPData[0][4][4]
ori82 = UPData[0][4][5]
ori83 = UPData[0][4][6]
ori84 = UPData[0][4][7]
ori91 = UPData[0][5][0]
ori92 = UPData[0][5][1]
ori93 = UPData[0][5][2]
ori94 = UPData[0][5][3]
ori101 = UPData[0][5][4]
ori102 = UPData[0][5][5]
ori103 = UPData[0][5][6]
ori104 = UPData[0][5][7]
npcp11 = UPData[0][6][0]
npcp12 = UPData[0][6][1]
npcp13 = UPData[0][6][2]
npcp21 = UPData[0][6][3]
npcp22 = UPData[0][6][4]
npcp23 = UPData[0][6][5]
npcp31 = UPData[0][6][6]
npcp32 = UPData[0][6][7]
npcp33 = UPData[0][7][0]
npcp41 = UPData[0][7][1]
npcp42 = UPData[0][7][2]
npcp43 = UPData[0][7][3]
npcp51 = UPData[0][7][4]
npcp52 = UPData[0][7][5]
npcp53 = UPData[0][7][6]
npcp61 = UPData[0][7][7]
npcp62 = UPData[0][8][0]
npcp63 = UPData[0][8][1]
npcp71 = UPData[0][8][2]
npcp72 = UPData[0][8][3]
npcp73 = UPData[0][8][4]
npcp81 = UPData[0][8][5]
npcp82 = UPData[0][8][6]
npcp83 = UPData[0][8][7]
npcp91 = UPData[1][0][0]
npcp92 = UPData[1][0][1]
npcp93 = UPData[1][0][2]
npcp101 = UPData[1][0][3]
npcp102 = UPData[1][0][4]
npcp103 = UPData[1][0][5]
d0 = (status, )
d1 = (ori11,ori12,ori13,ori14,ori21,ori22,ori23,ori24)
d2 = (ori31,ori32,ori33,ori34,ori41,ori42,ori43,ori44)
d3 = (ori51,ori52,ori53,ori54,ori61,ori62,ori63,ori64)
d4 = (ori71,ori72,ori73,ori74,ori81,ori82,ori83,ori84)
d5 = (ori91,ori92,ori93,ori94,ori101,ori102,ori103,ori104)
d6 = (npcp11,npcp21,npcp31,npcp21,npcp22,npcp23,npcp31,npcp32)
d7 = (npcp33,npcp41,npcp42,npcp43,npcp51,npcp52,npcp53,npcp61)
d8 = (npcp62,npcp63,npcp71,npcp72,npcp72,npcp81,npcp82,npcp83)
d9 = (npcp91,npcp92,npcp93,npcp101,npcp102,npcp103)
pack1 = (d0,d1,d2,d3,d4,d5,d6,d7,d8)
pack2 = (d9, )
dat = pickle.dumps((pack1,pack2))
while tn < e:
server_socket.sendto(dat, (L[tn],3560))
tn = tn + 1
except:
pass
print("could not send data to some one or could not run the server at all")
else:
The part of the code where the console tells me the error is, is here:
if status > 998:
The problem is here:
status = UPData[0][[0][0]
The second opening bracket [ is not closed. The Python parser keeps looking for the closing bracket, finds if on the next line, and gets confused because if is not allowed inside brackets.
You may want to remove this bracket, or close it, according to your specific needs (the structure of UPData).
Related
I can't make reference to the checkbutton variable 2 times, in the second it says the name is not defined
Can anyone tell me why I can only refer to the Checkbutton variable once? the important part of code is bold. The first time that I use the variable it run, but the second, the same variable dont work. I think I tried very much but dont work! def botao_executar(): botão_executar = comand of Button if (pag1 <= numero_quadros): img1 = cv.imread('imagemfinal.jpg') novo_y_1 = int(num_novos_y*1) crop_image_1 = img1[0:novo_y_1] status = cv.imwrite('Desktop\ORC/crop_image1.png', crop_image_1) q = 590 a = (q+220) z = (q-590) p = (q+250) #Dimensoes_1 crop_image_1_1_dimensoes = crop_image_1[q+60:a-20, z:p] status_dimensoes = cv.imwrite('Desktop\ORC/crop_image1.1_dimensoes.png', crop_image_1_1_dimensoes) img_1_1_dimensoes = PIL.Image.open('Desktop\ORC/crop_image1.1_dimensoes.png') pytesseract.pytesseract.tesseract_cmd =r'C:\Users\joaoccfaria\AppData\Local\Programs\Tesseract-OCR\tesseract.exe' result_1_1_dimensoes = pytesseract.image_to_string(img_1_1_dimensoes, config='--psm 6 digits') with open('abc.txt',mode ='w') as file: file.write(result_1_1_dimensoes) if v_dimensoes.get()> 0: v_dimensoes.get= variable of checkbutton here it works! 
print('dimensoes_1 = '+ result_1_1_dimensoes + ';') sheet.write('A1', result_1_1_dimensoes) else: pass pag2 = 2 if (pag2 <= numero_quadros): img2 = cv.imread('imagemfinal.jpg') novo_y_1 = int(num_novos_y*1) crop_image_2 = img2[novo_y_1:novo_y_1*2] status = cv.imwrite('Desktop\ORC/crop_image2.png', crop_image_2) q = 590 a = (q+220) z = (q-590) p = (q+250) #Dimensoes_2 crop_image_2_1_dimensoes = crop_image_2[q+60:a-20, z:p] status_dimensoes = cv.imwrite('Desktop\ORC/crop_image2.1_dimensoes.png', crop_image_2_1_dimensoes) img_2_1_dimensoes = PIL.Image.open('Desktop\ORC/crop_image2.1_dimensoes.png') pytesseract.pytesseract.tesseract_cmd =r'C:\Users\joaoccfaria\AppData\Local\Programs\Tesseract-OCR\tesseract.exe' result_2_1_dimensoes = pytesseract.image_to_string(img_2_1_dimensoes, config='--psm 6 digits') with open('abc.txt',mode ='w') as file: file.write(result_2_1_dimensoes) if v_dimensoes.get()> 0: here the same variable, but says that the name is not defined print('dimensoes_2 = '+ result_2_1_dimensoes + ';') sheet.write('A21', result_2_1_dimensoes) else: pass Thanks in advance, any suggestion. Compliments.
I am getting an 'index out of bounds' error when reading from CSV in pandas, but not when I extract the data via the API. What could be the reason?
So for my bot, I am first extracting data via api and storing it in csv. When I run my for loop on data via api, it gives no error and runs smoothly. But when the csv file is read and run, it gives out of bound error. This is my function to generate data: full_list = pd.DataFrame(columns=("date","open","high","low","close","volume","ticker","RSI","ADX","20_sma","max_100")) def stock_data(ticker): create_data = fetchOHLC(ticker,'minute',60) create_data["ticker"] = ticker create_data["RSI"] = round(rsi(create_data,25),2) create_data["ADX"] = round(adx(create_data,14),2) create_data["20_sma"] = round(create_data.close.rolling(10).mean().shift(),2) create_data["max_100"] = create_data.close.rolling(100).max().shift() create_data.dropna(inplace=True,axis=0) create_data.reset_index(inplace=True) return create_data stocklist = open("stocklist.txt","r+") tickers = stocklist.readlines() for x in tickers: try: full_list = full_list.append(stock_data(x.strip())) except: print(f'{x.strip()} did not work') full_list.to_csv("All_Data") full_list So when I run the same code below on dataframe created I got no error. But when I run the same code on the csv file, I get out of bound error. list_tickers = full_list["ticker"].unique() for y in list_tickers[:2]: main = full_list[full_list["ticker"]==y] pos = 0 num = 0 tick = y signal_time = 0 signal_rsi = 0 signal_adx = 0 buy_time = 0 buy_price = 0 sl = 0 #to add trailing sl in this. 
for x in main.index: maxx = main.iloc[x]["max_100"] rsi = main.iloc[x]["RSI"] adx = main.iloc[x]["ADX"] sma = main.iloc[x]["20_sma"] close = main.iloc[x]["close"] high = main.iloc[x]["high"] if rsi > 80 and adx > 35 and close > maxx: if pos == 0: buy_price = main.iloc[x+1]["open"] buy_time = main.iloc[x+1]["date"] pos=1 signal_time = main.iloc[x]["date"] signal_rsi = main.iloc[x]["RSI"] signal_adx = main.iloc[x]["ADX"] elif close < sma: if pos == 1: sell_time = main.iloc[x]["date"] sell_price = sma*.998 pos=0 positions.loc[positions.shape[0]] = [y,signal_time,signal_rsi,signal_adx,buy_time,buy_price,sell_time,sell_price] Any idea why? Here is a cleanup and file call code: full_list = pd.read_csv("All_data") full_list.dropna(inplace=True,axis=0) full_list.drop(labels="Unnamed: 0",axis=1) < index of previous dataframe full_list.head(5) Thanks
How to use the re module in Python
I have the following list : ['[infotype02]', 'lastModifiedOn = serial<customMapping>', 'customString18 = BADGE_NUMBER<move>', 'firstName = FIRST_NAME<move>', 'lastName = LAST_NAME<move>', 'customString29 = USER_NAME<move>', 'email = EMAIL_ADDRESS<move>', 'documenttype = DOC_TYPE<move>', 'documentnumber = DOC_SERIA<customMapping>', 'documentnumberx2 = DOC_NUMBER<customMapping>', 'issuedate = DOC_ISSUE_DATE<move>', 'issueauthority = DOC_ISSUER<move>', 'nationalId = CNP<move>', 'company = COMPANY<move>', 'phoneNumber = PHONE_NUMBER<move>', 'startDate = HIRE_DATE<customMapping>', 'startDatex2 = TERMINATION_DATE<customMapping>', '[/infotype02]', '[infotype02]', 'lastModifiedOn = serial<customMapping>', 'customString18 = BADGE_NUMBER<move>', 'firstName = FIRST_NAME<move>', 'lastName = LAST_NAME<move>', 'customString29 = USER_NAME<move>', 'email = EMAIL_ADDRESS<move>', 'documenttype = DOC_TYPE<move>', 'documentnumber = DOC_SERIA<customMapping>', 'documentnumberx2 = DOC_NUMBER<customMapping>', 'issuedate = DOC_ISSUE_DATE<move>', 'issueauthority = DOC_ISSUER<move>', 'nationalId = CNP<move>', 'company = COMPANY<move>', 'phoneNumber = PHONE_NUMBER<move>', 'startDate = HIRE_DATE<customMapping>', 'startDatex2 = TERMINATION_DATE<customMapping>', '[/infotype02]'] for i in list; i = [infotype02] I tried using re expression to get the string between the [], expected result infotype02 : result = re.search('[(.*)]', i) Then tried to append the result.group(1) to a new list and it returned an error : lst.append(result.group(1)) AttributeError: 'NoneType' object has no attribute 'group' I don't understand what is wrong with my re expresion and why isn't it finding the string between the []
You can just use a simple for-loop to accomplish this: data = ['[infotype02]', 'lastModifiedOn = serial<customMapping>', 'customString18 = BADGE_NUMBER<move>', 'firstName = FIRST_NAME<move>', 'lastName = LAST_NAME<move>', 'customString29 = USER_NAME<move>', 'email = EMAIL_ADDRESS<move>', 'documenttype = DOC_TYPE<move>', 'documentnumber = DOC_SERIA<customMapping>', 'documentnumberx2 = DOC_NUMBER<customMapping>', 'issuedate = DOC_ISSUE_DATE<move>', 'issueauthority = DOC_ISSUER<move>', 'nationalId = CNP<move>', 'company = COMPANY<move>', 'phoneNumber = PHONE_NUMBER<move>', 'startDate = HIRE_DATE<customMapping>', 'startDatex2 = TERMINATION_DATE<customMapping>', '[/infotype02]', '[infotype02]', 'lastModifiedOn = serial<customMapping>', 'customString18 = BADGE_NUMBER<move>', 'firstName = FIRST_NAME<move>', 'lastName = LAST_NAME<move>', 'customString29 = USER_NAME<move>', 'email = EMAIL_ADDRESS<move>', 'documenttype = DOC_TYPE<move>', 'documentnumber = DOC_SERIA<customMapping>', 'documentnumberx2 = DOC_NUMBER<customMapping>', 'issuedate = DOC_ISSUE_DATE<move>', 'issueauthority = DOC_ISSUER<move>', 'nationalId = CNP<move>', 'company = COMPANY<move>', 'phoneNumber = PHONE_NUMBER<move>', 'startDate = HIRE_DATE<customMapping>', 'startDatex2 = TERMINATION_DATE<customMapping>', '[/infotype02]'] new_list = [] for d in data: if d[0] == '[' and not d[1] == '/': #if re.match(r"\[[^/](.*)\]", d): # If you want to use `re` new_list.append(d[1:-1]) print(new_list) Output: ['infotype02', 'infotype02'] As you have 2 of these tags in your given list.
How to get the correct output shape in a U-Net?
I am building a u net for binary image image segmentation. I am using Tensorflow's tf.nn api. My input image has dimensions (256,256,3) and output binary image has dimensions(256,256,1). The output of the U net model must be (1,256,256,1) but output shape results to be (7,256,256,3).For the convolutional kernels I am using Tensorflow's truncated normal initializer with each datatype as float32. Am I creating multiple output layers somewhere in the code def get_filter(shape,na): w =tf.get_variable(name=na,shape=shape,dtype='float32',initializer=tf.truncated_normal_initializer(dtype='float32')) return w def unet(inp): #f1 = get_filter(shape=[3,3,3,16]) lay_16_1 = tf.nn.conv2d(inp,filter=get_filter(shape=[3,3,3,16],na='w_1'),strides=[1,1,1,1],padding='SAME',name='conv_16_1') lay_16_2 = tf.nn.relu(lay_16_1,name='re_16_1') lay_16_3 = tf.layers.batch_normalization(lay_16_2,axis=-1,name='bn_16') lay_16_4 = tf.nn.conv2d(lay_16_3,filter=get_filter([3,3,16,16],na='w_2'),strides=[1,1,1,1],padding='SAME',name='conv_16_2') lay_16_5 = tf.nn.relu(lay_16_4,name='re_16_2') lay_p1 = tf.nn.max_pool(lay_16_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_1') lay_32_1 = tf.nn.conv2d(lay_p1,filter=get_filter([3,3,16,32],na='w_3'),strides=[1,1,1,1],padding='SAME',name='conv_32_1') lay_32_2 = tf.nn.relu(lay_32_1,name='re_32_1') lay_32_3 = tf.layers.batch_normalization(lay_32_2,axis=-1,name='bn_32') lay_32_4 = tf.nn.conv2d(lay_32_3,filter=get_filter([3,3,32,32],na='w_4'),strides=[1,1,1,1],padding='SAME',name='conv_32_2') lay_32_5 = tf.nn.relu(lay_32_4,name='re_32_2') lay_p2 = tf.nn.max_pool(lay_32_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_2') lay_64_1 = tf.nn.conv2d(lay_p2,filter=get_filter([3,3,32,64],na='w_5'),strides=[1,1,1,1],padding='SAME',name='conv_64_1') lay_64_2 = tf.nn.relu(lay_64_1,name='re_64_1') lay_64_3 = tf.layers.batch_normalization(lay_64_2,axis=-1,name='bn_64') lay_64_4 = 
tf.nn.conv2d(lay_64_3,filter=get_filter([3,3,64,64],na='w_6'),strides=[1,1,1,1],padding='SAME',name='conv_64_2') lay_64_5 = tf.nn.relu(lay_64_4,name='re_64_2') lay_p3 = tf.nn.max_pool(lay_64_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_3') lay_128_1 = tf.nn.conv2d(lay_p3,filter=get_filter([3,3,64,128],na='w_7'),strides=[1,1,1,1],padding='SAME',name='conv_128_1') lay_128_2 = tf.nn.relu(lay_128_1,name='re_128_1') lay_128_3 = tf.layers.batch_normalization(lay_128_2,axis=-1,name='bn_128') lay_128_4 = tf.nn.conv2d(lay_128_3,filter=get_filter([3,3,128,128],na='w_8'),strides=[1,1,1,1],padding='SAME',name='conv_128_2') lay_128_5 = tf.nn.relu(lay_128_4,name='re_128_2') lay_p4 = tf.nn.max_pool(lay_128_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_4') lay_256_1 = tf.nn.conv2d(lay_p4,filter=get_filter([3,3,128,256],na='w_9'),strides=[1,1,1,1],padding='SAME',name='conv_256_1') lay_256_2 = tf.nn.relu(lay_256_1,name='re_256_1') lay_256_3 = tf.layers.batch_normalization(lay_256_2,axis=-1,name='bn_256') lay_256_4 = tf.nn.conv2d(lay_256_3,filter=get_filter([3,3,256,256],na='w_10'),strides=[1,1,1,1],padding='SAME',name='conv_256_2') lay_256_5 = tf.nn.relu(lay_256_4,name='re_256_2') lay_p5 = tf.nn.max_pool(lay_256_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_5') lay_512_1 = tf.nn.conv2d(lay_p5,filter=get_filter([3,3,256,512],na='w_11'),strides=[1,1,1,1],padding='SAME',name='conv_512_1') lay_512_2 = tf.nn.relu(lay_512_1,name='re_512_1') lay_512_3 = tf.layers.batch_normalization(lay_512_2,axis=-1,name='bn_512') lay_512_4 = tf.nn.conv2d(lay_512_3,filter=get_filter([3,3,512,512],na='w_12'),strides=[1,1,1,1],padding='SAME',name='conv_512_2') lay_512_5 = tf.nn.relu(lay_512_4,name='re_512_2') lay_p6 = tf.nn.max_pool(lay_512_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_6') lay_1024_1 = tf.nn.conv2d(lay_p6,filter=get_filter([3,3,512,1024],na='w_13'),strides=[1,1,1,1],padding='SAME',name='conv_1024_1') lay_1024_2 = 
tf.nn.relu(lay_1024_1,name='re_1024_1') lay_1024_3 = tf.layers.batch_normalization(lay_1024_2,axis=-1,name='bn_1024') lay_1024_4 = tf.nn.conv2d(lay_1024_3,filter=get_filter([3,3,1024,1024],na='w_14'),strides=[1,1,1,1],padding='SAME',name='conv_1024_2') lay_1024_5 = tf.nn.relu(lay_1024_4,name='re_1024_2') #lay_p7 = tf.nn.max_pool(lay_1024,ksize=[1,2,2,1],strides=[1,1,1,1],padding='SAME',name='pool_7') up_512 = tf.image.resize_images(images=lay_1024_5,size=[8,8],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR) con_512_1 = tf.nn.conv2d(up_512,filter=get_filter([3,3,1024,512],na='w_15'),strides=[1,1,1,1],padding='SAME',name='mer_512_1') con_512_2 = tf.nn.relu(con_512_1,name='rel_512_1') mer_512 = tf.concat([lay_512_5,con_512_2],axis=0,name='mer_512_2') con_512_3 = tf.nn.conv2d(mer_512,filter=get_filter([3,3,512,512],na='w_16'),strides=[1,1,1,1],padding='SAME',name='mer_512_3') con_512_4 = tf.nn.relu(con_512_3,name='rel_512_2') con_512_5 = tf.layers.batch_normalization(con_512_4,axis=-1,name='mer_bn_512') con_512_6 = tf.nn.conv2d(con_512_5,filter=get_filter([3,3,512,512],na='w_17'),strides=[1,1,1,1],padding='SAME',name='mer_512_4') con_512_7 = tf.nn.relu(con_512_6,name='rel_512_3') up_256 = tf.image.resize_images(images=con_512_7,size=[16,16],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR) con_256_1 = tf.nn.conv2d(up_256,filter=get_filter([3,3,512,256],na='w_18'),strides=[1,1,1,1],padding='SAME',name='mer_256_1') con_256_2 = tf.nn.relu(con_256_1,name='rel_256_1') mer_256 = tf.concat([lay_256_5,con_256_2],axis=0,name='mer_256_2') con_256_3 = tf.nn.conv2d(mer_256,filter=get_filter([3,3,256,256],na='w_19'),strides=[1,1,1,1],padding='SAME',name='mer_256_3') con_256_4 = tf.nn.relu(con_256_3,name='rel_256_2') con_256_5 = tf.layers.batch_normalization(con_256_4,axis=-1,name='mer_bn_256') con_256_6 = tf.nn.conv2d(con_256_5,filter=get_filter([3,3,256,256],na='w_20'),strides=[1,1,1,1],padding='SAME',name='mer_256_4') con_256_7 = tf.nn.relu(con_256_6,name='rel_256_3') up_128 = 
tf.image.resize_images(images=con_256_7,size=[32,32],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR) con_128_1 = tf.nn.conv2d(up_128,filter=get_filter([3,3,256,128],na='w_21'),strides=[1,1,1,1],padding='SAME',name='mer_128_1') con_128_2 = tf.nn.relu(con_128_1,name='rel_128_1') mer_128 = tf.concat([lay_128_5,con_128_2],axis=0,name='mer_128_2') con_128_3 = tf.nn.conv2d(mer_128,filter=get_filter([3,3,128,128],na='w_22'),strides=[1,1,1,1],padding='SAME',name='mer_128_3') con_128_4 = tf.nn.relu(con_128_3,name='rel_128_2') con_128_5 = tf.layers.batch_normalization(con_128_4,axis=-1,name='mer_bn_128') con_128_6 = tf.nn.conv2d(con_128_5,filter=get_filter([3,3,128,128],na='w_23'),strides=[1,1,1,1],padding='SAME',name='mer_128_4') con_128_7 = tf.nn.relu(con_128_6,name='rel_128_3') up_64 = tf.image.resize_images(images=con_128_7,size=[64,64],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR) con_64_1 = tf.nn.conv2d(up_64,filter=get_filter([3,3,128,64],na='w_24'),strides=[1,1,1,1],padding='SAME',name='mer_64_1') con_64_2 = tf.nn.relu(con_64_1,name='rel_64_1') mer_64 = tf.concat([lay_64_5,con_64_2],axis=0,name='mer_64_2') con_64_3 = tf.nn.conv2d(mer_64,filter=get_filter([3,3,64,64],na='w_25'),strides=[1,1,1,1],padding='SAME',name='mer_64_3') con_64_4 = tf.nn.relu(con_64_3,name='rel_64_2') con_64_5 = tf.layers.batch_normalization(con_64_4,axis=-1,name='mer_bn_64') con_64_6 = tf.nn.conv2d(con_64_5,filter=get_filter([3,3,64,64],na='w_26'),strides=[1,1,1,1],padding='SAME',name='mer_64_4') con_64_7 = tf.nn.relu(con_64_6,name='rel_64_3') up_32 = tf.image.resize_images(images=con_64_7,size=[128,128],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR) con_32_1 = tf.nn.conv2d(up_32,filter=get_filter([3,3,64,32],na='w_27'),strides=[1,1,1,1],padding='SAME',name='mer_32_1') con_32_2 = tf.nn.relu(con_32_1,name='rel_32_1') mer_32 = tf.concat([lay_32_5,con_32_2],axis=0,name='mer_32_2') con_32_3 = 
tf.nn.conv2d(mer_32,filter=get_filter([3,3,32,32],na='w_28'),strides=[1,1,1,1],padding='SAME',name='mer_32_3') con_32_4 = tf.nn.relu(con_32_3,name='rel_32_2') con_32_5 = tf.layers.batch_normalization(con_32_4,axis=-1,name='mer_bn_32') con_32_6 = tf.nn.conv2d(con_32_5,filter=get_filter([3,3,32,32],na='w_29'),strides=[1,1,1,1],padding='SAME',name='mer_32_4') con_32_7 = tf.nn.relu(con_32_6,name='rel_32_3') up_16 = tf.image.resize_images(images=con_32_7,size=[256,256],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR) con_16_1 = tf.nn.conv2d(up_16,filter=get_filter([3,3,32,16],na='w_30'),strides=[1,1,1,1],padding='SAME',name='mer_16_1') con_16_2 = tf.nn.relu(con_16_1,name='rel_16_1') mer_16 = tf.concat([lay_16_5,con_16_2],axis=0,name='mer_16_2') con_16_3 = tf.nn.conv2d(mer_16,filter=get_filter([3,3,16,16],na='w_31'),strides=[1,1,1,1],padding='SAME',name='mer_16_3') con_16_4 = tf.nn.relu(con_16_3,name='rel_16_2') con_16_5 = tf.layers.batch_normalization(con_16_4,axis=-1,name='mer_bn_16') con_16_6 = tf.nn.conv2d(con_16_5,filter=get_filter([3,3,16,16],na='w_32'),strides=[1,1,1,1],padding='SAME',name='mer_16_4') con_16_7 = tf.nn.relu(con_16_6,name='rel_16_3') fin_img = tf.nn.conv2d(con_16_7,filter=get_filter([1,1,16,1],na='w_33'),strides=[1,1,1,1],padding='SAME',name='final_image') #fin_img = tf.nn.sigmoid(fin_img) return fin_img
Proper way to format date for Fedex API XML
I have a Django application where I am trying to make a call to Fedex's API to send out a shipping label for people wanting to send in a product for cash. When I try to make the call though it says there is a data validation issue with the Expiration field in the XML I am filling out. I swear this has worked in the past with me formatting the date as "YYYY-MM-DD", but now it is not. I read that with Fedex, you need to format the date as ISO, but that is also not passing the data validation. I am using a python package created to help with tapping Fedex's API. Django view function for sending API Call def Fedex(request, quote): label_link = '' expiration_date = datetime.datetime.now() + datetime.timedelta(days=10) # formatted_date = "%s-%s-%s" % (expiration_date.year, expiration_date.month, expiration_date.day) formatted_date = expiration_date.replace(microsecond=0).isoformat() if quote.device_type != 'laptop': box_length = 9 box_width = 12 box_height = 3 else: box_length = 12 box_width = 14 box_height = 3 logging.basicConfig(stream=sys.stdout, level=logging.INFO) ## Page 411 of FedEx Dev Guide - 20.14 Email Labels CONFIG_OBJ = FedexConfig(key=settings.FEDEX_KEY, password=settings.FEDEX_PASSWORD, account_number=settings.FEDEX_ACCOUNT, meter_number=settings.FEDEX_METER, use_test_server=settings.USE_FEDEX_TEST) fxreq = FedexCreatePendingShipRequestEmail(CONFIG_OBJ, customer_transaction_id='xxxxxx id:01') fxreq.RequestedShipment.ServiceType = 'FEDEX_GROUND' fxreq.RequestedShipment.PackagingType = 'YOUR_PACKAGING' fxreq.RequestedShipment.DropoffType = 'REGULAR_PICKUP' fxreq.RequestedShipment.ShipTimestamp = datetime.datetime.now() # Special fields for the email label fxreq.RequestedShipment.SpecialServicesRequested.SpecialServiceTypes = ('RETURN_SHIPMENT', 'PENDING_SHIPMENT') fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.Type = 'EMAIL' fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.ExpirationDate = formatted_date 
email_address = fxreq.create_wsdl_object_of_type('EMailRecipient') email_address.EmailAddress = quote.email email_address.Role = 'SHIPMENT_COMPLETOR' # RETURN SHIPMENT DETAIL fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnType = ('PENDING') fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnEMailDetail = fxreq.create_wsdl_object_of_type( 'ReturnEMailDetail') fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnEMailDetail.MerchantPhoneNumber = 'x-xxx-xxx-xxxx' fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.EmailLabelDetail.Recipients = [email_address] fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.EmailLabelDetail.Message = "Xxxxxx Xxxxxx" fxreq.RequestedShipment.LabelSpecification = {'LabelFormatType': 'COMMON2D', 'ImageType': 'PDF'} fxreq.RequestedShipment.Shipper.Contact.PersonName = quote.first_name + ' ' + quote.last_name fxreq.RequestedShipment.Shipper.Contact.CompanyName = "" fxreq.RequestedShipment.Shipper.Contact.PhoneNumber = quote.phone fxreq.RequestedShipment.Shipper.Address.StreetLines.append(quote.address) fxreq.RequestedShipment.Shipper.Address.City = quote.city fxreq.RequestedShipment.Shipper.Address.StateOrProvinceCode = quote.state fxreq.RequestedShipment.Shipper.Address.PostalCode = quote.zip fxreq.RequestedShipment.Shipper.Address.CountryCode = settings.FEDEX_COUNTRY_CODE fxreq.RequestedShipment.Recipient.Contact.PhoneNumber = settings.FEDEX_PHONE_NUMBER fxreq.RequestedShipment.Recipient.Address.StreetLines = settings.FEDEX_STREET_LINES fxreq.RequestedShipment.Recipient.Address.City = settings.FEDEX_CITY fxreq.RequestedShipment.Recipient.Address.StateOrProvinceCode = settings.FEDEX_STATE_OR_PROVINCE_CODE fxreq.RequestedShipment.Recipient.Address.PostalCode = settings.FEDEX_POSTAL_CODE fxreq.RequestedShipment.Recipient.Address.CountryCode = settings.FEDEX_COUNTRY_CODE fxreq.RequestedShipment.Recipient.AccountNumber = 
settings.FEDEX_ACCOUNT fxreq.RequestedShipment.Recipient.Contact.PersonName = '' fxreq.RequestedShipment.Recipient.Contact.CompanyName = 'Xxxxxx Xxxxxx' fxreq.RequestedShipment.Recipient.Contact.EMailAddress = 'xxxxxx#xxxxxxxxx' # Details of Person Who is Paying for the Shipping fxreq.RequestedShipment.ShippingChargesPayment.PaymentType = 'SENDER' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.AccountNumber = settings.FEDEX_ACCOUNT fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.PersonName = 'Xxxxx Xxxxx' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.CompanyName = 'Xxxxx Xxxxxx' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.PhoneNumber = 'x-xxx-xxx-xxxx' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.EMailAddress = 'xxxxxxx#xxxxxxxxx' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.StreetLines = 'Xxxxx N. xXxxxxx' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.City = 'Xxxxxxx' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.StateOrProvinceCode = 'XX' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.PostalCode = 'xxxxx' fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.CountryCode = 'US' # Package Info package1 = fxreq.create_wsdl_object_of_type('RequestedPackageLineItem') package1.SequenceNumber = '1' package1.Weight.Value = 1 package1.Weight.Units = "LB" package1.Dimensions.Length = box_length package1.Dimensions.Width = box_width package1.Dimensions.Height = box_height package1.Dimensions.Units = "IN" package1.ItemDescription = 'Phone' fxreq.RequestedShipment.RequestedPackageLineItems.append(package1) fxreq.RequestedShipment.PackageCount = '1' try: fxreq.send_request() label_link = str(fxreq.response.CompletedShipmentDetail.AccessDetail.AccessorDetails[0].EmailLabelUrl) 
except Exception as exc: print('Fedex Error') print('===========') print(exc) print('==========') return label_link Error Log Error:cvc-datatype-valid.1.2.1: \\'2017-11-3\\' is not a valid value for \\'date\\'.\\ncvc-type.3.1.3: The value \\'2017-11-3\\' of element \\'ns0:ExpirationDate\\' is not valid."\\n }\\n }' (Error code: -1)