Values not displaying in graph when using Plotly candlestick graphs - Python

I am trying to graph some values from an Excel file (which I cannot change). I used .get_group() with multiple columns to get the main values for each month, and then put them into lists that I pass to a candlestick graph. I get no errors whatsoever, but apart from the x values, no other data is displayed on the graph.
import plotly.graph_objects as go
import pandas as pd
from datetime import datetime

def main():
    df = pd.read_excel('DatosUnited.xlsx')
    ## Mean format: mean/month/year/column
    ## OPEN
    monthYear = df.groupby(['Month', 'Year'])
    june2019 = monthYear.get_group((6, 2019))
    june2019Open = june2019['Open']
    mJUN19O = june2019.mean()
    july2019 = monthYear.get_group((7, 2019))
    july2019Open = july2019['Open']
    mJUL19O = july2019.mean()
    aug2019 = monthYear.get_group((8, 2019))
    aug2019Open = aug2019['Open']
    mAUG19O = aug2019.mean()
    sep2019 = monthYear.get_group((9, 2019))
    sep2019Open = aug2019['Open']
    mSEP19O = sep2019.mean()
    oct2019 = monthYear.get_group((10, 2019))
    oct2019Open = oct2019['Open']
    mOCT19O = sep2019.mean()
    nov2019 = monthYear.get_group((11, 2019))
    nov2019Open = nov2019['Open']
    mNOV19O = nov2019.mean()
    dic2019 = monthYear.get_group((12, 2019))
    nov2019Open = nov2019['Open']
    mDIC19O = dic2019.mean()
    ## High
    june2019 = monthYear.get_group((6, 2019))
    june2019High = june2019['High']
    mJUN19H = june2019.mean()
    july2019 = monthYear.get_group((7, 2019))
    july2019High = july2019['High']
    mJUL19H = july2019.mean()
    aug2019 = monthYear.get_group((8, 2019))
    aug2019High = aug2019['High']
    mAUG19H = aug2019.mean()
    sep2019 = monthYear.get_group((9, 2019))
    sep2019High = aug2019['High']
    mSEP19H = sep2019.mean()
    oct2019 = monthYear.get_group((10, 2019))
    oct2019High = oct2019['High']
    mOCT19H = sep2019.mean()
    nov2019 = monthYear.get_group((11, 2019))
    nov2019High = nov2019['High']
    mNOV19H = nov2019.mean()
    dic2019 = monthYear.get_group((12, 2019))
    nov2019High = nov2019['High']
    mDIC19H = dic2019.mean()
    ## LOW
    june2019 = monthYear.get_group((6, 2019))
    june2019Low = june2019['Low']
    mJUN19L = june2019.mean()
    july2019 = monthYear.get_group((7, 2019))
    july2019Low = july2019['Low']
    mJUL19L = july2019.mean()
    aug2019 = monthYear.get_group((8, 2019))
    aug2019Low = aug2019['High']
    mAUG19L = aug2019.mean()
    sep2019 = monthYear.get_group((9, 2019))
    sep2019Low = aug2019['Low']
    mSEP19L = sep2019.mean()
    oct2019 = monthYear.get_group((10, 2019))
    oct2019Low = oct2019['Low']
    mOCT19L = sep2019.mean()
    nov2019 = monthYear.get_group((11, 2019))
    nov2019Low = nov2019['Low']
    mNOV19L = nov2019.mean()
    dic2019 = monthYear.get_group((12, 2019))
    nov2019Low = nov2019['Low']
    mDIC19L = dic2019.mean()
    ## Close/Price
    june2019 = monthYear.get_group((6, 2019))
    june2019C = june2019['Close/Price']
    mJUN19c = june2019.mean()
    july2019 = monthYear.get_group((7, 2019))
    july2019Low = july2019['Close/Price']
    mJUL19c = july2019.mean()
    aug2019 = monthYear.get_group((8, 2019))
    aug2019C = aug2019['Close/Price']
    mAUG19c = aug2019.mean()
    sep2019 = monthYear.get_group((9, 2019))
    sep2019C = aug2019['Close/Price']
    mSEP19c = sep2019.mean()
    oct2019 = monthYear.get_group((10, 2019))
    oct2019Low = oct2019['Close/Price']
    mOCT19c = sep2019.mean()
    nov2019 = monthYear.get_group((11, 2019))
    nov2019C = nov2019['Close/Price']
    mNOV19c = nov2019.mean()
    dic2019 = monthYear.get_group((12, 2019))
    nov2019C = nov2019['Close/Price']
    mDIC19c = dic2019.mean()

    openMeans = [mJUN19O, mJUL19O, mAUG19O, mSEP19O, mOCT19O, mNOV19O, mDIC19O]
    highMeans = [mJUN19H, mJUL19H, mAUG19H, mSEP19H, mOCT19H, mNOV19H, mDIC19H]
    lowMeans = [mJUN19L, mJUL19L, mAUG19L, mSEP19L, mOCT19L, mNOV19L, mDIC19L]
    closeMeans = [mJUN19c, mJUL19c, mAUG19c, mSEP19c, mOCT19c, mNOV19c, mDIC19c]
    dates = ['6-2019', '7-2019', '8-2019', '9-2019', '10-2019', '11-2019', '12-2019']

    fig = go.Figure(data=[go.Candlestick(x=dates,
                                         open=openMeans,
                                         high=highMeans,
                                         low=lowMeans,
                                         close=closeMeans)])
    fig.show()

main()
I believe there is something wrong with the format in which the mean values are stored, but I can't seem to find the solution.
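That suspicion is likely right: `june2019.mean()` takes the mean of the whole group DataFrame and returns a Series (one value per numeric column), so `openMeans` and friends end up as lists of Series rather than scalars, and the candlestick trace has nothing usable to plot on the y axis. A minimal sketch of the scalar-per-column alternative (column names assumed from the code above); computing all monthly means in one `groupby().mean()` also sidesteps the copy-paste slips such as `sep2019Open = aug2019['Open']` and `mOCT19O = sep2019.mean()`:

import plotly.graph_objects as go
import pandas as pd

# Sketch, not the original code: one scalar mean per (Month, Year) and column.
df = pd.read_excel('DatosUnited.xlsx')  # assumed to contain Month/Year/OHLC columns
monthly = df.groupby(['Month', 'Year'])[['Open', 'High', 'Low', 'Close/Price']].mean()

months = [(6, 2019), (7, 2019), (8, 2019), (9, 2019), (10, 2019), (11, 2019), (12, 2019)]
dates = [f'{m}-{y}' for m, y in months]

fig = go.Figure(data=[go.Candlestick(
    x=dates,
    open=[monthly.loc[m, 'Open'] for m in months],
    high=[monthly.loc[m, 'High'] for m in months],
    low=[monthly.loc[m, 'Low'] for m in months],
    close=[monthly.loc[m, 'Close/Price'] for m in months],
)])
fig.show()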

ValueError: X has 41 features, but RandomForestClassifier is expecting 42 features as input

I am trying to code a machine learning model that predicts the outcome of college basketball games (code shown below).
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
folder = '../input/mens-march-mania-2022/MDataFiles_Stage1/'
Seeds = pd.read_csv(folder+'MNCAATourneySeeds.csv')
Conferences = pd.read_csv(folder+'MTeamConferences.csv')
team_ids = pd.read_csv(folder + 'MTeams.csv')
reg_season_data = pd.read_csv(folder+'MRegularSeasonDetailedResults.csv')
TourneyCompact = pd.read_csv(folder+'MNCAATourneyCompactResults.csv')
tournament_data = pd.read_csv(folder + 'MNCAATourneyDetailedResults.csv')
display(reg_season_data.columns.values)
WinTeams = pd.DataFrame()
LoseTeams = pd.DataFrame()
columns = ['Season', 'TeamID', 'Points', 'LTeamID', 'OppPoints',
           'Loc', 'NumOT', 'FGM', 'FGA', 'FGM3', 'FGA3', 'FTM', 'FTA',
           'OR', 'DR', 'Ast', 'TO', 'Stl', 'Blk', 'PF', 'LFGM', 'LFGA',
           'LFGM3', 'LFGA3', 'LFTM', 'LFTA', 'LOR', 'LDR', 'LAst', 'LTO',
           'LStl', 'LBlk', 'LPF']
WinTeams[columns] = reg_season_data[['Season', 'WTeamID', 'WScore', 'LTeamID', 'LScore',
                                     'WLoc', 'NumOT', 'WFGM', 'WFGA', 'WFGM3', 'WFGA3', 'WFTM', 'WFTA',
                                     'WOR', 'WDR', 'WAst', 'WTO', 'WStl', 'WBlk', 'WPF', 'LFGM', 'LFGA',
                                     'LFGM3', 'LFGA3', 'LFTM', 'LFTA', 'LOR', 'LDR', 'LAst', 'LTO',
                                     'LStl', 'LBlk', 'LPF']]
WinTeams['Wins'] = 1
WinTeams['Loses'] = 0
LoseTeams[columns] = reg_season_data[['Season', 'LTeamID', 'LScore', 'WTeamID', 'WScore',
                                      'WLoc', 'NumOT', 'LFGM', 'LFGA', 'LFGM3', 'LFGA3', 'LFTM', 'LFTA',
                                      'LOR', 'LDR', 'LAst', 'LTO', 'LStl', 'LBlk', 'LPF', 'WFGM', 'WFGA',
                                      'WFGM3', 'WFGA3', 'WFTM', 'WFTA', 'WOR', 'WDR', 'WAst', 'WTO',
                                      'WStl', 'WBlk', 'WPF']]
def change_loc(loc):
    if loc == 'H':
        return 'A'
    elif loc == 'A':
        return 'H'
    else:
        return 'N'

LoseTeams['Loc'] = LoseTeams['Loc'].apply(change_loc)
LoseTeams['Wins'] = 0
LoseTeams['Loses'] = 1
WinLoseTeams = pd.concat([WinTeams, LoseTeams])
combinedTeams = WinLoseTeams.groupby(['Season', 'TeamID']).sum()
combinedTeams['NumGames'] = combinedTeams['Wins'] + combinedTeams['Loses']
display(combinedTeams.columns.values)
RegularSeasonInput = pd.DataFrame()
RegularSeasonInput['WinRatio'] = combinedTeams['Wins']/combinedTeams['NumGames']
RegularSeasonInput['PointsPerGame'] = combinedTeams['Points']/combinedTeams['NumGames']
RegularSeasonInput['PointsAllowedPerGame'] = combinedTeams['OppPoints']/combinedTeams['NumGames']
RegularSeasonInput['PointsRatio'] = combinedTeams['Points']/combinedTeams['OppPoints']
RegularSeasonInput['OTsPerGame'] = combinedTeams['NumOT']/combinedTeams['NumGames']
RegularSeasonInput['FGperGame'] = combinedTeams['FGM']/combinedTeams['NumGames']
RegularSeasonInput['FGRatio'] = combinedTeams['FGM']/combinedTeams['FGA']
RegularSeasonInput['FGallowedPerGame'] = combinedTeams['LFGM']/combinedTeams['NumGames']
RegularSeasonInput['OppFGRatio'] = combinedTeams['LFGM']/combinedTeams['LFGA']
RegularSeasonInput['Opp3FGRatio'] = combinedTeams['LFGM3']/combinedTeams['LFGA3']
RegularSeasonInput['FTallowedPerGame'] = combinedTeams['LFTM']/combinedTeams['NumGames']
RegularSeasonInput['OffensiveReboundRatio'] = combinedTeams['OR'] / (combinedTeams['OR'] + combinedTeams['LDR'])
RegularSeasonInput['AstPerGame'] = combinedTeams['Ast']/combinedTeams['NumGames']
RegularSeasonInput['LDefensiveReboundRatio'] = combinedTeams['LDR'] / (combinedTeams['LDR'] + combinedTeams['OR'])
RegularSeasonInput['TOPerGame'] = combinedTeams['TO']/combinedTeams['NumGames']
RegularSeasonInput['StlPerGame'] = combinedTeams['Stl']/combinedTeams['NumGames']
RegularSeasonInput['BlkPerGame'] = combinedTeams['Blk']/combinedTeams['NumGames']
RegularSeasonInput['PFPerGame'] = combinedTeams['PF']/combinedTeams['NumGames']
RegularSeasonInput['LTOPerGame'] = combinedTeams['LTO']/combinedTeams['NumGames']
RegularSeasonInput['LStlPerGame'] = combinedTeams['LStl']/combinedTeams['NumGames']
RegularSeasonInput['LBlkPerGame'] = combinedTeams['LBlk']/combinedTeams['NumGames']
RegularSeasonInput['LPFPerGame'] = combinedTeams['LPF']/combinedTeams['NumGames']
RegularSeasonInput['PossesionsPerGame'] = (combinedTeams['FGA'] - combinedTeams['OR'] + combinedTeams['TO'] + (0.475*combinedTeams['FTA']))/ combinedTeams['NumGames']
RegularSeasonInput['OppPossesionsPerGame'] = (combinedTeams['LFGA'] - combinedTeams['LOR'] + combinedTeams['LTO'] + (0.475*combinedTeams['LFTA']))/ combinedTeams['NumGames']
RegularSeasonInput['PointsPerPossesion'] = (combinedTeams['Points']/combinedTeams['NumGames'])/((combinedTeams['FGA'] - combinedTeams['OR'] + combinedTeams['TO'] + (0.475*combinedTeams['FTA']))/ combinedTeams['NumGames'])
RegularSeasonInput['TrueShooting%'] = combinedTeams['Points']/(2*(combinedTeams['FGA'] + 0.44 * combinedTeams['FTA']))
RegularSeasonInput['OppTrueShooting%'] = combinedTeams['OppPoints']/(2*(combinedTeams['LFGA'] + 0.44 * combinedTeams['LFTA']))
RegularSeasonInput['EffectiveFG%'] = (combinedTeams['FGM'] + 0.5 * combinedTeams['FGM3'])/combinedTeams['FGA']
RegularSeasonInput['OppEffectiveFG%'] = (combinedTeams['LFGM'] + 0.5 * combinedTeams['LFGM3'])/combinedTeams['LFGA']
RegularSeasonInput['AstToTORatio'] = combinedTeams['Ast']/ combinedTeams['TO']
RegularSeasonInput['OppAstToTORatio'] = combinedTeams['LAst']/ combinedTeams['LTO']
RegularSeasonInput['Efficiency'] = (combinedTeams['Points'] + combinedTeams['OR'] + combinedTeams['DR'] + combinedTeams['Ast'] + combinedTeams['Stl'] + combinedTeams['Blk'] - (combinedTeams['FGA'] - combinedTeams['FGM']) - (combinedTeams['FTA'] - combinedTeams['FTM']) - combinedTeams['TO'])/combinedTeams['NumGames']
RegularSeasonInput['OppEfficiency'] = (combinedTeams['OppPoints'] + combinedTeams['LOR'] + combinedTeams['LDR'] + combinedTeams['LAst'] + combinedTeams['LStl'] + combinedTeams['LBlk'] - (combinedTeams['LFGA'] - combinedTeams['LFGM']) - (combinedTeams['LFTA'] - combinedTeams['LFTM']) - combinedTeams['LTO'])/combinedTeams['NumGames']
RegularSeasonInput['OppFTRate'] = combinedTeams['LFTA']/combinedTeams['LFGA']
RegularSeasonInput['OffensiveEfficiencyPer100'] = (combinedTeams['Points']/(combinedTeams['FGA'] - combinedTeams['OR'] + combinedTeams['TO'] + (0.475*combinedTeams['FTA'])))*100
RegularSeasonInput['DefensiveEfficiencyPer100'] = (combinedTeams['OppPoints']/(combinedTeams['FGA'] - combinedTeams['OR'] + combinedTeams['TO'] + (0.475*combinedTeams['FTA'])))*100
RegularSeasonInput['OppOffensiveEfficiencyPer100'] = (combinedTeams['OppPoints']/(combinedTeams['LFGA'] - combinedTeams['LOR'] + combinedTeams['LTO'] + (0.475*combinedTeams['LFTA'])))*100
RegularSeasonInput['OppDefensiveEfficiencyPer100'] = (combinedTeams['Points']/(combinedTeams['LFGA'] - combinedTeams['LOR'] + combinedTeams['LTO'] + (0.475*combinedTeams['LFTA'])))*100
RegularSeasonInput['Opp2P%'] = (combinedTeams['LFGM'] - combinedTeams['LFGM3'])/(combinedTeams['LFGA'] - combinedTeams['LFGA3'])
RegularSeasonInput['2P%'] = (combinedTeams['FGM'] - combinedTeams['FGM3'])/(combinedTeams['FGA'] - combinedTeams['FGA3'])
RegularSeasonInput['Ew%'] = combinedTeams['Points']**11.5/(combinedTeams['Points']**11.5 + combinedTeams['OppPoints']**11.5)
display(RegularSeasonInput)
seed_dict = Seeds.set_index(['Season', 'TeamID'])
display(seed_dict.index.values)
TourneyInput = pd.DataFrame()
winIDs = TourneyCompact['WTeamID']
loseIDs = TourneyCompact['LTeamID']
season = TourneyCompact['Season']
winners = pd.DataFrame()
winners[['Season', 'Team1', 'Team2']] = TourneyCompact[['Season', 'WTeamID', 'LTeamID']]
winners['Result'] = 1
losers = pd.DataFrame()
losers[['Season', 'Team1', 'Team2']] = TourneyCompact[['Season', 'LTeamID', 'WTeamID']]
losers['Result'] = 0
TourneyInput = pd.concat([winners, losers])
TourneyInput = TourneyInput[TourneyInput['Season']>=2003].reset_index(drop=True)
team1seeds = []
team2seeds = []
for x in range(len(TourneyInput)):
    idx = (TourneyInput['Season'][x], TourneyInput['Team1'][x])
    seed = seed_dict.loc[idx].values[0]
    if len(seed) == 4:
        seed = int(seed[1:-1])
    else:
        seed = int(seed[1:])
    team1seeds.append(seed)
    idx = (TourneyInput['Season'][x], TourneyInput['Team2'][x])
    seed = seed_dict.loc[idx].values[0]
    if len(seed) == 4:
        seed = int(seed[1:-1])
    else:
        seed = int(seed[1:])
    team2seeds.append(seed)
TourneyInput['Team1Seed'] = team1seeds
TourneyInput['Team2Seed'] = team2seeds
display(TourneyInput)
outscores = []
for x in range(len(TourneyInput)):
    idx = (TourneyInput['Season'][x], TourneyInput['Team1'][x])
    team1score = RegularSeasonInput.loc[idx]
    team1score['Seed'] = TourneyInput['Team1Seed'][x]
    idx = (TourneyInput['Season'][x], TourneyInput['Team2'][x])
    team2score = RegularSeasonInput.loc[idx]
    team2score['Seed'] = TourneyInput['Team2Seed'][x]
    outscore = team1score - team2score
    outscore['Result'] = TourneyInput['Result'][x]
    outscores.append(outscore)
outscores = pd.DataFrame(outscores)
display(outscores)
corrs = round(outscores.corr(), 2)
display(np.abs(corrs['Result']))
import seaborn as sns
plt.figure(figsize=(15,10))
sns.heatmap(corrs)
plt.show()
X = outscores[outscores.columns[:-1]].values
Y = outscores['Result'].values
np.random.seed(1)
idx = np.random.permutation(len(X))
train_idx = idx[:int(-0.2*len(X))]
test_idx = idx[int(-0.2*len(X)):]
X_train = X[train_idx]
X_test = X[test_idx]
Y_train = Y[train_idx]
Y_test = Y[test_idx]
mins = X_train.min(axis=0)
maxs = X_train.max(axis=0)
X_train = (X_train - mins)/(maxs - mins)
X_test = (X_test - mins)/(maxs - mins)
print(X_train.shape, X_test.shape, Y_train.shape, Y_test.shape)
Whenever I run the cell below, it raises the error: "X has 41 features, but RandomForestClassifier is expecting 42 features as input."
def predict_matchup(team1, team2, season, team1_seed, team2_seed):
    team1_name = list(team_ids[team_ids['TeamID'] == team1]['TeamName'])[0]
    team2_name = list(team_ids[team_ids['TeamID'] == team2]['TeamName'])[0]
    print(f'{team1_seed} {team1_name} vs {team2_seed} {team2_name}')
    season_data = RegularSeasonInput.query(f'Season == {season}').reset_index()
    team1_data = season_data.query(f'TeamID == {team1}')
    team2_data = season_data.query(f'TeamID == {team2}')
    match_data = team1_data.merge(team2_data, on='Season', suffixes=['', '_Opponent'])
    match_data['Seed_Diff'] = team2_seed - team1_seed
    match_data = match_data[['WinRatio', 'PointsPerGame', 'PointsAllowedPerGame', 'PointsRatio',
                             'OTsPerGame', 'FGperGame', 'FGRatio', 'FGallowedPerGame',
                             'OppFGRatio', 'Opp3FGRatio', 'FTallowedPerGame',
                             'OffensiveReboundRatio', 'AstPerGame',
                             'LDefensiveReboundRatio', 'TOPerGame', 'StlPerGame', 'BlkPerGame',
                             'PFPerGame', 'LTOPerGame', 'LStlPerGame', 'LPFPerGame', 'PossesionsPerGame',
                             'OppPossesionsPerGame', 'PointsPerPossesion', 'TrueShooting%', 'OppTrueShooting%',
                             'EffectiveFG%', 'OppEffectiveFG%', 'AstToTORatio', 'OppAstToTORatio',
                             'Efficiency', 'OppEfficiency', 'OppFTRate', 'OffensiveEfficiencyPer100',
                             'DefensiveEfficiencyPer100', 'OppOffensiveEfficiencyPer100',
                             'OppDefensiveEfficiencyPer100', 'Opp2P%', '2P%', 'Ew%', 'LBlkPerGame']]
    model.predict(match_data)

predict_matchup(1333, 1393, 2003, 1, 3)
from sklearn.ensemble import RandomForestClassifier
model = RandomForestClassifier(random_state=3)
model = model.fit(X_train, Y_train)
model.score(X_test, Y_test)
I don't know what I am doing, please help; you do not understand the frustration this project has caused me. When running the code above, I expected it to output the model's predictions based on all of the inputs.
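One hedged reading of the mismatch, based only on the code above: the training matrix X is built from outscores, whose rows are team1's stats minus team2's stats plus a 'Seed' difference column, i.e. 42 features. match_data instead selects 41 raw (undifferenced) team1 columns and never includes the 'Seed_Diff' it computes, hence "41 vs 42". A sketch that rebuilds the same 42-feature difference vector (it assumes model, mins, and maxs already exist, so the training cell must run before calling it):

def predict_matchup(team1, team2, season, team1_seed, team2_seed):
    team1_name = list(team_ids[team_ids['TeamID'] == team1]['TeamName'])[0]
    team2_name = list(team_ids[team_ids['TeamID'] == team2]['TeamName'])[0]
    print(f'{team1_seed} {team1_name} vs {team2_seed} {team2_name}')
    # Same construction as the training rows: per-team stats differenced,
    # then the seed difference appended as the 42nd feature.
    diff = RegularSeasonInput.loc[(season, team1)] - RegularSeasonInput.loc[(season, team2)]
    diff['Seed'] = team1_seed - team2_seed
    X_match = diff.values.reshape(1, -1)
    X_match = (X_match - mins) / (maxs - mins)  # reuse the training min-max scaling
    return model.predict(X_match)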

Asammdf: Rename Channel Group

I am working with asammdf to plot signals and I have a problem: how can I rename the channel groups with the signal names?
Here is my code:
from asammdf import MDF, Signal  # imports added for completeness; table_list and con come from surrounding code
import numpy as np
import pandas as pd

mdf = MDF()
sigs = []
for equipment in table_list:
    print("Table name:", equipment[0])
    df = pd.read_sql_query('select * from ' + equipment[0], con)
    df = df.replace(np.nan, 0)
    if equipment[0] == 'state':
        df_time = (df['id'] - df.iloc[0]['id'])
    else:
        df_time = (df['ts'] - df.iloc[0]['ts']) * 1e-6
        df.pop('ts')
    sigs = []
    for signal in df.columns.to_list():
        test_signal = Signal(samples=df[signal], timestamps=df_time,
                             name=signal,
                             unit='')
        sigs.append(test_signal)
    mdf.append(sigs)
mdf.save('..\\Output\\test_complete.mf4', overwrite=True)
You should change the channel group comment:
mdf = MDF()
sigs = []
for equipment in table_list:
    print("Table name:", equipment[0])
    df = pd.read_sql_query('select * from ' + equipment[0], con)
    df = df.replace(np.nan, 0)
    if equipment[0] == 'state':
        df_time = (df['id'] - df.iloc[0]['id'])
    else:
        df_time = (df['ts'] - df.iloc[0]['ts']) * 1e-6
        df.pop('ts')
    sigs = []
    for signal in df.columns.to_list():
        test_signal = Signal(samples=df[signal], timestamps=df_time,
                             name=signal,
                             unit='')
        sigs.append(test_signal)
    mdf.append(sigs)
    channel_group = mdf.groups[-1].channel_group
    channel_group.comment = "fancy name"
mdf.save('..\\Output\\test_complete.mf4', overwrite=True)
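A small follow-up sketch using only the API shown above: since the comment is set on whichever group was appended last, you can give each group a meaningful label, e.g. the table it came from, right after the corresponding append inside the loop (equipment is the loop variable above):

mdf.append(sigs)
# label the group just appended with the equipment/table name
mdf.groups[-1].channel_group.comment = equipment[0]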

How to get the correct output shape in a U-Net?

I am building a U-Net for binary image segmentation using TensorFlow's tf.nn API. My input image has dimensions (256,256,3) and the output binary image has dimensions (256,256,1). The output of the U-Net model should be (1,256,256,1), but the output shape comes out as (7,256,256,3). For the convolutional kernels I am using TensorFlow's truncated normal initializer, with every dtype as float32. Am I creating multiple output layers somewhere in the code?
import tensorflow as tf  # import added; the snippet uses TF 1.x APIs

def get_filter(shape, na):
    w = tf.get_variable(name=na, shape=shape, dtype='float32',
                        initializer=tf.truncated_normal_initializer(dtype='float32'))
    return w

def unet(inp):
    #f1 = get_filter(shape=[3,3,3,16])
    lay_16_1 = tf.nn.conv2d(inp,filter=get_filter(shape=[3,3,3,16],na='w_1'),strides=[1,1,1,1],padding='SAME',name='conv_16_1')
    lay_16_2 = tf.nn.relu(lay_16_1,name='re_16_1')
    lay_16_3 = tf.layers.batch_normalization(lay_16_2,axis=-1,name='bn_16')
    lay_16_4 = tf.nn.conv2d(lay_16_3,filter=get_filter([3,3,16,16],na='w_2'),strides=[1,1,1,1],padding='SAME',name='conv_16_2')
    lay_16_5 = tf.nn.relu(lay_16_4,name='re_16_2')
    lay_p1 = tf.nn.max_pool(lay_16_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_1')
    lay_32_1 = tf.nn.conv2d(lay_p1,filter=get_filter([3,3,16,32],na='w_3'),strides=[1,1,1,1],padding='SAME',name='conv_32_1')
    lay_32_2 = tf.nn.relu(lay_32_1,name='re_32_1')
    lay_32_3 = tf.layers.batch_normalization(lay_32_2,axis=-1,name='bn_32')
    lay_32_4 = tf.nn.conv2d(lay_32_3,filter=get_filter([3,3,32,32],na='w_4'),strides=[1,1,1,1],padding='SAME',name='conv_32_2')
    lay_32_5 = tf.nn.relu(lay_32_4,name='re_32_2')
    lay_p2 = tf.nn.max_pool(lay_32_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_2')
    lay_64_1 = tf.nn.conv2d(lay_p2,filter=get_filter([3,3,32,64],na='w_5'),strides=[1,1,1,1],padding='SAME',name='conv_64_1')
    lay_64_2 = tf.nn.relu(lay_64_1,name='re_64_1')
    lay_64_3 = tf.layers.batch_normalization(lay_64_2,axis=-1,name='bn_64')
    lay_64_4 = tf.nn.conv2d(lay_64_3,filter=get_filter([3,3,64,64],na='w_6'),strides=[1,1,1,1],padding='SAME',name='conv_64_2')
    lay_64_5 = tf.nn.relu(lay_64_4,name='re_64_2')
    lay_p3 = tf.nn.max_pool(lay_64_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_3')
    lay_128_1 = tf.nn.conv2d(lay_p3,filter=get_filter([3,3,64,128],na='w_7'),strides=[1,1,1,1],padding='SAME',name='conv_128_1')
    lay_128_2 = tf.nn.relu(lay_128_1,name='re_128_1')
    lay_128_3 = tf.layers.batch_normalization(lay_128_2,axis=-1,name='bn_128')
    lay_128_4 = tf.nn.conv2d(lay_128_3,filter=get_filter([3,3,128,128],na='w_8'),strides=[1,1,1,1],padding='SAME',name='conv_128_2')
    lay_128_5 = tf.nn.relu(lay_128_4,name='re_128_2')
    lay_p4 = tf.nn.max_pool(lay_128_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_4')
    lay_256_1 = tf.nn.conv2d(lay_p4,filter=get_filter([3,3,128,256],na='w_9'),strides=[1,1,1,1],padding='SAME',name='conv_256_1')
    lay_256_2 = tf.nn.relu(lay_256_1,name='re_256_1')
    lay_256_3 = tf.layers.batch_normalization(lay_256_2,axis=-1,name='bn_256')
    lay_256_4 = tf.nn.conv2d(lay_256_3,filter=get_filter([3,3,256,256],na='w_10'),strides=[1,1,1,1],padding='SAME',name='conv_256_2')
    lay_256_5 = tf.nn.relu(lay_256_4,name='re_256_2')
    lay_p5 = tf.nn.max_pool(lay_256_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_5')
    lay_512_1 = tf.nn.conv2d(lay_p5,filter=get_filter([3,3,256,512],na='w_11'),strides=[1,1,1,1],padding='SAME',name='conv_512_1')
    lay_512_2 = tf.nn.relu(lay_512_1,name='re_512_1')
    lay_512_3 = tf.layers.batch_normalization(lay_512_2,axis=-1,name='bn_512')
    lay_512_4 = tf.nn.conv2d(lay_512_3,filter=get_filter([3,3,512,512],na='w_12'),strides=[1,1,1,1],padding='SAME',name='conv_512_2')
    lay_512_5 = tf.nn.relu(lay_512_4,name='re_512_2')
    lay_p6 = tf.nn.max_pool(lay_512_5,ksize=[1,2,2,1],strides=[1,2,2,1],padding='SAME',name='pool_6')
    lay_1024_1 = tf.nn.conv2d(lay_p6,filter=get_filter([3,3,512,1024],na='w_13'),strides=[1,1,1,1],padding='SAME',name='conv_1024_1')
    lay_1024_2 = tf.nn.relu(lay_1024_1,name='re_1024_1')
    lay_1024_3 = tf.layers.batch_normalization(lay_1024_2,axis=-1,name='bn_1024')
    lay_1024_4 = tf.nn.conv2d(lay_1024_3,filter=get_filter([3,3,1024,1024],na='w_14'),strides=[1,1,1,1],padding='SAME',name='conv_1024_2')
    lay_1024_5 = tf.nn.relu(lay_1024_4,name='re_1024_2')
    #lay_p7 = tf.nn.max_pool(lay_1024,ksize=[1,2,2,1],strides=[1,1,1,1],padding='SAME',name='pool_7')
    up_512 = tf.image.resize_images(images=lay_1024_5,size=[8,8],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    con_512_1 = tf.nn.conv2d(up_512,filter=get_filter([3,3,1024,512],na='w_15'),strides=[1,1,1,1],padding='SAME',name='mer_512_1')
    con_512_2 = tf.nn.relu(con_512_1,name='rel_512_1')
    mer_512 = tf.concat([lay_512_5,con_512_2],axis=0,name='mer_512_2')
    con_512_3 = tf.nn.conv2d(mer_512,filter=get_filter([3,3,512,512],na='w_16'),strides=[1,1,1,1],padding='SAME',name='mer_512_3')
    con_512_4 = tf.nn.relu(con_512_3,name='rel_512_2')
    con_512_5 = tf.layers.batch_normalization(con_512_4,axis=-1,name='mer_bn_512')
    con_512_6 = tf.nn.conv2d(con_512_5,filter=get_filter([3,3,512,512],na='w_17'),strides=[1,1,1,1],padding='SAME',name='mer_512_4')
    con_512_7 = tf.nn.relu(con_512_6,name='rel_512_3')
    up_256 = tf.image.resize_images(images=con_512_7,size=[16,16],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    con_256_1 = tf.nn.conv2d(up_256,filter=get_filter([3,3,512,256],na='w_18'),strides=[1,1,1,1],padding='SAME',name='mer_256_1')
    con_256_2 = tf.nn.relu(con_256_1,name='rel_256_1')
    mer_256 = tf.concat([lay_256_5,con_256_2],axis=0,name='mer_256_2')
    con_256_3 = tf.nn.conv2d(mer_256,filter=get_filter([3,3,256,256],na='w_19'),strides=[1,1,1,1],padding='SAME',name='mer_256_3')
    con_256_4 = tf.nn.relu(con_256_3,name='rel_256_2')
    con_256_5 = tf.layers.batch_normalization(con_256_4,axis=-1,name='mer_bn_256')
    con_256_6 = tf.nn.conv2d(con_256_5,filter=get_filter([3,3,256,256],na='w_20'),strides=[1,1,1,1],padding='SAME',name='mer_256_4')
    con_256_7 = tf.nn.relu(con_256_6,name='rel_256_3')
    up_128 = tf.image.resize_images(images=con_256_7,size=[32,32],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    con_128_1 = tf.nn.conv2d(up_128,filter=get_filter([3,3,256,128],na='w_21'),strides=[1,1,1,1],padding='SAME',name='mer_128_1')
    con_128_2 = tf.nn.relu(con_128_1,name='rel_128_1')
    mer_128 = tf.concat([lay_128_5,con_128_2],axis=0,name='mer_128_2')
    con_128_3 = tf.nn.conv2d(mer_128,filter=get_filter([3,3,128,128],na='w_22'),strides=[1,1,1,1],padding='SAME',name='mer_128_3')
    con_128_4 = tf.nn.relu(con_128_3,name='rel_128_2')
    con_128_5 = tf.layers.batch_normalization(con_128_4,axis=-1,name='mer_bn_128')
    con_128_6 = tf.nn.conv2d(con_128_5,filter=get_filter([3,3,128,128],na='w_23'),strides=[1,1,1,1],padding='SAME',name='mer_128_4')
    con_128_7 = tf.nn.relu(con_128_6,name='rel_128_3')
    up_64 = tf.image.resize_images(images=con_128_7,size=[64,64],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    con_64_1 = tf.nn.conv2d(up_64,filter=get_filter([3,3,128,64],na='w_24'),strides=[1,1,1,1],padding='SAME',name='mer_64_1')
    con_64_2 = tf.nn.relu(con_64_1,name='rel_64_1')
    mer_64 = tf.concat([lay_64_5,con_64_2],axis=0,name='mer_64_2')
    con_64_3 = tf.nn.conv2d(mer_64,filter=get_filter([3,3,64,64],na='w_25'),strides=[1,1,1,1],padding='SAME',name='mer_64_3')
    con_64_4 = tf.nn.relu(con_64_3,name='rel_64_2')
    con_64_5 = tf.layers.batch_normalization(con_64_4,axis=-1,name='mer_bn_64')
    con_64_6 = tf.nn.conv2d(con_64_5,filter=get_filter([3,3,64,64],na='w_26'),strides=[1,1,1,1],padding='SAME',name='mer_64_4')
    con_64_7 = tf.nn.relu(con_64_6,name='rel_64_3')
    up_32 = tf.image.resize_images(images=con_64_7,size=[128,128],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    con_32_1 = tf.nn.conv2d(up_32,filter=get_filter([3,3,64,32],na='w_27'),strides=[1,1,1,1],padding='SAME',name='mer_32_1')
    con_32_2 = tf.nn.relu(con_32_1,name='rel_32_1')
    mer_32 = tf.concat([lay_32_5,con_32_2],axis=0,name='mer_32_2')
    con_32_3 = tf.nn.conv2d(mer_32,filter=get_filter([3,3,32,32],na='w_28'),strides=[1,1,1,1],padding='SAME',name='mer_32_3')
    con_32_4 = tf.nn.relu(con_32_3,name='rel_32_2')
    con_32_5 = tf.layers.batch_normalization(con_32_4,axis=-1,name='mer_bn_32')
    con_32_6 = tf.nn.conv2d(con_32_5,filter=get_filter([3,3,32,32],na='w_29'),strides=[1,1,1,1],padding='SAME',name='mer_32_4')
    con_32_7 = tf.nn.relu(con_32_6,name='rel_32_3')
    up_16 = tf.image.resize_images(images=con_32_7,size=[256,256],method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    con_16_1 = tf.nn.conv2d(up_16,filter=get_filter([3,3,32,16],na='w_30'),strides=[1,1,1,1],padding='SAME',name='mer_16_1')
    con_16_2 = tf.nn.relu(con_16_1,name='rel_16_1')
    mer_16 = tf.concat([lay_16_5,con_16_2],axis=0,name='mer_16_2')
    con_16_3 = tf.nn.conv2d(mer_16,filter=get_filter([3,3,16,16],na='w_31'),strides=[1,1,1,1],padding='SAME',name='mer_16_3')
    con_16_4 = tf.nn.relu(con_16_3,name='rel_16_2')
    con_16_5 = tf.layers.batch_normalization(con_16_4,axis=-1,name='mer_bn_16')
    con_16_6 = tf.nn.conv2d(con_16_5,filter=get_filter([3,3,16,16],na='w_32'),strides=[1,1,1,1],padding='SAME',name='mer_16_4')
    con_16_7 = tf.nn.relu(con_16_6,name='rel_16_3')
    fin_img = tf.nn.conv2d(con_16_7,filter=get_filter([1,1,16,1],na='w_33'),strides=[1,1,1,1],padding='SAME',name='final_image')
    #fin_img = tf.nn.sigmoid(fin_img)
    return fin_img
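The stray batch dimension of 7 points at the skip connections: every tf.concat(..., axis=0) stacks along the batch axis, so a batch of 1 grows by one at each of the six merges (1 input + 6 merges = 7). In a U-Net the skip concatenation belongs on the channel axis. A hedged sketch of one corrected merge block (NHWC layout as above; the conv after the merge then needs a doubled input depth, 1024 instead of 512):

mer_512 = tf.concat([lay_512_5, con_512_2], axis=3, name='mer_512_2')  # (N,8,8,1024)
con_512_3 = tf.nn.conv2d(mer_512,
                         filter=get_filter([3,3,1024,512], na='w_16'),  # in-depth doubled
                         strides=[1,1,1,1], padding='SAME', name='mer_512_3')
con_512_4 = tf.nn.relu(con_512_3, name='rel_512_2')

The same axis=3 change (with correspondingly doubled filter depths for w_19, w_22, w_25, w_28, w_31) applies to the other merges; the batch dimension then stays at 1 and the final 1-channel conv produces (1,256,256,1) for a single input image.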

Matplotlib: saving plot preferences without the data

Currently I am looking at saving a matplotlib plot whose state is unknown, as it is user facing. My intended approach is to take the figure and put its preferences into a dictionary of base types, then use the json library to save and load that dictionary to file.
The overall aim is OS independence and compatibility across matplotlib versions.
I've prototyped saving the preferences/settings of the plot to dictionary:
import numpy as np
from matplotlib import ticker
import matplotlib.pyplot as plt
import matplotlib.colors
import json
def get_dict_from_fig(fig):
    fig_dict = {}
    axes_list = []
    for ax in fig.axes:
        axes_list.append(get_dict_for_axes(ax))
    fig_dict["Axes"] = axes_list
    fig_dict["Properties"] = get_dict_from_fig_properties(fig)
    return fig_dict

def get_dict_for_axes(ax):
    ax_dict = {}
    # Get the axis properties
    ax_dict["Properties"] = get_dict_from_axes_properties(ax)
    # Get lines from the axes and store their data
    lines_list = []
    for index, line in enumerate(ax.lines):
        lines_list.append(get_dict_from_line(line, index))
    ax_dict["Lines"] = lines_list
    texts_list = []
    for text in ax.texts:
        texts_list.append(get_dict_from_text(text))
    ax_dict["Texts"] = texts_list
    ax_dict["Title"] = get_dict_from_text(ax.title)
    ax_dict["XAxis Title"] = get_dict_from_text(ax.xaxis.label)
    ax_dict["YAxis Title"] = get_dict_from_text(ax.yaxis.label)
    # Potentially need to handle artists that are Text
    artist_text_dict = {}
    for artist in ax.artists:
        if isinstance(artist, matplotlib.text.Text):
            artist_text_dict = get_dict_from_text(artist)
    ax_dict["Text from artists"] = artist_text_dict
    legend_dict = {}
    legend = ax.get_legend()
    if legend is not None and legend.get_visible():
        legend_dict = get_dict_from_legend(legend)
        legend_dict["Visible"] = True
    ax_dict["Legend"] = legend_dict
    return ax_dict

def get_dict_from_axes_properties(ax):
    prop_dict = {}
    prop_dict["Bounds"] = ax.get_position().bounds
    prop_dict["Dynamic"] = ax.get_navigate()
    prop_dict["Axison"] = ax.axison
    prop_dict["Frame On"] = ax.get_frame_on()
    prop_dict["XAxis Properties"] = get_dict_from_axis_properties(ax.xaxis)
    prop_dict["YAxis Properties"] = get_dict_from_axis_properties(ax.yaxis)
    # XAxis scale and Xlim
    prop_dict["XAxis Scale"] = ax.xaxis.get_scale()
    prop_dict["XLim"] = ax.get_xlim()
    # YAxis scale and Ylim
    prop_dict["YAxis Scale"] = ax.yaxis.get_scale()
    prop_dict["YLim"] = ax.get_ylim()
    return prop_dict

def get_dict_from_axis_properties(ax):
    prop_dict = {}
    label1On = ax._major_tick_kw.get('label1On', True)
    if isinstance(ax, matplotlib.axis.XAxis):
        if label1On:
            prop_dict["Position"] = "Bottom"
        else:
            prop_dict["Position"] = "Top"
    elif isinstance(ax, matplotlib.axis.YAxis):
        if label1On:
            prop_dict["Position"] = "Left"
        else:
            prop_dict["Position"] = "Right"
    else:
        raise ValueError("Value passed is not a valid axis")
    prop_dict["nTicks"] = len(ax.get_major_locator()())
    if isinstance(ax.get_major_locator(), ticker.FixedLocator):
        prop_dict["Tick Values"] = list(ax.get_major_locator())
    else:
        prop_dict["Tick Values"] = None
    formatter = ax.get_major_formatter()
    if isinstance(formatter, ticker.FixedFormatter):
        prop_dict["Tick Format"] = list(formatter.seq)
    else:
        prop_dict["Tick Format"] = ""
    labels = ax.get_ticklabels()
    if labels:
        prop_dict["Font size"] = labels[0].get_fontsize()
    else:
        prop_dict["Font size"] = ""
    prop_dict["Scale"] = ax.get_scale()
    prop_dict["Grid Style"] = get_dict_for_grid_style(ax)
    prop_dict["Visible"] = ax.get_visible()
    return prop_dict

def get_dict_for_grid_style(ax):
    grid_style = {}
    gridlines = ax.get_gridlines()
    if ax._gridOnMajor and len(gridlines) > 0:
        grid_style["Color"] = matplotlib.colors.to_hex(gridlines[0].get_color())
        grid_style["Alpha"] = gridlines[0].get_alpha()
        grid_style["Grid On"] = True
    else:
        grid_style["Grid On"] = False
    return grid_style

def get_dict_from_line(line, index=0):
    line_dict = {}
    line_dict["Line Index"] = index
    line_dict["Label"] = line.get_label()
    line_dict["Alpha"] = line.get_alpha()
    if line_dict["Alpha"] is None:
        line_dict["Alpha"] = 1
    line_dict["Color"] = matplotlib.colors.to_hex(line.get_color())
    line_dict["Linewidth"] = line.get_linewidth()
    line_dict["Line Style"] = line.get_linestyle()
    line_dict["Marker Style"] = get_dict_from_marker_style(line)
    return line_dict

def get_dict_from_marker_style(line):
    style_dict = {}
    style_dict["Face Color"] = matplotlib.colors.to_hex(line.get_markerfacecolor())
    style_dict["Edge Color"] = matplotlib.colors.to_hex(line.get_markeredgecolor())
    style_dict["Edge Width"] = line.get_markeredgewidth()
    style_dict["Marker Type"] = line.get_marker()
    style_dict["Marker Size"] = line.get_markersize()
    style_dict["ZOrder"] = line.get_zorder()
    return style_dict

def get_dict_from_text(text):
    text_dict = {}
    text_dict["Text"] = text.get_text()
    if text_dict["Text"]:
        text_dict["Transform"] = text.get_transform()
        text_dict["Position"] = text.get_position()
        text_dict["Style"] = get_dict_from_text_style(text)
    return text_dict

def get_dict_from_text_style(text):
    style_dict = {}
    style_dict["Alpha"] = text.get_alpha()
    if style_dict["Alpha"] is None:
        style_dict["Alpha"] = 1
    style_dict["Text Size"] = text.get_size()
    style_dict["Color"] = matplotlib.colors.to_hex(text.get_color())
    style_dict["hAlign"] = text.get_horizontalalignment()
    style_dict["vAlign"] = text.get_verticalalignment()
    style_dict["mAlign"] = text._multialignment
    style_dict["Rotation"] = text.get_rotation()
    style_dict["ZOrder"] = text.get_zorder()
    return style_dict

def get_dict_from_legend(legend):
    legend_dict = {}
    legend_elements_list = get_list_of_legend_children(legend)
    legend_elements_list.append(legend.legendPatch)
    text_list = []
    line_list = []
    for child in legend_elements_list:
        try:
            if isinstance(child, matplotlib.text.Text):
                if child.get_text() is not None:
                    text_list.append(get_dict_from_text(child))
            if isinstance(child, matplotlib.lines.Line2D):
                line_list.append(get_dict_from_line(child))
        except NotImplementedError:
            # Basically do nothing
            pass
    legend_dict["Text"] = text_list
    legend_dict["Line List"] = line_list
    return legend_dict

def get_list_of_legend_children(legend):
    legend_list = []
    if hasattr(legend, 'get_children') and len(legend.get_children()) > 0:
        for child in legend.get_children():
            legend_list.append(get_list_of_legend_children(child))
    else:
        legend_list.append(legend)
    return legend_list

def get_dict_from_fig_properties(fig):
    fig_dict = {}
    fig_dict["Fig width"] = fig.get_figwidth()
    fig_dict["Fig height"] = fig.get_figheight()
    fig_dict["dpi"] = fig.dpi
    return fig_dict
XVals = np.array([1, 2, 3])
YVals = np.array([1, 2, 3])
plt.plot(XVals, YVals)
dictionary = get_dict_from_fig(plt.gcf())
with open("./savefile.json", "w+") as f:
    f.write(json.dumps(dictionary, indent=4))
I was wondering if there was any way to do this already, perhaps with a maintained library? I've tried to find something that does it, and nothing I could find was overly useful, besides inspiration. I have already used mpld3 for inspiration.
I should probably have mentioned this earlier, but it is key to be able to load back what has been saved; otherwise there would be little point in saving it.
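For the load direction, a minimal sketch of the round trip using only the keys written by get_dict_from_fig_properties above (extending it to the "Axes", "Lines", and "Texts" entries follows the same pattern in reverse):

import json
import matplotlib.pyplot as plt

with open("./savefile.json") as f:
    saved = json.load(f)

# Rebuild a figure and re-apply the saved figure-level preferences.
fig = plt.figure()
props = saved["Properties"]
fig.set_figwidth(props["Fig width"])
fig.set_figheight(props["Fig height"])
fig.set_dpi(props["dpi"])

Note that values such as the stored Transform objects are not JSON-serializable base types, so a loader like this presupposes the dictionary has been reduced to base types first.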

Proper way to format a date for the FedEx API XML

I have a Django application where I am trying to call FedEx's API to send out a shipping label for people wanting to send in a product for cash. When I make the call, it reports a data validation issue with the Expiration field in the XML I am filling out. I swear this worked in the past with the date formatted as "YYYY-MM-DD", but now it does not. I read that with FedEx you need to format the date as ISO, but that also fails validation. I am using a Python package created to help with tapping FedEx's API.
Django view function for sending the API call:
def Fedex(request, quote):
    label_link = ''
    expiration_date = datetime.datetime.now() + datetime.timedelta(days=10)
    # formatted_date = "%s-%s-%s" % (expiration_date.year, expiration_date.month, expiration_date.day)
    formatted_date = expiration_date.replace(microsecond=0).isoformat()
    if quote.device_type != 'laptop':
        box_length = 9
        box_width = 12
        box_height = 3
    else:
        box_length = 12
        box_width = 14
        box_height = 3
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    ## Page 411 of FedEx Dev Guide - 20.14 Email Labels
    CONFIG_OBJ = FedexConfig(key=settings.FEDEX_KEY, password=settings.FEDEX_PASSWORD,
                             account_number=settings.FEDEX_ACCOUNT,
                             meter_number=settings.FEDEX_METER, use_test_server=settings.USE_FEDEX_TEST)
    fxreq = FedexCreatePendingShipRequestEmail(CONFIG_OBJ, customer_transaction_id='xxxxxx id:01')
    fxreq.RequestedShipment.ServiceType = 'FEDEX_GROUND'
    fxreq.RequestedShipment.PackagingType = 'YOUR_PACKAGING'
    fxreq.RequestedShipment.DropoffType = 'REGULAR_PICKUP'
    fxreq.RequestedShipment.ShipTimestamp = datetime.datetime.now()
    # Special fields for the email label
    fxreq.RequestedShipment.SpecialServicesRequested.SpecialServiceTypes = ('RETURN_SHIPMENT', 'PENDING_SHIPMENT')
    fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.Type = 'EMAIL'
    fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.ExpirationDate = formatted_date
    email_address = fxreq.create_wsdl_object_of_type('EMailRecipient')
    email_address.EmailAddress = quote.email
    email_address.Role = 'SHIPMENT_COMPLETOR'
    # RETURN SHIPMENT DETAIL
    fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnType = ('PENDING')
    fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnEMailDetail = fxreq.create_wsdl_object_of_type(
        'ReturnEMailDetail')
    fxreq.RequestedShipment.SpecialServicesRequested.ReturnShipmentDetail.ReturnEMailDetail.MerchantPhoneNumber = 'x-xxx-xxx-xxxx'
    fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.EmailLabelDetail.Recipients = [email_address]
    fxreq.RequestedShipment.SpecialServicesRequested.PendingShipmentDetail.EmailLabelDetail.Message = "Xxxxxx Xxxxxx"
    fxreq.RequestedShipment.LabelSpecification = {'LabelFormatType': 'COMMON2D', 'ImageType': 'PDF'}
    fxreq.RequestedShipment.Shipper.Contact.PersonName = quote.first_name + ' ' + quote.last_name
    fxreq.RequestedShipment.Shipper.Contact.CompanyName = ""
    fxreq.RequestedShipment.Shipper.Contact.PhoneNumber = quote.phone
    fxreq.RequestedShipment.Shipper.Address.StreetLines.append(quote.address)
    fxreq.RequestedShipment.Shipper.Address.City = quote.city
    fxreq.RequestedShipment.Shipper.Address.StateOrProvinceCode = quote.state
    fxreq.RequestedShipment.Shipper.Address.PostalCode = quote.zip
    fxreq.RequestedShipment.Shipper.Address.CountryCode = settings.FEDEX_COUNTRY_CODE
    fxreq.RequestedShipment.Recipient.Contact.PhoneNumber = settings.FEDEX_PHONE_NUMBER
    fxreq.RequestedShipment.Recipient.Address.StreetLines = settings.FEDEX_STREET_LINES
    fxreq.RequestedShipment.Recipient.Address.City = settings.FEDEX_CITY
    fxreq.RequestedShipment.Recipient.Address.StateOrProvinceCode = settings.FEDEX_STATE_OR_PROVINCE_CODE
    fxreq.RequestedShipment.Recipient.Address.PostalCode = settings.FEDEX_POSTAL_CODE
    fxreq.RequestedShipment.Recipient.Address.CountryCode = settings.FEDEX_COUNTRY_CODE
    fxreq.RequestedShipment.Recipient.AccountNumber = settings.FEDEX_ACCOUNT
    fxreq.RequestedShipment.Recipient.Contact.PersonName = ''
    fxreq.RequestedShipment.Recipient.Contact.CompanyName = 'Xxxxxx Xxxxxx'
    fxreq.RequestedShipment.Recipient.Contact.EMailAddress = 'xxxxxx#xxxxxxxxx'
    # Details of the person who is paying for the shipping
    fxreq.RequestedShipment.ShippingChargesPayment.PaymentType = 'SENDER'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.AccountNumber = settings.FEDEX_ACCOUNT
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.PersonName = 'Xxxxx Xxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.CompanyName = 'Xxxxx Xxxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.PhoneNumber = 'x-xxx-xxx-xxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Contact.EMailAddress = 'xxxxxxx#xxxxxxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.StreetLines = 'Xxxxx N. xXxxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.City = 'Xxxxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.StateOrProvinceCode = 'XX'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.PostalCode = 'xxxxx'
    fxreq.RequestedShipment.ShippingChargesPayment.Payor.ResponsibleParty.Address.CountryCode = 'US'
    # Package info
    package1 = fxreq.create_wsdl_object_of_type('RequestedPackageLineItem')
    package1.SequenceNumber = '1'
    package1.Weight.Value = 1
    package1.Weight.Units = "LB"
    package1.Dimensions.Length = box_length
    package1.Dimensions.Width = box_width
    package1.Dimensions.Height = box_height
    package1.Dimensions.Units = "IN"
    package1.ItemDescription = 'Phone'
    fxreq.RequestedShipment.RequestedPackageLineItems.append(package1)
    fxreq.RequestedShipment.PackageCount = '1'
    try:
        fxreq.send_request()
        label_link = str(fxreq.response.CompletedShipmentDetail.AccessDetail.AccessorDetails[0].EmailLabelUrl)
    except Exception as exc:
        print('Fedex Error')
        print('===========')
        print(exc)
        print('==========')
    return label_link
Error Log
Error: cvc-datatype-valid.1.2.1: '2017-11-3' is not a valid value for 'date'.
cvc-type.3.1.3: The value '2017-11-3' of element 'ns0:ExpirationDate' is not valid. (Error code: -1)
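The log shows the non-padded '2017-11-3', which is exactly what the commented-out "%s-%s-%s" tuple formatting produces, so that path was evidently the one executed when this error was captured. The schema complains about 'date', i.e. an XML xs:date, which requires zero-padded YYYY-MM-DD; isoformat() on a full datetime also fails because it yields a dateTime string (e.g. '2017-11-03T12:00:00') rather than a date. A hedged sketch of a formatting that satisfies xs:date:

import datetime

# strftime zero-pads month and day, unlike the "%s-%s-%s" tuple formatting.
expiration_date = datetime.datetime.now() + datetime.timedelta(days=10)
formatted_date = expiration_date.strftime('%Y-%m-%d')  # e.g. '2017-11-03'

Equivalently, expiration_date.date().isoformat() yields the same zero-padded date-only string.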
