I came across a very useful set of scripts on Shane Lynn's blog for the analysis of weather data. The first script, used to scrape data from Weather Underground, is as follows:
import requests
import pandas as pd
from dateutil import parser, rrule
from datetime import datetime, time, date
import time

def getRainfallData(station, day, month, year):
    """
    Function to return a data frame of minute-level weather data for a single Wunderground PWS station.

    Args:
        station (string): Station code from the Wunderground website
        day (int): Day of month for which data is requested
        month (int): Month for which data is requested
        year (int): Year for which data is requested

    Returns:
        Pandas Dataframe with weather data for specified station and date.
    """
    url = "http://www.wunderground.com/weatherstation/WXDailyHistory.asp?ID={station}&day={day}&month={month}&year={year}&graphspan=day&format=1"
    full_url = url.format(station=station, day=day, month=month, year=year)
    # Request data from wunderground data
    response = requests.get(full_url, headers={'User-agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'})
    data = response.text
    # remove the excess <br> from the text data
    data = data.replace('<br>', '')
    # Convert to pandas dataframe (fails if issues with weather station)
    try:
        dataframe = pd.read_csv(io.StringIO(data), index_col=False)
        dataframe['station'] = station
    except Exception as e:
        print("Issue with date: {}-{}-{} for station {}".format(day, month, year, station))
        return None
    return dataframe

# Generate a list of all of the dates we want data for
start_date = "2016-08-01"
end_date = "2016-08-31"
start = parser.parse(start_date)
end = parser.parse(end_date)
dates = list(rrule.rrule(rrule.DAILY, dtstart=start, until=end))

# Create a list of stations here to download data for
stations = ["ILONDON28"]

# Set a backoff time in seconds if a request fails
backoff_time = 10
data = {}

# Gather data for each station in turn and save to CSV.
for station in stations:
    print("Working on {}".format(station))
    data[station] = []
    for date in dates:
        # Print period status update messages
        if date.day % 10 == 0:
            print("Working on date: {} for station {}".format(date, station))
        done = False
        while done == False:
            try:
                weather_data = getRainfallData(station, date.day, date.month, date.year)
                done = True
            except ConnectionError as e:
                # May get rate limited by Wunderground.com, backoff if so.
                print("Got connection error on {}".format(date))
                print("Will retry in {} seconds".format(backoff_time))
                time.sleep(10)
        # Add each processed date to the overall data
        data[station].append(weather_data)
    # Finally combine all of the individual days and output to CSV for analysis.
    pd.concat(data[station]).to_csv("data/{}_weather.csv".format(station))
However, I get the error:
Working on ILONDONL28
Issue with date: 1-8-2016 for station ILONDONL28
Issue with date: 2-8-2016 for station ILONDONL28
Issue with date: 3-8-2016 for station ILONDONL28
Issue with date: 4-8-2016 for station ILONDONL28
Issue with date: 5-8-2016 for station ILONDONL28
Issue with date: 6-8-2016 for station ILONDONL28
Can anyone help me with this error?
The data for the chosen station and the time period is available, as shown at this link.
The output you are getting is because an exception is being raised. If you added a print(e) to the except block you would see that this is because import io was missing from the top of the script. Secondly, the station name you gave was off by one character. Try the following:
import io
import requests
import pandas as pd
from dateutil import parser, rrule
from datetime import datetime, time, date
import time

def getRainfallData(station, day, month, year):
    """
    Function to return a data frame of minute-level weather data for a single Wunderground PWS station.

    Args:
        station (string): Station code from the Wunderground website
        day (int): Day of month for which data is requested
        month (int): Month for which data is requested
        year (int): Year for which data is requested

    Returns:
        Pandas Dataframe with weather data for specified station and date.
    """
    url = "http://www.wunderground.com/weatherstation/WXDailyHistory.asp?ID={station}&day={day}&month={month}&year={year}&graphspan=day&format=1"
    full_url = url.format(station=station, day=day, month=month, year=year)
    # Request data from wunderground data
    response = requests.get(full_url)
    data = response.text
    # remove the excess <br> from the text data
    data = data.replace('<br>', '')
    # Convert to pandas dataframe (fails if issues with weather station)
    try:
        dataframe = pd.read_csv(io.StringIO(data), index_col=False)
        dataframe['station'] = station
    except Exception as e:
        print("Issue with date: {}-{}-{} for station {}".format(day, month, year, station))
        return None
    return dataframe

# Generate a list of all of the dates we want data for
start_date = "2016-08-01"
end_date = "2016-08-31"
start = parser.parse(start_date)
end = parser.parse(end_date)
dates = list(rrule.rrule(rrule.DAILY, dtstart=start, until=end))

# Create a list of stations here to download data for
stations = ["ILONDONL28"]

# Set a backoff time in seconds if a request fails
backoff_time = 10
data = {}

# Gather data for each station in turn and save to CSV.
for station in stations:
    print("Working on {}".format(station))
    data[station] = []
    for date in dates:
        # Print period status update messages
        if date.day % 10 == 0:
            print("Working on date: {} for station {}".format(date, station))
        done = False
        while done == False:
            try:
                weather_data = getRainfallData(station, date.day, date.month, date.year)
                done = True
            except ConnectionError as e:
                # May get rate limited by Wunderground.com, backoff if so.
                print("Got connection error on {}".format(date))
                print("Will retry in {} seconds".format(backoff_time))
                time.sleep(10)
        # Add each processed date to the overall data
        data[station].append(weather_data)
    # Finally combine all of the individual days and output to CSV for analysis.
    pd.concat(data[station]).to_csv(r"data/{}_weather.csv".format(station))
Giving you an output CSV file starting as follows:
,Time,TemperatureC,DewpointC,PressurehPa,WindDirection,WindDirectionDegrees,WindSpeedKMH,WindSpeedGustKMH,Humidity,HourlyPrecipMM,Conditions,Clouds,dailyrainMM,SoftwareType,DateUTC,station
0,2016-08-01 00:05:00,17.8,11.6,1017.5,ESE,120,0.0,0.0,67,0.0,,,0.0,WeatherCatV2.31B93,2016-07-31 23:05:00,ILONDONL28
1,2016-08-01 00:20:00,17.7,11.0,1017.5,SE,141,0.0,0.0,65,0.0,,,0.0,WeatherCatV2.31B93,2016-07-31 23:20:00,ILONDONL28
2,2016-08-01 00:35:00,17.5,10.8,1017.5,South,174,0.0,0.0,65,0.0,,,0.0,WeatherCatV2.31B93,2016-07-31 23:35:00,ILONDONL28
If you are not getting a CSV file, I suggest you add a full path to the output filename.
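For example, a minimal sketch of writing to an absolute path instead (the weather_data folder name is just an illustration; whatever directory you use must exist before to_csv() is called, or pandas will raise an error):
import os

# Hypothetical absolute output location; the relative "data/" folder used
# above would otherwise need to exist in the current working directory.
output_dir = os.path.join(os.path.expanduser("~"), "weather_data")
os.makedirs(output_dir, exist_ok=True)  # create the folder if it is missing

out_path = os.path.join(output_dir, "{}_weather.csv".format(station))
pd.concat(data[station]).to_csv(out_path)
print("Wrote " + out_path)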
I'm working on Python code to update and append token price and volume data from gate.io's API to a .csv file. Basically I'm trying to check whether the file is up to date, and if not, update it with the most recent hour's data. The code below isn't throwing any errors, but it's not working. My columns are all in the same order as they are in the code. Any assistance would be greatly appreciated, thank you.
import requests
import pandas as pd
from datetime import datetime
# Define API endpoint and parameters
host = "https://api.gateio.ws"
prefix = "/api/v4"
url = '/spot/candlesticks'
currency_pair = "BTC_USDT"
interval = "1h"
# Read the existing data from the csv file
df = pd.read_csv("price_calcs.csv")
# Extract the last timestamp from the csv file
last_timestamp = df["time1"].iloc[-1]
# Convert the timestamp to datetime and add an hour to get the new "from" parameter
from_time = datetime.utcfromtimestamp(last_timestamp).strftime('%Y-%m-%d %H:%M:%S')
to_time = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S')
# Use the last timestamp to make a 'GET' request to the API to get the latest hourly data for the token
query_params = {"currency_pair": currency_pair, "from": from_time, "to": to_time, "interval": interval}
r = requests.get(host + prefix + url, params=query_params)
# Append the new data to the existing data from the csv file
new_data = pd.DataFrame(r.json(), columns=["time1", "volume1", "close1", "high1", "low1", "open1", "volume2"])
df = pd.concat([df, new_data])
# Write the updated data to the csv file
df.to_csv("price_calcs.csv", index=False)
Never mind, I figured it out myself.
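For anyone hitting the same wall: the fix above was never posted, but a likely culprit is that the gate.io /spot/candlesticks endpoint expects from and to as Unix timestamps in seconds rather than strftime()-formatted strings. A minimal sketch of that variant (the column names are copied from the question's code; newer API versions may return an extra field per candle):
import time

import pandas as pd
import requests

host = "https://api.gateio.ws"
prefix = "/api/v4"
url = "/spot/candlesticks"

df = pd.read_csv("price_calcs.csv")
last_timestamp = int(df["time1"].iloc[-1])

# Assumption: the endpoint takes integer Unix timestamps (seconds), so pass
# raw epoch values instead of formatted date strings.
query_params = {
    "currency_pair": "BTC_USDT",
    "from": last_timestamp + 3600,   # start one hour after the last stored candle
    "to": int(time.time()),          # up to "now"
    "interval": "1h",
}
r = requests.get(host + prefix + url, params=query_params)
r.raise_for_status()

new_data = pd.DataFrame(r.json(), columns=["time1", "volume1", "close1", "high1", "low1", "open1", "volume2"])
new_data["time1"] = new_data["time1"].astype(int)  # timestamps come back as strings

df = pd.concat([df, new_data]).drop_duplicates(subset="time1")
df.to_csv("price_calcs.csv", index=False)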
I can't figure out how to get data for a given day. Using the annual line in my code, I know the millisecond values of the given dates:
1612159200000.00 AAPL 2/1/2021 6:00
1612418400000.00 AAPL 2/4/2021 6:00
But putting these values in the code doesn't work:
data=get_price_history(symbol=i, endDate=1612418400000 , startDate=1612159200000, frequency=1, frequencyType='daily')
import requests
import pandas as pd
import time
import datetime

# tickers_list = ['AAPL', 'AMGN', 'AXP']
# print(len(tickers_list))
key = '****'

def get_price_history(**kwargs):
    url = 'https://api.tdameritrade.com/v1/marketdata/{}/pricehistory'.format(kwargs.get('symbol'))
    params = {}
    params.update({'apikey': key})
    for arg in kwargs:
        parameter = {arg: kwargs.get(arg)}
        params.update(parameter)
    return requests.get(url, params=params).json()

tickers_list = ['AAPL', 'AMGN', 'WMT']
for i in tickers_list:
    # get data 1 year 1 day frequency -- good
    # data = get_price_history(symbol=i, period=1, periodType='year', frequency=1, frequencyType='daily')
    data = get_price_history(symbol=i, endDate=1612418400000, startDate=1612159200000, frequency=1, frequencyType='daily')
    historical['date'] = pd.to_datetime(historical['datetime'], unit='ms')
    info = pd.DataFrame(data['candles'])
    historical = pd.concat([historical, info])
historical
From the Ameritrade Price History API documentation:
6 Months / 1 Day, including today's data:
https://api.tdameritrade.com/v1/marketdata/XYZ/pricehistory?periodType=month&frequencyType=daily&endDate=1464825600000
Note that periodType=month is specified because the default periodType is day which is not compatible with the frequencyType daily
So it seems that this line in your code:
data=get_price_history(symbol=i, endDate=1612418400000 , startDate=1612159200000, frequency=1, frequencyType='daily')
is missing a valid periodType parameter. Try:
data=get_price_history(symbol=i, endDate=1612418400000 , startDate=1612159200000, frequency=1, periodType='month', frequencyType='daily')
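If you need to compute the millisecond timestamps rather than hard-coding them, a quick sketch with the standard library (using the question's dates and assuming they are meant as UTC):
from datetime import datetime, timezone

# Epoch milliseconds for 2021-02-01 06:00 and 2021-02-04 06:00 UTC;
# swap in a different tzinfo if your dates are in another timezone.
start_ms = int(datetime(2021, 2, 1, 6, 0, tzinfo=timezone.utc).timestamp() * 1000)
end_ms = int(datetime(2021, 2, 4, 6, 0, tzinfo=timezone.utc).timestamp() * 1000)
print(start_ms, end_ms)  # 1612159200000 1612418400000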
Step 1: you need a valid session.
Step 2: you can use the tda-api function get_price_history().
Here is an example that I successfully used to get daily data given a start and end date:
from datetime import datetime

import httpx
import pandas as pd
from tda.auth import easy_client

# need a valid refresh token to use easy_client
Client = easy_client(
    api_key='APIKEY',
    redirect_uri='https://localhost',
    token_path='/tmp/token.json')

# get daily data given a start and end date
resp = Client.get_price_history('AAPL',
                                period_type=Client.PriceHistory.PeriodType.YEAR,
                                start_datetime=datetime(2019, 9, 30),
                                end_datetime=datetime(2019, 10, 30),
                                frequency_type=Client.PriceHistory.FrequencyType.DAILY,
                                frequency=Client.PriceHistory.Frequency.DAILY)
assert resp.status_code == httpx.codes.OK
history = resp.json()
aapl = pd.DataFrame(history)
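Note that the JSON payload nests the actual bars under a 'candles' key, so for a tidy DataFrame you would typically flatten that part (a small sketch, assuming the standard price-history response shape):
# Each candle carries open/high/low/close/volume plus a 'datetime' in epoch milliseconds.
candles = pd.DataFrame(history['candles'])
candles['date'] = pd.to_datetime(candles['datetime'], unit='ms')
print(candles.head())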
I have a class assignment to write a Python program to download end-of-day data for the last 25 years for the major global stock market indices from Yahoo Finance:
Dow Jones Index (USA)
S&P 500 (USA)
NASDAQ (USA)
DAX (Germany)
FTSE (UK)
HANGSENG (Hong Kong)
KOSPI (Korea)
CNX NIFTY (India)
Unfortunately, when I run the program an error occurs.
File "C:\ProgramData\Anaconda2\lib\site-packages\yahoofinancials__init__.py", line 91, in format_date
form_date = datetime.datetime.fromtimestamp(int(in_date)).strftime('%Y-%m-%d')
ValueError: timestamp out of range for platform localtime()/gmtime() function
Below you can see the code that I have written. I'm trying to debug my mistakes. Can you help me out please? Thanks.
from yahoofinancials import YahooFinancials
import pandas as pd
# Select Tickers and stock history dates
index1 = '^DJI'
index2 = '^GSPC'
index3 = '^IXIC'
index4 = '^GDAXI'
index5 = '^FTSE'
index6 = '^HSI'
index7 = '^KS11'
index8 = '^NSEI'
freq = 'daily'
start_date = '1993-06-30'
end_date = '2018-06-30'
# Function to clean data extracts
def clean_stock_data(stock_data_list):
    new_list = []
    for rec in stock_data_list:
        if 'type' not in rec.keys():
            new_list.append(rec)
    return new_list
# Construct yahoo financials objects for data extraction
dji_financials = YahooFinancials(index1)
gspc_financials = YahooFinancials(index2)
ixic_financials = YahooFinancials(index3)
gdaxi_financials = YahooFinancials(index4)
ftse_financials = YahooFinancials(index5)
hsi_financials = YahooFinancials(index6)
ks11_financials = YahooFinancials(index7)
nsei_financials = YahooFinancials(index8)
# Clean returned stock history data and remove dividend events from price history
daily_dji_data = clean_stock_data(dji_financials
                                  .get_historical_stock_data(start_date, end_date, freq)[index1]['prices'])
daily_gspc_data = clean_stock_data(gspc_financials
                                   .get_historical_stock_data(start_date, end_date, freq)[index2]['prices'])
daily_ixic_data = clean_stock_data(ixic_financials
                                   .get_historical_stock_data(start_date, end_date, freq)[index3]['prices'])
daily_gdaxi_data = clean_stock_data(gdaxi_financials
                                    .get_historical_stock_data(start_date, end_date, freq)[index4]['prices'])
daily_ftse_data = clean_stock_data(ftse_financials
                                   .get_historical_stock_data(start_date, end_date, freq)[index5]['prices'])
daily_hsi_data = clean_stock_data(hsi_financials
                                  .get_historical_stock_data(start_date, end_date, freq)[index6]['prices'])
daily_ks11_data = clean_stock_data(ks11_financials
                                   .get_historical_stock_data(start_date, end_date, freq)[index7]['prices'])
daily_nsei_data = clean_stock_data(nsei_financials
                                   .get_historical_stock_data(start_date, end_date, freq)[index8]['prices'])

stock_hist_data_list = [{'^DJI': daily_dji_data}, {'^GSPC': daily_gspc_data}, {'^IXIC': daily_ixic_data},
                        {'^GDAXI': daily_gdaxi_data}, {'^FTSE': daily_ftse_data}, {'^HSI': daily_hsi_data},
                        {'^KS11': daily_ks11_data}, {'^NSEI': daily_nsei_data}]
# Function to construct data frame based on a stock and its market index
def build_data_frame(data_list1, data_list2, data_list3, data_list4, data_list5, data_list6, data_list7, data_list8):
    data_dict = {}
    i = 0
    for list_item in data_list2:
        if 'type' not in list_item.keys():
            data_dict.update({list_item['formatted_date']: {'^DJI': data_list1[i]['close'], '^GSPC': list_item['close'],
                                                            '^IXIC': data_list3[i]['close'], '^GDAXI': data_list4[i]['close'],
                                                            '^FTSE': data_list5[i]['close'], '^HSI': data_list6[i]['close'],
                                                            '^KS11': data_list7[i]['close'], '^NSEI': data_list8[i]['close']}})
            i += 1
    tseries = pd.to_datetime(list(data_dict.keys()))
    df = pd.DataFrame(data=list(data_dict.values()), index=tseries,
                      columns=['^DJI', '^GSPC', '^IXIC', '^GDAXI', '^FTSE', '^HSI', '^KS11', '^NSEI']).sort_index()
    return df
Your problem is that your datetime stamps are in the wrong format. If you look at the error message, it clearly tells you:
datetime.datetime.fromtimestamp(int(in_date)).strftime('%Y-%m-%d')
Notice the int(in_date) part?
It wants the unix timestamp. There are several ways to get this, out of the time module or the calendar module, or using Arrow.
import datetime
import calendar
date = datetime.datetime.strptime("1993-06-30", "%Y-%m-%d")
start_date = calendar.timegm(date.utctimetuple())
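Equivalently with the time module (a sketch; note that time.mktime interprets the tuple in local time, whereas calendar.timegm above treats it as UTC):
import time
import datetime

date = datetime.datetime.strptime("1993-06-30", "%Y-%m-%d")
start_date = int(time.mktime(date.timetuple()))  # local-time based Unix timestamp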
* UPDATED *
OK so I fixed up to the dataframes portion. Here is my current code:
# Select Tickers and stock history dates
index = {'DJI': YahooFinancials('^DJI'),
         'GSPC': YahooFinancials('^GSPC'),
         'IXIC': YahooFinancials('^IXIC'),
         'GDAXI': YahooFinancials('^GDAXI'),
         'FTSE': YahooFinancials('^FTSE'),
         'HSI': YahooFinancials('^HSI'),
         'KS11': YahooFinancials('^KS11'),
         'NSEI': YahooFinancials('^NSEI')}

freq = 'daily'
start_date = '1993-06-30'
end_date = '2018-06-30'

# Clean returned stock history data and remove dividend events from price history
daily = {}
for k in index:
    tmp = index[k].get_historical_stock_data(start_date, end_date, freq)
    if tmp:
        daily[k] = tmp['^{}'.format(k)]['prices'] if 'prices' in tmp['^{}'.format(k)] else []
Unfortunately I had to fix a couple of things in the yahoofinancials module. For the class YahooFinanceETL:
@staticmethod
def format_date(in_date, convert_type):
    try:
        x = int(in_date)
        convert_type = 'standard'
    except:
        convert_type = 'unixstamp'
    if convert_type == 'standard':
        if in_date < 0:
            form_date = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=in_date)
        else:
            form_date = datetime.datetime.fromtimestamp(int(in_date)).strftime('%Y-%m-%d')
    else:
        split_date = in_date.split('-')
        d = date(int(split_date[0]), int(split_date[1]), int(split_date[2]))
        form_date = int(time.mktime(d.timetuple()))
    return form_date
AND:
# private static method to scrape data from yahoo finance
@staticmethod
def _scrape_data(url, tech_type, statement_type):
    response = requests.get(url)
    soup = BeautifulSoup(response.content, "html.parser")
    script = soup.find("script", text=re.compile("root.App.main")).text
    data = loads(re.search("root.App.main\s+=\s+(\{.*\})", script).group(1))
    if tech_type == '' and statement_type != 'history':
        stores = data["context"]["dispatcher"]["stores"]["QuoteSummaryStore"]
    elif tech_type != '' and statement_type != 'history':
        stores = data["context"]["dispatcher"]["stores"]["QuoteSummaryStore"][tech_type]
    else:
        if "HistoricalPriceStore" in data["context"]["dispatcher"]["stores"]:
            stores = data["context"]["dispatcher"]["stores"]["HistoricalPriceStore"]
        else:
            stores = data["context"]["dispatcher"]["stores"]["QuoteSummaryStore"]
    return stores
You will want to look at the daily dict, and rewrite your build_data_frame function, which should be a lot simpler now since you are working with a dictionary already.
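As a starting point, here is a rough sketch of what that simpler version could look like (assuming each record in daily[k] keeps the usual 'formatted_date' and 'close' fields returned by yahoofinancials):
def build_data_frame(daily):
    # One close-price column per index, keyed by date; indices with no data yield empty columns.
    frames = {}
    for k, records in daily.items():
        closes = {rec['formatted_date']: rec['close'] for rec in records if 'close' in rec}
        frames['^{}'.format(k)] = pd.Series(closes)
    df = pd.DataFrame(frames)
    df.index = pd.to_datetime(df.index)
    return df.sort_index()

df = build_data_frame(daily)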
I am actually the maintainer and author of YahooFinancials. I just saw this post and wanted to personally apologize for the inconvenience and let you all know I will be working on fixing the module this evening.
Could you please open an issue on the module's Github page detailing this?
It would also be very helpful to know which version of python you were running when you encountered these issues.
https://github.com/JECSand/yahoofinancials/issues
I am at work right now, however as soon as I get home in ~7 hours or so I will attempt to code a fix and release it. I'll also work on the exception handling. I try my best to maintain this module, but my day (and often night time) job is rather demanding. I will report back with the final results of these fixes and publish to pypi when it is done and stable.
Also, if anyone else has any feedback or personal fixes they can offer, it would be a huge help in fixing this. Proper credit will be given, of course. I am also in desperate need of contributors, so if anyone is interested in that as well, let me know. I really want to take YahooFinancials to the next level and have this project become a stable and reliable alternative for free financial data for Python projects.
Thank you for your patience and for using YahooFinancials.
I'm having a Python issue which I cannot seem to understand. I'm not sure if I need to use if statements, and because I'm new to Python I'm not actually sure how to code this.
Essentially, this is the issue I have. For the departure calendar, I want Python to be able to do the following:
View 'Your date'. If there's a flight (it doesn't matter if it's lowfare or normal), click it. If not, move on to the next available date that does have a flight and click that.
It will need to be able to move to the next month if no date is available in the current month (I have example code for this).
For the return calendar, I want it to do the same thing, but ensure it selects a date at least 7 days after the selected departure date.
That's essentially my question: how do I do that?
Below is the HTML of the departure calendar (the return calendar is exactly the same, except it uses inboundsearchresults rather than outboundsearchresults):
Below I have sample code which works when selecting from an ordinary date picker (this is used on the page before the URL), in case you want to use that template and adapt it:
# select depart date
datepicker = driver.find_element_by_id("departure-date-selector")
actions.move_to_element(datepicker).click().perform()

# find the calendar, month and year picker and the current date
calendar = driver.find_element_by_id("departureDateContainer")
month_picker = Select(calendar.find_element_by_class_name("ui-datepicker-month"))
year_picker = Select(calendar.find_element_by_class_name("ui-datepicker-year"))
current_date = calendar.find_element_by_class_name("ui-datepicker-current-day")

# printing out current date
month = month_picker.first_selected_option.text
year = year_picker.first_selected_option.text
print("Current departure date: {day} {month} {year}".format(day=current_date.text, month=month, year=year))

# see if we have an available date in this month
try:
    next_available_date = current_date.find_element_by_xpath("following::td[@data-handler='selectDay' and ancestor::div/@id='departureDateContainer']")
    print("Found an available departure date: {day} {month} {year}".format(day=next_available_date.text, month=month, year=year))
    next_available_date.click()
except NoSuchElementException:
    # looping over until the next available date found
    while True:
        # click next, if not found, select the next year
        try:
            calendar.find_element_by_class_name("ui-datepicker-next").click()
        except NoSuchElementException:
            # select next year
            year = Select(calendar.find_element_by_class_name("ui-datepicker-year"))
            year.select_by_visible_text(str(int(year.first_selected_option.text) + 1))

        # reporting current processed month and year
        month = Select(calendar.find_element_by_class_name("ui-datepicker-month")).first_selected_option.text
        year = Select(calendar.find_element_by_class_name("ui-datepicker-year")).first_selected_option.text
        print("Processing {month} {year}".format(month=month, year=year))

        try:
            next_available_date = calendar.find_element_by_xpath(".//td[@data-handler='selectDay']")
            print("Found an available departure date: {day} {month} {year}".format(day=next_available_date.text, month=month, year=year))
            next_available_date.click()
            break
        except NoSuchElementException:
            continue
The idea is to define a reusable function, call it select_date(), that receives a "calendar" WebElement and an optional minimum date. This function would first look for the "your date" cell in the calendar; if it is there and it is later than the minimum (if given), click it and return the date. If there is no "your date", look for the available "flight" days and, if a minimum date is given, pick the first date that is greater than or equal to it, click it and return the date.
Working implementation:
from datetime import datetime, timedelta

from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC


def select_date(calendar, mininum_date=None):
    try:
        # check if "Your Date" is there
        your_date_elm = calendar.find_element_by_class_name("your-date")
        your_date = your_date_elm.get_attribute("data-date")
        print("Found 'Your Date': " + your_date)
        your_date_elm.click()

        # check if your_date against the minimum date if given
        your_date = datetime.strptime(your_date, "%Y-%m-%d")
        if mininum_date and your_date < mininum_date:
            raise NoSuchElementException("Minimum date violation")
        return your_date
    except NoSuchElementException:
        flight_date = None
        flight_date_elm = None
        while True:
            print("Processing " + calendar.find_element_by_css_selector("div.subheader > p").text)
            try:
                if mininum_date:
                    flight_date_elms = calendar.find_elements_by_class_name("flights")
                    flight_date_elm = next(flight_date_elm for flight_date_elm in flight_date_elms
                                           if datetime.strptime(flight_date_elm.get_attribute("data-date"), "%Y-%m-%d") >= mininum_date)
                else:
                    flight_date_elm = calendar.find_element_by_class_name("flights")
            except (StopIteration, NoSuchElementException):
                calendar.find_element_by_partial_link_text("Next month").click()

            # if found - print out the date, click and exit the loop
            if flight_date_elm:
                flight_date = flight_date_elm.get_attribute("data-date")
                print("Found 'Flight Date': " + flight_date)
                flight_date_elm.click()
                break
        return datetime.strptime(flight_date, "%Y-%m-%d")


driver = webdriver.Firefox()
driver.get("http://www.jet2.com/cheap-flights/leeds-bradford/antalya/2016-03-01/2016-04-12?adults=2&children=2&infants=1&childages=4%2c6")
wait = WebDriverWait(driver, 10)

# get the outbound date
outbound = wait.until(EC.visibility_of_element_located((By.ID, "outboundsearchresults")))
outbound_date = select_date(outbound)

# get the inbound date
inbound = driver.find_element_by_id("inboundsearchresults")
inbound_minimum_date = outbound_date + timedelta(days=7)
inbound_date = select_date(inbound, mininum_date=inbound_minimum_date)

print(outbound_date, inbound_date)
driver.close()
For the URL provided in the question, it prints:
Processing March 2016
Found 'Flight Date': 2016-03-28
Processing April 2016
Found 'Flight Date': 2016-04-04
2016-03-28 00:00:00 2016-04-04 00:00:00
The two dates printed at the end are the departure and the return dates.
Let me know if you need any clarifications and hope it helps.
I've been working with the Quandl API recently and I've been stuck on an issue for a while.
My question is how to create a method that returns the difference between one date and the date before for a stock index. The data seems to come out as an array, for example [[u'2015-04-30', 17840.52]] for the Dow Jones Industrial Average. I'd also like a way to get the change one day back from the latest date, say getting Friday's value and the change between that and the day before.
My code:
import json
import requests

def fetchData(apikey, url):
    '''Returns JSON data of the Dow Jones Average.'''
    parameters = {'rows': 1, 'auth_token': apikey}
    req = requests.get(url, params=parameters)
    data = json.loads(req.content)
    parsedData = []
    stockData = {}
    for datum in data:
        if data['code'] == 'COMP':
            stockData['name'] = data['name']
            stockData['description'] = '''The NASDAQ Composite Index measures all
            NASDAQ domestic and international based common type stocks listed on The NASDAQ Stock Market.'''
            stockData['data'] = data['data']
            stockData['code'] = data['code']
        else:
            stockData['name'] = data['name']
            stockData['description'] = data['description']
            stockData['data'] = data['data']
            stockData['code'] = data['code']
        parsedData.append(stockData)
    return parsedData
I've attempted to just tack [1] onto data to get just the current day, but the issue of getting the day before has stumped me.
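For what it's worth, one way to approach the day-over-day change is to request two rows and difference them (a sketch, not a drop-in fix for the code above; it assumes the Quandl v1 datasets endpoint returning its rows newest-first, each row shaped like [date, value]):
import requests

def fetch_latest_change(apikey, url):
    """Return (latest_date, latest_value, change_from_previous_day)."""
    parameters = {'rows': 2, 'auth_token': apikey}  # ask for the two most recent rows
    data = requests.get(url, params=parameters).json()
    rows = data['data']  # newest first, e.g. [['2015-04-30', 17840.52], ['2015-04-29', ...]]
    latest_date, latest_value = rows[0]
    previous_value = rows[1][1]
    return latest_date, latest_value, latest_value - previous_value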