For NASDAQ
ftp.nasdaqtrader.com
hover over product login and choose Market Data, daily list.
You will need to set up an account
You can get historical data from Yahoo Finance; here's an app — most of this is from Maik Rosenheinrich's blog. I only added the class wrapper:
MaikFetchStockData.py
# Note original code by:
# Modified (slightly) by Larz60+, made into a class that can be imported
# also added command line usage: python MaikFetchStockData.py symbol
# if run from command line without symbol, will ask for one
# returns a tab delimited csv file.
# you can also supply start and end dates, and reporting interval
import requests # [handles the http interactions](http://docs.python-requests.org/en/master/)
from bs4 import BeautifulSoup # beautiful soup handles the html to text conversion and more
import re # regular expressions are necessary for finding the crumb (more on crumbs later)
from datetime import datetime # string to datetime object conversion
from time import mktime # mktime transforms datetime objects to unix timestamps
import sys
class MaikFetchStockData:
    """Fetch historical stock quotes from the Yahoo Finance CSV download API.

    Workflow: scrape the quote page for the session crumb and cookies,
    then request the CSV download endpoint with those credentials.

    NOTE(review): the "CrumbStore" scrape depends on Yahoo's page markup,
    which has changed over time — confirm it still matches the live site.
    """

    def __init__(self):
        pass

    def _get_crumbs_and_cookies(self, stock):
        """Fetch the quote page for *stock* and harvest the crumb and cookies.

        The same headers are reused in the download request so both calls
        present as the same browser.

        parameters: stock - short-handle identifier of the company
        returns a tuple of (header dict, crumb string, cookie jar)
        raises IndexError if no crumb is found in the page
        """
        url = f"https://finance.yahoo.com/quote/{stock}/history"
        header = {
            'Connection': 'keep-alive',
            'Expires': '-1',
            'Upgrade-Insecure-Requests': '1',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36'
        }
        # The original wrapped this in "with requests.session():" but never
        # used the session object; plain requests.get is equivalent here.
        website = requests.get(url, headers=header)
        soup = BeautifulSoup(website.text, 'lxml')
        crumb = re.findall(r'"CrumbStore":{"crumb":"(.+?)"}', str(soup))
        return (header, crumb[0], website.cookies)

    def convert_to_unix(self, date):
        """Convert a date string to a unix timestamp (local time via mktime).

        Yahoo Finance expects unix timestamps in the download URL.

        parameters: date - in format (dd-mm-yyyy)
        returns integer unix timestamp
        raises ValueError if *date* does not match dd-mm-yyyy
        """
        datum = datetime.strptime(date, '%d-%m-%Y')
        return int(mktime(datum.timetuple()))

    def load_csv_data(self, stock, interval='1d', day_begin='01-01-1991', day_end='02-11-2021'):
        """Query the Yahoo Finance API for historical data in CSV format.

        parameters:
            stock - short-handle identifier of the company
            interval - 1d, 1wk, 1mo - daily, weekly, monthly data
            day_begin - starting date for the historical data (format: dd-mm-yyyy)
            day_end - final date of the data (format: dd-mm-yyyy)
        returns a list of comma separated value lines, header row first,
        then one line per period
        """
        day_begin_unix = self.convert_to_unix(day_begin)
        day_end_unix = self.convert_to_unix(day_end)
        header, crumb, cookies = self._get_crumbs_and_cookies(stock)
        url = ('https://query1.finance.yahoo.com/v7/finance/download/'
               f'{stock}?period1={day_begin_unix}&period2={day_end_unix}'
               f'&interval={interval}&events=history&crumb={crumb}')
        website = requests.get(url, headers=header, cookies=cookies)
        # Drop the trailing empty element produced by the final newline.
        return website.text.split('\n')[:-1]

    def get_csv_data(self, ticker='^DJI', interval='1d', day_begin='01-01-1991', day_end='02-11-2021'):
        """Convenience wrapper around load_csv_data with a default ticker.

        BUG FIX: the original accepted interval/day_begin/day_end but ignored
        them, always forwarding the hard-coded defaults. Arguments are now
        passed through.
        """
        return self.load_csv_data(ticker, interval=interval,
                                  day_begin=day_begin, day_end=day_end)
def main(argv):
    """Command-line entry point: fetch and print CSV quote lines.

    Uses argv[1] as the symbol when given, otherwise prompts for one.
    """
    fetcher = MaikFetchStockData()
    print(f"length: {len(argv)}")
    symbol = input("Please enter stock symbol: ") if len(argv) == 1 else argv[1]
    print(fetcher.load_csv_data(symbol))


if __name__ == '__main__':
    main(sys.argv)
sample output for
python MaikFetchStockData.py ^DJI > DJI.csv
Output:
['Date,Open,High,Low,Close,Adj Close,Volume',
'1992-01-02,1357.599976,1357.619995,1330.900024,1342.099976,1342.099976,41400',
'1992-01-03,1342.300049,1354.630005,1342.300049,1350.699951,1350.699951,45900',
'1992-01-06,1350.699951,1366.219971,1349.219971,1365.500000,1365.500000,48800',
'1992-01-07,1365.500000,1383.030029,1361.550049,1383.000000,1383.000000,57000',
...
'2021-02-05,12727.879883,12833.669922,12727.879883,12788.509766,12788.509766,852300',
'2021-02-08,12793.919922,12894.870117,12782.839844,12874.919922,12874.919922,1231400',
'2021-02-09,12871.200195,12993.620117,12780.129883,12959.900391,12959.900391,830100',
'2021-02-10,12976.240234,13041.500000,12885.309570,12954.530273,12954.530273,809300']
To use it in your own program:
from MaikFetchStockData import MaikFetchStockData
class myclass:
    """Example wrapper showing how to reuse MaikFetchStockData elsewhere."""

    def __init__(self):
        # Hold a bound reference to the fetcher's convenience entry point.
        self.fetch_stockdata = MaikFetchStockData().get_csv_data

    def get_stuff(self, symbol):
        """Return the CSV quote lines for *symbol*."""
        return self.fetch_stockdata(symbol)


if __name__ == '__main__':
    print(myclass().get_stuff('MSFT'))