### -------------------- LIBRARIES --------------------
import datetime
import time
import json

import yfinance as yf
import pandas as pd
import requests

### -------------------- GLOBAL VARIABLES --------------------
# PROGRAM
json_result = {}
t_sleep_py = 10   # Program cooldown timer (minutes)
t_sleep_api = 1   # YF-API cooldown timer (seconds)

# NOTION
notion_token = "secret_b7PiPL2FqC9QEikqkAEWOht7LmzPMIJMWTzUPWwbw4H"
notion_headers = {
    "Authorization": "Bearer " + notion_token,
    "Content-Type": "application/json",
    "Notion-Version": "2022-02-22"
}
notion_db_id_trades = "95f7a2b697a249d4892d60d855d31bda"
notion_db_id_performance = "1c010a5f51bd806f90d8e76a1286cfd4"

# TRMNL
trmnl_page_id = "a791a6f1-dcac-4ea2-89fb-6c7be76d74b7"
trmnl_base_url = "https://usetrmnl.com/api/custom_plugins/"
trmnl_headers = {"Content-Type": "application/json"}

# TRMNL special webhook payload structure
# The outer "merge_variables" key is required to pass the data through
test_data = '''{
    "merge_variables":
        {"key": "data"}
}'''

# TRMNL diagram structure
trmnl_object = '''{"merge_variables": {"key": "payload"}}'''

### -------------------- EXAMPLE STRUCTURES --------------------
# NOTION FORMATTED DATA STRUCTURE
''' trades[notion_page_id] = {
        'ticker'         : notion_page["properties"]["Ticker"]["select"]["name"],
        'date_open'      : date_open,
        'date_close'     : date_close,
        'course_open'    : notion_page["properties"]["Open (€)"]["number"],
        'course_close'   : notion_page["properties"]["Close (€)"]["number"],
        'course_current' : notion_page["properties"]["Current (€)"]["number"],
        'irr'            : notion_page["properties"]["IRR (%)"]["number"],
        'units'          : notion_page["properties"]["Units"]["number"],
        'dividends'      : notion_page["properties"]["Dividends (€)"]["number"]
    }
'''

# YF FORMATTED DATA STRUCTURE
'''yf_data[notion_page_id] = [1670 rows x 2 columns]
    Date            Close   Dividends
    2018-02-14   7.358605         0.0
    2018-02-15   7.366426         0.0
    2018-02-16   7.366426         0.0
    2018-02-19   7.366426         0.0
    2018-02-20   7.366426         0.0
    ...               ...         ...
    2025-06-13  11.024000         0.0
    2025-06-16  11.050000         0.0
    2025-06-17  11.076000         0.0
    2025-06-18  11.098500         0.0
    2025-06-19  11.129000         0.0
'''

# TRMNL CHART-DATA STRUCTURE
'''var data = [{"name":"Current","data":[["2024-12-31",3982.23],......,["2024-12-01",946.02]]},{"name":"Comparison","data":[["2024-12-30",590.56],......,["2024-12-01",425.28]]}];'''

### -------------------- FUNCTIONS --------------------
# ------------------#
# LEVEL 1 FUNCTIONS #
# ------------------#

# CALCULATE THE IRR
def calculate_irr(date_now, date_open, value_now, value_open):
    error = False
    irr = 0.0

    try:
        # Count the holding period in days
        a = date_now - date_open
        a = a.days

        # On the day of the purchase itself the delta in days is 0
        # To still be able to calculate an IRR, the delta is set to 1
        if a == 0:
            a = 1

        a = a / 365                 # Convert to a fraction of a year to obtain an annualized rate
        b = value_now / value_open

        # Catch negative IRRs
        if b < 0:
            b = b * (-1)
            irr = b**(1/a)          # mathematically the a-th root of b
            irr = irr * (-1)
        else:
            irr = b**(1/a)          # mathematically the a-th root of b
    except:
        error = True

    # Return data if successful
    if error == True:
        print("[ERROR] Calculation of irr")
        return error
    else:
        return irr
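
# Illustrative example (not live data): for a position opened 730 days ago with value_open = 100
# and value_now = 121, a = 730 / 365 = 2 and b = 1.21, so irr = 1.21 ** (1 / 2) = 1.10,
# i.e. roughly +10 % per year (stored in the 1.xx format that push_notion_trades_update
# later converts to the 0.xx format Notion expects).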

# GET THE DAY OF THE OLDEST TRADE
def get_date_open_oldest_trade(trades):
    # Identify the open date for the oldest trade
    date_open_oldest_trade = datetime.date.today()
    for i in trades:
        if trades[i]["date_open"] < date_open_oldest_trade:
            date_open_oldest_trade = trades[i]["date_open"]
    return date_open_oldest_trade

# CREATES A LIST OF UNIQUE TICKERS
def filter_list_of_tickers(trades):
    tickers = []
    try:
        for i in trades:
            # Fetch the ticker belonging to this trade
            ticker = trades[i]['ticker']
            # Add the ticker to the list, if not already present
            if ticker not in tickers:
                tickers.append(ticker)
        print("[SUCCESS] Creating a list of {} tickers".format(len(tickers)))
        return tickers
    except:
        print("[ERROR] Creating a list of tickers")
        return True  # Return True when there was an error, matching the other fetch functions

# NOTION FETCH PAGES
def notion_get_pages(db_id_trades, num_pages=None):
    try:
        # ------------------ FETCH THE FIRST 100 PAGES FROM A DB
        # Prepare Request
        url = f"https://api.notion.com/v1/databases/{db_id_trades}/query"
        get_all = num_pages is None  # If num_pages is None, get all pages, otherwise just the defined number
        page_size = 100 if get_all else num_pages
        payload = {"page_size": page_size}

        # Make Request
        raw_response = requests.post(url, json=payload, headers=notion_headers)

        # Process Reply
        parsed_response = raw_response.json()
        result = parsed_response["results"]

        # ------------------ FETCH 100 MORE PAGES AS OFTEN AS REQUIRED
        while parsed_response["has_more"] and get_all:
            # Prepare Request
            payload = {"page_size": page_size, "start_cursor": parsed_response["next_cursor"]}
            url = f"https://api.notion.com/v1/databases/{db_id_trades}/query"

            # Make Request
            raw_response = requests.post(url, json=payload, headers=notion_headers)

            # Process Reply
            parsed_response = raw_response.json()
            result.extend(parsed_response["results"])

        # Logging
        print("[SUCCESS] Fetching data from Notion for database: {}".format(db_id_trades))
        return result
    except Exception as e:
        print("[ERROR] Fetching data from Notion with error: {}".format(e))
        return True  # Return True when there was an error
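
# Usage sketch: notion_get_pages(notion_db_id_trades) pages through the database in batches
# of 100 and returns the raw list of page objects; notion_get_pages(notion_db_id_trades, 5)
# would fetch only the first 5 entries.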

# NOTION UPDATE PAGES
def notion_update_page(page_id: str, data: dict):
    url = f"https://api.notion.com/v1/pages/{page_id}"
    payload = {"properties": data}
    results = requests.patch(url, json=payload, headers=notion_headers)
    return results
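
# Example call (illustrative values), matching the payload built in push_notion_trades_update below:
# notion_update_page(page_id, {"Current (€)": {"number": 11.13}, "IRR (%)": {"number": 0.2534}})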

# TRMNL UPDATE DIAGRAMS (disabled draft)
'''
def push_trmnl_update_chart(dict_numbers, dict_chart, trmnl_page_id):
    # --------------------------------------------------------------------------------- #
    # REQUIREMENTS:
    # The first-level entry of dict_numbers needs to be the "key" for referencing in TRMNL
    # At least 3 keys are expected for numbers to fill the screen
    # The dict_chart data does not need a key
    # Instead, each first-level entry represents a series in the chart by name
    # --------------------------------------------------------------------------------- #

    # Create the data structure for the numbers object
    payload = {}
    payload = dict_numbers

    # Create the data structure for the chart object
    chart = []
    for name, dict_chart_values in dict_chart.items():
        series = {}
        series["name"] = name
        series["data"] = dict_chart_values
        chart.append(series)
        # {"name" : name, "data" : [[date, value], [date, value]]}

    # Attach the chart series to the payload (key name still to be aligned with the TRMNL template)
    payload["chart"] = chart

    try:
        data = json.dumps(payload, indent=2)  # Converts a python dictionary into json
        url = trmnl_base_url + trmnl_page_id
        reply = requests.post(url, data=data, headers=trmnl_headers)
        if reply.status_code == 200:
            print("[SUCCESS] Pushing data to TRMNL")
        elif reply.status_code == 429:
            print("[WARNING] Exceeded TRMNL's API rate limits")
        else:
            print(f"[ERROR] Pushing data to TRMNL with server reply code: {reply.status_code}")
    except Exception as e:
        print("[ERROR] Pushing data to TRMNL with error code: {}".format(e))
'''

# ------------------#
# LEVEL 2 FUNCTIONS #
# ------------------#

# CREATE LIST OF WEEKLY DATES
def create_list_wkl_dates(trades):
    index_date = get_date_open_oldest_trade(trades)

    # Move the start date from a weekend to a weekday (Mon=0 ... Sun=6)
    while index_date.weekday() >= 5:
        index_date = index_date + datetime.timedelta(days=1)

    # Create list
    list_wkl_dates = []
    while index_date <= datetime.date.today():
        list_wkl_dates.append(index_date.isoformat())
        index_date = index_date + datetime.timedelta(days=7)

    # Logging
    print("[SUCCESS] Generating list of weekly dates with {} entries".format(len(list_wkl_dates)))
    return list_wkl_dates
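
# The result is one ISO date string per week, starting at the (weekday-shifted) open date of
# the oldest trade, e.g. ['2018-02-14', '2018-02-21', ...] (illustrative dates).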

# NOTION FETCH & FORMAT DATA
def fetch_format_notion_data(db_id_trades):
    trades = {}
    error = False

    data = notion_get_pages(db_id_trades)

    if data is True:
        error = True
    else:
        for i in data:
            # Each page is loaded as a dictionary
            notion_page = dict(i)

            # Handling expected missing entries (no Close date means the trade is still open)
            try:
                date_close = notion_page["properties"]["Close"]["date"]
                date_close = date_close["start"]
                date_close = datetime.date(*map(int, date_close.split('-')))
            except:
                date_close = 0

            # Handling unexpected missing entries (by skipping this trade)
            try:
                # Try extracting values
                trade = {}

                # Format date-open
                date_open = notion_page["properties"]["Open"]["date"]
                date_open = date_open["start"]
                date_open = datetime.date(*map(int, date_open.split('-')))

                # Combine data into a json-like structure
                trade = {
                    'ticker'         : notion_page["properties"]["Ticker"]["select"]["name"],
                    'date_open'      : date_open,
                    'date_close'     : date_close,
                    'course_open'    : notion_page["properties"]["Open (€)"]["number"],
                    'course_close'   : notion_page["properties"]["Close (€)"]["number"],
                    'course_current' : notion_page["properties"]["Current (€)"]["number"],
                    'irr'            : notion_page["properties"]["IRR (%)"]["number"],
                    'units'          : notion_page["properties"]["Units"]["number"],
                    'dividends'      : notion_page["properties"]["Dividends (€)"]["number"]
                }

                # Save values
                notion_page_id = notion_page["id"]  # Used as key for the dictionary
                trades[notion_page_id] = trade

            except:
                print("[ERROR] Skipped an entry in the notion trades-db - Missing values?")

    # Return data if successful
    if error == True:
        return error
    else:
        return trades

# YFINANCE FETCH & FORMAT DATA
def fetch_format_yf_data(tickers):
    yf_data = {}
    error = False
    for i in tickers:
        ticker = i

        try:
            api = yf.Ticker(ticker)
            data = api.history(period="max")
            print("[SUCCESS] Fetching data from yahoo-finance for ticker: {}".format(ticker))

            # Convert to Pandas DataFrame
            data = pd.DataFrame(data)

            # Delete the columns "Open", "Low", "High" and "Volume"
            del data['Open']
            del data['Low']
            del data['High']
            del data['Volume']
            # "Stock Splits" and "Capital Gains" are not present for every ticker
            try:
                del data['Stock Splits']
                del data['Capital Gains']
            except:
                print("[INFO] Failed cleaning some unused data provided by yahoo-finance for ticker: {}".format(ticker))

            # Get the number of rows in data
            data_rows = data.shape[0]

            # Create a new index without the time component from the existing datetime64 index
            old_index = data.index
            new_index = []
            x = 0
            while x < data_rows:
                date = pd.Timestamp.date(old_index[x])  # Converts the "Pandas Timestamp" object to a "date" object
                new_index.append(date)
                x += 1

            # Add the new index to the dataframe and set it as the index
            data.insert(1, 'Date', new_index)
            data.set_index('Date', inplace=True)

            # Save the dataframe to the yf_data dict
            yf_data[ticker] = data

            # Create a file for easier development
            data.to_csv('yf_data_{}.csv'.format(ticker), index=False)

            # Wait for the API to cool down
            time.sleep(t_sleep_api)

        except:
            error = True
            print("[ERROR] Fetching data from yahoo-finance for ticker: {}".format(ticker))

    # Create a file for easier development
    with open("yf_data.txt", "w") as f:
        f.write(str(yf_data))

    # Return data if successful
    if error == True:
        return error
    else:
        return yf_data
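
# Usage sketch (illustrative ticker symbol): fetch_format_yf_data(["SAP.DE"]) returns a dict of
# DataFrames keyed by ticker, each indexed by date and reduced to the columns "Close" and
# "Dividends" (see the YF FORMATTED DATA STRUCTURE example above).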

# UPDATE NOTION-TRADES-DATABASE
def push_notion_trades_update(trades):
    error = False
    for notion_page_id in trades:
        try:
            # The irr is stored in the format 1.2534
            # Notion needs the format 0.2534
            irr_notion = trades[notion_page_id]['irr'] - 1
            irr_notion = round(irr_notion, 4)

            # Construct the Notion update object
            notion_update = {
                "Current (€)": {
                    "number": trades[notion_page_id]['course_current']
                },
                "IRR (%)": {
                    "number": irr_notion
                },
                "Dividends (€)": {
                    "number": trades[notion_page_id]['dividends']
                }
            }
            # Update the properties of the corresponding notion page
            notion_update_page(notion_page_id, notion_update)
            print("[SUCCESS] Updating page w/ id: {}".format(notion_page_id))
        except:
            error = True
            print("[ERROR] Updating page w/ id: {}".format(notion_page_id))

        # Wait for the API to cool down
        time.sleep(t_sleep_api)

    # When an error occurred, the variable is True
    return error

# CALC CURRENT VALUES PER TRADE
def calc_current_value_per_trade(trades, history_per_trade):

    # Loop over all trades
    for trade_id in trades:

        # Determine which values to fetch, based on whether the trade was already closed
        date_closed = trades[trade_id]["date_close"]
        if date_closed == 0:
            # If the trade is still open, use the performance data from today
            index_date_iso = datetime.date.today().isoformat()
        else:
            # If the trade is closed, use the performance data from the close date
            index_date_iso = date_closed.isoformat()

        # Fetch data from the history and save it for this trade
        trades[trade_id]["course_current"] = history_per_trade[index_date_iso][trade_id]['current_course']
        trades[trade_id]["irr"] = history_per_trade[index_date_iso][trade_id]['current_irr']
        trades[trade_id]["dividends"] = history_per_trade[index_date_iso][trade_id]['total_dividends']

    print("[SUCCESS] Calculating current value per trade")
    return trades

# ------------------#
# LEVEL 3 FUNCTIONS #
# ------------------#

# FILTER ANY HISTORY OBJECT TO SELECTED DATES
def filter_history_by_list(history, dates_list):
    filtered_history = {}
    # Loop over all days
    for index_date in history:
        # Check if the history date is in the filter list
        if index_date in dates_list:
            # If so, add this date entry to the filtered history object
            filtered_history[index_date] = history[index_date]
    # Logging
    print("[SUCCESS] Filtering history")
    return filtered_history

# CALC HISTORY PER TRADE
def calc_history_per_trade(trades, yf_data):
    # Create support variables
    history_per_trade = {}
    total_dividends = 0
    warning_count = 0
    date_open_oldest_trade = get_date_open_oldest_trade(trades)

    # ------------------ LOOP OVER ALL TRADES
    for trade_id in trades:

        # ------------------ PREPARE FOR THE (NEXT) LOOP OVER ALL DAYS
        # Set / reset the index date to the oldest trade day
        # Resetting is required so that the calculations for the next trade start with day 1
        index_date = date_open_oldest_trade

        # Reset the dividend accumulator so dividends never leak from one trade into the next
        total_dividends = 0

        # Set the initial value for the course on the previous day to 0
        # Just in case the very first trade was somehow made on a weekend, where no yfinance data is available
        previous_course = 0.0

        # Check if the trade was already closed
        # If it was not, set the closure date to tomorrow so every day up to today counts as open
        if trades[trade_id]["date_close"] == 0:
            date_close = datetime.date.today() + datetime.timedelta(days=1)
        else:
            date_close = trades[trade_id]["date_close"]
        date_open = trades[trade_id]["date_open"]

        # Keep the ticker for connecting the performance later
        ticker = trades[trade_id]['ticker']

        # ------------------ DETERMINE THE COURSE PER DAY
        while index_date != datetime.date.today() + datetime.timedelta(days=1):

            # Fetch the course for the day & eventual dividends from yf_data
            try:
                current_course = yf_data[ticker].at[index_date, 'Close']
                current_dividends_per_ticker = yf_data[ticker].at[index_date, 'Dividends']

            # Catch missing yf-data (e.g. for weekends) by reusing the course from the previous day
            except:
                current_course = previous_course
                current_dividends_per_ticker = 0.0  # there are never dividends on non-trading days
                warning_count = warning_count + 1   # Increase the warning count

            # Catch the special case of the day the trade was closed
            # In this case, the current course needs to be overwritten with the sell value
            if date_close == index_date:
                current_course = trades[trade_id]['course_close']

            # Save the result for the next iteration
            # This setup also allows a previous course to be passed down across multiple days,
            # which is useful e.g. over a weekend
            previous_course = current_course

            # ------------------ CALCULATE PERFORMANCE IF REQUIRED
            if index_date >= date_open and index_date <= date_close:
                # Calculate performance values
                current_amount = trades[trade_id]['units']
                current_invested = current_amount * trades[trade_id]['course_open']
                total_dividends = total_dividends + current_amount * current_dividends_per_ticker
                current_value = current_amount * current_course
                current_value_with_dividends = current_value + total_dividends
                current_irr = calculate_irr(index_date, date_open, current_value_with_dividends, current_invested)
                total_performanance = current_value_with_dividends - current_invested

                if current_value_with_dividends == 0:
                    print("0-value error with ticker: {}".format(ticker))

            else:
                # Write 0, if the trade is not relevant for the current timeframe
                current_amount = 0
                current_invested = 0.00
                total_dividends = 0.00
                current_value = 0.00
                current_irr = 0.00
                total_performanance = 0.0

            # ------------------ STORE RESULTS
            index_date_iso = index_date.isoformat()

            # Store all values in a dict
            dict_a = {}
            dict_a['current_amount'] = current_amount
            dict_a['current_invested'] = current_invested
            dict_a['total_dividends'] = total_dividends
            dict_a['current_value'] = current_value
            dict_a['current_irr'] = current_irr
            dict_a['current_course'] = current_course
            dict_a['total_performanance'] = total_performanance

            # Check if the date is already present
            if index_date_iso in history_per_trade:
                dict_b = history_per_trade[index_date_iso]
            else:
                dict_b = {}
            # Add the values to the trade_id value-pair
            dict_b[trade_id] = dict_a

            # Update the history_per_trade
            history_per_trade.update({index_date_iso : dict_b})

            # ------------------ NEXT ITERATION
            index_date = index_date + datetime.timedelta(days=1)

        # ------------------ LOGGING
        if warning_count > 0:
            print("[WARNING] Calculating history for trade: {} with ticker: {}".format(trade_id, ticker))
            print("          No yf-data available in {} cases of ticker & date".format(warning_count))
            print("          Probable reason: non-trading days, e.g. weekends")
            print("          Used the values from the previous trading day instead")
        else:
            print("[SUCCESS] Calculating history for trade: {} with ticker: {}".format(trade_id, ticker))

        # Reset the warning count for the next trade
        warning_count = 0

    data = json.dumps(history_per_trade, indent=2)  # Converts a python dictionary into json

    # Logging
    print("[SUCCESS] Calculating history for trades")
    with open("history_per_trade.json", "w") as f:
        f.write(data)

    return history_per_trade
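
# Resulting structure (illustrative values): history_per_trade is keyed by ISO date, then by
# notion page id, e.g.
# history_per_trade["2025-06-19"][trade_id] = {
#     'current_amount': 10, 'current_invested': 100.0, 'total_dividends': 2.5,
#     'current_value': 111.29, 'current_irr': 1.10, 'current_course': 11.129,
#     'total_performanance': 13.79}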

# CALC THE HISTORY PER TICKER & OVERALL
def calc_history_per_ticker(history_per_trade, tickers, trades):

    # ------------------ CREATE JSON OBJECT
    # Create the json-dict
    history_per_ticker = {}

    # Loop over each date entry in the history
    for date_entry in history_per_trade:

        # Set initial values to avoid NULL errors
        initial_values = {}
        initial_values["current_invested"] = 0
        initial_values["total_dividends"] = 0
        initial_values["current_value"] = 0
        initial_values["current_irr"] = 0
        initial_values["total_performanance"] = 0

        # Create a dict to store the results per day and ticker
        # Each entry gets its own copy, so the tickers and the "total" do not share one dict object
        dict_daily = {}
        for ticker in tickers:
            dict_daily[ticker] = initial_values.copy()
            # Added only for ticker entries, not for the "total" value
            dict_daily[ticker]["current_amount"] = 0
            dict_daily[ticker]["current_course"] = 0

        # Add values for a ticker called "total"
        dict_daily["total"] = initial_values.copy()

        # Loop over each trade entry for that day
        for trade_id in history_per_trade[date_entry]:

            # Extract data from the history_per_trade
            trade_amount = history_per_trade[date_entry][trade_id]['current_amount']
            trade_invested = history_per_trade[date_entry][trade_id]['current_invested']
            trade_dividends = history_per_trade[date_entry][trade_id]['total_dividends']
            trade_value = history_per_trade[date_entry][trade_id]['current_value']
            trade_irr = history_per_trade[date_entry][trade_id]['current_irr']
            trade_course = history_per_trade[date_entry][trade_id]['current_course']
            trade_performanance = history_per_trade[date_entry][trade_id]['total_performanance']

            # Look up the ticker by the trade id
            ticker = trades[trade_id]["ticker"]

            # Extract data from the history_per_ticker
            ticker_amount = dict_daily[ticker]['current_amount']
            ticker_invested = dict_daily[ticker]['current_invested']
            ticker_dividends = dict_daily[ticker]['total_dividends']
            ticker_value = dict_daily[ticker]['current_value']
            ticker_irr = dict_daily[ticker]['current_irr']
            ticker_performanance = dict_daily[ticker]['total_performanance']

            # Overwrite the values in the history_per_ticker
            dict_daily[ticker]['current_amount'] = ticker_amount + trade_amount  # Simple addition works
            dict_daily[ticker]['current_invested'] = ticker_invested + trade_invested
            dict_daily[ticker]['total_dividends'] = ticker_dividends + trade_dividends
            dict_daily[ticker]['current_value'] = ticker_value + trade_value
            dict_daily[ticker]['total_performanance'] = ticker_performanance + trade_performanance
            dict_daily[ticker]['current_course'] = trade_course  # Simple overwrite is fine, as the course is the same for all trades

            # Catch 0 values
            if ticker_invested == 0 and trade_invested == 0:
                dict_daily[ticker]['current_irr'] = 0
            else:
                dict_daily[ticker]['current_irr'] = (ticker_irr * ticker_invested + trade_irr * trade_invested) / (ticker_invested + trade_invested)
            # --> The IRR is weighted by the invested amounts. This way a trade worth 25% of the ticker's invested volume only has a 25% influence on the IRR
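
            # Worked example (illustrative numbers): if the ticker already carries 1000 invested at
            # an IRR of 1.10 and the trade adds 250 invested at an IRR of 1.20, the weighted result
            # is (1.10 * 1000 + 1.20 * 250) / 1250 = 1.12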

        # Calculate the "total" entry after finishing with all the trades
        for ticker in tickers:

            # Same logic as above, but shortened code
            dict_daily["total"]['total_dividends'] = dict_daily["total"]['total_dividends'] + dict_daily[ticker]['total_dividends']
            dict_daily["total"]['current_value'] = dict_daily["total"]['current_value'] + dict_daily[ticker]['current_value']
            dict_daily["total"]['total_performanance'] = dict_daily["total"]['total_performanance'] + dict_daily[ticker]['total_performanance']

            # Extract the values before rewriting them, to preserve them for the IRR calculation
            total_invested = dict_daily["total"]['current_invested']
            ticker_invested = dict_daily[ticker]['current_invested']
            dict_daily["total"]['current_invested'] = total_invested + ticker_invested

            # Extract the values before rewriting them, to preserve them for the IRR calculation
            total_irr = dict_daily["total"]['current_irr']
            ticker_irr = dict_daily[ticker]['current_irr']
            if ticker_invested == 0 and total_invested == 0:
                dict_daily["total"]['current_irr'] = 0
            else:
                dict_daily["total"]['current_irr'] = (total_irr * total_invested + ticker_irr * ticker_invested) / (total_invested + ticker_invested)

        # Finally, write the results for this day entry to the history_per_ticker
        history_per_ticker[date_entry] = dict_daily

    # Logging
    print("[SUCCESS] Calculating history per ticker")
    data = json.dumps(history_per_ticker, indent=2)  # Converts a python dictionary into json
    with open("history_per_ticker.json", "w") as f:
        f.write(data)
    return history_per_ticker


### -------------------- MAIN PROGRAM --------------------
while True:
    # Clear variables
    trades = {}
    yf_data = {}
    history_per_trade = {}
    tickers = []
    error = False

    # Fetch data from the "trades" db in notion
    # Contains input fields (i.e. date_open) as well as output fields (i.e. irr, close, current)
    trades = fetch_format_notion_data(notion_db_id_trades)
    if trades == True:
        error = True

    if error == False:
        # Generates a list of unique tickers without duplicates to reduce the workload for the yfinance api
        tickers = filter_list_of_tickers(trades)
        if tickers == True:
            error = True

    if error == False:
        yf_data = fetch_format_yf_data(tickers)
        if yf_data == True:
            error = True

    if error == False:
        # Calculates & stores a history per trade, indexed by date and notion page id
        history_per_trade = calc_history_per_trade(trades, yf_data)
        if history_per_trade == True:
            error = True

    if error == False:
        # Selects the most current values for the irr, current course etc. and saves them to the trades object
        # This object stores the notion pages with all properties
        # By "overwriting" the output properties (i.e. irr) and then updating the notion pages, fresh data can be stored
        trades = calc_current_value_per_trade(trades, history_per_trade)

    if error == False:
        # Calculates & stores a history per ticker AND a total across all tickers, indexed by the ticker name
        # Errors in calculating the history should not stop the remaining steps
        history_total = calc_history_per_ticker(history_per_trade, tickers, trades)
    else:
        # Just clean-code practice, so that the variable is not undefined
        history_total = 0

    if error == False:
        # Creates a list containing one date per week
        list_wkl_dates = create_list_wkl_dates(trades)
    else:
        # Just clean-code practice, so that the variable is not undefined
        list_wkl_dates = 0

    if error == False:
        history_total_wkl = filter_history_by_list(history_total, list_wkl_dates)

    if error == False:
        # Updates / replaces all pages in the notion "trades" database
        push_notion_trades_update(trades)

    # if error == False:
    #     push_trmnl_update(test_data, trmnl_id)

    # Check for a clean execution
    if error == False:
        print("[SUCCESS] Completed cycle at: {}".format(datetime.datetime.now()))
    else:
        print("[ERROR] Failed cycle at: {}".format(datetime.datetime.now()))

    # Sleep for some minutes
    print("[INFO] Waiting a few minutes before the next execution")
    print("----------------------------------------------------------------")
    time.sleep(t_sleep_py * 60)