Restoring Backup

This commit is contained in:
2025-10-03 20:56:17 +02:00
parent fb099ed5da
commit 1392a27b7d
3 changed files with 16762 additions and 1 deletions

View File

@@ -1,2 +1,20 @@
# Notion-Performance-Tracker
# Bot_Notion_Performance_Tracker
This Bot is intended to fetch stock-data, calculate performance and add key-kpis to a notion-db matching tracker symbols in said notion-db
Current Features:
- Information/KPIs on individual trades-level
- Fetch & format data from a given Notion-table
- Fetch & format historical data from yfinance
- Calculating Dividends
- Calculate the daily IRR (incl. Dividends)
- Pushing updates to Notion
Current Limitation:
- Dividends may not be calculated correctly...the total seems low...
Feature Pipeline:
- Information/KPIs on the whole portfolio
- Function to create a correctly formatted table in notion
- Secure way of storing secret keys
- Subtracting the effect of inflation

16102
history_per_trade.json Normal file

File diff suppressed because it is too large Load Diff

641
main.py Normal file
View File

@@ -0,0 +1,641 @@
### -------------------- LIBARIES --------------------
import datetime
import time
import json
import yfinance as yf
import pandas as pd
import requests
import os
### -------------------- GLOBAL VARIABLES --------------------
# PROGRAM
json_result = {}    # scratch dict for assembled JSON payloads
t_sleep_py = 10     # program cooldown timer (minutes; multiplied by 60 in the main loop)
t_sleep_api = 1     # YF/Notion API cooldown timer (seconds)
# NOTION
# SECURITY FIX: read the integration token from the environment instead of
# hard-coding it. The inline fallback keeps legacy behavior but the committed
# token should be revoked/rotated and the env var used exclusively.
notion_token = os.environ.get("NOTION_TOKEN", "secret_b7PiPL2FqC9QEikqkAEWOht7LmzPMIJMWTzUPWwbw4H")
notion_headers = {
    "Authorization": "Bearer " + notion_token,
    "Content-Type": "application/json",
    "Notion-Version": "2022-02-22"
}
notion_db_id_trades = "95f7a2b697a249d4892d60d855d31bda"
notion_db_id_performance = "1c010a5f51bd806f90d8e76a1286cfd4"
# TRMNL
trmnl_page_id = "a791a6f1-dcac-4ea2-89fb-6c7be76d74b7"
trmnl_base_url = "https://usetrmnl.com/api/custom_plugins/"
trmnl_headers = {"Content-Type": "application/json"}
# TRMNL Special WebHook Payload Structure
# The outer "merge_variables" is required to send the data through
test_data = '''{
"merge_variables":
{"key": "data"}
}'''
# TRMNL Diagram Structure
trmnl_object = '''{"merge_variables": {"key": "payload"}}'''
### -------------------- EXAMPLE STRUCTURES --------------------
# NOTION FORMATED DATA STRUCTURE
''' trades[notion_page_id] = {
'ticker' : notion_page["properties"]["Ticker"]["select"]["name"],
'date_open' : date_open,
'date_close' : date_close,
'course_open' : notion_page["properties"]["Open (€)"]["number"],
'course_close' : notion_page["properties"]["Close (€)"]["number"],
'course_current' : notion_page["properties"]["Current (€)"]["number"],
'irr' : notion_page["properties"]["IRR (%)"]["number"],
'units' : notion_page["properties"]["Units"]["number"],
'dividends' : notion_page["properties"]["Dividends (€)"]["number"]
}
'''
# YF FORMATTED DATA STRUCTURE
'''yf_data[notion_page_id] = [1670 rows x 2 columns]
Date Close Dividends
2018-02-14 7.358605 0.0
2018-02-15 7.366426 0.0
2018-02-16 7.366426 0.0
2018-02-19 7.366426 0.0
2018-02-20 7.366426 0.0
... ... ...
2025-06-13 11.024000 0.0
2025-06-16 11.050000 0.0
2025-06-17 11.076000 0.0
2025-06-18 11.098500 0.0
2025-06-19 11.129000 0.0
'''
# TRMNL CHART-DATA STRUCTURE
'''var data = [{"name":"Current","data":[["2024-12-31",3982.23],......,["2024-12-01",946.02]]},{"name":"Comparison","data":[["2024-12-30",590.56],......,["2024-12-01",425.28]]}];'''
### -------------------- FUNCTIONS --------------------
# ------------------#
# LEVEL 1 FUNCTIONS #
# ------------------#
# CALCULATE THE IRR
def calculate_irr(date_now, date_open, value_now, value_open):
    """Return the annualized growth factor between two valuations.

    date_now / date_open: datetime.date bounds of the holding period.
    value_now / value_open: portfolio values at those dates.
    Returns the yearly growth factor (value_now/value_open) ** (365/days);
    a negative ratio keeps its sign by rooting the magnitude. On any
    failure (zero-length period, non-date inputs, value_open == 0) prints
    an error and returns True (legacy error signal — callers must check).
    """
    try:
        years = (date_now - date_open).days / 365  # day-based span scaled to years
        ratio = value_now / value_open
        if ratio < 0:
            # Catch negative ratios: root the magnitude, then restore the sign
            return -((-ratio) ** (1 / years))
        return ratio ** (1 / years)  # mathematically the years-th root of the ratio
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt etc. still propagate
        print("[ERROR] Calculation of irr")
        return True  # legacy contract: a truthy return signals the error
# GET THE DAY OF THE OLDEST TRADE
def get_date_open_oldest_trade(trades):
    """Return the earliest 'date_open' across all trades, capped at today.

    With an empty trades dict this returns today's date.
    """
    candidates = [trade["date_open"] for trade in trades.values()]
    candidates.append(datetime.date.today())
    return min(candidates)
# CREATES LIST OF UNIQUE TICKERS
def filter_list_of_tickers(trades):
    """Return the unique tickers across all trades, in first-seen order.

    trades: {page_id: trade_dict} as produced by fetch_format_notion_data.
    On a malformed trade entry the partially built list is returned
    (legacy best-effort contract) after printing an error.
    """
    tickers = []
    try:
        for trade in trades.values():
            ticker = trade['ticker']
            # Add ticker to list, if not already present
            if ticker not in tickers:
                tickers.append(ticker)
        print("[SUCCESS] Creating a list of {} tickers".format(len(tickers)))
    # Narrowed from a bare except: missing key / non-dict trade entries
    except (KeyError, TypeError):
        print("[ERROR] Creating a list of tickers")
    return tickers
# NOTION FETCH PAGES
def notion_get_pages(db_id_trades, num_pages=None):
    """Fetch pages from a Notion database, following cursor pagination.

    db_id_trades: id of the Notion database to query.
    num_pages: None fetches everything (100 per request); otherwise only
        that many pages are requested.
    Returns the list of raw page objects, or True on error (legacy contract).
    """
    try:
        # ------------------ FETCH THE FIRST 100 PAGES FROM A DB
        url = f"https://api.notion.com/v1/databases/{db_id_trades}/query"
        get_all = num_pages is None  # If num_pages is None, get all pages, otherwise just the defined number.
        page_size = 100 if get_all else num_pages
        payload = {"page_size": page_size}
        raw_response = requests.post(url, json=payload, headers=notion_headers)
        parsed_response = raw_response.json()
        result = parsed_response["results"]
        # ------------------ FETCH 100 MORE PAGES AS OFTEN AS REQUIRED
        # Notion reports "has_more"/"next_cursor" while results remain
        while parsed_response["has_more"] and get_all:
            payload = {"page_size": page_size, "start_cursor": parsed_response["next_cursor"]}
            raw_response = requests.post(url, json=payload, headers=notion_headers)
            parsed_response = raw_response.json()
            result.extend(parsed_response["results"])
        print("[SUCCESS] Fetching Data from Notion for database: {}".format(db_id_trades))
        return result
    except Exception as e:
        # BUG FIX: the original formatted the Exception *class* instead of
        # the caught instance, so the message never showed the real error.
        print("[ERROR] Fetching Data from Notion with error: {}".format(e))
        return True  # Return True when there was an error
# NOTION UPDATE PAGES
def notion_update_page(page_id: str, data: dict):
    """PATCH the given property payload onto one Notion page.

    Returns the raw requests.Response so callers can inspect the status.
    """
    endpoint = f"https://api.notion.com/v1/pages/{page_id}"
    return requests.patch(endpoint, json={"properties": data}, headers=notion_headers)
# TRMNL UPDATE DIAGRAMMS
# NOTE(review): dead code, deliberately disabled by wrapping it in a
# triple-quoted string. It references names that are never defined in this
# file (dict_numbers, json_chart, json_data) and would need a rework before
# being re-enabled — presumably it is a work-in-progress draft; confirm
# with the author before deleting.
'''
def push_trmnl_update_chart(wklydict_numbers, dict_chart, trmnl_page_id):
# --------------------------------------------------------------------------------- #
# REQUIREMENTS:
# The first-level-entry of dict_numbers needs be the "key" for referencing in TRMNL
# At least 3 keys are expected for numbers to fill the screen
# The json_chart data does not need a key
# Instead, each first-level-entry represents a series in the chart by name
# --------------------------------------------------------------------------------- #
# Create the data-structure for the chart-object
payload = {}
payload = dict_numbers
# Create the data-structure for the chart-object
chart = []
for entry in json_chart:
series = {}
name = entry[0]
dict_chart_values = entry[1]
series["name"] = name
series["data"] = dict_chart_values
chart.append(series)
# {"name" : name, "data" : [[date, value], [date, value]]}
try:
data = json.dumps(json_data, indent=2) # Converts a python-dictionary into a json
url = trmnl_base_url + trmnl_page_id
reply = requests.post(url, data=data, headers = trmnl_headers)
if reply.status_code == 200:
print("[SUCCESS] Pushing data to TRMNL")
elif reply.status_code == 429:
print("[WARNING] Exceeded TRMNL's API rate limits")
else:
print(f"[ERROR] Pushing data to TRMNL with server reply code: {reply.status_code}")
except Exception as e:
print("[ERROR] Pushing data to TRMNL with error code: {}".format(e))
'''
# ------------------#
# LEVEL 2 FUNCTIONS #
# ------------------#
# CREATE LIST OF WEEKLY DATES
def create_list_wkl_dates(trades):
    """Return ISO-formatted dates, one per week, from the oldest trade until today.

    The start date is first pushed off the weekend so that every entry in
    the resulting list falls on the same weekday.
    """
    index_date = get_date_open_oldest_trade(trades)
    # Move start date from weekend to weekday.
    # BUG FIX: weekday() is 5 for Saturday and 6 for Sunday; the original
    # '> 5' only skipped Sundays, leaving Saturday start dates in place.
    while index_date.weekday() >= 5:
        index_date = index_date + datetime.timedelta(days=1)
    # Step forward in 7-day increments up to and including today
    list_wkl_dates = []
    while index_date <= datetime.date.today():
        list_wkl_dates.append(index_date.isoformat())
        index_date = index_date + datetime.timedelta(days=7)
    # Logging
    print("[SUCCESS] Generating list of weekly dates with {} entries".format(len(list_wkl_dates)))
    return list_wkl_dates
# NOTION FETCH & FORMAT DATA
def fetch_format_notion_data(db_id_trades):
    """Fetch all trade pages from Notion and flatten them into plain dicts.

    db_id_trades: id of the Notion trades database.
    Returns {page_id: trade_dict} on success, or True on fetch error
    (legacy contract, propagated from notion_get_pages). A missing or
    malformed Open/Close date is stored as the sentinel 0, which the rest
    of the program treats as "no date".
    """
    trades = {}
    data = notion_get_pages(db_id_trades)
    if data is True:
        return True  # propagate the legacy error signal
    for page in data:
        # Each page is loaded as a dictionary
        notion_page = dict(page)
        # Narrowed from bare excepts: a missing property is a KeyError, a
        # null "date" is a TypeError, a malformed string a ValueError.
        try:
            date_open = notion_page["properties"]["Open"]["date"]["start"]
            date_open = datetime.date(*map(int, date_open.split('-')))
        except (KeyError, TypeError, ValueError):
            date_open = 0
        try:
            date_close = notion_page["properties"]["Close"]["date"]["start"]
            date_close = datetime.date(*map(int, date_close.split('-')))
        except (KeyError, TypeError, ValueError):
            date_close = 0
        # The Notion page id keys the trade for all later lookups
        trades[notion_page["id"]] = {
            'ticker' : notion_page["properties"]["Ticker"]["select"]["name"],
            'date_open' : date_open,
            'date_close' : date_close,
            'course_open' : notion_page["properties"]["Open (€)"]["number"],
            'course_close' : notion_page["properties"]["Close (€)"]["number"],
            'course_current' : notion_page["properties"]["Current (€)"]["number"],
            'irr' : notion_page["properties"]["IRR (%)"]["number"],
            'units' : notion_page["properties"]["Units"]["number"],
            'dividends' : notion_page["properties"]["Dividends (€)"]["number"]
        }
    return trades
# YFINANCE FETCH & FORMAT DATA
def fetch_format_yf_data(tickers):
    """Fetch the full price history for every ticker from yahoo-finance.

    tickers: iterable of ticker symbols.
    Returns {ticker: DataFrame} where each frame keeps only the 'Close'
    and 'Dividends' columns and is indexed by plain datetime.date values
    (index name 'Date'); returns True if any ticker failed (legacy contract).
    """
    yf_data = {}
    error = False
    for ticker in tickers:
        try:
            api = yf.Ticker(ticker)
            data = api.history(period="max")
            print("[SUCCESS] Fetching data from yahoo-finance for ticker: {}".format(ticker))
            data = pd.DataFrame(data)
            # Keep only 'Close' and 'Dividends'. errors='ignore' also fixes
            # the original partial-delete bug where a missing 'Stock Splits'
            # column aborted the try-block before 'Capital Gains' was removed.
            data = data.drop(
                columns=['Open', 'Low', 'High', 'Volume', 'Stock Splits', 'Capital Gains'],
                errors='ignore')
            # Replace the tz-aware DatetimeIndex with plain date objects so
            # later .at[date, ...] lookups by datetime.date work
            data.index = pd.Index([ts.date() for ts in data.index], name='Date')
            # Save the data-frame to the yf_data dict, keyed by ticker
            yf_data[ticker] = data
            # Wait for the API to cool down
            time.sleep(t_sleep_api)
        except Exception:
            # Narrowed from a bare except; the cycle is marked failed but
            # remaining tickers are still attempted (best-effort)
            error = True
            print("[ERROR] Fetching data from yahoo-finance for ticker: {}".format(ticker))
    # Return data if successful
    if error:
        return error
    return yf_data
# UPDATE NOTION-TRADES-DATABASE
def push_notion_trades_update(trades):
    """Push current course, IRR and dividends of every trade back to Notion.

    trades: {page_id: trade_dict} with 'course_current', 'irr' and
        'dividends' already filled in.
    Returns True if at least one page update failed, otherwise False.
    """
    error = False
    for notion_page_id in trades:
        try:
            trade = trades[notion_page_id]
            # Notion expects percentages as decimal fractions
            irr_notion = round(trade['irr'] / 100, 4)
            notion_update = {
                "Current (€)": {
                    "number": trade['course_current']
                },
                "IRR (%)": {
                    "number": irr_notion
                },
                "Dividends (€)": {
                    "number": trade['dividends']
                }
            }
            # Update the properties of the corresponding notion-page
            notion_update_page(notion_page_id, notion_update)
            print("[SUCCESS] Updating page /w id: {}".format(notion_page_id))
        except Exception:
            # Narrowed from a bare except; keep updating the remaining pages
            error = True
            print("[ERROR] Updating page /w id: {}".format(notion_page_id))
        # Wait for the API to cool off (on success and failure alike)
        time.sleep(t_sleep_api)
    return error
# CALC CURRENT VALUES PER TRADE
def calc_current_value_per_trade(trades, history_per_trade):
    """Copy the latest performance snapshot from the history into each trade.

    Open trades (date_close == 0) read today's history entry; closed trades
    read the entry of their close date. Mutates *trades* in place and
    returns it.
    """
    for trade_id, trade in trades.items():
        date_closed = trade["date_close"]
        # Closed trades are frozen at their close date; open ones track today
        if date_closed == 0:
            index_date_iso = datetime.date.today().isoformat()
        else:
            index_date_iso = date_closed.isoformat()
        snapshot = history_per_trade[index_date_iso][trade_id]
        trade["course_current"] = snapshot['current_course']
        trade["irr"] = snapshot['current_irr']
        trade["dividends"] = snapshot['total_dividends']
    print ("[SUCCESS] Calculating current value per trade")
    return trades
# ------------------#
# LEVEL 3 FUNCTIONS #
# ------------------#
# FILTER ANY HISTORY OBJECT TO SELECTED DATES
def filter_history_by_list(history, dates_list):
    """Return only the history entries whose date key is in *dates_list*.

    history: {iso_date: entry} mapping.
    dates_list: iterable of ISO date strings to keep.
    The filter list is converted to a set once, replacing the original
    O(len(dates_list)) scan per history date with an O(1) lookup.
    """
    wanted = set(dates_list)
    filtered_history = {date: entry for date, entry in history.items() if date in wanted}
    # Logging
    print ("[SUCCESS] Filtering History")
    return filtered_history
# CALC THE HISTORY OVERALL
def calc_history_total(history_per_trade, trades):
    """Aggregate the per-trade history into one portfolio-level history.

    history_per_trade: {iso_date: {trade_id: value_dict}} from
        calc_history_per_trade.
    trades: {page_id: trade_dict}, used only to find the oldest open date.
    Returns {iso_date: totals} covering every day from the oldest trade
    until today; 'current_irr' per day is the investment-weighted average
    of the individual trade IRRs.
    """
    # ------------------ CREATE JSON OBJECT
    # BUG FIX: the original assigned the SAME 'values' dict object to every
    # date, so all days aggregated into one shared accumulator. Each date
    # now gets its own fresh dict.
    history_total = {}
    index_date = get_date_open_oldest_trade(trades)
    while index_date <= datetime.date.today():
        history_total[index_date.isoformat()] = {
            'current_invested': 0.0,
            'current_value': 0.0,
            'current_irr': 0.0,
            'current_performance': 0.0,
        }
        index_date = index_date + datetime.timedelta(days=1)
    # ------------------ AGGREGATE DATA
    for index_date in history_per_trade:
        totals = history_total[index_date]
        # Sum every trade's contribution for this day
        for values in history_per_trade[index_date].values():
            totals['current_invested'] = totals['current_invested'] + values['current_invested']
            totals['current_value'] = totals['current_value'] + values['current_value']
            # Invested-weighted IRR sum; normalized after the loop
            totals['current_irr'] = totals['current_irr'] + values['current_irr'] * values['current_invested']
        # Weighted irr & total performance
        if totals['current_invested'] == 0.0:
            totals['current_irr'] = 0.0
        else:
            totals['current_irr'] = totals['current_irr'] / totals['current_invested']
        # BUG FIX: performance is value MINUS invested (consistent with
        # calc_history_per_trade), not plus.
        totals['total_performance'] = totals['current_value'] - totals['current_invested']
    # Logging
    print("[SUCCESS] Calculating history overall")
    return history_total
# CALC HISTORY PER TRADE
def calc_history_per_trade(trades, yf_data):
    """Build a day-by-day performance history for every trade.

    trades: {page_id: trade_dict} from fetch_format_notion_data.
    yf_data: {ticker: DataFrame} from fetch_format_yf_data, indexed by
        datetime.date with 'Close' and 'Dividends' columns.
    Returns {iso_date: {trade_id: value_dict}} spanning every day from the
    oldest trade until today, and dumps the result to
    history_per_trade.json for easier development.
    """
    history_per_trade = {}
    warning_count = 0
    date_open_oldest_trade = get_date_open_oldest_trade(trades)
    # ------------------ LOOP OVER ALL TRADES
    for trade_id in trades:
        # ------------------ PREPARE FOR THE (NEXT) LOOP OVER ALL DAYS
        # Restart the day-walk at the oldest trade day for every trade
        index_date = date_open_oldest_trade
        # Fallback course in case the very first day has no yfinance data
        previous_course = 0.0
        # BUG FIX: dividends now accumulate per trade; the original shared
        # one counter across all trades, leaking totals between them.
        total_dividends = 0.0
        # Open trades get a close date in the future so the window check
        # below treats them as active through today
        if trades[trade_id]["date_close"] == 0:
            date_close = datetime.date.today() + datetime.timedelta(days=1)
        else:
            date_close = trades[trade_id]["date_close"]
        date_open = trades[trade_id]["date_open"]
        # Keep ticker for connecting performance later
        ticker = trades[trade_id]['ticker']
        # ------------------ DETERMINE THE COURSE PER DAY
        while index_date != datetime.date.today() + datetime.timedelta(days=1):
            # Fetch course for the day & eventual dividends from yf_data
            try:
                # BUG FIX: yf_data is keyed by ticker (see
                # fetch_format_yf_data), not by the Notion trade id
                current_course = yf_data[ticker].at[index_date, 'Close']
                current_dividends_per_ticker = yf_data[ticker].at[index_date, 'Dividends']
            except Exception:
                # Missing yf-data (eg. weekends): reuse the previous day's course
                current_course = previous_course
                current_dividends_per_ticker = 0.0  # there are never dividends on non-trading days
                warning_count = warning_count + 1
            else:
                # BUG FIX: remember the fetched course so the weekend
                # fallback above reuses the last trading day instead of 0.0
                previous_course = current_course
            # On the day the trade was closed the market course is
            # overwritten with the actual sell-value
            if date_close == index_date:
                current_course = trades[trade_id]['course_close']
            # ------------------ CALCULATE PERFORMANCE IF REQUIRED
            # BUG FIX: the original condition was inverted
            # (date_open >= index_date and date_close <= index_date), which
            # could only hold for a same-day open and close.
            if date_open <= index_date <= date_close:
                # Calculate performance values for the active window
                current_amount = trades[trade_id]['units']
                current_invested = current_amount * trades[trade_id]['course_open']
                total_dividends = total_dividends + current_amount * current_dividends_per_ticker
                current_value = current_amount * current_course
                current_irr = calculate_irr(index_date, date_open, current_value, current_invested)
                total_performanance = current_value - current_invested
            else:
                # Write 0, if trade is not relevant for current timeframe
                current_course = 0.0
                current_amount = 0
                current_invested = 0.00
                total_dividends = 0.00
                current_value = 0.00
                current_irr = 0.00
                total_performanance = 0.0
            # ------------------ STORE RESULTS
            index_date_iso = index_date.isoformat()
            dict_a = {
                'current_amount': current_amount,
                'current_invested': current_invested,
                'total_dividends': total_dividends,
                'current_value': current_value,
                'current_irr': current_irr,
                'current_course': current_course,
                'total_performanance': total_performanance,
            }
            # Merge into any entry another trade already created for this day
            dict_b = history_per_trade.get(index_date_iso, {})
            dict_b[trade_id] = dict_a
            history_per_trade[index_date_iso] = dict_b
            # ------------------ NEXT ITERATION
            index_date = index_date + datetime.timedelta(days=1)
        # ------------------ LOGGING
        print("[SUCCESS] Calculating history for trade: {} with ticker: {}".format(trade_id, ticker))
    # ------------------ LOGGING
    if warning_count > 0:
        print("[WARNING] Calculating history per trade:")
        print("[WARNING] No yf-data available in {} cases of ticker & date".format(warning_count))
        print("[WARNING] Probably reason is non-trading-days eg. weekends")
        print("[WARNING] Used values from previous trade-day instead")
    # Create file for easier development
    with open("history_per_trade.json", "w") as f:
        f.write(json.dumps(history_per_trade, indent=2))
    return history_per_trade
### -------------------- MAIN PROGRAMM --------------------
# Main cycle: fetch trades from Notion, fetch prices from yfinance, compute
# the per-trade and portfolio histories, then sleep and repeat forever.
# The fetch helpers signal failure by returning True, which the `== True`
# checks below detect.
# NOTE(review): filter_list_of_tickers always returns a list, so its
# `tickers == True` error branch can never trigger — verify intent.
while True:
    # Clear variables
    trades = {}
    yf_data = {}
    history_per_trade = {}
    tickers = []
    error = False
    # Execute Functions
    trades = fetch_format_notion_data(notion_db_id_trades)
    if trades == True:
        error = True
    if error == False:
        tickers = filter_list_of_tickers(trades)
        if tickers == True:
            error = True
    if error == False:
        yf_data = fetch_format_yf_data(tickers)
        if yf_data == True:
            error = True
    if error == False:
        history_per_trade = calc_history_per_trade(trades, yf_data)
    if error == False:
        trades = calc_current_value_per_trade(trades, history_per_trade)
    if error == False:
        list_wkl_dates = create_list_wkl_dates(trades)
    if error == False:
        history_total = calc_history_total(history_per_trade, trades)
    if error == False:
        history_total_wkl = filter_history_by_list(history_total, list_wkl_dates)
    # if error == False:
    # push_trmnl_update(test_data, trmnl_id)
    # Check for clear execution
    if error == False:
        print("[SUCCESS] Completed cycle at: {}".format(datetime.datetime.now()))
    else:
        print("[ERROR] Failed cycle at: {}".format(datetime.datetime.now()))
    # Sleep for some minutes (t_sleep_py is in minutes)
    print("[INFO] Waiting a few minutes before the next execution")
    print("----------------------------------------------------------------")
    time.sleep(t_sleep_py * 60)