@@ -1,4 +1,7 @@
"""Module to handle initialization, imports, for DeFiLlama class"""
"""Module to handle initialization, imports, for DeFiLlama class

DeFiLlama API Docs: https://defillama.com/docs/api
"""


from .defillama import *
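
A minimal sketch of the import this star-import enables after the move (the package path is assumed from the renames in this PR):

    # Sketch: the star-import re-exports the class at the package root.
    from pycaw.defillama import DeFiLlama  # assumed path, per this PR's rename

    dl = DeFiLlama()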
128 changes: 82 additions & 46 deletions pycaw/messari/defillama/defillama.py → pycaw/defillama/defillama.py
@@ -1,38 +1,60 @@
"""This module is meant to contain the DeFiLlama class"""
"""This module is meant to contain the DeFiLlama class

DeFiLlama API Docs: https://defillama.com/docs/api
"""

# Global imports
import datetime
from string import Template
from typing import Union, List, Dict

from pycaw.defillama import helpers
from pycaw.messari import dataloader
from pycaw.messari import utils

import pandas as pd
from typing import Union, List, Dict

from messari.dataloader import DataLoader
# Local imports
from messari.utils import validate_input, get_taxonomy_dict, time_filter_df
from .helpers import format_df

##########################
# URL Endpoints
##########################
DL_PROTOCOLS_URL = "https://api.llama.fi/protocols"
DL_GLOBAL_TVL_URL = "https://api.llama.fi/charts/"
DL_CURRENT_PROTOCOL_TVL_URL = Template("https://api.llama.fi/tvl/$slug")
DL_CHAIN_TVL_URL = Template("https://api.llama.fi/charts/$chain")
DL_GET_PROTOCOL_TVL_URL = Template("https://api.llama.fi/protocol/$slug")


class DeFiLlama(DataLoader):
"""This class is a wrapper around the DeFi Llama API
"""
class DeFiLlama(dataloader.DataLoader):
"""This class is a wrapper around the DeFi Llama API"""

def __init__(self):
messari_to_dl_dict = get_taxonomy_dict("messari_to_dl.json")
DataLoader.__init__(self, api_dict=None, taxonomy_dict=messari_to_dl_dict)
api_urls: Dict[str, str]

def get_protocol_tvl_timeseries(self, asset_slugs: Union[str, List],
start_date: Union[str, datetime.datetime] = None,
end_date: Union[str, datetime.datetime] = None) -> pd.DataFrame:
def __init__(self):
messari_to_dl_dict = utils.get_taxonomy_dict("messari_to_dl.json")
dataloader.DataLoader.__init__(
self, api_dict=None, taxonomy_dict=messari_to_dl_dict
)

@property
def api_urls(self) -> Dict[str, str]:
_endpoint_preamble: str = "https://api.llama.fi"
urls = dict(
# List all protocols on defillama along with their tvl
protocols="/".join([_endpoint_preamble, "protocols"]),
# Get historical TVL of a protocol and breakdowns by token and chain
get_protocol_tvl="/".join([_endpoint_preamble, "protocol", "{_slug}"]),
# Get historical TVL on DeFi on all chains
global_tvl="/".join([_endpoint_preamble, "charts"]),
# Get historical TVL of a chain
chain_tvl="/".join([_endpoint_preamble, "charts", "{_chain}"]),
# Get current TVL of a protocol
current_protocol_tvl="/".join([_endpoint_preamble, "tvl", "{_slug}"]),
# Get current TVL of all chains
all_chains_tvl="/".join([_endpoint_preamble, "chains"]),
)
return urls
# TODO test: Check that the urls aren't broken now.
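
As a quick illustration of the property above, a hedged sketch of how the templated endpoints resolve (the slug and chain values are illustrative, not from this PR):

    # Sketch: placeholders in api_urls are filled with str.format.
    dl = DeFiLlama()
    dl.api_urls["get_protocol_tvl"].format(_slug="aave")
    # -> "https://api.llama.fi/protocol/aave"
    dl.api_urls["chain_tvl"].format(_chain="Ethereum")
    # -> "https://api.llama.fi/charts/Ethereum"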

def get_protocol_tvl_timeseries(
self,
asset_slugs: Union[str, List],
start_date: Union[str, datetime.datetime] = None,
end_date: Union[str, datetime.datetime] = None,
) -> pd.DataFrame:
"""Returns times TVL of a protocol with token amounts as a pandas DataFrame.
Returned DataFrame is indexed by df[protocol][chain][asset].

@@ -59,7 +81,7 @@ def get_protocol_tvl_timeseries(self, asset_slugs: Union[str, List],

slug_df_list: List = []
for slug in slugs:
endpoint_url = DL_GET_PROTOCOL_TVL_URL.substitute(slug=slug)
endpoint_url = self.api_urls["get_protocol_tvl"].format(_slug=slug)
protocol = self.get_response(endpoint_url)

###########################
@@ -95,13 +117,15 @@ def get_protocol_tvl_timeseries(self, asset_slugs: Union[str, List],
chain_tvl_tokens_usd_df = pd.DataFrame(chain_tvl_tokens_usd)

# fix indexes
chain_tvl_df = format_df(chain_tvl_df)
chain_tvl_tokens_df = format_df(chain_tvl_tokens_df)
chain_tvl_tokens_usd_df = format_df(chain_tvl_tokens_usd_df)
chain_tvl_df = helpers.format_df(chain_tvl_df)
chain_tvl_tokens_df = helpers.format_df(chain_tvl_tokens_df)
chain_tvl_tokens_usd_df = helpers.format_df(chain_tvl_tokens_usd_df)
chain_tvl_tokens_usd_df = chain_tvl_tokens_usd_df.add_suffix("_usd")

# concat tokens and tokensInUsd
joint_tokens_df = pd.concat([chain_tvl_tokens_df, chain_tvl_tokens_usd_df], axis=1)
joint_tokens_df = pd.concat(
[chain_tvl_tokens_df, chain_tvl_tokens_usd_df], axis=1
)
# Join total chain TVL w/ token TVL
chain_df = chain_tvl_df.join(joint_tokens_df)
chain_df_list.append(chain_df)
@@ -119,7 +143,7 @@ def get_protocol_tvl_timeseries(self, asset_slugs: Union[str, List],
token[key] = value
token.pop("tokens", None)
tokens_df = pd.DataFrame(tokens)
tokens_df = format_df(tokens_df)
tokens_df = helpers.format_df(tokens_df)

## tokens in USD
tokens_usd = protocol["tokensInUsd"]
@@ -128,13 +152,13 @@ def get_protocol_tvl_timeseries(self, asset_slugs: Union[str, List],
token[key] = value
token.pop("tokens", None)
tokens_usd_df = pd.DataFrame(tokens_usd)
tokens_usd_df = format_df(tokens_usd_df)
tokens_usd_df = helpers.format_df(tokens_usd_df)
tokens_usd_df = tokens_usd_df.add_suffix("_usd")

# Get total tvl across chains
tvl = protocol["tvl"]
total_tvl_df = pd.DataFrame(tvl)
total_tvl_df = format_df(total_tvl_df)
total_tvl_df = helpers.format_df(total_tvl_df)

# Working
joint_tokens_df = pd.concat([tokens_df, tokens_usd_df], axis=1)
@@ -150,11 +174,16 @@ def get_protocol_tvl_timeseries(self, asset_slugs: Union[str, List],
total_slugs_df = pd.concat(slug_df_list, keys=slugs, axis=1)
total_slugs_df.sort_index(inplace=True)

total_slugs_df = time_filter_df(total_slugs_df, start_date=start_date, end_date=end_date)
total_slugs_df = utils.time_filter_df(
total_slugs_df, start_date=start_date, end_date=end_date
)
return total_slugs_df
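
A hedged usage sketch for the method above (the slug, dates, and column keys are illustrative; the df[protocol][chain][asset] shape is per the docstring):

    # Sketch: fetch one protocol and slice the multi-indexed columns.
    dl = DeFiLlama()
    df = dl.get_protocol_tvl_timeseries(
        "aave", start_date="2021-01-01", end_date="2021-06-01"
    )
    df["aave"]  # per-chain TVL and token columns for the slug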

def get_global_tvl_timeseries(self, start_date: Union[str, datetime.datetime] = None,
end_date: Union[str, datetime.datetime] = None) -> pd.DataFrame:
def get_global_tvl_timeseries(
self,
start_date: Union[str, datetime.datetime] = None,
end_date: Union[str, datetime.datetime] = None,
) -> pd.DataFrame:
"""Returns timeseries TVL from total of all Defi Llama supported protocols

Parameters
@@ -170,15 +199,20 @@ def get_global_tvl_timeseries(self, start_date: Union[str, datetime.datetime] =
DataFrame
DataFrame containing aggregate timeseries TVL data across all protocols
"""
global_tvl = self.get_response(DL_GLOBAL_TVL_URL)
global_tvl = self.get_response(self.api_urls["global_tvl"])
global_tvl_df = pd.DataFrame(global_tvl)
global_tvl_df = format_df(global_tvl_df)
global_tvl_df = time_filter_df(global_tvl_df, start_date=start_date, end_date=end_date)
global_tvl_df = helpers.format_df(global_tvl_df)
global_tvl_df = utils.time_filter_df(
global_tvl_df, start_date=start_date, end_date=end_date
)
return global_tvl_df
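
Likewise, a minimal sketch for the global series (dates illustrative):

    # Sketch: one aggregate TVL series across all protocols, indexed by date.
    dl = DeFiLlama()
    global_df = dl.get_global_tvl_timeseries(
        start_date="2021-01-01", end_date="2021-06-01"
    )
    global_df.tail()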

def get_chain_tvl_timeseries(self, chains_in: Union[str, List],
start_date: Union[str, datetime.datetime] = None,
end_date: Union[str, datetime.datetime] = None) -> pd.DataFrame:
def get_chain_tvl_timeseries(
self,
chains_in: Union[str, List],
start_date: Union[str, datetime.datetime] = None,
end_date: Union[str, datetime.datetime] = None,
) -> pd.DataFrame:
"""Retrive timeseries TVL for a given chain

Parameters
@@ -197,20 +231,22 @@ def get_chain_tvl_timeseries(self, chains_in: Union[str, List],
DataFrame
DataFrame containing timeseries tvl data for each chain
"""
chains = validate_input(chains_in)
chains = utils.validate_input(chains_in)

chain_df_list = []
for chain in chains:
endpoint_url = DL_CHAIN_TVL_URL.substitute(chain=chain)
endpoint_url = self.api_urls["chain_tvl"].format(_chain=chain)
response = self.get_response(endpoint_url)
chain_df = pd.DataFrame(response)
chain_df = format_df(chain_df)
chain_df = helpers.format_df(chain_df)
chain_df_list.append(chain_df)

# Join DataFrames from each chain & return
chains_df = pd.concat(chain_df_list, axis=1)
chains_df.columns = chains
chains_df = time_filter_df(chains_df, start_date=start_date, end_date=end_date)
chains_df = utils.time_filter_df(
chains_df, start_date=start_date, end_date=end_date
)
return chains_df
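
A hedged sketch for per-chain TVL (chain names illustrative):

    # Sketch: one column per requested chain, indexed by date.
    dl = DeFiLlama()
    chains_df = dl.get_chain_tvl_timeseries(["Ethereum", "Solana"])
    chains_df.columns  # -> ["Ethereum", "Solana"]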

def get_current_tvl(self, asset_slugs: Union[str, List]) -> Dict:
@@ -226,11 +262,11 @@ def get_current_tvl(self, asset_slugs: Union[str, List]) -> Dict:
Dict
Dictionary of TVL values indexed by slug {slug: tvl, ...}
"""
slugs = validate_input(asset_slugs)
slugs = utils.validate_input(asset_slugs)

tvl_dict = {}
for slug in slugs:
endpoint_url = DL_CURRENT_PROTOCOL_TVL_URL.substitute(slug=slug)
endpoint_url = self.api_urls["current_protocol_tvl"].format(_slug=slug)
tvl = self.get_response(endpoint_url)
if isinstance(tvl, float):
tvl_dict[slug] = tvl
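
A hedged usage sketch for current TVL (slugs illustrative; the non-float branch sits in the collapsed hunk below):

    # Sketch: current TVL keyed by slug.
    dl = DeFiLlama()
    dl.get_current_tvl(["aave", "curve"])
    # -> {"aave": <float>, "curve": <float>}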
@@ -250,7 +286,7 @@ def get_protocols(self) -> pd.DataFrame:
DataFrame
DataFrame with one column per DeFi Llama supported protocol
"""
protocols = self.get_response(DL_PROTOCOLS_URL)
protocols = self.get_response(self.api_urls["protocols"])

protocol_dict = {}
for protocol in protocols:
26 changes: 13 additions & 13 deletions pycaw/messari/defillama/helpers.py → pycaw/defillama/helpers.py
@@ -1,4 +1,8 @@
"""This module is dedicated to helpers for the DeFiLlama class"""
"""A module with helper functions for the DeFiLlama class

Methods:
format_df: Sets the date index and drops duplicate dates.
"""


import pandas as pd
@@ -7,27 +11,23 @@
def format_df(df_in: pd.DataFrame) -> pd.DataFrame:
"""format a typical DF from DL, replace date & drop duplicates

Parameters
----------
df_in: pd.DataFrame
input DataFrame
Args:
df_in (pd.DataFrame): input DataFrame

Returns
-------
DataFrame
formated pandas DataFrame
Returns:
(pd.DataFrame): formatted pandas DataFrame
"""

# set date to index
df_new = df_in
if 'date' in df_in.columns:
df_new.set_index('date', inplace=True)
df_new.index = pd.to_datetime(df_new.index, unit='s', origin='unix')
if "date" in df_in.columns:
df_new.set_index("date", inplace=True)
df_new.index = pd.to_datetime(df_new.index, unit="s", origin="unix")
df_new.index = df_new.index.date

# drop duplicates
# NOTE: sometimes DeFi Llama has duplicate dates, choosing to just keep the last
# NOTE: Data for duplicates is not the same
# TODO: Investigate which data should be kept (currently assuming last is more recent)
df_new = df_new[~df_new.index.duplicated(keep='last')]
df_new = df_new[~df_new.index.duplicated(keep="last")]
return df_new
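
A hedged sketch of format_df on a typical payload (the column name and values are illustrative):

    # Sketch: a unix-seconds "date" column becomes the index; duplicate
    # dates keep the last row, per the NOTE above.
    import pandas as pd

    raw = pd.DataFrame(
        {"date": [1609459200, 1609459200], "totalLiquidityUSD": [1.0, 2.0]}
    )
    out = format_df(raw)
    # out.index == [datetime.date(2021, 1, 1)]; out["totalLiquidityUSD"] == [2.0]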
6 changes: 0 additions & 6 deletions pycaw/etherscan/etherscan_connector.py
@@ -26,12 +26,6 @@ def _validate_timestamp_format(self,
timestamp: Union[int, str, pd.Timestamp]):
raise NotImplementedError() # TODO

"""
@tenacity.retry(stop=tenacity.stop_after_attempt(3),
wait=tenacity.wait_exponential(min=0.1, max=5, multiplier=2))
@ratelimit.sleep_and_retry
@ratelimit.limits(calls=30, period=1) # period (float) is in seconds.
"""
def run_query(self, query: str, rate_limit: bool = True) -> Dict[str, Any]:
"""Func is wrapped with some ultimate limiters to ensure this method is
never called too much. However, the batch-call function should also
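
For reference, a sketch of how the deleted comment block would attach if it were ever reinstated (decorators copied from the removed comment; tenacity and ratelimit are the libraries it names):

    # Sketch: the rate limiters the removed comment proposed for run_query.
    import ratelimit
    import tenacity

    @tenacity.retry(stop=tenacity.stop_after_attempt(3),
                    wait=tenacity.wait_exponential(min=0.1, max=5, multiplier=2))
    @ratelimit.sleep_and_retry
    @ratelimit.limits(calls=30, period=1)  # period (float) is in seconds.
    def run_query(self, query, rate_limit=True):
        ...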
1 change: 1 addition & 0 deletions pycaw/tests/__init__.py
@@ -1,3 +1,4 @@
"""Tests for the pycaw package."""
import dotenv

dotenv.load_dotenv()
71 changes: 71 additions & 0 deletions pycaw/tests/cmc_test.py
@@ -0,0 +1,71 @@
#!/usr/bin/env python

import os
import json
import pytest
from pycaw import cmc
from typing import Any, Dict, List, Union


class TestCoinMarketCapAPI:
@pytest.fixture
def cmc_api(self) -> cmc.CoinMarketCapAPI:
return cmc.CoinMarketCapAPI()

def test_cmc_id_map(self, cmc_api: cmc.CoinMarketCapAPI):
symbols: List[str] = ["BTC", "ETH"]
cmc_id_maps: List[dict] = cmc_api.cmc_id_map(symbols=symbols)
assert isinstance(cmc_id_maps, list)
assert isinstance(cmc_id_maps[0], dict)
assert all([k in cmc_id_maps[0].keys() for k in ["id", "slug", "name"]])

@pytest.fixture
def cmc_id_maps(self) -> List[dict]:
return [
{
"id": 1,
"name": "Bitcoin",
"symbol": "BTC",
"slug": "bitcoin",
"rank": 1,
"is_active": 1,
"first_historical_data": "2013-04-28T18:47:21.000Z",
"last_historical_data": "2021-11-19T00:59:02.000Z",
"platform": None,
},
{
"id": 1027,
"name": "Ethereum",
"symbol": "ETH",
"slug": "ethereum",
"rank": 2,
"is_active": 1,
"first_historical_data": "2015-08-07T14:49:30.000Z",
"last_historical_data": "2021-11-19T00:59:02.000Z",
"platform": None,
},
]

def test_save_cmc_id_maps(
self, cmc_api: cmc.CoinMarketCapAPI, cmc_id_maps: List[dict]
):
"""Tests whether the CMC ID Map query saves correctly."""

temp_filename: str = "temp-foo.json"
temp_save_path = temp_filename

assert not os.path.exists(temp_save_path)
cmc_api._save_cmc_id_maps(cmc_id_maps=cmc_id_maps, filename=temp_filename)
with open(temp_save_path, mode="r") as f:
saved_cmc_id_maps: List[dict] = json.load(f)
assert isinstance(saved_cmc_id_maps, list)
assert len(saved_cmc_id_maps) == 2
assert all(
[
[k in dict_.keys() for k in ["id", "name", "symbol"]]
for dict_ in saved_cmc_id_maps
]
)

os.remove(temp_save_path)
assert not os.path.exists(temp_save_path)
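
A hedged sketch of the API surface these tests exercise (symbols from the fixture; the return shape follows the assertions above):

    # Sketch: cmc_id_map returns a list of dicts with id/name/symbol/slug keys.
    from pycaw import cmc

    api = cmc.CoinMarketCapAPI()
    id_maps = api.cmc_id_map(symbols=["BTC", "ETH"])
    id_maps[0]["slug"]  # e.g. "bitcoin"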