Python

pip install velodata

GitHub, PyPI

Examples

Quick Start
from velodata import lib as velo

# new velo client
client = velo.client('api_key')

# get futures and pick one
future = client.get_futures()[0] 

# get futures columns and pick two
columns = client.get_futures_columns()[:2]

# last 10 minutes in 1 minute resolution (11 minute lookback)
params = {
      'type': 'futures',
      'columns': columns,
      'exchanges': [future['exchange']],
      'products': [future['product']],
      'begin': client.timestamp() - 1000 * 60 * 11,
      'end': client.timestamp(),
      'resolution': '1m'
    }
    
# returns dataframe
print(client.get_rows(params))

Aggregated Spot CVD
from velodata import lib as velo

day_in_ms = 1000 * 60 * 60 * 24

# new velo client
client = velo.client('api_key')

# from one day ago in 10 minute resolution
params = {
      'type': 'spot',
      'columns': ['buy_dollar_volume', 'sell_dollar_volume'],
      'exchanges': ['coinbase', 'binance', 'bybit-spot'],
      'products': ['ETHUSDT', 'ETH-USD'],
      'begin': client.timestamp() - day_in_ms,
      'end': client.timestamp(),
      'resolution': '10m'
    }
    
# returns dataframe
df = client.get_rows(params)

# aggregate all exchanges
df = df.groupby(df['time']).sum(numeric_only=True)

# compute volume delta
df['delta'] = df['buy_dollar_volume'] - df['sell_dollar_volume']

# cumulative
print(df['delta'].cumsum())

OI-Weighted Funding Rate
from velodata import lib as velo

hour_in_ms = 1000 * 60 * 60

# new velo client
client = velo.client('api_key')

# from one hour ago in 1 minute resolution
params = {
      'type': 'futures',
      'columns': ['funding_rate', 'coin_open_interest_close'],
      'exchanges': ['binance-futures', 'bybit', 'okex-swap'],
      'coins': ['SOL'],
      'begin': client.timestamp() - hour_in_ms,
      'end': client.timestamp(),
      'resolution': '1m'
    }
    
# returns dataframe
df = client.get_rows(params)

# oi-weighted funding = SUM(funding*OI) / SUM(OI)
df['funding_rate'] = df['funding_rate'] * df['coin_open_interest_close']
df = df.groupby(df['time']).sum(numeric_only=True)
df['funding_rate'] = df['funding_rate'] / df['coin_open_interest_close']

print(df['funding_rate'])

Spot-Vol Correlation
from velodata import lib as velo

day_in_ms = 1000 * 60 * 60 * 24

# new velo client
client = velo.client('api_key')

# from 5 days ago in 1 hour resolution
params = {
      'type': 'options',
      'columns': ['dvol_close', 'index_price'],
      'exchanges': ['deribit'],
      'products': ['BTC'],
      'begin': client.timestamp() - day_in_ms * 5,
      'end': client.timestamp(),
      'resolution': '1h'
    }
    
# returns dataframe
df = client.get_rows(params)

# simple rolling 24 hour correlation
print(
df['dvol_close'].pct_change().rolling(24).corr(df['index_price'].pct_change())
)

Futures Basis

Special case: `3m_basis_ann` is available for BTC and ETH. To request this data, specify `coins` only; do not include products, exchanges, or any additional columns.

from velodata import lib as velo

day_in_ms = 1000 * 60 * 60 * 24

# new velo client
client = velo.client('api_key')

# from 5 days ago in 1 hour resolution
params = {
      'type': 'futures',
      'columns': ['3m_basis_ann'],
      'coins': ['BTC', 'ETH'],
      'begin': client.timestamp() - day_in_ms * 5,
      'end': client.timestamp(),
      'resolution': '1h'
    }
    
# returns dataframe
print(client.get_rows(params))

Large Request

Both get_rows and stream_rows handle the batching of large requests automatically; use stream_rows if you wish to receive each batch response as it completes rather than in one final DataFrame.

from velodata import lib as velo

day_in_ms = 1000 * 60 * 60 * 24

# new velo client
client = velo.client('api_key')

# from 5 days ago in 1 minute resolution
# 2 columns * 4 products * 1 exchange * 7200 rows = 57600 values
params = {
      'type': 'futures',
      'columns': ['open_price', 'close_price'],
      'exchanges': ['binance-futures'],
      'products': ['LTCUSDT', 'ETCUSDT', 'BCHUSDT', 'SOLUSDT'],
      'begin': client.timestamp() - day_in_ms * 5,
      'end': client.timestamp(),
      'resolution': '1m'
    }
    
# creates 3 batches (57600 values / 22500 limit)
batches = client.batch_rows(params)  

# prints each batch as it finishes
for df in client.stream_rows(batches):
  print(df) 

Helper Methods

  • Key status: get_status()

  • Supported futures: get_futures()

  • Supported options: get_options()

  • Supported spot pairs: get_spot()

  • Supported futures columns: get_futures_columns()

  • Supported options columns: get_options_columns()

  • Supported spot columns: get_spot_columns()

  • Millisecond timestamp: timestamp()
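
Taken together, these helpers can be used to check your key and discover valid markets and columns before building `params`. A minimal sketch using only the method names listed above (return shapes omitted):

from velodata import lib as velo

client = velo.client('api_key')

# check that the key is valid
print(client.get_status())

# list supported markets for each type
futures = client.get_futures()
options = client.get_options()
spot = client.get_spot()

# list the columns each type supports
print(client.get_futures_columns())
print(client.get_options_columns())
print(client.get_spot_columns())

# current millisecond timestamp, used for begin/end
print(client.timestamp())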


Data Methods

Get rows

get_rows(params)

  • type: 'futures', 'options', or 'spot'

  • exchanges, products, coins, columns: lists

  • begin, end: millisecond timestamps

  • resolution: minutes (integer) or a resolution string (e.g. '1m', '1h')

Returns DataFrame

If both `coins` and `products` are specified, only `products` will be used
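
As a sketch of the parameter variants above, the request below keys on `coins` and uses an integer-minute resolution; the values are illustrative, and per the note above, adding `products` here would cause `coins` to be ignored.

from velodata import lib as velo

client = velo.client('api_key')

# last hour of BTC futures closes, keyed on coins rather than products
params = {
      'type': 'futures',
      'columns': ['close_price'],
      'exchanges': ['binance-futures'],
      'coins': ['BTC'],
      'begin': client.timestamp() - 1000 * 60 * 60,
      'end': client.timestamp(),
      'resolution': 5  # integer minutes; a string such as '5m' is also accepted
    }

# returns dataframe
print(client.get_rows(params))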

Batch rows

batch_rows(params)

  • type: 'futures', 'options', or 'spot'

  • exchanges, products, coins, columns: lists

  • begin, end: millisecond timestamps

  • resolution: minutes (integer) or a resolution string (e.g. '1m', '1h')

Returns list for use in stream_rows

If both `coins` and `products` are specified, only `products` will be used

Stream rows

stream_rows(batches)

  • batches: list returned from batch_rows

Yields DataFrame

Get options term structure

get_term_structure(coins)

  • coins: list

Returns DataFrame

Latest values only
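
A minimal usage sketch based on the signature above; the coin symbols are illustrative.

from velodata import lib as velo

client = velo.client('api_key')

# latest term structure for the requested coins, as a DataFrame
term = client.get_term_structure(['BTC', 'ETH'])
print(term)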

Get market caps

get_market_caps(coins)

  • coins: list

Returns DataFrame

Latest values only
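
Likewise, a minimal usage sketch for market caps; the coin symbols are illustrative.

from velodata import lib as velo

client = velo.client('api_key')

# latest market caps for the requested coins, as a DataFrame
caps = client.get_market_caps(['BTC', 'ETH', 'SOL'])
print(caps)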
