Merge pull request #575 from Lumiwealth/dev
update master
grzesir authored Oct 12, 2024
2 parents 823e63c + 21810c5 commit ddae7b1
Showing 41 changed files with 1,614 additions and 32,119 deletions.
55 changes: 52 additions & 3 deletions lumibot/backtesting/backtesting_broker.py
@@ -67,7 +67,7 @@ def _submit_order(self, order):
"""TODO: Why is this not used for Backtesting, but it is used for real brokers?"""
pass

def _get_balances_at_broker(self, quote_asset):
def _get_balances_at_broker(self, quote_asset, strategy):
"""
Get the balances of the broker
"""
@@ -343,6 +343,13 @@ def _process_filled_order(self, order, price, quantity):
"""
        BackTesting needs to create/update positions when orders are filled because there is no broker to do it
"""
# This is a parent order, typically for a Multileg strategy. The parent order itself is expected to be
# filled after all child orders are filled.
if order.is_parent():
order.avg_fill_price = price
order.quantity = quantity
return super()._process_filled_order(order, price, quantity) # Do not store parent order positions

existing_position = self.get_tracked_position(order.strategy, order.asset)

# Currently perfect fill price in backtesting!
@@ -352,7 +359,7 @@ def _process_filled_order(self, order, price, quantity):
if existing_position:
position.add_order(order, quantity) # Add will update quantity, but not double count the order
if position.quantity == 0:
logging.info("Position %r liquidated" % position)
logging.info(f"Position {position} liquidated")
self._filled_positions.remove(position)
else:
self._filled_positions.append(position) # New position, add it to the tracker
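
For context, a simplified stand-in showing the parent/child shape this branch relies on; is_parent() here is a plausible implementation assuming it keys off child_orders, not the actual lumibot.entities.Order code:

class FakeOrder:  # simplified stand-in, not lumibot.entities.Order
    def __init__(self, child_orders=None):
        self.child_orders = child_orders or []
        self.avg_fill_price = None
        self.quantity = None

    def is_parent(self):
        # Assumed behavior: an order carrying child orders is treated as a multileg parent.
        return len(self.child_orders) > 0

legs = [FakeOrder(), FakeOrder()]
parent = FakeOrder(child_orders=legs)
assert parent.is_parent() and not legs[0].is_parent()
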
@@ -399,10 +406,34 @@ def submit_order(self, order):
)
return order

def submit_orders(self, orders, **kwargs):
def submit_orders(self, orders, is_multileg=False, **kwargs):
results = []
for order in orders:
results.append(self.submit_order(order))

if is_multileg:
            # Each leg uses a different option asset, so just use the base symbol.
symbol = orders[0].asset.symbol
parent_asset = Asset(symbol=symbol)
parent_order = Order(
asset=parent_asset,
strategy=orders[0].strategy,
order_class=Order.OrderClass.MULTILEG,
side=orders[0].side,
quantity=orders[0].quantity,
type=orders[0].type,
tag=orders[0].tag,
status=Order.OrderStatus.SUBMITTED
)

for o in orders:
o.parent_identifier = parent_order.identifier

parent_order.child_orders = orders
self._unprocessed_orders.append(parent_order)
self.stream.dispatch(self.NEW_ORDER, order=parent_order)
return [parent_order]

return results

def cancel_order(self, order):
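
A usage sketch of the new is_multileg path from the caller's side. The option Asset keyword arguments (expiration, strike, right), the concrete strikes and expiry, and the broker argument are assumptions for illustration only:

from datetime import date
from lumibot.entities import Asset, Order

def submit_example_spread(broker, strategy_name="example_spread"):
    # Build two option legs; the expiry, strikes, and right are invented values.
    expiry = date(2024, 12, 20)
    buy_leg = Order(
        strategy=strategy_name,
        asset=Asset("SPY", asset_type="option", expiration=expiry, strike=580, right="CALL"),
        quantity=1,
        side="buy",
        type="market",
    )
    sell_leg = Order(
        strategy=strategy_name,
        asset=Asset("SPY", asset_type="option", expiration=expiry, strike=590, right="CALL"),
        quantity=1,
        side="sell",
        type="market",
    )
    # With is_multileg=True the backtesting broker wraps both legs in one parent order
    # on the bare underlying symbol and returns [parent_order] instead of the legs.
    return broker.submit_orders([buy_leg, sell_leg], is_multileg=True)
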
@@ -571,6 +602,24 @@ def process_pending_orders(self, strategy):
if order.dependent_order_filled or order.status == self.CANCELED_ORDER:
continue

# Multileg parent orders will wait for child orders to fill before processing
if order.is_parent():
# If this is the final fill for a multileg order, mark the parent order as filled
if all([o.is_filled() for o in order.child_orders]):
parent_qty = sum([abs(o.quantity) for o in order.child_orders])
child_prices = [o.get_fill_price() if o.is_buy_order() else -o.get_fill_price()
for o in order.child_orders]
parent_price = sum(child_prices)
self.stream.dispatch(
self.FILLED_ORDER,
wait_until_complete=True,
order=order,
price=parent_price,
filled_quantity=parent_qty,
)

continue

# Check validity if current date > valid date, cancel order. todo valid date
asset = order.asset if order.asset.asset_type != "crypto" else (order.asset, order.quote)

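To make the parent-fill arithmetic above concrete, a small self-contained sketch (mock leg dictionaries only; the numbers are invented): a leg bought at 2.50 and a leg sold at 1.20 net out to a parent fill price of 1.30, and the parent quantity is the sum of the absolute leg quantities.

# Mock legs standing in for filled child orders (illustrative values).
legs = [
    {"side": "buy", "qty": 1, "fill": 2.50},
    {"side": "sell", "qty": 1, "fill": 1.20},
]
parent_qty = sum(abs(leg["qty"]) for leg in legs)                        # 2
parent_price = sum(leg["fill"] if leg["side"] == "buy" else -leg["fill"]
                   for leg in legs)                                      # 2.50 - 1.20 = 1.30 net debit
print(parent_qty, parent_price)
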
100 changes: 52 additions & 48 deletions lumibot/backtesting/thetadata_backtesting.py
@@ -25,12 +25,15 @@ def __init__(
pandas_data=None,
username=None,
password=None,
use_quote_data=True,
**kwargs,
):
super().__init__(datetime_start=datetime_start, datetime_end=datetime_end, pandas_data=pandas_data, **kwargs)

self._username = username
self._password = password
self._username = username
self._password = password
self._use_quote_data = use_quote_data

self.kill_processes_by_name("ThetaTerminal.jar")

def is_weekend(self, date):
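
A sketch of how the new use_quote_data flag might be passed when constructing the data source. The import path, dates, and credential placeholders are assumptions for illustration; other constructor arguments are unchanged by this commit:

from datetime import datetime
from lumibot.backtesting import ThetaDataBacktesting  # import path assumed

data_source = ThetaDataBacktesting(
    datetime_start=datetime(2024, 1, 2),
    datetime_end=datetime(2024, 6, 28),
    username="THETA_USERNAME",     # placeholder credentials
    password="THETA_PASSWORD",
    use_quote_data=False,          # skip the extra quote download; OHLC data only
)
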
@@ -60,7 +63,7 @@ def kill_processes_by_name(self, keyword):
except Exception as e:
print(f"An error occurred during kill process: {e}")

def update_pandas_data(self, asset, quote, length, timestep, start_dt=None):
def _update_pandas_data(self, asset, quote, length, timestep, start_dt=None):
"""
Get asset data and update the self.pandas_data dictionary.
@@ -147,6 +150,7 @@ def update_pandas_data(self, asset, quote, length, timestep, start_dt=None):
try:
# Get ohlc data from ThetaData
date_time_now = self.get_datetime()
df_ohlc = None
df_ohlc = thetadata_helper.get_price_data(
self._username,
self._password,
Expand All @@ -158,38 +162,47 @@ def update_pandas_data(self, asset, quote, length, timestep, start_dt=None):
dt=date_time_now,
datastyle="ohlc"
)
# Get quote data from ThetaData
df_quote = thetadata_helper.get_price_data(
self._username,
self._password,
asset_separated,
start_datetime,
self.datetime_end,
timespan=ts_unit,
quote_asset=quote_asset,
dt=date_time_now,
datastyle="quote"
)

# Check if we have data
if df_ohlc is None or df_quote is None:
if df_ohlc is None:
logging.info(f"\nSKIP: No OHLC data found for {asset_separated} from ThetaData")
return None

# Combine the ohlc and quote data
df = pd.concat([df_ohlc, df_quote], axis=1, join='inner')
if self._use_quote_data:
# Get quote data from ThetaData
df_quote = thetadata_helper.get_price_data(
self._username,
self._password,
asset_separated,
start_datetime,
self.datetime_end,
timespan=ts_unit,
quote_asset=quote_asset,
dt=date_time_now,
datastyle="quote"
)

# Check if we have data
if df_quote is None:
logging.info(f"\nSKIP: No QUOTE data found for {quote_asset} from ThetaData")
return None

# Combine the ohlc and quote data
df = pd.concat([df_ohlc, df_quote], axis=1, join='inner')
else:
df = df_ohlc

except Exception as e:
logging.error(traceback.format_exc())
raise Exception("Error getting data from ThetaData") from e

if df is None:
if df is None or df.empty:
return None

pandas_data = []
data = Data(asset_separated, df, timestep=ts_unit, quote=quote_asset)
pandas_data.append(data)
pandas_data_updated = self._set_pandas_data_keys(pandas_data)

return pandas_data_updated
pandas_data_update = self._set_pandas_data_keys([data])
if pandas_data_update is not None:
# Add the keys to the self.pandas_data dictionary
self.pandas_data.update(pandas_data_update)
self._data_store.update(pandas_data_update)


def _pull_source_symbol_bars(
self,
@@ -201,11 +214,11 @@ def _pull_source_symbol_bars(
exchange=None,
include_after_hours=True,
):
pandas_data_update = self.update_pandas_data(asset, quote, length, timestep)

if pandas_data_update is not None:
# Add the keys to the self.pandas_data dictionary
self.pandas_data.update(pandas_data_update)
try:
dt = self.get_datetime()
self._update_pandas_data(asset, quote, 1, timestep, dt)
except Exception as e:
logging.error(f"\nERROR: _pull_source_symbol_bars from ThetaData: {e}, {dt}, asset:{asset}")

return super()._pull_source_symbol_bars(
asset, length, timestep, timeshift, quote, exchange, include_after_hours
@@ -222,10 +235,7 @@ def get_historical_prices_between_dates(
start_date=None,
end_date=None,
):
pandas_data_update = self.update_pandas_data(asset, quote, 1, timestep)
if pandas_data_update is not None:
# Add the keys to the self.pandas_data dictionary
self.pandas_data.update(pandas_data_update)
self._update_pandas_data(asset, quote, 1, timestep)

response = super()._pull_source_symbol_bars_between_dates(
asset, timestep, quote, exchange, include_after_hours, start_date, end_date
@@ -240,25 +250,19 @@ def get_last_price(self, asset, timestep="minute", quote=None, exchange=None, **kwargs):
def get_last_price(self, asset, timestep="minute", quote=None, exchange=None, **kwargs):
try:
dt = self.get_datetime()
pandas_data_update = self.update_pandas_data(asset, quote, 1, timestep, dt)
if pandas_data_update is not None:
# Add the keys to the self.pandas_data dictionary
self.pandas_data.update(pandas_data_update)
self._data_store.update(pandas_data_update)
self._update_pandas_data(asset, quote, 1, timestep, dt)
except Exception as e:
logging.info(f"\nError get_last_price from ThetaData: {e}, {dt}, asset:{asset}")
logging.error(f"\nERROR: get_last_price from ThetaData: {e}, {dt}, asset:{asset}")

return super().get_last_price(asset=asset, quote=quote, exchange=exchange)

def get_quote(self, asset, timestep="minute", quote=None, exchange=None, **kwargs):
try:
dt = self.get_datetime()
pandas_data_update = self.update_pandas_data(asset, quote, 1, timestep, dt)
if pandas_data_update is not None:
# Add the keys to the self.pandas_data dictionary
self.pandas_data.update(pandas_data_update)
self._data_store.update(pandas_data_update)
self._update_pandas_data(asset, quote, 1, timestep, dt)
except Exception as e:
logging.info(f"\nError get_quote from ThetaData: {e}, {dt}, asset:{asset}")
            logging.error(f"\nERROR: get_quote from ThetaData: {e}, {dt}, asset:{asset}")

return super().get_quote(asset=asset, quote=quote, exchange=exchange)

def get_chains(self, asset):
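For reference, a minimal illustration of what the join='inner' concat above does when quote data is enabled: only timestamps present in both the OHLC and quote frames survive (toy data, not real ThetaData output):

import pandas as pd

idx_ohlc = pd.to_datetime(["2024-10-01 09:30", "2024-10-01 09:31", "2024-10-01 09:32"])
idx_quote = pd.to_datetime(["2024-10-01 09:31", "2024-10-01 09:32"])
df_ohlc = pd.DataFrame({"open": [1.0, 1.1, 1.2], "close": [1.1, 1.2, 1.3]}, index=idx_ohlc)
df_quote = pd.DataFrame({"bid": [1.05, 1.15], "ask": [1.15, 1.25]}, index=idx_quote)

# Inner join keeps only the two overlapping minutes, mirroring the combination step above.
df = pd.concat([df_ohlc, df_quote], axis=1, join="inner")
print(df)
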
4 changes: 2 additions & 2 deletions lumibot/brokers/alpaca.py
@@ -236,7 +236,7 @@ def get_time_to_close(self):

# =========Positions functions==================

def _get_balances_at_broker(self, quote_asset):
def _get_balances_at_broker(self, quote_asset, strategy):
"""Get's the current actual cash, positions value, and total
liquidation value from Alpaca.
@@ -248,7 +248,7 @@ def _get_balances_at_broker(self, quote_asset):
tuple of float
(cash, positions_value, total_liquidation_value)
"""

response = self.api.get_account()
total_cash_value = float(response.cash)
gross_positions_value = float(response.long_market_value) - float(response.short_market_value)
Expand Down