Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 19 additions & 3 deletions examples/algorithms/test_algo/test_algo.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,24 +3,40 @@

import numpy as np
import structlog
from pydantic import BaseModel

from ziplime.config.base_algorithm_config import BaseAlgorithmConfig
from ziplime.domain.bar_data import BarData
from ziplime.finance.execution import MarketOrder
from ziplime.trading.trading_algorithm import TradingAlgorithm

logger = structlog.get_logger(__name__)


class EquityToTrade(BaseModel):
symbol: str
target_percentage: float


class AlgorithmConfig(BaseAlgorithmConfig):
currency: str
equities_to_trade: list[EquityToTrade]


async def initialize(context):
context.assets = [
await context.symbol("META"),
await context.symbol("AAPL"),
await context.symbol("AMZN"),
await context.symbol("NFLX"),
await context.symbol("GOOGL")
]
# read config file
logger.info("Algorithm config: ", config=context.algorithm.config)


async def handle_data(context, data):
num_assets = len(context.assets)
target_percent = 1.0 / num_assets
for asset in context.assets:
await context.order_target_percent(asset=asset, target=target_percent, style=MarketOrder())

await context.order_target_percent(asset=asset,
target=target_percent, style=MarketOrder())
19 changes: 0 additions & 19 deletions examples/ingest_assets_data_grpc.py

This file was deleted.

41 changes: 0 additions & 41 deletions examples/ingest_data_grpc.py

This file was deleted.

2 changes: 1 addition & 1 deletion examples/ingest_data_limex_hub.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ async def ingest_data_limex_hub():
# STEP 1: Define symbols, date range and frequency of the data that we are going to ingest
symbols = ["META", "AAPL", "AMZN", "NFLX", "GOOGL"]
start_date = datetime.datetime(year=2025, month=1, day=1, tzinfo=datetime.timezone.utc)
end_date = datetime.datetime(year=2025, month=2, day=27, tzinfo=datetime.timezone.utc)
end_date = datetime.datetime(year=2025, month=10, day=27, tzinfo=datetime.timezone.utc)
data_frequency = datetime.timedelta(minutes=1)
# STEP 2: Initialize market data source and data bundle source - LimexHub
market_data_bundle_source = LimexHubDataSource.from_env()
Expand Down
9 changes: 3 additions & 6 deletions examples/run_simulation_daily.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,7 @@

from ziplime.core.ingest_data import get_asset_service
from ziplime.core.run_simulation import run_simulation
from ziplime.data.services.bundle_service import BundleService
from ziplime.data.services.file_system_bundle_registry import FileSystemBundleRegistry
from ziplime.finance.commission import PerShare, DEFAULT_PER_SHARE_COST, DEFAULT_MINIMUM_COST_PER_EQUITY_TRADE, \
PerContract, DEFAULT_PER_CONTRACT_COST, EquityCommissionModel
from ziplime.finance.commission import PerShare, DEFAULT_PER_SHARE_COST, DEFAULT_MINIMUM_COST_PER_EQUITY_TRADE

logger = structlog.get_logger(__name__)

Expand Down Expand Up @@ -55,8 +52,8 @@ async def _run_simulation():
)

custom_data_sources = []
custom_data_sources.append(
await bundle_service.load_bundle(bundle_name="limex_us_fundamental_data", bundle_version=None))
# custom_data_sources.append(
# await bundle_service.load_bundle(bundle_name="limex_us_fundamental_data", bundle_version=None))

equity_commission = PerShare(
cost=DEFAULT_PER_SHARE_COST,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,9 @@
import polars as pl
import structlog

from ziplime.gens.domain.single_execution_clock import SingleExecutionClock
from ziplime.utils.bundle_utils import get_bundle_service
from ziplime.utils.calendar_utils import get_calendar
from ziplime.utils.logging_utils import configure_logging

from pathlib import Path
Expand All @@ -14,27 +17,22 @@

from ziplime.core.ingest_data import get_asset_service
from ziplime.core.run_simulation import run_simulation
from ziplime.data.services.bundle_service import BundleService
from ziplime.data.services.file_system_bundle_registry import FileSystemBundleRegistry
from ziplime.finance.commission import PerShare, DEFAULT_PER_SHARE_COST, DEFAULT_MINIMUM_COST_PER_EQUITY_TRADE

logger = structlog.get_logger(__name__)


async def _run_simulation():
bundle_storage_path = str(Path(Path.home(), ".ziplime", "data"))
bundle_registry = FileSystemBundleRegistry(base_data_path=bundle_storage_path)
bundle_service = BundleService(bundle_registry=bundle_registry)
start_date = datetime.datetime(year=2025, month=1, day=3, tzinfo=pytz.timezone("America/New_York"))
end_date = datetime.datetime(year=2025, month=2, day=1, tzinfo=pytz.timezone("America/New_York"))
emission_rate = datetime.timedelta(days=1)
bundle_service = get_bundle_service()

asset_service = get_asset_service(
clear_asset_db=False,
# db_path=str(pathlib.Path(__file__).parent.parent.resolve().joinpath("data", "assets.sqlite"))
db_path=str(pathlib.Path(__file__).parent.parent.resolve().joinpath("data", "assets.sqlite"))
)
symbols = ["META", "AAPL", "AMZN", "NFLX", "GOOGL", "VXX"]
ny = pytz.timezone("America/New_York")
start_local = datetime.datetime(2025, 9, 1, 0, 0) # 2025-09-01 00:00 local clock time
    end_local = datetime.datetime(2025, 9, 17, 0, 0)  # 2025-09-17 00:00 local clock time

start_date = ny.localize(start_local) # Correct: EDT (UTC-04:00)
end_date = ny.localize(end_local)
    # Use aggregations if you ingested data of frequency less than 1 day
aggregations = [
pl.col("open").first(),
pl.col("high").max(),
Expand All @@ -43,18 +41,33 @@ async def _run_simulation():
pl.col("volume").sum(),
pl.col("symbol").last()
]
market_data_bundle = await bundle_service.load_bundle(bundle_name="grpc_daily_data",
market_data_bundle = await bundle_service.load_bundle(bundle_name="limex_us_minute_data",
bundle_version=None,
frequency=datetime.timedelta(days=1),
start_date=start_date,
end_date=end_date + datetime.timedelta(days=1),
symbols=symbols,
aggregations=aggregations
end_date=end_date,
symbols=["META", "AAPL", "AMZN", "NFLX", "GOOGL",
],
start_auction_delta=datetime.timedelta(minutes=15),
end_auction_delta=datetime.timedelta(minutes=15),
aggregations=aggregations,
)

custom_data_sources = []
custom_data_sources.append(
await bundle_service.load_bundle(bundle_name="limex_us_fundamental_data", bundle_version=None))

# By default, SimulationExchange with LIME name is used
equity_commission = PerShare(
cost=DEFAULT_PER_SHARE_COST,
min_trade_cost=DEFAULT_MINIMUM_COST_PER_EQUITY_TRADE,

)
clock = SingleExecutionClock(
trading_calendar=get_calendar("NYSE"),
start_date=start_date,
end_date=end_date,
emission_rate=emission_rate,
)
# run daily simulation
res, errors = await run_simulation(
start_date=start_date,
Expand All @@ -63,15 +76,17 @@ async def _run_simulation():
algorithm_file=str(Path("algorithms/test_algo/test_algo.py").absolute()),
total_cash=100000.0,
market_data_source=market_data_bundle,
custom_data_sources=[],
custom_data_sources=custom_data_sources,
config_file=str(Path("algorithms/test_algo/test_algo_config.json").absolute()),
emission_rate=datetime.timedelta(days=1),
benchmark_asset_symbol="VXX",
emission_rate=emission_rate,
benchmark_asset_symbol="AAPL",
benchmark_returns=None,
stop_on_error=False,
stop_on_error=True,
asset_service=asset_service,
# default_exchange_name="BATS"
equity_commission=equity_commission,
clock=clock
)

if errors:
logger.error(errors)
print(res.head(n=10).to_markdown())
Expand Down
1 change: 1 addition & 0 deletions examples/run_simulation_minute.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ async def _run_simulation():

# run daily simulation
res, errors = await run_simulation(
asset_service=asset_service,
start_date=start_date,
end_date=end_date,
trading_calendar="NYSE",
Expand Down
1 change: 1 addition & 0 deletions examples/run_simulation_minute_custom_csv_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ async def _run_simulation():
benchmark_asset_symbol="SPX",
benchmark_returns=None,
stop_on_error=False,
asset_service=asset_service
)

if errors:
Expand Down
Loading