Skip to content

Commit 5a2c483

Browse files
committed
feat(python): support datetime inputs for replay
1 parent 2aca0ec commit 5a2c483

3 files changed

Lines changed: 100 additions & 14 deletions

File tree

README.md

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,9 @@
66

77
<br/>
88

9-
Python `tardis-dev` package provides convenient access to tick-level historical cryptocurrency market data in exchange-native format. It focuses on two primary workflows: replaying historical market data and downloading CSV datasets. If you need normalized replay or real-time streaming, use the Node.js client or Tardis Machine.
9+
Python `tardis-dev` package provides convenient access to tick-level historical cryptocurrency market data in exchange-native format. It focuses on two primary workflows: replaying historical market data and downloading historical market data as CSV files. If you need normalized replay or real-time streaming, use the Node.js client or Tardis Machine.
10+
11+
`replay()` accepts ISO date strings or Python `datetime` values. Naive datetimes are treated as UTC.
1012

1113
<br/>
1214

@@ -37,7 +39,7 @@ asyncio.run(main())
3739

3840
<br/>
3941

40-
- [CSV dataset downloads](https://docs.tardis.dev/downloadable-csv-files/overview)
42+
- [historical market data downloads as CSV files](https://docs.tardis.dev/python-client/quickstart#csv-dataset-downloads)
4143

4244
<br/>
4345

@@ -64,7 +66,7 @@ pip install tardis-dev
6466

6567
- [Quickstart](https://docs.tardis.dev/python-client/quickstart)
6668
- [Replaying Historical Data](https://docs.tardis.dev/python-client/replaying-historical-data)
67-
- [Downloadable CSV files](https://docs.tardis.dev/downloadable-csv-files/overview)
69+
- [CSV Dataset Downloads](https://docs.tardis.dev/python-client/quickstart#csv-dataset-downloads)
6870
- [Migration Notice](https://docs.tardis.dev/python-client/migration-notice)
6971

7072
<br/>

tardis_dev/replay.py

Lines changed: 18 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,8 @@ class Response(NamedTuple):
2929

3030
async def replay(
3131
exchange: str,
32-
from_date: str,
33-
to_date: str,
32+
from_date: Union[str, datetime],
33+
to_date: Union[str, datetime],
3434
filters: Optional[Sequence[Channel]] = None,
3535
*,
3636
api_key: str = "",
@@ -310,20 +310,28 @@ def _validate_replay_args(
310310
raise ValueError(f"Invalid 'symbols[]' argument: {channel.symbols}. Please provide list of symbol strings.")
311311

312312

313-
def _parse_date(name: str, value: str) -> datetime:
313+
def _parse_date(name: str, value: Union[str, datetime]) -> datetime:
314314
if value is None:
315315
raise ValueError(
316316
f"Invalid '{name}' argument: {value}. Please provide valid ISO date string. "
317317
"https://docs.python.org/3/library/datetime.html#datetime.date.fromisoformat"
318318
)
319319

320-
try:
321-
return dateutil.parser.isoparse(value)
322-
except (TypeError, ValueError) as exc:
323-
raise ValueError(
324-
f"Invalid '{name}' argument: {value}. Please provide valid ISO date string. "
325-
"https://docs.python.org/3/library/datetime.html#datetime.date.fromisoformat"
326-
) from exc
320+
if isinstance(value, datetime):
321+
parsed = value
322+
else:
323+
try:
324+
parsed = dateutil.parser.isoparse(value)
325+
except (TypeError, ValueError) as exc:
326+
raise ValueError(
327+
f"Invalid '{name}' argument: {value}. Please provide valid ISO date string. "
328+
"https://docs.python.org/3/library/datetime.html#datetime.date.fromisoformat"
329+
) from exc
330+
331+
if parsed.tzinfo is None:
332+
return parsed.replace(tzinfo=timezone.utc)
333+
334+
return parsed.astimezone(timezone.utc)
327335

328336

329337
def _get_slice_cache_path(

tests/test_replay.py

Lines changed: 77 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import importlib
22
import gzip
3-
from datetime import datetime
3+
from datetime import datetime, timedelta, timezone
44
from pathlib import Path
55

66
import pytest
@@ -128,6 +128,82 @@ def test_replay_formats_query_date_as_utc_z():
128128
assert _format_replay_query_date(datetime.fromisoformat("2019-05-01T00:00:00+00:00")) == "2019-05-01T00:00:00.000Z"
129129

130130

131+
@pytest.mark.asyncio
132+
async def test_replay_accepts_naive_datetime_inputs_as_utc(monkeypatch, tmp_path: Path):
133+
cache_dir = tmp_path / "cache"
134+
filters = _live_replay_filters()
135+
captured = {}
136+
137+
slice_path = Path(
138+
_get_slice_cache_path(
139+
str(cache_dir),
140+
LIVE_REPLAY_EXCHANGE,
141+
datetime(2019, 5, 1, 0, 0, tzinfo=timezone.utc),
142+
filters,
143+
)
144+
)
145+
slice_path.parent.mkdir(parents=True, exist_ok=True)
146+
147+
with gzip.open(slice_path, "wb") as file:
148+
file.write(b'2019-05-01T00:00:00.0000000Z {"table":"trade","action":"partial","data":[{"symbol":"BTCUSD"}]}\n')
149+
150+
async def fake_fetch_data_to_replay(**kwargs):
151+
captured.update(kwargs)
152+
return None
153+
154+
monkeypatch.setattr(replay_module, "_fetch_data_to_replay", fake_fetch_data_to_replay)
155+
156+
results = []
157+
async for item in replay(
158+
exchange=LIVE_REPLAY_EXCHANGE,
159+
from_date=datetime(2019, 5, 1, 0, 0),
160+
to_date=datetime(2019, 5, 1, 0, 1),
161+
filters=filters,
162+
cache_dir=str(cache_dir),
163+
):
164+
results.append(item)
165+
166+
assert len(results) == 1
167+
assert captured["from_date"] == datetime(2019, 5, 1, 0, 0, tzinfo=timezone.utc)
168+
assert captured["to_date"] == datetime(2019, 5, 1, 0, 1, tzinfo=timezone.utc)
169+
170+
171+
@pytest.mark.asyncio
172+
async def test_replay_converts_timezone_aware_datetime_inputs_to_utc(monkeypatch, tmp_path: Path):
173+
cache_dir = tmp_path / "cache"
174+
filters = _live_replay_filters()
175+
captured = {}
176+
177+
utc_from_date = datetime(2019, 5, 1, 0, 0, tzinfo=timezone.utc)
178+
utc_to_date = datetime(2019, 5, 1, 0, 1, tzinfo=timezone.utc)
179+
180+
slice_path = Path(_get_slice_cache_path(str(cache_dir), LIVE_REPLAY_EXCHANGE, utc_from_date, filters))
181+
slice_path.parent.mkdir(parents=True, exist_ok=True)
182+
183+
with gzip.open(slice_path, "wb") as file:
184+
file.write(b'2019-05-01T00:00:00.0000000Z {"table":"trade","action":"partial","data":[{"symbol":"BTCUSD"}]}\n')
185+
186+
async def fake_fetch_data_to_replay(**kwargs):
187+
captured.update(kwargs)
188+
return None
189+
190+
monkeypatch.setattr(replay_module, "_fetch_data_to_replay", fake_fetch_data_to_replay)
191+
192+
results = []
193+
async for item in replay(
194+
exchange=LIVE_REPLAY_EXCHANGE,
195+
from_date=datetime(2019, 5, 1, 2, 0, tzinfo=timezone(timedelta(hours=2))),
196+
to_date=datetime(2019, 5, 1, 2, 1, tzinfo=timezone(timedelta(hours=2))),
197+
filters=filters,
198+
cache_dir=str(cache_dir),
199+
):
200+
results.append(item)
201+
202+
assert len(results) == 1
203+
assert captured["from_date"] == utc_from_date
204+
assert captured["to_date"] == utc_to_date
205+
206+
131207
@pytest.mark.asyncio
132208
async def test_replay_raw_mode_returns_bytes(monkeypatch, tmp_path: Path):
133209
cache_dir = tmp_path / "cache"

0 commit comments

Comments (0)