diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml
index aac7854..085cdb3 100644
--- a/.github/workflows/build-test.yml
+++ b/.github/workflows/build-test.yml
@@ -14,7 +14,7 @@ jobs:
# fail it if doesn't conform to black
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: psf/black@stable
with:
options: "--check --verbose"
@@ -25,10 +25,10 @@ jobs:
strategy:
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
- python-version: ["3.8", "3.11"]
+ python-version: ["3.9", "3.11"]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index b3f8d54..9d20b05 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -12,7 +12,7 @@ jobs:
deploy:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
@@ -38,7 +38,7 @@ jobs:
run: |
i=0
while [ $i -lt 12 ] && [ "${{ github.ref_name }}" != $(pip index versions -i https://test.pypi.org/simple --pre market_prices | cut -d'(' -f2 | cut -d')' -f1 | sed 1q) ];\
- do echo "waiting for package to appear in test index, i is $i, sleeping 5s"; sleep 5s; echo "woken up"; ((i++)); echo "next i is $i"; done
+ do echo "waiting for package to appear in test index, i is $i"; echo "sleeping 5s"; sleep 5s; echo "woken up"; ((i++)); echo "next i is $i"; done
pip install --index-url https://test.pypi.org/simple market_prices==${{ github.ref_name }} --no-deps
pip install -r etc/requirements.txt
python -c 'import market_prices;print(market_prices.__version__)'
@@ -58,6 +58,6 @@ jobs:
run: |
i=0
while [ $i -lt 12 ] && [ "${{ github.ref_name }}" != $(pip index versions -i https://pypi.org/simple --pre market_prices | cut -d'(' -f2 | cut -d')' -f1 | sed 1q) ];\
- do echo "waiting for package to appear in index, i is $i, sleeping 5s"; sleep 5s; echo "woken up"; ((i++)); echo "next i is $i"; done
+ do echo "waiting for package to appear in index, i is $i"; echo "sleeping 5s"; sleep 5s; echo "woken up"; ((i++)); echo "next i is $i"; done
pip install --index-url https://pypi.org/simple market_prices==${{ github.ref_name }}
python -c 'import market_prices;print(market_prices.__version__)'
diff --git a/.gitignore b/.gitignore
index 2f726bd..11d7b5d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -107,9 +107,9 @@ celerybeat.pid
# Environments
.env
-.venv
-env/
-venv/
+.venv*/
+env
+venv*/
ENV/
env.bak/
venv.bak/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 3935253..6d38f47 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -4,16 +4,16 @@ repos:
hooks:
- id: check-yaml
- repo: https://github.com/psf/black
- rev: 22.3.0
+ rev: 23.7.0
hooks:
- id: black
# It is recommended to specify the latest version of Python
# supported by your project here, or alternatively use
# pre-commit's default_language_version, see
# https://pre-commit.com/#top_level-default_language_version
- language_version: python3.8
+ language_version: python3.11
- repo: https://github.com/PyCQA/flake8
- rev: 4.0.1
+ rev: 6.1.0
hooks:
- id: flake8
additional_dependencies: [flake8-docstrings]
\ No newline at end of file
diff --git a/.pylintrc b/.pylintrc
index e8cb158..ce4d0f0 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -82,8 +82,8 @@ max-line-length=100
max-module-lines=2000
[TYPING]
-py-version=3.8
-runtime-typing=no
+py-version=3.9
+runtime-typing=True
[PARAMETER_DOCUMENTATION]
accept-no-param-doc=no
diff --git a/docs/developers/typing_doc.md b/docs/developers/typing_doc.md
index 71ce761..a76728a 100644
--- a/docs/developers/typing_doc.md
+++ b/docs/developers/typing_doc.md
@@ -18,10 +18,9 @@ Errors arising for reasons listed in the comments towards the top fo the `mypy.i
### mptypes.py
-Types specific to `market_prices` are defined on the `mptypes.py` module. These include
-type aliases, custom pydantic types and internal enums.
+Types specific to `market_prices` are defined on the `mptypes.py` module. These include type aliases, custom types and internal enums.
-The annotation of any public parameter that takes an mptype should begin `mptypes.` in order to explictly declare the type as being specific to `market_prices`.
+The type annotation of any public parameter that takes a type defined on the mptypes module should begin `mptypes.`. This is to explicitly declare the type as being specific to `market_prices`.
## Documentation
diff --git a/docs/public/parsing.md b/docs/public/parsing.md
index f794a8f..63e98bc 100644
--- a/docs/public/parsing.md
+++ b/docs/public/parsing.md
@@ -1,34 +1,44 @@
# Parsing
-`market_prices` uses the [pydantic library](https://pydantic-docs.helpmanual.io/) to parse parameters received by public functions and methods. Pydantic ensures that the type passed to a formal parameter conforms with the parameter's type annotation. Passing a object with an invalid type will raise a `pydantic.ValidationError` with a message advising of the invalid inputs and what was expected.
+`market_prices` uses the [`valimp`](https://github.com/maread99/valimp) library to parse parameters received by public functions and methods.
+
+## Type validation
+The `valimp.parse` decorator ensures that the objects passed to a function's parameters conform with the corresponding type annotation. Where a parameter takes a container this validation extends to validating the type of the container items. For example, an input for the following parameter would be validated as being a `dict` and each item of that dictionary would be validated as having the key as a `str` and the value as either an `int` or a `float`:
+
+```python
+param: dict[str, Union[int, float]]
+```
+
+An instance of `valimp.InputsError` is raised if at least one object passed to a function does not conform with the corresponding type annotation.
## Coercing
-When a parameter receives an invalid type pydantic will try to coerce it to a valid type. For example, a parameter annoated with `int` could be passed a `str` "3" which would be coerced to the `int` 3. It could also be passed a `float` 2.99 which would be coerced the `int` 2!
+An instance of `valimp.Coerce` in a parameter's annotation simply indicates that the object will
+be subsequently coerced to a specific type. For example, the following 'start' parameter can take an object of type `pd.Timestamp`, `str`, `datetime.datetime`, `int`, or `float`. In all cases the object will be coerced to a `pd.Timestamp` (NB a None value is never coerced).
-Parameters that do not allow coercing are typed with a pydantic 'Strict' type, for example `pydantic.StrictInt`.
+```python
+start: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+] = None,
+```
+
+(NB The type annotation is wrapped in `typing.Annotated` and the `valimp.Coerce` instance is passed to the annotated metadata.)
-## Custom pydantic types
-`market_prices` defines custom pydantic types for certain parameters. The parsing of custom types may perform additional validations and define default values.
+## Ad-hoc validation
-For example, the type `mptypes.PricesTimezone` is defined for parameters that allow a timezone to be specified by way of a symbol or `pytz` timezone object. The parsing process checks that the input is of a valid type and value and then passes through a pytz timezone object to the formal parameter.
+An instance of `valimp.Parser` in the type annotation indicates that the input will be subsequently parsed before reaching the decorated function. This parsing may undertake further validation or dynamically assign a default value. For example, the following 'session' parameter will be coerced to a `pd.Timestamp` which in turn will be verified as representing a date (as opposed to a time) by the `parsing.verify_datetimestamp` function.
-The type's documentation includes the requirements for input to be considered valid.
```python
->>> from market_prices.mptypes import DateTimestamp
->>> help(DateTimestamp)
+session: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ Parser(parsing.verify_datetimestamp, parse_none=False),
+] = None,
```
- Help on class DateTimestamp in module market_prices.mptypes:
-
- class DateTimestamp(Timestamp)
- | Type to parse to a pd.Timestamp and validate as a date.
- |
- | Considered a valid date (rather than a time), if:
- | - no time component or time component defined as 00:00.
- | - tz-naive.
- |
- | A parameter annotated with this class can take any object that is
- | acceptable as a single-argument input to pd.Timestamp:
- | Union[pd.Timestamp, str, datetime.datetime, int, float]
- |
- | The formal parameter will be assigned a pd.Timestamp.
\ No newline at end of file
+
+In this case, if the input does not represent a date then `parsing.verify_datetimestamp` will raise an appropriate error (the parsing functions' documentation offers advice as to what's required for an input to be considered valid).
+
+(NB The `parse_none` argument indicates to the `parse` decorator that a `None` value should not be parsed.)
+
+(NB The type annotation is wrapped in `typing.Annotated` and the `valimp.Parser` instance is passed to the annotated metadata.)
\ No newline at end of file
diff --git a/docs/public/typing.md b/docs/public/typing.md
index 4b165e5..7f4e71a 100644
--- a/docs/public/typing.md
+++ b/docs/public/typing.md
@@ -5,19 +5,17 @@ Third party types will usually be defined with a full dotted path from the packa
## mptypes
-`market_prices` defines [type aliases](#Type-aliases) and [custom pydantic types](#Custom-pydantic-types) to annotate some parameters of public methods. Such types are all defined in the `mptypes.py` module.
+`market_prices` defines [type aliases](#Type-aliases) and custom types to annotate some parameters of public methods. Such types are defined in the `mptypes.py` module.
-When a parameter takes an mptype the underlying valid types are expressed in the 'Parameters' section of the method's documentation.
+When a parameter takes an mptype the underlying valid types are expressed in the 'Parameters' section of the method's documentation.
### Type aliases
-`market_prices` uses type aliases to represent multiple underlying types that are acceptable input. The underlying types can be inspected by calling the type alias:
+`market_prices` occasionally uses type aliases to represent multiple underlying types that are acceptable input. The underlying types can be inspected by calling the type alias:
```python
->>> from market_prices.mptypes import Calendar
+>>> from market_prices.mptypes import Symbols, Calendar
+>>> Symbols
+typing.Union[list[str], str]
>>> Calendar
-typing.Union[pydantic.types.StrictStr, exchange_calendars.exchange_calendar.ExchangeCalendar]
+typing.Union[str, exchange_calendars.exchange_calendar.ExchangeCalendar]
```
-
-### Custom pydantic types
-
-The [parsing](./parsing.md) documentation explains how custom pydantic types are sometimes used to validate and parse parameters.
\ No newline at end of file
diff --git a/docs/tutorials/data_availability.ipynb b/docs/tutorials/data_availability.ipynb
index b547ab7..190f7d5 100644
--- a/docs/tutorials/data_availability.ipynb
+++ b/docs/tutorials/data_availability.ipynb
@@ -51,9 +51,10 @@
],
"source": [
"import pandas as pd\n",
- "now = pd.Timestamp.now(tz=\"UTC\").floor(\"T\")\n",
+ "from zoneinfo import ZoneInfo\n",
+ "now = pd.Timestamp.now(tz=ZoneInfo(\"UTC\")).floor(\"T\")\n",
"print(f\"{now!r}\")\n",
- "print(f\"{now.astimezone('America/New_York')!r}\")"
+ "print(f\"{now.astimezone(ZoneInfo('America/New_York'))!r}\")"
]
},
{
@@ -77,7 +78,6 @@
"outputs": [],
"source": [
"from market_prices import PricesYahoo, helpers\n",
- "import pytz\n",
"from market_prices.support import tutorial_helpers as th"
]
},
diff --git a/docs/tutorials/intervals.ipynb b/docs/tutorials/intervals.ipynb
index 4c3656c..8225d2f 100644
--- a/docs/tutorials/intervals.ipynb
+++ b/docs/tutorials/intervals.ipynb
@@ -46,9 +46,10 @@
],
"source": [
"import pandas as pd\n",
- "now = pd.Timestamp.now(tz=\"UTC\").floor(\"T\")\n",
+ "from zoneinfo import ZoneInfo\n",
+ "now = pd.Timestamp.now(tz=ZoneInfo(\"UTC\")).floor(\"T\")\n",
"print(f\"{now!r}\")\n",
- "print(f\"{now.astimezone('America/New_York')!r}\")"
+ "print(f\"{now.astimezone(ZoneInfo('America/New_York'))!r}\")"
]
},
{
@@ -72,7 +73,6 @@
"outputs": [],
"source": [
"from market_prices import PricesYahoo\n",
- "import pytz\n",
"import pandas as pd\n",
"from market_prices.support import tutorial_helpers as th"
]
@@ -2658,9 +2658,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "mkt_prices 3.8.2",
+ "display_name": "venv",
"language": "python",
- "name": "mkt_prices"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -2672,7 +2672,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.2"
+ "version": "3.8.10"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
diff --git a/docs/tutorials/other_get_options.ipynb b/docs/tutorials/other_get_options.ipynb
index 0d25a44..e2eae47 100644
--- a/docs/tutorials/other_get_options.ipynb
+++ b/docs/tutorials/other_get_options.ipynb
@@ -32,23 +32,24 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 1,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "Timestamp('2022-05-13 12:24:00+0000', tz='UTC')\n",
- "Timestamp('2022-05-13 08:24:00-0400', tz='America/New_York')\n"
+ "Timestamp('2023-09-09 19:03:00+0000', tz='UTC')\n",
+ "Timestamp('2023-09-09 15:03:00-0400', tz='America/New_York')\n"
]
}
],
"source": [
"import pandas as pd\n",
- "now = pd.Timestamp.now(tz=\"UTC\").floor(\"T\")\n",
+ "from zoneinfo import ZoneInfo\n",
+ "now = pd.Timestamp.now(tz=ZoneInfo(\"UTC\")).floor(\"T\")\n",
"print(f\"{now!r}\")\n",
- "print(f\"{now.astimezone('America/New_York')!r}\")"
+ "print(f\"{now.astimezone(ZoneInfo('America/New_York'))!r}\")"
]
},
{
@@ -67,7 +68,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
@@ -85,7 +86,7 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -121,16 +122,16 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- "Timestamp('2022-04-19 00:00:00')"
+ "Timestamp('2023-08-11 00:00:00')"
]
},
- "execution_count": 5,
+ "execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
@@ -142,7 +143,7 @@
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 5,
"metadata": {},
"outputs": [
{
@@ -191,184 +192,184 @@
" \n",
"
\n",
" \n",
- " 2022-04-05 | \n",
- " 313.269989 | \n",
- " 314.869995 | \n",
- " 309.869995 | \n",
- " 310.880005 | \n",
- " 23156700 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " 10112.0 | \n",
- " 10350.000000 | \n",
- " 10078.323242 | \n",
- " 10334.0 | \n",
- " 2529617.0 | \n",
- "
\n",
- " \n",
- " 2022-04-06 | \n",
- " 305.190002 | \n",
- " 307.000000 | \n",
- " 296.709991 | \n",
- " 299.500000 | \n",
- " 40110400 | \n",
- " 109.099998 | \n",
- " 109.800003 | \n",
- " 107.099998 | \n",
- " 107.599998 | \n",
- " 46638063.0 | \n",
- " 10242.0 | \n",
- " 10481.076172 | \n",
- " 10227.108398 | \n",
- " 10462.0 | \n",
- " 2251895.0 | \n",
- "
\n",
- " \n",
- " 2022-04-07 | \n",
- " 296.660004 | \n",
- " 303.649994 | \n",
- " 296.350006 | \n",
- " 301.369995 | \n",
- " 31411200 | \n",
- " 106.400002 | \n",
- " 108.900002 | \n",
- " 105.199997 | \n",
- " 105.199997 | \n",
- " 35575706.0 | \n",
- " 10554.0 | \n",
- " 10680.000000 | \n",
- " 10523.267578 | \n",
- " 10668.0 | \n",
- " 3462762.0 | \n",
- "
\n",
- " \n",
- " 2022-04-08 | \n",
- " 300.440002 | \n",
- " 301.119995 | \n",
- " 296.279999 | \n",
- " 296.970001 | \n",
- " 24361900 | \n",
- " 102.800003 | \n",
- " 103.800003 | \n",
- " 101.500000 | \n",
- " 103.800003 | \n",
- " 38153586.0 | \n",
- " 10734.0 | \n",
- " 11000.000000 | \n",
- " 10713.238281 | \n",
- " 10930.0 | \n",
- " 2542196.0 | \n",
- "
\n",
- " \n",
- " 2022-04-11 | \n",
- " 291.790009 | \n",
- " 292.609985 | \n",
- " 285.000000 | \n",
- " 285.260010 | \n",
- " 34569300 | \n",
- " 102.800003 | \n",
- " 103.000000 | \n",
- " 97.849998 | \n",
- " 98.500000 | \n",
- " 40131164.0 | \n",
- " 10888.0 | \n",
- " 10950.000000 | \n",
- " 10746.000000 | \n",
- " 10858.0 | \n",
- " 2561255.0 | \n",
- "
\n",
- " \n",
- " 2022-04-12 | \n",
- " 289.239990 | \n",
- " 290.739990 | \n",
- " 280.489990 | \n",
- " 282.059998 | \n",
- " 30966700 | \n",
- " 98.500000 | \n",
- " 100.599998 | \n",
- " 96.349998 | \n",
+ " 2023-07-31 | \n",
+ " 336.920013 | \n",
+ " 337.700012 | \n",
+ " 333.359985 | \n",
+ " 335.920013 | \n",
+ " 25446000 | \n",
" 99.000000 | \n",
- " 39683531.0 | \n",
- " 10720.0 | \n",
- " 10754.000000 | \n",
- " 10398.000000 | \n",
- " 10504.0 | \n",
- " 2791098.0 | \n",
- "
\n",
- " \n",
- " 2022-04-13 | \n",
- " 282.730011 | \n",
- " 288.579987 | \n",
- " 281.299988 | \n",
- " 287.619995 | \n",
- " 21907200 | \n",
- " 98.400002 | \n",
- " 99.500000 | \n",
- " 97.000000 | \n",
- " 98.500000 | \n",
- " 24650087.0 | \n",
- " 10608.0 | \n",
- " 10640.419922 | \n",
- " 10464.099609 | \n",
- " 10510.0 | \n",
- " 2102995.0 | \n",
- "
\n",
- " \n",
- " 2022-04-14 | \n",
- " 288.089996 | \n",
- " 288.309998 | \n",
- " 279.320007 | \n",
- " 279.829987 | \n",
- " 28221600 | \n",
+ " 99.750000 | \n",
+ " 96.800003 | \n",
+ " 97.500000 | \n",
+ " 74290881 | \n",
+ " 11028.0 | \n",
+ " 11204.0 | \n",
+ " 11022.000000 | \n",
+ " 11186.0 | \n",
+ " 1639504 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-01 | \n",
+ " 335.190002 | \n",
+ " 338.540009 | \n",
+ " 333.700012 | \n",
+ " 336.339996 | \n",
+ " 18311900 | \n",
+ " 99.699997 | \n",
+ " 100.000000 | \n",
+ " 96.900002 | \n",
+ " 97.849998 | \n",
+ " 53949339 | \n",
+ " 11176.0 | \n",
+ " 11254.0 | \n",
+ " 11148.000000 | \n",
+ " 11196.0 | \n",
+ " 955804 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-02 | \n",
+ " 333.630005 | \n",
+ " 333.630005 | \n",
+ " 326.359985 | \n",
+ " 327.500000 | \n",
+ " 27761300 | \n",
" 97.650002 | \n",
- " 98.000000 | \n",
- " 93.849998 | \n",
- " 95.500000 | \n",
- " 52348941.0 | \n",
- " 10432.0 | \n",
- " 10574.000000 | \n",
- " 10392.000000 | \n",
- " 10536.0 | \n",
- " 1980089.0 | \n",
- "
\n",
- " \n",
- " 2022-04-18 | \n",
- " 278.910004 | \n",
- " 282.459991 | \n",
- " 278.339996 | \n",
- " 280.519989 | \n",
- " 20778000 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- "
\n",
- " \n",
- " 2022-04-19 | \n",
- " 279.380005 | \n",
- " 286.170013 | \n",
- " 278.410004 | \n",
- " 285.299988 | \n",
- " 22297700 | \n",
- " 92.550003 | \n",
- " 93.449997 | \n",
- " 90.900002 | \n",
- " 91.500000 | \n",
- " 34060706.0 | \n",
- " 10478.0 | \n",
- " 10582.000000 | \n",
- " 10428.000000 | \n",
- " 10500.0 | \n",
- " 3260649.0 | \n",
+ " 98.599998 | \n",
+ " 94.599998 | \n",
+ " 95.150002 | \n",
+ " 37885551 | \n",
+ " 11040.0 | \n",
+ " 11140.0 | \n",
+ " 10874.000000 | \n",
+ " 11096.0 | \n",
+ " 1371970 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-03 | \n",
+ " 326.000000 | \n",
+ " 329.880005 | \n",
+ " 325.950012 | \n",
+ " 326.660004 | \n",
+ " 18253700 | \n",
+ " 93.699997 | \n",
+ " 94.750000 | \n",
+ " 93.050003 | \n",
+ " 93.150002 | \n",
+ " 50021614 | \n",
+ " 11018.0 | \n",
+ " 11018.0 | \n",
+ " 10778.000000 | \n",
+ " 10900.0 | \n",
+ " 4910009 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-04 | \n",
+ " 331.880005 | \n",
+ " 335.140015 | \n",
+ " 327.239990 | \n",
+ " 327.779999 | \n",
+ " 23727700 | \n",
+ " 95.250000 | \n",
+ " 96.599998 | \n",
+ " 94.400002 | \n",
+ " 95.150002 | \n",
+ " 32949492 | \n",
+ " 10952.0 | \n",
+ " 10968.0 | \n",
+ " 10784.000000 | \n",
+ " 10910.0 | \n",
+ " 1506764 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-07 | \n",
+ " 328.369995 | \n",
+ " 331.109985 | \n",
+ " 327.519989 | \n",
+ " 330.109985 | \n",
+ " 17741500 | \n",
+ " 94.349998 | \n",
+ " 96.050003 | \n",
+ " 93.599998 | \n",
+ " 95.599998 | \n",
+ " 22359341 | \n",
+ " 10894.0 | \n",
+ " 10944.0 | \n",
+ " 10804.000000 | \n",
+ " 10886.0 | \n",
+ " 853789 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-08 | \n",
+ " 326.959991 | \n",
+ " 328.750000 | \n",
+ " 323.000000 | \n",
+ " 326.049988 | \n",
+ " 22327600 | \n",
+ " 94.099998 | \n",
+ " 94.300003 | \n",
+ " 92.699997 | \n",
+ " 93.050003 | \n",
+ " 35528837 | \n",
+ " 10940.0 | \n",
+ " 11100.0 | \n",
+ " 10834.000000 | \n",
+ " 11030.0 | \n",
+ " 1977417 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-09 | \n",
+ " 326.470001 | \n",
+ " 327.109985 | \n",
+ " 321.049988 | \n",
+ " 322.230011 | \n",
+ " 22373300 | \n",
+ " 92.349998 | \n",
+ " 94.400002 | \n",
+ " 92.050003 | \n",
+ " 94.099998 | \n",
+ " 30298227 | \n",
+ " 11110.0 | \n",
+ " 11212.0 | \n",
+ " 10980.642578 | \n",
+ " 11182.0 | \n",
+ " 2545313 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-10 | \n",
+ " 326.019989 | \n",
+ " 328.260010 | \n",
+ " 321.179993 | \n",
+ " 322.929993 | \n",
+ " 20113700 | \n",
+ " 93.800003 | \n",
+ " 94.699997 | \n",
+ " 92.349998 | \n",
+ " 94.300003 | \n",
+ " 34598900 | \n",
+ " 11166.0 | \n",
+ " 11184.0 | \n",
+ " 11042.000000 | \n",
+ " 11162.0 | \n",
+ " 2848203 | \n",
+ "
\n",
+ " \n",
+ " 2023-08-11 | \n",
+ " 320.260010 | \n",
+ " 322.410004 | \n",
+ " 319.209991 | \n",
+ " 321.010010 | \n",
+ " 24342600 | \n",
+ " 97.150002 | \n",
+ " 97.949997 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 61924422 | \n",
+ " 11066.0 | \n",
+ " 11124.0 | \n",
+ " 10912.000000 | \n",
+ " 11030.0 | \n",
+ " 2243755 | \n",
"
\n",
" \n",
"\n",
@@ -377,45 +378,45 @@
"text/plain": [
"symbol MSFT \\\n",
" open high low close volume \n",
- "2022-04-05 313.269989 314.869995 309.869995 310.880005 23156700 \n",
- "2022-04-06 305.190002 307.000000 296.709991 299.500000 40110400 \n",
- "2022-04-07 296.660004 303.649994 296.350006 301.369995 31411200 \n",
- "2022-04-08 300.440002 301.119995 296.279999 296.970001 24361900 \n",
- "2022-04-11 291.790009 292.609985 285.000000 285.260010 34569300 \n",
- "2022-04-12 289.239990 290.739990 280.489990 282.059998 30966700 \n",
- "2022-04-13 282.730011 288.579987 281.299988 287.619995 21907200 \n",
- "2022-04-14 288.089996 288.309998 279.320007 279.829987 28221600 \n",
- "2022-04-18 278.910004 282.459991 278.339996 280.519989 20778000 \n",
- "2022-04-19 279.380005 286.170013 278.410004 285.299988 22297700 \n",
+ "2023-07-31 336.920013 337.700012 333.359985 335.920013 25446000 \n",
+ "2023-08-01 335.190002 338.540009 333.700012 336.339996 18311900 \n",
+ "2023-08-02 333.630005 333.630005 326.359985 327.500000 27761300 \n",
+ "2023-08-03 326.000000 329.880005 325.950012 326.660004 18253700 \n",
+ "2023-08-04 331.880005 335.140015 327.239990 327.779999 23727700 \n",
+ "2023-08-07 328.369995 331.109985 327.519989 330.109985 17741500 \n",
+ "2023-08-08 326.959991 328.750000 323.000000 326.049988 22327600 \n",
+ "2023-08-09 326.470001 327.109985 321.049988 322.230011 22373300 \n",
+ "2023-08-10 326.019989 328.260010 321.179993 322.929993 20113700 \n",
+ "2023-08-11 320.260010 322.410004 319.209991 321.010010 24342600 \n",
"\n",
- "symbol 9988.HK \\\n",
- " open high low close volume \n",
- "2022-04-05 NaN NaN NaN NaN NaN \n",
- "2022-04-06 109.099998 109.800003 107.099998 107.599998 46638063.0 \n",
- "2022-04-07 106.400002 108.900002 105.199997 105.199997 35575706.0 \n",
- "2022-04-08 102.800003 103.800003 101.500000 103.800003 38153586.0 \n",
- "2022-04-11 102.800003 103.000000 97.849998 98.500000 40131164.0 \n",
- "2022-04-12 98.500000 100.599998 96.349998 99.000000 39683531.0 \n",
- "2022-04-13 98.400002 99.500000 97.000000 98.500000 24650087.0 \n",
- "2022-04-14 97.650002 98.000000 93.849998 95.500000 52348941.0 \n",
- "2022-04-18 NaN NaN NaN NaN NaN \n",
- "2022-04-19 92.550003 93.449997 90.900002 91.500000 34060706.0 \n",
+ "symbol 9988.HK AZN.L \\\n",
+ " open high low close volume open \n",
+ "2023-07-31 99.000000 99.750000 96.800003 97.500000 74290881 11028.0 \n",
+ "2023-08-01 99.699997 100.000000 96.900002 97.849998 53949339 11176.0 \n",
+ "2023-08-02 97.650002 98.599998 94.599998 95.150002 37885551 11040.0 \n",
+ "2023-08-03 93.699997 94.750000 93.050003 93.150002 50021614 11018.0 \n",
+ "2023-08-04 95.250000 96.599998 94.400002 95.150002 32949492 10952.0 \n",
+ "2023-08-07 94.349998 96.050003 93.599998 95.599998 22359341 10894.0 \n",
+ "2023-08-08 94.099998 94.300003 92.699997 93.050003 35528837 10940.0 \n",
+ "2023-08-09 92.349998 94.400002 92.050003 94.099998 30298227 11110.0 \n",
+ "2023-08-10 93.800003 94.699997 92.349998 94.300003 34598900 11166.0 \n",
+ "2023-08-11 97.150002 97.949997 95.300003 95.300003 61924422 11066.0 \n",
"\n",
- "symbol AZN.L \n",
- " open high low close volume \n",
- "2022-04-05 10112.0 10350.000000 10078.323242 10334.0 2529617.0 \n",
- "2022-04-06 10242.0 10481.076172 10227.108398 10462.0 2251895.0 \n",
- "2022-04-07 10554.0 10680.000000 10523.267578 10668.0 3462762.0 \n",
- "2022-04-08 10734.0 11000.000000 10713.238281 10930.0 2542196.0 \n",
- "2022-04-11 10888.0 10950.000000 10746.000000 10858.0 2561255.0 \n",
- "2022-04-12 10720.0 10754.000000 10398.000000 10504.0 2791098.0 \n",
- "2022-04-13 10608.0 10640.419922 10464.099609 10510.0 2102995.0 \n",
- "2022-04-14 10432.0 10574.000000 10392.000000 10536.0 1980089.0 \n",
- "2022-04-18 NaN NaN NaN NaN NaN \n",
- "2022-04-19 10478.0 10582.000000 10428.000000 10500.0 3260649.0 "
+ "symbol \n",
+ " high low close volume \n",
+ "2023-07-31 11204.0 11022.000000 11186.0 1639504 \n",
+ "2023-08-01 11254.0 11148.000000 11196.0 955804 \n",
+ "2023-08-02 11140.0 10874.000000 11096.0 1371970 \n",
+ "2023-08-03 11018.0 10778.000000 10900.0 4910009 \n",
+ "2023-08-04 10968.0 10784.000000 10910.0 1506764 \n",
+ "2023-08-07 10944.0 10804.000000 10886.0 853789 \n",
+ "2023-08-08 11100.0 10834.000000 11030.0 1977417 \n",
+ "2023-08-09 11212.0 10980.642578 11182.0 2545313 \n",
+ "2023-08-10 11184.0 11042.000000 11162.0 2848203 \n",
+ "2023-08-11 11124.0 10912.000000 11030.0 2243755 "
]
},
- "execution_count": 6,
+ "execution_count": 5,
"metadata": {},
"output_type": "execute_result"
}
@@ -428,7 +429,7 @@
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 6,
"metadata": {},
"outputs": [
{
@@ -477,94 +478,94 @@
" \n",
" \n",
" \n",
- " [2022-04-18 09:30:00, 2022-04-18 09:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.760010 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 09:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.500000 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:35:00, 2022-04-18 09:40:00) | \n",
- " 280.760010 | \n",
- " 280.910004 | \n",
- " 279.549988 | \n",
- " 279.553802 | \n",
- " 550448.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:35:00, 2023-08-10 09:40:00) | \n",
+ " 326.149994 | \n",
+ " 326.636505 | \n",
+ " 325.489990 | \n",
+ " 326.160004 | \n",
+ " 365134.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11122.0 | \n",
+ " 11134.0 | \n",
+ " 11112.0 | \n",
+ " 11112.0 | \n",
+ " 12659.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:40:00, 2022-04-18 09:45:00) | \n",
- " 279.549988 | \n",
- " 280.339996 | \n",
- " 279.200012 | \n",
- " 279.769989 | \n",
- " 532356.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:40:00, 2023-08-10 09:45:00) | \n",
+ " 326.200012 | \n",
+ " 326.570007 | \n",
+ " 325.109985 | \n",
+ " 325.340088 | \n",
+ " 345965.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11114.0 | \n",
+ " 11138.0 | \n",
+ " 11114.0 | \n",
+ " 11122.0 | \n",
+ " 15176.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:45:00, 2022-04-18 09:50:00) | \n",
- " 279.869995 | \n",
- " 279.869995 | \n",
- " 279.010010 | \n",
- " 279.568909 | \n",
- " 379377.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:45:00, 2023-08-10 09:50:00) | \n",
+ " 325.450012 | \n",
+ " 326.510010 | \n",
+ " 325.450012 | \n",
+ " 326.390015 | \n",
+ " 304905.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11124.0 | \n",
+ " 11138.0 | \n",
+ " 11118.0 | \n",
+ " 11120.0 | \n",
+ " 9613.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:50:00, 2022-04-18 09:55:00) | \n",
- " 279.619995 | \n",
- " 279.785004 | \n",
- " 278.799988 | \n",
- " 279.070007 | \n",
- " 332695.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:50:00, 2023-08-10 09:55:00) | \n",
+ " 326.399994 | \n",
+ " 326.579987 | \n",
+ " 325.950012 | \n",
+ " 326.440002 | \n",
+ " 325788.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11120.0 | \n",
+ " 11140.0 | \n",
+ " 11116.0 | \n",
+ " 11134.0 | \n",
+ " 5078.0 | \n",
"
\n",
" \n",
" ... | \n",
@@ -585,12 +586,12 @@
" ... | \n",
"
\n",
" \n",
- " [2022-04-19 15:35:00, 2022-04-19 15:40:00) | \n",
- " 285.589996 | \n",
- " 286.109985 | \n",
- " 285.579987 | \n",
- " 285.920013 | \n",
- " 354635.0 | \n",
+ " [2023-08-11 15:35:00, 2023-08-11 15:40:00) | \n",
+ " 321.720001 | \n",
+ " 321.829987 | \n",
+ " 321.279999 | \n",
+ " 321.353210 | \n",
+ " 165967.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -603,12 +604,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-19 15:40:00, 2022-04-19 15:45:00) | \n",
- " 285.929993 | \n",
- " 285.929993 | \n",
- " 285.190002 | \n",
- " 285.260010 | \n",
- " 351330.0 | \n",
+ " [2023-08-11 15:40:00, 2023-08-11 15:45:00) | \n",
+ " 321.339996 | \n",
+ " 321.429993 | \n",
+ " 321.179993 | \n",
+ " 321.338013 | \n",
+ " 217661.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -621,12 +622,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-19 15:45:00, 2022-04-19 15:50:00) | \n",
- " 285.260010 | \n",
- " 285.380005 | \n",
- " 284.750000 | \n",
- " 285.230011 | \n",
- " 382935.0 | \n",
+ " [2023-08-11 15:45:00, 2023-08-11 15:50:00) | \n",
+ " 321.350006 | \n",
+ " 321.399994 | \n",
+ " 320.910004 | \n",
+ " 321.179993 | \n",
+ " 241240.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -639,12 +640,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-19 15:50:00, 2022-04-19 15:55:00) | \n",
- " 285.234985 | \n",
- " 286.170013 | \n",
- " 285.234985 | \n",
- " 285.320007 | \n",
- " 697921.0 | \n",
+ " [2023-08-11 15:50:00, 2023-08-11 15:55:00) | \n",
+ " 321.190002 | \n",
+ " 321.440002 | \n",
+ " 320.859985 | \n",
+ " 320.910004 | \n",
+ " 389375.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -657,12 +658,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-19 15:55:00, 2022-04-19 16:00:00) | \n",
- " 285.309998 | \n",
- " 285.809998 | \n",
- " 284.500000 | \n",
- " 285.380005 | \n",
- " 1235980.0 | \n",
+ " [2023-08-11 15:55:00, 2023-08-11 16:00:00) | \n",
+ " 320.894989 | \n",
+ " 321.269989 | \n",
+ " 320.589996 | \n",
+ " 321.089996 | \n",
+ " 1342284.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -682,64 +683,78 @@
"text/plain": [
"symbol MSFT \\\n",
" open high \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 281.899994 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 280.760010 280.910004 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 279.549988 280.339996 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 279.869995 279.869995 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 279.619995 279.785004 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 326.015015 326.890015 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 326.149994 326.636505 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 326.200012 326.570007 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 325.450012 326.510010 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 326.399994 326.579987 \n",
"... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 285.589996 286.109985 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 285.929993 285.929993 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 285.260010 285.380005 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 285.234985 286.170013 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 285.309998 285.809998 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 321.720001 321.829987 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 321.339996 321.429993 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 321.350006 321.399994 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 321.190002 321.440002 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 320.894989 321.269989 \n",
"\n",
"symbol \\\n",
" low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 280.760010 1282050.0 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 279.549988 279.553802 550448.0 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 279.200012 279.769989 532356.0 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 279.010010 279.568909 379377.0 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 278.799988 279.070007 332695.0 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 325.500000 326.189911 1094887.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 325.489990 326.160004 365134.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 325.109985 325.340088 345965.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 325.450012 326.390015 304905.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 325.950012 326.440002 325788.0 \n",
"... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 285.579987 285.920013 354635.0 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 285.190002 285.260010 351330.0 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 284.750000 285.230011 382935.0 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 285.234985 285.320007 697921.0 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 284.500000 285.380005 1235980.0 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 321.279999 321.353210 165967.0 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 321.179993 321.338013 217661.0 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 320.910004 321.179993 241240.0 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 320.859985 320.910004 389375.0 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 320.589996 321.089996 1342284.0 \n",
"\n",
"symbol 9988.HK \\\n",
" open high low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) NaN NaN NaN NaN NaN \n",
"... ... ... .. ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) NaN NaN NaN NaN NaN \n",
"\n",
- "symbol AZN.L \n",
- " open high low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) NaN NaN NaN NaN NaN \n",
- "... ... ... .. ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) NaN NaN NaN NaN NaN \n",
+ "symbol AZN.L \\\n",
+ " open high low \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 11118.0 11132.0 11114.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 11122.0 11134.0 11112.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 11114.0 11138.0 11114.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 11124.0 11138.0 11118.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 11120.0 11140.0 11116.0 \n",
+ "... ... ... ... \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) NaN NaN NaN \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) NaN NaN NaN \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) NaN NaN NaN \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) NaN NaN NaN \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) NaN NaN NaN \n",
+ "\n",
+ "symbol \n",
+ " close volume \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 11120.0 8815.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 11112.0 12659.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 11122.0 15176.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 11120.0 9613.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 11134.0 5078.0 \n",
+ "... ... ... \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) NaN NaN \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) NaN NaN \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) NaN NaN \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) NaN NaN \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) NaN NaN \n",
"\n",
"[288 rows x 15 columns]"
]
},
- "execution_count": 7,
+ "execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -768,7 +783,7 @@
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 7,
"metadata": {},
"outputs": [
{
@@ -777,7 +792,7 @@
"True"
]
},
- "execution_count": 8,
+ "execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
@@ -795,7 +810,7 @@
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 8,
"metadata": {},
"outputs": [
{
@@ -844,22 +859,22 @@
" \n",
"
\n",
" \n",
- " 2022-04-05 00:00:00+00:00 | \n",
- " 313.269989 | \n",
- " 314.869995 | \n",
- " 309.869995 | \n",
- " 310.880005 | \n",
- " 23156700 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " 10112.0 | \n",
- " 10350.0 | \n",
- " 10078.323242 | \n",
- " 10334.0 | \n",
- " 2529617.0 | \n",
+ " 2023-07-31 00:00:00+00:00 | \n",
+ " 336.920013 | \n",
+ " 337.700012 | \n",
+ " 333.359985 | \n",
+ " 335.920013 | \n",
+ " 25446000 | \n",
+ " 99.0 | \n",
+ " 99.75 | \n",
+ " 96.800003 | \n",
+ " 97.5 | \n",
+ " 74290881 | \n",
+ " 11028.0 | \n",
+ " 11204.0 | \n",
+ " 11022.0 | \n",
+ " 11186.0 | \n",
+ " 1639504 | \n",
"
\n",
" \n",
"\n",
@@ -868,18 +883,18 @@
"text/plain": [
"symbol MSFT \\\n",
" open high low close \n",
- "2022-04-05 00:00:00+00:00 313.269989 314.869995 309.869995 310.880005 \n",
+ "2023-07-31 00:00:00+00:00 336.920013 337.700012 333.359985 335.920013 \n",
"\n",
- "symbol 9988.HK AZN.L \\\n",
- " volume open high low close volume open \n",
- "2022-04-05 00:00:00+00:00 23156700 NaN NaN NaN NaN NaN 10112.0 \n",
+ "symbol 9988.HK \\\n",
+ " volume open high low close volume \n",
+ "2023-07-31 00:00:00+00:00 25446000 99.0 99.75 96.800003 97.5 74290881 \n",
"\n",
- "symbol \n",
- " high low close volume \n",
- "2022-04-05 00:00:00+00:00 10350.0 10078.323242 10334.0 2529617.0 "
+ "symbol AZN.L \n",
+ " open high low close volume \n",
+ "2023-07-31 00:00:00+00:00 11028.0 11204.0 11022.0 11186.0 1639504 "
]
},
- "execution_count": 9,
+ "execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
@@ -891,16 +906,16 @@
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- ""
+ "zoneinfo.ZoneInfo(key='UTC')"
]
},
- "execution_count": 10,
+ "execution_count": 9,
"metadata": {},
"output_type": "execute_result"
}
@@ -918,7 +933,7 @@
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": 10,
"metadata": {},
"outputs": [
{
@@ -927,7 +942,7 @@
"True"
]
},
- "execution_count": 11,
+ "execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
@@ -945,7 +960,7 @@
},
{
"cell_type": "code",
- "execution_count": 12,
+ "execution_count": 11,
"metadata": {},
"outputs": [
{
@@ -994,16 +1009,16 @@
" \n",
" \n",
" \n",
- " [2022-04-14 09:30:00, 2022-04-14 09:35:00) | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 09:35:00) | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
- " 97.650002 | \n",
- " 98.0 | \n",
- " 95.699997 | \n",
- " 95.699997 | \n",
+ " 93.75 | \n",
+ " 93.800003 | \n",
+ " 93.300003 | \n",
+ " 93.5 | \n",
" 0.0 | \n",
" NaN | \n",
" NaN | \n",
@@ -1016,24 +1031,20 @@
""
],
"text/plain": [
- "symbol MSFT \\\n",
- " open high low close volume \n",
- "[2022-04-14 09:30:00, 2022-04-14 09:35:00) NaN NaN NaN NaN NaN \n",
- "\n",
- "symbol 9988.HK \\\n",
- " open high low \n",
- "[2022-04-14 09:30:00, 2022-04-14 09:35:00) 97.650002 98.0 95.699997 \n",
+ "symbol MSFT 9988.HK \\\n",
+ " open high low close volume open \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) NaN NaN NaN NaN NaN 93.75 \n",
"\n",
- "symbol AZN.L \\\n",
- " close volume open high low \n",
- "[2022-04-14 09:30:00, 2022-04-14 09:35:00) 95.699997 0.0 NaN NaN NaN \n",
+ "symbol \\\n",
+ " high low close volume \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 93.800003 93.300003 93.5 0.0 \n",
"\n",
- "symbol \n",
- " close volume \n",
- "[2022-04-14 09:30:00, 2022-04-14 09:35:00) NaN NaN "
+ "symbol AZN.L \n",
+ " open high low close volume \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) NaN NaN NaN NaN NaN "
]
},
- "execution_count": 12,
+ "execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
@@ -1045,7 +1056,7 @@
},
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": 12,
"metadata": {},
"outputs": [
{
@@ -1054,7 +1065,7 @@
"True"
]
},
- "execution_count": 13,
+ "execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
@@ -1072,14 +1083,14 @@
},
{
"cell_type": "code",
- "execution_count": 14,
+ "execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "{'end': Timestamp('2022-04-19 15:30:00'), 'days': 2} \n",
+ "{'end': Timestamp('2023-08-11 15:30:00'), 'days': 2} \n",
" Europe/London\n"
]
}
@@ -1094,7 +1105,7 @@
},
{
"cell_type": "code",
- "execution_count": 15,
+ "execution_count": 14,
"metadata": {},
"outputs": [
{
@@ -1143,22 +1154,22 @@
" \n",
"
\n",
" \n",
- " [2022-04-14 15:30:00, 2022-04-14 15:35:00) | \n",
- " 283.670013 | \n",
- " 283.973389 | \n",
- " 283.339996 | \n",
- " 283.825012 | \n",
- " 238962.0 | \n",
+ " [2023-08-09 15:30:00, 2023-08-09 15:35:00) | \n",
+ " 324.200012 | \n",
+ " 324.579987 | \n",
+ " 324.190002 | \n",
+ " 324.339996 | \n",
+ " 161779.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
- " 10523.799805 | \n",
- " 10526.0 | \n",
- " 10520.0 | \n",
- " 10526.0 | \n",
- " 5684.0 | \n",
+ " 11184.0 | \n",
+ " 11190.0 | \n",
+ " 11178.0 | \n",
+ " 11188.0 | \n",
+ " 8633.0 | \n",
"
\n",
" \n",
"\n",
@@ -1167,26 +1178,26 @@
"text/plain": [
"symbol MSFT \\\n",
" open high \n",
- "[2022-04-14 15:30:00, 2022-04-14 15:35:00) 283.670013 283.973389 \n",
+ "[2023-08-09 15:30:00, 2023-08-09 15:35:00) 324.200012 324.579987 \n",
"\n",
"symbol \\\n",
" low close volume \n",
- "[2022-04-14 15:30:00, 2022-04-14 15:35:00) 283.339996 283.825012 238962.0 \n",
+ "[2023-08-09 15:30:00, 2023-08-09 15:35:00) 324.190002 324.339996 161779.0 \n",
"\n",
"symbol 9988.HK \\\n",
" open high low close volume \n",
- "[2022-04-14 15:30:00, 2022-04-14 15:35:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-09 15:30:00, 2023-08-09 15:35:00) NaN NaN NaN NaN NaN \n",
"\n",
- "symbol AZN.L \\\n",
- " open high low \n",
- "[2022-04-14 15:30:00, 2022-04-14 15:35:00) 10523.799805 10526.0 10520.0 \n",
+ "symbol AZN.L \\\n",
+ " open high low \n",
+ "[2023-08-09 15:30:00, 2023-08-09 15:35:00) 11184.0 11190.0 11178.0 \n",
"\n",
"symbol \n",
" close volume \n",
- "[2022-04-14 15:30:00, 2022-04-14 15:35:00) 10526.0 5684.0 "
+ "[2023-08-09 15:30:00, 2023-08-09 15:35:00) 11188.0 8633.0 "
]
},
- "execution_count": 15,
+ "execution_count": 14,
"metadata": {},
"output_type": "execute_result"
}
@@ -1198,7 +1209,7 @@
},
{
"cell_type": "code",
- "execution_count": 16,
+ "execution_count": 15,
"metadata": {},
"outputs": [
{
@@ -1207,7 +1218,7 @@
"True"
]
},
- "execution_count": 16,
+ "execution_count": 15,
"metadata": {},
"output_type": "execute_result"
}
@@ -1221,8 +1232,8 @@
"metadata": {},
"source": [
"To define a non-default output timezone, simply pass `tzout`. `tzout` can be defined in the same ways as `tzin`:\n",
- "* a string that's valid input to `pytz.timezone`.\n",
- "* an instance of `pytz.timezone`.\n",
+ "* a string that's valid input to `zoneinfo.ZoneInfo`.\n",
+ "* an instance of `zoneinfo.ZoneInfo`.\n",
"* a string of a symbol.\n",
"\n",
"Note that in each of the following examples the price data is the same, although the index timezone may change."
@@ -1230,7 +1241,7 @@
},
{
"cell_type": "code",
- "execution_count": 17,
+ "execution_count": 16,
"metadata": {},
"outputs": [
{
@@ -1279,46 +1290,46 @@
" \n",
" \n",
" \n",
- " [2022-04-18 13:30:00, 2022-04-18 13:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.76001 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 13:30:00, 2023-08-10 13:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.5 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol MSFT \\\n",
- " open high \n",
- "[2022-04-18 13:30:00, 2022-04-18 13:35:00) 278.910004 281.899994 \n",
+ "symbol MSFT \\\n",
+ " open high low \n",
+ "[2023-08-10 13:30:00, 2023-08-10 13:35:00) 326.015015 326.890015 325.5 \n",
"\n",
- "symbol \\\n",
- " low close volume \n",
- "[2022-04-18 13:30:00, 2022-04-18 13:35:00) 278.910004 280.76001 1282050.0 \n",
+ "symbol 9988.HK \\\n",
+ " close volume open \n",
+ "[2023-08-10 13:30:00, 2023-08-10 13:35:00) 326.189911 1094887.0 NaN \n",
"\n",
- "symbol 9988.HK \\\n",
- " open high low close volume \n",
- "[2022-04-18 13:30:00, 2022-04-18 13:35:00) NaN NaN NaN NaN NaN \n",
+ "symbol AZN.L \\\n",
+ " high low close volume open \n",
+ "[2023-08-10 13:30:00, 2023-08-10 13:35:00) NaN NaN NaN NaN 11118.0 \n",
"\n",
- "symbol AZN.L \n",
- " open high low close volume \n",
- "[2022-04-18 13:30:00, 2022-04-18 13:35:00) NaN NaN NaN NaN NaN "
+ "symbol \n",
+ " high low close volume \n",
+ "[2023-08-10 13:30:00, 2023-08-10 13:35:00) 11132.0 11114.0 11120.0 8815.0 "
]
},
- "execution_count": 17,
+ "execution_count": 16,
"metadata": {},
"output_type": "execute_result"
}
@@ -1330,16 +1341,16 @@
},
{
"cell_type": "code",
- "execution_count": 18,
+ "execution_count": 17,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- ""
+ "zoneinfo.ZoneInfo(key='UTC')"
]
},
- "execution_count": 18,
+ "execution_count": 17,
"metadata": {},
"output_type": "execute_result"
}
@@ -1350,14 +1361,14 @@
},
{
"cell_type": "code",
- "execution_count": 19,
+ "execution_count": 18,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
- "tz=\n",
+ "tz=zoneinfo.ZoneInfo(key='Asia/Hong_Kong')\n",
"\n"
]
},
@@ -1407,46 +1418,46 @@
" \n",
" \n",
" \n",
- " [2022-04-18 21:30:00, 2022-04-18 21:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.76001 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 21:30:00, 2023-08-10 21:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.5 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol MSFT \\\n",
- " open high \n",
- "[2022-04-18 21:30:00, 2022-04-18 21:35:00) 278.910004 281.899994 \n",
+ "symbol MSFT \\\n",
+ " open high low \n",
+ "[2023-08-10 21:30:00, 2023-08-10 21:35:00) 326.015015 326.890015 325.5 \n",
"\n",
- "symbol \\\n",
- " low close volume \n",
- "[2022-04-18 21:30:00, 2022-04-18 21:35:00) 278.910004 280.76001 1282050.0 \n",
+ "symbol 9988.HK \\\n",
+ " close volume open \n",
+ "[2023-08-10 21:30:00, 2023-08-10 21:35:00) 326.189911 1094887.0 NaN \n",
"\n",
- "symbol 9988.HK \\\n",
- " open high low close volume \n",
- "[2022-04-18 21:30:00, 2022-04-18 21:35:00) NaN NaN NaN NaN NaN \n",
+ "symbol AZN.L \\\n",
+ " high low close volume open \n",
+ "[2023-08-10 21:30:00, 2023-08-10 21:35:00) NaN NaN NaN NaN 11118.0 \n",
"\n",
- "symbol AZN.L \n",
- " open high low close volume \n",
- "[2022-04-18 21:30:00, 2022-04-18 21:35:00) NaN NaN NaN NaN NaN "
+ "symbol \n",
+ " high low close volume \n",
+ "[2023-08-10 21:30:00, 2023-08-10 21:35:00) 11132.0 11114.0 11120.0 8815.0 "
]
},
- "execution_count": 19,
+ "execution_count": 18,
"metadata": {},
"output_type": "execute_result"
}
@@ -1460,16 +1471,16 @@
},
{
"cell_type": "code",
- "execution_count": 20,
+ "execution_count": 19,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- ""
+ "zoneinfo.ZoneInfo(key='Asia/Hong_Kong')"
]
},
- "execution_count": 20,
+ "execution_count": 19,
"metadata": {},
"output_type": "execute_result"
}
@@ -1480,7 +1491,7 @@
},
{
"cell_type": "code",
- "execution_count": 21,
+ "execution_count": 20,
"metadata": {},
"outputs": [
{
@@ -1529,46 +1540,46 @@
" \n",
" \n",
" \n",
- " [2022-04-18 21:30:00, 2022-04-18 21:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.76001 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 21:30:00, 2023-08-10 21:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.5 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol MSFT \\\n",
- " open high \n",
- "[2022-04-18 21:30:00, 2022-04-18 21:35:00) 278.910004 281.899994 \n",
+ "symbol MSFT \\\n",
+ " open high low \n",
+ "[2023-08-10 21:30:00, 2023-08-10 21:35:00) 326.015015 326.890015 325.5 \n",
"\n",
- "symbol \\\n",
- " low close volume \n",
- "[2022-04-18 21:30:00, 2022-04-18 21:35:00) 278.910004 280.76001 1282050.0 \n",
+ "symbol 9988.HK \\\n",
+ " close volume open \n",
+ "[2023-08-10 21:30:00, 2023-08-10 21:35:00) 326.189911 1094887.0 NaN \n",
"\n",
- "symbol 9988.HK \\\n",
- " open high low close volume \n",
- "[2022-04-18 21:30:00, 2022-04-18 21:35:00) NaN NaN NaN NaN NaN \n",
+ "symbol AZN.L \\\n",
+ " high low close volume open \n",
+ "[2023-08-10 21:30:00, 2023-08-10 21:35:00) NaN NaN NaN NaN 11118.0 \n",
"\n",
- "symbol AZN.L \n",
- " open high low close volume \n",
- "[2022-04-18 21:30:00, 2022-04-18 21:35:00) NaN NaN NaN NaN NaN "
+ "symbol \n",
+ " high low close volume \n",
+ "[2023-08-10 21:30:00, 2023-08-10 21:35:00) 11132.0 11114.0 11120.0 8815.0 "
]
},
- "execution_count": 21,
+ "execution_count": 20,
"metadata": {},
"output_type": "execute_result"
}
@@ -1580,16 +1591,16 @@
},
{
"cell_type": "code",
- "execution_count": 22,
+ "execution_count": 21,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
- ""
+ "zoneinfo.ZoneInfo(key='Asia/Hong_Kong')"
]
},
- "execution_count": 22,
+ "execution_count": 21,
"metadata": {},
"output_type": "execute_result"
}
@@ -1656,94 +1667,94 @@
" \n",
" \n",
" \n",
- " [2020-05-12, 2020-05-12) | \n",
- " 186.800003 | \n",
- " 187.039993 | \n",
- " 182.300003 | \n",
- " 182.509995 | \n",
- " 32038200.0 | \n",
- " 198.899994 | \n",
- " 199.699997 | \n",
- " 197.199997 | \n",
- " 199.699997 | \n",
- " 14037259.0 | \n",
- " 8690.0 | \n",
- " 8871.0 | \n",
- " 8672.0 | \n",
- " 8856.0 | \n",
- " 1636800.0 | \n",
- "
\n",
- " \n",
- " [2020-05-13, 2020-05-13) | \n",
- " 182.550003 | \n",
- " 184.050003 | \n",
- " 176.539993 | \n",
- " 179.750000 | \n",
- " 44711500.0 | \n",
- " 195.500000 | \n",
- " 197.399994 | \n",
- " 194.300003 | \n",
- " 196.300003 | \n",
- " 26074457.0 | \n",
- " 8800.0 | \n",
- " 9056.0 | \n",
- " 8775.0 | \n",
- " 9004.0 | \n",
- " 2148708.0 | \n",
- "
\n",
- " \n",
- " [2020-05-14, 2020-05-14) | \n",
- " 177.539993 | \n",
- " 180.690002 | \n",
- " 175.679993 | \n",
- " 180.529999 | \n",
- " 41873900.0 | \n",
- " 194.500000 | \n",
- " 195.899994 | \n",
- " 194.100006 | \n",
- " 194.500000 | \n",
- " 19248894.0 | \n",
- " 8998.0 | \n",
- " 9027.0 | \n",
- " 8705.0 | \n",
- " 8765.0 | \n",
- " 2375656.0 | \n",
- "
\n",
- " \n",
- " [2020-05-15, 2020-05-15) | \n",
- " 179.059998 | \n",
- " 187.059998 | \n",
- " 177.000000 | \n",
- " 183.160004 | \n",
- " 46610400.0 | \n",
- " 195.000000 | \n",
- " 197.100006 | \n",
- " 194.100006 | \n",
- " 196.899994 | \n",
- " 16672799.0 | \n",
- " 8740.0 | \n",
- " 8796.0 | \n",
- " 8536.0 | \n",
- " 8671.0 | \n",
- " 2410210.0 | \n",
- "
\n",
- " \n",
- " [2020-05-18, 2020-05-18) | \n",
- " 185.750000 | \n",
- " 186.199997 | \n",
- " 183.960007 | \n",
- " 184.910004 | \n",
- " 35264500.0 | \n",
- " 198.300003 | \n",
- " 203.000000 | \n",
- " 197.399994 | \n",
- " 203.000000 | \n",
- " 26818890.0 | \n",
- " 8849.0 | \n",
- " 8898.0 | \n",
- " 8729.0 | \n",
- " 8790.0 | \n",
- " 2098697.0 | \n",
+ " [2021-09-08, 2021-09-08) | \n",
+ " 299.779999 | \n",
+ " 300.609985 | \n",
+ " 297.470001 | \n",
+ " 300.209991 | \n",
+ " 15046800.0 | \n",
+ " 170.000000 | \n",
+ " 173.399994 | \n",
+ " 169.300003 | \n",
+ " 170.899994 | \n",
+ " 24995877.0 | \n",
+ " 8471.0 | \n",
+ " 8498.0 | \n",
+ " 8300.65625 | \n",
+ " 8314.0 | \n",
+ " 1765026.0 | \n",
+ "
\n",
+ " \n",
+ " [2021-09-09, 2021-09-09) | \n",
+ " 300.820007 | \n",
+ " 302.140015 | \n",
+ " 297.000000 | \n",
+ " 297.250000 | \n",
+ " 19927000.0 | \n",
+ " 165.000000 | \n",
+ " 166.100006 | \n",
+ " 160.300003 | \n",
+ " 161.000000 | \n",
+ " 28379942.0 | \n",
+ " 8300.0 | \n",
+ " 8350.0 | \n",
+ " 8177.00000 | \n",
+ " 8204.0 | \n",
+ " 2589320.0 | \n",
+ "
\n",
+ " \n",
+ " [2021-09-10, 2021-09-10) | \n",
+ " 298.420013 | \n",
+ " 299.920013 | \n",
+ " 295.380005 | \n",
+ " 295.709991 | \n",
+ " 19633400.0 | \n",
+ " 163.699997 | \n",
+ " 168.199997 | \n",
+ " 163.399994 | \n",
+ " 167.899994 | \n",
+ " 27766697.0 | \n",
+ " 8171.0 | \n",
+ " 8208.0 | \n",
+ " 8135.00000 | \n",
+ " 8173.0 | \n",
+ " 1560501.0 | \n",
+ "
\n",
+ " \n",
+ " [2021-09-13, 2021-09-13) | \n",
+ " 297.549988 | \n",
+ " 298.540009 | \n",
+ " 294.079987 | \n",
+ " 296.989990 | \n",
+ " 23652900.0 | \n",
+ " 161.500000 | \n",
+ " 163.000000 | \n",
+ " 156.899994 | \n",
+ " 160.800003 | \n",
+ " 25905101.0 | \n",
+ " 8193.0 | \n",
+ " 8226.0 | \n",
+ " 8074.00000 | \n",
+ " 8098.0 | \n",
+ " 67933.0 | \n",
+ "
\n",
+ " \n",
+ " [2021-09-14, 2021-09-14) | \n",
+ " 299.559998 | \n",
+ " 301.390015 | \n",
+ " 298.100006 | \n",
+ " 299.790009 | \n",
+ " 21853400.0 | \n",
+ " 160.399994 | \n",
+ " 162.100006 | \n",
+ " 156.399994 | \n",
+ " 158.300003 | \n",
+ " 22361537.0 | \n",
+ " 8097.0 | \n",
+ " 8149.0 | \n",
+ " 8073.00000 | \n",
+ " 8130.0 | \n",
+ " 1969925.0 | \n",
"
\n",
" \n",
" ... | \n",
@@ -1764,12 +1775,12 @@
" ... | \n",
"
\n",
" \n",
- " [2022-04-18 19:35:00, 2022-04-18 19:40:00) | \n",
- " 279.290009 | \n",
- " 279.380005 | \n",
- " 278.920013 | \n",
- " 279.149994 | \n",
- " 225411.0 | \n",
+ " [2023-08-10 19:35:00, 2023-08-10 19:40:00) | \n",
+ " 322.540009 | \n",
+ " 322.940002 | \n",
+ " 322.480011 | \n",
+ " 322.670013 | \n",
+ " 125398.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -1782,12 +1793,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-18 19:40:00, 2022-04-18 19:45:00) | \n",
- " 279.149994 | \n",
- " 279.380005 | \n",
- " 278.880005 | \n",
- " 279.119995 | \n",
- " 238573.0 | \n",
+ " [2023-08-10 19:40:00, 2023-08-10 19:45:00) | \n",
+ " 322.684998 | \n",
+ " 322.984192 | \n",
+ " 322.450012 | \n",
+ " 322.540009 | \n",
+ " 163060.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -1800,12 +1811,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-18 19:45:00, 2022-04-18 19:50:00) | \n",
- " 279.130005 | \n",
- " 279.130005 | \n",
- " 278.489990 | \n",
- " 278.619995 | \n",
- " 404372.0 | \n",
+ " [2023-08-10 19:45:00, 2023-08-10 19:50:00) | \n",
+ " 322.519989 | \n",
+ " 323.269989 | \n",
+ " 322.329987 | \n",
+ " 322.690002 | \n",
+ " 170099.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -1818,12 +1829,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-18 19:50:00, 2022-04-18 19:55:00) | \n",
- " 278.600006 | \n",
- " 279.059998 | \n",
- " 278.339996 | \n",
- " 279.029999 | \n",
- " 420672.0 | \n",
+ " [2023-08-10 19:50:00, 2023-08-10 19:55:00) | \n",
+ " 322.660004 | \n",
+ " 323.383789 | \n",
+ " 322.549988 | \n",
+ " 323.089996 | \n",
+ " 192185.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -1836,12 +1847,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-18 19:55:00, 2022-04-18 20:00:00) | \n",
- " 279.440002 | \n",
- " 280.619995 | \n",
- " 279.399994 | \n",
- " 280.600006 | \n",
- " 868407.0 | \n",
+ " [2023-08-10 19:55:00, 2023-08-10 20:00:00) | \n",
+ " 323.040009 | \n",
+ " 323.299988 | \n",
+ " 322.820007 | \n",
+ " 322.950012 | \n",
+ " 747402.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -1855,95 +1866,109 @@
"
\n",
" \n",
"\n",
- "578 rows × 15 columns
\n",
+ "707 rows × 15 columns
\n",
""
],
"text/plain": [
"symbol MSFT \\\n",
" open high \n",
- "[2020-05-12, 2020-05-12) 186.800003 187.039993 \n",
- "[2020-05-13, 2020-05-13) 182.550003 184.050003 \n",
- "[2020-05-14, 2020-05-14) 177.539993 180.690002 \n",
- "[2020-05-15, 2020-05-15) 179.059998 187.059998 \n",
- "[2020-05-18, 2020-05-18) 185.750000 186.199997 \n",
+ "[2021-09-08, 2021-09-08) 299.779999 300.609985 \n",
+ "[2021-09-09, 2021-09-09) 300.820007 302.140015 \n",
+ "[2021-09-10, 2021-09-10) 298.420013 299.920013 \n",
+ "[2021-09-13, 2021-09-13) 297.549988 298.540009 \n",
+ "[2021-09-14, 2021-09-14) 299.559998 301.390015 \n",
"... ... ... \n",
- "[2022-04-18 19:35:00, 2022-04-18 19:40:00) 279.290009 279.380005 \n",
- "[2022-04-18 19:40:00, 2022-04-18 19:45:00) 279.149994 279.380005 \n",
- "[2022-04-18 19:45:00, 2022-04-18 19:50:00) 279.130005 279.130005 \n",
- "[2022-04-18 19:50:00, 2022-04-18 19:55:00) 278.600006 279.059998 \n",
- "[2022-04-18 19:55:00, 2022-04-18 20:00:00) 279.440002 280.619995 \n",
+ "[2023-08-10 19:35:00, 2023-08-10 19:40:00) 322.540009 322.940002 \n",
+ "[2023-08-10 19:40:00, 2023-08-10 19:45:00) 322.684998 322.984192 \n",
+ "[2023-08-10 19:45:00, 2023-08-10 19:50:00) 322.519989 323.269989 \n",
+ "[2023-08-10 19:50:00, 2023-08-10 19:55:00) 322.660004 323.383789 \n",
+ "[2023-08-10 19:55:00, 2023-08-10 20:00:00) 323.040009 323.299988 \n",
"\n",
"symbol \\\n",
" low close \n",
- "[2020-05-12, 2020-05-12) 182.300003 182.509995 \n",
- "[2020-05-13, 2020-05-13) 176.539993 179.750000 \n",
- "[2020-05-14, 2020-05-14) 175.679993 180.529999 \n",
- "[2020-05-15, 2020-05-15) 177.000000 183.160004 \n",
- "[2020-05-18, 2020-05-18) 183.960007 184.910004 \n",
+ "[2021-09-08, 2021-09-08) 297.470001 300.209991 \n",
+ "[2021-09-09, 2021-09-09) 297.000000 297.250000 \n",
+ "[2021-09-10, 2021-09-10) 295.380005 295.709991 \n",
+ "[2021-09-13, 2021-09-13) 294.079987 296.989990 \n",
+ "[2021-09-14, 2021-09-14) 298.100006 299.790009 \n",
"... ... ... \n",
- "[2022-04-18 19:35:00, 2022-04-18 19:40:00) 278.920013 279.149994 \n",
- "[2022-04-18 19:40:00, 2022-04-18 19:45:00) 278.880005 279.119995 \n",
- "[2022-04-18 19:45:00, 2022-04-18 19:50:00) 278.489990 278.619995 \n",
- "[2022-04-18 19:50:00, 2022-04-18 19:55:00) 278.339996 279.029999 \n",
- "[2022-04-18 19:55:00, 2022-04-18 20:00:00) 279.399994 280.600006 \n",
+ "[2023-08-10 19:35:00, 2023-08-10 19:40:00) 322.480011 322.670013 \n",
+ "[2023-08-10 19:40:00, 2023-08-10 19:45:00) 322.450012 322.540009 \n",
+ "[2023-08-10 19:45:00, 2023-08-10 19:50:00) 322.329987 322.690002 \n",
+ "[2023-08-10 19:50:00, 2023-08-10 19:55:00) 322.549988 323.089996 \n",
+ "[2023-08-10 19:55:00, 2023-08-10 20:00:00) 322.820007 322.950012 \n",
"\n",
"symbol 9988.HK \\\n",
" volume open \n",
- "[2020-05-12, 2020-05-12) 32038200.0 198.899994 \n",
- "[2020-05-13, 2020-05-13) 44711500.0 195.500000 \n",
- "[2020-05-14, 2020-05-14) 41873900.0 194.500000 \n",
- "[2020-05-15, 2020-05-15) 46610400.0 195.000000 \n",
- "[2020-05-18, 2020-05-18) 35264500.0 198.300003 \n",
+ "[2021-09-08, 2021-09-08) 15046800.0 170.000000 \n",
+ "[2021-09-09, 2021-09-09) 19927000.0 165.000000 \n",
+ "[2021-09-10, 2021-09-10) 19633400.0 163.699997 \n",
+ "[2021-09-13, 2021-09-13) 23652900.0 161.500000 \n",
+ "[2021-09-14, 2021-09-14) 21853400.0 160.399994 \n",
"... ... ... \n",
- "[2022-04-18 19:35:00, 2022-04-18 19:40:00) 225411.0 NaN \n",
- "[2022-04-18 19:40:00, 2022-04-18 19:45:00) 238573.0 NaN \n",
- "[2022-04-18 19:45:00, 2022-04-18 19:50:00) 404372.0 NaN \n",
- "[2022-04-18 19:50:00, 2022-04-18 19:55:00) 420672.0 NaN \n",
- "[2022-04-18 19:55:00, 2022-04-18 20:00:00) 868407.0 NaN \n",
+ "[2023-08-10 19:35:00, 2023-08-10 19:40:00) 125398.0 NaN \n",
+ "[2023-08-10 19:40:00, 2023-08-10 19:45:00) 163060.0 NaN \n",
+ "[2023-08-10 19:45:00, 2023-08-10 19:50:00) 170099.0 NaN \n",
+ "[2023-08-10 19:50:00, 2023-08-10 19:55:00) 192185.0 NaN \n",
+ "[2023-08-10 19:55:00, 2023-08-10 20:00:00) 747402.0 NaN \n",
"\n",
"symbol \\\n",
" high low \n",
- "[2020-05-12, 2020-05-12) 199.699997 197.199997 \n",
- "[2020-05-13, 2020-05-13) 197.399994 194.300003 \n",
- "[2020-05-14, 2020-05-14) 195.899994 194.100006 \n",
- "[2020-05-15, 2020-05-15) 197.100006 194.100006 \n",
- "[2020-05-18, 2020-05-18) 203.000000 197.399994 \n",
+ "[2021-09-08, 2021-09-08) 173.399994 169.300003 \n",
+ "[2021-09-09, 2021-09-09) 166.100006 160.300003 \n",
+ "[2021-09-10, 2021-09-10) 168.199997 163.399994 \n",
+ "[2021-09-13, 2021-09-13) 163.000000 156.899994 \n",
+ "[2021-09-14, 2021-09-14) 162.100006 156.399994 \n",
"... ... ... \n",
- "[2022-04-18 19:35:00, 2022-04-18 19:40:00) NaN NaN \n",
- "[2022-04-18 19:40:00, 2022-04-18 19:45:00) NaN NaN \n",
- "[2022-04-18 19:45:00, 2022-04-18 19:50:00) NaN NaN \n",
- "[2022-04-18 19:50:00, 2022-04-18 19:55:00) NaN NaN \n",
- "[2022-04-18 19:55:00, 2022-04-18 20:00:00) NaN NaN \n",
+ "[2023-08-10 19:35:00, 2023-08-10 19:40:00) NaN NaN \n",
+ "[2023-08-10 19:40:00, 2023-08-10 19:45:00) NaN NaN \n",
+ "[2023-08-10 19:45:00, 2023-08-10 19:50:00) NaN NaN \n",
+ "[2023-08-10 19:50:00, 2023-08-10 19:55:00) NaN NaN \n",
+ "[2023-08-10 19:55:00, 2023-08-10 20:00:00) NaN NaN \n",
"\n",
"symbol AZN.L \\\n",
" close volume open \n",
- "[2020-05-12, 2020-05-12) 199.699997 14037259.0 8690.0 \n",
- "[2020-05-13, 2020-05-13) 196.300003 26074457.0 8800.0 \n",
- "[2020-05-14, 2020-05-14) 194.500000 19248894.0 8998.0 \n",
- "[2020-05-15, 2020-05-15) 196.899994 16672799.0 8740.0 \n",
- "[2020-05-18, 2020-05-18) 203.000000 26818890.0 8849.0 \n",
+ "[2021-09-08, 2021-09-08) 170.899994 24995877.0 8471.0 \n",
+ "[2021-09-09, 2021-09-09) 161.000000 28379942.0 8300.0 \n",
+ "[2021-09-10, 2021-09-10) 167.899994 27766697.0 8171.0 \n",
+ "[2021-09-13, 2021-09-13) 160.800003 25905101.0 8193.0 \n",
+ "[2021-09-14, 2021-09-14) 158.300003 22361537.0 8097.0 \n",
"... ... ... ... \n",
- "[2022-04-18 19:35:00, 2022-04-18 19:40:00) NaN NaN NaN \n",
- "[2022-04-18 19:40:00, 2022-04-18 19:45:00) NaN NaN NaN \n",
- "[2022-04-18 19:45:00, 2022-04-18 19:50:00) NaN NaN NaN \n",
- "[2022-04-18 19:50:00, 2022-04-18 19:55:00) NaN NaN NaN \n",
- "[2022-04-18 19:55:00, 2022-04-18 20:00:00) NaN NaN NaN \n",
+ "[2023-08-10 19:35:00, 2023-08-10 19:40:00) NaN NaN NaN \n",
+ "[2023-08-10 19:40:00, 2023-08-10 19:45:00) NaN NaN NaN \n",
+ "[2023-08-10 19:45:00, 2023-08-10 19:50:00) NaN NaN NaN \n",
+ "[2023-08-10 19:50:00, 2023-08-10 19:55:00) NaN NaN NaN \n",
+ "[2023-08-10 19:55:00, 2023-08-10 20:00:00) NaN NaN NaN \n",
"\n",
- "symbol \n",
- " high low close volume \n",
- "[2020-05-12, 2020-05-12) 8871.0 8672.0 8856.0 1636800.0 \n",
- "[2020-05-13, 2020-05-13) 9056.0 8775.0 9004.0 2148708.0 \n",
- "[2020-05-14, 2020-05-14) 9027.0 8705.0 8765.0 2375656.0 \n",
- "[2020-05-15, 2020-05-15) 8796.0 8536.0 8671.0 2410210.0 \n",
- "[2020-05-18, 2020-05-18) 8898.0 8729.0 8790.0 2098697.0 \n",
- "... ... ... ... ... \n",
- "[2022-04-18 19:35:00, 2022-04-18 19:40:00) NaN NaN NaN NaN \n",
- "[2022-04-18 19:40:00, 2022-04-18 19:45:00) NaN NaN NaN NaN \n",
- "[2022-04-18 19:45:00, 2022-04-18 19:50:00) NaN NaN NaN NaN \n",
- "[2022-04-18 19:50:00, 2022-04-18 19:55:00) NaN NaN NaN NaN \n",
- "[2022-04-18 19:55:00, 2022-04-18 20:00:00) NaN NaN NaN NaN \n",
+ "symbol \\\n",
+ " high low close \n",
+ "[2021-09-08, 2021-09-08) 8498.0 8300.65625 8314.0 \n",
+ "[2021-09-09, 2021-09-09) 8350.0 8177.00000 8204.0 \n",
+ "[2021-09-10, 2021-09-10) 8208.0 8135.00000 8173.0 \n",
+ "[2021-09-13, 2021-09-13) 8226.0 8074.00000 8098.0 \n",
+ "[2021-09-14, 2021-09-14) 8149.0 8073.00000 8130.0 \n",
+ "... ... ... ... \n",
+ "[2023-08-10 19:35:00, 2023-08-10 19:40:00) NaN NaN NaN \n",
+ "[2023-08-10 19:40:00, 2023-08-10 19:45:00) NaN NaN NaN \n",
+ "[2023-08-10 19:45:00, 2023-08-10 19:50:00) NaN NaN NaN \n",
+ "[2023-08-10 19:50:00, 2023-08-10 19:55:00) NaN NaN NaN \n",
+ "[2023-08-10 19:55:00, 2023-08-10 20:00:00) NaN NaN NaN \n",
+ "\n",
+ "symbol \n",
+ " volume \n",
+ "[2021-09-08, 2021-09-08) 1765026.0 \n",
+ "[2021-09-09, 2021-09-09) 2589320.0 \n",
+ "[2021-09-10, 2021-09-10) 1560501.0 \n",
+ "[2021-09-13, 2021-09-13) 67933.0 \n",
+ "[2021-09-14, 2021-09-14) 1969925.0 \n",
+ "... ... \n",
+ "[2023-08-10 19:35:00, 2023-08-10 19:40:00) NaN \n",
+ "[2023-08-10 19:40:00, 2023-08-10 19:45:00) NaN \n",
+ "[2023-08-10 19:45:00, 2023-08-10 19:50:00) NaN \n",
+ "[2023-08-10 19:50:00, 2023-08-10 19:55:00) NaN \n",
+ "[2023-08-10 19:55:00, 2023-08-10 20:00:00) NaN \n",
"\n",
- "[578 rows x 15 columns]"
+ "[707 rows x 15 columns]"
]
},
"execution_count": 23,
@@ -1964,7 +1989,7 @@
{
"data": {
"text/plain": [
- ""
+ "zoneinfo.ZoneInfo(key='UTC')"
]
},
"execution_count": 24,
@@ -2041,94 +2066,94 @@
" \n",
" \n",
" \n",
- " [2022-04-18 09:30:00, 2022-04-18 09:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.760010 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 09:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.500000 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:35:00, 2022-04-18 09:40:00) | \n",
- " 280.760010 | \n",
- " 280.910004 | \n",
- " 279.549988 | \n",
- " 279.553802 | \n",
- " 550448.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:35:00, 2023-08-10 09:40:00) | \n",
+ " 326.149994 | \n",
+ " 326.636505 | \n",
+ " 325.489990 | \n",
+ " 326.160004 | \n",
+ " 365134.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11122.0 | \n",
+ " 11134.0 | \n",
+ " 11112.0 | \n",
+ " 11112.0 | \n",
+ " 12659.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:40:00, 2022-04-18 09:45:00) | \n",
- " 279.549988 | \n",
- " 280.339996 | \n",
- " 279.200012 | \n",
- " 279.769989 | \n",
- " 532356.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:40:00, 2023-08-10 09:45:00) | \n",
+ " 326.200012 | \n",
+ " 326.570007 | \n",
+ " 325.109985 | \n",
+ " 325.340088 | \n",
+ " 345965.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11114.0 | \n",
+ " 11138.0 | \n",
+ " 11114.0 | \n",
+ " 11122.0 | \n",
+ " 15176.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:45:00, 2022-04-18 09:50:00) | \n",
- " 279.869995 | \n",
- " 279.869995 | \n",
- " 279.010010 | \n",
- " 279.568909 | \n",
- " 379377.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:45:00, 2023-08-10 09:50:00) | \n",
+ " 325.450012 | \n",
+ " 326.510010 | \n",
+ " 325.450012 | \n",
+ " 326.390015 | \n",
+ " 304905.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11124.0 | \n",
+ " 11138.0 | \n",
+ " 11118.0 | \n",
+ " 11120.0 | \n",
+ " 9613.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:50:00, 2022-04-18 09:55:00) | \n",
- " 279.619995 | \n",
- " 279.785004 | \n",
- " 278.799988 | \n",
- " 279.070007 | \n",
- " 332695.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:50:00, 2023-08-10 09:55:00) | \n",
+ " 326.399994 | \n",
+ " 326.579987 | \n",
+ " 325.950012 | \n",
+ " 326.440002 | \n",
+ " 325788.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11120.0 | \n",
+ " 11140.0 | \n",
+ " 11116.0 | \n",
+ " 11134.0 | \n",
+ " 5078.0 | \n",
"
\n",
" \n",
" ... | \n",
@@ -2149,12 +2174,12 @@
" ... | \n",
"
\n",
" \n",
- " [2022-04-19 15:35:00, 2022-04-19 15:40:00) | \n",
- " 285.589996 | \n",
- " 286.109985 | \n",
- " 285.579987 | \n",
- " 285.920013 | \n",
- " 354635.0 | \n",
+ " [2023-08-11 15:35:00, 2023-08-11 15:40:00) | \n",
+ " 321.720001 | \n",
+ " 321.829987 | \n",
+ " 321.279999 | \n",
+ " 321.353210 | \n",
+ " 165967.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -2167,12 +2192,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-19 15:40:00, 2022-04-19 15:45:00) | \n",
- " 285.929993 | \n",
- " 285.929993 | \n",
- " 285.190002 | \n",
- " 285.260010 | \n",
- " 351330.0 | \n",
+ " [2023-08-11 15:40:00, 2023-08-11 15:45:00) | \n",
+ " 321.339996 | \n",
+ " 321.429993 | \n",
+ " 321.179993 | \n",
+ " 321.338013 | \n",
+ " 217661.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -2185,12 +2210,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-19 15:45:00, 2022-04-19 15:50:00) | \n",
- " 285.260010 | \n",
- " 285.380005 | \n",
- " 284.750000 | \n",
- " 285.230011 | \n",
- " 382935.0 | \n",
+ " [2023-08-11 15:45:00, 2023-08-11 15:50:00) | \n",
+ " 321.350006 | \n",
+ " 321.399994 | \n",
+ " 320.910004 | \n",
+ " 321.179993 | \n",
+ " 241240.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -2203,12 +2228,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-19 15:50:00, 2022-04-19 15:55:00) | \n",
- " 285.234985 | \n",
- " 286.170013 | \n",
- " 285.234985 | \n",
- " 285.320007 | \n",
- " 697921.0 | \n",
+ " [2023-08-11 15:50:00, 2023-08-11 15:55:00) | \n",
+ " 321.190002 | \n",
+ " 321.440002 | \n",
+ " 320.859985 | \n",
+ " 320.910004 | \n",
+ " 389375.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -2221,12 +2246,12 @@
" NaN | \n",
"
\n",
" \n",
- " [2022-04-19 15:55:00, 2022-04-19 16:00:00) | \n",
- " 285.309998 | \n",
- " 285.809998 | \n",
- " 284.500000 | \n",
- " 285.380005 | \n",
- " 1235980.0 | \n",
+ " [2023-08-11 15:55:00, 2023-08-11 16:00:00) | \n",
+ " 320.894989 | \n",
+ " 321.269989 | \n",
+ " 320.589996 | \n",
+ " 321.089996 | \n",
+ " 1342284.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
@@ -2246,59 +2271,73 @@
"text/plain": [
"symbol MSFT \\\n",
" open high \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 281.899994 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 280.760010 280.910004 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 279.549988 280.339996 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 279.869995 279.869995 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 279.619995 279.785004 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 326.015015 326.890015 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 326.149994 326.636505 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 326.200012 326.570007 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 325.450012 326.510010 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 326.399994 326.579987 \n",
"... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 285.589996 286.109985 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 285.929993 285.929993 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 285.260010 285.380005 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 285.234985 286.170013 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 285.309998 285.809998 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 321.720001 321.829987 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 321.339996 321.429993 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 321.350006 321.399994 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 321.190002 321.440002 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 320.894989 321.269989 \n",
"\n",
"symbol \\\n",
" low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 280.760010 1282050.0 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 279.549988 279.553802 550448.0 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 279.200012 279.769989 532356.0 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 279.010010 279.568909 379377.0 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 278.799988 279.070007 332695.0 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 325.500000 326.189911 1094887.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 325.489990 326.160004 365134.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 325.109985 325.340088 345965.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 325.450012 326.390015 304905.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 325.950012 326.440002 325788.0 \n",
"... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 285.579987 285.920013 354635.0 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 285.190002 285.260010 351330.0 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 284.750000 285.230011 382935.0 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 285.234985 285.320007 697921.0 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 284.500000 285.380005 1235980.0 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 321.279999 321.353210 165967.0 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 321.179993 321.338013 217661.0 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 320.910004 321.179993 241240.0 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 320.859985 320.910004 389375.0 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 320.589996 321.089996 1342284.0 \n",
"\n",
"symbol 9988.HK \\\n",
" open high low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) NaN NaN NaN NaN NaN \n",
"... ... ... .. ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) NaN NaN NaN NaN NaN \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) NaN NaN NaN NaN NaN \n",
"\n",
- "symbol AZN.L \n",
- " open high low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) NaN NaN NaN NaN NaN \n",
- "... ... ... .. ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) NaN NaN NaN NaN NaN \n",
+ "symbol AZN.L \\\n",
+ " open high low \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 11118.0 11132.0 11114.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 11122.0 11134.0 11112.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 11114.0 11138.0 11114.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 11124.0 11138.0 11118.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 11120.0 11140.0 11116.0 \n",
+ "... ... ... ... \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) NaN NaN NaN \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) NaN NaN NaN \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) NaN NaN NaN \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) NaN NaN NaN \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) NaN NaN NaN \n",
+ "\n",
+ "symbol \n",
+ " close volume \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 11120.0 8815.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 11112.0 12659.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 11122.0 15176.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 11120.0 9613.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 11134.0 5078.0 \n",
+ "... ... ... \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) NaN NaN \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) NaN NaN \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) NaN NaN \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) NaN NaN \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) NaN NaN \n",
"\n",
"[288 rows x 15 columns]"
]
@@ -2375,94 +2414,94 @@
" \n",
"
\n",
" \n",
- " [2022-04-18 09:30:00, 2022-04-18 09:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.760010 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 09:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.500000 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:35:00, 2022-04-18 09:40:00) | \n",
- " 280.760010 | \n",
- " 280.910004 | \n",
- " 279.549988 | \n",
- " 279.553802 | \n",
- " 550448.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:35:00, 2023-08-10 09:40:00) | \n",
+ " 326.149994 | \n",
+ " 326.636505 | \n",
+ " 325.489990 | \n",
+ " 326.160004 | \n",
+ " 365134.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11122.0 | \n",
+ " 11134.0 | \n",
+ " 11112.0 | \n",
+ " 11112.0 | \n",
+ " 12659.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:40:00, 2022-04-18 09:45:00) | \n",
- " 279.549988 | \n",
- " 280.339996 | \n",
- " 279.200012 | \n",
- " 279.769989 | \n",
- " 532356.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:40:00, 2023-08-10 09:45:00) | \n",
+ " 326.200012 | \n",
+ " 326.570007 | \n",
+ " 325.109985 | \n",
+ " 325.340088 | \n",
+ " 345965.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11114.0 | \n",
+ " 11138.0 | \n",
+ " 11114.0 | \n",
+ " 11122.0 | \n",
+ " 15176.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:45:00, 2022-04-18 09:50:00) | \n",
- " 279.869995 | \n",
- " 279.869995 | \n",
- " 279.010010 | \n",
- " 279.568909 | \n",
- " 379377.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:45:00, 2023-08-10 09:50:00) | \n",
+ " 325.450012 | \n",
+ " 326.510010 | \n",
+ " 325.450012 | \n",
+ " 326.390015 | \n",
+ " 304905.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11124.0 | \n",
+ " 11138.0 | \n",
+ " 11118.0 | \n",
+ " 11120.0 | \n",
+ " 9613.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:50:00, 2022-04-18 09:55:00) | \n",
- " 279.619995 | \n",
- " 279.785004 | \n",
- " 278.799988 | \n",
- " 279.070007 | \n",
- " 332695.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:50:00, 2023-08-10 09:55:00) | \n",
+ " 326.399994 | \n",
+ " 326.579987 | \n",
+ " 325.950012 | \n",
+ " 326.440002 | \n",
+ " 325788.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11120.0 | \n",
+ " 11140.0 | \n",
+ " 11116.0 | \n",
+ " 11134.0 | \n",
+ " 5078.0 | \n",
"
\n",
" \n",
" ... | \n",
@@ -2483,93 +2522,93 @@
" ... | \n",
"
\n",
" \n",
- " [2022-04-19 15:35:00, 2022-04-19 15:40:00) | \n",
- " 285.589996 | \n",
- " 286.109985 | \n",
- " 285.579987 | \n",
- " 285.920013 | \n",
- " 354635.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:35:00, 2023-08-11 15:40:00) | \n",
+ " 321.720001 | \n",
+ " 321.829987 | \n",
+ " 321.279999 | \n",
+ " 321.353210 | \n",
+ " 165967.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
- " [2022-04-19 15:40:00, 2022-04-19 15:45:00) | \n",
- " 285.929993 | \n",
- " 285.929993 | \n",
- " 285.190002 | \n",
- " 285.260010 | \n",
- " 351330.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:40:00, 2023-08-11 15:45:00) | \n",
+ " 321.339996 | \n",
+ " 321.429993 | \n",
+ " 321.179993 | \n",
+ " 321.338013 | \n",
+ " 217661.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
- " [2022-04-19 15:45:00, 2022-04-19 15:50:00) | \n",
- " 285.260010 | \n",
- " 285.380005 | \n",
- " 284.750000 | \n",
- " 285.230011 | \n",
- " 382935.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:45:00, 2023-08-11 15:50:00) | \n",
+ " 321.350006 | \n",
+ " 321.399994 | \n",
+ " 320.910004 | \n",
+ " 321.179993 | \n",
+ " 241240.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
- " [2022-04-19 15:50:00, 2022-04-19 15:55:00) | \n",
- " 285.234985 | \n",
- " 286.170013 | \n",
- " 285.234985 | \n",
- " 285.320007 | \n",
- " 697921.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:50:00, 2023-08-11 15:55:00) | \n",
+ " 321.190002 | \n",
+ " 321.440002 | \n",
+ " 320.859985 | \n",
+ " 320.910004 | \n",
+ " 389375.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
- " [2022-04-19 15:55:00, 2022-04-19 16:00:00) | \n",
- " 285.309998 | \n",
- " 285.809998 | \n",
- " 284.500000 | \n",
- " 285.380005 | \n",
- " 1235980.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:55:00, 2023-08-11 16:00:00) | \n",
+ " 320.894989 | \n",
+ " 321.269989 | \n",
+ " 320.589996 | \n",
+ " 321.089996 | \n",
+ " 1342284.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
@@ -2580,73 +2619,73 @@
"text/plain": [
"symbol MSFT \\\n",
" open high \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 281.899994 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 280.760010 280.910004 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 279.549988 280.339996 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 279.869995 279.869995 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 279.619995 279.785004 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 326.015015 326.890015 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 326.149994 326.636505 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 326.200012 326.570007 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 325.450012 326.510010 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 326.399994 326.579987 \n",
"... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 285.589996 286.109985 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 285.929993 285.929993 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 285.260010 285.380005 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 285.234985 286.170013 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 285.309998 285.809998 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 321.720001 321.829987 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 321.339996 321.429993 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 321.350006 321.399994 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 321.190002 321.440002 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 320.894989 321.269989 \n",
"\n",
"symbol \\\n",
" low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 280.760010 1282050.0 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 279.549988 279.553802 550448.0 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 279.200012 279.769989 532356.0 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 279.010010 279.568909 379377.0 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 278.799988 279.070007 332695.0 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 325.500000 326.189911 1094887.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 325.489990 326.160004 365134.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 325.109985 325.340088 345965.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 325.450012 326.390015 304905.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 325.950012 326.440002 325788.0 \n",
"... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 285.579987 285.920013 354635.0 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 285.190002 285.260010 351330.0 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 284.750000 285.230011 382935.0 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 285.234985 285.320007 697921.0 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 284.500000 285.380005 1235980.0 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 321.279999 321.353210 165967.0 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 321.179993 321.338013 217661.0 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 320.910004 321.179993 241240.0 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 320.859985 320.910004 389375.0 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 320.589996 321.089996 1342284.0 \n",
"\n",
"symbol 9988.HK \\\n",
" open high low \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) NaN NaN NaN \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) NaN NaN NaN \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) NaN NaN NaN \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) NaN NaN NaN \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) NaN NaN NaN \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) NaN NaN NaN \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) NaN NaN NaN \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) NaN NaN NaN \n",
"... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 91.400002 91.400002 91.400002 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 91.400002 91.400002 91.400002 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 91.400002 91.400002 91.400002 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 91.400002 91.400002 91.400002 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 91.400002 91.400002 91.400002 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 95.300003 95.300003 95.300003 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 95.300003 95.300003 95.300003 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 95.300003 95.300003 95.300003 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 95.300003 95.300003 95.300003 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 95.300003 95.300003 95.300003 \n",
"\n",
"symbol AZN.L \\\n",
" close volume open \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) NaN NaN NaN \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) NaN NaN NaN \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) NaN NaN NaN \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) NaN NaN 11118.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) NaN NaN 11122.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) NaN NaN 11114.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) NaN NaN 11124.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) NaN NaN 11120.0 \n",
"... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 91.400002 0.0 10506.0 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 91.400002 0.0 10506.0 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 91.400002 0.0 10506.0 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 91.400002 0.0 10506.0 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 91.400002 0.0 10506.0 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 95.300003 0.0 11022.0 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 95.300003 0.0 11022.0 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 95.300003 0.0 11022.0 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 95.300003 0.0 11022.0 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 95.300003 0.0 11022.0 \n",
"\n",
- "symbol \n",
- " high low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN NaN \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) NaN NaN NaN NaN \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) NaN NaN NaN NaN \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) NaN NaN NaN NaN \n",
- "... ... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 10506.0 10506.0 10506.0 0.0 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 10506.0 10506.0 10506.0 0.0 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 10506.0 10506.0 10506.0 0.0 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 10506.0 10506.0 10506.0 0.0 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 10506.0 10506.0 10506.0 0.0 \n",
+ "symbol \n",
+ " high low close volume \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 11132.0 11114.0 11120.0 8815.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 11134.0 11112.0 11112.0 12659.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 11138.0 11114.0 11122.0 15176.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 11138.0 11118.0 11120.0 9613.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 11140.0 11116.0 11134.0 5078.0 \n",
+ "... ... ... ... ... \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 11022.0 11022.0 11022.0 0.0 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 11022.0 11022.0 11022.0 0.0 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 11022.0 11022.0 11022.0 0.0 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 11022.0 11022.0 11022.0 0.0 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 11022.0 11022.0 11022.0 0.0 \n",
"\n",
"[288 rows x 15 columns]"
]
@@ -2711,94 +2750,94 @@
" \n",
" \n",
" \n",
- " [2022-04-18 09:30:00, 2022-04-18 09:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.760010 | \n",
- " 1282050.0 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 0.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 0.0 | \n",
- "
\n",
- " \n",
- " [2022-04-18 09:35:00, 2022-04-18 09:40:00) | \n",
- " 280.760010 | \n",
- " 280.910004 | \n",
- " 279.549988 | \n",
- " 279.553802 | \n",
- " 550448.0 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 0.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 0.0 | \n",
- "
\n",
- " \n",
- " [2022-04-18 09:40:00, 2022-04-18 09:45:00) | \n",
- " 279.549988 | \n",
- " 280.339996 | \n",
- " 279.200012 | \n",
- " 279.769989 | \n",
- " 532356.0 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 09:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.500000 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
" 0.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
+ "
\n",
+ " \n",
+ " [2023-08-10 09:35:00, 2023-08-10 09:40:00) | \n",
+ " 326.149994 | \n",
+ " 326.636505 | \n",
+ " 325.489990 | \n",
+ " 326.160004 | \n",
+ " 365134.0 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
" 0.0 | \n",
- "
\n",
- " \n",
- " [2022-04-18 09:45:00, 2022-04-18 09:50:00) | \n",
- " 279.869995 | \n",
- " 279.869995 | \n",
- " 279.010010 | \n",
- " 279.568909 | \n",
- " 379377.0 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
+ " 11122.0 | \n",
+ " 11134.0 | \n",
+ " 11112.0 | \n",
+ " 11112.0 | \n",
+ " 12659.0 | \n",
+ "
\n",
+ " \n",
+ " [2023-08-10 09:40:00, 2023-08-10 09:45:00) | \n",
+ " 326.200012 | \n",
+ " 326.570007 | \n",
+ " 325.109985 | \n",
+ " 325.340088 | \n",
+ " 345965.0 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
" 0.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
+ " 11114.0 | \n",
+ " 11138.0 | \n",
+ " 11114.0 | \n",
+ " 11122.0 | \n",
+ " 15176.0 | \n",
+ "
\n",
+ " \n",
+ " [2023-08-10 09:45:00, 2023-08-10 09:50:00) | \n",
+ " 325.450012 | \n",
+ " 326.510010 | \n",
+ " 325.450012 | \n",
+ " 326.390015 | \n",
+ " 304905.0 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
" 0.0 | \n",
- "
\n",
- " \n",
- " [2022-04-18 09:50:00, 2022-04-18 09:55:00) | \n",
- " 279.619995 | \n",
- " 279.785004 | \n",
- " 278.799988 | \n",
- " 279.070007 | \n",
- " 332695.0 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 92.650002 | \n",
- " 0.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
- " 10452.0 | \n",
+ " 11124.0 | \n",
+ " 11138.0 | \n",
+ " 11118.0 | \n",
+ " 11120.0 | \n",
+ " 9613.0 | \n",
+ "
\n",
+ " \n",
+ " [2023-08-10 09:50:00, 2023-08-10 09:55:00) | \n",
+ " 326.399994 | \n",
+ " 326.579987 | \n",
+ " 325.950012 | \n",
+ " 326.440002 | \n",
+ " 325788.0 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
+ " 97.099998 | \n",
" 0.0 | \n",
+ " 11120.0 | \n",
+ " 11140.0 | \n",
+ " 11116.0 | \n",
+ " 11134.0 | \n",
+ " 5078.0 | \n",
"
\n",
" \n",
" ... | \n",
@@ -2819,93 +2858,93 @@
" ... | \n",
"
\n",
" \n",
- " [2022-04-19 15:35:00, 2022-04-19 15:40:00) | \n",
- " 285.589996 | \n",
- " 286.109985 | \n",
- " 285.579987 | \n",
- " 285.920013 | \n",
- " 354635.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:35:00, 2023-08-11 15:40:00) | \n",
+ " 321.720001 | \n",
+ " 321.829987 | \n",
+ " 321.279999 | \n",
+ " 321.353210 | \n",
+ " 165967.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
- " [2022-04-19 15:40:00, 2022-04-19 15:45:00) | \n",
- " 285.929993 | \n",
- " 285.929993 | \n",
- " 285.190002 | \n",
- " 285.260010 | \n",
- " 351330.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:40:00, 2023-08-11 15:45:00) | \n",
+ " 321.339996 | \n",
+ " 321.429993 | \n",
+ " 321.179993 | \n",
+ " 321.338013 | \n",
+ " 217661.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
- " [2022-04-19 15:45:00, 2022-04-19 15:50:00) | \n",
- " 285.260010 | \n",
- " 285.380005 | \n",
- " 284.750000 | \n",
- " 285.230011 | \n",
- " 382935.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:45:00, 2023-08-11 15:50:00) | \n",
+ " 321.350006 | \n",
+ " 321.399994 | \n",
+ " 320.910004 | \n",
+ " 321.179993 | \n",
+ " 241240.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
- " [2022-04-19 15:50:00, 2022-04-19 15:55:00) | \n",
- " 285.234985 | \n",
- " 286.170013 | \n",
- " 285.234985 | \n",
- " 285.320007 | \n",
- " 697921.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:50:00, 2023-08-11 15:55:00) | \n",
+ " 321.190002 | \n",
+ " 321.440002 | \n",
+ " 320.859985 | \n",
+ " 320.910004 | \n",
+ " 389375.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
- " [2022-04-19 15:55:00, 2022-04-19 16:00:00) | \n",
- " 285.309998 | \n",
- " 285.809998 | \n",
- " 284.500000 | \n",
- " 285.380005 | \n",
- " 1235980.0 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
- " 91.400002 | \n",
+ " [2023-08-11 15:55:00, 2023-08-11 16:00:00) | \n",
+ " 320.894989 | \n",
+ " 321.269989 | \n",
+ " 320.589996 | \n",
+ " 321.089996 | \n",
+ " 1342284.0 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
+ " 95.300003 | \n",
" 0.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
- " 10506.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
+ " 11022.0 | \n",
" 0.0 | \n",
"
\n",
" \n",
@@ -2916,73 +2955,73 @@
"text/plain": [
"symbol MSFT \\\n",
" open high \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 281.899994 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 280.760010 280.910004 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 279.549988 280.339996 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 279.869995 279.869995 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 279.619995 279.785004 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 326.015015 326.890015 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 326.149994 326.636505 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 326.200012 326.570007 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 325.450012 326.510010 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 326.399994 326.579987 \n",
"... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 285.589996 286.109985 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 285.929993 285.929993 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 285.260010 285.380005 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 285.234985 286.170013 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 285.309998 285.809998 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 321.720001 321.829987 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 321.339996 321.429993 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 321.350006 321.399994 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 321.190002 321.440002 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 320.894989 321.269989 \n",
"\n",
"symbol \\\n",
" low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 280.760010 1282050.0 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 279.549988 279.553802 550448.0 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 279.200012 279.769989 532356.0 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 279.010010 279.568909 379377.0 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 278.799988 279.070007 332695.0 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 325.500000 326.189911 1094887.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 325.489990 326.160004 365134.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 325.109985 325.340088 345965.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 325.450012 326.390015 304905.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 325.950012 326.440002 325788.0 \n",
"... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 285.579987 285.920013 354635.0 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 285.190002 285.260010 351330.0 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 284.750000 285.230011 382935.0 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 285.234985 285.320007 697921.0 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 284.500000 285.380005 1235980.0 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 321.279999 321.353210 165967.0 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 321.179993 321.338013 217661.0 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 320.910004 321.179993 241240.0 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 320.859985 320.910004 389375.0 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 320.589996 321.089996 1342284.0 \n",
"\n",
"symbol 9988.HK \\\n",
" open high low \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 92.650002 92.650002 92.650002 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 92.650002 92.650002 92.650002 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 92.650002 92.650002 92.650002 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 92.650002 92.650002 92.650002 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 92.650002 92.650002 92.650002 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 97.099998 97.099998 97.099998 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 97.099998 97.099998 97.099998 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 97.099998 97.099998 97.099998 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 97.099998 97.099998 97.099998 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 97.099998 97.099998 97.099998 \n",
"... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 91.400002 91.400002 91.400002 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 91.400002 91.400002 91.400002 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 91.400002 91.400002 91.400002 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 91.400002 91.400002 91.400002 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 91.400002 91.400002 91.400002 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 95.300003 95.300003 95.300003 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 95.300003 95.300003 95.300003 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 95.300003 95.300003 95.300003 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 95.300003 95.300003 95.300003 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 95.300003 95.300003 95.300003 \n",
"\n",
"symbol AZN.L \\\n",
" close volume open \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 92.650002 0.0 10452.0 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 92.650002 0.0 10452.0 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 92.650002 0.0 10452.0 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 92.650002 0.0 10452.0 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 92.650002 0.0 10452.0 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 97.099998 0.0 11118.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 97.099998 0.0 11122.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 97.099998 0.0 11114.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 97.099998 0.0 11124.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 97.099998 0.0 11120.0 \n",
"... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 91.400002 0.0 10506.0 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 91.400002 0.0 10506.0 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 91.400002 0.0 10506.0 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 91.400002 0.0 10506.0 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 91.400002 0.0 10506.0 \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 95.300003 0.0 11022.0 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 95.300003 0.0 11022.0 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 95.300003 0.0 11022.0 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 95.300003 0.0 11022.0 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 95.300003 0.0 11022.0 \n",
"\n",
- "symbol \n",
- " high low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 10452.0 10452.0 10452.0 0.0 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 10452.0 10452.0 10452.0 0.0 \n",
- "[2022-04-18 09:40:00, 2022-04-18 09:45:00) 10452.0 10452.0 10452.0 0.0 \n",
- "[2022-04-18 09:45:00, 2022-04-18 09:50:00) 10452.0 10452.0 10452.0 0.0 \n",
- "[2022-04-18 09:50:00, 2022-04-18 09:55:00) 10452.0 10452.0 10452.0 0.0 \n",
- "... ... ... ... ... \n",
- "[2022-04-19 15:35:00, 2022-04-19 15:40:00) 10506.0 10506.0 10506.0 0.0 \n",
- "[2022-04-19 15:40:00, 2022-04-19 15:45:00) 10506.0 10506.0 10506.0 0.0 \n",
- "[2022-04-19 15:45:00, 2022-04-19 15:50:00) 10506.0 10506.0 10506.0 0.0 \n",
- "[2022-04-19 15:50:00, 2022-04-19 15:55:00) 10506.0 10506.0 10506.0 0.0 \n",
- "[2022-04-19 15:55:00, 2022-04-19 16:00:00) 10506.0 10506.0 10506.0 0.0 \n",
+ "symbol \n",
+ " high low close volume \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 11132.0 11114.0 11120.0 8815.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 11134.0 11112.0 11112.0 12659.0 \n",
+ "[2023-08-10 09:40:00, 2023-08-10 09:45:00) 11138.0 11114.0 11122.0 15176.0 \n",
+ "[2023-08-10 09:45:00, 2023-08-10 09:50:00) 11138.0 11118.0 11120.0 9613.0 \n",
+ "[2023-08-10 09:50:00, 2023-08-10 09:55:00) 11140.0 11116.0 11134.0 5078.0 \n",
+ "... ... ... ... ... \n",
+ "[2023-08-11 15:35:00, 2023-08-11 15:40:00) 11022.0 11022.0 11022.0 0.0 \n",
+ "[2023-08-11 15:40:00, 2023-08-11 15:45:00) 11022.0 11022.0 11022.0 0.0 \n",
+ "[2023-08-11 15:45:00, 2023-08-11 15:50:00) 11022.0 11022.0 11022.0 0.0 \n",
+ "[2023-08-11 15:50:00, 2023-08-11 15:55:00) 11022.0 11022.0 11022.0 0.0 \n",
+ "[2023-08-11 15:55:00, 2023-08-11 16:00:00) 11022.0 11022.0 11022.0 0.0 \n",
"\n",
"[288 rows x 15 columns]"
]
@@ -3545,35 +3584,35 @@
" \n",
" \n",
" \n",
- " [2022-04-18 09:30:00, 2022-04-18 09:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.760010 | \n",
- " 1282050.0 | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 09:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.50000 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:35:00, 2022-04-18 09:40:00) | \n",
- " 280.760010 | \n",
- " 280.910004 | \n",
- " 279.549988 | \n",
- " 279.553802 | \n",
- " 550448.0 | \n",
+ " [2023-08-10 09:35:00, 2023-08-10 09:40:00) | \n",
+ " 326.149994 | \n",
+ " 326.636505 | \n",
+ " 325.48999 | \n",
+ " 326.160004 | \n",
+ " 365134.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol MSFT \\\n",
- " open high \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 281.899994 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 280.760010 280.910004 \n",
+ "symbol MSFT \\\n",
+ " open high low \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 326.015015 326.890015 325.50000 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 326.149994 326.636505 325.48999 \n",
"\n",
- "symbol \n",
- " low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 280.760010 1282050.0 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 279.549988 279.553802 550448.0 "
+ "symbol \n",
+ " close volume \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 326.189911 1094887.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 326.160004 365134.0 "
]
},
"execution_count": 31,
@@ -3624,30 +3663,30 @@
" \n",
" \n",
" \n",
- " 2022-04-05 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " 2023-07-31 | \n",
+ " 99.000000 | \n",
+ " 99.75 | \n",
+ " 96.800003 | \n",
+ " 97.500000 | \n",
+ " 74290881.0 | \n",
"
\n",
" \n",
- " 2022-04-06 | \n",
- " 109.099998 | \n",
- " 109.800003 | \n",
- " 107.099998 | \n",
- " 107.599998 | \n",
- " 46638063.0 | \n",
+ " 2023-08-01 | \n",
+ " 99.699997 | \n",
+ " 100.00 | \n",
+ " 96.900002 | \n",
+ " 97.849998 | \n",
+ " 53949339.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol 9988.HK \n",
- " open high low close volume\n",
- "2022-04-05 NaN NaN NaN NaN NaN\n",
- "2022-04-06 109.099998 109.800003 107.099998 107.599998 46638063.0"
+ "symbol 9988.HK \n",
+ " open high low close volume\n",
+ "2023-07-31 99.000000 99.75 96.800003 97.500000 74290881.0\n",
+ "2023-08-01 99.699997 100.00 96.900002 97.849998 53949339.0"
]
},
"execution_count": 32,
@@ -3733,65 +3772,65 @@
" \n",
" \n",
" \n",
- " [2022-04-18 09:30:00, 2022-04-18 09:35:00) | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.760010 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 09:35:00) | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.50000 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
- " [2022-04-18 09:35:00, 2022-04-18 09:40:00) | \n",
- " 280.760010 | \n",
- " 280.910004 | \n",
- " 279.549988 | \n",
- " 279.553802 | \n",
- " 550448.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " [2023-08-10 09:35:00, 2023-08-10 09:40:00) | \n",
+ " 326.149994 | \n",
+ " 326.636505 | \n",
+ " 325.48999 | \n",
+ " 326.160004 | \n",
+ " 365134.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11122.0 | \n",
+ " 11134.0 | \n",
+ " 11112.0 | \n",
+ " 11112.0 | \n",
+ " 12659.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol MSFT \\\n",
- " open high \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 281.899994 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 280.760010 280.910004 \n",
+ "symbol MSFT \\\n",
+ " open high low \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 326.015015 326.890015 325.50000 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 326.149994 326.636505 325.48999 \n",
"\n",
- "symbol \\\n",
- " low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) 278.910004 280.760010 1282050.0 \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) 279.549988 279.553802 550448.0 \n",
+ "symbol 9988.HK \\\n",
+ " close volume open \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 326.189911 1094887.0 NaN \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 326.160004 365134.0 NaN \n",
"\n",
- "symbol 9988.HK \\\n",
- " open high low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN NaN NaN \n",
+ "symbol AZN.L \\\n",
+ " high low close volume open \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) NaN NaN NaN NaN 11118.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) NaN NaN NaN NaN 11122.0 \n",
"\n",
- "symbol AZN.L \n",
- " open high low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 09:35:00) NaN NaN NaN NaN NaN \n",
- "[2022-04-18 09:35:00, 2022-04-18 09:40:00) NaN NaN NaN NaN NaN "
+ "symbol \n",
+ " high low close volume \n",
+ "[2023-08-10 09:30:00, 2023-08-10 09:35:00) 11132.0 11114.0 11120.0 8815.0 \n",
+ "[2023-08-10 09:35:00, 2023-08-10 09:40:00) 11134.0 11112.0 11112.0 12659.0 "
]
},
"execution_count": 33,
@@ -3854,60 +3893,60 @@
" \n",
" \n",
" \n",
- " 2022-04-18 09:30:00-04:00 | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.760010 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " 2023-08-10 09:30:00-04:00 | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.50000 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
- " 2022-04-18 09:35:00-04:00 | \n",
- " 280.760010 | \n",
- " 280.910004 | \n",
- " 279.549988 | \n",
- " 279.553802 | \n",
- " 550448.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " 2023-08-10 09:35:00-04:00 | \n",
+ " 326.149994 | \n",
+ " 326.636505 | \n",
+ " 325.48999 | \n",
+ " 326.160004 | \n",
+ " 365134.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11122.0 | \n",
+ " 11134.0 | \n",
+ " 11112.0 | \n",
+ " 11112.0 | \n",
+ " 12659.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol MSFT \\\n",
- " open high low close \n",
- "2022-04-18 09:30:00-04:00 278.910004 281.899994 278.910004 280.760010 \n",
- "2022-04-18 09:35:00-04:00 280.760010 280.910004 279.549988 279.553802 \n",
+ "symbol MSFT \\\n",
+ " open high low close \n",
+ "2023-08-10 09:30:00-04:00 326.015015 326.890015 325.50000 326.189911 \n",
+ "2023-08-10 09:35:00-04:00 326.149994 326.636505 325.48999 326.160004 \n",
"\n",
- "symbol 9988.HK AZN.L \\\n",
- " volume open high low close volume open high \n",
- "2022-04-18 09:30:00-04:00 1282050.0 NaN NaN NaN NaN NaN NaN NaN \n",
- "2022-04-18 09:35:00-04:00 550448.0 NaN NaN NaN NaN NaN NaN NaN \n",
+ "symbol 9988.HK AZN.L \\\n",
+ " volume open high low close volume open \n",
+ "2023-08-10 09:30:00-04:00 1094887.0 NaN NaN NaN NaN NaN 11118.0 \n",
+ "2023-08-10 09:35:00-04:00 365134.0 NaN NaN NaN NaN NaN 11122.0 \n",
"\n",
- "symbol \n",
- " low close volume \n",
- "2022-04-18 09:30:00-04:00 NaN NaN NaN \n",
- "2022-04-18 09:35:00-04:00 NaN NaN NaN "
+ "symbol \n",
+ " high low close volume \n",
+ "2023-08-10 09:30:00-04:00 11132.0 11114.0 11120.0 8815.0 \n",
+ "2023-08-10 09:35:00-04:00 11134.0 11112.0 11112.0 12659.0 "
]
},
"execution_count": 34,
@@ -3970,60 +4009,60 @@
" \n",
" \n",
" \n",
- " 2022-04-18 09:35:00-04:00 | \n",
- " 278.910004 | \n",
- " 281.899994 | \n",
- " 278.910004 | \n",
- " 280.760010 | \n",
- " 1282050.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " 2023-08-10 09:35:00-04:00 | \n",
+ " 326.015015 | \n",
+ " 326.890015 | \n",
+ " 325.50000 | \n",
+ " 326.189911 | \n",
+ " 1094887.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11118.0 | \n",
+ " 11132.0 | \n",
+ " 11114.0 | \n",
+ " 11120.0 | \n",
+ " 8815.0 | \n",
"
\n",
" \n",
- " 2022-04-18 09:40:00-04:00 | \n",
- " 280.760010 | \n",
- " 280.910004 | \n",
- " 279.549988 | \n",
- " 279.553802 | \n",
- " 550448.0 | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
- " NaN | \n",
+ " 2023-08-10 09:40:00-04:00 | \n",
+ " 326.149994 | \n",
+ " 326.636505 | \n",
+ " 325.48999 | \n",
+ " 326.160004 | \n",
+ " 365134.0 | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
" NaN | \n",
+ " 11122.0 | \n",
+ " 11134.0 | \n",
+ " 11112.0 | \n",
+ " 11112.0 | \n",
+ " 12659.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol MSFT \\\n",
- " open high low close \n",
- "2022-04-18 09:35:00-04:00 278.910004 281.899994 278.910004 280.760010 \n",
- "2022-04-18 09:40:00-04:00 280.760010 280.910004 279.549988 279.553802 \n",
+ "symbol MSFT \\\n",
+ " open high low close \n",
+ "2023-08-10 09:35:00-04:00 326.015015 326.890015 325.50000 326.189911 \n",
+ "2023-08-10 09:40:00-04:00 326.149994 326.636505 325.48999 326.160004 \n",
"\n",
- "symbol 9988.HK AZN.L \\\n",
- " volume open high low close volume open high \n",
- "2022-04-18 09:35:00-04:00 1282050.0 NaN NaN NaN NaN NaN NaN NaN \n",
- "2022-04-18 09:40:00-04:00 550448.0 NaN NaN NaN NaN NaN NaN NaN \n",
+ "symbol 9988.HK AZN.L \\\n",
+ " volume open high low close volume open \n",
+ "2023-08-10 09:35:00-04:00 1094887.0 NaN NaN NaN NaN NaN 11118.0 \n",
+ "2023-08-10 09:40:00-04:00 365134.0 NaN NaN NaN NaN NaN 11122.0 \n",
"\n",
- "symbol \n",
- " low close volume \n",
- "2022-04-18 09:35:00-04:00 NaN NaN NaN \n",
- "2022-04-18 09:40:00-04:00 NaN NaN NaN "
+ "symbol \n",
+ " high low close volume \n",
+ "2023-08-10 09:35:00-04:00 11132.0 11114.0 11120.0 8815.0 \n",
+ "2023-08-10 09:40:00-04:00 11134.0 11112.0 11112.0 12659.0 "
]
},
"execution_count": 35,
@@ -4040,7 +4079,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "NOTE: If side is passed the the returned prices table will not have access to the `.pt` accessor."
+ "NOTE: If side is passed then the returned prices table will not have access to the `.pt` accessor."
]
},
{
@@ -4059,7 +4098,7 @@
"```\n",
"---------------------------------------------------------------------------\n",
"ValueError Traceback (most recent call last)\n",
- " in \n",
+ "c:\\market_prices\\docs\\tutorials\\other_get_options.ipynb Cell 62 line 1\n",
"----> 1 df.pt\n",
"\n",
"ValueError: PT accessor not available where index is a pd.DatatimeIndex with one or more indices that have a time component (Index must be pd.IntervalIndex if any indice includes a time component).\n",
@@ -4108,39 +4147,39 @@
" \n",
" \n",
" \n",
- " 2022-04-05 | \n",
- " 310.880005 | \n",
- " NaN | \n",
- " 10334.0 | \n",
+ " 2023-07-31 | \n",
+ " 335.920013 | \n",
+ " 97.500000 | \n",
+ " 11186.0 | \n",
"
\n",
" \n",
- " 2022-04-06 | \n",
- " 299.500000 | \n",
- " 107.599998 | \n",
- " 10462.0 | \n",
+ " 2023-08-01 | \n",
+ " 336.339996 | \n",
+ " 97.849998 | \n",
+ " 11196.0 | \n",
"
\n",
" \n",
- " 2022-04-07 | \n",
- " 301.369995 | \n",
- " 105.199997 | \n",
- " 10668.0 | \n",
+ " 2023-08-02 | \n",
+ " 327.500000 | \n",
+ " 95.150002 | \n",
+ " 11096.0 | \n",
"
\n",
" \n",
- " 2022-04-08 | \n",
- " 296.970001 | \n",
- " 103.800003 | \n",
- " 10930.0 | \n",
+ " 2023-08-03 | \n",
+ " 326.660004 | \n",
+ " 93.150002 | \n",
+ " 10900.0 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- "symbol MSFT 9988.HK AZN.L\n",
- "2022-04-05 310.880005 NaN 10334.0\n",
- "2022-04-06 299.500000 107.599998 10462.0\n",
- "2022-04-07 301.369995 105.199997 10668.0\n",
- "2022-04-08 296.970001 103.800003 10930.0"
+ "symbol MSFT 9988.HK AZN.L\n",
+ "2023-07-31 335.920013 97.500000 11186.0\n",
+ "2023-08-01 336.339996 97.849998 11196.0\n",
+ "2023-08-02 327.500000 95.150002 11096.0\n",
+ "2023-08-03 326.660004 93.150002 10900.0"
]
},
"execution_count": 37,
@@ -4216,28 +4255,28 @@
" \n",
" \n",
" \n",
- " [2022-04-18 09:30:00, 2022-04-18 10:30:00) | \n",
- " 278.910004 | \n",
- " 282.390015 | \n",
- " 278.790009 | \n",
- " 281.989990 | \n",
- " 5547464 | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 10:30:00) | \n",
+ " 326.015015 | \n",
+ " 328.260010 | \n",
+ " 325.109985 | \n",
+ " 326.250000 | \n",
+ " 4302172 | \n",
"
\n",
" \n",
- " [2022-04-18 10:30:00, 2022-04-18 11:30:00) | \n",
- " 281.989990 | \n",
- " 282.459991 | \n",
- " 279.750000 | \n",
- " 280.010010 | \n",
- " 2311993 | \n",
+ " [2023-08-10 10:30:00, 2023-08-10 11:30:00) | \n",
+ " 326.230011 | \n",
+ " 327.174988 | \n",
+ " 323.040009 | \n",
+ " 324.234985 | \n",
+ " 2950799 | \n",
"
\n",
" \n",
- " [2022-04-18 11:30:00, 2022-04-18 12:30:00) | \n",
- " 280.010010 | \n",
- " 280.160004 | \n",
- " 278.750000 | \n",
- " 278.799988 | \n",
- " 2029667 | \n",
+ " [2023-08-10 11:30:00, 2023-08-10 12:30:00) | \n",
+ " 324.260010 | \n",
+ " 325.049988 | \n",
+ " 322.079987 | \n",
+ " 323.250000 | \n",
+ " 2184202 | \n",
"
\n",
" \n",
"\n",
@@ -4246,15 +4285,15 @@
"text/plain": [
"symbol MSFT \\\n",
" open high \n",
- "[2022-04-18 09:30:00, 2022-04-18 10:30:00) 278.910004 282.390015 \n",
- "[2022-04-18 10:30:00, 2022-04-18 11:30:00) 281.989990 282.459991 \n",
- "[2022-04-18 11:30:00, 2022-04-18 12:30:00) 280.010010 280.160004 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 10:30:00) 326.015015 328.260010 \n",
+ "[2023-08-10 10:30:00, 2023-08-10 11:30:00) 326.230011 327.174988 \n",
+ "[2023-08-10 11:30:00, 2023-08-10 12:30:00) 324.260010 325.049988 \n",
"\n",
"symbol \n",
" low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 10:30:00) 278.790009 281.989990 5547464 \n",
- "[2022-04-18 10:30:00, 2022-04-18 11:30:00) 279.750000 280.010010 2311993 \n",
- "[2022-04-18 11:30:00, 2022-04-18 12:30:00) 278.750000 278.799988 2029667 "
+ "[2023-08-10 09:30:00, 2023-08-10 10:30:00) 325.109985 326.250000 4302172 \n",
+ "[2023-08-10 10:30:00, 2023-08-10 11:30:00) 323.040009 324.234985 2950799 \n",
+ "[2023-08-10 11:30:00, 2023-08-10 12:30:00) 322.079987 323.250000 2184202 "
]
},
"execution_count": 39,
@@ -4301,28 +4340,28 @@
" \n",
" \n",
" \n",
- " [2022-04-18 09:30:00, 2022-04-18 10:30:00) | \n",
- " 278.910004 | \n",
- " 282.390015 | \n",
- " 278.790009 | \n",
- " 281.989990 | \n",
- " 5547464 | \n",
+ " [2023-08-10 09:30:00, 2023-08-10 10:30:00) | \n",
+ " 326.015015 | \n",
+ " 328.260010 | \n",
+ " 325.109985 | \n",
+ " 326.250000 | \n",
+ " 4302172 | \n",
"
\n",
" \n",
- " [2022-04-18 10:30:00, 2022-04-18 11:30:00) | \n",
- " 281.989990 | \n",
- " 282.459991 | \n",
- " 279.750000 | \n",
- " 280.010010 | \n",
- " 2311993 | \n",
+ " [2023-08-10 10:30:00, 2023-08-10 11:30:00) | \n",
+ " 326.230011 | \n",
+ " 327.174988 | \n",
+ " 323.040009 | \n",
+ " 324.234985 | \n",
+ " 2950799 | \n",
"
\n",
" \n",
- " [2022-04-18 11:30:00, 2022-04-18 12:30:00) | \n",
- " 280.010010 | \n",
- " 280.160004 | \n",
- " 278.750000 | \n",
- " 278.799988 | \n",
- " 2029667 | \n",
+ " [2023-08-10 11:30:00, 2023-08-10 12:30:00) | \n",
+ " 324.260010 | \n",
+ " 325.049988 | \n",
+ " 322.079987 | \n",
+ " 323.250000 | \n",
+ " 2184202 | \n",
"
\n",
" \n",
"\n",
@@ -4330,14 +4369,14 @@
],
"text/plain": [
" open high \\\n",
- "[2022-04-18 09:30:00, 2022-04-18 10:30:00) 278.910004 282.390015 \n",
- "[2022-04-18 10:30:00, 2022-04-18 11:30:00) 281.989990 282.459991 \n",
- "[2022-04-18 11:30:00, 2022-04-18 12:30:00) 280.010010 280.160004 \n",
+ "[2023-08-10 09:30:00, 2023-08-10 10:30:00) 326.015015 328.260010 \n",
+ "[2023-08-10 10:30:00, 2023-08-10 11:30:00) 326.230011 327.174988 \n",
+ "[2023-08-10 11:30:00, 2023-08-10 12:30:00) 324.260010 325.049988 \n",
"\n",
" low close volume \n",
- "[2022-04-18 09:30:00, 2022-04-18 10:30:00) 278.790009 281.989990 5547464 \n",
- "[2022-04-18 10:30:00, 2022-04-18 11:30:00) 279.750000 280.010010 2311993 \n",
- "[2022-04-18 11:30:00, 2022-04-18 12:30:00) 278.750000 278.799988 2029667 "
+ "[2023-08-10 09:30:00, 2023-08-10 10:30:00) 325.109985 326.250000 4302172 \n",
+ "[2023-08-10 10:30:00, 2023-08-10 11:30:00) 323.040009 324.234985 2950799 \n",
+ "[2023-08-10 11:30:00, 2023-08-10 12:30:00) 322.079987 323.250000 2184202 "
]
},
"execution_count": 40,
@@ -4359,9 +4398,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "mkt_prices 3.8.2",
+ "display_name": "market_prices_ve_39",
"language": "python",
- "name": "mkt_prices"
+ "name": "market_prices_ve_39"
},
"language_info": {
"codemirror_mode": {
@@ -4373,7 +4412,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.2"
+ "version": "3.9.13"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
diff --git a/docs/tutorials/other_prices_methods.ipynb b/docs/tutorials/other_prices_methods.ipynb
index 4e9ae86..78edac2 100644
--- a/docs/tutorials/other_prices_methods.ipynb
+++ b/docs/tutorials/other_prices_methods.ipynb
@@ -41,9 +41,10 @@
],
"source": [
"import pandas as pd\n",
- "now = pd.Timestamp.now(tz=\"UTC\").floor(\"T\")\n",
+ "from zoneinfo import ZoneInfo\n",
+ "now = pd.Timestamp.now(tz=ZoneInfo(\"UTC\")).floor(\"T\")\n",
"print(f\"{now!r}\")\n",
- "print(f\"{now.astimezone('America/New_York')!r}\")"
+ "print(f\"{now.astimezone(ZoneInfo('America/New_York'))!r}\")"
]
},
{
diff --git a/docs/tutorials/periods.ipynb b/docs/tutorials/periods.ipynb
index d535661..cd6fe5d 100644
--- a/docs/tutorials/periods.ipynb
+++ b/docs/tutorials/periods.ipynb
@@ -59,7 +59,7 @@
"outputs": [],
"source": [
"from market_prices import PricesYahoo\n",
- "import pytz\n",
+ "from zoneinfo import ZoneInfo\n",
"import pandas as pd\n",
"from market_prices.support import tutorial_helpers as th"
]
@@ -2513,7 +2513,7 @@
],
"source": [
"# using the start and end values defined for the 1min interval example above\n",
- "tz = pytz.timezone(\"US/Pacific\")\n",
+ "tz = ZoneInfo(\"US/Pacific\")\n",
"start_pacific, end_pacific = start.astimezone(tz), end.astimezone(tz)\n",
"start_pacific, end_pacific"
]
@@ -2933,7 +2933,7 @@
}
],
"source": [
- "tzout = pytz.timezone(\"US/Eastern\")\n",
+ "tzout = ZoneInfo(\"US/Eastern\")\n",
"df = prices.get(\"1min\", start_pacific, end_pacific, tzin=tz, tzout=tzout)\n",
"df"
]
@@ -3154,7 +3154,7 @@
}
],
"source": [
- "prices.get(\"1D\", \"2021-12-01 09:31\", end=pd.Timestamp(\"2021-12-07\", tz=pytz.UTC))"
+ "prices.get(\"1D\", \"2021-12-01 09:31\", end=pd.Timestamp(\"2021-12-07\", tz=ZoneInfo(\"UTC\")))"
]
},
{
diff --git a/docs/tutorials/prices.ipynb b/docs/tutorials/prices.ipynb
index 0a77a4d..dd50c04 100644
--- a/docs/tutorials/prices.ipynb
+++ b/docs/tutorials/prices.ipynb
@@ -42,7 +42,7 @@
"metadata": {},
"outputs": [],
"source": [
- "import pytz"
+ "from zoneinfo import ZoneInfo"
]
},
{
@@ -316,7 +316,7 @@
],
"source": [
"# last 30 mins of data at 10min intervals\n",
- "prices.get(\"10min\", minutes=30, tzout=pytz.UTC)"
+ "prices.get(\"10min\", minutes=30, tzout=ZoneInfo(\"UTC\"))"
]
},
{
@@ -586,7 +586,7 @@
}
],
"source": [
- "df = prices.get(\"10min\", minutes=30, tzout=pytz.UTC, lead_symbol='BTC-USD')\n",
+ "df = prices.get(\"10min\", minutes=30, tzout=ZoneInfo(\"UTC\"), lead_symbol='BTC-USD')\n",
"df"
]
},
diff --git a/docs/tutorials/pt_accessor.ipynb b/docs/tutorials/pt_accessor.ipynb
index 31b078c..e8d9968 100644
--- a/docs/tutorials/pt_accessor.ipynb
+++ b/docs/tutorials/pt_accessor.ipynb
@@ -55,9 +55,10 @@
],
"source": [
"import pandas as pd\n",
- "now = pd.Timestamp.now(tz=\"UTC\").floor(\"T\")\n",
+ "from zoneinfo import ZoneInfo\n",
+ "now = pd.Timestamp.now(tz=ZoneInfo(\"UTC\")).floor(\"T\")\n",
"print(f\"{now!r}\")\n",
- "print(f\"{now.astimezone('America/New_York')!r}\")"
+ "print(f\"{now.astimezone(ZoneInfo('America/New_York'))!r}\")"
]
},
{
@@ -81,7 +82,6 @@
"outputs": [],
"source": [
"from market_prices import PricesYahoo\n",
- "import pytz\n",
"from market_prices.support import tutorial_helpers as th\n",
"from market_prices import helpers"
]
@@ -2124,7 +2124,7 @@
}
],
"source": [
- "ts = pd.Timestamp(\"2022-05-10 19:22\", tz=pytz.UTC)\n",
+ "ts = pd.Timestamp(\"2022-05-10 19:22\", tz=ZoneInfo(\"UTC\"))\n",
"df_intraday.pt.convert_to_table_tz(ts)"
]
},
@@ -7677,7 +7677,7 @@
"The `.pt.operate` has a host of optional kwargs that collectively provide for undertaking multiple 'tidying' operations with a single call.\n",
"\n",
"The following are convenience options that in turn call methods already covered above:\n",
- "* **`tz`** to change the index's timezone. For `PTIntraday` any `pytz` timezone or `None` are valid values. For all other PT classes only `None` and \"UTC\" (or `pytz.UTC`) are valid values.\n",
+ "* **`tz`** to change the index's timezone. For `PTIntraday` any `zoneinfo.ZoneInfo` instance or `None` are valid values. For all other PT classes only `None` and \"UTC\" (or `ZoneInfo(\"UTC\")`) are valid values.\n",
"* **`fill`** can take \"bfill\", \"ffill\" or \"both\" and behaves as `.pt.fillna`.\n",
"* __`data_for_all*`__. If passed as `True` these options behave as the eponymous `.pt` methods covered above:\n",
" * `data_for_all`\n",
@@ -9522,9 +9522,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "mkt_prices 3.8.2",
+ "display_name": "venv",
"language": "python",
- "name": "mkt_prices"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
@@ -9536,7 +9536,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.2"
+ "version": "3.8.10"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
diff --git a/docs/tutorials/quickstart.ipynb b/docs/tutorials/quickstart.ipynb
index 885765a..de30d0c 100644
--- a/docs/tutorials/quickstart.ipynb
+++ b/docs/tutorials/quickstart.ipynb
@@ -51,7 +51,7 @@
"source": [
"from market_prices import PricesYahoo\n",
"import pandas as pd\n",
- "import pytz\n",
+ "from zoneinfo import ZoneInfo\n",
"from market_prices.support import tutorial_helpers as th\n",
"from market_prices import helpers"
]
@@ -4456,7 +4456,7 @@
"minute = xnys.session_close(session) - pd.Timedelta(47, \"T\")\n",
"print(f\"{minute=}\\n\") # for reference\n",
"\n",
- "prices_mult.price_at(minute, tz=pytz.UTC)"
+ "prices_mult.price_at(minute, tz=ZoneInfo(\"UTC\"))"
]
},
{
@@ -4783,9 +4783,9 @@
],
"metadata": {
"kernelspec": {
- "display_name": "mkt_prices 3.8.2",
+ "display_name": "market_prices_ve_39",
"language": "python",
- "name": "mkt_prices"
+ "name": "market_prices_ve_39"
},
"language_info": {
"codemirror_mode": {
@@ -4797,7 +4797,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.8.2"
+ "version": "3.9.13"
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
diff --git a/docs/tutorials/specific_query_methods.ipynb b/docs/tutorials/specific_query_methods.ipynb
index a6c224e..bd548ab 100644
--- a/docs/tutorials/specific_query_methods.ipynb
+++ b/docs/tutorials/specific_query_methods.ipynb
@@ -43,9 +43,10 @@
],
"source": [
"import pandas as pd\n",
- "now = pd.Timestamp.now(tz=\"UTC\").floor(\"T\")\n",
+ "from zoneinfo import ZoneInfo\n",
+ "now = pd.Timestamp.now(tz=ZoneInfo(\"UTC\")).floor(\"T\")\n",
"print(f\"{now!r}\")\n",
- "print(f\"{now.astimezone('America/New_York')!r}\")"
+ "print(f\"{now.astimezone(ZoneInfo('America/New_York'))!r}\")"
]
},
{
@@ -69,7 +70,6 @@
"outputs": [],
"source": [
"import pandas as pd\n",
- "import pytz\n",
"from market_prices import PricesYahoo\n",
"from market_prices.support import tutorial_helpers as th"
]
@@ -2061,7 +2061,7 @@
}
],
"source": [
- "tz = pytz.timezone(\"Australia/Perth\")\n",
+ "tz = ZoneInfo(\"Australia/Perth\")\n",
"minute_ = minute.astimezone(tz)\n",
"print(f\"{minute_=}\\n\") # for reference\n",
"\n",
diff --git a/docs/tutorials_docs.md b/docs/tutorials_docs.md
index 611213a..433e436 100644
--- a/docs/tutorials_docs.md
+++ b/docs/tutorials_docs.md
@@ -70,7 +70,7 @@ The `.pt` accessor opens the door to a wealth of functionality to interrogate an
## Other documentation
### Parsing
-[parsing.md](./public/parsing.md) offers a short explanation of how `market_prices` uses pydantic to validate parameters of public method.
+[parsing.md](./public/parsing.md) offers an explanation of how the [`valimp`](https://github.com/maread99/valimp) library is used to validate and otherwise parse inputs to public functions.
### Typing
[typing.md](./public/typing.md) covers:
diff --git a/etc/requirements.txt b/etc/requirements.txt
index a9f6b6c..0f81a1c 100644
--- a/etc/requirements.txt
+++ b/etc/requirements.txt
@@ -1,50 +1,41 @@
#
-# This file is autogenerated by pip-compile with Python 3.8
+# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
-# pip-compile --output-file=etc/requirements/requirements.txt pyproject.toml
+# pip-compile --output-file=etc/requirements.txt pyproject.toml
#
-annotated-types==0.5.0
- # via pydantic
-certifi==2023.5.7
+certifi==2023.7.22
# via requests
-charset-normalizer==3.1.0
+charset-normalizer==3.2.0
# via requests
colorama==0.4.6
# via tqdm
-exchange-calendars==4.2.8
+exchange-calendars==4.5
# via market-prices (pyproject.toml)
idna==3.4
# via requests
korean-lunar-calendar==0.3.1
# via exchange-calendars
-lxml==4.9.2
+lxml==4.9.3
# via yahooquery
-numpy==1.24.4
+numpy==1.25.2
# via
# exchange-calendars
# market-prices (pyproject.toml)
# pandas
-pandas==2.0.3
+pandas==2.1.0
# via
# exchange-calendars
# market-prices (pyproject.toml)
# yahooquery
-pydantic==2.0
- # via market-prices (pyproject.toml)
-pydantic-core==2.0.1
- # via pydantic
pyluach==2.2.0
# via exchange-calendars
python-dateutil==2.8.2
# via
# exchange-calendars
# pandas
-pytz==2023.3
- # via
- # exchange-calendars
- # market-prices (pyproject.toml)
- # pandas
+pytz==2023.3.post1
+ # via pandas
requests==2.31.0
# via requests-futures
requests-futures==1.0.1
@@ -53,16 +44,16 @@ six==1.16.0
# via python-dateutil
toolz==0.12.0
# via exchange-calendars
-tqdm==4.65.0
+tqdm==4.66.1
# via yahooquery
-typing-extensions==4.7.0
- # via
- # annotated-types
- # pydantic
- # pydantic-core
tzdata==2023.3
- # via pandas
-urllib3==2.0.3
+ # via
+ # exchange-calendars
+ # market-prices (pyproject.toml)
+ # pandas
+urllib3==2.0.4
# via requests
-yahooquery==2.3.1
+valimp==0.1
+ # via market-prices (pyproject.toml)
+yahooquery==2.3.2
# via market-prices (pyproject.toml)
diff --git a/etc/requirements_dependabot/requirements_tests.txt b/etc/requirements_dependabot/requirements_tests.txt
index e2e77df..ca05331 100644
--- a/etc/requirements_dependabot/requirements_tests.txt
+++ b/etc/requirements_dependabot/requirements_tests.txt
@@ -1,43 +1,41 @@
#
-# This file is autogenerated by pip-compile with Python 3.8
+# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
-# pip-compile --extra=tests --output-file=etc/requirements_tests.txt pyproject.toml
+# pip-compile --extra=tests --output-file=etc/requirements_dependabot/requirements_tests.txt pyproject.toml
#
-annotated-types==0.5.0
- # via pydantic
attrs==23.1.0
# via hypothesis
-black==23.3.0
+black==23.9.0
# via market-prices (pyproject.toml)
blosc2==2.0.0
# via tables
-certifi==2023.5.7
+certifi==2023.7.22
# via requests
-charset-normalizer==3.1.0
+charset-normalizer==3.2.0
# via requests
-click==8.1.3
+click==8.1.7
# via black
colorama==0.4.6
# via
# click
# pytest
# tqdm
-cython==0.29.35
+cython==3.0.2
# via tables
-exceptiongroup==1.1.1
+exceptiongroup==1.1.3
# via
# hypothesis
# pytest
-exchange-calendars==4.2.8
+exchange-calendars==4.5
# via market-prices (pyproject.toml)
-flake8==6.0.0
+flake8==6.1.0
# via
# flake8-docstrings
# market-prices (pyproject.toml)
flake8-docstrings==1.7.0
# via market-prices (pyproject.toml)
-hypothesis==6.80.0
+hypothesis==6.84.2
# via market-prices (pyproject.toml)
idna==3.4
# via requests
@@ -45,7 +43,7 @@ iniconfig==2.0.0
# via pytest
korean-lunar-calendar==0.3.1
# via exchange-calendars
-lxml==4.9.2
+lxml==4.9.3
# via yahooquery
mccabe==0.7.0
# via flake8
@@ -53,9 +51,9 @@ msgpack==1.0.5
# via blosc2
mypy-extensions==1.0.0
# via black
-numexpr==2.8.4
+numexpr==2.8.5
# via tables
-numpy==1.24.4
+numpy==1.25.2
# via
# exchange-calendars
# market-prices (pyproject.toml)
@@ -67,32 +65,28 @@ packaging==23.1
# black
# pytest
# tables
-pandas==2.0.3
+pandas==2.1.0
# via
# exchange-calendars
# market-prices (pyproject.toml)
# yahooquery
-pathspec==0.11.1
+pathspec==0.11.2
# via black
-platformdirs==3.8.0
+platformdirs==3.10.0
# via black
-pluggy==1.2.0
+pluggy==1.3.0
# via pytest
py-cpuinfo==9.0.0
# via tables
-pycodestyle==2.10.0
+pycodestyle==2.11.0
# via flake8
-pydantic==2.0
- # via market-prices (pyproject.toml)
-pydantic-core==2.0.1
- # via pydantic
pydocstyle==6.3.0
# via flake8-docstrings
-pyflakes==3.0.1
+pyflakes==3.1.0
# via flake8
pyluach==2.2.0
# via exchange-calendars
-pytest==7.4.0
+pytest==7.4.2
# via
# market-prices (pyproject.toml)
# pytest-mock
@@ -102,11 +96,8 @@ python-dateutil==2.8.2
# via
# exchange-calendars
# pandas
-pytz==2023.3
- # via
- # exchange-calendars
- # market-prices (pyproject.toml)
- # pandas
+pytz==2023.3.post1
+ # via pandas
requests==2.31.0
# via requests-futures
requests-futures==1.0.1
@@ -125,19 +116,18 @@ tomli==2.0.1
# pytest
toolz==0.12.0
# via exchange-calendars
-tqdm==4.65.0
+tqdm==4.66.1
# via yahooquery
-types-pytz==2023.3.0.0
- # via market-prices (pyproject.toml)
-typing-extensions==4.7.0
- # via
- # annotated-types
- # black
- # pydantic
- # pydantic-core
+typing-extensions==4.7.1
+ # via black
tzdata==2023.3
- # via pandas
-urllib3==2.0.3
+ # via
+ # exchange-calendars
+ # market-prices (pyproject.toml)
+ # pandas
+urllib3==2.0.4
# via requests
-yahooquery==2.3.1
+valimp==0.1
+ # via market-prices (pyproject.toml)
+yahooquery==2.3.2
# via market-prices (pyproject.toml)
diff --git a/etc/requirements_dev.txt b/etc/requirements_dev.txt
index 9bab0ef..b3b7c59 100644
--- a/etc/requirements_dev.txt
+++ b/etc/requirements_dev.txt
@@ -1,28 +1,26 @@
#
-# This file is autogenerated by pip-compile with Python 3.8
+# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
# pip-compile --extra=dev --output-file=etc/requirements_dev.txt pyproject.toml
#
-annotated-types==0.5.0
- # via pydantic
-astroid==2.15.5
+astroid==2.15.6
# via pylint
attrs==23.1.0
# via hypothesis
-black==23.3.0
+black==23.9.0
# via market-prices (pyproject.toml)
blosc2==2.0.0
# via tables
-build==0.10.0
+build==1.0.3
# via pip-tools
-certifi==2023.5.7
+certifi==2023.7.22
# via requests
-cfgv==3.3.1
+cfgv==3.4.0
# via pre-commit
-charset-normalizer==3.1.0
+charset-normalizer==3.2.0
# via requests
-click==8.1.3
+click==8.1.7
# via
# black
# pip-tools
@@ -33,32 +31,34 @@ colorama==0.4.6
# pylint
# pytest
# tqdm
-cython==0.29.35
+cython==3.0.2
# via tables
-dill==0.3.6
+dill==0.3.7
# via pylint
-distlib==0.3.6
+distlib==0.3.7
# via virtualenv
-exceptiongroup==1.1.1
+exceptiongroup==1.1.3
# via
# hypothesis
# pytest
-exchange-calendars==4.2.8
+exchange-calendars==4.5
# via market-prices (pyproject.toml)
-filelock==3.12.2
+filelock==3.12.3
# via virtualenv
-flake8==6.0.0
+flake8==6.1.0
# via
# flake8-docstrings
# market-prices (pyproject.toml)
flake8-docstrings==1.7.0
# via market-prices (pyproject.toml)
-hypothesis==6.80.0
+hypothesis==6.84.2
# via market-prices (pyproject.toml)
-identify==2.5.24
+identify==2.5.27
# via pre-commit
idna==3.4
# via requests
+importlib-metadata==6.8.0
+ # via build
iniconfig==2.0.0
# via pytest
isort==5.12.0
@@ -67,7 +67,7 @@ korean-lunar-calendar==0.3.1
# via exchange-calendars
lazy-object-proxy==1.9.0
# via astroid
-lxml==4.9.2
+lxml==4.9.3
# via yahooquery
mccabe==0.7.0
# via
@@ -75,7 +75,7 @@ mccabe==0.7.0
# pylint
msgpack==1.0.5
# via blosc2
-mypy==1.4.1
+mypy==1.5.1
# via market-prices (pyproject.toml)
mypy-extensions==1.0.0
# via
@@ -84,9 +84,9 @@ mypy-extensions==1.0.0
# mypy
nodeenv==1.8.0
# via pre-commit
-numexpr==2.8.4
+numexpr==2.8.5
# via tables
-numpy==1.24.4
+numpy==1.25.2
# via
# exchange-calendars
# market-prices (pyproject.toml)
@@ -100,45 +100,41 @@ packaging==23.1
# build
# pytest
# tables
-pandas==2.0.3
+pandas==2.1.0
# via
# exchange-calendars
# market-prices (pyproject.toml)
# yahooquery
-pandas-stubs==2.0.2.230605
+pandas-stubs==2.0.3.230814
# via market-prices (pyproject.toml)
-pathspec==0.11.1
+pathspec==0.11.2
# via black
-pip-tools==6.14.0
+pip-tools==7.3.0
# via market-prices (pyproject.toml)
-platformdirs==3.8.0
+platformdirs==3.10.0
# via
# black
# pylint
# virtualenv
-pluggy==1.2.0
+pluggy==1.3.0
# via pytest
-pre-commit==3.3.3
+pre-commit==3.4.0
# via market-prices (pyproject.toml)
py-cpuinfo==9.0.0
# via tables
-pycodestyle==2.10.0
+pycodestyle==2.11.0
# via flake8
-pydantic==2.0
- # via market-prices (pyproject.toml)
-pydantic-core==2.0.1
- # via pydantic
pydocstyle==6.3.0
# via flake8-docstrings
-pyflakes==3.0.1
+pyflakes==3.1.0
# via flake8
-pylint==2.17.4
+pylint==2.17.5
# via market-prices (pyproject.toml)
pyluach==2.2.0
# via exchange-calendars
pyproject-hooks==1.0.0
# via build
-pytest==7.4.0
+pytest==7.4.2
# via
# market-prices (pyproject.toml)
# pytest-mock
@@ -148,12 +144,9 @@ python-dateutil==2.8.2
# via
# exchange-calendars
# pandas
-pytz==2023.3
- # via
- # exchange-calendars
- # market-prices (pyproject.toml)
- # pandas
-pyyaml==6.0
+pytz==2023.3.post1
+ # via pandas
+pyyaml==6.0.1
# via pre-commit
requests==2.31.0
# via requests-futures
@@ -176,37 +169,40 @@ tomli==2.0.1
# pylint
# pyproject-hooks
# pytest
-tomlkit==0.11.8
+tomlkit==0.12.1
# via pylint
toolz==0.12.0
# via exchange-calendars
-tqdm==4.65.0
+tqdm==4.66.1
# via yahooquery
-types-pytz==2023.3.0.0
- # via
- # market-prices (pyproject.toml)
- # pandas-stubs
-typing-extensions==4.7.0
+types-pytz==2023.3.0.1
+ # via pandas-stubs
+typing-extensions==4.7.1
# via
- # annotated-types
# astroid
# black
+ # filelock
# mypy
- # pydantic
- # pydantic-core
# pylint
tzdata==2023.3
- # via pandas
-urllib3==2.0.3
+ # via
+ # exchange-calendars
+ # market-prices (pyproject.toml)
+ # pandas
+urllib3==2.0.4
# via requests
-virtualenv==20.23.1
+valimp==0.1
+ # via market-prices (pyproject.toml)
+virtualenv==20.24.5
# via pre-commit
-wheel==0.40.0
+wheel==0.41.2
# via pip-tools
wrapt==1.15.0
# via astroid
-yahooquery==2.3.1
+yahooquery==2.3.2
# via market-prices (pyproject.toml)
+zipp==3.16.2
+ # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
# pip
diff --git a/mypy.ini b/mypy.ini
index 504ad1b..55863cc 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -7,7 +7,6 @@ warn_redundant_casts = True
warn_unused_ignores = True
strict_equality = True
show_error_codes = True
-plugins = pydantic.mypy
; disable_error_code = operator, arg-type, attr-defined, call-arg, misc, assignment, type-var, name-defined
@@ -15,8 +14,6 @@ plugins = pydantic.mypy
; operator
; false +ve with * operation between int and DateOffset
; 'unsupported operand type for in (pd.Interval)'' - it is a valid operator
-; arg-type
-; incompatible with pydantic coersions (expects coerced type rather than a type that can be coerced)
; attr-defined false +ve accessing:
; return from df.itertuples with dot notation
; pd.DatetimeIndex attrs inc .tz_localize, .normalize, .day, .time
@@ -44,11 +41,3 @@ ignore_missing_imports = True
; [mypy-exchange_calendars]
; ignore_missing_imports = True
-
-[pydantic-mypy]
-; May21 - as doc https://pydantic-docs.helpmanual.io/mypy_plugin/
-; explore options as required...
-init_forbid_extra = True
-init_typed = True
-warn_required_dynamic_aliases = True
-warn_untyped_fields = True
\ No newline at end of file
diff --git a/pyproject.toml b/pyproject.toml
index e1aaf1d..4eab327 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
name = "market-prices"
description = "Meaningful OHLCV datasets"
authors = [
- {email = "marcusaread@gmail.com"},
+ {email = "marcusaread.prog@proton.me"},
{name = "Marcus Read"}
]
readme = "README.md"
@@ -27,7 +27,7 @@ keywords = [
"commodities",
"indices",
]
-requires-python = "~=3.8"
+requires-python = "~=3.9"
classifiers = [
"Development Status :: 4 - Beta",
@@ -41,7 +41,6 @@ classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
- "Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
@@ -52,12 +51,12 @@ classifiers = [
]
dependencies = [
- "exchange_calendars >=4.0.1",
+ "exchange_calendars",
"numpy",
- "pandas >= 1.1",
- "pydantic",
- "pytz",
- "yahooquery >=2.3",
+ "pandas",
+ "tzdata",
+ "yahooquery",
+ "valimp",
]
dynamic = ["version"]
@@ -71,7 +70,6 @@ tests = [
"pytest",
"pytest-mock",
"tables",
- "types-pytz",
]
dev = [
"black",
@@ -81,7 +79,6 @@ dev = [
"pytest",
"pytest-mock",
"tables",
- "types-pytz",
"mypy",
"mypy-extensions",
"pandas-stubs",
@@ -101,4 +98,4 @@ write_to = "src/market_prices/_version.py"
[tool.black]
line-length = 88
-target-version = ['py38', 'py39', 'py310']
+target-version = ['py39', 'py310', 'py311']
diff --git a/src/market_prices/daterange.py b/src/market_prices/daterange.py
index 9405c9f..40ca753 100644
--- a/src/market_prices/daterange.py
+++ b/src/market_prices/daterange.py
@@ -1112,14 +1112,14 @@ def daterange_tight(self) -> tuple[mptypes.DateRangeReq, pd.Timestamp]:
For example, if `interval` were 1H, `ds_interval` were 2H and
`daterange` were to return:
- ((Timestamp('2022-03-08 14:30', tz='UTC'),
- Timestamp('2022-03-10 22:30', tz='UTC')),
- Timestamp('2022-03-10 21:00', tz='UTC'))
+ ((Timestamp('2022-03-08 14:30', tz=zoneinfo.ZoneInfo("UTC")),
+ Timestamp('2022-03-10 22:30', tz=zoneinfo.ZoneInfo("UTC"))),
+ Timestamp('2022-03-10 21:00', tz=zoneinfo.ZoneInfo("UTC"))
...then `daterange_tight` would return:
- ((Timestamp('2022-03-08 14:30', tz='UTC'),
- Timestamp('2022-03-10 21:30', tz='UTC')),
- Timestamp('2022-03-10 21:00', tz='UTC'))
+ ((Timestamp('2022-03-08 14:30', tz=zoneinfo.ZoneInfo("UTC")),
+ Timestamp('2022-03-10 21:30', tz=zoneinfo.ZoneInfo("UTC"))),
+ Timestamp('2022-03-10 21:00', tz=zoneinfo.ZoneInfo("UTC"))
"""
(start, end), end_accuracy = self.daterange
if end - end_accuracy >= self.interval:
diff --git a/src/market_prices/helpers.py b/src/market_prices/helpers.py
index 525e9a3..49b5fc2 100644
--- a/src/market_prices/helpers.py
+++ b/src/market_prices/helpers.py
@@ -4,19 +4,24 @@
import re
import sys
-from typing import Literal
+from typing import Literal, TYPE_CHECKING
+import zoneinfo
import pandas as pd
import numpy as np
-import pytz
-from market_prices import intervals, mptypes
+from market_prices import mptypes
from market_prices.utils import general_utils as genutils
from market_prices.utils import pandas_utils as pdutils
+if TYPE_CHECKING:
+ from market_prices import intervals
+
if "pytest" in sys.modules:
import pytest # noqa: F401 # pylint: disable=unused-import # used within doctest
+UTC = zoneinfo.ZoneInfo("UTC")
+
ONE_DAY: pd.Timedelta = pd.Timedelta(1, "D")
ONE_MIN: pd.Timedelta = pd.Timedelta(1, "T")
ONE_SEC: pd.Timedelta = pd.Timedelta(1, "S")
@@ -97,9 +102,9 @@ def to_utc(ts: pd.Timestamp) -> pd.Timestamp:
Timestamp to return a copy of with timezone set to UTC.
"""
try:
- return ts.tz_convert(pytz.UTC)
+ return ts.tz_convert(UTC)
except TypeError:
- return ts.tz_localize(pytz.UTC)
+ return ts.tz_localize(UTC)
def to_tz_naive(ts: pd.Timestamp) -> pd.Timestamp:
@@ -113,8 +118,8 @@ def to_tz_naive(ts: pd.Timestamp) -> pd.Timestamp:
Timestamp to return a timezone-naive copy of.
"""
if ts.tz is None:
- return ts # type: ignore[unreachable] # 'tis very reachable
- return ts.tz_convert(pytz.UTC).tz_convert(None)
+ return ts
+ return ts.tz_convert(UTC).tz_convert(None)
def now(
@@ -145,7 +150,7 @@ def now(
UTC time.
"""
# pylint: disable=missing-param-doc
- now_ = pd.Timestamp.now(tz=pytz.UTC)
+ now_ = pd.Timestamp.now(tz=UTC)
if interval is not None and not interval.is_intraday:
now_ = now_.tz_convert(None)
res = "D"
@@ -312,7 +317,7 @@ def volume_to_na(df: pd.DataFrame) -> pd.DataFrame:
bv: pd.Series
if has_symbols(df):
for s in df.columns.remove_unused_levels().levels[0]:
- bv = df[(s, "close")].isna() # type: ignore[assignment] # is a Series
+ bv = df[(s, "close")].isna()
df.loc[bv, (s, "volume")] = np.nan
else:
bv = df["close"].isna()
@@ -322,9 +327,10 @@ def volume_to_na(df: pd.DataFrame) -> pd.DataFrame:
def resample(
resample_me: pd.DataFrame | pd.core.groupby.groupby.GroupBy,
- rule: pd.offsets.BaseOffset, # type: ignore[name-defined] # is defined
+ rule: pd.offsets.BaseOffset | str,
data: pd.DataFrame | None = None,
origin: str = "start",
+ nominal_start: pd.Timestamp | None = None,
) -> pd.DataFrame:
"""Resample ohlcv data to a pandas rule.
@@ -334,7 +340,7 @@ def resample(
Pandas object to be resampled. Object must have .resample method.
rule
- Pandas offset to which data to be resampled.
+ Pandas frequency or offset to which data to be resampled.
data
If resample_me is not a DataFrame (but, for example, a GroupBy
@@ -343,6 +349,16 @@ def resample(
origin
As `pd.DataFrame.resample` method.
+
+ nominal_start
+ The earliest date prior to the first index of `resample_me` on and
+ subsequent to which there are no trading sessions until the first
+ index of `resample_me`.
+
+ Only useful when `rule` describes a frequency greater than daily
+ and there are no sessions between the first index and the date to
+ which that first index would be rolled back to coincide with the
+ nearest occurrence of 'rule'.
"""
if isinstance(resample_me, pd.DataFrame):
resample_me = resample_me.copy()
@@ -362,6 +378,16 @@ def resample(
resampler = resample_me.resample(rule, closed="left", label="left", origin=origin)
resampled = resampler.agg(agg_f)
+
+ # NOTE START... required for at least pandas 2.1.0.
+ # See https://github.com/pandas-dev/pandas/issues/55064
+ offset = pdutils.pdfreq_to_offset(rule) if isinstance(rule, str) else rule
+ first_index = data.index[0] if nominal_start is None else nominal_start
+ cut_off = first_index - offset
+ if resampled.index[0] <= cut_off:
+ resampled = resampled[resampled.index > cut_off]
+ # required for at least pandas 2.1.0. ...END
+
resampled.columns = columns_
resampled = volume_to_na(resampled)
return resampled
diff --git a/src/market_prices/intervals.py b/src/market_prices/intervals.py
index 40324d3..676ee15 100644
--- a/src/market_prices/intervals.py
+++ b/src/market_prices/intervals.py
@@ -473,22 +473,6 @@ def raise_value_oob_error(component: str, limit: int):
return getattr(TDInterval, unit + str(value))
-class RowInterval:
- """Custom type with pydantic validator to parse to a PTInterval.
-
- A parameter annotated with this class can take str | pd.Timedelta as
- accepted to the `interval` parameter of
- `market_prices.prices.base.PricesBase.get`.
-
- The formal parameter will be assigned a member of either the
- TDInterval or DOInterval enum.
- """
-
- # pylint: disable=too-few-public-methods
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
-
- @classmethod
- def _validate(cls, v) -> PTInterval:
- return to_ptinterval(v)
+def parse_interval(name: str, obj: str | pd.Timedelta | timedelta, _) -> PTInterval:
+ """Parse input to an `interval` parameter."""
+ return to_ptinterval(obj)
diff --git a/src/market_prices/mptypes.py b/src/market_prices/mptypes.py
index 8dd3d05..82a0c9b 100644
--- a/src/market_prices/mptypes.py
+++ b/src/market_prices/mptypes.py
@@ -2,349 +2,104 @@
Includes:
Type aliases.
- Custom pydantic types used to parse parameters of public methods.
-Internal types are defined to their own section.
+ Internal types:
+ Type aliases
+ enums
+ TypeGuards
-Note: types concerning intervals are maintained on the
+NOTE: types concerning intervals are maintained on the
`market_prices.intervals` module.
"""
from __future__ import annotations
-import collections
+import datetime
import enum
-from typing import Any, Dict, List, Tuple, TypedDict, Union
+from typing import TypedDict, Union
import pandas as pd
-import pytz
from exchange_calendars import ExchangeCalendar
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
-
-# pylint: disable=too-few-public-methods # nature of pydantic types.
# ----------------------------- Type aliases ------------------------------
-Symbols = Union[List[str], str]
+Symbols = Union[list[str], str]
"""For public parameters that define instrument symbol(s)."""
-Calendar = Union[pydantic.StrictStr, ExchangeCalendar] # pylint: disable=no-member
+Calendar = Union[str, ExchangeCalendar] # pylint: disable=no-member
"""Acceptable types to define a single calendar."""
-Calendars = Union[Calendar, List[Calendar], Dict[str, Calendar]]
+Calendars = Union[Calendar, list[Calendar], dict[str, Calendar]]
"""For public parameters that can define calendars by-symbol."""
-
-# ----------------- Custom types with pydantic validators -----------------
-
-
-def type_error_msg(
- type_: type[object],
- valid_types: type[object] | collections.abc.Sequence[type[object]],
- value: Any,
-) -> str:
- """Return error message for a custom type receiving an invalid type.
-
- Parameters
- ----------
- type_
- Custom type.
-
- valid_types
- Valid type or types.
-
- value
- Value, of invalid type, received by parameter annotated with
- `type_`.
- """
- msg = f"{type_.__name__}"
- if isinstance(valid_types, collections.abc.Sequence):
- msg += f" can take any type from {list(valid_types)}"
- else:
- msg += f" takes type {valid_types}"
-
- msg += f" although receieved <{value}> of type {type(value)}."
- return msg
-
-
-class LeadSymbol:
- """Type to validate `lead_symbol` parameter.
-
- Only for annotating `lead_symbol` parameter of public methods of
- PricesBase (or subclass of), excluding __init__.
-
- A parameter annotated with this class can only take types that can be
- coerced to a `str` which is in PricesBase.symbols.
-
- The Formal Parameter will be assigned a `str`.
- """
-
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
-
- @classmethod
- def _validate(cls, v: str | None, values) -> str:
- valid_types = str
- if not isinstance(v, valid_types):
- raise TypeError(type_error_msg(cls, valid_types, v))
-
- values["self"]._verify_lead_symbol(v) # pylint: disable=protected-access
- return v
-
-
-class Timezone:
- """Type to parse to a timezone.
-
- A parameter annotated with this class can take:
- - an instance returned by pytz.timezone (i.e. instance of subclass
- of pytz.BaseTzInfo) or a
- - `str` that can be passed to pytz.timezone (for example 'utc' or
- 'US/Eastern`)
-
- The formal parameter will be assigned an instance of a subclass of
- `pytz.BaseTzInfo`.
- """
-
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
-
- @classmethod
- def _validate(cls, v) -> pytz.BaseTzInfo:
- if isinstance(v, pytz.BaseTzInfo):
- return v
-
- valid_types = str, pytz.BaseTzInfo
- if not isinstance(v, str):
- raise TypeError(type_error_msg(cls, valid_types, v))
-
- return pytz.timezone(v)
-
-
-class PricesTimezone:
- """Type to parse to a PricesBase parameter to a timezone.
-
- Only for annotating `tz`, `tzin` or `tzout` parameters of public
- methods of PricesBase (or subclass of).
-
- A parameter annotated with this class can take
- "pytz.BaseTzInfo | str", where:
- - pytz.BaseTzInfo: any instance returned by pytz.timezone
- - str:
- - valid input to `pytz.timezone`, for example 'utc' or
- 'US/Eastern`, to parse to pytz.timezone()
- - any symbol of `PricesBase.symbols` to parse to the timezone
- associated with that symbol.
-
- The formal parameter will be assigned an instance of a subclass of
- `pytz.BaseTzInfo`.
- """
-
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
-
- @classmethod
- def _validate(cls, v, values: dict) -> pytz.BaseTzInfo:
- if isinstance(v, pytz.BaseTzInfo):
- return v
-
- valid_types = str, pytz.BaseTzInfo
- if not isinstance(v, str):
- raise TypeError(type_error_msg(cls, valid_types, v))
-
- if v in values["self"].symbols:
- return values["self"].timezones[v]
- else:
- return pytz.timezone(v)
-
-
-class Timestamp:
- """Type to parse to a pd.Timestamp.
-
- A parameter annotated with this class can take any object that is
- acceptable as a single-argument input to pd.Timestamp:
- Union[pd.Timestamp, str, datetime.datetime, int, float]
-
- The formal parameter will be assigned a pd.Timestamp.
- """
-
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
-
- @classmethod
- def _validate(cls, v) -> pd.Timestamp:
- # if v not valid single-argument input to pd.Timestamp then will raise error.
- return pd.Timestamp(v)
-
-
-class DateTimestamp(Timestamp):
- """Type to parse to a pd.Timestamp and validate as a date.
-
- Considered a valid date (rather than a time), if:
- - no time component or time component defined as 00:00.
- - tz-naive.
-
- A parameter annotated with this class can take any object that is
- acceptable as a single-argument input to pd.Timestamp:
- Union[pd.Timestamp, str, datetime.datetime, int, float]
-
- The formal parameter will be assigned a pd.Timestamp.
- """
-
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
- yield cls._validate_date
-
- @classmethod
- def _validate_date(cls, v: pd.Timestamp, field) -> pd.Timestamp:
- if v.tz is not None:
- msg = f"`{field.name}` must be tz-naive, although receieved as {v}."
- raise ValueError(msg)
-
- if v != v.normalize(): # type: ignore[unreachable] # mypy doesn't like v.tz
- msg = (
- f"`{field.name}` can not have a time component, although receieved"
- f" as {v}. For an intraday price use .price_at()."
- )
- raise ValueError(msg)
- return v
-
-
-class TimeTimestamp(Timestamp):
- """Type to parse to a pd.Timestamp and validate as representing a time.
-
- Considered a valid time (rather than a date) if:
- - time component defined as anything other than 00:00.
- - time component defined as 00:00 and tz-aware.
-
- A parameter annotated with this class can take any object that is
- acceptable as a single-argument input to pd.Timestamp:
- Union[pd.Timestamp, str, datetime.datetime, int, float]
-
- The formal parameter will be assigned a pd.Timestamp.
- """
-
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
- yield cls._validate_time
-
- @classmethod
- def _validate_time(cls, v: pd.Timestamp, field) -> pd.Timestamp:
- if v == v.normalize() and v.tz is None:
- msg = ( # type: ignore[unreachable] # mypy doesn't like v.tz
- f"`{field.name}` must have a time component or be tz-aware, although"
- f" receieved as {v}. To define {field.name} as midnight pass as a"
- f" tz-aware pd.Timestamp. For prices as at a session's close use"
- f" .close_at()."
- )
- raise ValueError(msg)
- return v
+# ----------------------------- Custom types ------------------------------
class PandasFrequency(str):
- """Validated pandas frequency.
-
- A field annotated with this class:
- can take a string that is a valid pandas frequency, determined as
- being acceptable input to pd.tseries.frequencies.to_offset().
-
- will be assigned a PandasFrequency.
-
- Attributes
- ----------
- In addition to inherited str methods:
-
- as_offset
- """
-
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
-
- @classmethod
- def _validate(cls, v) -> "PandasFrequency":
- if not isinstance(v, str):
- raise TypeError(type_error_msg(cls, str, v))
+ """Validated pandas frequency."""
+ def __new__(cls, value: str):
try:
- _ = pd.tseries.frequencies.to_offset(v)
+ _ = pd.tseries.frequencies.to_offset(value)
except ValueError:
msg = (
f"PandasFrequency must be a pandas frequency although"
- f" received '{v}'."
+ f" received '{value}'."
)
raise ValueError(msg) from None
-
- return cls(v)
+ return super().__new__(cls, value)
@property
def as_offset(
self,
- ) -> pd.offsets.BaseOffset: # type: ignore[name-defined] # is defined
+ ) -> pd.offsets.BaseOffset:
"""Frequency as a pandas offset."""
return pd.tseries.frequencies.to_offset(self)
-class IntervalDatetimeIndex(pd.IntervalIndex):
- """Validated IntervalIndex with left and right as pd.DatetimeIndex."""
-
- # pylint: disable=abstract-method
-
- @classmethod
- def __get_validators__(cls):
- yield cls._validate
-
- @classmethod
- def _validate(cls, v) -> pd.IntervalIndex:
- if not isinstance(v, pd.IntervalIndex):
- raise ValueError(
- "Parameter must be passed as an instance of pd.IntervalIndex."
- )
- elif not isinstance(v.left, pd.DatetimeIndex):
- raise ValueError(
- "Parameter must have each side as type pd.DatetimeIndex"
- f" although received left side as '{v.left}'."
- )
- return v
-
-
# ---------------------------- Internal types -----------------------------
# Interval types are NOT to be used to annotate public parameters.
# ------------------------------ Type aliases -----------------------------
# Aliases with no public use
-DateRangeAmb = Tuple[Union[pd.Timestamp, None], Union[pd.Timestamp, None]]
+DateRangeAmb = tuple[Union[pd.Timestamp, None], Union[pd.Timestamp, None]]
"""For internal types that define a range of dates which can be ambiguous.
tuple[0]: Range start date. If None, earliest available date.
tuple[1]: Range end date. If None, latest available date.
"""
-DateRangeReq = Tuple[Union[pd.Timestamp, None], pd.Timestamp]
+DateRangeReq = tuple[Union[pd.Timestamp, None], pd.Timestamp]
"""For internal types that define a range of dates over which to request prices.
tuple[0]: Range start date. If None, earliest date for which prices are available.
tuple[1]: Range end date.
"""
-DateRange = Tuple[pd.Timestamp, pd.Timestamp]
+DateRange = tuple[pd.Timestamp, pd.Timestamp]
"""For internal parameters that define an unambiguous range of dates.
tuple[0]: Range start date.
tuple[1]: Range end date.
"""
-PytzUTC = type(pytz.UTC)
+DateTimestamp = Union[pd.Timestamp, str, datetime.datetime, int, float]
+"""Type to annotate an input that takes a value representing a date.
+
+Used in abstract base classes to identify inputs that should be coerced to
+a `pd.Timestamp` and validated as a date with
+`market_prices.parsing.to_datetimestamp`.
+"""
+
+TimeTimestamp = Union[pd.Timestamp, str, datetime.datetime, int, float]
+"""Type to annotate an input that takes a value representing a time.
+
+Used in abstract base classes to identify inputs that should be coerced to
+a `pd.Timestamp` and validated as a time with
+`market_prices.parsing.to_timetimestamp`.
+"""
# -------------------------------- enums ----------------------------------
diff --git a/src/market_prices/parsing.py b/src/market_prices/parsing.py
index 91129ca..9361a0c 100644
--- a/src/market_prices/parsing.py
+++ b/src/market_prices/parsing.py
@@ -1,22 +1,25 @@
"""Functions to parse public input.
-Supplements parsing functionality employed via pydantic.
-
Covers:
- Verification.
- Conversion.
- Assignment of default values.
+ Verification
+ Coercion
+ Dynamic assignment of default values
+
+Functions to parse/validate via a `valimp.Parser` are defined under a
+dedicated section.
"""
from __future__ import annotations
import typing
+from typing import Any
+from zoneinfo import ZoneInfo
import exchange_calendars as xcals
import pandas as pd
-import pytz
from market_prices import errors, helpers, mptypes, intervals
+from market_prices.helpers import UTC
def verify_period_parameters(pp: mptypes.PP):
@@ -59,7 +62,7 @@ def verify_period_parameters(pp: mptypes.PP):
raise ValueError(msg)
-def parse_timestamp(ts: pd.Timestamp, tzin: pytz.BaseTzInfo) -> pd.Timestamp:
+def parse_timestamp(ts: pd.Timestamp, tzin: ZoneInfo) -> pd.Timestamp:
"""Parse timestamp to date or UTC time.
Parameters
@@ -73,8 +76,8 @@ def parse_timestamp(ts: pd.Timestamp, tzin: pytz.BaseTzInfo) -> pd.Timestamp:
if helpers.is_date(ts):
return ts
if ts.tz is None:
- ts = ts.tz_localize(tzin) # type: ignore[unreachable] # 'tis reachable
- return ts.tz_convert(pytz.UTC)
+ ts = ts.tz_localize(tzin)
+ return ts.tz_convert(UTC)
def _parse_start(
@@ -477,3 +480,103 @@ def verify_time_not_oob(
raise errors.DatetimeTooEarlyError(time, l_limit, "time")
if time > r_limit:
raise errors.DatetimeTooLateError(time, r_limit, "time")
+
+
+# ---------- parse/validation functions and Parsers for valimp ------------
+
+# sig_template
+# def verify_*(
+# name: str,
+# obj: Any,
+# params: dict[str, Any],
+# ):
+
+
+def lead_symbol(name: str, obj: str | None, params: dict[str, Any]) -> str:
+    """Parse `lead_symbol` parameter of `PricesBase.get`."""
+    if obj is None:
+        obj = params["self"].lead_symbol_default
+    params["self"]._verify_lead_symbol(obj)  # pylint: disable=protected-access
+    return obj
+
+
+def verify_datetimestamp(name: str, obj: pd.Timestamp, _) -> pd.Timestamp:
+    """Validate a pd.Timestamp as a date.
+
+    Considered a valid date (rather than a time), if:
+    - no time component or time component defined as 00:00.
+    - tz-naive.
+    """
+    if obj.tz is not None:
+        msg = f"`{name}` must be tz-naive, although received as {obj}."
+        raise ValueError(msg)
+
+    if obj != obj.normalize():
+        msg = (
+            f"`{name}` can not have a time component, although received"
+            f" as {obj}. For an intraday price use .price_at()."
+        )
+        raise ValueError(msg)
+    return obj
+
+
+def verify_timetimestamp(name: str, obj: pd.Timestamp, _) -> pd.Timestamp:
+    """Validate a pd.Timestamp as representing a time.
+
+    Considered a valid time (rather than a date) if:
+    - time component defined as anything other than 00:00.
+    - time component defined as 00:00 and tz-aware.
+    """
+    if obj == obj.normalize() and obj.tz is None:
+        msg = (
+            f"`{name}` must have a time component or be tz-aware, although"
+            f" received as {obj}. To define {name} as midnight pass as a"
+            f" tz-aware pd.Timestamp. For prices as at a session's close use"
+            f" .close_at()."
+        )
+        raise ValueError(msg)
+    return obj
+
+
+def to_timezone(name: str, obj: ZoneInfo | str, _) -> ZoneInfo:
+    """Parse input to a timezone.
+
+    A parameter parsed with this function can take either of:
+    - an instance of `zoneinfo.ZoneInfo`.
+    - `str` that can be passed to `zoneinfo.ZoneInfo` (for example
+      'UTC' or 'US/Eastern').
+    """
+    if isinstance(obj, ZoneInfo):
+        return obj
+    return ZoneInfo(obj)
+
+
+def to_prices_timezone(
+    name: str, obj: str | ZoneInfo, params: dict[str, Any]
+) -> ZoneInfo:
+    """Parse a tz input to a timezone.
+
+    Only for parsing `tz`, `tzin` or `tzout` parameters of public
+    methods of PricesBase (or subclass of).
+
+    Parameters
+    ----------
+    obj : ZoneInfo | str
+
+        ZoneInfo, any instance returned by `zoneinfo.ZoneInfo`
+
+        str, as either:
+        - valid input to `zoneinfo.ZoneInfo`, for example 'UTC' or
+          'US/Eastern'
+        - any symbol of `PricesBase.symbols` to parse to the timezone
+          associated with that symbol.
+
+    Returns
+    -------
+    timezone : zoneinfo.ZoneInfo
+    """
+    if isinstance(obj, ZoneInfo):
+        return obj
+    if obj in params["self"].symbols:
+        return params["self"].timezones[obj]
+    return ZoneInfo(obj)
diff --git a/src/market_prices/prices/base.py b/src/market_prices/prices/base.py
index 1c91e9b..9057068 100644
--- a/src/market_prices/prices/base.py
+++ b/src/market_prices/prices/base.py
@@ -12,27 +12,26 @@
import contextlib
import copy
import dataclasses
+import datetime
import functools
import warnings
-from typing import Any, Literal, Optional, Union, TYPE_CHECKING, Tuple
+from typing import Any, Literal, Optional, Union, TYPE_CHECKING, Annotated
+from zoneinfo import ZoneInfo
import exchange_calendars as xcals
import numpy as np
import pandas as pd
-import pytz
+from valimp import parse, Parser, Coerce
from market_prices import data
from market_prices import daterange as dr
from market_prices import errors, helpers, intervals, mptypes, parsing, pt
+from market_prices.helpers import UTC
from market_prices.mptypes import Anchor, OpenEnd, Alignment, Priority
from market_prices.intervals import BI, TDInterval
from market_prices.utils import calendar_utils as calutils
from market_prices.utils import pandas_utils as pdutils
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
# pylint: disable=too-many-lines
@@ -164,8 +163,8 @@ class PricesBase(metaclass=abc.ABCMeta):
- Abstract Methods -
_request_data(self, interval: BaseInterval,
- start: Optional[pd.Timestamp],
- end: Optional[pd.Timestamp]) -> pd.DataFrame:
+ start: pd.Timestamp | None,
+ end: pd.Timestamp | None) -> pd.DataFrame:
Request data from source with `interval` from `start` to
`end`.
@@ -315,7 +314,7 @@ class PricesBase(metaclass=abc.ABCMeta):
.has_single_calendar -> bool
Query if all symbols share the same calendar.
- ._indices_aligned -> Dict[BaseInterval, pd.Series]:
+ ._indices_aligned -> dict[BaseInterval, pd.Series]:
Query if indices are aligned. Return by base interval
and session.
@@ -463,7 +462,7 @@ def _dict_for_all_symbols(self, param: str, value: Any | list | dict) -> dict:
param: str
Parameter name.
- value: Union[Any, list, dict]:
+ value: Any | list | dict:
Any:
scalar to apply to every symbol
@@ -481,7 +480,7 @@ def _dict_for_all_symbols(self, param: str, value: Any | list | dict) -> dict:
Each item represents a symbol, all symbols represented.
key: str
symbol.
- value: Optional[Any]
+ value: Any | None
value corresponding with symbol.
"""
if not isinstance(value, (list, dict)):
@@ -593,8 +592,7 @@ def _set_calendars(self, calendars: mptypes.Calendars):
ll = None if self.bi_daily is None else self.base_limits[self.bi_daily]
if isinstance(ll, pd.Timedelta):
- # typing - recognises as datetime rather than timestamp
- ll = helpers.now(intervals.BI_ONE_DAY) - ll # type: ignore[assignment]
+ ll = helpers.now(intervals.BI_ONE_DAY) - ll
# margin to ensure calendar's first session is not later than limit.
kwargs = {"start": ll - pd.Timedelta(14, "D")} if ll is not None else {}
for k, v in d.items():
@@ -618,8 +616,7 @@ def _set_calendars(self, calendars: mptypes.Calendars):
# complexity. Cleaner to restrict the calendars here.
intraday_ll = self.base_limits[self.bis_intraday[-1]]
if isinstance(intraday_ll, pd.Timedelta):
- # typing - recognises as datetime rather than timestamp
- intraday_ll = helpers.now() - intraday_ll # type: ignore[assignment]
+ intraday_ll = helpers.now() - intraday_ll
if cal.first_minute > intraday_ll:
assert isinstance(intraday_ll, pd.Timestamp)
raise errors.CalendarTooShortError(cal, intraday_ll)
@@ -731,12 +728,12 @@ def calendars_max_delay(self) -> dict[xcals.ExchangeCalendar, pd.Timedelta]:
return self._calendars_delay(max)
@functools.cached_property
- def timezones(self) -> dict[str, pytz.BaseTzInfo]:
+ def timezones(self) -> dict[str, ZoneInfo]:
"""Timezones, by symbol. Evaluated from calendars."""
return {k: c.tz for k, c in self.calendars.items()}
@functools.cached_property
- def tz_default(self) -> pytz.BaseTzInfo:
+ def tz_default(self) -> ZoneInfo:
"""Default timezone."""
return self.timezones[self.lead_symbol_default]
@@ -966,7 +963,7 @@ def earliest_requestable_session(self) -> pd.Timestamp:
elif self.bi_daily is not None:
session = self.limits_sessions[self.bi_daily][0]
else:
- session = min( # type: ignore[type-var] # ll cannot be None
+ session = min(
[
ll
for bi, (ll, rl) in self.limits_sessions.items()
@@ -1144,7 +1141,7 @@ def _set_indexes_status(self):
for bi in self.bis_intraday:
start_session, end_session = self.limits_sessions[bi]
sessions = self.cc.sessions_in_range(start_session, end_session)
- status = pd.Series(True, index=sessions)
+ status = pd.Series(True, index=sessions, dtype="object")
if bi.is_one_minute:
# shortcut, cannot have partial indices or conflicts at T1
@@ -1803,7 +1800,7 @@ def _get_bi_table_intraday(self) -> tuple[pd.DataFrame, BI]:
# NOTE: If develop data.Data to not have a hard right limit then will be able
# to simply pass through drg.daterange[0] to ._get_bi_table.
bi_now = helpers.now(bi) - self.gpp.delay + bi # + bi to include live interval
- end = min(end, bi_now) # type: ignore[assignment] # datetime Timestamp issue
+ end = min(end, bi_now)
table = self._get_bi_table(bi, (start, end))
return table, bi
@@ -1830,12 +1827,10 @@ def _downsample_bi_table(self, df: pd.DataFrame, bi: intervals.BI) -> pd.DataFra
target_indices = pd.cut(bi_index.to_list(), target_index)
target_indices = target_indices.remove_unused_categories()
agg_f = helpers.agg_funcs(df)
- df = df.groupby(target_indices).agg(agg_f)
+ df = df.groupby(target_indices, observed=False).agg(agg_f)
df.index = pd.IntervalIndex(df.index) # convert from CategoricalIndex
df = helpers.volume_to_na(df)
- df.index = pdutils.interval_index_new_tz(
- df.index, pytz.UTC # type: ignore[arg-type] # expects mptype
- )
+ df.index = pdutils.interval_index_new_tz(df.index, UTC)
if df.pt.interval is None:
# Overlapping indices of a calendar-specific trading trading were curtailed.
warnings.warn(errors.IntervalIrregularWarning())
@@ -2030,9 +2025,8 @@ def _get_table_daily(self, force_ds_daily: bool = False) -> pd.DataFrame:
df.index = index
else: # downsample for monthly
pdfreq = ds_interval.as_pdfreq
- df = helpers.resample(df_bi, pdfreq, origin="start")
- df.index = pdutils.get_interval_index(
- df.index, pdfreq # type: ignore[arg-type] # expects mptype
+ df = df_bi.pt.downsample(
+ pdfreq, calendar, drop_incomplete_last_indice=False
)
if df.pt.first_ts < self.limits[intervals.BI_ONE_DAY][0]:
# This can happen if getting all data. As the Getter's .daterange
@@ -2082,8 +2076,7 @@ def _get_daily_intraday_composite(
table_daily = self._get_table_daily(force_ds_daily=True)
# up to and exclusive of split_s
table_daily = table_daily[: split_s - helpers.ONE_DAY]
- # typing note - expression returns what it is, i.e. a DataFrame, not a Series.
- table_daily = table_daily.tz_localize(pytz.UTC) # type: ignore[assignment]
+ table_daily = table_daily.tz_localize(UTC)
table_daily.index = pd.IntervalIndex.from_arrays(
table_daily.index, table_daily.index, "left"
)
@@ -2246,7 +2239,7 @@ def _force_partial_indices(self, table: pd.DataFrame) -> pd.IntervalIndex:
index = indices_to_stay.union(replacement_indices, sort=False)
index = index.sort_values()
- index = pdutils.interval_index_new_tz(index, pytz.UTC)
+ index = pdutils.interval_index_new_tz(index, UTC)
return index
@staticmethod
@@ -2482,12 +2475,21 @@ def request_earliest_available_data(self) -> bool:
"""Query if params represent request for all available data."""
return self.pp_raw["start"] is None and not self.duration
- @pydantic.validate_arguments
+ @parse
def get(
self,
- interval: Optional[intervals.RowInterval] = None,
- start: Optional[mptypes.Timestamp] = None,
- end: Optional[mptypes.Timestamp] = None,
+ interval: Annotated[
+ Union[str, pd.Timedelta, datetime.timedelta, None],
+ Parser(intervals.parse_interval, parse_none=False),
+ ] = None,
+ start: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ ] = None,
+ end: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ ] = None,
minutes: int = 0,
hours: int = 0,
days: int = 0,
@@ -2495,15 +2497,21 @@ def get(
months: int = 0,
years: int = 0,
add_a_row: bool = False,
- lead_symbol: Optional[mptypes.LeadSymbol] = None,
- tzin: Optional[mptypes.PricesTimezone] = None,
+ lead_symbol: Annotated[Optional[str], Parser(parsing.lead_symbol)] = None,
+ tzin: Annotated[
+ Optional[Union[str, ZoneInfo]],
+ Parser(parsing.to_prices_timezone, parse_none=False),
+ ] = None,
anchor: Literal["workback", "open"] = "open",
openend: Literal["maintain", "shorten"] = "maintain",
priority: Literal["period", "end"] = "end",
strict: bool = True,
composite: bool = False,
force: bool = False,
- tzout: Optional[mptypes.PricesTimezone] = None,
+ tzout: Annotated[
+ Optional[Union[str, ZoneInfo]],
+ Parser(parsing.to_prices_timezone, parse_none=False),
+ ] = None,
fill: Optional[Literal["ffill", "bfill", "both"]] = None,
include: Optional[mptypes.Symbols] = None,
exclude: Optional[mptypes.Symbols] = None,
@@ -2585,7 +2593,7 @@ def get(
- Period Parameters -
These parameters define the period over which to get prices.
- start : Union[pd.Timestamp, str, datetime.datetime, int, float],
+ start : pd.Timestamp | str | datetime.datetime | int | float | None
default: earliest available datetime (only if `start` required)
The first date or minute of the period for which to get
prices.
@@ -2605,7 +2613,7 @@ def get(
timestamp's timezone, if passed as a tz-aware
pd.Timestamp, or otherwise as `tzin`.
- end : Union[pd.Timestamp, str, datetime.datetime, int, float],
+ end : pd.Timestamp | str | datetime.datetime | int | float | None
default: most recent available datetime (only if `end` required)
The last date or minute of the period for which to get
prices.
@@ -2650,14 +2658,14 @@ def get(
Period duration in calendar years. Can be combined with
`weeks` and `months`.
- tzin : Optional[Union[str, BaseTzinfo],
+ tzin : str | BaseTzinfo | None,
default: timezone of any `lead_symbol`, otherwise `self.default_tz`
Timezone of any input to `start` and `end` that represents a
- minte (as opposed to a session).
+ minute (as opposed to a session).
Can be passed as a timezone defined as a `str` that's valid
- input to `pytz.timezone`, for example 'utc' or 'US/Eastern`,
- or as an instance of `pytz.timezone`.
+ input to `zoneinfo.ZoneInfo`, for example 'UTC' or
+            'US/Eastern', or as an instance of `zoneinfo.ZoneInfo`.
Can alternatively be passed as any symbol of `self.symbols`
to define as timezone associated with that symbol, for example
@@ -2677,7 +2685,8 @@ def get(
- Parameters related to index -
- interval : str | timedelta | pd.Timedelta, default: inferred
+ interval : str | timedelta | pd.Timedelta | None,
+ default: None (interval inferred)
Time interval to be represented by each price row.
Pass as either:
@@ -2744,7 +2753,7 @@ def get(
See intervals.ipynb tutorial for further explanation and
examples.
- lead_symbol : Optional[str],
+ lead_symbol : str | None,
default: symbol associated with most common calendar
A symbol associated with the calendar that should be used to
evaluate the period over which prices are returned.
@@ -2964,21 +2973,21 @@ def get(
- Post-processing options (formatting and tidying) -
- tzout : Optional[Union[str, BaseTzinfo],
+ tzout : str | BaseTzinfo | None,
default: as `tzin` if `interval` intraday, otherwise None
Timezone to set index to.
If interval daily or higher:
- Can only accept "utc", pytz.UTC or (for tz-naive dates)
- None.
+ Can only accept "utc", `zoneinfo.ZoneInfo("UTC")` or (for
+ tz-naive dates) None.
If interval intraday:
Can be passed as a timezone defined as a `str` that's valid
- input to `pytz.timezone`, for example 'utc' or
- 'US/Eastern`, or as an instance returned from
- `pytz.timezone`.
+ input to `zoneinfo.ZoneInfo`, for example 'UTC' or
+                'US/Eastern', or as an instance returned by
+ `zoneinfo.ZoneInfo`.
Can alternatively be passed as any symbol of `self.symbols`
to define as timezone associated with that symbol, for
@@ -2990,7 +2999,7 @@ def get(
table reflect session prices, not prices between successive
midnight UTC.
- fill : Optional[Literal["ffill", "bfill", "both"]], default: None
+ fill : Literal["ffill", "bfill", "both"] | None, default: None
Fill missing values where a symbol's calendar is not open
during the interval covered by an indice.
@@ -3005,15 +3014,15 @@ def get(
None: (default) do not fill.
- include : Union[List[str], str], optional
+ include : list[str] | str | None
Symbol or symbols to include. All other symbols will be
excluded. If passed, do not pass `exclude`.
- exclude : Union[List[str], str], optional
+ exclude : list[str] | str | None
Symbol or symbols to include. All other symbols will be
included. If passed, do not pass `include`.
- side : Optional[Literal['left', 'right']], default: None
+ side : Literal['left', 'right'] | None, default: None
Ignored if interval is (or inferred as) daily.
Determines index:
@@ -3185,7 +3194,7 @@ def get(
Return prices as a pd.DataFrame with a .pt accessor:
- index : Union[pd.IntervalIndex, pd.DatetimeIndex]
+ index : pd.IntervalIndex | pd.DatetimeIndex
If `interval` passed or infered as intraday:
pd.IntervalIndex with each row covering prices for
@@ -3270,6 +3279,13 @@ def get(
# pylint: disable=too-many-branches, too-many-statements, missing-param-doc
# pylint: disable=differing-type-doc
+        if TYPE_CHECKING:
+            assert start is None or isinstance(start, pd.Timestamp)
+            # was `isinstance(start, ...)` — copy-paste bug; must check `end`
+            assert end is None or isinstance(end, pd.Timestamp)
+            assert isinstance(lead_symbol, str)
+            assert tzin is None or isinstance(tzin, ZoneInfo)
+            assert tzout is None or isinstance(tzout, ZoneInfo)
anchor_ = Anchor.WORKBACK if anchor.lower() == "workback" else Anchor.OPEN
openend_ = OpenEnd.SHORTEN if openend.lower() == "shorten" else OpenEnd.MAINTAIN
priority_ = Priority.PERIOD if priority.lower() == "period" else Priority.END
@@ -3289,27 +3305,14 @@ def get(
)
raise ValueError(msg)
- if lead_symbol is None:
- lead_symbol_ = self.lead_symbol_default
- else:
- assert isinstance(lead_symbol, str)
- lead_symbol_ = lead_symbol
-
if tzin is None:
- tzin_ = self.timezones[lead_symbol_]
- else:
- assert isinstance(tzin, pytz.BaseTzInfo)
- tzin_ = tzin
+ tzin = self.timezones[lead_symbol]
- if start is None:
- start_ = None
- else:
- start_ = parsing.parse_timestamp(start, tzin_) # type: ignore[arg-type]
+ if start is not None:
+ start = parsing.parse_timestamp(start, tzin)
- if end is None:
- end_ = None
- else:
- end_ = parsing.parse_timestamp(end, tzin_) # type: ignore[arg-type]
+ if end is not None:
+ end = parsing.parse_timestamp(end, tzin)
pp: mptypes.PP = {
"minutes": minutes,
@@ -3318,13 +3321,13 @@ def get(
"weeks": weeks,
"months": months,
"years": years,
- "start": start_,
- "end": end_,
+ "start": start,
+ "end": end,
"add_a_row": add_a_row,
}
parsing.verify_period_parameters(pp)
- cal = self.calendars[lead_symbol_]
+ cal = self.calendars[lead_symbol]
interval_: intervals.PTInterval | None
if interval is None and not self._inferred_intraday_interval(cal, pp):
@@ -3337,7 +3340,7 @@ def get(
interval_ = interval
self._gpp = self.GetPricesParams(
- self, pp, interval_, lead_symbol_, anchor_, openend_, strict, priority_
+ self, pp, interval_, lead_symbol, anchor_, openend_, strict, priority_
)
table = None
@@ -3409,8 +3412,8 @@ def get(
table.index = self._force_partial_indices(table)
if table.pt.is_intraday:
- tzout_ = tzin_ if tzout is None else tzout
- elif tzout is not pytz.UTC: # type: ignore[comparison-overlap]
+ tzout_: ZoneInfo | Literal[False] = tzin if tzout is None else tzout
+ elif tzout is not UTC:
# if tzout tz aware or None
tzout_ = False # output as default tz
else:
@@ -3437,10 +3440,9 @@ def request_all_prices(self) -> dict[BI, list[pd.Interval]]:
value:
Date range over which data now stored locally.
"""
- for bi in self.bis: # type: ignore[attr-defined] # enum has __iter__ attr.
+ for bi in self.bis:
try:
- self.get(bi, start=self.limits[bi][0]) # type: ignore[arg-type] # ...
- # ... expecting mptypes used by pydantic.
+ self.get(bi, start=self.limits[bi][0])
except errors.PricesIntradayUnavailableError:
limit = TDInterval.T10
if not self.has_single_calendar and bi > limit:
@@ -3449,17 +3451,22 @@ def request_all_prices(self) -> dict[BI, list[pd.Interval]]:
raise
return self._pdata_ranges
- @pydantic.validate_arguments
+ @parse
def session_prices(
self,
- session: Optional[mptypes.DateTimestamp] = None,
+ session: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ Parser(parsing.verify_datetimestamp, parse_none=False),
+ ] = None,
stack: bool = False,
) -> pd.DataFrame:
"""Return prices for specific session.
Parameters
----------
- session: Optional[Union[pd.Timestamp, str, datetime.datetime, int, float]],
+ session:
+ pd.Timestamp | str | datetime.datetime | int | float | None
default: most recent available session
Session to return prices for.
@@ -3504,24 +3511,22 @@ def session_prices(
See `specific_query_methods.ipynb` tutorial for example usage.
"""
# pylint: disable=missing-param-doc, differing-type-doc
+ if TYPE_CHECKING:
+ assert session is None or isinstance(session, pd.Timestamp)
assert self.bi_daily is not None
- T = pd.Timestamp # pylint: disable=invalid-name
- # next line and `session_` only required so mypy accepts change of type from
- # pydantic mptype to Timestamp.
- session_ = None if session is None else T(session) # type: ignore[arg-type]
mr_session = self.last_requestable_session_any
- if session_ is None:
+ if session is None:
table = self._get_bi_table(self.bi_daily, (mr_session, mr_session))
return table.pt.stacked if stack else table
first_session = self.earliest_requestable_session
- parsing.verify_date_not_oob(session_, first_session, mr_session, "session")
+ parsing.verify_date_not_oob(session, first_session, mr_session, "session")
- if not any(cal.is_session(session_) for cal in self.calendars_unique):
- msg = f"{session_} is not a session of any associated calendar."
+ if not any(cal.is_session(session) for cal in self.calendars_unique):
+ msg = f"{session} is not a session of any associated calendar."
raise ValueError(msg)
- table = self._get_bi_table(self.bi_daily, (session_, session_))
+ table = self._get_bi_table(self.bi_daily, (session, session))
return table.pt.stacked if stack else table
def _date_to_session(
@@ -3535,13 +3540,20 @@ def _date_to_session(
session = f([c.date_to_session(date, direction) for c in self.calendars_unique])
return session
- @pydantic.validate_arguments
- def close_at(self, date: Optional[mptypes.DateTimestamp] = None) -> pd.DataFrame:
+ @parse
+ def close_at(
+ self,
+ date: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ Parser(parsing.verify_datetimestamp, parse_none=False),
+ ] = None,
+ ) -> pd.DataFrame:
"""Return most recent end-of-day prices as at a specific date.
Parameters
----------
- date : Union[pd.Timestamp, str, datetime.datetime, int, float]
+        date : pd.Timestamp | str | datetime.datetime | int | float | None
default: most recent date
Date for which to return most recent end-of-day prices.
@@ -3569,19 +3581,18 @@ def close_at(self, date: Optional[mptypes.DateTimestamp] = None) -> pd.DataFrame
See `specific_query_methods.ipynb` tutorial for example usage.
"""
# pylint: disable=missing-param-doc, differing-type-doc
+ if TYPE_CHECKING:
+ assert date is None or isinstance(date, pd.Timestamp)
assert self.bi_daily is not None
- # next line and `date_` only required so mypy accepts change of type from
- # pydantic mptype to Timestamp.
- date_ = None if date is None else pd.Timestamp(date) # type: ignore[arg-type]
mr_session = self.last_requestable_session_any
- if date_ is None:
- date_ = mr_session
+ if date is None:
+ date = mr_session
else:
first_session = self.earliest_requestable_session
- parsing.verify_date_not_oob(date_, first_session, mr_session)
+ parsing.verify_date_not_oob(date, first_session, mr_session)
- end_sesh = self._date_to_session(date_, "latest", "previous")
- start_sesh = self._date_to_session(date_, "earliest", "previous")
+ end_sesh = self._date_to_session(date, "latest", "previous")
+ start_sesh = self._date_to_session(date, "earliest", "previous")
start_sesh = min([start_sesh, self.last_requestable_session_all])
table = self._get_bi_table(self.bi_daily, (start_sesh, end_sesh))
return table.pt.naive.pt.close_at(end_sesh)
@@ -3685,7 +3696,7 @@ def _price_at_most_accurate(
return ma_tss
def _price_at_from_daily(
- self, minute: pd.Timestamp | None, tz: pytz.BaseTzInfo
+ self, minute: pd.Timestamp | None, tz: ZoneInfo
) -> pd.DataFrame:
"""Serve call for `_price_at` from daily prices table."""
# pylint: disable=too-many-locals
@@ -3734,11 +3745,18 @@ def _price_at_from_daily(
df.columns.name = "symbol"
return df
- @pydantic.validate_arguments
+ @parse
def price_at(
self,
- minute: Optional[mptypes.TimeTimestamp] = None,
- tz: Optional[mptypes.PricesTimezone] = None,
+ minute: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ Parser(parsing.verify_timetimestamp, parse_none=False),
+ ] = None,
+ tz: Annotated[
+ Optional[Union[str, ZoneInfo]],
+ Parser(parsing.to_prices_timezone, parse_none=False),
+ ] = None,
) -> pd.DataFrame:
"""Most recent price as at a minute or 'now'.
@@ -3759,7 +3777,7 @@ def price_at(
Parameters
----------
minute :
- Optional[Union[pd.Timestamp, str, datetime.datetime, int, float]],
+ pd.Timestamp | str | datetime.datetime | int | float | None,
default: now
Minute at which require price data.
@@ -3769,16 +3787,16 @@ def price_at(
request prices at a minute representing midnight pass as a
timezone aware pd.Timestamp.
- tz : Optional[Union[str, pytz.BaseTzInfo]], default: `default_tz`
+ tz : str | ZoneInfo | None, default: `default_tz`
Timezone of `minute` (if `minute` otherwise timezone naive) and
for returned index. Can be passed as:
- pytz.BaseTzInfo:
- Any instance returned by pytz.timezone
+ ZoneInfo:
+ Any instance returned by `zoneinfo.ZoneInfo`.
str:
- - valid input to `pytz.timezone`, for example "utc" or
- "US/Eastern".
+ - valid input to `zoneinfo.ZoneInfo`, for example "UTC"
+ or "US/Eastern".
- any symbol of `symbols`. For example, pass "GOOG" to
define timezone as timezone associated with that
symbol.
@@ -3797,45 +3815,45 @@ def price_at(
# pylint: disable=missing-param-doc, differing-type-doc, differing-param-doc
# pylint: disable=too-complex, too-many-locals, too-many-branches
# pylint: disable=too-many-statements
- assert tz is None or isinstance(tz, pytz.BaseTzInfo)
- tz_ = tz if tz is not None else self.tz_default
- T = pd.Timestamp # pylint: disable=invalid-name
- # next line and `minute_` only required so mypy accepts change of type from
- # pydantic mptype to Timestamp.
- minute_ = None if minute is None else T(minute) # type: ignore[arg-type]
+ if TYPE_CHECKING:
+ assert (minute is None) or isinstance(minute, pd.Timestamp)
+ assert tz is None or isinstance(tz, ZoneInfo)
+
+ if tz is None:
+ tz = self.tz_default
l_limit, r_limit = self.earliest_requestable_minute, helpers.now()
- if minute_ is not None:
- minute_ = parsing.parse_timestamp(minute_, tz_)
- parsing.verify_time_not_oob(minute_, l_limit, r_limit)
+ if minute is not None:
+ minute = parsing.parse_timestamp(minute, tz)
+ parsing.verify_time_not_oob(minute, l_limit, r_limit)
if (
- minute_ is None
- or minute_ < self.limit_intraday()
- or minute_ > helpers.now() - self.min_delay # no intraday data available
+ minute is None
+ or minute < self.limit_intraday()
+ or minute > helpers.now() - self.min_delay # no intraday data available
):
- return self._price_at_from_daily(minute_, tz_)
+ return self._price_at_from_daily(minute, tz)
# get bis for which indices are not misaligned
- start_sesh = self._minute_to_session(minute_, "earliest", "previous")
- end_sesh = self._minute_to_session(minute_, "latest", "previous")
+ start_sesh = self._minute_to_session(minute, "earliest", "previous")
+ end_sesh = self._minute_to_session(minute, "latest", "previous")
bis_synced = []
for bi in self.bis_intraday:
if self._indices_aligned[bi][slice(start_sesh, end_sesh)].all():
bis_synced.append(bi)
- minute_received = minute_
+ minute_received = minute
minute_advanced = False
- if not self.cc.is_open_on_minute(minute_):
+ if not self.cc.is_open_on_minute(minute):
# only useful if `minute_` is between a (sub)session close and right side of
# an unaligned final indice (inclusive of close). Advancing non-trading
# minute will include the unanligned indice and hence get price_at the
# close rather than as at end of prior indice.
- adv = self.cc.minute_to_trading_minute(minute_, "next") - helpers.ONE_MIN
+ adv = self.cc.minute_to_trading_minute(minute, "next") - helpers.ONE_MIN
assert isinstance(adv, pd.Timestamp)
- minute_ = adv
+ minute = adv
minute_advanced = True
- rngs = self._price_at_rng(bis_synced, minute_)
+ rngs = self._price_at_rng(bis_synced, minute)
# of those, bis for which prices available
bis = []
for bi, rng in rngs.items():
@@ -3843,23 +3861,23 @@ def price_at(
if pdata.available_range(rng):
bis.append(bi)
if not bis:
- return self._price_at_from_daily(minute_, tz_)
+ return self._price_at_from_daily(minute, tz)
# of those, those can represent minute most accurately
- ma_tss = self._price_at_most_accurate(bis, minute_)
+ ma_tss = self._price_at_most_accurate(bis, minute)
srs = pd.Series(ma_tss)
- diff = minute_ - srs
+ diff = minute - srs
ma_bis_srs = diff[diff == min(diff)]
ma_bis = [intervals.to_ptinterval(td) for td in ma_bis_srs.index]
# of those, those that have stored data
bis_stored = []
- for bi in ma_bis: # type: ignore[assignment] # assigning TDInterval, not BI
+ for bi in ma_bis:
pdata = self._pdata[bi]
if pdata.requested_range(rngs[bi]):
bis_stored.append(bi)
- bi = bis_stored[-1] if bis_stored else ma_bis[-1] # type: ignore[assignment]
+ bi = bis_stored[-1] if bis_stored else ma_bis[-1]
table = self._get_bi_table(bi, rngs[bi])
minute_ma = ma_tss[bi]
@@ -3888,29 +3906,41 @@ def price_at(
# indice was right side of an indice that is unaligned
# with (sub)session close. Can roll back to last trading minute + 1
table_pa.index = pd.DatetimeIndex([rolled + helpers.ONE_MIN])
- table_pa.index = table_pa.index.tz_convert(tz_)
+ table_pa.index = table_pa.index.tz_convert(tz)
return table_pa
- @pydantic.validate_arguments
+ @parse
def price_range(
self,
- start: Optional[mptypes.Timestamp] = None,
- end: Optional[mptypes.Timestamp] = None,
+ start: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ ] = None,
+ end: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ ] = None,
minutes: int = 0,
hours: int = 0,
days: int = 0,
weeks: int = 0,
months: int = 0,
years: int = 0,
- lead_symbol: Optional[mptypes.LeadSymbol] = None,
- tzin: Optional[mptypes.PricesTimezone] = None,
+ lead_symbol: Annotated[Optional[str], Parser(parsing.lead_symbol)] = None,
+ tzin: Annotated[
+ Optional[Union[str, ZoneInfo]],
+ Parser(parsing.to_prices_timezone, parse_none=False),
+ ] = None,
strict: bool = True,
- tzout: Optional[mptypes.PricesTimezone] = None,
+ tzout: Annotated[
+ Optional[Union[str, ZoneInfo]],
+ Parser(parsing.to_prices_timezone, parse_none=False),
+ ] = None,
include: Optional[mptypes.Symbols] = None,
exclude: Optional[mptypes.Symbols] = None,
stack: bool = False,
underlying: bool = False,
- ) -> Union[pd.DataFrame, Tuple[pd.DataFrame, pd.DataFrame]]:
+ ) -> Union[pd.DataFrame, tuple[pd.DataFrame, pd.DataFrame]]:
"""Return OHLCV data for a period.
Returns the following for each symbol:
@@ -3927,7 +3957,7 @@ def price_range(
----------
Method parameters as for `get`, except:
- tzout : Optional[Union[str, BaseTzinfo], default: as `tzin`
+ tzout : str | BaseTzinfo | None, default: as `tzin`
Timezone of period as expressed by the index (or level 0 of).
Defined in same way as `tzout` parameter of `get`.
@@ -3952,7 +3982,7 @@ def price_range(
Returns
-------
- Union[pd.DataFrame, tuple[pd.DataFrame, pd.DataFrame]]
+ pd.DataFrame | tuple[pd.DataFrame, pd.DataFrame]
If `stack` False and `underlying` False (default): pd.DataFrame
Single-row pd.DataFrame with 'open', 'high' 'low', 'close'
@@ -3985,7 +4015,7 @@ def price_range(
columns: pd.Index
'open', 'close', 'high' 'low' 'volume'.
- If underlying is True: tuple[pd.DataFrame, pd.DataFrame]]
+ If underlying is True: tuple[pd.DataFrame, pd.DataFrame]
[0] As above
[1] Underlying Dataframe from which range data evaluated.
@@ -3998,6 +4028,13 @@ def price_range(
See `specific_query_methods.ipynb` tutorial for example usage.
"""
# pylint: disable=missing-param-doc, too-many-arguments, too-many-locals
+        if TYPE_CHECKING:
+            assert start is None or isinstance(start, pd.Timestamp)
+            # was `isinstance(start, ...)` — copy-paste bug; must check `end`
+            assert end is None or isinstance(end, pd.Timestamp)
+            assert isinstance(lead_symbol, str)
+            assert tzin is None or isinstance(tzin, ZoneInfo)
+            assert tzout is None or isinstance(tzout, ZoneInfo)
interval = None
add_a_row = False
force = False
@@ -4017,9 +4054,9 @@ def price_range(
add_a_row,
lead_symbol,
tzin,
- anchor, # type: ignore[arg-type]
- openend, # type: ignore[arg-type]
- priority, # type: ignore[arg-type]
+ anchor,
+ openend,
+ priority,
strict,
composite,
force,
@@ -4037,16 +4074,11 @@ def price_range(
groups = df.groupby(by=group)
res = groups.agg(helpers.agg_funcs(df))
- # set tzout_
- tzout_: pytz.BaseTzInfo
- if tzout is not None:
- assert isinstance(tzout, pytz.BaseTzInfo)
- tzout_ = tzout
- elif tzin is not None:
- assert isinstance(tzin, pytz.BaseTzInfo)
- tzout_ = tzin
- else:
- tzout_ = self.timezones[self.gpp.lead_symbol]
+ if tzout is None:
+ if tzin is not None:
+ tzout = tzin
+ else:
+ tzout = self.timezones[self.gpp.lead_symbol]
# Define indice
if df.pt.is_daily:
@@ -4058,7 +4090,7 @@ def price_range(
first_session = helpers.to_tz_naive(left)
left = self.cc.session_open(first_session)
right = min(right, helpers.now())
- interval = pd.Interval(left.tz_convert(tzout_), right.tz_convert(tzout_))
+ interval = pd.Interval(left.tz_convert(tzout), right.tz_convert(tzout))
res.index = pd.IntervalIndex([interval])
if stack:
diff --git a/src/market_prices/prices/config/config_yahoo.py b/src/market_prices/prices/config/config_yahoo.py
index 0a3d536..71cae58 100644
--- a/src/market_prices/prices/config/config_yahoo.py
+++ b/src/market_prices/prices/config/config_yahoo.py
@@ -112,6 +112,7 @@
".JO": 15,
".KL": 15,
".KQ": 20,
+ ".KS": 20,
".L": 15,
".LS": 15,
".MC": 15,
@@ -127,6 +128,7 @@
".SI": 10,
".SN": 40,
".SS": 15,
+ ".SZ": 15,
".ST": 0,
".SW": 15,
".T": 20,
diff --git a/src/market_prices/prices/yahoo.py b/src/market_prices/prices/yahoo.py
index 20e94a5..d041f61 100644
--- a/src/market_prices/prices/yahoo.py
+++ b/src/market_prices/prices/yahoo.py
@@ -6,24 +6,21 @@
import datetime
import functools
import warnings
-from typing import Dict, List, Optional, Union
+from typing import Optional, Union
from pandas import DataFrame
import pandas as pd
import exchange_calendars as xcals
+from valimp import parse
import yahooquery as yq
from market_prices import errors, helpers, intervals, mptypes
+from market_prices.helpers import UTC
from market_prices.prices import base
from ..mptypes import Calendar, Symbols
from .config import config_yahoo
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
-
class PricesYahoo(base.PricesBase):
"""Retrieve and serve price data sourced via yahooquery.
@@ -33,20 +30,18 @@ class PricesYahoo(base.PricesBase):
Parameters
----------
- symbols: Union[str, List[str]]
+ symbols: str | list[str]
Symbols for which require price data. For example:
'AMZN'
'FB AAPL AMZN NFLX GOOG MSFT'
['FB', 'AAPL', 'AMZN']
calendars :
- Optional[
- Union[
- mptypes.Calendar,
- list[mptypes.Calendar],
- dict[str, mptypes.Calendar],
- ]
- ], default: evaluated
+ mptypes.Calendar |
+ list[mptypes.Calendar] |
+ dict[str, mptypes.Calendar] |
+ None
+ , default: evaluated
Calendar(s) defining trading times and timezones for `symbols`.
By default a calendar for each symbol is ascertained by
@@ -326,15 +321,15 @@ class PricesYahoo(base.PricesBase):
YAHOO_EXCHANGE_TO_CALENDAR = config_yahoo.EXCHANGE_TO_CALENDAR
YAHOO_DELAY_MAPPING = config_yahoo.DELAY_MAPPING
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def __init__(
self,
- symbols: Union[str, List[str]],
+ symbols: Union[str, list[str]],
calendars: Optional[mptypes.Calendars] = None,
lead_symbol: Optional[str] = None,
- delays: Optional[Union[int, List[int], Dict[str, int]]] = None,
+ delays: Optional[Union[int, list[int], dict[str, int]]] = None,
adj_close: bool = False,
- proxies: Optional[Dict[str, str]] = None,
+ proxies: Optional[dict[str, str]] = None,
):
symbols = helpers.symbols_to_list(symbols)
self._ticker = yq.Ticker(
@@ -497,7 +492,7 @@ def _set_daily_bi_limit(self):
if bi == intervals.ONE_DAY:
d[bi] = earliest
elif today - limit < earliest:
- d[bi] = pd.Timestamp(earliest, tz="UTC")
+ d[bi] = pd.Timestamp(earliest, tz=UTC)
self._update_base_limits(d)
# Methods to request data from yahooquery.
@@ -678,7 +673,7 @@ def _fill_reindexed_daily(
return df
delay = self.delays[symbol]
- if na_rows[-1] and helpers.now() <= cal.session_open(df.index[-1]) + delay:
+ if na_rows.iloc[-1] and helpers.now() <= cal.session_open(df.index[-1]) + delay:
na_rows.iloc[-1] = False
if not na_rows.any():
return df
@@ -848,9 +843,7 @@ def get_columns_index(index: pd.Index | None = None) -> pd.MultiIndex:
start = start if start is not None else sdf.index[0]
calendar = self.calendars[symbol]
index = self._get_trading_index(calendar, interval, start, end)
- reindex_index = (
- index if interval.is_daily else index.left # type: ignore[union-attr]
- )
+ reindex_index = index if interval.is_daily else index.left
sdf = sdf.reindex(reindex_index)
if interval.is_intraday:
sdf = self._fill_reindexed(sdf, calendar, interval, symbol)
@@ -987,7 +980,7 @@ def prices_for_symbols(self, symbols: Symbols) -> base.PricesBase:
cals = list(prices_obj.calendars_unique)
fewer_cals = len(cals) < len(self.calendars_unique)
- for bi in self.bis: # type: ignore[attr-defined] # enum has __iter__ attr.
+ for bi in self.bis:
new_pdata = copy.deepcopy(self._pdata[bi])
if new_pdata._table is not None:
table = new_pdata._table[symbols].copy()
diff --git a/src/market_prices/pt.py b/src/market_prices/pt.py
index 038a415..601cd04 100644
--- a/src/market_prices/pt.py
+++ b/src/market_prices/pt.py
@@ -4,28 +4,24 @@
import abc
import collections
+import datetime
import functools
import warnings
-from typing import TYPE_CHECKING, Dict, Literal, Optional, Union
+from typing import TYPE_CHECKING, Literal, Optional, Union, Annotated
+from zoneinfo import ZoneInfo
import exchange_calendars as xcals
import numpy as np
import pandas as pd
-import pytz
-from pytz import BaseTzInfo
+from valimp import parse, Coerce, Parser
import market_prices.utils.calendar_utils as calutils
from market_prices import errors, helpers, intervals, mptypes, parsing
+from market_prices.helpers import UTC
from market_prices.utils import general_utils as genutils
from market_prices.utils import pandas_utils as pdutils
-
-from .mptypes import Symbols
-from .utils.calendar_utils import CompositeCalendar
-
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
+from market_prices.utils.calendar_utils import CompositeCalendar
+from market_prices.mptypes import Symbols
# pylint: disable=too-many-lines
@@ -90,8 +86,7 @@ def __new__(cls, df: pd.DataFrame): # pylint: disable=missing-return-type-doc
" and frequency is greater than one day."
)
raise ValueError(msg)
- elif pdutils.index_is_normalized(df.index): # type: ignore[arg-type]
- # typing note: can pass IntervalIndex, param typed for pydantic
+ elif pdutils.index_is_normalized(df.index):
new_cls = PTMultipleSessions
elif df.index[0].left == df.index[0].right and pdutils.is_midnight(
df.index[0].left
@@ -148,7 +143,7 @@ def prices(self) -> pd.DataFrame:
@property
@abc.abstractmethod # abstracted so each subclass knows actual type
- def index(self) -> pd.DatatimeIndex | pd.IntervalIndex:
+ def index(self) -> pd.DatetimeIndex | pd.IntervalIndex:
"""Return index of prices table."""
@property
@@ -228,17 +223,17 @@ def utc(self) -> pd.DataFrame:
@property
@abc.abstractmethod
- def _tz(self) -> BaseTzInfo | None:
+ def _tz(self) -> ZoneInfo | None:
"""Timezone of index."""
@property
- def tz(self) -> BaseTzInfo | None:
+ def tz(self) -> ZoneInfo | None:
"""Timezone of index."""
return self._tz
# Index operations
- def _set_tz_non_local(self, tz: BaseTzInfo | None) -> pd.DataFrame:
+ def _set_tz_non_local(self, tz: ZoneInfo | None) -> pd.DataFrame:
"""Set tz to None or utc."""
if tz == self._tz:
return self.prices
@@ -246,17 +241,13 @@ def _set_tz_non_local(self, tz: BaseTzInfo | None) -> pd.DataFrame:
if self.is_daily:
assert isinstance(self.index, pd.DatetimeIndex)
index = self.index.tz_localize(tz)
- elif tz == pytz.UTC:
- index = pdutils.interval_index_new_tz(
- self.index, tz # type: ignore[arg-type] # expects mptype
- )
+ elif tz == UTC:
+ index = pdutils.interval_index_new_tz(self.index, tz)
else:
- index = pdutils.interval_index_new_tz(
- self.index, None # type: ignore[arg-type] # expects mptype
- )
+ index = pdutils.interval_index_new_tz(self.index, None)
return self._new_index(index)
- def _set_tz(self, tz: BaseTzInfo | None) -> pd.DataFrame:
+ def _set_tz(self, tz: ZoneInfo | None) -> pd.DataFrame:
"""Convert index to a timezone.
Subclass should overide if provides for setting tz to anything
@@ -264,7 +255,7 @@ def _set_tz(self, tz: BaseTzInfo | None) -> pd.DataFrame:
"""
return self._set_tz_non_local(tz)
- def set_tz(self, tz: str | BaseTzInfo):
+ def set_tz(self, tz: str | ZoneInfo):
"""Convert index timezone.
Parameters
@@ -293,7 +284,7 @@ def convert_to_table_tz(self, ts: pd.Timestamp) -> pd.Timestamp:
if ts.tz == self.tz:
return ts
elif ts.tz is None:
- return ts.tz_localize(self.tz) # type: ignore[unreachable] # is reachable
+ return ts.tz_localize(self.tz)
else:
return ts.tz_convert(self.tz)
@@ -371,7 +362,7 @@ def _compatible_sessions(
def _compatible_index(self, index: pd.IntervalIndex) -> bool:
"""Query if `index` compatible with table index."""
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def get_trading_index(
self,
calendar: xcals.ExchangeCalendar,
@@ -449,7 +440,7 @@ def _check_index_compatible(
non_compat_sessions = compat_sessions[~compat_sessions]
raise errors.IndexConflictError(calendar, non_compat_sessions.index)
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def reindex_to_calendar(
self,
calendar: xcals.ExchangeCalendar,
@@ -553,7 +544,7 @@ def indices_trading_status(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
(NB not possible for PTDaily).
"""
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def indices_trading(
self, calendar: xcals.ExchangeCalendar
) -> Union[pd.DatetimeIndex, pd.IntervalIndex]:
@@ -587,7 +578,7 @@ def indices_trading(
# Can't use its.all() as nan equates to True
return its[~(its.isna() | its.eq(False))].index
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def indices_non_trading(
self, calendar: xcals.ExchangeCalendar
) -> Union[pd.DatetimeIndex, pd.IntervalIndex]:
@@ -619,7 +610,7 @@ def indices_non_trading(
its = self.indices_trading_status(calendar)
return its[its.eq(False)].index
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def indices_partial_trading(
self, calendar: xcals.ExchangeCalendar
) -> Union[pd.DatetimeIndex, pd.IntervalIndex]:
@@ -652,7 +643,7 @@ def indices_partial_trading(
its = self.indices_trading_status(calendar)
return its[its.isna()].index
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def indices_all_trading(self, calendar: xcals.ExchangeCalendar) -> bool:
"""Query if all indices represent trading indices.
@@ -688,7 +679,7 @@ def _partial_non_trading(
for nanos in [calendar.first_minutes_nanos, calendar.break_ends_nanos]:
arr = np.intersect1d(nanos, trading_mins.values.view("int64"))
if arr.size > 0:
- trading_mins = trading_mins.drop(pd.DatetimeIndex(arr, tz=pytz.UTC))
+ trading_mins = trading_mins.drop(pd.DatetimeIndex(arr, tz=UTC))
all_mins = pd.date_range(start, end, freq="1T")
non_t = all_mins.difference(trading_mins)
@@ -707,10 +698,10 @@ def _partial_non_trading(
middle.append(pd.Interval(s, e, side))
return pd.IntervalIndex(first + middle + last)
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def indices_partial_trading_info(
self, calendar: xcals.ExchangeCalendar
- ) -> Dict[pd.IntervalIndex, pd.IntervalIndex]:
+ ) -> dict[pd.IntervalIndex, pd.IntervalIndex]:
"""Return information on partial trading indices.
Parameters
@@ -866,11 +857,11 @@ def fill(s: str | None):
if closes_missing.all() or not closes_missing.any():
return
if method != "bfill":
- df.loc[:, close_key] = df[close_key].fillna(method="ffill")
+ df.loc[:, close_key] = df[close_key].ffill()
df.loc[bv, open_key] = df.loc[bv, close_key]
bv = df[close_key].isna()
if method != "ffill":
- df.loc[:, open_key] = df[open_key].fillna(method="bfill")
+ df.loc[:, open_key] = df[open_key].bfill()
df.loc[bv, close_key] = df.loc[bv, open_key]
closes_still_missing = df[close_key].isna()
@@ -891,10 +882,10 @@ def fill(s: str | None):
return df
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def operate(
self,
- tz: Optional[Union[Literal[False], str, BaseTzInfo]] = False,
+ tz: Optional[Union[Literal[False], str, ZoneInfo]] = False,
fill: Optional[Literal["ffill", "bfill", "both"]] = None,
include: Optional[Symbols] = None,
exclude: Optional[Symbols] = None,
@@ -916,16 +907,16 @@ def operate(
Parameters
----------
- tz : Literal[False] | str | BaseTzInfo | None,
+ tz : Literal[False] | str | ZoneInfo | None,
default: False (no change)
For tables with an intraday interval:
timezone to set the index to. Only available if index is
timezone aware. If passed as string should be a valid
- arg of pytz.timezone.
+ arg for `zoneinfo.ZoneInfo`.
For tables with an interval that is daily or higher (including
tables that are a composite of daily and intraday price data):
- "UTC" or pytz.UTC: UTC
+ "UTC" or `zoneinfo.ZoneInfo("UTC")`: UTC
None: tz-naive index.
fill : Literal['ffill', 'bfill', 'both'] | None, default: None
@@ -936,13 +927,13 @@ def operate(
filling any initial missing values that would not
otherwise be filled with "ffill" alone.
- include : List[str] | str | None, default: include all symbols
+ include : list[str] | str | None, default: include all symbols
Symbols to include. All other symbols will be excluded. If
passed, do not pass `exclude`.
Ignored if table does not have symbols.
- exclude : List[str] | str | None, default: exclude no symbols
+ exclude : list[str] | str | None, default: exclude no symbols
Symbols to exclude. All other symbols will be included. If
passed, do not pass `include`.
@@ -1006,7 +997,7 @@ def operate(
prices = prices.pt.fillna(fill)
if isinstance(tz, str):
- tz = pytz.timezone(tz)
+ tz = ZoneInfo(tz)
if tz or tz is None:
if self.is_intraday:
if tz is None:
@@ -1015,7 +1006,7 @@ def operate(
self._verify_tz_awareness()
prices = prices.pt._set_tz(tz)
else: # pylint: disable=else-if-used
- if tz not in (None, pytz.UTC):
+ if tz not in (None, UTC):
raise ValueError(
f"`tz` for class {type(self)} can only be UTC or timezone naive"
f" (None), not {tz}."
@@ -1085,7 +1076,7 @@ class PTDaily(_PT):
""".pt accessor for prices table with daily interval."""
@property
- def index(self) -> pd.DatatimeIndex:
+ def index(self) -> pd.DatetimeIndex:
"""Return index of prices table."""
return self.prices.index
@@ -1105,10 +1096,10 @@ def _naive_index(self) -> pd.DatetimeIndex:
@property
def _utc_index(self) -> pd.DatetimeIndex:
- return self.prices.index.tz_localize(None).tz_localize(pytz.UTC)
+ return self.prices.index.tz_localize(None).tz_localize(UTC)
@property
- def _tz(self) -> BaseTzInfo | None:
+ def _tz(self) -> ZoneInfo | None:
"""Timezone of index."""
return self.index.tz
@@ -1150,7 +1141,7 @@ def _compatible_index(self, *_, **__) -> bool:
"""Query if table index compatible with an index of daily frequency."""
return True
- @functools.lru_cache
+ @functools.cache
def indices_trading_status(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
"""Query indices trading/non-trading status.
@@ -1182,13 +1173,20 @@ def price_at(self, *_, **__):
)
raise NotImplementedError(msg)
- @pydantic.validate_arguments
- def session_prices(self, session: mptypes.DateTimestamp) -> pd.DataFrame:
+ @parse
+ def session_prices(
+ self,
+ session: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float],
+ Coerce(pd.Timestamp),
+ Parser(parsing.verify_datetimestamp),
+ ],
+ ) -> pd.DataFrame:
"""Return OHLCV prices for a given session.
Parameters
----------
- session : Union[pd.Timestamp, str, datetime.datetime, int, float]
+ session : pd.Timestamp | str | datetime.datetime | int | float
Session for which require prices. Must not include time
component. If passsed as a pd.Timestamp must be tz-naive.
@@ -1201,17 +1199,24 @@ def session_prices(self, session: mptypes.DateTimestamp) -> pd.DataFrame:
level 0: symbol
level 1: ['open', 'high', 'low', 'close', 'volume']
"""
- # so mypy treats as Timestamp...
- session_ = pd.Timestamp(session) # type: ignore[call-overload]
- parsing.verify_date_not_oob(session_, self.first_ts, self.last_ts, "session")
+ if TYPE_CHECKING:
+ assert isinstance(session, pd.Timestamp)
+ parsing.verify_date_not_oob(session, self.first_ts, self.last_ts, "session")
if self.index.tz is not None:
- session_ = session_.tz_localize(pytz.UTC)
- if session_ not in self.index:
- raise ValueError(f"`session` {session_} is not present in the table.")
- return self.prices.loc[[session_]]
+ session = session.tz_localize(UTC)
+ if session not in self.index:
+ raise ValueError(f"`session` {session} is not present in the table.")
+ return self.prices.loc[[session]]
- @pydantic.validate_arguments
- def close_at(self, date: mptypes.DateTimestamp) -> pd.DataFrame:
+ @parse
+ def close_at(
+ self,
+ date: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float],
+ Coerce(pd.Timestamp),
+ Parser(parsing.verify_datetimestamp),
+ ],
+ ) -> pd.DataFrame:
"""Return price as at end of a given day.
For symbols where `date` represents a trading session, price will
@@ -1221,7 +1226,7 @@ def close_at(self, date: mptypes.DateTimestamp) -> pd.DataFrame:
Parameters
----------
- date : Union[pd.Timestamp, str, datetime.datetime, int, float]
+ date : pd.Timestamp | str | datetime.datetime | int | float
Date for which require end-of-day prices. Must not include time
component. If passsed as a pd.Timestamp must be tz-naive.
@@ -1233,13 +1238,13 @@ def close_at(self, date: mptypes.DateTimestamp) -> pd.DataFrame:
columns: Index
symbol.
"""
- # so mypy treats as Timestamp...
- date_ = pd.Timestamp(date) # type: ignore[call-overload]
- parsing.verify_date_not_oob(date_, self.first_ts, self.last_ts)
+ if TYPE_CHECKING:
+ assert isinstance(date, pd.Timestamp)
+ parsing.verify_date_not_oob(date, self.first_ts, self.last_ts)
if self.index.tz is not None:
- date_ = date_.tz_localize(pytz.UTC)
+ date = date.tz_localize(UTC)
prices = self.operate(fill="ffill", close_only=True)
- i = prices.index.get_indexer([date_], "ffill")[0]
+ i = prices.index.get_indexer([date], "ffill")[0]
return prices.iloc[[i]]
# Downsampling
@@ -1266,8 +1271,7 @@ def _downsample_cbdays(
if not isinstance(calendar, xcals.ExchangeCalendar):
raise TypeError(error_start + f" although received {calendar}." + advices)
- if calendar.day != pdfreq.base: # type: ignore[attr-defined]
- # typing note: CustomBusinessDay does have .base attr
+ if calendar.day != pdfreq.base:
raise ValueError(
error_start + " which has a `calendar.day` attribute equal to the"
" base CustomBusinessDay being downsampled to. Received calendar as"
@@ -1285,8 +1289,7 @@ def _downsample_cbdays(
# to remove any indices that would otherwise result in the last indice
# being comprised of less than the required number of CustomBuisnessDays.
sessions = calendar.sessions_in_range(df.pt.first_ts, df.pt.last_ts)
- excess_sessions = len(sessions) % pdfreq.n # type: ignore[attr-defined]
- # typing note: CustomBusinessDay does have .n attr
+ excess_sessions = len(sessions) % pdfreq.n
# first row of dataframe to be resampled has to be a `calendar` session,
# to the contrary initial rows (labeled earlier than a calendar session)
@@ -1347,7 +1350,7 @@ def _downsample_months(
if not pre_table_sessions.empty:
start_ds = pd_offset.rollforward(start_table)
df = df[start_ds:]
- resampled = helpers.resample(df, pdfreq, origin="start")
+ resampled = helpers.resample(df, pdfreq, origin="start", nominal_start=start_ds)
resampled.index = pdutils.get_interval_index(resampled.index, pdfreq)
if drop_incomplete_last_indice:
@@ -1359,7 +1362,7 @@ def _downsample_months(
return resampled
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def downsample( # pylint: disable=arguments-differ
self,
pdfreq: Union[str, pd.offsets.BaseOffset],
@@ -1466,12 +1469,12 @@ def downsample( # pylint: disable=arguments-differ
raise ValueError(msg) from None
else:
assert offset is not None
- freqstr: str = offset.freqstr # type: ignore[assignment] # is a str
+ freqstr: str = offset.freqstr
value, unit = helpers.extract_freq_parts(freqstr)
if unit.lower() == "d":
if isinstance(self.freq, pd.offsets.CustomBusinessDay):
- pdfreq = self.freq.base * value # type: ignore[attr-defined]
+ pdfreq = self.freq.base * value
return self._downsample_cbdays(pdfreq, calendar)
else:
return self._downsample_days(pdfreq)
@@ -1522,15 +1525,15 @@ def _utc_index(self) -> pd.IntervalIndex:
indexes = []
for index in (ii.left, ii.right):
if index.tz is None:
- indexes.append(index.tz_localize(pytz.UTC))
- elif index.tz == pytz.UTC:
+ indexes.append(index.tz_localize(UTC))
+ elif index.tz == UTC:
indexes.append(index)
else:
- indexes.append(index.tz_convert(pytz.UTC))
+ indexes.append(index.tz_convert(UTC))
return pd.IntervalIndex.from_arrays(indexes[0], indexes[1], ii.closed)
@property
- def _tz(self) -> BaseTzInfo | None:
+ def _tz(self) -> ZoneInfo | None:
if self.index.left.tz == self.index.right.tz:
return self.index.left.tz
else:
@@ -1611,16 +1614,16 @@ def is_intraday(self) -> bool:
"""Query if interval is less than daily."""
return True
- def _tz_index(self, tz: str | BaseTzInfo) -> pd.IntervalIndex:
+ def _tz_index(self, tz: str | ZoneInfo) -> pd.IntervalIndex:
"""Return index with tz as `tz`."""
- return pdutils.interval_index_new_tz(self.index, tz) # type: ignore[arg-type]
+ return pdutils.interval_index_new_tz(self.index, tz)
- def _set_tz(self, tz: str | BaseTzInfo) -> pd.DataFrame: # type: ignore[override]
+ def _set_tz(self, tz: str | ZoneInfo) -> pd.DataFrame: # type: ignore[override]
"""Convert index to a given timezone."""
# typing note: extends super args to provide for tz to be passed as str.
return self._new_index(self._tz_index(tz))
- def set_tz(self, tz: str | BaseTzInfo) -> pd.DataFrame:
+ def set_tz(self, tz: str | ZoneInfo) -> pd.DataFrame:
"""Set index timezone.
Parameters
@@ -1634,7 +1637,7 @@ def set_tz(self, tz: str | BaseTzInfo) -> pd.DataFrame:
# Mappings to sessions
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def sessions(
self,
calendar: Union[xcals.ExchangeCalendar, calutils.CompositeCalendar],
@@ -1676,7 +1679,7 @@ def _get_session(indice) -> pd.Timestamp:
srs.name = "session"
return srs
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def session_column(
self,
calendar: Union[xcals.ExchangeCalendar, calutils.CompositeCalendar],
@@ -1734,7 +1737,7 @@ def _compatible_index(self, index: pd.IntervalIndex) -> bool:
"""Query if `index` compatible with table index."""
if self.interval == helpers.ONE_MIN: # shortcut
return True
- assert index.left.tz is pytz.UTC
+ assert index.left.tz is UTC
df = self.prices if self.is_daily else self.utc
index_union = df.pt.index.union(index, sort=False).sort_values()
return index_union.is_non_overlapping_monotonic
@@ -1757,8 +1760,8 @@ def _is_trading_period(
else:
return False
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
- @functools.lru_cache
+ @functools.cache
+ @parse
def indices_trading_status(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
"""Query indices trading/non-trading status.
@@ -1790,7 +1793,7 @@ def indices_trading_status(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
# trading minutes
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+ @parse
def indices_trading_minutes(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
"""Return number of trading minutes that comprise each indice.
@@ -1942,20 +1945,29 @@ def _get_row(self, ts: pd.Timestamp) -> pd.DataFrame:
i = self._get_loc(ts, method="ffill")
return self.prices.iloc[[i]]
- @pydantic.validate_arguments
+ @parse
def price_at( # pylint: disable=arguments-differ
- self, ts: mptypes.TimeTimestamp, tz: Optional[mptypes.Timezone] = None
+ self,
+ ts: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float],
+ Coerce(pd.Timestamp),
+ Parser(parsing.verify_timetimestamp),
+ ],
+ tz: Annotated[
+ Union[ZoneInfo, str, None],
+ Parser(parsing.to_timezone, parse_none=False),
+ ] = None,
) -> pd.DataFrame:
"""Return most recent price as at a given timestamp.
Parameters
----------
- ts : Union[pd.Timestamp, str, datetime.datetime, int, float]
+ ts
Timestamp as at which to return most recent available price.
Will raise ValueError if `ts` represents a session. To request
- prices at midnight pass as a tz-aware pandas Timestamp.
+ prices at midnight pass as a tz-aware `pd.Timestamp`.
- tz : Optional[BaseTzInfo | str], default: table's tz
+ tz : default: table's tz
Timezone of `ts` and to use for returned index.
If `ts` is tz-aware then `tz` will NOT override `ts` timezone,
although will be used to define the index of the returned
@@ -1967,19 +1979,21 @@ def price_at( # pylint: disable=arguments-differ
session_prices
"""
# pylint: disable=missing-param-doc
- if tz is None:
- self._verify_tz_awareness()
- tz_ = self.tz if tz is None else tz
if TYPE_CHECKING:
assert isinstance(ts, pd.Timestamp)
- assert isinstance(tz_, BaseTzInfo)
- ts_ = parsing.parse_timestamp(ts, tz_)
+ assert tz is None or isinstance(tz, ZoneInfo)
+
+ if tz is None:
+ self._verify_tz_awareness()
+ tz = self.tz
+
+ ts = parsing.parse_timestamp(ts, tz)
parsing.verify_time_not_oob(
- ts_, self.first_ts.astimezone(pytz.UTC), self.last_ts.astimezone(pytz.UTC)
+ ts, self.first_ts.astimezone(UTC), self.last_ts.astimezone(UTC)
)
df = self.utc.pt.fillna("ffill")
- row = df.pt._get_row(ts_) # pylint: disable=protected-access
- side = "left" if row.index.contains(ts_) else "right"
+ row = df.pt._get_row(ts) # pylint: disable=protected-access
+ side = "left" if row.index.contains(ts) else "right"
column = "open" if side == "left" else "close"
if self.has_symbols:
if TYPE_CHECKING:
@@ -1989,10 +2003,10 @@ def price_at( # pylint: disable=arguments-differ
else:
columns = [column]
res = row[columns]
- ts_ = getattr(row.index, side)
+ ts = getattr(row.index, side)
if TYPE_CHECKING:
- assert isinstance(ts_, pd.Timestamp)
- res.index = ts_ if tz_ is None else ts_.tz_convert(tz_)
+ assert isinstance(ts, pd.Timestamp)
+ res.index = ts if tz is None else ts.tz_convert(tz)
res.columns = (
res.columns.droplevel(1) if self.has_symbols else pd.Index(["price"])
)
@@ -2147,7 +2161,7 @@ def _downsample_workback(self, interval: pd.Timedelta) -> pd.DataFrame:
table_interval = self.interval
# Resample each n rows, avoids introducing non-trading times and
# allows a row to encompass prices from contiguous trading sessions
- num_rows = interval // table_interval # type: ignore[operator]
+ num_rows = interval // table_interval
excess_rows = len(self.prices) % num_rows
df = self.prices[excess_rows:].copy()
agg_functions = helpers.agg_funcs(df)
@@ -2165,11 +2179,11 @@ def _downsample_workback(self, interval: pd.Timedelta) -> pd.DataFrame:
res = helpers.volume_to_na(res)
return res
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
- def downsample( # type: ignore[override] # extends super implementation
+ @parse
+ def downsample(
# pylint: disable=arguments-differ
self,
- pdfreq: mptypes.PandasFrequency,
+ pdfreq: Annotated[str, Coerce(mptypes.PandasFrequency)],
anchor: Literal["workback", "open"] = "workback",
calendar: Optional[xcals.ExchangeCalendar] = None,
curtail_end: bool = False,
@@ -2290,6 +2304,9 @@ def downsample( # type: ignore[override] # extends super implementation
reindex_to_calendar
"""
# pylint: disable=too-many-arguments, missing-param-doc
+ if TYPE_CHECKING:
+ assert isinstance(pdfreq, mptypes.PandasFrequency)
+
if not self.has_regular_interval:
raise ValueError(
"Cannot downsample a table for which a regular interval"
@@ -2325,7 +2342,7 @@ def downsample( # type: ignore[override] # extends super implementation
f" whilst table interval is {table_interval}."
)
- if interval % table_interval: # type: ignore[operator] # % is supported
+ if interval % table_interval:
raise ValueError(
"Table interval must be a factor of downsample interval,"
f" although downsampled interval evaluated as {interval}"
@@ -2387,8 +2404,8 @@ def _are_trading_sessions(
else:
return np.NaN
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
- @functools.lru_cache
+ @functools.cache
+ @parse
def indices_trading_status(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
"""Query indices trading/non-trading status.
@@ -2417,10 +2434,10 @@ def indices_trading_status(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
srs = self.index.to_series()
return srs.apply(self._are_trading_sessions, args=[calendar])
- @pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
- def indices_partial_trading_info( # type: ignore[override] # changes super imp.
+ @parse
+ def indices_partial_trading_info(
self, calendar: xcals.ExchangeCalendar
- ) -> Dict[pd.IntervalIndex, pd.DatetimeIndex]:
+ ) -> dict[pd.IntervalIndex, pd.DatetimeIndex]:
"""Return info on partial trading indices.
Returns information on indices that cover both sessions and
@@ -2495,15 +2512,14 @@ def _interval(self) -> None:
def daily_part(self) -> pd.DataFrame:
"""Part of composite table comprising daily intervals."""
df = self.prices[self.index.left == self.index.right]
- # mypy incorrectly assumes df is a pd.Series indexed with pd.Index
- df.index = df.index.left.tz_convert(None) # type: ignore[attr-defined]
- return df # type: ignore[return-value] # return is as requried and declared.
+ df.index = df.index.left.tz_convert(None)
+ return df
@property
def intraday_part(self) -> pd.DataFrame:
"""Part of composite table comprising intraday intervals."""
df = self.prices[self.index.left != self.index.right]
- return df # type: ignore[return-value] # return is as requried and declared.
+ return df
def indices_trading_status(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
"""Query indices trading/non-trading status.
@@ -2537,8 +2553,15 @@ def indices_trading_status(self, calendar: xcals.ExchangeCalendar) -> pd.Series:
srs.index = self.index
return srs
- @pydantic.validate_arguments
- def price_at(self, ts: mptypes.TimeTimestamp) -> pd.DataFrame:
+ @parse
+ def price_at(
+ self,
+ ts: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float],
+ Coerce(pd.Timestamp),
+ Parser(parsing.verify_timetimestamp),
+ ],
+ ) -> pd.DataFrame:
"""Most recent registered price as at a given timestamp.
Note: Only available over part of index defined by intraday
diff --git a/src/market_prices/utils/calendar_utils.py b/src/market_prices/utils/calendar_utils.py
index bf7c7e6..2d38c4e 100644
--- a/src/market_prices/utils/calendar_utils.py
+++ b/src/market_prices/utils/calendar_utils.py
@@ -11,19 +11,16 @@
from collections import abc
from typing import TYPE_CHECKING, Literal
-import exchange_calendars as xcals # type: ignore[import]
+import exchange_calendars as xcals
+from exchange_calendars.calendar_helpers import Date, Minute, Session, TradingMinute
import numpy as np
import pandas as pd
-import pytz
-from exchange_calendars.calendar_helpers import Date, Minute, Session, TradingMinute
+from valimp import parse
+
from market_prices import helpers, intervals, errors
+from market_prices.helpers import UTC
from market_prices.utils import pandas_utils as pdutils
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
-
def get_exchange_info() -> pd.DataFrame:
"""Retrieve information on exchanges for which calendars available.
@@ -95,7 +92,7 @@ def __str__(self) -> str:
return msg
-@pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+@parse
def subsession_length(
calendar: xcals.ExchangeCalendar,
session: Session,
@@ -357,7 +354,7 @@ def minutes(self) -> pd.DatetimeIndex:
Excludes minutes when no underlying calendar is open.
"""
- return pd.DatetimeIndex(self.minutes_nanos, tz=pytz.UTC)
+ return pd.DatetimeIndex(self.minutes_nanos, tz=UTC)
@property
def first_minutes(self) -> pd.Series:
@@ -382,12 +379,12 @@ def last_session(self) -> pd.Timestamp:
@property
def first_minute(self) -> pd.Timestamp:
"""First composite calendar minute."""
- return self.first_minutes[0]
+ return self.first_minutes.iloc[0]
@property
def last_minute(self) -> pd.Timestamp:
"""Last composite calendar minute."""
- return self.last_minutes[-1]
+ return self.last_minutes.iloc[-1]
def _parse_session(self, session: Session) -> pd.Timestamp:
"""Parse client input representing a session."""
@@ -765,7 +762,7 @@ def sessions_in_range(
end = self._parse_end(end)
slc_start = self._sessions_nanos.searchsorted(start.value, "left")
slc_stop = self._sessions_nanos.searchsorted(end.value, "right")
- return self.sessions[slc_start:slc_stop] # type: ignore[misc]
+ return self.sessions[slc_start:slc_stop]
def sessions_overlap(
self, start: Date | None = None, end: Date | None = None
@@ -991,7 +988,7 @@ def trading_index(
raise errors.CompositeIndexCalendarConflict(cal) from e
# work in tz-naive for quicker .union
- index = pdutils.interval_index_new_tz(index, None) # type: ignore[arg-type]
+ index = pdutils.interval_index_new_tz(index, None)
for cal in self.calendars[1:]:
if isinstance(ignore_breaks_, dict):
ignore_breaks = ignore_breaks_[cal]
@@ -1002,20 +999,13 @@ def trading_index(
)
except xcals.errors.IntervalsOverlapError as e:
raise errors.CompositeIndexCalendarConflict(cal) from e
- index_ = pdutils.interval_index_new_tz(
- index_, None # type: ignore[arg-type] # uses mptype
- )
+ index_ = pdutils.interval_index_new_tz(index_, None)
index = index.union(index_, sort=False)
index = index.sort_values()
- if (
- raise_overlapping
- and not index.is_non_overlapping_monotonic # type: ignore[attr-defined]
- ):
+ if raise_overlapping and not index.is_non_overlapping_monotonic:
raise errors.CompositeIndexConflict()
if utc:
- index = pdutils.interval_index_new_tz(
- index, pytz.UTC # type: ignore[arg-type] # uses mptype
- )
+ index = pdutils.interval_index_new_tz(index, UTC)
return index
@@ -1159,7 +1149,7 @@ def _add_to_index(self, last_close: pd.Series, next_open: pd.Series):
except ValueError:
last_close_ = last_close.dropna()
# last value of last close is last calendar close (there is no next open)
- if last_close_.iloc[-1] == self.cc.closes[-1].tz_convert(None):
+ if last_close_.iloc[-1] == self.cc.closes.iloc[-1].tz_convert(None):
index = pd.IntervalIndex.from_arrays(
last_close_.iloc[:-1], next_open.dropna(), "left"
)
@@ -1208,8 +1198,8 @@ def non_trading_index(
index = self._index.sort_values()
if utc:
- left = index.left.tz_localize(pytz.UTC)
- right = index.right.tz_localize(pytz.UTC)
+ left = index.left.tz_localize(UTC)
+ right = index.right.tz_localize(UTC)
return pd.IntervalIndex.from_arrays(left, right, "left")
else:
return index
diff --git a/src/market_prices/utils/pandas_utils.py b/src/market_prices/utils/pandas_utils.py
index cd611d9..7d63bf3 100644
--- a/src/market_prices/utils/pandas_utils.py
+++ b/src/market_prices/utils/pandas_utils.py
@@ -5,22 +5,15 @@
import warnings
from collections import abc
from contextlib import contextmanager
-from typing import Any, List, Literal, Union
+from typing import Any, Literal, Union, Annotated
+from zoneinfo import ZoneInfo
import numpy as np
import pandas as pd
-from pandas import DataFrame, Series
-import pytz
+from valimp import parse, Parser
-from market_prices import mptypes
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
-
-
-def pdfreq_to_offset(pdfreq: str) -> pd.offsets.BaseOffset: # type: ignore[name-defined]
+def pdfreq_to_offset(pdfreq: str) -> pd.offsets.BaseOffset:
"""Pandas frequency string to a pandas offset.
Parameters
@@ -216,7 +209,7 @@ def make_non_overlapping(
except AttributeError:
tz = None
else:
- index = interval_index_new_tz(index, None) # type: ignore[arg-type] # mptype
+ index = interval_index_new_tz(index, None)
# evaluate full_overlap_mask
# as 'int64' to use expanding, as series to use expanding and shift
@@ -251,7 +244,7 @@ def make_non_overlapping(
def get_interval_index(
left: pd.DatetimeIndex,
- offset: str | pd.offsets.BaseOffset, # type: ignore[name-defined]
+ offset: str | pd.offsets.BaseOffset,
closed="left",
non_overlapping=False,
) -> pd.IntervalIndex:
@@ -301,7 +294,7 @@ def get_interval_index(
return index
-@pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+@parse
def interval_contains(interval: pd.Interval, intervals: pd.IntervalIndex) -> np.ndarray:
"""Query which intervals are contained within an interval.
@@ -367,10 +360,10 @@ def interval_contains(interval: pd.Interval, intervals: pd.IntervalIndex) -> np.
return left_cond & right_cond
-@pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+@parse
def remove_intervals_from_interval(
interval: pd.Interval, intervals: pd.IntervalIndex
-) -> List[pd.Interval]:
+) -> list[pd.Interval]:
"""Difference between an interval and some intervals.
Parameters
@@ -384,7 +377,7 @@ def remove_intervals_from_interval(
Returns
-------
- List[pd.Interval]
+ list[pd.Interval]
List of intervals that remain after subtracting 'intervals' from
'interval'.
@@ -463,9 +456,24 @@ def remove_intervals_from_interval(
return diff
-@pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+# valimp Parser function
+def verify_interval_datetime_index(
+ name: str, obj: pd.DatetimeIndex | pd.IntervalIndex, _
+) -> pd.DatetimeIndex | pd.IntervalIndex:
+ """Verify pd.IntervalIndex has both sides as pd.DatetimeIndex."""
+ if isinstance(obj, pd.IntervalIndex) and not isinstance(obj.left, pd.DatetimeIndex):
+ raise ValueError(
+ f"'{name}' can only take a pd.IntervalIndex that has each side"
+ " as type pd.DatetimeIndex, although received with left side"
+ f" as type '{type(obj.left)}'."
+ )
+ return obj
+
+
+@parse
def interval_index_new_tz(
- index: mptypes.IntervalDatetimeIndex, tz: Union[pytz.tzinfo.BaseTzInfo, str, None]
+ index: Annotated[pd.IntervalIndex, Parser(verify_interval_datetime_index)],
+ tz: Union[ZoneInfo, str, None],
) -> pd.IntervalIndex:
"""Return pd.IntervalIndex with different timezone.
@@ -492,15 +500,16 @@ def interval_index_new_tz(
Examples
--------
+ >>> tz = ZoneInfo("US/Central")
>>> left = pd.date_range(
- ... '2021-05-01 12:00', periods=5, freq='1H', tz='US/Central'
+ ... '2021-05-01 12:00', periods=5, freq='1H', tz=tz
... )
>>> right = left + pd.Timedelta(30, 'T')
>>> index = pd.IntervalIndex.from_arrays(left, right)
>>> index.right.tz
-
- >>> new_index = interval_index_new_tz(index, tz=pytz.UTC)
- >>> new_index.left.tz.zone == new_index.right.tz.zone == "UTC"
+ zoneinfo.ZoneInfo(key='US/Central')
+ >>> new_index = interval_index_new_tz(index, tz=ZoneInfo("UTC"))
+ >>> new_index.left.tz.key == new_index.right.tz.key == "UTC"
True
"""
indices = []
@@ -512,9 +521,12 @@ def interval_index_new_tz(
return pd.IntervalIndex.from_arrays(indices[0], indices[1], closed=index.closed)
-@pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
+@parse
def index_is_normalized(
- index: Union[pd.DatetimeIndex, mptypes.IntervalDatetimeIndex]
+ index: Annotated[
+ Union[pd.DatetimeIndex, pd.IntervalIndex],
+ Parser(verify_interval_datetime_index),
+ ]
) -> bool:
"""Query if an index is normalized.
@@ -553,8 +565,8 @@ def index_is_normalized(
return (index == index.normalize()).all()
-@pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
-def indexes_union(indexes: List[pd.Index]) -> pd.Index:
+@parse
+def indexes_union(indexes: list[pd.Index]) -> pd.Index:
"""Union multiple pd.Index objects.
Parameters
@@ -580,8 +592,8 @@ def indexes_union(indexes: List[pd.Index]) -> pd.Index:
return index
-@pydantic.validate_arguments(config=dict(arbitrary_types_allowed=True))
-def index_union(indexes: List[Union[pd.Index, Series, DataFrame]]) -> pd.Index:
+@parse
+def index_union(indexes: list[Union[pd.Index, pd.Series, pd.DataFrame]]) -> pd.Index:
"""Union indexes of multiple indexes, Series and/or DataFrame.
Parameters
diff --git a/tests/conftest.py b/tests/conftest.py
index ce9a310..3a7e2f0 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -4,6 +4,7 @@
from collections import abc
import os
+from zoneinfo import ZoneInfo
from hypothesis import settings, HealthCheck
import exchange_calendars as xcals
@@ -132,7 +133,7 @@ def one_sec() -> abc.Iterator[pd.Timedelta]:
yield pd.Timedelta(1, "S")
-_now_utc = pd.Timestamp("2021-11-17 21:59", tz="UTC")
+_now_utc = pd.Timestamp("2021-11-17 21:59", tz=ZoneInfo("UTC"))
@pytest.fixture(scope="session")
diff --git a/tests/hypstrtgy.py b/tests/hypstrtgy.py
index f6a2449..4f4ca72 100644
--- a/tests/hypstrtgy.py
+++ b/tests/hypstrtgy.py
@@ -18,11 +18,11 @@
import exchange_calendars as xcals
import pandas as pd
-import pytz
from hypothesis import assume
from hypothesis import strategies as st
from market_prices import helpers
+from market_prices.helpers import UTC
from market_prices.intervals import DOInterval, TDInterval
from . import conftest
@@ -157,7 +157,7 @@ def calendar_start_end_sessions(
def nano_to_min(nano: int) -> pd.Timestamp:
"""Convert a 'nano' to a utc pd.Timestamp."""
- return pd.Timestamp(nano).tz_localize(pytz.UTC)
+ return pd.Timestamp(nano).tz_localize(UTC)
@st.composite
@@ -190,7 +190,7 @@ def start_minutes(
l_limit, r_limit = limit
if r_limit is None:
- r_limit = calendar.last_minutes[-2]
+ r_limit = calendar.last_minutes.iloc[-2]
if l_limit is None:
if calendar_name in _24h_calendars:
@@ -240,14 +240,14 @@ def end_minutes(
l_limit, r_limit = limit
if r_limit is None:
- r_limit = calendar.closes[-2]
+ r_limit = calendar.closes.iloc[-2]
if l_limit is None:
if calendar_name in _24h_calendars:
offset = pd.DateOffset(months=6)
else:
offset = pd.DateOffset(years=2)
- last_close = calendar.closes[0]
+ last_close = calendar.closes.iloc[0]
alt_limit = r_limit - offset # type: ignore[operator] # is a valid operation
l_limit = max(last_close, alt_limit)
@@ -441,7 +441,7 @@ def pp_days_start_minute(
"""
pp = get_pp_default()
calendar = get_calendar(calendar_name)
- start = draw(start_minutes(calendar_name, (None, calendar.last_minutes[-3])))
+ start = draw(start_minutes(calendar_name, (None, calendar.last_minutes.iloc[-3])))
start_session_i = calendar.sessions.get_loc(calendar.minute_to_session(start))
max_days = len(calendar.sessions) - 2 - start_session_i
pp["days"] = draw(st.integers(1, max_days))
@@ -578,10 +578,10 @@ def pp_caldur_start_minute(
months=pp["months"],
years=pp["years"],
)
- limit = (None, calendar.last_minutes[-2] - duration)
+ limit = (None, calendar.last_minutes.iloc[-2] - duration)
start = draw(start_minutes(calendar_name, limit))
# See `pp_caldur_end_session` for note on need for this assume guard
- assume(start + duration <= calendar.last_minutes[-2])
+ assume(start + duration <= calendar.last_minutes.iloc[-2])
pp["start"] = start
return pp
@@ -641,7 +641,7 @@ def pp_intraday_start_minute(
"""
pp = draw(pp_intraday())
calendar = get_calendar(calendar_name)
- i = calendar.minutes.get_loc(calendar.last_minutes[-2])
+ i = calendar.minutes.get_loc(calendar.last_minutes.iloc[-2])
i -= pp["minutes"] + (pp["hours"] * 60)
limit = (None, calendar.minutes[i])
pp["start"] = draw(start_minutes(calendar_name, limit))
diff --git a/tests/resources/store.h5 b/tests/resources/store.h5
index 37829a3..f805220 100644
Binary files a/tests/resources/store.h5 and b/tests/resources/store.h5 differ
diff --git a/tests/resources/store_pbt.h5 b/tests/resources/store_pbt.h5
index 0e76b7b..0c63cff 100644
Binary files a/tests/resources/store_pbt.h5 and b/tests/resources/store_pbt.h5 differ
diff --git a/tests/test_base.py b/tests/test_base.py
index 6feab11..b3686ef 100644
--- a/tests/test_base.py
+++ b/tests/test_base.py
@@ -20,11 +20,11 @@
import numpy as np
import pandas as pd
from pandas.testing import assert_frame_equal, assert_index_equal, assert_series_equal
-import pytz
import pytest
import market_prices.prices.base as m
from market_prices import helpers, intervals, errors, daterange, mptypes
+from market_prices.helpers import UTC
from market_prices.prices.yahoo import PricesYahoo
from market_prices.utils import calendar_utils as calutils
@@ -86,13 +86,13 @@ def test_create_composite(t1_us_lon, t5_us_lon, one_day):
f = m.create_composite
first_df = t5_us_lon
- start = pd.Timestamp("2022-02-03 14:00", tz=pytz.UTC)
- stop = pd.Timestamp("2022-02-09 15:32", tz=pytz.UTC)
+ start = pd.Timestamp("2022-02-03 14:00", tz=UTC)
+ stop = pd.Timestamp("2022-02-09 15:32", tz=UTC)
second_df = t1_us_lon[start:stop]
start_indice = first_df.index[33]
end_indice = second_df.index[-6]
- assert end_indice.right == pd.Timestamp("2022-02-09 15:28", tz=pytz.UTC)
+ assert end_indice.right == pd.Timestamp("2022-02-09 15:28", tz=UTC)
first = (first_df, start_indice)
second = (second_df, end_indice)
@@ -776,7 +776,7 @@ def match_daily_limit(limit) -> str:
limits = [
pd.Timestamp("2000-01-01 15:00"),
- pd.Timestamp("2000-01-01", tz=pytz.UTC),
+ pd.Timestamp("2000-01-01", tz=UTC),
]
for limit in limits:
with pytest.raises(ValueError, match=match_daily_limit(limit)):
@@ -998,7 +998,7 @@ def mock_now(tz=None) -> pd.Timestamp:
return pd.Timestamp("2022-02-14 21:21:05", tz=tz)
monkeypatch.setattr("pandas.Timestamp.now", mock_now)
- now = mock_now(tz=pytz.UTC)
+ now = mock_now(tz=UTC)
today = now.floor("D").tz_convert(None)
calendars = [xnys, xhkg, xlon]
@@ -1194,7 +1194,7 @@ def test__indices_aligned(
one_day,
):
"""Test `_indices_aligned` and `_indices_aligned_for_drg`."""
- now = pd.Timestamp("2021-12-31 23:59", tz=pytz.UTC)
+ now = pd.Timestamp("2021-12-31 23:59", tz=UTC)
monkeypatch.setattr("pandas.Timestamp.now", lambda *_, **__: now)
calendars = [xnys, xlon, xhkg]
@@ -1289,7 +1289,7 @@ def test__indexes_status(
):
"""Test `_indexes_status` and `_has_valid_fully_trading_indices`."""
# pylint: disable=too-complex, unbalanced-tuple-unpacking
- now = pd.Timestamp("2022", tz=pytz.UTC)
+ now = pd.Timestamp("2022", tz=UTC)
monkeypatch.setattr("pandas.Timestamp.now", lambda *_, **__: now)
symbols = ["ONE", "TWO"]
@@ -1312,7 +1312,7 @@ def assert_all_same(
prices: m.PricesBase, bi: intervals.BI, value: bool | float
):
sessions = get_sessions(prices, bi)
- expected = pd.Series(value, index=sessions)
+ expected = pd.Series(value, index=sessions, dtype="object")
assert_series_equal(prices._indexes_status[bi], expected)
drg = GetterMock(
@@ -1353,7 +1353,7 @@ def assert_all_same(
# ...1H conflict every day
bi = prices.bis.H1
sessions = get_sessions(prices, bi)
- expected = pd.Series(np.nan, index=sessions)
+ expected = pd.Series(np.nan, index=sessions, dtype="object")
# ...other than those sessions when xnys closed
x247_sessions, xnys_sessions = get_calendars_sessions(prices, bi, [x247, xnys])
expected[x247_sessions.difference(xnys_sessions)] = True
@@ -1366,7 +1366,7 @@ def assert_all_same(
bi = prices.bis.H1
sessions = get_sessions(prices, bi)
# on a normal day, no partial indices
- expected = pd.Series(True, index=sessions)
+ expected = pd.Series(True, index=sessions, dtype="object")
# although there are a couple of early closes that are not aligned with 1H
dates = ["2021-12-24", "2021-12-31"]
expected[dates] = False
@@ -1380,7 +1380,7 @@ def assert_all_same(
sessions = get_sessions(prices, bi)
xasx_sessions, xlon_sessions = get_calendars_sessions(prices, bi, [xasx, xlon])
# ...IH partial indices every session
- expected = pd.Series(False, index=sessions)
+ expected = pd.Series(False, index=sessions, dtype="object")
# ...save when xlon closed
expected[xasx_sessions.difference(xlon_sessions)] = True
assert_series_equal(prices._indexes_status[bi], expected)
@@ -1392,7 +1392,7 @@ def assert_all_same(
bi = prices.bis.H1
sessions = get_sessions(prices, bi)
# ...on a normal day, True (xasx enveloped by cmes and indices align)
- expected = pd.Series(True, index=sessions)
+ expected = pd.Series(True, index=sessions, dtype="object")
# ...except when axsx early close (unaligned with !H) coincides with CMES hol.
expected["2021-12-24"] = False
assert_series_equal(prices._indexes_status[bi], expected)
@@ -1405,7 +1405,7 @@ def assert_all_same(
sessions = get_sessions(prices, bi)
xasx_sessions, xhkg_sessions = get_calendars_sessions(prices, bi, [xasx, xhkg])
# ...on a normal day sessions will conflict
- expected = pd.Series(np.NaN, index=sessions)
+ expected = pd.Series(np.NaN, index=sessions, dtype="object")
# ...but if xasx open and xhkg closed, no partial indices
expected[xasx_sessions.difference(xhkg_sessions)] = True
# ...whilst if xhkg open and xasx closed, always partial indices
@@ -1549,9 +1549,9 @@ def test__minute_to_session(PricesMock, cal_start, side, one_min, monkeypatch):
# assert assumption that sessions overlap
xlon_session_close = xlon.session_close(session)
- assert xlon_session_close == pd.Timestamp("2021-01-19 16:30", tz=pytz.UTC)
+ assert xlon_session_close == pd.Timestamp("2021-01-19 16:30", tz=UTC)
xnys_session_open = xnys.session_open(session)
- assert xnys_session_open == pd.Timestamp("2021-01-19 14:30", tz=pytz.UTC)
+ assert xnys_session_open == pd.Timestamp("2021-01-19 14:30", tz=UTC)
xnys_session_close = xnys.session_close(session)
@@ -1717,7 +1717,7 @@ def test__get_trading_index(
Test covers only verifying that arguments are passed through and
ignore_breaks argument provided by method.
"""
- now = pd.Timestamp("2021-12-31 23:59", tz=pytz.UTC)
+ now = pd.Timestamp("2021-12-31 23:59", tz=UTC)
monkeypatch.setattr("pandas.Timestamp.now", lambda *_, **__: now)
cal = xhkg
@@ -1766,7 +1766,7 @@ def expected_index(
class TestBis:
"""Tests methods and properties that return base interval/s."""
- _now = pd.Timestamp("2022", tz=pytz.UTC)
+ _now = pd.Timestamp("2022", tz=UTC)
@pytest.fixture
def now(self) -> abc.Iterator[pd.Timestamp]:
@@ -1804,13 +1804,13 @@ class PricesMockBis_(PricesMock): # type: ignore[valid-type, misc]
)
BASE_LIMITS = {
- BaseInterval.T1: pd.Timestamp("2021-12-01", tz=pytz.UTC),
- BaseInterval.T2: pd.Timestamp("2021-11-01", tz=pytz.UTC),
- BaseInterval.T5: pd.Timestamp("2021-10-01", tz=pytz.UTC),
- BaseInterval.T10: pd.Timestamp("2021-09-01", tz=pytz.UTC),
- BaseInterval.T15: pd.Timestamp("2021-06-01", tz=pytz.UTC),
- BaseInterval.T30: pd.Timestamp("2021-03-01", tz=pytz.UTC),
- BaseInterval.H1: pd.Timestamp("2021-01-01", tz=pytz.UTC),
+ BaseInterval.T1: pd.Timestamp("2021-12-01", tz=UTC),
+ BaseInterval.T2: pd.Timestamp("2021-11-01", tz=UTC),
+ BaseInterval.T5: pd.Timestamp("2021-10-01", tz=UTC),
+ BaseInterval.T10: pd.Timestamp("2021-09-01", tz=UTC),
+ BaseInterval.T15: pd.Timestamp("2021-06-01", tz=UTC),
+ BaseInterval.T30: pd.Timestamp("2021-03-01", tz=UTC),
+ BaseInterval.H1: pd.Timestamp("2021-01-01", tz=UTC),
BaseInterval.D1: daily_limit,
}
@@ -2030,8 +2030,8 @@ def get_drg(pp: dict) -> daterange.GetterIntraday:
)
# a trading hour, from inspection of schedules
- start = pd.Timestamp("2021-12-23 15:00", tz=pytz.UTC)
- end = pd.Timestamp("2021-12-23 16:00", tz=pytz.UTC)
+ start = pd.Timestamp("2021-12-23 15:00", tz=UTC)
+ end = pd.Timestamp("2021-12-23 16:00", tz=UTC)
pp = dict(
minutes=0,
hours=0,
@@ -2047,7 +2047,7 @@ def get_drg(pp: dict) -> daterange.GetterIntraday:
prices.gpp.drg_intraday = get_drg(pp)
assert prices._bis_valid == prices.bis_intraday[:-1]
- pp["end"] = pd.Timestamp("2021-12-23 15:12", tz=pytz.UTC)
+ pp["end"] = pd.Timestamp("2021-12-23 15:12", tz=UTC)
drg = get_drg(pp)
self.set_prices_gpp_drg_properties(prices, drg)
# only those bis <= 12 min duration should be valid
@@ -2149,16 +2149,17 @@ def prices_partial_indices(
------
tuple[m.PricesBase, pd.Timestamp]
[0] Instance of PricesMockBisAlt (revised `PricesMockBis`)
- [1] Early close pd.Timestamp("2021-12-24 03:10", tz=pytz.UTC)
+ [1] Early close
+ pd.Timestamp("2021-12-24 03:10", tz=zoneinfo.ZoneInfo("UTC"))
"""
# pylint: disable=redundant-yields-doc
early_close_session = pd.Timestamp("2021-12-24")
early_close = xasx.session_close(early_close_session)
# assert assumption that early close
- assert early_close == pd.Timestamp("2021-12-24 03:10", tz=pytz.UTC)
+ assert early_close == pd.Timestamp("2021-12-24 03:10", tz=UTC)
revised_limits = PricesMockBis.BASE_LIMITS.copy()
- t1_limit = pd.Timestamp("2021-12-29", tz="UTC")
+ t1_limit = pd.Timestamp("2021-12-29", tz=UTC)
revised_limits[PricesMockBis.BaseInterval.T1] = t1_limit
class PricesMockBisAlt(PricesMockBis): # type: ignore[valid-type, misc]
@@ -2182,7 +2183,7 @@ def test__bis_no_partial_indices(self, prices_partial_indices, GetterMock, one_m
no_partial = prices._bis_no_partial_indices
start = prices.BASE_LIMITS[prices.bis.T2]
- end = pd.Timestamp("2021-12-23 05:00", tz=pytz.UTC)
+ end = pd.Timestamp("2021-12-23 05:00", tz=UTC)
# period with no partial indices
drg = self.get_mock_drg(GetterMock, cc, start, end)
self.set_prices_gpp_drg_properties(prices, drg)
@@ -2229,13 +2230,13 @@ def test_bis_accuracy_methods(self, PricesMockBis, symbols, xnys, monkeypatch):
Tests following properties:
`_bis_end_most_accurate`
"""
- now = pd.Timestamp("2021-12-31 15:14", tz=pytz.UTC)
+ now = pd.Timestamp("2021-12-31 15:14", tz=UTC)
monkeypatch.setattr("pandas.Timestamp.now", lambda *_, **__: now)
cal = xnys
ds_interval = intervals.TDInterval.H1 # all bis valid
prices = PricesMockBis(symbols, cal, ds_interval=ds_interval)
- start = pd.Timestamp("2021", tz=pytz.UTC) # start for all drg
+ start = pd.Timestamp("2021", tz=UTC) # start for all drg
period_end_now = prices._bis_period_end_now
bis_most_accurate = prices._bis_most_accurate
@@ -2243,14 +2244,14 @@ def test_bis_accuracy_methods(self, PricesMockBis, symbols, xnys, monkeypatch):
# verify period end does not evaluate as now for any bis.
for end in [
- pd.Timestamp("2021-12-31 15:13", tz=pytz.UTC),
- pd.Timestamp("2021-12-31 15:00", tz=pytz.UTC),
- pd.Timestamp("2021-12-31 12:00", tz=pytz.UTC),
- pd.Timestamp("2021-12-30 15:14", tz=pytz.UTC),
- pd.Timestamp("2021-12-30 15:13", tz=pytz.UTC),
- pd.Timestamp("2021-12-29 12:00", tz=pytz.UTC),
- pd.Timestamp("2021-06-29 12:00", tz=pytz.UTC),
- pd.Timestamp("2021-01-29 12:00", tz=pytz.UTC),
+ pd.Timestamp("2021-12-31 15:13", tz=UTC),
+ pd.Timestamp("2021-12-31 15:00", tz=UTC),
+ pd.Timestamp("2021-12-31 12:00", tz=UTC),
+ pd.Timestamp("2021-12-30 15:14", tz=UTC),
+ pd.Timestamp("2021-12-30 15:13", tz=UTC),
+ pd.Timestamp("2021-12-29 12:00", tz=UTC),
+ pd.Timestamp("2021-06-29 12:00", tz=UTC),
+ pd.Timestamp("2021-01-29 12:00", tz=UTC),
]:
pp = get_pp(start=start, end=end)
drg = self.get_drg(cal, pp=pp)
@@ -2261,7 +2262,7 @@ def test_bis_accuracy_methods(self, PricesMockBis, symbols, xnys, monkeypatch):
prices.gpp.anchor = mptypes.Anchor.WORKBACK
assert period_end_now(prices.bis_intraday) == []
- end = pd.Timestamp("2021-12-23 18:15", tz=pytz.UTC)
+ end = pd.Timestamp("2021-12-23 18:15", tz=UTC)
pp = get_pp(start=start, end=end)
drg = self.get_drg(cal, pp=pp)
self.set_prices_gpp_drg_properties(prices, drg)
@@ -2275,7 +2276,7 @@ def test_bis_accuracy_methods(self, PricesMockBis, symbols, xnys, monkeypatch):
# with end late dec, all bis could serve prices at period end
assert prices._bis_end_most_accurate == expected
# although with end mid June, only base intervals, >T15 can
- end = pd.Timestamp("2021-06-16 18:15", tz=pytz.UTC)
+ end = pd.Timestamp("2021-06-16 18:15", tz=UTC)
pp = get_pp(start=start, end=end)
drg = self.get_drg(cal, pp=pp)
self.set_prices_gpp_drg_properties(prices, drg)
@@ -2311,8 +2312,8 @@ def test__get_bi_from_bis(self, PricesMockBis, symbols, xhkg):
ds_interval = intervals.TDInterval.H1 # all bis valid
prices = PricesMockBis(symbols, cal, ds_interval=ds_interval)
prices.gpp.calendar = cal
- start = pd.Timestamp("2021", tz=pytz.UTC) # start for all drg
- end = pd.Timestamp("2021-12-23 05:45", tz=pytz.UTC)
+ start = pd.Timestamp("2021", tz=UTC) # start for all drg
+ end = pd.Timestamp("2021-12-23 05:45", tz=UTC)
pp = get_pp(start=start, end=end)
drg = self.get_drg(cal, pp=pp)
self.set_prices_gpp_drg_properties(prices, drg)
@@ -2572,7 +2573,7 @@ def assert_drg_daily_properties(
assert drg._strict is strict
assert drg.pp == gpp.pp(intraday=False)
- start = pd.Timestamp("2021-12-15 12:22", tz=pytz.UTC)
+ start = pd.Timestamp("2021-12-15 12:22", tz=UTC)
pp = get_pp(start=start, days=2)
ds_interval = intervals.TDInterval.H2
lead_symbol = "TWO"
@@ -2584,7 +2585,7 @@ def assert_drg_daily_properties(
assert_properties(gpp, ds_interval, lead_symbol, anchor, openend, strict, priority)
# expected starts from knowledge of schedule
- start_expected = pd.Timestamp("2021-12-15 14:30", tz=pytz.UTC)
+ start_expected = pd.Timestamp("2021-12-15 14:30", tz=UTC)
pp_expected = get_pp(start=start_expected, days=2)
assert gpp.pp(intraday=True) == pp_expected
start_expected_daily = pd.Timestamp("2021-12-15")
@@ -2604,8 +2605,8 @@ def assert_drg_daily_properties(
drg = gpp.drg_intraday_no_limit
assert_drg_intraday_properties(drg, gpp, False, ds_interval, no_limit=True)
drg.interval = prices.bis.T5
- acc_expected = pd.Timestamp("2021-12-16 21:00", tz=pytz.UTC)
- end_expected = pd.Timestamp("2021-12-16 22:30", tz=pytz.UTC)
+ acc_expected = pd.Timestamp("2021-12-16 21:00", tz=UTC)
+ end_expected = pd.Timestamp("2021-12-16 22:30", tz=UTC)
assert drg.daterange == ((start_expected, end_expected), acc_expected)
assert not drg.ignore_breaks
@@ -2635,7 +2636,7 @@ def assert_drg_daily_properties(
assert drg.daterange == ((start_expected_daily, end_expected), end_expected)
# alternative parameters
- end = pd.Timestamp("2021-12-15 03:10", tz=pytz.UTC)
+ end = pd.Timestamp("2021-12-15 03:10", tz=UTC)
pp = get_pp(end=end, hours=3, minutes=30)
ds_interval = intervals.TDInterval.H1
lead_symbol = "ONE"
@@ -2659,13 +2660,13 @@ def assert_drg_daily_properties(
drg.interval = prices.bis.H1
assert drg.ignore_breaks
# from knowledge of schedule...
- end_expected = pd.Timestamp("2021-12-15 02:30", tz=pytz.UTC)
- start_expected = pd.Timestamp("2021-12-14 05:30", tz=pytz.UTC)
+ end_expected = pd.Timestamp("2021-12-15 02:30", tz=UTC)
+ start_expected = pd.Timestamp("2021-12-14 05:30", tz=UTC)
assert drg.daterange == ((start_expected, end_expected), end_expected)
drg.interval = prices.bis.T5
assert not drg.ignore_breaks
- start_expected = pd.Timestamp("2021-12-14 06:00", tz=pytz.UTC)
+ start_expected = pd.Timestamp("2021-12-14 06:00", tz=UTC)
assert drg.daterange == ((start_expected, end_expected), end_expected)
# alternative parameters just to verify request_earliest_available_data True
diff --git a/tests/test_base_prices.py b/tests/test_base_prices.py
index d2d34ae..acfa4cc 100644
--- a/tests/test_base_prices.py
+++ b/tests/test_base_prices.py
@@ -17,19 +17,20 @@
from collections import abc
import datetime
import itertools
-from typing import Tuple, Dict, Literal
import re
+from typing import Literal
+from zoneinfo import ZoneInfo
import attr
import exchange_calendars as xcals
import pandas as pd
from pandas.testing import assert_index_equal, assert_frame_equal
import pytest
-import pytz
-from pytz import UTC
+import valimp
import market_prices.prices.base as m
from market_prices import errors, helpers, intervals, mptypes, pt
+from market_prices.helpers import UTC
from market_prices.intervals import TDInterval, DOInterval
from market_prices.mptypes import Anchor, OpenEnd, Priority
from market_prices.support import tutorial_helpers as th
@@ -38,11 +39,6 @@
from .utils import get_resource_pbt
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
-
# pylint: disable=missing-function-docstring, missing-type-doc
# pylint: disable=missing-param-doc, missing-any-param-doc, redefined-outer-name
# pylint: disable=too-many-public-methods, too-many-arguments, too-many-locals
@@ -195,7 +191,7 @@ def mock_now_(*_, tz=None, **__) -> pd.Timestamp:
# --- PricesBaseTst fixtures ---
-ResourcePBT = Tuple[Dict[str, pd.DataFrame], pd.Timestamp]
+ResourcePBT = tuple[dict[str, pd.DataFrame], pd.Timestamp]
# [0] keys as names of base intervals, values as corresponding prices tables.
# [1] timestamp when price tables created.
@@ -208,7 +204,7 @@ def res_us_only() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("MSFT")
at:
- Timestamp('2022-06-15 16:51:12', tz='UTC')
+ Timestamp('2022-06-15 16:51:12', tz=ZoneInfo("UTC"))
"""
yield get_resource_pbt("us_only")
@@ -235,7 +231,7 @@ def res_hk_only() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("9988.HK")
at:
- Timestamp('2022-06-16 15:27:12', tz='UTC')
+ Timestamp('2022-06-16 15:27:12', tz=ZoneInfo("UTC"))
"""
yield get_resource_pbt("hk_only")
@@ -264,7 +260,7 @@ def res_247_only() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("BTC-USD")
at:
- Timestamp('2022-06-17 13:26:44', tz='UTC')
+ Timestamp('2022-06-17 13:26:44', tz=ZoneInfo("UTC"))
NOTE: following warnings were raised on creating resource:
PricesMissingWarning: Prices from Yahoo are missing for 'BTC-USD' at
@@ -297,7 +293,7 @@ def res_us_lon() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("MSFT, AZN.L")
at:
- Timestamp('2022-06-16 09:29:12', tz='UTC')
+ Timestamp('2022-06-16 09:29:12', tz=ZoneInfo("UTC"))
"""
yield get_resource_pbt("us_lon")
@@ -338,7 +334,7 @@ def res_hk_lon() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("9988.HK, AZN.L")
at:
- Timestamp('2022-06-17 12:10:12', tz='UTC')
+ Timestamp('2022-06-17 12:10:12', tz=ZoneInfo("UTC"))
"""
yield get_resource_pbt("hk_lon")
@@ -365,7 +361,7 @@ def res_us_hk() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("MSFT, 9988.HK")
at:
- Timestamp('2022-06-17 22:04:55', tz='UTC')
+ Timestamp('2022-06-17 22:04:55', tz=ZoneInfo("UTC"))
"""
yield get_resource_pbt("us_hk")
@@ -392,7 +388,7 @@ def res_brz_hk() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("PETR3.SA, 9988.HK")
at:
- Timestamp('2022-06-16 15:46:12', tz='UTC')
+ Timestamp('2022-06-16 15:46:12', tz=ZoneInfo("UTC"))
"""
yield get_resource_pbt("brz_hk")
@@ -424,7 +420,7 @@ def res_lon_247() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("AZN.L, BTC-USD")
at:
- Timestamp('2022-06-17 13:17:32', tz='UTC')
+ Timestamp('2022-06-17 13:17:32', tz=ZoneInfo("UTC"))
NOTE: following warnings were raised on creating resource:
PricesMissingWarning: Prices from Yahoo are missing for 'BTC-USD' at
@@ -462,7 +458,7 @@ def res_247_245() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("BTC-USD, ES=F")
at:
- Timestamp('2022-06-17 22:41:25', tz='UTC')
+ Timestamp('2022-06-17 22:41:25', tz=ZoneInfo("UTC"))
NOTE: following warnings were raised on creating resource:
PricesMissingWarning: Prices from Yahoo are missing for 'BTC-USD' at
@@ -529,7 +525,7 @@ def res_us_lon_hk() -> abc.Iterator[ResourcePBT]:
instance:
PricesYahoo("MSFT, AZN.L, 9988.HK")
at:
- Timestamp('2022-06-17 15:57:09', tz='UTC')
+ Timestamp('2022-06-17 15:57:09', tz=ZoneInfo("UTC"))
"""
yield get_resource_pbt("us_lon_hk")
@@ -1714,8 +1710,8 @@ def assertions_downsample_bi_table(
assert subset_s.volume.sum() == row_s.volume
assert subset_s.high.max() == row_s.high
assert subset_s.low.min() == row_s.low
- assert subset_s.bfill().open[0] == row_s.open
- assert subset_s.ffill().close[-1] == row_s.close
+ assert subset_s.bfill().open.iloc[0] == row_s.open
+ assert subset_s.ffill().close.iloc[-1] == row_s.close
def test__downsample_bi_table_lon_us(self, prices_lon_us, one_min):
"""Tests `_downsample_bi_table` for symbols on overlapping exchanges.
@@ -2090,7 +2086,7 @@ def test__get_daily_intraday_composite(self, prices_us_lon, lead, one_min):
)
last_daily_session = last_daily_sessions[-1]
sessions = prices.cc.sessions_in_range(start, last_daily_session)
- sessions = sessions.tz_localize(pytz.UTC)
+ sessions = sessions.tz_localize(UTC)
assert_index_equal(daily_part.index.left, sessions)
assert_index_equal(daily_part.index.right, sessions)
# verify not missing anything inbetween
@@ -2231,7 +2227,9 @@ def test__get_table_composite_daily_intraday(
_start_session, end_session = get_sessions_daterange_for_bi(
prices, prices.bis.T2, length_end_session=length
)
- while not (prices.cc.sessions_length(end_session, end_session) == length)[0]:
+ while not (prices.cc.sessions_length(end_session, end_session) == length).iloc[
+ 0
+ ]:
end_session = prices.cc.previous_session(end_session)
if end_session == _start_session:
raise ValueError(f"Unable to get a 'T2' session of length {length}.")
@@ -2420,18 +2418,17 @@ def test_params_errors(self, prices_us, one_min, session_length_xnys):
with pytest.raises(ValueError, match=msg):
prices.get(interval, composite=True)
- msg = "unexpected keyword argument: 'minute'"
- with pytest.raises(pydantic.ValidationError, match=msg):
+ msg = "Got unexpected keyword argument: 'minute'"
+ with pytest.raises(valimp.InputsError, match=msg):
prices.get(minute=3)
# Verify that a parameter that takes a Literal raises exception if pass
# non-valid value. Only test for one such parameter.
msg = re.escape(
- "1 validation error for Get\nanchor\n unexpected value; permitted:"
- " 'workback', 'open' (type=value_error.const; given=wrkback;"
- " permitted=('workback', 'open'))"
+ "anchor\n\tTakes a value from <('workback', 'open')> although"
+ " received 'wrkback'."
)
- with pytest.raises(pydantic.ValidationError, match=msg):
+ with pytest.raises(valimp.InputsError, match=msg):
prices.get("30T", anchor="wrkback")
# verify period parameters being verified by `verify_period_parameters`
@@ -2467,17 +2464,18 @@ def test_params_errors(self, prices_us, one_min, session_length_xnys):
# ----- Tests related to `interval` parameter -----
def test_interval_validation(self, prices_us):
- """Test pydantic validating `interval`.
+ """Test that parsing validates `interval`.
- Only tests that `interval` being validated by `to_ptinterval`.
- `to_ptinterval` is comprehensively tested elsewhere.
+ Only tests that `interval` is being validated by `to_ptinterval`
+ via `intervals.parse_interval`. `to_ptinterval` is
+ comprehensively tested elsewhere.
"""
# Verify invalid interval unit
match = re.escape(
"`interval` unit must by one of ['MIN', 'T', 'H', 'D', 'M']"
" (or lower-case) although evaluated to 'G'."
)
- with pytest.raises(pydantic.ValidationError, match=match):
+ with pytest.raises(ValueError, match=match):
prices_us.get("3G")
def test_intervals_inferred(self, prices_us):
@@ -2771,7 +2769,7 @@ def test_start_end_inputs(self, prices_us_lon):
# Verify `tzin`
symb_xnys = get_symbols_for_calendar(prices, "XNYS")
- for tzin in ("America/New_York", pytz.timezone("America/New_York"), symb_xnys):
+ for tzin in ("America/New_York", ZoneInfo("America/New_York"), symb_xnys):
df = prices.get("2H", start_utc, end_str, tzin=tzin)
assert_frame_equal(df, df_base)
@@ -2985,8 +2983,8 @@ def test_calendar_time_duration(
)
start_session = sessions[0]
- num_sessions_in_week = cal.sessions_distance(
- start_session, start_session + pd.Timedelta(6, "D")
+ num_sessions_in_week = int(
+ cal.sessions_distance(start_session, start_session + pd.Timedelta(6, "D"))
)
# verify getting intraday data with bound as date
@@ -4301,7 +4299,7 @@ def test_tzout(self, prices_us_lon_hk):
assert prices.get(**kwargs_daily).index.tz is None
assert prices.get(**kwargs_daily, tzout=tzhkg).index.tz is None
# unless tz is UTC
- assert prices.get(**kwargs_daily, tzout="UTC").index.tz is UTC
+ assert prices.get(**kwargs_daily, tzout=UTC).index.tz is UTC
# verify `tzout` defaults to timezone that `tzin` evaluates to
kwargs_intraday = dict(end=session, days=2)
@@ -4424,13 +4422,13 @@ def assertions(
df: pd.DataFrame,
indice: pd.Timestamp,
values: dict[str, tuple[pd.Timestamp, Literal["open", "close"]]],
- tz: pytz.BaseTzInfo = UTC,
+ tz: ZoneInfo = UTC,
):
self.assert_price_at_rtrn_format(table, df)
assert df.index[0] == indice
assert df.index.tz is tz
for s, (session, col) in values.items():
- assert df[s][0] == self.get_cell(table, s, session, col)
+ assert df[s].iloc[0] == self.get_cell(table, s, session, col)
def test_oob(self, prices_us_lon_hk, one_min):
"""Test raises errors when minute out-of-bounds.
@@ -4703,7 +4701,7 @@ def test_timezone(self, prices_us_hk, one_min):
self.assertions(table, df, indice, values, xnys.tz)
df = f(minute, tz="Europe/London")
- self.assertions(table, df, indice, values, pytz.timezone("Europe/London"))
+ self.assertions(table, df, indice, values, ZoneInfo("Europe/London"))
# verify tz also defines tz of a timezone naive minute
minute = minute.astimezone(None) + xhkg.tz.utcoffset(session)
@@ -4806,7 +4804,7 @@ def test_single_symbol_T1_and_now(
close = xnys.session_close(session)
open_next = xnys.session_open(xnys.next_session(session))
- table = prices.get("1T", session_before, session, tzout=pytz.UTC)
+ table = prices.get("1T", session_before, session, tzout=UTC)
tableD1 = prices.get("1D", session_before, session)
delay = 20
@@ -5888,7 +5886,7 @@ def get(kwargs, **others) -> pd.DataFrame:
def assertions(
rng: pd.DataFrame,
table: pd.DataFrame,
- tz: pytz.BaseTzInfo = prices.tz_default,
+ tz: ZoneInfo = prices.tz_default,
to_now: bool = False,
):
assert_prices_table_ii(rng, prices)
@@ -5949,11 +5947,11 @@ def test_it(kwargs: dict, pt_type: pt._PT | None = None, to_now: bool = False):
assert_frame_equal(rng.pt.stacked, f(**kwargs, stack=True))
# Sidetrack to verify `tzout`
- tz = pytz.UTC
+ tz = UTC
rng = f(**kwargs, tzout=tz)
assertions(rng, table, tz)
- tz = pytz.timezone("Australia/Perth")
+ tz = ZoneInfo("Australia/Perth")
rng = f(**kwargs, tzout=tz)
assertions(rng, table, tz)
@@ -5961,7 +5959,7 @@ def test_it(kwargs: dict, pt_type: pt._PT | None = None, to_now: bool = False):
assertions(rng, table, xhkg.tz)
# verify output in terms of `tzin` if tzout not otherwise passed.
- tzin = pytz.timezone("Australia/Perth")
+ tzin = ZoneInfo("Australia/Perth")
rng = f(**kwargs, tzin=tzin)
assertions(rng, table, tzin)
@@ -5971,7 +5969,7 @@ def test_it(kwargs: dict, pt_type: pt._PT | None = None, to_now: bool = False):
assertions(rng, table_, xhkg.tz)
# but not if `tzout` passed
- tzout = pytz.timezone("Europe/Rome")
+ tzout = ZoneInfo("Europe/Rome")
rng = f(**kwargs, tzin=tzin, tzout=tzout)
assertions(rng, table, tzout)
@@ -6021,7 +6019,7 @@ def test_it(kwargs: dict, pt_type: pt._PT | None = None, to_now: bool = False):
assert rng.index.right <= helpers.now()
# mock now to ensure same return for `price_range` and `get``
- mock_now(monkeypatch, pd.Timestamp.now(tz=pytz.UTC) - pd.Timedelta(5, "D"))
+ mock_now(monkeypatch, pd.Timestamp.now(tz=UTC) - pd.Timedelta(5, "D"))
# verify for passing `start` and for requesting to now
kwargs = dict(start=minute)
test_it(kwargs, to_now=True)
diff --git a/tests/test_calendar_utils.py b/tests/test_calendar_utils.py
index ddadf5b..612334e 100644
--- a/tests/test_calendar_utils.py
+++ b/tests/test_calendar_utils.py
@@ -19,11 +19,11 @@
from pandas import Timestamp as T
from pandas.testing import assert_index_equal
import pytest
-import pytz
import exchange_calendars as xcals
from market_prices.intervals import TDInterval
from market_prices import errors, helpers
+from market_prices.helpers import UTC
import market_prices.utils.calendar_utils as m
import market_prices.utils.pandas_utils as pdutils
@@ -328,7 +328,7 @@ def _get_csv(self, name: str) -> pd.DataFrame:
# Necessary for csv saved prior to xcals v4.0
for col in df:
if df[col].dt.tz is None:
- df[col] = df[col].dt.tz_localize(pytz.UTC)
+ df[col] = df[col].dt.tz_localize(UTC)
return df
@functools.cached_property
@@ -380,7 +380,7 @@ def _opens(self) -> pd.Series:
for col in all_opens:
all_opens[col] = all_opens[col].dt.tz_convert(None)
opens_min = all_opens.min(axis=1)
- return opens_min.dt.tz_localize(pytz.UTC)
+ return opens_min.dt.tz_localize(UTC)
@functools.cached_property
def _closes(self) -> pd.Series:
@@ -392,7 +392,7 @@ def _closes(self) -> pd.Series:
for col in all_closes:
all_closes[col] = all_closes[col].dt.tz_convert(None)
closes_max = all_closes.max(axis=1)
- return closes_max.dt.tz_localize(pytz.UTC)
+ return closes_max.dt.tz_localize(UTC)
# properties of composite calendar
@@ -460,12 +460,12 @@ def session(self) -> pd.Series:
@property
def session_open(self) -> pd.Series:
"""Open time of `self.session`."""
- return self.opens[self._session_idx]
+ return self.opens.iloc[self._session_idx]
@property
def session_close(self) -> pd.Series:
"""Close time of `self.session`."""
- return self.closes[self._session_idx]
+ return self.closes.iloc[self._session_idx]
@property
def next_session(self) -> pd.Series:
@@ -881,8 +881,8 @@ def test_misc_properties(self, composite_calendars_with_answers, calendar_groups
cc, answers = composite_calendars_with_answers
assert cc.first_session == answers.sessions[0]
assert cc.last_session == answers.sessions[-1]
- assert cc.first_minute == answers.first_minutes[0]
- assert cc.last_minute == answers.last_minutes[-1]
+ assert cc.first_minute == answers.first_minutes.iloc[0]
+ assert cc.last_minute == answers.last_minutes.iloc[-1]
assert cc.side == "left"
assert len(cc.calendars) in (2, 3)
i = 1 if len(cc.calendars) == 3 else 2
@@ -978,17 +978,17 @@ def test_non_trading_index1(self, composite_calendars):
# test full index, with no arguments
full_index = f()
assert isinstance(full_index, pd.IntervalIndex)
- assert cc.closes[0] in full_index[:6].left
- assert cc.opens[-1] in full_index[-6:].right
+ assert cc.closes.iloc[0] in full_index[:6].left
+ assert cc.opens.iloc[-1] in full_index[-6:].right
# test utc option
args = ("2021-02", "2021-03")
index_utc = f(*args, utc=True)
- assert index_utc.left.tz == pytz.UTC and index_utc.right.tz == pytz.UTC
+ assert index_utc.left.tz == UTC and index_utc.right.tz == UTC
index_naive = f(*args, utc=False)
assert index_naive.left.tz is None and index_naive.right.tz is None
assert_index_equal(index_naive.left, index_utc.left.tz_convert(None))
- assert_index_equal(index_naive.right.tz_localize(pytz.UTC), index_utc.right)
+ assert_index_equal(index_naive.right.tz_localize(UTC), index_utc.right)
# Compare with expected from manual inspection of period with unusual timings.
# NB also tests passing end as last session of composite calendar
@@ -1137,7 +1137,7 @@ def index_for_session(
"""Get expected index for or within a single trading (sub)session."""
if add_interval:
end += interval
- tz = pytz.UTC if utc else None
+ tz = UTC if utc else None
dti = pd.date_range(start, end, freq=interval.as_pdfreq, tz=tz)
return pd.IntervalIndex.from_arrays(dti[:-1], dti[1:], "left")
@@ -1190,16 +1190,16 @@ def test_xlon(self, xlon, intervals):
assert_index_equal(rtrn, rtrn_)
# passing as times
- start = T("2021-12-24 08:01", tz=pytz.UTC)
- end = T("2021-12-29 16:30", tz=pytz.UTC)
+ start = T("2021-12-24 08:01", tz=UTC)
+ end = T("2021-12-29 16:30", tz=UTC)
rtrn = f(interval, start, end)
unaligned = last_session_duration % interval != pd.Timedelta(0)
expected = index[1:-1] if unaligned else index[1:]
assert_index_equal(rtrn, expected)
# passing as times
- start = T("2021-12-24 08:01", tz=pytz.UTC)
- end = T("2021-12-29 16:29", tz=pytz.UTC)
+ start = T("2021-12-24 08:01", tz=UTC)
+ end = T("2021-12-29 16:29", tz=UTC)
rtrn = f(interval, start, end)
assert_index_equal(rtrn, index[1:-1])
@@ -1494,7 +1494,7 @@ def assertions(
# verify xnys closed 24 and xhkg closed on 27, so...
minute = xhkg.session_close(session_next_xhkg)
- next_ = pd.Timestamp("2021-12-27 14:30", tz=pytz.UTC)
+ next_ = pd.Timestamp("2021-12-27 14:30", tz=UTC)
assertions(cc, minute, False, minute - one_min, next_)
def test_minute_properties(self):
@@ -1542,5 +1542,5 @@ def assertions(
assertions(cc.opens_nanos, xnys.opens_nanos, xlon.opens_nanos, xhkg.opens_nanos)
# verify cc.minutes
- assert cc.minutes.tz == pytz.UTC
+ assert cc.minutes.tz == UTC
assert (cc.minutes.tz_convert(None) == pd.DatetimeIndex(cc.minutes_nanos)).all()
diff --git a/tests/test_data.py b/tests/test_data.py
index f5bb4a3..7dfbbc4 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -7,12 +7,12 @@
import exchange_calendars as xcals
import pandas as pd
import pytest
-import pytz
import market_prices.data as m
from market_prices import helpers, intervals, errors
-from market_prices.utils import calendar_utils as calutils
+from market_prices.helpers import UTC
from market_prices.intervals import TDInterval
+from market_prices.utils import calendar_utils as calutils
# pylint: disable=missing-function-docstring, missing-type-doc
# pylint: disable=missing-param-doc, missing-any-param-doc, redefined-outer-name
@@ -235,16 +235,20 @@ def get_data(delay, left_limit=None, right_limit=None) -> m.Data:
assert data.cc is cc
assert data.bi == bi
+ pool: pd.Series | pd.DatetimeIndex
if bi.is_intraday:
pool = ans.first_minutes
- r_edge = pd.Timestamp.now(tz=pytz.UTC) + bi
+ r_edge = pd.Timestamp.now(tz=UTC) + bi
+ l_edge = pool.iloc[0]
else:
pool = ans.sessions
r_edge = today
+ l_edge = pool[0]
- l_edge = pool[0]
- delta = get_delta(pool[0])
+ def get_pool_value(idx: int) -> pd.Timestamp:
+ return pool.iloc[idx] if isinstance(pool, pd.Series) else pool[idx]
+ delta = get_delta(get_pool_value(0))
assert data.ll is None
assert data.rl == r_edge
@@ -252,7 +256,7 @@ def get_data(delay, left_limit=None, right_limit=None) -> m.Data:
assert_ts_not_available(data, r_edge + delta)
# define left_limit, right_limit as default
- left_limit = pool[-30]
+ left_limit = get_pool_value(-30)
data = get_data(delay, left_limit)
assert_empty(data)
@@ -264,7 +268,7 @@ def get_data(delay, left_limit=None, right_limit=None) -> m.Data:
assert_ts_not_available(data, [left_limit - delta, r_edge + delta])
# define left_limit and right_limit
- right_limit = pool[-5]
+ right_limit = get_pool_value(-5)
data = get_data(delay, left_limit, right_limit)
assert_empty(data)
@@ -876,7 +880,7 @@ def set_now(now: pd.Timestamp):
if bi_daily:
left_bound = today - left_limit
else:
- left_bound = pd.Timestamp.now(tz=pytz.UTC) - left_limit
+ left_bound = pd.Timestamp.now(tz=UTC) - left_limit
left_bound += one_min # one_min provided for processing
data = get_data()
diff --git a/tests/test_daterange.py b/tests/test_daterange.py
index 6d3b400..b69104c 100644
--- a/tests/test_daterange.py
+++ b/tests/test_daterange.py
@@ -392,7 +392,9 @@ def do_bounds_from_start(
data=sthyp.data(),
ds_interval=stmp.intervals_non_intraday(),
)
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_start_end(
self, calendars_with_answers_extended, data, ds_interval
):
@@ -428,7 +430,9 @@ def test_daterange_start_end_ool(self):
assert True
@hyp.given(ds_interval=stmp.intervals_non_intraday())
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
@pytest.mark.parametrize("limit_idx", [0, 50])
def test_daterange_add_a_row_errors(
self,
@@ -504,7 +508,9 @@ def match_msg(limit: pd.Timestamp) -> str:
_ = drg.daterange
@hyp.given(data=sthyp.data(), ds_interval=stmp.intervals_non_intraday())
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_start_only(
self, calendars_with_answers_extended, data, ds_interval
):
@@ -532,7 +538,9 @@ def test_daterange_start_only(
data=sthyp.data(),
ds_interval=stmp.intervals_non_intraday(),
)
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_end_only(
self, calendars_with_answers_extended, data, ds_interval
):
@@ -569,7 +577,9 @@ def test_daterange_end_only(
data=sthyp.data(),
ds_interval=stmp.intervals_non_intraday(),
)
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_days_start(
self, calendars_with_answers_extended, data, ds_interval
):
@@ -620,7 +630,9 @@ def test_daterange_duration_days_start_ool(self, calendars_extended, pp_default)
data=sthyp.data(),
ds_interval=stmp.intervals_non_intraday(),
)
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_days_end(
self, calendars_with_answers_extended, data, ds_interval
):
@@ -720,7 +732,9 @@ def test_daterange_duration_days_end_oolb(
data=sthyp.data(),
ds_interval=stmp.intervals_non_intraday(),
)
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_cal_start(
self, calendars_with_answers_extended, data, ds_interval
):
@@ -778,6 +792,7 @@ def test_daterange_duration_cal_start_ool(self, calendars_extended, pp_default):
data=sthyp.data(),
ds_interval=stmp.intervals_non_intraday(),
)
+ @hyp.settings(suppress_health_check=[hyp.HealthCheck.differing_executors])
def test_daterange_duration_cal_end(
self, calendars_with_answers_extended, data, ds_interval
):
@@ -1278,7 +1293,9 @@ def verify_starts(open_, close, next_open):
@hyp.example(conftest.base_ds_intervals_dict[TDInterval.T1][7])
@hyp.example(conftest.base_ds_intervals_dict[TDInterval.T1][8])
@hyp.example(conftest.base_ds_intervals_dict[TDInterval.T1][9])
- @hyp.settings(deadline=None)
+ @hyp.settings(
+ deadline=None, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_get_start_too_late(
self, calendars_with_answers_extended, one_min, ds_interval
):
@@ -1790,7 +1807,7 @@ def test_get_end_high_interval2(
def test_get_end_ool(self, calendars_with_answers_extended, pp_default, one_min):
"""Test `get_end` with ool input."""
cal, ans = calendars_with_answers_extended
- limit = ans.opens[len(ans.sessions) // 2]
+ limit = ans.opens.iloc[len(ans.sessions) // 2]
too_early = limit - one_min
match = re.escape(
f"Prices unavailable as end ({helpers.fts(too_early)}) is earlier"
@@ -1877,7 +1894,9 @@ def test_get_start_get_end_anchor_effect(
data=sthyp.data(),
base_ds_interval=stmp.base_ds_intervals(),
)
- @hyp.settings(deadline=None)
+ @hyp.settings(
+ deadline=None, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_start_end(self, calendars_extended, data, base_ds_interval):
"""Test `daterange` for with period parameters as `pp_start_end_minutes`."""
cal = calendars_extended
@@ -1909,20 +1928,20 @@ def test_daterange_start_end(self, calendars_extended, data, base_ds_interval):
start_session = cal.minute_to_session(start, _parse=False)
i = cal.sessions.get_loc(start_session)
- if start >= cal.first_pm_minutes[i]:
- session_start = cal.first_pm_minutes[i]
+ if start >= cal.first_pm_minutes.iloc[i]:
+ session_start = cal.first_pm_minutes.iloc[i]
else:
- session_start = cal.first_minutes[i]
+ session_start = cal.first_minutes.iloc[i]
minutes_i = cal.minutes.get_loc(start)
start_ = cal.minutes[minutes_i - interval.as_minutes]
if start == session_start:
start_ = drg.get_start(start_)
- if session_start == cal.first_pm_minutes[i]:
- prev_session_start = cal.first_minutes[i]
+ if session_start == cal.first_pm_minutes.iloc[i]:
+ prev_session_start = cal.first_minutes.iloc[i]
else:
prev_session_start = max(
- cal.first_minutes[i - 1], cal.first_pm_minutes[i - 1]
+ cal.first_minutes.iloc[i - 1], cal.first_pm_minutes.iloc[i - 1]
)
start_ = max(prev_session_start, start_)
else:
@@ -1977,8 +1996,8 @@ def match(
session = ans.sessions_sample[1]
i = ans.sessions.get_loc(session)
- session_open = ans.opens[i]
- prev_session_close = ans.closes[i - 1]
+ session_open = ans.opens.iloc[i]
+ prev_session_close = ans.closes.iloc[i - 1]
bi = TDInterval.T5
dsi = TDInterval.T15
@@ -2132,7 +2151,9 @@ def assertions(
_ = drg.daterange
@hyp.given(base_ds_interval=stmp.base_ds_intervals())
- @hyp.settings(deadline=None)
+ @hyp.settings(
+ deadline=None, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
@pytest.mark.parametrize("session_limit_idx", [0, 50])
def test_daterange_add_a_row_errors(
self,
@@ -2196,7 +2217,9 @@ def test_daterange_add_a_row_errors(
data=sthyp.data(),
base_ds_interval=stmp.base_ds_intervals(),
)
- @hyp.settings(deadline=None)
+ @hyp.settings(
+ deadline=None, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_start_only_end_None(
self, calendars_extended, data, base_ds_interval
):
@@ -2214,7 +2237,9 @@ def test_daterange_start_only_end_None(
data=sthyp.data(),
base_ds_interval=stmp.base_ds_intervals(),
)
- @hyp.settings(deadline=None)
+ @hyp.settings(
+ deadline=None, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_end_only_start_None(
self, calendars_extended, base_ds_interval, data
):
@@ -2252,7 +2277,9 @@ def limit_f(interval) -> pd.Timestamp:
assert drg.daterange == ((exp_start, end_now), end_now_accuracy)
@hyp.given(data=sthyp.data())
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_cal_start_minute(
self, calendars_extended, data, one_min
):
@@ -2312,7 +2339,9 @@ def test_daterange_duration_cal_start_minute_ool(
assert drg.daterange == ((start, now), now_accuracy)
@hyp.given(data=sthyp.data())
- @hyp.settings(deadline=None)
+ @hyp.settings(
+ deadline=None, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_cal_end_minute(self, calendars_extended, data):
"""Test `daterange` for with period parameters as `pp_caldur_end_minute`.
@@ -2377,7 +2406,9 @@ def test_daterange_duration_cal_end_minute_oolb(
_ = drg.daterange
@hyp.given(data=sthyp.data())
- @hyp.settings(deadline=None)
+ @hyp.settings(
+ deadline=None, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_days_start_minute(
self, calendars_with_answers_extended, data
):
@@ -2450,7 +2481,9 @@ def test_daterange_duration_days_start_minute_ool(
assert drg.daterange == expected
@hyp.given(data=sthyp.data())
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_days_end_minute(self, calendars_extended, data):
"""Test `daterange` for with period parameters as `pp_days_end_minute`.
@@ -2507,10 +2540,10 @@ def test_daterange_duration_days_end_oolb_minute(
drg_kwargs = dict(interval=bi)
- start = ans.opens[0]
+ start = ans.opens.iloc[0]
for i in range(3):
pp["days"] = i + 1
- pp["end"] = end = ans.closes[i]
+ pp["end"] = end = ans.closes.iloc[i]
# on left bound
for strict in [True, False]:
@@ -2533,13 +2566,13 @@ def test_daterange_duration_days_end_oolb_minute(
_ = drg.daterange
limit_i = 30
- limit = ans.opens[limit_i]
+ limit = ans.opens.iloc[limit_i]
drg_kwargs["limit"] = limit
for i in range(3):
pp["days"] = i + 1
- pp["end"] = end = ans.closes[limit_i + i]
+ pp["end"] = end = ans.closes.iloc[limit_i + i]
for strict in [True, False]: # on left limit
drg = self.get_drg(cal, pp, strict=strict, **drg_kwargs)
@@ -2576,7 +2609,7 @@ def test_daterange_duration_days_intervalperiod_error(
if session > today:
continue
i = ans.sessions.get_loc(session)
- open_, close = ans.opens[i], ans.closes[i]
+ open_, close = ans.opens.iloc[i], ans.closes.iloc[i]
length = close - open_
if length > TDInterval.H22 or ans.session_has_break(session):
@@ -2611,7 +2644,9 @@ def test_daterange_duration_days_intervalperiod_error(
_ = drg.daterange
@hyp.given(data=sthyp.data())
- @hyp.settings(deadline=500)
+ @hyp.settings(
+ deadline=500, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_intraday_start_minute(self, calendars_extended, data):
"""Test `daterange` for with period parameters as `pp_intraday_start_minute`."""
cal = calendars_extended
@@ -2630,7 +2665,9 @@ def test_daterange_duration_intraday_start_minute(self, calendars_extended, data
assert drg.daterange == ((start, end), end)
@hyp.given(data=sthyp.data())
- @hyp.settings(deadline=None)
+ @hyp.settings(
+ deadline=None, suppress_health_check=[hyp.HealthCheck.differing_executors]
+ )
def test_daterange_duration_intraday_end_minute(self, calendars_extended, data):
"""Test `daterange` for with period parameters as `pp_intraday_end_minute`.
@@ -2820,8 +2857,8 @@ def test_daterange_duration_intraday_intervalduration_error(
# on limit, where intraday duration == final interval
pp["minutes"] = base_interval.as_minutes
- pp["start"] = start = ans.first_minutes[1]
- end = ans.first_minutes[1] + base_interval
+ pp["start"] = start = ans.first_minutes.iloc[1]
+ end = ans.first_minutes.iloc[1] + base_interval
drg = self.get_drg(cal, pp, **drg_kwargs)
assert drg.daterange == ((start, end), end)
diff --git a/tests/test_helpers.py b/tests/test_helpers.py
index b44602e..5ec6d6f 100644
--- a/tests/test_helpers.py
+++ b/tests/test_helpers.py
@@ -3,15 +3,16 @@
from __future__ import annotations
from collections.abc import Iterator
+from zoneinfo import ZoneInfo
import numpy as np
import pytest
import pandas as pd
from pandas import Timestamp as T
from pandas.testing import assert_index_equal, assert_series_equal, assert_frame_equal
-import pytz
import market_prices.helpers as m
+from market_prices.helpers import UTC
from market_prices import intervals
from .utils import get_resource
@@ -35,6 +36,7 @@
def test_constants():
# Just to make sure they aren't inadvertently changed
+ assert m.UTC is ZoneInfo("UTC")
assert m.ONE_DAY == pd.Timedelta(1, "D")
assert m.ONE_MIN == pd.Timedelta(1, "T")
assert m.ONE_SEC == pd.Timedelta(1, "S")
@@ -51,9 +53,9 @@ def test_is_date(one_min):
assert not f(T("2021-11-02 12:00"))
minutes = [
- T("2021-11-02", tz=pytz.UTC),
- T("2021-11-02", tz="US/Eastern"),
- T("2021-11-02", tz=pytz.UTC).tz_convert("US/Eastern"),
+ T("2021-11-02", tz=UTC),
+ T("2021-11-02", tz=ZoneInfo("US/Eastern")),
+ T("2021-11-02", tz=UTC).tz_convert(ZoneInfo("US/Eastern")),
]
for minute in minutes:
assert not f(minute)
@@ -66,31 +68,31 @@ def test_fts():
date = pd.Timestamp("2021-11-03")
assert f(date) == "2021-11-03"
- time = pd.Timestamp("2021-11-03 12:44", tz=pytz.UTC)
+ time = pd.Timestamp("2021-11-03 12:44", tz=UTC)
assert f(time) == "2021-11-03 12:44 UTC"
- midnight = pd.Timestamp("2021-11-03", tz=pytz.UTC)
+ midnight = pd.Timestamp("2021-11-03", tz=UTC)
assert f(midnight) == "2021-11-03 00:00 UTC"
def test_is_utc():
f = m.to_utc
- expected = T("2021-11-02", tz="UTC")
- assert f(T("2021-11-02", tz="UTC")) == expected
+ expected = T("2021-11-02", tz=UTC)
+ assert f(T("2021-11-02", tz=UTC)) == expected
assert f(T("2021-11-02")) == expected
- expected = T("2021-11-02 13:33", tz="UTC")
+ expected = T("2021-11-02 13:33", tz=UTC)
assert f(T("2021-11-02 13:33")) == expected
- assert f(T("2021-11-02 09:33", tz="US/Eastern")) == expected
+ assert f(T("2021-11-02 09:33", tz=ZoneInfo("US/Eastern"))) == expected
def test_is_tz_naive():
f = m.to_tz_naive
expected = T("2021-11-02 15:30")
assert f(T("2021-11-02 15:30")) == expected
- assert f(T("2021-11-02 15:30", tz="UTC")) == expected
- assert f(T("2021-11-02 11:30", tz="US/Eastern")) == expected
+ assert f(T("2021-11-02 15:30", tz=UTC)) == expected
+ assert f(T("2021-11-02 11:30", tz=ZoneInfo("US/Eastern"))) == expected
def mock_now(mpatch, now: pd.Timestamp):
@@ -114,22 +116,22 @@ def test_now(monkeypatch):
# verify for intraday interval
- expected_left = pd.Timestamp("2022-05-01 14:32", tz=pytz.UTC)
- expected_right = pd.Timestamp("2022-05-01 14:33", tz=pytz.UTC)
+ expected_left = pd.Timestamp("2022-05-01 14:32", tz=UTC)
+ expected_right = pd.Timestamp("2022-05-01 14:33", tz=UTC)
- time_now = pd.Timestamp("2022-05-01 14:32", tz=pytz.UTC)
+ time_now = pd.Timestamp("2022-05-01 14:32", tz=UTC)
mock_now(monkeypatch, time_now)
assert f(interval_intraday) == expected_left
assert f(interval_intraday, "left") == expected_left
assert f(interval_intraday, "right") == expected_left
- time_now = pd.Timestamp("2022-05-01 14:32:01", tz=pytz.UTC)
+ time_now = pd.Timestamp("2022-05-01 14:32:01", tz=UTC)
mock_now(monkeypatch, time_now)
assert f(interval_intraday) == expected_left
assert f(interval_intraday, "left") == expected_left
assert f(interval_intraday, "right") == expected_right
- time_now = pd.Timestamp("2022-05-01 14:32:59", tz=pytz.UTC)
+ time_now = pd.Timestamp("2022-05-01 14:32:59", tz=UTC)
mock_now(monkeypatch, time_now)
assert f(interval_intraday) == expected_left
assert f(interval_intraday, "left") == expected_left
@@ -140,21 +142,21 @@ def test_now(monkeypatch):
expected_left = pd.Timestamp("2022-05-01")
expected_right = pd.Timestamp("2022-05-02")
- time_now = pd.Timestamp("2022-05-01", tz=pytz.UTC)
+ time_now = pd.Timestamp("2022-05-01", tz=UTC)
mock_now(monkeypatch, time_now)
for interval in intervals_daily:
assert f(interval) == expected_left
assert f(interval, "left") == expected_left
assert f(interval, "right") == expected_left
- time_now = pd.Timestamp("2022-05-01 00:00:01", tz=pytz.UTC)
+ time_now = pd.Timestamp("2022-05-01 00:00:01", tz=UTC)
mock_now(monkeypatch, time_now)
for interval in intervals_daily:
assert f(interval) == expected_left
assert f(interval, "left") == expected_left
assert f(interval, "right") == expected_right
- time_now = pd.Timestamp("2022-05-01 23:59:59", tz=pytz.UTC)
+ time_now = pd.Timestamp("2022-05-01 23:59:59", tz=UTC)
mock_now(monkeypatch, time_now)
for interval in intervals_daily:
assert f(interval) == expected_left
diff --git a/tests/test_mptypes.py b/tests/test_mptypes.py
index a04d380..01dcab8 100644
--- a/tests/test_mptypes.py
+++ b/tests/test_mptypes.py
@@ -5,19 +5,13 @@
from __future__ import annotations
-from collections.abc import Callable
-import re
+from typing import Annotated
import pandas as pd
import pytest
-import pytz
+from valimp import parse, Coerce
-from market_prices import mptypes
-
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
+from market_prices import mptypes as m
# pylint: disable=missing-function-docstring, missing-type-doc
# pylint: disable=missing-param-doc, missing-any-param-doc, redefined-outer-name
@@ -36,218 +30,11 @@
# Any flake8 disabled violations handled via per-file-ignores on .flake8
-def test_LeadSymbol():
- class MockCls:
- """Mock class to test mpytypes.LeadSymbol."""
-
- # pylint: disable=too-few-public-methods
-
- def _verify_lead_symbol(self, symbol: str):
- if symbol != "MSFT":
- raise ValueError(f"{symbol} not in symbols.")
-
- @pydantic.validate_arguments
- def mock_func(self, arg: mptypes.LeadSymbol) -> str:
- arg_ = str(arg)
- return arg_
-
- f = MockCls().mock_func
-
- # verify valid inpout
- s = "MSFT"
- assert f(s) is s
-
- # verify type other than str is invalid input
- obj = 3
- match = (
- "arg\n LeadSymbol takes type although receieved"
- f" <{obj}> of type ."
- )
- with pytest.raises(pydantic.ValidationError, match=match):
- f(3)
-
- # verify raises error if symbol not valid lead_symbol
- s = "RFT"
- match = f"arg\n {s} not in symbols."
- with pytest.raises(pydantic.ValidationError, match=match):
- f("RFT")
-
-
-def assert_valid_timezone(func: Callable, field: str):
- """Assert `func` arg takes input valid for pytz.timezone.
-
- Asserts valid input returns as would be returned by pytz.timezone.
- Verifies that invalid input for pytz.timezone raises an error.
- """
- # verify valid input
- assert func("UTC") == pytz.UTC
- expected = pytz.timezone("Europe/London")
- assert func("Europe/London") == expected
- assert func(expected) == expected
-
- # verify raises error if type invalid
- obj = 3
- match = re.escape(
- f"arg\n {field} can take any type from [, ] although receieved <{obj}> of type"
- " ."
- )
- with pytest.raises(pydantic.ValidationError, match=match):
- func(obj)
-
-
-def test_Timezone():
- @pydantic.validate_arguments
- def mock_func(arg: mptypes.Timezone) -> pytz.BaseTzInfo:
- assert isinstance(arg, pytz.BaseTzInfo)
- return arg
-
- assert_valid_timezone(mock_func, "Timezone")
-
-
-def test_PricesTimezone():
- tz = pytz.timezone("US/Eastern")
-
- class MockCls:
- """Mock class to test mpytypes.PricesTimezone."""
-
- @property
- def symbols(self) -> list[str]:
- return ["MSFT"]
-
- @property
- def timezones(self) -> dict:
- return {"MSFT": tz}
-
- @pydantic.validate_arguments
- def mock_func(self, arg: mptypes.PricesTimezone) -> pytz.BaseTzInfo:
- assert isinstance(arg, pytz.BaseTzInfo)
- return arg
-
- f = MockCls().mock_func
-
- # verify valid input
- assert_valid_timezone(f, "PricesTimezone")
-
- # verify can take a symbol
- assert f("MSFT") == tz
- # but not any symbol
- with pytest.raises(pytz.UnknownTimeZoneError):
- f("HEY")
-
-
-def assert_date_input(func: Callable):
- """Assert `func` arg takes a date.
-
- Asserts `func` arg can take valid single input to pd.Timestamp
- that represents a date.
-
- Verifies that invalid input to pd.Timestamp raises an error.
- """
- # verify valid input
- expected = pd.Timestamp("2022-03-01")
- assert func("2022-03-01") == expected
- assert func("2022-03") == expected
- assert func(expected) == expected
- assert func(expected.value) == expected
-
- # verify input has to be valid input to a pd.Timestamp
- obj = [expected]
- match = re.escape(
- f"arg\n Cannot convert input [{obj}] of type " " to Timestamp"
- )
- with pytest.raises(pydantic.ValidationError, match=match):
- func(obj)
-
-
-def assert_time_input(func: Callable):
- """Assert `func` arg takes a time.
-
- Asserts `func` arg can take valid single input to pd.Timestamp
- that represents a time.
-
- Verifies that invalid input to pd.Timestamp raises an error.
- """
- # verify valid input
- expected = pd.Timestamp("2022-03-01 00:01")
- assert func("2022-03-01 00:01") == expected
- assert func(expected) == expected
- assert func(expected.value) == expected
-
- # verify input has to be valid input to a pd.Timestamp
- obj = [expected]
- match = re.escape(
- f"arg\n Cannot convert input [{obj}] of type " " to Timestamp"
- )
- with pytest.raises(pydantic.ValidationError, match=match):
- func(obj)
-
-
-def test_Timestamp():
- @pydantic.validate_arguments
- def mock_func(arg: mptypes.Timestamp) -> pd.Timestamp:
- arg_ = pd.Timestamp(arg) # type: ignore[call-overload]
- return arg_
-
- assert_date_input(mock_func)
- assert_time_input(mock_func)
-
-
-def test_TimeTimestamp():
- @pydantic.validate_arguments
- def mock_func(arg: mptypes.TimeTimestamp) -> pd.Timestamp:
- arg_ = pd.Timestamp(arg) # type: ignore[call-overload]
- return arg_
-
- assert_time_input(mock_func)
- # verify input can be midnight if tz aware
- ts = pd.Timestamp("2022-03-01 00:00", tz=pytz.UTC)
- assert mock_func(ts) == ts
-
- # verify input can be timezone naive if not midnight
- ts = pd.Timestamp("2022-03-01 00:01")
- assert mock_func(ts) == ts
-
- # verify input cannot be midnight and timezone naive
- ts = pd.Timestamp("2022-03-01 00:00")
- match = re.escape(
- "arg\n `arg` must have a time component or be tz-aware,"
- f" although receieved as {ts}. To define arg as midnight pass"
- " as a tz-aware pd.Timestamp. For prices as at a session's"
- " close use .close_at()."
- )
- with pytest.raises(pydantic.ValidationError, match=match):
- mock_func(ts)
-
-
-def test_DateTimestamp():
- @pydantic.validate_arguments
- def mock_func(arg: mptypes.DateTimestamp) -> pd.Timestamp:
- arg_ = pd.Timestamp(arg) # type: ignore[call-overload]
- return arg_
-
- assert_date_input(mock_func)
-
- # verify input cannot be timezone aware
- expected = pd.Timestamp("2022-03-01")
- ts = expected.tz_localize(pytz.UTC)
- match = re.escape(f"arg\n `arg` must be tz-naive, although receieved as {ts}")
- with pytest.raises(pydantic.ValidationError, match=match):
- mock_func(ts)
-
- # verify input cannot have a time component
- obj = "2022-03-01 00:01"
- match = re.escape(
- "arg\n `arg` can not have a time component, although receieved"
- f" as {pd.Timestamp(obj)}. For an intraday price use .price_at()."
- )
- with pytest.raises(pydantic.ValidationError, match=match):
- mock_func(obj)
-
-
-def test_PandasFrequency():
- @pydantic.validate_arguments
- def mock_func(arg: mptypes.PandasFrequency) -> mptypes.PandasFrequency:
+def test_pandasfreq():
+ @parse
+ def mock_func(
+ arg: Annotated[str, Coerce(m.PandasFrequency)],
+ ) -> m.PandasFrequency:
return arg
# verify valid input
@@ -261,45 +48,8 @@ def mock_func(arg: mptypes.PandasFrequency) -> mptypes.PandasFrequency:
# verify invalid input
invalid_freq = "4p"
match = (
- "arg\n PandasFrequency must be a pandas frequency although"
+ "PandasFrequency must be a pandas frequency although"
f" received '{invalid_freq}'."
)
- with pytest.raises(pydantic.ValidationError, match=match):
+ with pytest.raises(ValueError, match=match):
mock_func(invalid_freq)
-
- # verify invalid type raises error
- obj = 3
- match = (
- "arg\n PandasFrequency takes type although"
- f" receieved <{obj}> of type ."
- )
- with pytest.raises(pydantic.ValidationError, match=match):
- mock_func(obj)
-
-
-def test_IntervalDatetimeIndex():
- @pydantic.validate_arguments
- def mock_func(arg: mptypes.IntervalDatetimeIndex) -> pd.IntervalIndex:
- return arg
-
- # verify valid input
- dti = pd.date_range("2021", periods=3, freq="MS")
- interval_index = pd.IntervalIndex.from_arrays(dti, dti)
- assert mock_func(interval_index) is interval_index
-
- # verify invalid input
- int_index = pd.Index([1, 2, 3])
- invalid_int_index = pd.IntervalIndex.from_arrays(int_index, int_index)
- match = re.escape(
- "arg\n Parameter must have each side as type pd.DatetimeIndex"
- f" although received left side as '{int_index}'."
- )
- with pytest.raises(pydantic.ValidationError, match=match):
- mock_func(invalid_int_index)
-
- # verify invalid types raises error
- match = "Parameter must be passed as an instance of pd.IntervalIndex."
- with pytest.raises(pydantic.ValidationError, match=match):
- mock_func("bob")
- with pytest.raises(pydantic.ValidationError, match=match):
- mock_func(3)
diff --git a/tests/test_pandas_utils.py b/tests/test_pandas_utils.py
index 964e0be..4083825 100644
--- a/tests/test_pandas_utils.py
+++ b/tests/test_pandas_utils.py
@@ -3,12 +3,14 @@
from __future__ import annotations
from collections import abc
import re
+from typing import Annotated
import pandas as pd
import pytest
-import pytz
+from valimp import parse, Parser
import market_prices.utils.pandas_utils as m
+from market_prices.helpers import UTC
# pylint: disable=missing-function-docstring,redefined-outer-name,too-many-public-methods
# pylint: disable=missing-param-doc,missing-any-param-doc,too-many-locals
@@ -125,7 +127,7 @@ def test_make_non_overlapping(
pd.testing.assert_index_equal(rtrn, expected)
# test non tz_naive index
- index = m.interval_index_new_tz(index, pytz.UTC)
+ index = m.interval_index_new_tz(index, UTC)
rtrn = test_method(index, fully_overlapped="remove")
expected = index.drop(index[i])
pd.testing.assert_index_equal(rtrn, expected)
@@ -145,3 +147,30 @@ def test_remove_intervals_from_interval_invalid_input(
m.remove_intervals_from_interval(
interval, interval_index_not_monotonoic_increasing
)
+
+
+# ------------------ tests for valimp.Parser functions --------------------
+
+
+def test_verify_interval_datetime_index():
+ @parse
+ def mock_func(
+ arg: Annotated[pd.IntervalIndex, Parser(m.verify_interval_datetime_index)]
+ ) -> pd.IntervalIndex:
+ return arg
+
+ # verify valid input
+ dti = pd.date_range("2021", periods=3, freq="MS")
+ interval_index = pd.IntervalIndex.from_arrays(dti, dti)
+ assert mock_func(interval_index) is interval_index
+
+ # verify invalid input
+ int_index = pd.Index([1, 2, 3])
+ invalid_int_index = pd.IntervalIndex.from_arrays(int_index, int_index)
+ match = re.escape(
+ "'arg' can only take a pd.IntervalIndex that has each side"
+ " as type pd.DatetimeIndex, although received with left side"
+ " as type ''."
+ )
+ with pytest.raises(ValueError, match=match):
+ mock_func(invalid_int_index)
diff --git a/tests/test_parsing.py b/tests/test_parsing.py
index 80c4bce..0821745 100644
--- a/tests/test_parsing.py
+++ b/tests/test_parsing.py
@@ -2,18 +2,25 @@
from __future__ import annotations
+from collections import abc
+from collections.abc import Callable
import dataclasses
+import datetime
import itertools
import re
import typing
-from collections import abc
+from typing import Annotated, Union, TYPE_CHECKING
+import zoneinfo
+from zoneinfo import ZoneInfo
import pandas as pd
import pytest
-import pytz
+from valimp import parse, Parser, Coerce
import market_prices.parsing as m
from market_prices import errors, helpers, mptypes
+from market_prices.helpers import UTC
+
from .utils import Answers
# pylint: disable=missing-function-docstring, missing-type-doc
@@ -108,29 +115,29 @@ def assert_raises_inc_with_start_end(kwargs: dict):
def test_parse_timestamp():
f = m.parse_timestamp
- tzin = pytz.timezone("US/Eastern")
+ tzin = ZoneInfo("US/Eastern")
date = pd.Timestamp("2021-11-02")
rtrn = f(date, tzin)
assert rtrn == date # date as tz-naive timestamp
assert rtrn.tz is None
- midnight = pd.Timestamp("2021-11-02", tz=pytz.UTC)
+ midnight = pd.Timestamp("2021-11-02", tz=UTC)
rtrn = f(midnight, tzin)
assert rtrn == midnight
- assert rtrn.tz == pytz.UTC # Does not change tz.
+ assert rtrn.tz == UTC # Does not change tz.
time = pd.Timestamp("2021-11-02 14:33")
rtrn = f(time, tzin)
assert rtrn == pd.Timestamp(time, tz=tzin) # defines time in terms of tzin...
- assert rtrn.tz == pytz.UTC # ...although tz is UTC
- assert rtrn != pd.Timestamp(time, tz=pytz.UTC)
- assert rtrn == time.tz_localize(tzin).tz_convert(pytz.UTC)
+ assert rtrn.tz == UTC # ...although tz is UTC
+ assert rtrn != pd.Timestamp(time, tz=UTC)
+ assert rtrn == time.tz_localize(tzin).tz_convert(UTC)
time = pd.Timestamp("2021-11-02 14:33", tz=tzin)
- rtrn = f(time, pytz.timezone("Japan")) # time tz-aware, tzin should be ignored...
+ rtrn = f(time, ZoneInfo("Japan")) # time tz-aware, tzin should be ignored...
assert rtrn == time
- assert rtrn.tz == pytz.UTC # ...albeit converted to UTC
+ assert rtrn.tz == UTC # ...albeit converted to UTC
class TestParseStartEnd:
@@ -146,7 +153,7 @@ class TestParseStartEnd:
@pytest.fixture
def f_with_ans(
self, calendars_with_answers_extended
- ) -> abc.Iterator[tuple[abc.Callable, Answers]]:
+ ) -> abc.Iterator[tuple[Callable, Answers]]:
calendar, answers = calendars_with_answers_extended
def f(
@@ -260,7 +267,7 @@ def test_start_end_as_minutes(
assert f(None, minute, as_times) == (None, ans.closes[session])
# verify if start/end are not minute accurate then rounded up/down respectively
- start, end = first_mins[0], last_mins[0]
+ start, end = first_mins.iloc[0], last_mins.iloc[0]
assert f(start + one_sec, end - one_sec, True) == (
start + one_min,
end - one_min,
@@ -355,15 +362,15 @@ def test_start_end_at_right_limit(
session_first_minute = ans.first_minutes[session]
session_close = ans.closes[session]
- def mock_now_closed(*_, tz=pytz.UTC, **__) -> pd.Timestamp:
+ def mock_now_closed(*_, tz=UTC, **__) -> pd.Timestamp:
now = ans.last_minutes[session] + (5 * one_min)
- if tz is not pytz.UTC:
+ if tz is not UTC:
now = now.tz_convert(tz)
return now
- def mock_now_open(*_, tz=pytz.UTC, **__) -> pd.Timestamp:
+ def mock_now_open(*_, tz=UTC, **__) -> pd.Timestamp:
now = session_close - (5 * one_min)
- if tz is not pytz.UTC:
+ if tz is not UTC:
now = now.tz_convert(tz)
return now
@@ -427,15 +434,15 @@ def test_start_end_ool_right(
oob_session = ans.last_session + one_day # right of right calendar bound
oob_minute = ans.last_minute + one_min # right of right calendar bound
- def mock_now_closed(*_, tz=pytz.UTC, **__) -> pd.Timestamp:
+ def mock_now_closed(*_, tz=UTC, **__) -> pd.Timestamp:
now = ans.last_minutes[last_session] + (5 * one_min)
- if tz is not pytz.UTC:
+ if tz is not UTC:
now = now.tz_convert(tz)
return now
- def mock_now_open(*_, tz=pytz.UTC, **__) -> pd.Timestamp:
+ def mock_now_open(*_, tz=UTC, **__) -> pd.Timestamp:
now = ans.last_minutes[last_session] - (5 * one_min)
- if tz is not pytz.UTC:
+ if tz is not UTC:
now = now.tz_convert(tz)
return now
@@ -511,7 +518,7 @@ def f_starts_ends(
self, f_with_ans, one_min, today
) -> abc.Iterator[
tuple[
- abc.Callable,
+ Callable,
list[tuple[pd.Timestamp, pd.Timestamp, pd.Timestamp]],
list[tuple[pd.Timestamp, pd.Timestamp, pd.Timestamp]],
]
@@ -849,8 +856,8 @@ def f(start, end, gregorian) -> mptypes.DateRangeAmb:
assert f(start, today + one_day, True) == (start, None)
# verify times pass to gregorian dates
- start_time = (start - one_min).tz_localize(pytz.UTC)
- end_time = (end + one_min).tz_localize(pytz.UTC)
+ start_time = (start - one_min).tz_localize(UTC)
+ end_time = (end + one_min).tz_localize(UTC)
assert f(start_time, end_time, True) == (start, end)
# verify when start/end None, evalute to None
@@ -908,7 +915,7 @@ def too_late_error_msg(ts, bound) -> str:
f" are available is {bound} although `time` received as {ts}."
)
- time = pd.Timestamp("2021-11-02 14:33", tz=pytz.UTC)
+ time = pd.Timestamp("2021-11-02 14:33", tz=UTC)
assert f(time, time, time) is None
@@ -925,10 +932,178 @@ def too_late_error_msg(ts, bound) -> str:
):
f(time, time, r_bound)
- midnight = pd.Timestamp("2021-11-02", tz=pytz.UTC)
+ midnight = pd.Timestamp("2021-11-02", tz=UTC)
assert f(midnight, midnight, midnight) is None
with pytest.raises(errors.DatetimeTooEarlyError):
f(midnight, midnight + one_min, midnight)
with pytest.raises(errors.DatetimeTooLateError):
f(midnight, midnight, midnight - one_min)
+
+
+# ------------------ tests for valimp.Parser functions --------------------
+
+
+def test_lead_symbol():
+ class MockCls:
+ """Mock class to test parsing.lead_symbol."""
+
+ # pylint: disable=too-few-public-methods
+
+ def _verify_lead_symbol(self, symbol: str):
+ if symbol != "MSFT":
+ raise ValueError(f"{symbol} not in symbols.")
+
+ @parse
+ def mock_func(self, arg: Annotated[str, Parser(m.lead_symbol)]) -> str:
+ return arg
+
+ f = MockCls().mock_func
+
+ # verify valid inpout
+ s = "MSFT"
+ assert f(s) is s
+
+ # verify raises error if symbol not valid lead_symbol
+ s = "RFT"
+ match = f"{s} not in symbols."
+ with pytest.raises(ValueError, match=match):
+ f("RFT")
+
+
+def assert_valid_timezone(func: Callable, field: str):
+ """Assert `func` arg takes input valid for ZoneInfo.
+
+ Asserts valid input returns as would be returned by ZoneInfo.
+ Verifies that invalid input for ZoneInfo raises an error.
+ """
+ # verify valid input
+ assert func("UTC") == UTC
+ expected = ZoneInfo("Europe/London")
+ assert func("Europe/London") == expected
+ assert func(expected) == expected
+
+
+def test_to_timezone():
+ @parse
+ def mock_func(
+ arg: Annotated[Union[ZoneInfo, str], Parser(m.to_timezone)]
+ ) -> ZoneInfo:
+ assert isinstance(arg, ZoneInfo)
+ return arg
+
+ assert_valid_timezone(mock_func, "Timezone")
+
+
+def test_to_prices_timezone():
+ tz = ZoneInfo("US/Eastern")
+
+ class MockCls:
+ """Mock class to test parsing.to_prices_timezone."""
+
+ @property
+ def symbols(self) -> list[str]:
+ return ["MSFT"]
+
+ @property
+ def timezones(self) -> dict:
+ return {"MSFT": tz}
+
+ @parse
+ def mock_func(
+ self,
+ arg: Annotated[Union[str, ZoneInfo], Parser(m.to_prices_timezone)],
+ ) -> ZoneInfo:
+ assert isinstance(arg, ZoneInfo)
+ return arg
+
+ f = MockCls().mock_func
+
+ # verify valid input
+ assert_valid_timezone(f, "PricesTimezone")
+
+ # verify can take a symbol
+ assert f("MSFT") == tz
+ # but not any symbol
+ with pytest.raises(zoneinfo.ZoneInfoNotFoundError):
+ f("HEY")
+
+
+def test_to_datetimestamp():
+ @parse
+ def mock_func(
+ arg: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ Parser(m.verify_datetimestamp, parse_none=False),
+ ] = None,
+ ) -> pd.Timestamp:
+ if TYPE_CHECKING:
+ assert isinstance(arg, pd.Timestamp)
+ return arg
+
+ # verify valid input
+ assert mock_func() is None
+ assert mock_func(None) is None
+ expected = pd.Timestamp("2022-03-01")
+ assert mock_func("2022-03-01") == expected
+ assert mock_func("2022-03") == expected
+ assert mock_func(expected) == expected
+ assert mock_func(expected.value) == expected
+
+ # verify input cannot be timezone aware
+ expected = pd.Timestamp("2022-03-01")
+ ts = expected.tz_localize(UTC)
+ match = re.escape(f"`arg` must be tz-naive, although receieved as {ts}")
+ with pytest.raises(ValueError, match=match):
+ mock_func(ts)
+
+ # verify input cannot have a time component
+ obj = "2022-03-01 00:01"
+ match = re.escape(
+ "`arg` can not have a time component, although receieved"
+ f" as {pd.Timestamp(obj)}. For an intraday price use .price_at()."
+ )
+ with pytest.raises(ValueError, match=match):
+ mock_func(obj)
+
+
+def test_to_timetimestamp():
+ @parse
+ def mock_func(
+ arg: Annotated[
+ Union[pd.Timestamp, str, datetime.datetime, int, float, None],
+ Coerce(pd.Timestamp),
+ Parser(m.verify_timetimestamp, parse_none=False),
+ ] = None,
+ ) -> pd.Timestamp:
+ if TYPE_CHECKING:
+ assert isinstance(arg, pd.Timestamp)
+ return arg
+
+ # verify valid input
+ assert mock_func() is None
+ assert mock_func(None) is None
+ expected = pd.Timestamp("2022-03-01 00:01")
+ assert mock_func("2022-03-01 00:01") == expected
+ assert mock_func(expected) == expected
+ assert mock_func(expected.value) == expected
+
+ # verify input can be midnight if tz aware
+ ts = pd.Timestamp("2022-03-01 00:00", tz=UTC)
+ assert mock_func(ts) == ts
+
+ # verify input can be timezone naive if not midnight
+ ts = pd.Timestamp("2022-03-01 00:01")
+ assert mock_func(ts) == ts
+
+ # verify input cannot be midnight and timezone naive
+ ts = pd.Timestamp("2022-03-01 00:00")
+ match = re.escape(
+ "`arg` must have a time component or be tz-aware,"
+ f" although receieved as {ts}. To define arg as midnight pass"
+ " as a tz-aware pd.Timestamp. For prices as at a session's"
+ " close use .close_at()."
+ )
+ with pytest.raises(ValueError, match=match):
+ mock_func(ts)
diff --git a/tests/test_pt.py b/tests/test_pt.py
index 37dcdf2..0679f27 100644
--- a/tests/test_pt.py
+++ b/tests/test_pt.py
@@ -6,26 +6,22 @@
import re
import typing
from collections import abc
+from zoneinfo import ZoneInfo
import exchange_calendars as xcals
import numpy as np
import pandas as pd
from pandas.testing import assert_frame_equal, assert_index_equal, assert_series_equal
import pytest
-import pytz
import market_prices.pt as m
from market_prices import errors, helpers
+from market_prices.helpers import UTC
from market_prices.intervals import TDInterval
from market_prices.utils import calendar_utils as calutils
from .utils import get_resource, multiple_sessions_freq
-import pydantic
-
-if int(next(c for c in pydantic.__version__ if c.isdigit())) > 1:
- from pydantic import v1 as pydantic
-
# pylint: disable=missing-function-docstring, missing-type-doc
# pylint: disable=missing-param-doc, missing-any-param-doc, redefined-outer-name
# pylint: disable=too-many-public-methods, too-many-arguments, too-many-locals
@@ -305,13 +301,13 @@ def calendars_overlapping(cmes, x247) -> abc.Iterator[list[xcals.ExchangeCalenda
@pytest.fixture(scope="session")
-def tz_default(xnys) -> abc.Iterator[pytz.tzinfo.BaseTzInfo]:
+def tz_default(xnys) -> abc.Iterator[ZoneInfo]:
yield xnys.tz
@pytest.fixture(scope="session")
-def tz_moscow() -> abc.Iterator[pytz.tzinfo.BaseTzInfo]:
- yield pytz.timezone("Europe/Moscow")
+def tz_moscow() -> abc.Iterator[ZoneInfo]:
+ yield ZoneInfo("Europe/Moscow")
class TestConstructorErrors:
@@ -413,7 +409,7 @@ class TestPriceTables:
@pytest.fixture(scope="class")
def session_utc(self) -> abc.Iterator[pd.Timestamp]:
- yield pd.Timestamp("2021-11-02", tz=pytz.UTC)
+ yield pd.Timestamp("2021-11-02", tz=UTC)
@pytest.fixture(scope="class")
def session_naive(self, session_utc) -> abc.Iterator[pd.Timestamp]:
@@ -421,7 +417,7 @@ def session_naive(self, session_utc) -> abc.Iterator[pd.Timestamp]:
@pytest.fixture(scope="class", autouse=True)
def minute_utc(self) -> abc.Iterator[pd.Timestamp]:
- yield pd.Timestamp("2021-12-21 15:31", tz=pytz.UTC)
+ yield pd.Timestamp("2021-12-21 15:31", tz=UTC)
@pytest.fixture(scope="class", autouse=True)
def minute_naive(self, minute_utc) -> abc.Iterator[pd.Timestamp]:
@@ -431,7 +427,7 @@ def minute_naive(self, minute_utc) -> abc.Iterator[pd.Timestamp]:
def minute_default_tz(self, minute_utc, tz_default) -> abc.Iterator[pd.Timestamp]:
yield minute_utc.tz_convert(tz_default)
- def assert_interval_index_tz(self, df: pd.DataFrame, tz: pytz.BaseTzInfo | None):
+ def assert_interval_index_tz(self, df: pd.DataFrame, tz: ZoneInfo | None):
"""Assert tz of interval index."""
assert df.index.left.tz is tz
assert df.index.right.tz is tz
@@ -440,16 +436,14 @@ def assert_frames_equal(self, df0: pd.DataFrame, df1: pd.DataFrame):
"""Assert data frame values are equivalent. Does not check index."""
assert_frame_equal(df0.reset_index(drop=True), df1.reset_index(drop=True))
- def assert_interval_index_tz_properties(
- self, df, default_tz: pytz.BaseTzInfo | None
- ):
+ def assert_interval_index_tz_properties(self, df, default_tz: ZoneInfo | None):
self.assert_interval_index_tz(df, default_tz)
assert df.pt.tz is default_tz
self.assert_interval_index_tz(df.pt.naive, None)
assert df.pt.naive.pt.tz is None
self.assert_frames_equal(df.pt.naive, df)
- self.assert_interval_index_tz(df.pt.utc, pytz.UTC)
- assert df.pt.utc.pt.tz is pytz.UTC
+ self.assert_interval_index_tz(df.pt.utc, UTC)
+ assert df.pt.utc.pt.tz is UTC
self.assert_frames_equal(df.pt.utc, df)
def assert_set_tz_not_implemented(self, df):
@@ -459,7 +453,7 @@ def assert_set_tz_not_implemented(self, df):
" .pt.utc and .pt.naive methods."
)
with pytest.raises(NotImplementedError, match=match):
- df.pt.set_tz(pytz.timezone("Europe/Moscow"))
+ df.pt.set_tz(ZoneInfo("Europe/Moscow"))
def assert_not_implemented(self, method, *args, **kwargs):
match = (
@@ -531,8 +525,8 @@ def test_daily_pt(
assert_frame_equal(df.pt.naive, df)
assert df.pt.convert_to_table_tz(session_utc) == session_naive
assert df.pt.convert_to_table_tz(session_naive) == session_naive
- assert df.pt.utc.index.tz is pytz.UTC
- assert df.pt.utc.pt.tz is pytz.UTC
+ assert df.pt.utc.index.tz is UTC
+ assert df.pt.utc.pt.tz is UTC
assert_frame_equal(df.pt.utc, df, check_index_type=False)
assert df.pt.utc.pt.convert_to_table_tz(session_utc) == session_utc
assert df.pt.utc.pt.convert_to_table_tz(session_naive) == session_utc
@@ -582,11 +576,11 @@ def test_intraday_pt(
f = df.pt.convert_to_table_tz
assert f(minute_utc) == minute_default_tz
- assert f(minute_utc).tz.zone == tz_default.zone
+ assert f(minute_utc).tz == tz_default
assert f(minute_naive) == minute_naive.tz_localize(tz_default)
- assert f(minute_naive).tz.zone == tz_default.zone
+ assert f(minute_naive).tz == tz_default
assert f(minute_default_tz) == minute_default_tz
- assert f(minute_default_tz).tz.zone == tz_default.zone
+ assert f(minute_default_tz).tz == tz_default
df_tz_moscow = df.pt.set_tz(tz_moscow)
self.assert_interval_index_tz(df_tz_moscow, tz_moscow)
@@ -683,11 +677,11 @@ def test_composite_intraday_pt(
f = df.pt.convert_to_table_tz
assert f(minute_utc) == minute_default_tz
- assert f(minute_utc).tz.zone == tz_default.zone
+ assert f(minute_utc).tz == tz_default
assert f(minute_naive) == minute_naive.tz_localize(tz_default)
- assert f(minute_naive).tz.zone == tz_default.zone
+ assert f(minute_naive).tz == tz_default
assert f(minute_default_tz) == minute_default_tz
- assert f(minute_default_tz).tz.zone == tz_default.zone
+ assert f(minute_default_tz).tz == tz_default
df_tz_moscow = df.pt.set_tz(tz_moscow)
self.assert_interval_index_tz(df_tz_moscow, tz_moscow)
@@ -698,10 +692,10 @@ def test_composite_intraday_pt(
assert not df.pt.has_regular_interval
match = " requires price table to have a regular interval."
- with pytest.raises(ValueError):
- df.pt.get_trading_index(xnys, match="`get_trading_index`" + match)
- with pytest.raises(ValueError):
- df.pt.reindex_to_calendar(xnys, match="`reindex_to_calendar`" + match)
+ with pytest.raises(ValueError, match="`get_trading_index`" + match):
+ df.pt.get_trading_index(xnys)
+ with pytest.raises(ValueError, match="`reindex_to_calendar`" + match):
+ df.pt.reindex_to_calendar(xnys)
self.assert_data_for_all(df)
@@ -721,12 +715,12 @@ def test_composite_daily_intraday_pt(
assert df.pt.first_ts == df.index[0].left
assert df.pt.last_ts == df.index[-1].right
- self.assert_interval_index_tz_properties(df, pytz.UTC)
+ self.assert_interval_index_tz_properties(df, UTC)
self.assert_set_tz_not_implemented(df)
for minute in (minute_utc, minute_naive, minute_default_tz):
rtrn = df.pt.convert_to_table_tz(minute)
- assert (rtrn, rtrn.tz) == (minute_utc, pytz.UTC)
+ assert (rtrn, rtrn.tz) == (minute_utc, UTC)
assert df.pt.freq is None
assert df.pt.interval is None
@@ -793,7 +787,7 @@ def test_intraday_1h_pt(self, intraday_1h_pt, symbols, calendars, side):
bv = df[symbol].notna().all(axis=1) # rows for which have prices
# partial indices are the last indice of each session.
bv_partial_trading = (bv + bv.shift(-1) == 1) & bv
- bv_trading_status = bv.copy()
+ bv_trading_status = pd.Series(bv, dtype="object")
bv_trading_status.loc[bv_partial_trading] = np.nan
assert_series_equal(df.pt.indices_trading_status(cal), bv_trading_status)
assert_index_equal(
@@ -820,7 +814,7 @@ def test_intraday_1h_pt(self, intraday_1h_pt, symbols, calendars, side):
bv = df[symbol].notna().all(axis=1)
# normally, partial indices are first indice of each session...
bv_partial_trading = (bv + bv.shift(1) == 1) & bv
- bv_trading_status = bv.copy()
+ bv_trading_status = pd.Series(bv, dtype="object")
bv_trading_status.loc[bv_partial_trading] = np.nan
# ...but 2021-12-24 is irregular due to different exchange hours.
@@ -866,7 +860,7 @@ def test_multiple_sessions_pt(
df = multiple_sessions_alldays_pt
symbols, calendars = symbols_alldays, calendars_alldays
for symbol, cal in zip(symbols, calendars):
- indices_trading_status = pd.Series(np.nan, index=df.index)
+ indices_trading_status = pd.Series(np.nan, index=df.index, dtype="object")
bv = df[symbol].notna().all(axis=1)
indices_non_trading = df.index[~bv]
indices_trading_status.loc[indices_non_trading] = False
@@ -944,7 +938,7 @@ def test_daily_pt(self, daily_pt, one_day):
end = df.index[-2]
assert_frame_equal(f(start, end), df[start:end])
assert_frame_equal(
- f(start.tz_localize(pytz.UTC), end.tz_localize(pytz.UTC)), df[start:end]
+ f(start.tz_localize(UTC), end.tz_localize(UTC)), df[start:end]
)
dates = pd.date_range(df.index[0], df.index[-1])
@@ -1260,8 +1254,8 @@ def test_intraday_pt(self, intraday_pt, tz_default, one_sec, one_min):
f = df.pt.price_at
# test timestamps that lie within an indice with no gaps either side
- left = pd.Timestamp("2021-12-21 14:40:00", tz=pytz.UTC)
- right = pd.Timestamp("2021-12-21 14:45:00", tz=pytz.UTC)
+ left = pd.Timestamp("2021-12-21 14:40:00", tz=UTC)
+ right = pd.Timestamp("2021-12-21 14:45:00", tz=UTC)
i = df.index.left.get_loc(left)
expected = self.get_expected(df, i, "open")
@@ -1285,10 +1279,10 @@ def test_intraday_pt(self, intraday_pt, tz_default, one_sec, one_min):
assert_frame_equal(f(ts), expected)
# assert that there is a gap here in the index
- gap_left = pd.Timestamp("2021-12-23 23:00:00", tz=pytz.UTC)
+ gap_left = pd.Timestamp("2021-12-23 23:00:00", tz=UTC)
assert gap_left in df.index.right
assert not df.index.contains(gap_left).any()
- gap_right = pd.Timestamp("2021-12-24 08:00:00", tz=pytz.UTC)
+ gap_right = pd.Timestamp("2021-12-24 08:00:00", tz=UTC)
assert gap_right in df.index.left
assert not df.index.contains(gap_right - one_sec).any()
@@ -1311,7 +1305,7 @@ def test_intraday_pt(self, intraday_pt, tz_default, one_sec, one_min):
# but prices not available for MSFT and AZN.L, so...
expected_ = self.get_expected(df.ffill(), i - 1, "close")
for s in ["MSFT", "AZN.L"]:
- expected[s] = expected_[s][0]
+ expected[s] = expected_[s].iloc[0]
for ts in (gap_left - one_sec, gap_left - one_min):
assert_frame_equal(f(ts), expected)
@@ -1322,7 +1316,7 @@ def test_intraday_pt(self, intraday_pt, tz_default, one_sec, one_min):
# but prices not available for MSFT and ES=F, so...
expected_ = self.get_expected(df.ffill(), i, "close")
for s in ["MSFT", "ES=F"]:
- expected[s] = expected_[s][0]
+ expected[s] = expected_[s].iloc[0]
for ts in (gap_right, gap_right + one_sec, gap_right + one_min):
assert_frame_equal(f(ts), expected)
@@ -1339,12 +1333,10 @@ def test_intraday_pt(self, intraday_pt, tz_default, one_sec, one_min):
assert rtrn.index.tz is tz_default
# tz to define return index only as ts is tz aware
- assert_frame_equal(f(left_default_tz, tz=pytz.UTC), expected.tz_convert("UTC"))
+ assert_frame_equal(f(left_default_tz, tz=UTC), expected.tz_convert(UTC))
# tz to define ts (tz-naive) and return index
- assert_frame_equal(
- f(left.tz_localize(None), tz=pytz.UTC), expected.tz_convert("UTC")
- )
+ assert_frame_equal(f(left.tz_localize(None), tz=UTC), expected.tz_convert(UTC))
# Test a session label raises expected error
session = pd.Timestamp("2021-12-21 00:00")
@@ -1354,7 +1346,7 @@ def test_intraday_pt(self, intraday_pt, tz_default, one_sec, one_min):
f" as {session}. To define ts as midnight pass as a tz-aware"
" pd.Timestamp. For prices as at a session's close use .close_at()."
)
- with pytest.raises(pydantic.ValidationError, match=msg):
+ with pytest.raises(ValueError, match=msg):
f(session)
# Test bounds
@@ -1365,12 +1357,12 @@ def test_intraday_pt(self, intraday_pt, tz_default, one_sec, one_min):
match = (
"`time` cannot be earlier than the first time for which prices are"
" available. First time for which prices are available is"
- f" {left_bound.tz_convert(pytz.UTC)} although `time` received as "
+ f" {left_bound.tz_convert(UTC)} although `time` received as "
)
for ts in (left_bound - one_sec, left_bound - one_min):
with pytest.raises(
errors.DatetimeTooEarlyError,
- match=re.escape(match + f"{ts.tz_convert(pytz.UTC)}."),
+ match=re.escape(match + f"{ts.tz_convert(UTC)}."),
):
f(ts)
@@ -1381,12 +1373,12 @@ def test_intraday_pt(self, intraday_pt, tz_default, one_sec, one_min):
match = (
"`time` cannot be later than the most recent time for which prices are"
f" available. Most recent time for which prices are available is"
- f" {right_bound.tz_convert(pytz.UTC)} although `time` received as "
+ f" {right_bound.tz_convert(UTC)} although `time` received as "
)
for ts in (right_bound + one_sec, right_bound + one_min):
with pytest.raises(
errors.DatetimeTooLateError,
- match=re.escape(match + f"{ts.tz_convert(pytz.UTC)}."),
+ match=re.escape(match + f"{ts.tz_convert(UTC)}."),
):
f(ts)
@@ -1405,12 +1397,12 @@ def test_composite_daily_intraday_pt(
match = (
"`time` cannot be earlier than the first time for which prices are"
" available. First time for which prices are available is"
- f" {left_bound.tz_convert(pytz.UTC)} although `time` received as "
+ f" {left_bound.tz_convert(UTC)} although `time` received as "
)
for ts in (left_bound - one_sec, left_bound - one_min):
with pytest.raises(
errors.DatetimeTooEarlyError,
- match=re.escape(match + f"{ts.tz_convert(pytz.UTC)}."),
+ match=re.escape(match + f"{ts.tz_convert(UTC)}."),
):
df.pt.price_at(ts)
@@ -1501,18 +1493,16 @@ def test_daily_pt_close_at(
f"`date` can not have a time component, although receieved as {ts}."
" For an intraday price use .price_at()."
)
- with pytest.raises(pydantic.ValidationError, match=match):
+ with pytest.raises(ValueError, match=match):
f(ts)
# test non-valid input
match = "`date` must be tz-naive, although receieved as "
for ts in (
- session.tz_localize(pytz.UTC),
+ session.tz_localize(UTC),
session.tz_localize(tz_default),
):
- with pytest.raises(
- pydantic.ValidationError, match=re.escape(match + f"{ts}.")
- ):
+ with pytest.raises(ValueError, match=re.escape(match + f"{ts}.")):
f(ts)
def test_daily_pt_session_prices(self, daily_pt, session, non_session, one_day):
@@ -1543,7 +1533,7 @@ def test_composite_daily_intraday_pt(self, composite_daily_intraday_pt, one_day)
right_bound = df_dp.index[-1]
assert_frame_equal(df.pt.close_at(right_bound), self.get_expected(df_dp, -1))
- right_bound_utc = right_bound.tz_localize(pytz.UTC)
+ right_bound_utc = right_bound.tz_localize(UTC)
indice = pd.Interval(right_bound_utc, right_bound_utc, "left")
expected = df.loc[[indice]]
expected.index = expected.index.left.tz_convert(None)
@@ -1661,16 +1651,16 @@ def assertions(df: pd.DataFrame, symbols: list[str]):
df_test = df.iloc[i_start : i_end + 1].copy()
rtrn_ff = df_test.pt.fillna("ffill")
- assert rtrn_ff.isna().any(axis=1)[0]
+ assert rtrn_ff.isna().any(axis=1).iloc[0]
rtrn_bf = df_test.pt.fillna("bfill")
- assert rtrn_bf.isna().any(axis=1)[-1]
+ assert rtrn_bf.isna().any(axis=1).iloc[-1]
rtrn_both = df_test.pt.fillna("both")
assert rtrn_both.notna().all(axis=None)
# for those symbols that have missing values in first row of df_test, make sure
# that "both" is filling initial na rows backwards and everything else forwards.
for s in symbols:
- if df_test[s].notna().all(axis=1)[0]:
+ if df_test[s].notna().all(axis=1).iloc[0]:
continue
df_notna = df_test[s][df_test.notna().all(axis=1)]
start_label = df_notna.index[0]
@@ -1744,7 +1734,7 @@ def assertions_all_options_combo(self, df: pd.DataFrame, symbols: list[str]):
df_test = self.df_na_start_end_rows(df)
rtrn = df_test.pt.operate(
- tz=pytz.UTC,
+ tz=UTC,
data_for_all_start=True,
fill="ffill",
include=symbols[0],
@@ -1765,10 +1755,10 @@ def assertions_all_options_combo(self, df: pd.DataFrame, symbols: list[str]):
assert_frame_equal(rtrn, df_)
- def assertions_tz_utc_naive(self, df: pd.DataFrame, other_tz: pytz.BaseTzInfo):
+ def assertions_tz_utc_naive(self, df: pd.DataFrame, other_tz: ZoneInfo):
"""Assert tz as naive and utc and that other tz raises error."""
f = df.pt.operate
- assert_frame_equal(f(tz=pytz.UTC), df.pt.utc)
+ assert_frame_equal(f(tz=UTC), df.pt.utc)
assert_frame_equal(f(tz=None), df.pt.naive)
match = re.escape(
@@ -1778,10 +1768,10 @@ def assertions_tz_utc_naive(self, df: pd.DataFrame, other_tz: pytz.BaseTzInfo):
with pytest.raises(ValueError, match=match):
f(tz=other_tz)
- def assertions_tz(self, df: pd.DataFrame, other_tz: pytz.BaseTzInfo):
+ def assertions_tz(self, df: pd.DataFrame, other_tz: ZoneInfo):
"""Assert tz as naive, utc and `other_tz`."""
f = df.pt.operate
- assert_frame_equal(f(tz=pytz.UTC), df.pt.utc)
+ assert_frame_equal(f(tz=UTC), df.pt.utc)
assert_frame_equal(f(tz=None), df.pt.naive)
assert_frame_equal(f(tz=other_tz), df.pt.set_tz(other_tz))
@@ -1964,8 +1954,8 @@ def assert_aggregations(symbols, subset: pd.DataFrame, row: pd.Series):
assert subset_s.volume.sum() == row_s.volume
assert subset_s.high.max() == row_s.high
assert subset_s.low.min() == row_s.low
- assert subset_s.bfill().open[0] == row_s.open
- assert subset_s.ffill().close[-1] == row_s.close
+ assert subset_s.bfill().open.iloc[0] == row_s.open
+ assert subset_s.ffill().close.iloc[-1] == row_s.close
class TestDownsampleDaily:
@@ -3033,9 +3023,9 @@ def test_sessions(self, intraday_pt, calendars, cc):
f = df.pt.sessions
calendars = list(calendars) + [cc]
for cal in calendars:
- opens_ = pd.DatetimeIndex(cal.opens.values, tz=pytz.UTC)
+ opens_ = pd.DatetimeIndex(cal.opens.values, tz=UTC)
opens_arr = opens_.get_indexer(df.pt.utc.index.left, "ffill")
- closes_ = pd.DatetimeIndex(cal.closes.values, tz=pytz.UTC)
+ closes_ = pd.DatetimeIndex(cal.closes.values, tz=UTC)
closes_arr = closes_.get_indexer(df.pt.utc.index.left, "bfill")
sessions = cal.sessions
srs = pd.Series(pd.NaT, index=df.index, name="session")
@@ -3046,9 +3036,9 @@ def test_sessions(self, intraday_pt, calendars, cc):
# verify `direction` as default / "previous"
rtrn = f(cal)
srs_ = srs.copy()
- if pd.isna(srs_[0]):
+ if pd.isna(srs_.iloc[0]):
srs_.iloc[0] = sessions[opens_arr[0]]
- expected = srs_.fillna(method="ffill")
+ expected = srs_.ffill()
assert_series_equal(rtrn, expected)
rtrn_previous = f(cal, direction="previous")
@@ -3057,9 +3047,9 @@ def test_sessions(self, intraday_pt, calendars, cc):
# verify `direction` as "previous"
rtrn_next = f(cal, direction="next")
srs_ = srs.copy()
- if pd.isna(srs_[-1]):
+ if pd.isna(srs_.iloc[-1]):
srs_.iloc[-1] = sessions[opens_arr[-1] + 1]
- assert_series_equal(rtrn_next, srs_.fillna(method="bfill"))
+ assert_series_equal(rtrn_next, srs_.bfill())
# verify `direction` as None
rtrn_none = f(cal, direction=None)
@@ -3100,13 +3090,13 @@ def test_indices_trading_minutes(self, intraday_1h_pt, calendars, one_sec):
assert indice_mins in possible_indice_mins
# create df_test where all indices have same number of trading minutes
- indice_mins_change = expected[expected != expected[0]].index[0]
+ indice_mins_change = expected[expected != expected.iloc[0]].index[0]
constant_trading_mins = expected[: indice_mins_change.left - one_sec]
start = constant_trading_mins.index[0].left
end = constant_trading_mins.index[-1].right - one_sec
df_test = df[start:end]
rtrn = df_test.pt.trading_minutes_interval(cal)
- expected_interval = TDInterval(pd.Timedelta(minutes=expected[0]))
+ expected_interval = TDInterval(pd.Timedelta(minutes=expected.iloc[0]))
assert rtrn == expected_interval
assert df_test.pt.indices_have_regular_trading_minutes(cal)
diff --git a/tests/test_tutorial_helpers.py b/tests/test_tutorial_helpers.py
index ff20dbe..d4d3e7c 100644
--- a/tests/test_tutorial_helpers.py
+++ b/tests/test_tutorial_helpers.py
@@ -9,10 +9,10 @@
import pandas as pd
from pandas.testing import assert_index_equal
from pandas import Timestamp as T
-from pytz import UTC
import exchange_calendars as xcals
from market_prices import intervals, errors
+from market_prices.helpers import UTC
from market_prices.utils import calendar_utils as calutils
from market_prices.prices.base import PricesBase
import market_prices.support.tutorial_helpers as m
diff --git a/tests/test_yahoo.py b/tests/test_yahoo.py
index 3702d4f..c4e5f5f 100644
--- a/tests/test_yahoo.py
+++ b/tests/test_yahoo.py
@@ -13,11 +13,11 @@
import pandas as pd
from pandas.testing import assert_frame_equal, assert_series_equal
import pytest
-import pytz
import yahooquery as yq
import market_prices.prices.yahoo as m
from market_prices import data, daterange, helpers, intervals, errors
+from market_prices.helpers import UTC
from market_prices.support import tutorial_helpers as th
from market_prices.utils import calendar_utils as calutils
from .test_base_prices import (
@@ -44,12 +44,12 @@
# pylint: disable=too-many-lines
-UTC = pytz.UTC
-
# NOTE See ../docs/developers/testing.md...
# ...sessions that yahoo temporarily fails to return prices for if (seemingly)
# send a high frequency of requests for prices from the same IP address.
_flakylist = (
+ pd.Timestamp("2023-09-01"),
+ pd.Timestamp("2023-07-17"),
pd.Timestamp("2023-04-23"),
pd.Timestamp("2023-01-18"),
pd.Timestamp("2023-01-17"),
@@ -431,13 +431,13 @@ def test__adjust_high_low():
"""Verify staticmethod PricesYahoo._adjust_high_low."""
columns = pd.Index(["open", "high", "low", "close", "volume"])
ohlcv = (
- [100, 103, 98, 103.4, 0], # close higher than high
- [104, 109, 104, 107, 0],
- [106, 108, 104, 107, 0],
- [106, 110, 107, 109, 0], # open lower than low
- [108, 112, 108, 112, 0],
- [112, 114, 107, 106.4, 0], # close lower than low
- [112, 108, 104, 105, 0], # open higher than high
+ [100.0, 103.0, 98.0, 103.4, 0], # close higher than high
+ [104.0, 109.0, 104.0, 107.0, 0],
+ [106.0, 108.0, 104.0, 107.0, 0],
+ [106.0, 110.0, 107.0, 109.0, 0], # open lower than low
+ [108.0, 112.0, 108.0, 112.0, 0],
+ [112.0, 114.0, 107.0, 106.4, 0], # close lower than low
+ [112.0, 108.0, 104.0, 105.0, 0], # open higher than high
)
index = pd.date_range(
start=pd.Timestamp("2022-01-01"), freq="D", periods=len(ohlcv)
@@ -446,13 +446,13 @@ def test__adjust_high_low():
rtrn = m.PricesYahoo._adjust_high_low(df)
ohlcv_expected = (
- [100, 103.4, 98, 103.4, 0], # close was higher than high
- [104, 109, 104, 107, 0],
- [106, 108, 104, 107, 0],
- [107, 110, 107, 109, 0], # open was lower than low
- [108, 112, 108, 112, 0],
- [112, 114, 106.4, 106.4, 0], # close was lower than low
- [108, 108, 104, 105, 0], # open was higher than high
+ [100.0, 103.4, 98, 103.4, 0], # close was higher than high
+ [104.0, 109.0, 104.0, 107.0, 0],
+ [106.0, 108.0, 104.0, 107.0, 0],
+ [107.0, 110.0, 107.0, 109.0, 0], # open was lower than low
+ [108.0, 112.0, 108.0, 112.0, 0],
+ [112.0, 114.0, 106.4, 106.4, 0], # close was lower than low
+ [108.0, 108.0, 104.0, 105.0, 0], # open was higher than high
)
expected = pd.DataFrame(ohlcv_expected, index=index, columns=columns)
assert (expected.open >= expected.low).all()
@@ -544,7 +544,7 @@ def f(df: pd.DataFrame, cal: xcals.ExchangeCalendar) -> pd.DataFrame:
"2022-01-05 23:00",
"2022-01-06 00:00",
],
- tz=pytz.UTC,
+ tz=UTC,
)
assert (
@@ -707,7 +707,7 @@ def f(df: pd.DataFrame, cal: xcals.ExchangeCalendar) -> pd.DataFrame:
"2022-01-12 02:00",
"2022-01-12 03:00",
],
- tz=pytz.UTC,
+ tz=UTC,
)
df = pd.DataFrame(ohlcv, index=index, columns=columns)
match_sessions = ["2022-01-05", "2022-01-07", "2022-01-10", "2022-01-12"]
@@ -1027,7 +1027,7 @@ def test_adj_close(self):
interval, interval_yq = prices.bis.D1, "1d"
# inputs for intraday interval
- end_id = pd.Timestamp.now(tz=pytz.UTC).floor("D") - pd.Timedelta(14, "D")
+ end_id = pd.Timestamp.now(tz=UTC).floor("D") - pd.Timedelta(14, "D")
start_id = end_id - pd.Timedelta(14, "D")
interval_id, interval_yq_id = prices.bis.H1, "1h"
@@ -1282,8 +1282,8 @@ def expected_table_structure_us(
expected_num_rows = int(sessions_rows.sum())
sessions_end = cc.opens[slc] + (interval.as_pdtd * sessions_rows)
- start = cc.opens[slc][0]
- end = sessions_end[-1]
+ start = cc.opens[slc].iloc[0]
+ end = sessions_end.iloc[-1]
return (start, end), expected_num_rows, sessions_end
@@ -1446,8 +1446,8 @@ def test_prices_us_lon(self, pricess):
sessions_last_indice = cc.opens[slc] + (
interval.as_pdtd * sessions_rows_gross
)
- start = cc.opens[slc][0]
- end = sessions_last_indice[-1]
+ start = cc.opens[slc].iloc[0]
+ end = sessions_last_indice.iloc[-1]
assertions_intraday(df, interval, prices, start, end, expected_num_rows)
assert cc.opens[slc].isin(df.index.left).all()
@@ -1522,8 +1522,8 @@ def test_prices_inc_245(self, pricess):
expected_num_rows = int(sessions_rows.sum())
sessions_last_indice = cc.opens[slc] + (interval.as_pdtd * sessions_rows)
- start = cc.opens[slc][0]
- end = sessions_last_indice[-1]
+ start = cc.opens[slc].iloc[0]
+ end = sessions_last_indice.iloc[-1]
assertions_intraday(df, interval, prices, start, end, expected_num_rows)
assert cc.opens[slc].isin(df.index.left).all()
@@ -1556,8 +1556,8 @@ def test_prices_inc_247(self, pricess):
expected_num_rows = int(sessions_rows.sum())
sessions_last_indice = cc.opens[slc] + (interval.as_pdtd * sessions_rows)
- start = cc.opens[slc][0]
- end = sessions_last_indice[-1]
+ start = cc.opens[slc].iloc[0]
+ end = sessions_last_indice.iloc[-1]
assertions_intraday(df, interval, prices, start, end, expected_num_rows)
assert cc.opens[slc].isin(df.index.left).all()
@@ -1604,8 +1604,8 @@ def test_start_end_session_minutes(self, pricess, one_min):
_, slc = get_data_bounds(prices, interval)
delta = pd.Timedelta(20, "T")
- start = cc.opens[slc][0] + delta
- end = cc.closes[slc][-1] - delta
+ start = cc.opens[slc].iloc[0] + delta
+ end = cc.closes[slc].iloc[-1] - delta
expected_num_rows, _ = self.get_expected_num_rows_us_lon(interval, cc, slc)
expected_num_rows -= (delta // interval) * 2
@@ -1697,9 +1697,9 @@ def test_start_none(self, pricess):
def test_live_indice(self, pricess):
"""Verify return with live indice as expected."""
prices = pricess["inc_247"]
- start = pd.Timestamp.now(tz="UTC").floor("D") - pd.Timedelta(2, "D")
+ start = pd.Timestamp.now(tz=UTC).floor("D") - pd.Timedelta(2, "D")
for interval in prices.BaseInterval[:-1]:
- now = pd.Timestamp.now(tz="UTC").floor("T")
+ now = pd.Timestamp.now(tz=UTC).floor("T")
end = now + interval
df = prices._request_data(interval, start, end)
num_rows = (now - start) / interval
@@ -1713,7 +1713,7 @@ def test_live_indice(self, pricess):
cal = prices.calendars[symbol]
cc = calutils.CompositeCalendar([cal])
delay = pd.Timedelta(delay_mins, "T")
- start = pd.Timestamp.now(tz="UTC").floor("D") - pd.Timedelta(2, "D")
+ start = pd.Timestamp.now(tz=UTC).floor("D") - pd.Timedelta(2, "D")
pp = {
"minutes": 0,
"hours": 0,
@@ -1731,7 +1731,7 @@ def test_live_indice(self, pricess):
)
(_, end), _ = drg.daterange
df = prices._request_data(interval, start, end)
- now = pd.Timestamp.now(tz="UTC").floor("T")
+ now = pd.Timestamp.now(tz=UTC).floor("T")
num_rows = (now - delay - start) / interval
num_rows = np.ceil(num_rows) if num_rows % 1 else num_rows + 1
expected_end = start + (num_rows * interval)
@@ -1746,7 +1746,7 @@ def test_one_min_interval(self, pricess):
prices = pricess["only_247"]
interval = prices.BaseInterval.T1
for days in [5, 6, 7, 11, 12, 13]:
- end = pd.Timestamp.now(tz=pytz.UTC).ceil("T")
+ end = pd.Timestamp.now(tz=UTC).ceil("T")
start = end - pd.Timedelta(days, "D")
df = prices._request_data(interval, start, end)
num_expected_rows = days * 24 * 60
@@ -1781,7 +1781,7 @@ def assertions(
indice = hist0.name
df = prices._request_data(interval, start, end)[symbol]
- df0_vol = df[indice:indice].volume[0]
+ df0_vol = df[indice:indice].volume.iloc[0]
# verify glitch in hist not present in df
if prev_close is None:
@@ -1834,7 +1834,7 @@ def assertions(
prices = pricess["us"]
symbol = prices.symbols[0]
cal = prices.calendars[symbol]
- now = pd.Timestamp.now(pytz.UTC).floor("T")
+ now = pd.Timestamp.now(UTC).floor("T")
session = cal.minute_to_past_session(now, 2)
session = get_valid_session(session, cal, "previous")
# extra 30T to cover unaligned end of 1H interval
@@ -1845,7 +1845,7 @@ def assertions(
# Verify for lon prices
prices = m.PricesYahoo(["AZN.L"])
cal = prices.calendars["AZN.L"]
- now = pd.Timestamp.now(pytz.UTC).floor("T")
+ now = pd.Timestamp.now(UTC).floor("T")
session = cal.minute_to_past_session(now, 2)
session = get_valid_session(session, cal, "previous")
# extra 30T to cover unaligned end of 1H interval
@@ -1875,7 +1875,7 @@ def test_prices_for_symbols():
# us and lon calendars overlap.
cal_us = prices.calendars[symb_us]
cal_lon = prices.calendars[symb_lon]
- now = pd.Timestamp.now(tz=pytz.UTC)
+ now = pd.Timestamp.now(tz=UTC)
end_session = cal_us.minute_to_past_session(now, 2)
start_session = cal_us.minute_to_past_session(now, 12)
@@ -1893,11 +1893,11 @@ def test_prices_for_symbols():
start = us_open - pd.Timedelta(2, "H")
end = lon_close + pd.Timedelta(2, "H")
# xcals 4.0 del clause
- if start.tz is not pytz.UTC:
- start = start.tz_localize(pytz.UTC)
- end = end.tz_localize(pytz.UTC)
- us_open = us_open.tz_localize(pytz.UTC)
- lon_close = lon_close.tz_localize(pytz.UTC)
+ if start.tz is not UTC:
+ start = start.tz_localize(UTC)
+ end = end.tz_localize(UTC)
+ us_open = us_open.tz_localize(UTC)
+ lon_close = lon_close.tz_localize(UTC)
break
_ = prices.get("5T", start, us_open, lead_symbol="AZN.L")
@@ -1988,7 +1988,7 @@ def test__get_bi_table(pricess):
to = pd.Timestamp.now()
from_ = to - pd.Timedelta(21, "D")
(start, _), slc = get_data_bounds(prices, interval, (from_, to))
- end = prices.cc.closes[slc][-1]
+ end = prices.cc.closes[slc].iloc[-1]
table = prices._get_bi_table(interval, (start, end))
bounds, num_rows, sessions_end = expected_table_structure_us(prices, interval, slc)
diff --git a/tests/utils.py b/tests/utils.py
index 09f167f..f7f8dab 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -5,16 +5,16 @@
import functools
import pathlib
from collections import abc
-from typing import Literal, TYPE_CHECKING, Tuple
+from typing import Literal, TYPE_CHECKING
import shelve
import exchange_calendars as xcals
from exchange_calendars.utils import pandas_utils as xcals_pdutils
import numpy as np
import pandas as pd
-from pytz import UTC
from market_prices import intervals
+from market_prices.helpers import UTC
from market_prices.utils import pandas_utils as pdutils
from market_prices.prices.base import PricesBase
@@ -56,7 +56,7 @@ def get_store_pbt(mode: str = "a") -> pd.HDFStore:
def get_shelf() -> shelve.DbfilenameShelf:
- """Return pbt resoureces shelf.
+ """Return pbt resources shelf.
Shelf has keys as pbt resource keys and values as pd.Timestamp
corresponding to time when resource was created.
@@ -175,9 +175,9 @@ def save_resource_pbt(
" `overwrite` as True."
)
- now = pd.Timestamp.now(tz="UTC")
+ now = pd.Timestamp.now(tz=UTC)
prices.request_all_prices()
- if pd.Timestamp.now(tz="UTC").floor("T") != now.floor("T"):
+ if pd.Timestamp.now(tz=UTC).floor("T") != now.floor("T"):
remove_resource_pbt(key, store_only=True)
raise RuntimeError(
"Operation aborted as unable to get all data within the same minute."
@@ -571,12 +571,12 @@ def sessions_range(self) -> tuple[pd.Timestamp, pd.Timestamp]:
@property
def first_session_open(self) -> pd.Timestamp:
"""Open time of first session covered by answers."""
- return self.opens[0]
+ return self.opens.iloc[0]
@property
def last_session_close(self) -> pd.Timestamp:
"""Close time of last session covered by answers."""
- return self.closes[-1]
+ return self.closes.iloc[-1]
@property
def first_minute(self) -> pd.Timestamp:
@@ -882,7 +882,7 @@ def _get_sessions_with_times_different_to_next_session(
if is_break_col:
if column_.isna().all():
return [pd.DatetimeIndex([])] * 2
- column_ = column_.fillna(method="ffill").fillna(method="bfill")
+ column_ = column_.ffill().bfill()
diff = (column_.shift(-1) - column_)[:-1]
remainder = diff % pd.Timedelta(24, "H")
@@ -1339,7 +1339,7 @@ def non_sessions_range(self) -> tuple[pd.Timestamp, pd.Timestamp] | None:
# --- Evaluated sets of minutes ---
- Minutes = Tuple[pd.Timestamp, pd.Timestamp, pd.Timestamp, pd.Timestamp]
+ Minutes = tuple[pd.Timestamp, pd.Timestamp, pd.Timestamp, pd.Timestamp]
@functools.lru_cache(maxsize=4)
def _evaluate_trading_and_break_minutes(