Skip to content

Commit

Permalink
Add a new --fail-on-increase option
Browse files Browse the repository at this point in the history
As requested from some users, add a new --fail-on-increase option that
makes the test run fail if the memory usage increases from previous runs
in tests marked with "limit_memory".

Signed-off-by: Pablo Galindo <[email protected]>
  • Loading branch information
pablogsal authored and godlygeek committed Nov 7, 2023
1 parent 67d731e commit 40e3601
Show file tree
Hide file tree
Showing 6 changed files with 130 additions and 3 deletions.
6 changes: 5 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,8 @@ MEMORY PROBLEMS demo/test_ok.py::test_memory_exceed
- `--stacks=STACKS` - Show the N stack entries when showing tracebacks of memory allocations
- `--native` - Show native frames when showing tracebacks of memory allocations (will be slower)
- `--trace-python-allocators` - Record allocations made by the Pymalloc allocator (will be slower)
- `--fail-on-increase` - Fail a test with the `limit_memory` marker if it uses
more memory than its last successful run

## Configuration - INI

Expand All @@ -105,7 +107,9 @@ MEMORY PROBLEMS demo/test_ok.py::test_memory_exceed
- `hide_memray_summary(bool)` - hide the memray summary at the end of the execution
- `stacks(int)` - Show the N stack entries when showing tracebacks of memory allocations
- `native(bool)`- Show native frames when showing tracebacks of memory allocations (will be slower)
- `trace_python_allocators` - Record allocations made by the Pymalloc allocator (will be slower)
- `trace_python_allocators(bool)` - Record allocations made by the Pymalloc allocator (will be slower)
- `fail_on_increase(bool)` - Fail a test with the `limit_memory` marker if it
uses more memory than its last successful run

## License

Expand Down
6 changes: 6 additions & 0 deletions docs/configuration.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ The complete list of command line options is:

``--trace-python-allocators``
Record allocations made by the Pymalloc allocator (will be slower)

``--fail-on-increase``
Fail a test with the limit_memory marker if it uses more memory than its last successful run

.. tab:: Config file options

Expand All @@ -49,3 +52,6 @@ The complete list of command line options is:

``trace_python_allocators(bool)``
Record allocations made by the Pymalloc allocator (will be slower)

``fail_on_increase(bool)``
  Fail a test with the ``limit_memory`` marker if it uses more memory than its last successful run
1 change: 1 addition & 0 deletions docs/news/91.feature.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add a new ``--fail-on-increase`` option that fails a test with the ``limit_memory`` marker if it uses more memory than its last successful run.
42 changes: 40 additions & 2 deletions src/pytest_memray/marks.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,28 @@ def long_repr(self) -> str:
)


@dataclass
class _MoreMemoryInfo:
previous_memory: float
new_memory: float

@property
def section(self) -> PytestSection:
"""Return a tuple in the format expected by section reporters."""
return (
"memray-max-memory",
"Test uses more memory than previous run",
)

@property
def long_repr(self) -> str:
"""Generate a longrepr user-facing error message."""
return (
f"Test previously used {sizeof_fmt(self.previous_memory)} "
f"but now uses {sizeof_fmt(self.new_memory)}"
)


def _generate_section_text(
allocations: list[AllocationRecord], native_stacks: bool, num_stacks: int
) -> str:
Expand Down Expand Up @@ -162,15 +184,30 @@ def _passes_filter(


def limit_memory(
limit: str, *, _result_file: Path, _config: Config
) -> _MemoryInfo | None:
limit: str,
*,
_result_file: Path,
_config: Config,
_test_id: str,
) -> _MemoryInfo | _MoreMemoryInfo | None:
"""Limit memory used by the test."""
reader = FileReader(_result_file)
allocations: list[AllocationRecord] = list(
reader.get_high_watermark_allocation_records(merge_threads=True)
)
max_memory = parse_memory_string(limit)
total_allocated_memory = sum(record.size for record in allocations)

if _config.cache is not None:
cache = _config.cache.get(f"memray/{_test_id}", {})
previous = cache.get("total_allocated_memory", float("inf"))
fail_on_increase = cast(bool, value_or_ini(_config, "fail_on_increase"))
if fail_on_increase and total_allocated_memory > previous:
return _MoreMemoryInfo(previous, total_allocated_memory)

cache["total_allocated_memory"] = total_allocated_memory
_config.cache.set(f"memray/{_test_id}", cache)

if total_allocated_memory < max_memory:
return None
num_stacks: int = cast(int, value_or_ini(_config, "stacks"))
Expand All @@ -190,6 +227,7 @@ def limit_leaks(
filter_fn: Optional[LeaksFilterFunction] = None,
_result_file: Path,
_config: Config,
_test_id: str,
) -> _LeakedInfo | None:
reader = FileReader(_result_file)
allocations: list[AllocationRecord] = list(
Expand Down
13 changes: 13 additions & 0 deletions src/pytest_memray/plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ def __call__(
*args: Any,
_result_file: Path,
_config: Config,
_test_id: str,
**kwargs: Any,
) -> SectionMetadata | None:
...
Expand Down Expand Up @@ -237,6 +238,7 @@ def pytest_runtest_makereport(
**marker.kwargs,
_result_file=result.result_file,
_config=self.config,
_test_id=item.nodeid,
)
if res:
report.outcome = "failed"
Expand Down Expand Up @@ -384,6 +386,12 @@ def pytest_addoption(parser: Parser) -> None:
default=False,
help="Record allocations made by the Pymalloc allocator (will be slower)",
)
group.addoption(
"--fail-on-increase",
action="store_true",
default=False,
help="Fail a test with the limit_memory marker if it uses more memory than its last successful run",
)

parser.addini("memray", "Activate pytest.ini setting", type="bool")
parser.addini(
Expand All @@ -407,6 +415,11 @@ def pytest_addoption(parser: Parser) -> None:
help="Record allocations made by the Pymalloc allocator (will be slower)",
type="bool",
)
parser.addini(
"fail-on-increase",
help="Fail a test with the limit_memory marker if it uses more memory than its last successful run",
type="bool",
)
help_msg = "Show the N tests that allocate most memory (N=0 for all)"
parser.addini("most_allocations", help_msg)

Expand Down
65 changes: 65 additions & 0 deletions tests/test_pytest_memray.py
Original file line number Diff line number Diff line change
Expand Up @@ -796,3 +796,68 @@ def test_bar():

output = result.stdout.str()
assert "Only one Memray marker can be applied to each test" in output


def test_fail_on_increase(pytester: Pytester):
    """With --fail-on-increase, a test that uses more memory than its last
    successful run must fail, even while staying under its limit_memory cap."""
    baseline_source = """
        import pytest
        from memray._test import MemoryAllocator
        allocator = MemoryAllocator()
        @pytest.mark.limit_memory("100MB")
        def test_memory_alloc_fails():
            allocator.valloc(1024)
            allocator.free()
    """
    # First run establishes the cached baseline (1KiB) and must pass.
    pytester.makepyfile(baseline_source)
    assert pytester.runpytest("--memray").ret == ExitCode.OK

    increased_source = """
        import pytest
        from memray._test import MemoryAllocator
        allocator = MemoryAllocator()
        @pytest.mark.limit_memory("100MB")
        def test_memory_alloc_fails():
            allocator.valloc(1024 * 10)
            allocator.free()
    """
    # Same test id, ten times the allocation: the increase check must trip.
    pytester.makepyfile(increased_source)
    result = pytester.runpytest("--memray", "--fail-on-increase")
    assert result.ret == ExitCode.TESTS_FAILED

    stdout = result.stdout.str()
    assert "Test uses more memory than previous run" in stdout
    assert "Test previously used 1.0KiB but now uses 10.0KiB" in stdout


def test_fail_on_increase_unset(pytester: Pytester):
    """Without --fail-on-increase, using more memory than the previous run
    is not an error as long as the limit_memory cap is respected."""
    baseline_source = """
        import pytest
        from memray._test import MemoryAllocator
        allocator = MemoryAllocator()
        @pytest.mark.limit_memory("100MB")
        def test_memory_alloc_fails():
            allocator.valloc(1024)
            allocator.free()
    """
    pytester.makepyfile(baseline_source)
    assert pytester.runpytest("--memray").ret == ExitCode.OK

    increased_source = """
        import pytest
        from memray._test import MemoryAllocator
        allocator = MemoryAllocator()
        @pytest.mark.limit_memory("100MB")
        def test_memory_alloc_fails():
            allocator.valloc(1024 * 10)
            allocator.free()
    """
    # Allocation grew tenfold, but the option is off, so the run still passes.
    pytester.makepyfile(increased_source)
    assert pytester.runpytest("--memray").ret == ExitCode.OK

0 comments on commit 40e3601

Please sign in to comment.