From eb2e4ab90d8661a69a3b6830fc272e37a0304869 Mon Sep 17 00:00:00 2001
From: Matt Angus
Date: Fri, 9 Aug 2024 23:37:41 +0100
Subject: [PATCH] don't generate stubs from invalid signatures

---
 mypy/stubdoc.py          | 25 ++++++++++++-
 mypy/test/teststubgen.py | 79 ++++++++++++++++++++++++++++++++++++++++
 2 files changed, 103 insertions(+), 1 deletion(-)

diff --git a/mypy/stubdoc.py b/mypy/stubdoc.py
index 928d024514f3..6897706d1415 100644
--- a/mypy/stubdoc.py
+++ b/mypy/stubdoc.py
@@ -180,6 +180,16 @@ def __init__(self, function_name: str) -> None:
 
     def add_token(self, token: tokenize.TokenInfo) -> None:
         """Process next token from the token stream."""
+
+        if (
+            self.state[-1] == STATE_ARGUMENT_TYPE
+            and token.type != tokenize.NAME
+            and len(self.accumulator) == 0
+        ):
+            # the next token after : must be a name
+            self.reset()
+            return
+
         if (
             token.type == tokenize.NAME
             and token.string == self.function_name
@@ -282,8 +292,13 @@ def add_token(self, token: tokenize.TokenInfo) -> None:
             self.accumulator = ""
 
         elif token.type == tokenize.OP and token.string == "->" and self.state[-1] == STATE_INIT:
+            if len(self.accumulator) == 0:
+                self.state.append(STATE_RETURN_VALUE)
+            else:
+                # ) is not directly followed by ->
+                self.reset()
+                return
             self.accumulator = ""
-            self.state.append(STATE_RETURN_VALUE)
 
         # ENDMAKER is necessary for python 3.4 and 3.5.
         elif token.type in (tokenize.NEWLINE, tokenize.ENDMARKER) and self.state[-1] in (
@@ -306,6 +321,14 @@ def add_token(self, token: tokenize.TokenInfo) -> None:
             self.args = []
             self.ret_type = "Any"
             # Leave state as INIT.
+        elif (
+            token.type == tokenize.NAME
+            and self.state[-1] == STATE_ARGUMENT_LIST
+            and len(self.accumulator) != 0
+        ):
+            # not the first name in a row
+            self.reset()
+            return
         else:
             self.accumulator += token.string
 
diff --git a/mypy/test/teststubgen.py b/mypy/test/teststubgen.py
index e65a16c8f395..f481d66b7781 100644
--- a/mypy/test/teststubgen.py
+++ b/mypy/test/teststubgen.py
@@ -398,6 +398,85 @@ def test_infer_sig_from_docstring_bad_indentation(self) -> None:
             None,
         )
 
+    def test_infer_sig_from_docstring_invalid_signature(self) -> None:
+
+        assert_equal(infer_sig_from_docstring("\nfunc() --> None", "func"), [])
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(name1 name2) -> None", "func"), []
+        )
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(name1, name2 name3) -> None", "func"), []
+        )
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(name2, name3) -> None, None", "func"),
+            [],
+        )
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(invalid::name) -> None", "func"),
+            [],
+        )
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(invalid: [type]) -> None", "func"),
+            [],
+        )
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(invalid: (type)) -> None", "func"),
+            [],
+        )
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(invalid: -type) -> None", "func"),
+            [],
+        )
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(invalid None", "func"),
+            [],
+        )
+
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(cpp::type name) -> None", "func"),
+            [],
+        )
+        assert_equal(
+            infer_sig_from_docstring("\nfunc(name) -> cpp::type", "func"),
+            [],
+        )
+        assert_equal(
+            infer_sig_from_docstring("\nvoid func(int name) {", "func"),
+            [],
+        )
+        assert_equal(
+            infer_sig_from_docstring("\nvoid func(std::vector name)", "func"),
+            [],
+        )
+
+    def test_infer_sig_from_docstring_deeply_nested_types(self) -> None:
+        assert_equal(
+            infer_sig_from_docstring(
+                "\nfunc(name: dict[str, dict[str, list[tuple[int, float]]]]) -> None",
+                "func",
+            ),
+            [
+                FunctionSig(
+                    name="func",
+                    args=[
+                        ArgSig(
+                            name="name",
+                            type="dict[str,dict[str,list[tuple[int,float]]]]",
+                        )
+                    ],
+                    ret_type="None",
+                )
+            ],
+        )
+
     def test_infer_arg_sig_from_anon_docstring(self) -> None:
         assert_equal(
             infer_arg_sig_from_anon_docstring("(*args, **kwargs)"),
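Note (reviewer sketch, not part of the commit): the behaviour the new tests pin
down can be exercised directly against mypy.stubdoc. The malformed example below
is taken from the added tests; the well-formed example is an assumed illustration,
not part of the patch.

    from mypy.stubdoc import infer_sig_from_docstring

    # Malformed: two bare names in the argument list -> no signature inferred,
    # instead of the bogus stub that was generated before this change.
    assert infer_sig_from_docstring("\nfunc(name1 name2) -> None", "func") == []

    # Well-formed signatures keep parsing as before (assumed example).
    sigs = infer_sig_from_docstring("\nfunc(name: int) -> None", "func")
    assert sigs and sigs[0].ret_type == "None"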