Skip to content

Commit f439ca2

Browse files
committed
Delete debug print statements and add a type annotation to build_stream_transfer_data
1 parent 6bc1ed2 commit f439ca2

File tree

2 files changed

+3
-9
lines changed

2 files changed

+3
-9
lines changed

fastdeploy/model_executor/pre_and_post_process.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
"""
1616

1717
import queue
18-
from typing import Dict, Optional, Union
18+
from typing import Dict, List, Optional, Union
1919

2020
import numpy as np
2121
import paddle
@@ -85,7 +85,7 @@
8585
speculate_limit_thinking_content_length_v2,
8686
)
8787

88-
from fastdeploy.output.pooler import PoolerOutput
88+
from fastdeploy.output.pooler import PoolerOutput, PoolingSequenceGroupOutput
8989
from fastdeploy.output.stream_transfer_data import DecoderState, StreamTransferData
9090
from fastdeploy.worker.output import ModelOutputData, ModelRunnerOutput, SamplerOutput
9191

@@ -239,7 +239,7 @@ def pre_process(
239239
)
240240

241241

242-
def _build_stream_transfer_data(output_tokens: np.ndarray, pooler_outputs: None):
242+
def _build_stream_transfer_data(output_tokens: np.ndarray, pooler_outputs: List[PoolingSequenceGroupOutput] = None):
243243
"""Split output_tokens and output"""
244244

245245
stream_transfer_datas = []

fastdeploy/worker/gpu_model_runner.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1536,12 +1536,6 @@ def _dummy_pooler_run_task(
15361536
assert len(num_scheduled_tokens_list) == num_reqs
15371537

15381538
req_num_tokens = num_tokens // num_reqs
1539-
1540-
print("num_tokens", num_tokens)
1541-
print("max_num_seqs", max_num_seqs)
1542-
print("num_reqs", num_reqs)
1543-
print("min_tokens_per_req", min_tokens_per_req)
1544-
print("num_scheduled_token_list", num_scheduled_tokens_list)
15451539
dummy_prompt_lens = paddle.to_tensor(num_scheduled_tokens_list, dtype="int64")
15461540
dummy_token_ids = paddle.zeros(
15471541
[num_reqs, req_num_tokens],

0 commit comments

Comments (0)