2 files changed in tensorrt_llm/_torch/pyexecutor: +0 -3 lines

@@ -1857,7 +1857,6 @@ def previous_seq_slots_device():
         self.iter_states['num_ctx_requests'] = num_ctx_requests
         self.iter_states['num_ctx_tokens'] = num_ctx_tokens
         self.iter_states['num_generation_tokens'] = num_generation_tokens
-        print(f"DEBUG: is_draft_model: {self.is_draft_model}, inputs: {inputs}")
         return inputs, self.gather_ids_cuda[:len(
             gather_ids)] if self.enable_spec_decode else None

(second changed file)

@@ -1339,7 +1339,6 @@ def _executor_loop_overlap(self):
         else:
             previous_tensors_device = self.previous_batch and self.previous_batch.sample_state and self.previous_batch.sample_state.device

-        print(f"previous_tensors_device: {previous_tensors_device}")
         batch_outputs = self._forward_step(scheduled_batch,
                                            previous_tensors_device)

@@ -1368,7 +1367,6 @@ def _executor_loop_overlap(self):
         sample_state = self._sample_async(scheduled_batch,
                                           batch_outputs)
         assert sample_state is not None, "Sampling failed"
-        print(f"sample_state: {sample_state}")

         self._update_request_states(scheduled_batch)
