1 file changed: +4 -9 lines changed

@@ -86,11 +86,9 @@ def forward(
     # NOTE: the top_p_renorm_prob from flashinfer has numerical problems,
     # https://github.com/flashinfer-ai/flashinfer/issues/708
     # so we use the torch implementation.
-
-    # clamp to avoid -inf
-    logprobs = torch.log(
-        top_p_normalize_probs_torch(probs, sampling_info.top_ps)
-    ).clamp(min=torch.finfo(probs.dtype).min)
+    # NOTE: OpenAI's logprobs are independent of top-p; we use the
+    # same rule.
+    logprobs = torch.log(probs).clamp(min=torch.finfo(probs.dtype).min)

     max_top_k_round, batch_size = 32, probs.shape[0]
     if sampling_info.need_min_p_sampling:
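
Both hunks keep the `.clamp(min=torch.finfo(probs.dtype).min)` guard: `log(0)` is `-inf`, and clamping to the dtype's most negative finite value keeps downstream arithmetic on logprobs NaN-safe. A minimal standalone sketch of that behavior (not code from this PR):

```python
import torch

# log(0) is -inf; clamping to the dtype's most negative finite value
# keeps downstream arithmetic on logprobs (sums, comparisons) NaN-safe.
p = torch.tensor([0.0, 0.5, 0.5])
raw = torch.log(p)                                       # first entry is -inf
safe = torch.log(p).clamp(min=torch.finfo(p.dtype).min)  # ~-3.4e38 for float32
```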
@@ -121,10 +119,7 @@ def forward(
     )

     if return_logprob:
-        # clamp to avoid -inf
-        logprobs = torch.log(
-            top_p_normalize_probs_torch(probs, sampling_info.top_ps)
-        ).clamp(min=torch.finfo(probs.dtype).min)
+        logprobs = torch.log(probs).clamp(min=torch.finfo(probs.dtype).min)
 else:
     raise ValueError(
         f"Invalid sampling backend: {global_server_args_dict['sampling_backend']}"