return prompt_tokens for generate/generate_stream of api_lightllm (#463)
shihaobai committed Jul 11, 2024
parent c600d14 · commit 904d6b1
1 changed file: lightllm/server/api_lightllm.py (4 additions, 0 deletions)
@@ -29,6 +29,7 @@ async def lightllm_generate(request: Request, g_id_gen, httpserver_manager) -> R
     tokens_dict = collections.defaultdict(list)
     finish_reason_dict = {}
     prompt_logprobs = None
+    prompt_tokens = 0
     prompt_token_ids = None
     is_first_metadata = True
     async for sub_req_id, request_output, metadata, finish_status in results_generator:
@@ -37,6 +38,7 @@ async def lightllm_generate(request: Request, g_id_gen, httpserver_manager) -> R
         if is_first_metadata:
             prompt_logprobs = metadata.get("prompt_logprobs", None)
             prompt_token_ids = metadata.get("prompt_token_ids", None)
+            prompt_tokens = metadata.get("prompt_tokens", 0)
             if prompt_logprobs is not None:
                 del metadata["prompt_logprobs"]
             if prompt_token_ids is not None:
@@ -65,6 +67,7 @@ async def lightllm_generate(request: Request, g_id_gen, httpserver_manager) -> R
         "generated_text": final_output_list,
         "count_output_tokens": ret_data_format(count_output_tokens_list),
         "finish_reason": ret_data_format(finish_reson_list),
+        "prompt_tokens": prompt_tokens,
     }
     if return_details:
         ret["tokens"] = ret_data_format(tokens_list)
@@ -104,6 +107,7 @@ async def stream_results() -> AsyncGenerator[bytes, None]:
                 "logprob": metadata.get("logprob", None),
                 "special": metadata.get("special", False),
                 "count_output_tokens": metadata.get("count_output_tokens", 0),
+                "prompt_tokens": metadata.get("prompt_tokens", 0),
             },
             "generated_text": None,
             "finished": finish_status.is_finished(),