eleuther generate until

Xi Yan 2024-10-08 23:57:22 -07:00
parent 6abef716dd
commit 9c38d9ae13
3 changed files with 34 additions and 40 deletions

@@ -91,10 +91,9 @@ class MetaReferenceInferenceImpl(Inference):
         else:
             return self._nonstream_chat_completion(request)
 
-    async def _nonstream_chat_completion(
+    def _nonstream_chat_completion(
         self, request: ChatCompletionRequest
     ) -> ChatCompletionResponse:
-        async with SEMAPHORE:
         messages = chat_completion_request_to_messages(request)
         tokens = []
@@ -121,18 +120,14 @@ class MetaReferenceInferenceImpl(Inference):
                 logprobs.append(
                     TokenLogProbs(
-                        logprobs_by_token={
-                            token_result.text: token_result.logprobs[0]
-                        }
+                        logprobs_by_token={token_result.text: token_result.logprobs[0]}
                     )
                 )
 
         if stop_reason is None:
             stop_reason = StopReason.out_of_tokens
 
-        message = self.generator.formatter.decode_assistant_message(
-            tokens, stop_reason
-        )
+        message = self.generator.formatter.decode_assistant_message(tokens, stop_reason)
 
         return ChatCompletionResponse(
             completion_message=message,
             logprobs=logprobs if request.logprobs else None,
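
Taken together, the two hunks above drop the SEMAPHORE-guarded async wrapper but keep the same non-streaming flow: accumulate generated tokens, optionally record per-token logprobs, fall back to StopReason.out_of_tokens when the generator never emits a stop token, and only then decode everything into a single assistant message. A minimal self-contained sketch of that flow, where TokenResult, the stop marker, and the decode step are illustrative stand-ins rather than the actual llama-stack classes:

from dataclasses import dataclass
from enum import Enum
from typing import Dict, Iterable, List, Optional


class StopReason(Enum):
    end_of_turn = "end_of_turn"
    out_of_tokens = "out_of_tokens"


@dataclass
class TokenResult:
    # One generation step: decoded text, token id, optional logprobs.
    text: str
    token: int
    logprobs: Optional[List[float]] = None


def nonstream_completion(
    stream: Iterable[TokenResult], want_logprobs: bool, stop_text: str = "<|eot_id|>"
) -> tuple:
    tokens: List[int] = []
    logprobs: List[Dict[str, float]] = []
    stop_reason: Optional[StopReason] = None

    for result in stream:
        tokens.append(result.token)
        if result.text == stop_text:
            stop_reason = StopReason.end_of_turn
        if want_logprobs and result.logprobs:
            # Same shape as TokenLogProbs(logprobs_by_token={text: logprob}) above.
            logprobs.append({result.text: result.logprobs[0]})

    # Generator ran out of budget without emitting a stop token.
    if stop_reason is None:
        stop_reason = StopReason.out_of_tokens

    # Stand-in for generator.formatter.decode_assistant_message(tokens, stop_reason).
    message = {"tokens": tokens, "stop_reason": stop_reason.value}
    return message, (logprobs if want_logprobs else None)


if __name__ == "__main__":
    demo = [TokenResult("Hello", 1, [-0.1]), TokenResult(" world", 2, [-0.3])]
    print(nonstream_completion(demo, want_logprobs=True))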

@@ -102,9 +102,8 @@ class EleutherEvalsWrapper(LM):
                 stream=False,
             )
-            print(response)
-            res.append(response.completion_message)
+            res.append(response.completion_message.content)
             print(response)
         return res
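
The harness-facing fix here is that generate_until must return plain strings, one per request, rather than message objects; appending response.completion_message.content satisfies that. A rough sketch of the wrapper loop under that assumption, where the inference_api call and the request unpacking are illustrative rather than the exact llama-stack or lm-eval APIs:

from typing import Any, List


class EleutherEvalsWrapperSketch:
    """Illustrative only: adapts a chat-completion API to lm-eval's
    generate_until contract, which expects a List[str] of completions."""

    def __init__(self, inference_api: Any, model: str) -> None:
        self.inference_api = inference_api
        self.model = model

    def generate_until(self, requests: List[Any]) -> List[str]:
        res: List[str] = []
        for req in requests:
            # Assumption: each harness request carries (context, gen_kwargs) in req.args.
            context, _gen_kwargs = req.args
            response = self.inference_api.chat_completion(
                model=self.model,
                messages=[{"role": "user", "content": context}],
                stream=False,
            )
            # Append the text content, not the CompletionMessage object,
            # so downstream metric code can treat outputs as strings.
            res.append(response.completion_message.content)
        return res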

@@ -14,8 +14,8 @@ apis:
 - evals
 providers:
   evals:
-  - provider_id: meta-reference
-    provider_type: meta-reference
+  - provider_id: eleuther
+    provider_type: eleuther
     config: {}
   inference:
   - provider_id: meta-reference
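
The config change only swaps the evals provider to eleuther; inference stays on meta-reference. A small sketch of how such a run config could be read and checked, where the field names follow the snippet above but the loader is a hypothetical helper (not the llama-stack config resolver) and the inference entry's config block is assumed:

import yaml  # pip install pyyaml

RUN_CONFIG = """
apis:
- evals
- inference
providers:
  evals:
  - provider_id: eleuther
    provider_type: eleuther
    config: {}
  inference:
  - provider_id: meta-reference
    provider_type: meta-reference
    config: {}
"""


def selected_providers(raw: str) -> dict:
    """Return {api: provider_type} for the first provider listed per API."""
    cfg = yaml.safe_load(raw)
    return {
        api: entries[0]["provider_type"]
        for api, entries in cfg.get("providers", {}).items()
        if entries
    }


if __name__ == "__main__":
    # Expected: {'evals': 'eleuther', 'inference': 'meta-reference'}
    print(selected_providers(RUN_CONFIG))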