[NO LOGGING]
Kye committed Mar 21, 2024
1 parent 935d860 commit c241614
Showing 1 changed file with 27 additions and 27 deletions.
54 changes: 27 additions & 27 deletions servers/cogvlm/cogvlm.py
@@ -159,33 +159,33 @@ async def create_chat_completion(
content=response["text"],
)

- # Log the entry to supabase
- entry = ModelAPILogEntry(
-     user_id=await fetch_api_key_info(token),
-     model_id="41a2869c-5f8d-403f-83bb-1f06c56bad47",
-     input_tokens=await count_tokens(request.messages, tokenizer, request.model),
-     output_tokens=await count_tokens(response["text"], tokenizer, request.model),
-     all_cost=await calculate_pricing(
-         texts=[message.content], tokenizer=tokenizer, rate_per_million=15.0
-     ),
-     input_cost=await calculate_pricing(
-         texts=[message.content], tokenizer=tokenizer, rate_per_million=15.0
-     ),
-     output_cost=await calculate_pricing(
-         texts=response["text"], tokenizer=tokenizer, rate_per_million=15.0
-     )
-     * 5,
-     messages=request.messages,
-     # temperature=request.temperature,
-     top_p=request.top_p,
-     # echo=request.echo,
-     stream=request.stream,
-     repetition_penalty=request.repetition_penalty,
-     max_tokens=request.max_tokens,
- )
-
- # Log the entry to supabase
- await log_to_supabase(entry=entry)
+ # # Log the entry to supabase
+ # entry = ModelAPILogEntry(
+ #     user_id=await fetch_api_key_info(token),
+ #     model_id="41a2869c-5f8d-403f-83bb-1f06c56bad47",
+ #     input_tokens=await count_tokens(request.messages, tokenizer, request.model),
+ #     output_tokens=await count_tokens(response["text"], tokenizer, request.model),
+ #     all_cost=await calculate_pricing(
+ #         texts=[message.content], tokenizer=tokenizer, rate_per_million=15.0
+ #     ),
+ #     input_cost=await calculate_pricing(
+ #         texts=[message.content], tokenizer=tokenizer, rate_per_million=15.0
+ #     ),
+ #     output_cost=await calculate_pricing(
+ #         texts=response["text"], tokenizer=tokenizer, rate_per_million=15.0
+ #     )
+ #     * 5,
+ #     messages=request.messages,
+ #     # temperature=request.temperature,
+ #     top_p=request.top_p,
+ #     # echo=request.echo,
+ #     stream=request.stream,
+ #     repetition_penalty=request.repetition_penalty,
+ #     max_tokens=request.max_tokens,
+ # )
+
+ # # Log the entry to supabase
+ # await log_to_supabase(entry=entry)

# ChatCompletionResponseChoice
logger.debug(f"==== message ====\n{message}")
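For reference, a minimal, self-contained sketch of the pricing arithmetic that the now-disabled logging block performed: calculate_pricing is billed at rate_per_million=15.0 and the output cost is that result multiplied by 5, per the removed code above. The cost_usd helper and the token counts below are illustrative assumptions, not code from this repository.

def cost_usd(token_count: int, rate_per_million: float = 15.0) -> float:
    # Price a token count at a flat per-million-token rate.
    return token_count / 1_000_000 * rate_per_million

input_tokens = 1_200   # hypothetical prompt size
output_tokens = 350    # hypothetical completion size

input_cost = cost_usd(input_tokens)        # 1_200 / 1e6 * 15.0   = 0.018
output_cost = cost_usd(output_tokens) * 5  # 350 / 1e6 * 15.0 * 5 = 0.02625

print(f"input: ${input_cost:.5f}  output: ${output_cost:.5f}")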
