
Commit

fixed cost calc
chitalian committed May 14, 2024
1 parent ffbd24e commit 6b7343b
Showing 6 changed files with 11 additions and 11 deletions.
costs/__tests__/ensureOnlyOne.ts (4 changes: 2 additions & 2 deletions)
@@ -283,7 +283,7 @@ WHEN (request_response_log.model = 'gpt-4-32k-0613') THEN 60000 * request_respon
WHEN (request_response_log.model = 'gpt-4-0125-preview') THEN 10000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-4-1106-preview') THEN 10000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-4-1106-vision-preview') THEN 10000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
-WHEN (request_response_log.model = 'gpt-4o') THEN 15000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
+WHEN (request_response_log.model = 'gpt-4o') THEN 5000 * request_response_log.prompt_tokens + 15000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-4o-2024-05-13') THEN 15000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-3.5-turbo-0613') THEN 1500 * request_response_log.prompt_tokens + 2000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-35-turbo-16k') THEN 3000 * request_response_log.prompt_tokens + 4000 * request_response_log.completion_tokens
@@ -584,7 +584,7 @@ WHEN (request_response_log.model = 'gpt-4-32k-0613') THEN 60000 * request_respon
WHEN (request_response_log.model = 'gpt-4-0125-preview') THEN 10000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-4-1106-preview') THEN 10000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-4-1106-vision-preview') THEN 10000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
-WHEN (request_response_log.model = 'gpt-4o') THEN 15000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
+WHEN (request_response_log.model = 'gpt-4o') THEN 5000 * request_response_log.prompt_tokens + 15000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-4o-2024-05-13') THEN 15000 * request_response_log.prompt_tokens + 30000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-3.5-turbo-0613') THEN 1500 * request_response_log.prompt_tokens + 2000 * request_response_log.completion_tokens
WHEN (request_response_log.model = 'gpt-35-turbo-16k') THEN 3000 * request_response_log.prompt_tokens + 4000 * request_response_log.completion_tokens
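For context, the CASE constants in this generated SQL appear to be USD-per-token rates scaled to integers; comparing the updated gpt-4o branch (5000 / 15000) with the per-token rates in the provider files below (0.000005 / 0.000015) suggests a scaling factor of 1e9. A minimal TypeScript sketch under that assumption; the helper name and SCALE constant are illustrative and not part of the repository:

// Minimal sketch, assuming the SQL constants are USD-per-token rates scaled by 1e9.
// gpt4oCostUsd is a hypothetical helper; only the 5000/15000 constants mirror the diff.
const SCALE = 1_000_000_000;

function gpt4oCostUsd(promptTokens: number, completionTokens: number): number {
  // Mirrors the updated branch: 5000 * prompt_tokens + 15000 * completion_tokens, then unscaled
  return (5000 * promptTokens + 15000 * completionTokens) / SCALE;
}

// 1,000 prompt tokens and 500 completion tokens:
// (5,000,000 + 7,500,000) / 1e9 = 0.0125 USD
console.log(gpt4oCostUsd(1000, 500)); // 0.0125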
costs/src/providers/openai/index.ts (4 changes: 2 additions & 2 deletions)
@@ -266,8 +266,8 @@ export const costs: ModelRow[] = [
value: "gpt-4o",
},
cost: {
-prompt_token: 0.000015,
-completion_token: 0.00003,
+prompt_token: 0.000005,
+completion_token: 0.000015,
},
showInPlayground: true,
},
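The ModelRow entries store USD-per-token rates, so a request's cost is presumably the rates multiplied by the token counts. A minimal sketch with the updated gpt-4o rates; the ModelCost type and computeRequestCost helper are illustrative names, not the repository's actual API:

// Illustrative sketch only; ModelCost and computeRequestCost are hypothetical names.
interface ModelCost {
  prompt_token: number;     // USD per prompt token
  completion_token: number; // USD per completion token
}

function computeRequestCost(
  cost: ModelCost,
  promptTokens: number,
  completionTokens: number
): number {
  return cost.prompt_token * promptTokens + cost.completion_token * completionTokens;
}

// Updated gpt-4o rates from this commit:
const gpt4o: ModelCost = { prompt_token: 0.000005, completion_token: 0.000015 };
console.log(computeRequestCost(gpt4o, 1000, 500)); // ≈ 0.0125 USD

This is consistent with the scaled SQL constants above: 5000 / 1e9 = 0.000005 and 15000 / 1e9 = 0.000015.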
valhalla/jawn/src/packages/cost/providers/openai/index.ts (4 changes: 2 additions & 2 deletions)
@@ -266,8 +266,8 @@ export const costs: ModelRow[] = [
value: "gpt-4o",
},
cost: {
-prompt_token: 0.000015,
-completion_token: 0.00003,
+prompt_token: 0.000005,
+completion_token: 0.000015,
},
showInPlayground: true,
},
web/packages/cost/providers/mappings.ts (2 changes: 1 addition & 1 deletion)
@@ -169,7 +169,7 @@ export const playgroundModels: {
}));
})
.flat()
-.filter((model) => model !== undefined && model.provider !== "AZURE") as {
+.filter((model) => model !== undefined) as {
name: string;
provider: ProviderName;
}[]) ?? [];
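This change appears to drop the Azure exclusion from the playground model list, so Azure-hosted entries are no longer filtered out. A small sketch of the before/after behavior, using hypothetical sample data:

// Hypothetical sample data; ProviderName here is an illustrative subset.
type ProviderName = "OPENAI" | "AZURE" | "ANTHROPIC";

const models: ({ name: string; provider: ProviderName } | undefined)[] = [
  { name: "gpt-4o", provider: "OPENAI" },
  { name: "gpt-4o", provider: "AZURE" },
  undefined,
];

// Previous behavior: Azure entries excluded along with undefined ones
const before = models.filter((m) => m !== undefined && m.provider !== "AZURE");

// New behavior: only undefined entries dropped; OpenAI and Azure rows both remain
const after = models.filter((m) => m !== undefined);

console.log(before.length, after.length); // 1, 2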
web/packages/cost/providers/openai/index.ts (4 changes: 2 additions & 2 deletions)
@@ -266,8 +266,8 @@ export const costs: ModelRow[] = [
value: "gpt-4o",
},
cost: {
-prompt_token: 0.000015,
-completion_token: 0.00003,
+prompt_token: 0.000005,
+completion_token: 0.000015,
},
showInPlayground: true,
},
worker/src/packages/cost/providers/openai/index.ts (4 changes: 2 additions & 2 deletions)
@@ -266,8 +266,8 @@ export const costs: ModelRow[] = [
value: "gpt-4o",
},
cost: {
-prompt_token: 0.000015,
-completion_token: 0.00003,
+prompt_token: 0.000005,
+completion_token: 0.000015,
},
showInPlayground: true,
},
