Commit

formatting
drisspg committed Aug 12, 2023
1 parent 53b53db commit b1bdcdd
Showing 2 changed files with 6 additions and 13 deletions.
@@ -24,4 +24,4 @@ def tokenizer_name(self) -> str:

    @property
    def prefix_token(self) -> str:
-        return self.end_of_text_token
+        return self.end_of_text_token
17 changes: 5 additions & 12 deletions src/helm/proxy/clients/http_client.py
@@ -68,14 +68,10 @@ def do_it():
                response, cached = do_it(), False

            tokens = [
-                Token(
-                    text=token["text"], logprob=token["logprob"], top_logprobs=token["top_logprob"]
-                )
+                Token(text=token["text"], logprob=token["logprob"], top_logprobs=token["top_logprob"])
                for token in response["tokens"]
            ]
-            completions = [
-                Sequence(text=response["text"], logprob=response["logprob"], tokens=tokens)
-            ]
+            completions = [Sequence(text=response["text"], logprob=response["logprob"], tokens=tokens)]

            return RequestResult(
                success=True,
@@ -87,9 +83,7 @@ def do_it():
            )
        except requests.exceptions.RequestException as e:
            error: str = f"Request error: {e}"
-            return RequestResult(
-                success=False, cached=False, error=error, completions=[], embedding=[]
-            )
+            return RequestResult(success=False, cached=False, error=error, completions=[], embedding=[])

    def tokenize(self, request: TokenizationRequest) -> TokenizationRequestResult:
        cache_key = asdict(request)
@@ -107,15 +101,14 @@ def do_it():
                response.raise_for_status()
                response_data = response.json()
                return response_data
+
            if self.do_cache:
                result, cached = self.cache.get(cache_key, wrap_request_time(do_it))
            else:
                result, cached = do_it(), False
        except Exception as e:
            error: str = f"Local Model error: {e}"
-            return TokenizationRequestResult(
-                success=False, cached=False, error=error, text="", tokens=[]
-            )
+            return TokenizationRequestResult(success=False, cached=False, error=error, text="", tokens=[])

        return TokenizationRequestResult(
            success=True,
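For context, every hunk in http_client.py above touches the same cache-or-call pattern: a local do_it closure performs the HTTP call, self.cache.get(cache_key, wrap_request_time(do_it)) either returns a cached response or invokes the closure, and any exception is converted into a failed result object. Below is a minimal, self-contained sketch of that pattern; SimpleCache, Result, and make_request are hypothetical stand-ins for illustration only, not HELM's actual Cache, RequestResult, or client code.

from dataclasses import dataclass, field
from typing import Any, Callable, Dict, List, Optional, Tuple


@dataclass
class Result:
    # Hypothetical stand-in for HELM's RequestResult / TokenizationRequestResult.
    success: bool
    cached: bool
    error: Optional[str] = None
    completions: List[Any] = field(default_factory=list)


class SimpleCache:
    # Hypothetical stand-in for HELM's Cache: get() returns (value, was_cached).
    def __init__(self) -> None:
        self._store: Dict[str, Any] = {}

    def get(self, key: str, compute: Callable[[], Any]) -> Tuple[Any, bool]:
        if key in self._store:
            return self._store[key], True
        value = compute()
        self._store[key] = value
        return value, False


def make_request(cache: SimpleCache, cache_key: str, do_cache: bool) -> Result:
    try:
        def do_it() -> Dict[str, Any]:
            # The real client issues an HTTP POST here and calls
            # response.raise_for_status() / response.json(); this stub just
            # returns a canned payload so the sketch runs on its own.
            return {"text": "hello", "logprob": 0.0, "tokens": []}

        if do_cache:
            response, cached = cache.get(cache_key, do_it)
        else:
            response, cached = do_it(), False
        return Result(success=True, cached=cached, completions=[response["text"]])
    except Exception as e:
        error: str = f"Request error: {e}"
        return Result(success=False, cached=False, error=error)


if __name__ == "__main__":
    cache = SimpleCache()
    print(make_request(cache, "key-1", do_cache=True))  # computed on first call
    print(make_request(cache, "key-1", do_cache=True))  # served from the cache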
