Update Assistant model import
artitw committed Jan 13, 2024
1 parent 4a19a72 commit 1c78d23
Showing 2 changed files with 8 additions and 11 deletions.
3 changes: 2 additions & 1 deletion setup.py
@@ -5,7 +5,7 @@
 
 setuptools.setup(
     name="text2text",
-    version="1.3.5",
+    version="1.3.6",
     author="artitw",
     author_email="[email protected]",
     description="Text2Text: Crosslingual NLP/G toolkit",
@@ -29,6 +29,7 @@
         'langchain',
         'googledrivedownloader',
         'numpy',
+        'optimum',
         'pandas',
         'scikit-learn',
         'scipy',
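
Note (not part of the commit): the added optimum requirement pairs with the loading change in assistant.py below. transformers resolves GPTQ-quantized checkpoints through optimum (together with auto-gptq), so AutoModelForCausalLM.from_pretrained needs both packages available at runtime. A minimal sketch, under that assumption, of a guard that makes the dependency explicit:

# Hypothetical guard, not in the repo: fail early if the GPTQ loading path
# used by text2text.Assistant cannot work in the current environment.
import importlib.util

for pkg in ("optimum", "auto_gptq"):
    if importlib.util.find_spec(pkg) is None:
        raise ImportError(f"{pkg} is required to load GPTQ models via transformers")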
16 changes: 6 additions & 10 deletions text2text/assistant.py
@@ -1,8 +1,7 @@
 import logging
 import pandas as pd
 import text2text as t2t
-from transformers import AutoTokenizer, logging
-from auto_gptq import AutoGPTQForCausalLM
+from transformers import AutoModelForCausalLM, AutoTokenizer, logging
 
 logging.set_verbosity(logging.CRITICAL)
 
@@ -14,15 +13,12 @@ class Assistant(t2t.Transformer):
   def __init__(self, **kwargs):
     model_name_or_path = kwargs.get("model_name_or_path", "TheBloke/vicuna-13B-v1.5-16K-GPTQ")
 
-    self.__class__.tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
+    self.__class__.tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True, padding_side='left')
 
-    self.__class__.model = AutoGPTQForCausalLM.from_quantized(model_name_or_path,
-        use_safetensors=True,
-        trust_remote_code=False,
-        device="cuda:0",
-        use_triton=False,
-        quantize_config=None
-    )
+    self.__class__.model = AutoModelForCausalLM.from_pretrained(model_name_or_path,
+        device_map="auto",
+        trust_remote_code=False,
+        revision="main")
 
   def completion_preprocess(self, input_lines, retriever=None, **kwargs):
     df = pd.DataFrame({"input_line": input_lines})
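
For context, a minimal usage sketch of the new loading path (not part of this commit): with optimum and auto-gptq installed, transformers loads the GPTQ checkpoint directly through AutoModelForCausalLM.from_pretrained, so the separate auto_gptq import is no longer needed. The prompt text and generation settings below are illustrative assumptions, not repository code.

# Illustrative only: mirrors the new __init__ in text2text/assistant.py.
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "TheBloke/vicuna-13B-v1.5-16K-GPTQ"  # default used by Assistant

tokenizer = AutoTokenizer.from_pretrained(model_id, use_fast=True, padding_side='left')
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",       # lets accelerate place the quantized weights
    trust_remote_code=False,
    revision="main",
)

# Example prompt (assumed, not from the repo); Vicuna-style chat formatting.
inputs = tokenizer("USER: Hello, who are you?\nASSISTANT:", return_tensors="pt").to(model.device)
output_ids = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))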
