Skip to content

Commit

Permalink
Revert "try comparing to hardcoding"
Browse files Browse the repository at this point in the history
This reverts commit 0ed7f31.
  • Loading branch information
thejaminator committed May 3, 2023
1 parent 0ed7f31 commit 69bbf64
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 111 deletions.
104 changes: 0 additions & 104 deletions elk/utils/llama.py

This file was deleted.

9 changes: 2 additions & 7 deletions elk/utils/multi_gpu.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@

from elk.utils import instantiate_model, select_usable_devices
from elk.utils.gpu_utils import get_available_memory_for_devices
from elk.utils.llama import get_llama_65b_8bit_device_map

if TYPE_CHECKING:
from elk import Extract
Expand Down Expand Up @@ -121,12 +120,8 @@ def create_device_map(
# but found at least two devices, cuda:0 and cuda:1
maybe_transformer_class: Type[Module] | None = get_transformer_layer_cls(model)
dont_split = [maybe_transformer_class.__name__] if maybe_transformer_class else []
# autodevice_map = infer_auto_device_map(
# model, no_split_module_classes=dont_split, max_memory=max_memory_used_devices
# )
autodevice_map = get_llama_65b_8bit_device_map(
first_device=model_devices.first_device,
second_device=model_devices.other_devices[0],
autodevice_map = infer_auto_device_map(
model, no_split_module_classes=dont_split, max_memory=max_memory_used_devices
)

if verbose:
Expand Down

0 comments on commit 69bbf64

Please sign in to comment.