prgrmc committed on
Commit
8b1855d
·
1 Parent(s): 5f7211a

update bitsandbytes error

Browse files
Files changed (2) hide show
  1. helper.py +1 -1
  2. requirements.txt +1 -1
helper.py CHANGED
@@ -72,7 +72,7 @@ def initialize_model_pipeline(model_name, force_cpu=False):
72
  # Use 8-bit quantization for memory efficiency
73
  model = AutoModelForCausalLM.from_pretrained(
74
  model_name,
75
- load_in_8bit=True,
76
  torch_dtype=MODEL_CONFIG["main_model"]["dtype"],
77
  use_cache=True,
78
  device_map="auto",
 
72
  # Use 8-bit quantization for memory efficiency
73
  model = AutoModelForCausalLM.from_pretrained(
74
  model_name,
75
+ load_in_8bit=False if device == "cpu" else True,
76
  torch_dtype=MODEL_CONFIG["main_model"]["dtype"],
77
  use_cache=True,
78
  device_map="auto",
requirements.txt CHANGED
@@ -64,7 +64,7 @@ tomlkit==0.12.0
64
  torch==2.5.1
65
  torchvision==0.20.1
66
  tqdm==4.67.1
67
- transformers==4.46.3
68
  typer==0.14.0
69
  typing_extensions==4.12.2
70
  tzdata==2024.2
 
64
  torch==2.5.1
65
  torchvision==0.20.1
66
  tqdm==4.67.1
67
+ transformers>=4.46.3
68
  typer==0.14.0
69
  typing_extensions==4.12.2
70
  tzdata==2024.2