# Fragment of the subtask-aggregation helper; the enclosing signature below is
# reconstructed so that the snippet parses on its own.
def aggregate_subtask_metrics(metrics, sizes, weight_by_size=True):
    if not weight_by_size:
        # Weight every subtask equally instead of by its number of samples.
        sizes = [1] * len(sizes)
    assert len(metrics) == len(sizes)
    return sum(metric * size for metric, size in zip(metrics, sizes)) / sum(sizes)
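# Illustrative check of the weighted mean above (numbers are made up):
# metrics=[0.5, 0.8] with sizes=[100, 300] gives (0.5*100 + 0.8*300) / 400 = 0.725,
# while weight_by_size=False reduces to the plain mean, (0.5 + 0.8) / 2 = 0.65.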
# File: lm-evaluation-harness-main/lm_eval/api/model.py
import abc
import hashlib
import json
import logging
import os
from typing import Dict, List, Optional, Tuple, Type, TypeVar
import transformers
from sqlitedict import SqliteDict
from tqdm import tqdm
from lm_eval import utils
eval_logger = logging.getLogger('lm-eval')
T = TypeVar('T', bound='LM')
class LM(abc.ABC):
    def __init__(self) -> None:
        # Base interface that every LM backend implements; rank/world_size are
        # overridden by subclasses that shard requests across processes.
        self._rank = 0
        self._world_size = 1
        self.cache_hook = CacheHook(None)
    @abc.abstractmethod
    def loglikelihood(self, requests) -> List[Tuple[float, bool]]:
        # Return (log-probability, is-greedy) for each context/continuation pair.
        pass
    @abc.abstractmethod
    def loglikelihood_rolling(self, requests) -> List[Tuple[float]]:
        # Return rolling log-probabilities over each request's full string.
        pass
    @abc.abstractmethod
    def generate_until(self, requests) -> List[str]:
        # Return generated text for each request, stopping at its stop sequences.
        pass
    def apply_chat_template(self, chat_history: List[Dict[str, str]]) -> str:
        raise NotImplementedError("To use this model with chat templates, please implement the 'apply_chat_template' method for your model type.")
    @classmethod
    def create_from_arg_string(cls: Type[T], arg_string: str, additional_config: Optional[dict] = None) -> T:
        # Build an instance from a comma-separated "key=value" string, merging in
        # any non-None entries from additional_config.
        additional_config = {} if additional_config is None else additional_config
        args = utils.simple_parse_args_string(arg_string)
        args2 = {k: v for k, v in additional_config.items() if v is not None}
        return cls(**args, **args2)
    @classmethod
    def create_from_arg_obj(cls: Type[T], arg_dict: dict, additional_config: Optional[dict] = None) -> T:
        # Same as create_from_arg_string, but starting from an already-parsed dict.
        additional_config = {} if additional_config is None else additional_config
        additional_config = {k: v for k, v in additional_config.items() if v is not None}
        return cls(**arg_dict, **additional_config)
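    # A minimal usage sketch (the subclass name and kwargs are illustrative, not
    # defined in this file):
    #   MyLM.create_from_arg_string("pretrained=gpt2,batch_size=8", {"device": "cuda:0"})
    # parses the string with utils.simple_parse_args_string, merges the non-None
    # entries of the override dict, and forwards everything as keyword arguments
    # to the hypothetical MyLM.__init__.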
    @property
    def rank(self):
        return self._rank
    @property
    def world_size(self):
        return self._world_size
    @property
    def tokenizer_name(self) -> str:
        raise NotImplementedError("To use this model with chat templates, please implement the 'tokenizer_name' property.")
    @property
    def chat_template(self) -> str:
        raise NotImplementedError("To use this model with chat templates, please implement the 'chat_template' property.")
    def set_cache_hook(self, cache_hook) -> None:
        self.cache_hook = cache_hook
def hash_args(attr, args):
    dat = json.dumps([attr] + list(args))
    return hashlib.sha256(dat.encode('utf-8')).hexdigest()
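# Cache keys are content-addressed: for example, hash_args("loglikelihood", ("ctx", "cont"))
# SHA-256 hashes the JSON string '["loglikelihood", "ctx", "cont"]', so identical
# requests always map to the same SqliteDict entry.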
class CacheHook:
    def __init__(self, cachinglm) -> None:
        if cachinglm is None:
            # Hook is detached (no CachingLM wrapper); add_partial becomes a no-op.
            self.dbdict = None
            return
        self.dbdict = cachinglm.dbdict
    def add_partial(self, attr, req, res) -> None:
        if self.dbdict is None:
            return
        hsh = hash_args(attr, req)
        self.dbdict[hsh] = res
class CachingLM:
    def __init__(self, lm, cache_db) -> None:
        # Wraps an LM so that per-request results are cached in a SqliteDict at cache_db.
        self.lm = lm
        self.cache_db = cache_db
        if os.path.dirname(cache_db):
            os.makedirs(os.path.dirname(cache_db), exist_ok=True)
        self.dbdict = SqliteDict(cache_db, autocommit=True)
        # Attach a hook so the wrapped LM writes newly computed results back to the cache.
        lm.set_cache_hook(self.get_cache_hook())
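    # Usage sketch (the wrapped model and path are illustrative): after
    #   lm = CachingLM(my_lm, "eval_cache/my_lm.db")
    # repeated evaluations reuse cached per-request results from the SqliteDict,
    # while cache misses fall through to my_lm and are written back via the CacheHook.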