text
stringlengths 0
15.3k
|
---|
return value |
else: |
try: |
return getsource(value) |
except (TypeError, OSError): |
return str(value) |
class Task(abc.ABC):
    """Abstract base class for an evaluation task.

    Concrete subclasses implement the has_*/doc_* hooks; the constructor
    downloads the underlying HuggingFace dataset and initializes per-task
    state (caches, config, filters).
    """

    # Task version identifier; semantics not visible here — presumably bumped
    # when the task definition changes (TODO confirm against project docs).
    VERSION: Optional[Union[int, str]] = None
    # Dataset path/script name passed as `path=` to datasets.load_dataset.
    DATASET_PATH: Optional[str] = None
    # Dataset configuration (sub-set) passed as `name=` to datasets.load_dataset.
    DATASET_NAME: Optional[str] = None
    # Output type of this task's requests; OutputType semantics defined
    # elsewhere in the project — TODO confirm.
    OUTPUT_TYPE: Optional[OutputType] = None
def __init__(
    self,
    data_dir: Optional[str] = None,
    cache_dir: Optional[str] = None,
    download_mode: Optional[datasets.DownloadMode] = None,
    config: Optional[Mapping] = None,
) -> None:
    """Download the dataset and set up per-instance state.

    :param data_dir: optional local data directory, forwarded to `download`.
    :param cache_dir: optional datasets cache directory, forwarded to `download`.
    :param download_mode: optional `datasets.DownloadMode`, forwarded to `download`.
    :param config: optional mapping copied into a fresh `TaskConfig`.
    """
    self.download(data_dir, cache_dir, download_mode)
    # Lazily-populated caches, filled elsewhere on first use.
    self._training_docs: Optional[list] = None
    self._fewshot_docs: Optional[list] = None
    self._instances: Optional[List[Instance]] = None
    # A falsy/absent config yields a default TaskConfig; otherwise the
    # mapping is shallow-copied so the caller's object is never shared.
    if config:
        self._config: TaskConfig = TaskConfig({**config})
    else:
        self._config = TaskConfig()
    self._filters = [build_filter_ensemble('none', [['take_first', None]])]
    self.fewshot_rnd: Optional[random.Random] = None
def download(self, data_dir: Optional[str] = None, cache_dir: Optional[str] = None, download_mode=None) -> None:
    """Fetch the task's dataset via `datasets.load_dataset` onto `self.dataset`.

    :param data_dir: optional local data directory for the dataset.
    :param cache_dir: optional cache directory for downloaded data.
    :param download_mode: optional `datasets.DownloadMode` controlling re-download.
    """
    load_kwargs = {
        'path': self.DATASET_PATH,
        'name': self.DATASET_NAME,
        'data_dir': data_dir,
        'cache_dir': cache_dir,
        'download_mode': download_mode,
    }
    self.dataset = datasets.load_dataset(**load_kwargs)
@property
def config(self) -> TaskConfig:
    """Read-only accessor for this task's `TaskConfig`."""
    return self._config
@abc.abstractmethod
def has_training_docs(self):
    """Return whether the task provides a training split; must be overridden."""
    pass
@abc.abstractmethod
def has_validation_docs(self):
    """Return whether the task provides a validation split; must be overridden."""
    pass
@abc.abstractmethod
def has_test_docs(self):
    """Return whether the task provides a test split; must be overridden."""
    pass
def training_docs(self) -> Iterable:
    """Return an iterable of training documents; empty unless overridden."""
    return []
def validation_docs(self) -> Iterable:
    """Return an iterable of validation documents; empty unless overridden."""
    return []
def test_docs(self) -> Iterable:
    """Return an iterable of test documents; empty unless overridden."""
    return []
def fewshot_docs(self) -> Iterable:
    """Return the document split used to draw few-shot examples.

    Prefers the training split, then the validation split; as a last
    resort falls back to the test split and logs a warning.
    """
    if self.has_training_docs():
        return self.training_docs()
    if self.has_validation_docs():
        return self.validation_docs()
    # No train/validation split available — warn, since drawing shots
    # from the evaluation split is discouraged.
    eval_logger.warning(f'[Task: {self.config.task}] has_training_docs and has_validation_docs are False, using test_docs as fewshot_docs but this is not recommended.')
    return self.test_docs()
def _process_doc(self, doc: dict) -> dict:
    """Per-document preprocessing hook; identity by default, override as needed."""
    return doc
@property
def instances(self) -> List[Instance]:
    """The built Instance list for this task (None until requests are built)."""
    return self._instances
def fewshot_examples(self, k, rnd):
    """Sample `k` documents from the training split without replacement.

    :param k: number of examples to draw.
    :param rnd: a `random.Random` instance supplying `.sample`.
    :return: list of `k` sampled training documents.
    """
    # Materialize and cache the training docs on first use.
    cached = self._training_docs
    if cached is None:
        cached = list(self.training_docs())
        self._training_docs = cached
    return rnd.sample(cached, k)
def doc_to_decontamination_query(self, doc):
    """Return the decontamination query for `doc`; subclasses must override."""
    raise NotImplementedError('Override doc_to_decontamination_query with document specific decontamination query.')
@abc.abstractmethod
def doc_to_text(self, doc):
    """Convert a document to its prompt/context text; must be overridden."""
    pass
@abc.abstractmethod
def doc_to_target(self, doc):
    """Convert a document to its target/answer; must be overridden."""
    pass
def build_all_requests(self, *, limit: Union[int, None]=None, rank: int=0, world_size: int=1, cache_requests: bool=False, rewrite_requests_cache: bool=False, system_instruction: Optional[str]=None, apply_chat_template: bool=False, fewshot_as_multiturn: bool=False, chat_template: Optional[Callable]=None, tokenizer_name: str='') -> None: |
og_limit = limit |
cache_key = f'requests-{self._config.task}-{self.config.num_fewshot}shot-rank{rank}-world_size{world_size}' |
cache_key += '-chat_template' if apply_chat_template else '' |
cache_key += '-fewshot_as_multiturn' if fewshot_as_multiturn else '' |
cache_key += f'-system_prompt_hash{utils.hash_string(system_instruction)}' if system_instruction is not None else '' |
cache_key += f'-tokenizer{tokenizer_name}' |
cached_instances = load_from_cache(file_name=cache_key) |
if cache_requests and cached_instances and (not rewrite_requests_cache): |
cached_instances = cached_instances[:limit] |
flattened_instances = [instance for instance_group in cached_instances for instance in instance_group] |
self._instances = flattened_instances |
return |
eval_logger.info(f'Building contexts for {self.config.task} on rank {rank}...') |
instances = [] |
if cache_requests and (not cached_instances or rewrite_requests_cache) and (limit is not None): |
limit = None |
doc_id_docs = list(self.doc_iterator(rank=rank, limit=limit, world_size=world_size)) |