# File: lm-evaluation-harness-main/lm_eval/loggers/utils.py
# NOTE: the start of this file is truncated in this extract; only the tail of
# `remove_none_pattern` survives, and the imports below are reconstructed from
# how they are used in the functions that follow.
import logging
import os
import subprocess
from pathlib import Path
from typing import Any, Dict, Optional, Union

import numpy as np
from torch.utils.collect_env import get_pretty_env_info
from transformers import __version__ as trans_version

logger = logging.getLogger(__name__)


    return (result, removed)

def _handle_non_serializable(o: Any) -> Union[int, str, list]:
    if isinstance(o, np.int64) or isinstance(o, np.int32):
        return int(o)
    elif isinstance(o, set):
        return list(o)
    else:
        return str(o)
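
# --- illustrative sketch (not part of the original module) -------------------
# `_handle_non_serializable` is written to be passed as the `default=` hook of
# `json.dumps`, so numpy integers and sets inside a results dict are coerced
# instead of raising TypeError. `_dump_results_example` is a hypothetical helper
# added here only to show that usage.
def _dump_results_example(results: Dict[str, Any]) -> str:
    import json
    # numpy ints -> int, sets -> list, anything else unknown -> str
    return json.dumps(results, indent=2, default=_handle_non_serializable)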

def get_commit_from_path(repo_path: Union[Path, str]) -> Optional[str]:
    try:
        git_folder = Path(repo_path, '.git')
        if git_folder.is_file():
            # .git may be a file (worktree / submodule) whose contents point at the real git dir.
            git_folder = Path(git_folder.parent, git_folder.read_text(encoding='utf-8').split('\n')[0].split(' ')[-1])
        if Path(git_folder, 'HEAD').exists():
            # HEAD usually reads "ref: refs/heads/<branch>"; resolve that ref to a commit hash.
            head_name = Path(git_folder, 'HEAD').read_text(encoding='utf-8').split('\n')[0].split(' ')[-1]
            head_ref = Path(git_folder, head_name)
            git_hash = head_ref.read_text(encoding='utf-8').replace('\n', '')
        else:
            git_hash = None
    except Exception as err:
        logger.debug(f'Failed to retrieve a Git commit hash from path: {str(repo_path)}. Error: {err}')
        return None
    return git_hash

def get_git_commit_hash():
    try:
        git_hash = subprocess.check_output(['git', 'describe', '--always']).strip()
        git_hash = git_hash.decode()
    except (subprocess.CalledProcessError, FileNotFoundError):
        # The git CLI failed or is not installed; fall back to reading .git/HEAD directly.
        git_hash = get_commit_from_path(os.getcwd())
    return git_hash
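
# --- illustrative sketch (not part of the original module) -------------------
# Both helpers return None when no commit can be determined, so callers can log
# the hash defensively. `_log_commit_example` is a hypothetical demo of that.
def _log_commit_example() -> None:
    commit = get_git_commit_hash()
    logger.debug(f'evaluation ran at commit: {commit or "unknown (not a git checkout)"}')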

def add_env_info(storage: Dict[str, Any]):
    try:
        pretty_env_info = get_pretty_env_info()
    except Exception as err:
        pretty_env_info = str(err)
    transformers_version = trans_version
    # also record the commit of the parent directory, in case this repo is used as a submodule
    upper_dir_commit = get_commit_from_path(Path(os.getcwd(), '..'))
    added_info = {
        'pretty_env_info': pretty_env_info,
        'transformers_version': transformers_version,
        'upper_git_hash': upper_dir_commit,
    }
    storage.update(added_info)

def add_tokenizer_info(storage: Dict[str, Any], lm):
    if getattr(lm, 'tokenizer', False):
        try:
            tokenizer_info = {
                'tokenizer_pad_token': [lm.tokenizer.pad_token, str(lm.tokenizer.pad_token_id)],
                'tokenizer_eos_token': [lm.tokenizer.eos_token, str(lm.tokenizer.eos_token_id)],
                'tokenizer_bos_token': [lm.tokenizer.bos_token, str(lm.tokenizer.bos_token_id)],
                'eot_token_id': getattr(lm, 'eot_token_id', None),
                'max_length': getattr(lm, 'max_length', None),
            }
            storage.update(tokenizer_info)
        except Exception as err:
            logger.debug(f'Logging detailed tokenizer info failed with {err}, skipping...')
    else:
        logger.debug("LM does not have a 'tokenizer' attribute, not logging tokenizer metadata to results.")

# File: lm-evaluation-harness-main/lm_eval/loggers/wandb_logger.py
import copy
import json
import logging
from typing import Any, Dict, List, Literal, Tuple

import numpy as np
import pandas as pd
from packaging.version import Version

from lm_eval.loggers.utils import _handle_non_serializable, remove_none_pattern

logger = logging.getLogger(__name__)

def get_wandb_printer() -> Literal['Printer']:
    from wandb.sdk.lib.printer import get_printer
    from wandb.sdk.wandb_settings import Settings

    printer = get_printer(Settings()._jupyter)
    return printer

class WandbLogger:
    def __init__(self, **kwargs) -> None:
        try:
            import wandb

            assert Version(wandb.__version__) >= Version('0.13.6')
            if Version(wandb.__version__) < Version('0.13.6'):
                wandb.require('report-editing:v0')
        except Exception as e:
            logger.warning(
                'To use the wandb reporting functionality please install wandb>=0.13.6.\n'
                'To install the latest version of wandb run `pip install wandb --upgrade`\n'
                f'{e}'
            )
        self.wandb_args: Dict[str, Any] = kwargs
        # attach to an existing W&B run if one is active; otherwise start a new one
        if wandb.run is None:
            self.run = wandb.init(**self.wandb_args)
        else:
            self.run = wandb.run
        self.printer = get_wandb_printer()

    def post_init(self, results: Dict[str, Any]) -> None:
        self.results: Dict[str, Any] = copy.deepcopy(results)
        self.task_names: List[str] = list(results.get('results', {}).keys())
        self.group_names: List[str] = list(results.get('groups', {}).keys())

    def _get_config(self) -> Dict[str, Any]:
        self.task_configs = self.results.get('configs', {})
        cli_configs = self.results.get('config', {})
        configs = {'task_configs': self.task_configs, 'cli_configs': cli_configs}
        return configs

    def _sanitize_results_dict(self) -> Tuple[Dict[str, str], Dict[str, Any]]: