python3Packages.lm-eval: 0.4.8 -> 0.4.9.1; python3Packages.mlx-lm: 0.26.0 -> 0.26.3 (#434724)

commit 7dd627d233
Author: Someone
Date: 2025-08-19 12:40:59 +00:00
Committed by: GitHub
GPG Key ID: B5690EEEBB952194
2 changed files with 66 additions and 35 deletions
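As a quick smoke test of both bumps, a minimal sketch (not part of the commit), assuming the working directory is a nixpkgs checkout containing this change and a platform where mlx is available:

let
  pkgs = import ./. { }; # assumption: ./. is a nixpkgs checkout with this commit applied
in
pkgs.python3.withPackages (ps: [
  ps.lm-eval # 0.4.8 -> 0.4.9.1
  ps.mlx-lm # 0.26.0 -> 0.26.3
])

Building a file containing this expression with nix-build yields a single Python environment with both updated packages.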

pkgs/development/python-modules/lm-eval/default.nix

@@ -2,58 +2,76 @@
  lib,
  buildPythonPackage,
  fetchFromGitHub,
  # build-system
  setuptools-scm,
  # dependencies
  accelerate,
  aiohttp,
  antlr4-python3-runtime,
  causal-conv1d,
  datasets,
  dill,
  evaluate,
  hf-transfer,
  immutabledict,
  jsonlines,
  langdetect,
  mamba-ssm,
  more-itertools,
  nltk,
  numexpr,
  numpy,
  optimum,
  pandas,
  peft,
  pybind11,
  pytablewriter,
  pytestCheckHook,
  requests,
  rouge-score,
  sacrebleu,
  scikit-learn,
  sentencepiece,
  sqlitedict,
  sympy,
  tenacity,
  tiktoken,
  torch,
  tqdm,
  tqdm-multiprocess,
  transformers,
  vllm,
  wandb,
  word2number,
  zstandard,
  # optional-dependencies
  # api
  aiohttp,
  requests,
  tenacity,
  tiktoken,
  tqdm,
  # hf_transfer
  hf-transfer,
  # ifeval
  immutabledict,
  langdetect,
  nltk,
  # neuronx
  optimum,
  # mamba
  causal-conv1d,
  mamba-ssm,
  # math
  antlr4-python3-runtime,
  sympy,
  # sentencepiece
  sentencepiece,
  # vllm
  vllm,
  # wandb
  numpy,
  pandas,
  wandb,
  # tests
  pytestCheckHook,
  writableTmpDirAsHomeHook,
}:
buildPythonPackage rec {
  pname = "lm-eval";
  version = "0.4.8";
  version = "0.4.9.1";
  pyproject = true;
  src = fetchFromGitHub {
    owner = "EleutherAI";
    repo = "lm-evaluation-harness";
    tag = "v${version}";
    hash = "sha256-F8oy6XTovqiU7FQyuubRsiblSdvfZg9RPIyzRw2GH18=";
    hash = "sha256-N5NRRabjWxPchwOIkjqYTCKInCmVSY6T5cAmdxNbCkU=";
  };
  build-system = [
@@ -84,34 +102,34 @@ buildPythonPackage rec {
  optional-dependencies = {
    api = [
      requests
      aiohttp
      requests
      tenacity
      tqdm
      tiktoken
      tqdm
    ];
    hf_transfer = [ hf-transfer ];
    ifeval = [
      langdetect
      immutabledict
      langdetect
      nltk
    ];
    neuronx = [ optimum ] ++ optimum.optional-dependencies.neuronx;
    mamba = [
      mamba-ssm
      causal-conv1d
      mamba-ssm
    ];
    math = [
      sympy
      antlr4-python3-runtime
      sympy
    ];
    optimum = [ optimum ] ++ optimum.optional-dependencies.openvino;
    sentencepiece = [ sentencepiece ];
    vllm = [ vllm ];
    wandb = [
      wandb
      pandas
      numpy
      pandas
      wandb
    ];
    # Still missing dependencies for the following:
    # deepsparse, gptq, ibm_watsonx_ai, multilingual, promptsource, sparseml,
@@ -122,16 +140,16 @@ buildPythonPackage rec {
  nativeCheckInputs = [
    pytestCheckHook
    writableTmpDirAsHomeHook
  ]
  ++ optional-dependencies.api;
  preCheck = ''
    export HOME=$TMP
  '';
  disabledTests = [
    "test_deepsparse" # deepsparse is not available
    "test_model_tokenized_call_usage" # downloads a model
    # download models from the internet
    "test_get_batched_requests_with_no_ssl"
    "test_model_tokenized_call_usage"
  ];
  disabledTestPaths = [
@@ -142,9 +160,13 @@ buildPythonPackage rec {
"tests/test_prompt.py"
"tests/test_task_manager.py"
"tests/test_tasks.py"
"tests/test_unitxt_tasks.py"
# optimum-intel is not available
"tests/models/test_openvino.py"
# zeno-client is not packaged
"tests/scripts/test_zeno_visualize.py"
];
meta = {

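The extras regrouped in the hunks above are exposed on the package as optional-dependencies, which is also how the derivation's own check phase pulls in the api group (++ optional-dependencies.api). A short sketch of the same pattern from the consumer side, assuming pkgs is an imported nixpkgs:

pkgs.python3.withPackages (
  # lm-eval plus everything in its "api" extra (aiohttp, requests, tenacity, tiktoken, tqdm)
  ps: [ ps.lm-eval ] ++ ps.lm-eval.optional-dependencies.api
)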
pkgs/development/python-modules/mlx-lm/default.nix

@@ -2,13 +2,20 @@
  lib,
  buildPythonPackage,
  fetchFromGitHub,
  # build-system
  setuptools,
  # dependencies
  jinja2,
  mlx,
  numpy,
  protobuf,
  pyyaml,
  transformers,
  # tests
  lm-eval,
  sentencepiece,
  pytestCheckHook,
  writableTmpDirAsHomeHook,
@@ -40,9 +47,10 @@ buildPythonPackage rec {
  ];
  nativeCheckInputs = [
    writableTmpDirAsHomeHook
    lm-eval
    pytestCheckHook
    sentencepiece
    writableTmpDirAsHomeHook
  ];
  pythonImportsCheck = [
@@ -62,6 +70,7 @@ buildPythonPackage rec {
"tests/test_prompt_cache.py::TestPromptCache::test_cache_with_generate"
"tests/test_prompt_cache.py::TestPromptCache::test_trim_cache_with_generate"
# RuntimeError: [metal_kernel] No GPU back-end.
"tests/test_losses.py"
"tests/test_models.py::TestModels::test_bitnet"
];
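Both derivations follow the usual fetchFromGitHub + version pattern seen in the lm-eval hunks above, so a bump like these can be tried locally before it lands. A hedged sketch using overridePythonAttrs, reusing the pre-bump tag and hash that this commit replaces (the override itself is illustrative, not part of the change):

pkgs.python3Packages.lm-eval.overridePythonAttrs (old: rec {
  version = "0.4.8"; # the release this commit moves away from
  src = pkgs.fetchFromGitHub {
    owner = "EleutherAI";
    repo = "lm-evaluation-harness";
    tag = "v${version}";
    hash = "sha256-F8oy6XTovqiU7FQyuubRsiblSdvfZg9RPIyzRw2GH18=";
  };
})

For any other tag the hash has to be refreshed, for example by building once with lib.fakeHash and copying the correct hash from the mismatch error.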