
Commit 4fb416a

Lighteval math (#630)
* doc:model_parallel
* fix:task lighteval/MATH
1 parent 10d9c5c commit 4fb416a

4 files changed: +31 -31 lines

docs/source/quicktour.mdx

Lines changed: 1 addition & 1 deletion
@@ -129,7 +129,7 @@ accelerate).
 - **add_special_tokens** (bool, optional, defaults to True): Whether to add special tokens to the input sequences.
   If `None`, the default value will be set to `True` for seq2seq models (e.g. T5) and
   `False` for causal models.
-- **model_parallel** (bool, optional, defaults to False):
+- **model_parallel** (bool, optional, defaults to None):
   True/False: force to use or not the `accelerate` library to load a large
   model across multiple devices.
   Default: None which corresponds to comparing the number of processes with
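
For context, the new `None` default means the decision is deferred to load time rather than hard-coded to `False`. A minimal sketch of the kind of resolution the docstring describes; the function and argument names here are illustrative assumptions, not lighteval's actual API:

# Illustrative only: how a None default for model_parallel might resolve.
# resolve_model_parallel, num_local_processes and num_gpus are hypothetical
# names, not lighteval's real internals.
def resolve_model_parallel(model_parallel, num_local_processes, num_gpus):
    if model_parallel is not None:
        return model_parallel  # user explicitly forced True/False
    # None: infer by comparing the process count with the available GPUs,
    # as the docstring above describes
    return num_local_processes < num_gpus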

examples/nanotron/custom_evaluation_tasks.py

Lines changed: 1 addition & 1 deletion
@@ -256,7 +256,7 @@ def __init__(
         self,
         name,
         prompt_function=prompt.math,
-        hf_repo="lighteval/MATH",
+        hf_repo="DigitalLearningGmbH/MATH-lighteval",
         hf_subset=None,
         metric=[Metrics.quasi_exact_match_math],
         hf_avail_splits=None,
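
A hedged usage sketch of the constructor above; the class name `CustomMathEvaluationTask` is an assumption based on the `__init__` shown in this diff, and the snippet is meant to run inside examples/nanotron/custom_evaluation_tasks.py itself (it is not an installable module):

# Assumes this file defines CustomMathEvaluationTask with the constructor
# shown in the diff above; the class name is an assumption.
algebra_task = CustomMathEvaluationTask(
    name="math:algebra",
    hf_subset="algebra",
    # hf_repo is not passed: per this commit it now defaults to
    # "DigitalLearningGmbH/MATH-lighteval"
)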

src/lighteval/models/transformers/transformers_model.py

Lines changed: 1 addition & 1 deletion
@@ -112,7 +112,7 @@ class TransformersModelConfig:
         add_special_tokens (bool, optional, defaults to True): Whether to add special tokens to the input sequences.
             If `None`, the default value will be set to `True` for seq2seq models (e.g. T5) and
             `False` for causal models.
-        model_parallel (bool, optional, defaults to False):
+        model_parallel (bool, optional, defaults to None):
             True/False: force to use or not the `accelerate` library to load a large
             model across multiple devices.
             Default: None which corresponds to comparing the number of processes with
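
A minimal configuration sketch for the class documented above. Only `model_parallel` is taken from this diff; the model-identifier field name and its value are assumptions for illustration:

from lighteval.models.transformers.transformers_model import TransformersModelConfig

# model_parallel is the field documented above: leaving it as None lets
# lighteval decide from the process/GPU layout, True/False forces the choice.
# The "pretrained" field name and "gpt2" value are assumptions.
config = TransformersModelConfig(
    pretrained="gpt2",
    model_parallel=None,
)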

src/lighteval/tasks/default_tasks.py

Lines changed: 28 additions & 28 deletions
@@ -9665,9 +9665,9 @@
     name="math:algebra",
     suite=["lighteval", "math"],
     prompt_function=prompt.math,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="algebra",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9681,9 +9681,9 @@
     name="math:counting_and_probability",
     suite=["lighteval", "math"],
     prompt_function=prompt.math,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="counting_and_probability",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9697,9 +9697,9 @@
     name="math:geometry",
     suite=["lighteval", "math"],
     prompt_function=prompt.math,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="geometry",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9713,9 +9713,9 @@
     name="math:intermediate_algebra",
     suite=["lighteval", "math"],
     prompt_function=prompt.math,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="intermediate_algebra",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9729,9 +9729,9 @@
     name="math:number_theory",
     suite=["lighteval", "math"],
     prompt_function=prompt.math,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="number_theory",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9745,9 +9745,9 @@
     name="math:prealgebra",
     suite=["lighteval", "math"],
     prompt_function=prompt.math,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="prealgebra",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9761,9 +9761,9 @@
     name="math:precalculus",
     suite=["lighteval", "math"],
     prompt_function=prompt.math,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="precalculus",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9777,9 +9777,9 @@
     name="math_cot:algebra",
     suite=["lighteval", "math"],
     prompt_function=prompt.math_cot,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="algebra",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9793,9 +9793,9 @@
     name="math_cot:counting_and_probability",
     suite=["lighteval", "math"],
     prompt_function=prompt.math_cot,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="counting_and_probability",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9809,9 +9809,9 @@
     name="math_cot:geometry",
     suite=["lighteval", "math"],
     prompt_function=prompt.math_cot,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="geometry",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9825,9 +9825,9 @@
     name="math_cot:intermediate_algebra",
     suite=["lighteval", "math"],
     prompt_function=prompt.math_cot,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="intermediate_algebra",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9841,9 +9841,9 @@
     name="math_cot:number_theory",
     suite=["lighteval", "math"],
     prompt_function=prompt.math_cot,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="number_theory",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9857,9 +9857,9 @@
     name="math_cot:prealgebra",
     suite=["lighteval", "math"],
     prompt_function=prompt.math_cot,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="prealgebra",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
@@ -9873,9 +9873,9 @@
     name="math_cot:precalculus",
     suite=["lighteval", "math"],
     prompt_function=prompt.math_cot,
-    hf_repo="lighteval/MATH",
+    hf_repo="DigitalLearningGmbH/MATH-lighteval",
     hf_subset="precalculus",
-    hf_avail_splits=["train", "test", "validation"],
+    hf_avail_splits=["train", "test"],
     evaluation_splits=["test"],
     few_shots_split=None,
     few_shots_select=None,
