ArthurZ HF Staff committed on
Commit
14502e2
·
verified ·
1 Parent(s): 3706dd7

Update CircleCI artifacts for PR 40546 (53053ac85dcc)

Browse files
pr-40546/sha-53053ac85dcc/failure_summary.json ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "failures": [
3
+ {
4
+ "job_name": "tests_tokenization",
5
+ "test_name": "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_all_special_ids",
6
+ "short_error": "AttributeError: LlamaTokenizer(name_or_path='hf-internal-testing/namespace-mistralai-repo_name-Mistral-Small-3.1-24B-Instruct-2503', vocab_size=3, model_max_length=1000000000000000019884624838656, padding_side='left', truncation_side='right', special_tokens={'bos_token': '<s>', 'eos_token': '</s>', 'unk_token': '<unk>'}, added_tokens_decoder={",
7
+ "error": "/usr/local/lib/python3.10/unittest/mock.py:1420: AttributeError: LlamaTokenizer(name_or_path='hf-internal-testing/namespace-mistralai-repo_name-Mistral-Small-3.1-24B-Instruct-2503', vocab_size=3, model_max_length=1000000000000000019884624838656, padding_side='left', truncation_side='right', special_tokens={'bos_token': '<s>', 'eos_token': '</s>', 'unk_token': '<unk>'}, added_tokens_decoder={",
8
+ "model_name": null
9
+ },
10
+ {
11
+ "job_name": "tests_tokenization",
12
+ "test_name": "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_decode_on_batch",
13
+ "short_error": "AssertionError: Lists differ: ['', ''] != ['Hello, world!', 'Hello, world !']",
14
+ "error": "/usr/local/lib/python3.10/unittest/case.py:675: AssertionError: Lists differ: ['', ''] != ['Hello, world!', 'Hello, world !']",
15
+ "model_name": null
16
+ },
17
+ {
18
+ "job_name": "tests_tokenization",
19
+ "test_name": "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_encode",
20
+ "short_error": "AssertionError: Lists differ: [1] != [1, 22177, 1044, 4304, 1033]",
21
+ "error": "/usr/local/lib/python3.10/unittest/case.py:675: AssertionError: Lists differ: [1] != [1, 22177, 1044, 4304, 1033]",
22
+ "model_name": null
23
+ },
24
+ {
25
+ "job_name": "tests_tokenization",
26
+ "test_name": "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_get_special_tokens_mask",
27
+ "short_error": "NotImplementedError: LlamaTokenizer does not implement get_special_tokens_mask for non-formatted sequences",
28
+ "error": "/usr/local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py:1363: NotImplementedError: LlamaTokenizer does not implement get_special_tokens_mask for non-formatted sequences",
29
+ "model_name": null
30
+ },
31
+ {
32
+ "job_name": "tests_tokenization",
33
+ "test_name": "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_get_vocab",
34
+ "short_error": "AssertionError: 3 == 3",
35
+ "error": "/usr/local/lib/python3.10/unittest/case.py:854: AssertionError: 3 == 3",
36
+ "model_name": null
37
+ },
38
+ {
39
+ "job_name": "tests_tokenization",
40
+ "test_name": "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_list_batch_encoding_input",
41
+ "short_error": "ValueError: Asking to pad but the tokenizer does not have a padding token. Please select a token to use as `pad_token` `(tokenizer.pad_token = tokenizer.eos_token e.g.)` or add a new pad token via `tokenizer.add_special_tokens({'pad_token': '[PAD]'})`.",
42
+ "error": "/usr/local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py:2606: ValueError: Asking to pad but the tokenizer does not have a padding token. Please select a token to use as `pad_token` `(tokenizer.pad_token = tokenizer.eos_token e.g.)` or add a new pad token via `tokenizer.add_special_tokens({'pad_token': '[PAD]'})`.",
43
+ "model_name": null
44
+ }
45
+ ],
46
+ "by_test": {
47
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_all_special_ids": {
48
+ "count": 1,
49
+ "errors": {
50
+ "/usr/local/lib/python3.10/unittest/mock.py:1420: AttributeError: LlamaTokenizer(name_or_path='hf-internal-testing/namespace-mistralai-repo_name-Mistral-Small-3.1-24B-Instruct-2503', vocab_size=3, model_max_length=1000000000000000019884624838656, padding_side='left', truncation_side='right', special_tokens={'bos_token': '<s>', 'eos_token': '</s>', 'unk_token': '<unk>'}, added_tokens_decoder={": 1
51
+ },
52
+ "jobs": [
53
+ "tests_tokenization"
54
+ ],
55
+ "variants": [
56
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_all_special_ids"
57
+ ]
58
+ },
59
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_decode_on_batch": {
60
+ "count": 1,
61
+ "errors": {
62
+ "/usr/local/lib/python3.10/unittest/case.py:675: AssertionError: Lists differ: ['', ''] != ['Hello, world!', 'Hello, world !']": 1
63
+ },
64
+ "jobs": [
65
+ "tests_tokenization"
66
+ ],
67
+ "variants": [
68
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_decode_on_batch"
69
+ ]
70
+ },
71
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_encode": {
72
+ "count": 1,
73
+ "errors": {
74
+ "/usr/local/lib/python3.10/unittest/case.py:675: AssertionError: Lists differ: [1] != [1, 22177, 1044, 4304, 1033]": 1
75
+ },
76
+ "jobs": [
77
+ "tests_tokenization"
78
+ ],
79
+ "variants": [
80
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_encode"
81
+ ]
82
+ },
83
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_get_special_tokens_mask": {
84
+ "count": 1,
85
+ "errors": {
86
+ "/usr/local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py:1363: NotImplementedError: LlamaTokenizer does not implement get_special_tokens_mask for non-formatted sequences": 1
87
+ },
88
+ "jobs": [
89
+ "tests_tokenization"
90
+ ],
91
+ "variants": [
92
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_get_special_tokens_mask"
93
+ ]
94
+ },
95
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_get_vocab": {
96
+ "count": 1,
97
+ "errors": {
98
+ "/usr/local/lib/python3.10/unittest/case.py:854: AssertionError: 3 == 3": 1
99
+ },
100
+ "jobs": [
101
+ "tests_tokenization"
102
+ ],
103
+ "variants": [
104
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_get_vocab"
105
+ ]
106
+ },
107
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_list_batch_encoding_input": {
108
+ "count": 1,
109
+ "errors": {
110
+ "/usr/local/lib/python3.10/site-packages/transformers/tokenization_utils_base.py:2606: ValueError: Asking to pad but the tokenizer does not have a padding token. Please select a token to use as `pad_token` `(tokenizer.pad_token = tokenizer.eos_token e.g.)` or add a new pad token via `tokenizer.add_special_tokens({'pad_token': '[PAD]'})`.": 1
111
+ },
112
+ "jobs": [
113
+ "tests_tokenization"
114
+ ],
115
+ "variants": [
116
+ "tests/test_tokenization_mistral_common.py::TestMistralCommonBackend::test_list_batch_encoding_input"
117
+ ]
118
+ }
119
+ },
120
+ "by_model": {}
121
+ }