It seems `T5WithLMHeadModel` is outdated
Leaving `T5WithLMHeadModel` in for potential older versions of the lib.
But also adding `T5ForConditionalGeneration` so that `infer_framework_load_model` from the pipeline can load the model directly, without needing to refer to the pipeline task.
https://github.com/huggingface/transformers/blob/main/src/transformers/models/t5/modeling_t5.py#L1466
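For context, a minimal sketch of that loading path: the loader reads the `architectures` list from the config and resolves each name against the `transformers` module, so listing `T5ForConditionalGeneration` gives recent versions a class name they can still find. The repo id below (`t5-small`) is only a placeholder; the actual repo this change targets is not named here.

```python
# Sketch: resolve the model class from config.json's "architectures" list,
# similar in spirit to what infer_framework_load_model does internally.
from transformers import AutoConfig
import transformers

model_id = "t5-small"  # placeholder repo id for illustration
config = AutoConfig.from_pretrained(model_id)

model = None
for architecture in config.architectures or []:
    # Older configs may list names (e.g. "T5WithLMHeadModel") that no longer
    # exist in current transformers releases; skip those and try the next one.
    model_class = getattr(transformers, architecture, None)
    if model_class is not None:
        model = model_class.from_pretrained(model_id)
        break

print(type(model).__name__)  # e.g. T5ForConditionalGeneration
```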
- config.json +2 -1

config.json CHANGED
@@ -1,6 +1,7 @@
 {
   "architectures": [
-    "T5WithLMHeadModel"
+    "T5WithLMHeadModel",
+    "T5ForConditionalGeneration"
   ],
   "d_ff": 2048,
   "d_kv": 64,