fix: Set AutoModelForQuestionAnswering class path in config
Browse files

This depends on [this PR](https://huggingface.co/EuroBERT/EuroBERT-610m/discussions/12), which adds the architecture to the modeling script.
- config.json +2 -1
 
    	
        config.json
    CHANGED
    
    | 
         @@ -8,7 +8,8 @@ 
     | 
|
| 8 | 
         
             
                "AutoModelForPreTraining": "modeling_eurobert.EuroBertPreTrainedModel",
         
     | 
| 9 | 
         
             
                "AutoModelForMaskedLM": "modeling_eurobert.EuroBertForMaskedLM",
         
     | 
| 10 | 
         
             
                "AutoModelForSequenceClassification": "modeling_eurobert.EuroBertForSequenceClassification",
         
     | 
| 11 | 
         
            -
                "AutoModelForTokenClassification": "modeling_eurobert.EuroBertForTokenClassification"
         
     | 
| 
         | 
|
| 12 | 
         
             
              },
         
     | 
| 13 | 
         
             
              "attention_bias": false,
         
     | 
| 14 | 
         
             
              "attention_dropout": 0.0,
         
     | 
| 
         | 
|
| 8 | 
         
             
                "AutoModelForPreTraining": "modeling_eurobert.EuroBertPreTrainedModel",
         
     | 
| 9 | 
         
             
                "AutoModelForMaskedLM": "modeling_eurobert.EuroBertForMaskedLM",
         
     | 
| 10 | 
         
             
                "AutoModelForSequenceClassification": "modeling_eurobert.EuroBertForSequenceClassification",
         
     | 
| 11 | 
         
            +
                "AutoModelForTokenClassification": "modeling_eurobert.EuroBertForTokenClassification",
         
     | 
| 12 | 
         
            +
                "AutoModelForQuestionAnswering": "modeling_eurobert.EuroBertForQuestionAnswering"
         
     | 
| 13 | 
         
             
              },
         
     | 
| 14 | 
         
             
              "attention_bias": false,
         
     | 
| 15 | 
         
             
              "attention_dropout": 0.0,
         
     |