add tokenizer
- tokenizer.json +2 -2
- tokenizer_config.json +2 -6
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:444f68abc3b798ce3f285f1afff88f634a15514d53c11b49d5f9f640a006af03
+size 1049337
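tokenizer.json is tracked with Git LFS, so this commit only rewrites the pointer file (oid and size); the tokenizer data itself is fetched separately. A minimal verification sketch, assuming the blob has been pulled to a local tokenizer.json (hypothetical path), that checks the download against the updated pointer:

```python
import hashlib

# Values taken from the updated Git LFS pointer above.
EXPECTED_OID = "444f68abc3b798ce3f285f1afff88f634a15514d53c11b49d5f9f640a006af03"
EXPECTED_SIZE = 1049337

# Hypothetical local path; point this at the pulled file in your clone.
with open("tokenizer.json", "rb") as f:
    blob = f.read()

assert len(blob) == EXPECTED_SIZE, "size does not match the LFS pointer"
assert hashlib.sha256(blob).hexdigest() == EXPECTED_OID, "sha256 does not match the LFS pointer"
```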
    	
tokenizer_config.json CHANGED
@@ -1,9 +1,5 @@
 {
-  "
-
-    "attention_mask"
-  ],
-  "name_or_path": "hyunwoongko/kobart",
-  "special_tokens_map_file": "/root/.cache/huggingface/transformers/a87d2ed77831bb40ce806a97c04126addf5ecc82b3e23ecf916b2a4acdb9c29a.c23d5e62137984cf842a885705037b25b156747d145406702932d5f5d5e7c88e",
+  "name_or_path": "naem1023-bart-v2-x-fp32",
+  "special_tokens_map_file": "/home/ubuntu/.cache/huggingface/transformers/a87d2ed77831bb40ce806a97c04126addf5ecc82b3e23ecf916b2a4acdb9c29a.c23d5e62137984cf842a885705037b25b156747d145406702932d5f5d5e7c88e",
   "tokenizer_class": "PreTrainedTokenizerFast"
 }
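The config update points name_or_path at "naem1023-bart-v2-x-fp32" instead of the base "hyunwoongko/kobart" and keeps tokenizer_class as PreTrainedTokenizerFast. A minimal loading sketch, assuming a hypothetical Hub id "naem1023/bart-v2-x-fp32" (only the local name_or_path string appears in the diff, so the real repository id may differ):

```python
from transformers import AutoTokenizer

# Hypothetical repository id inferred from name_or_path; replace with the
# actual Hub id, or with a local directory containing tokenizer.json and
# tokenizer_config.json from this commit.
tokenizer = AutoTokenizer.from_pretrained("naem1023/bart-v2-x-fp32")

# tokenizer_class is PreTrainedTokenizerFast, so this is a fast tokenizer
# backed by the LFS-tracked tokenizer.json.
encoding = tokenizer("안녕하세요")  # "hello" in Korean, since the base model is KoBART
print(encoding["input_ids"])
```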