khaiphan29 committed
Commit
2c62878
1 Parent(s): 0217fc8

Upload folder using huggingface_hub

main.py CHANGED
@@ -1,4 +1,7 @@
 #uvicorn main:app --reload
+import os
+os.environ['HF_HOME'] = './src/cache/'
+
 from fastapi import FastAPI, status
 from fastapi.responses import Response, JSONResponse
 from pydantic import BaseModel
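
A note on the change above: huggingface_hub resolves HF_HOME into its cache paths at import time, so the variable must be set before any Hugging Face library is imported for './src/cache/' to take effect. A minimal sketch of the pattern (the model id below is a placeholder, not taken from this repo):

# Set the cache location first: once huggingface_hub or transformers has been
# imported, the default ~/.cache/huggingface path is already fixed for this process.
import os
os.environ['HF_HOME'] = './src/cache/'

from transformers import AutoConfig

# Files are now downloaded to (or reused from) ./src/cache/ instead of ~/.cache.
config = AutoConfig.from_pretrained('some-org/some-model')  # placeholder repo id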
src/.DS_Store CHANGED
Binary files a/src/.DS_Store and b/src/.DS_Store differ
 
src/mDeBERTa (ft) V6/mDeBERTa-v3-base-mnli-xnli-mean/config.json CHANGED
@@ -1,45 +1,3 @@
-{
-  "_name_or_path": "/content/checkpoint",
-  "architectures": [
-    "DebertaV2Model"
-  ],
-  "attention_probs_dropout_prob": 0.1,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
-  "hidden_size": 768,
-  "id2label": {
-    "0": "entailment",
-    "1": "neutral",
-    "2": "contradiction"
-  },
-  "initializer_range": 0.02,
-  "intermediate_size": 3072,
-  "label2id": {
-    "contradiction": 2,
-    "entailment": 0,
-    "neutral": 1
-  },
-  "layer_norm_eps": 1e-07,
-  "max_position_embeddings": 512,
-  "max_relative_positions": -1,
-  "model_type": "deberta-v2",
-  "norm_rel_ebd": "layer_norm",
-  "num_attention_heads": 12,
-  "num_hidden_layers": 12,
-  "pad_token_id": 0,
-  "pooler_dropout": 0,
-  "pooler_hidden_act": "gelu",
-  "pooler_hidden_size": 768,
-  "pos_att_type": [
-    "p2c",
-    "c2p"
-  ],
-  "position_biased_input": false,
-  "position_buckets": 256,
-  "relative_attention": true,
-  "share_att_key": true,
-  "torch_dtype": "float32",
-  "transformers_version": "4.35.0",
-  "type_vocab_size": 0,
-  "vocab_size": 251000
-}
+version https://git-lfs.github.com/spec/v1
+oid sha256:5da5d76bed323094e0ea55fc189872f0ad42e1b6bbf93440cb6078f80bcbdb0a
+size 1026
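
The replacement above is a Git LFS pointer: the JSON config now lives in LFS storage, and git keeps only the spec version, the blob's sha256 oid, and its size (1026 bytes). A minimal sketch for checking such a pointer against a locally downloaded blob (verify_lfs_pointer is a hypothetical helper, not part of this repo):

import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    # Parse the three 'key value' lines of the pointer file.
    fields = dict(
        line.split(' ', 1)
        for line in Path(pointer_path).read_text().splitlines()
        if line
    )
    blob = Path(blob_path).read_bytes()
    # The oid field is 'sha256:<hex digest>'; size is the blob's byte count.
    return (
        hashlib.sha256(blob).hexdigest() == fields['oid'].removeprefix('sha256:')
        and len(blob) == int(fields['size'])
    )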