fabbrorada committed on
Commit
8e28760
·
verified ·
1 Parent(s): 4e6649c

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +19 -20
config.json CHANGED
@@ -1,27 +1,27 @@
1
  {
2
- "_name_or_path": "fabbrorada/prediction-lab-chronos-base-fine-tuned",
3
  "architectures": [
4
- "ChronosBoltModelForForecasting"
5
  ],
6
  "chronos_config": {
7
  "context_length": 9,
8
- "input_patch_size": 16,
9
- "input_patch_stride": 16,
10
- "prediction_length": 64,
11
- "quantiles": [
12
- 0.1,
13
- 0.2,
14
- 0.3,
15
- 0.4,
16
- 0.5,
17
- 0.6,
18
- 0.7,
19
- 0.8,
20
- 0.9
21
- ],
22
- "use_reg_token": true
 
23
  },
24
- "chronos_pipeline_class": "ChronosBoltPipeline",
25
  "classifier_dropout": 0.0,
26
  "d_ff": 3072,
27
  "d_kv": 64,
@@ -41,11 +41,10 @@
41
  "num_heads": 12,
42
  "num_layers": 12,
43
  "pad_token_id": 0,
44
- "reg_token_id": 1,
45
  "relative_attention_max_distance": 128,
46
  "relative_attention_num_buckets": 32,
47
  "torch_dtype": "float32",
48
  "transformers_version": "4.46.3",
49
  "use_cache": true,
50
- "vocab_size": 2
51
  }
 
1
  {
2
+ "_name_or_path": "/home/fabrizio/prediction_lab/chronos_example/output/aws-finetuning/202412241542",
3
  "architectures": [
4
+ "T5ForConditionalGeneration"
5
  ],
6
  "chronos_config": {
7
  "context_length": 9,
8
+ "eos_token_id": 1,
9
+ "model_type": "seq2seq",
10
+ "n_special_tokens": 2,
11
+ "n_tokens": 4096,
12
+ "num_samples": 86585,
13
+ "pad_token_id": 0,
14
+ "prediction_length": 1,
15
+ "temperature": 1.0,
16
+ "tokenizer_class": "MeanScaleUniformBins",
17
+ "tokenizer_kwargs": {
18
+ "high_limit": 15.0,
19
+ "low_limit": -15.0
20
+ },
21
+ "top_k": 50,
22
+ "top_p": 1.0,
23
+ "use_eos_token": true
24
  },
 
25
  "classifier_dropout": 0.0,
26
  "d_ff": 3072,
27
  "d_kv": 64,
 
41
  "num_heads": 12,
42
  "num_layers": 12,
43
  "pad_token_id": 0,
 
44
  "relative_attention_max_distance": 128,
45
  "relative_attention_num_buckets": 32,
46
  "torch_dtype": "float32",
47
  "transformers_version": "4.46.3",
48
  "use_cache": true,
49
+ "vocab_size": 4096
50
  }