import os

# Index options: display description -> index name
INDEX_BY_DESC = {
    'Dolma-v1.7 (2.6T tokens)': 'v4_dolma-v1_7_llama',
    'RedPajama (1.4T tokens)': 'v4_rpj_llama_s4',
    'Pile-train (380B tokens)': 'v4_piletrain_llama',
    'C4-train (200B tokens)': 'v4_c4train_llama',
    'Pile-val (390M tokens)': 'v4_pileval_llama',
    # 'Pile-val (GPT-2 tokenizer), 380M tokens': 'v4_pileval_gpt2',
    # 'Dolma-v1.6-sample (OLMo tokenizer), 8.0B tokens': 'v4_dolmasample_olmo',
    # 'Dolma-v1.6-sample (9.2B tokens)': 'v4_dolma-v1_6-sample_llama',
    # 'Dolma-v1.6 (3.1T tokens)': 'v4_dolma-v1_6_llama',
    # 'Dolma-v1.6-wiki (4.3B tokens)': 'v4_dolma-v1_6-wiki_llama',
    # 'Dolma-v1.6-books (5.8B tokens)': 'v4_dolma-v1_6-books_llama',
    # 'Dolma-v1.6-pes2o (69B tokens)': 'v4_dolma-v1_6-pes2o_llama',
    # 'Dolma-v1.6-reddit (89B tokens)': 'v4_dolma-v1_6-reddit_llama',
    # 'Dolma-v1.6-c4 (200B tokens)': 'v4_dolma-v1_6-c4_llama',
    # 'Dolma-v1.6-stack (420B tokens)': 'v4_dolma-v1_6-stack_llama',
    # 'Dolma-v1.6-cc_en_head (660B tokens)': 'v4_dolma-v1_6-cc_en_head_llama',
    # 'Dolma-v1.6-cc_en_middle (650B tokens)': 'v4_dolma-v1_6-cc_en_middle_llama',
    # 'Dolma-v1.6-cc_en_tail (970B tokens)': 'v4_dolma-v1_6-cc_en_tail_llama',
}
INDEX_DESCS = list(INDEX_BY_DESC.keys())
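
# Illustrative usage (assumed, not from the original file): the descriptions above
# would typically populate a dropdown, and a selection maps back to its index name:
#   index = INDEX_BY_DESC['Dolma-v1.7 (2.6T tokens)']  # -> 'v4_dolma-v1_7_llama'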

# API limits and defaults (lowercase names: per-request defaults; uppercase names: hard caps)
MAX_QUERY_CHARS = int(os.environ.get('MAX_QUERY_CHARS', 1000))
MAX_CLAUSES_PER_CNF = int(os.environ.get('MAX_CLAUSES_PER_CNF', 4))
MAX_TERMS_PER_CLAUSE = int(os.environ.get('MAX_TERMS_PER_CLAUSE', 4))
max_support = int(os.environ.get('max_support', 1000))
MAX_SUPPORT = int(os.environ.get('MAX_SUPPORT', 1000))
max_clause_freq = int(os.environ.get('max_clause_freq', 50000))
MAX_CLAUSE_FREQ = int(os.environ.get('MAX_CLAUSE_FREQ', 500000))
max_diff_tokens = int(os.environ.get('max_diff_tokens', 100))
MAX_DIFF_TOKENS = int(os.environ.get('MAX_DIFF_TOKENS', 1000))
maxnum = int(os.environ.get('maxnum', 1))
MAXNUM = int(os.environ.get('MAXNUM', 10))
max_disp_len = int(os.environ.get('max_disp_len', 1000))
MAX_DISP_LEN = int(os.environ.get('MAX_DISP_LEN', 10000))

# Hugging Face demo settings
API_URL = os.environ.get('API_URL', None)
DEFAULT_CONCURRENCY_LIMIT = int(os.environ.get('DEFAULT_CONCURRENCY_LIMIT', 10))
MAX_SIZE = int(os.environ.get('MAX_SIZE', 100))
MAX_THREADS = int(os.environ.get('MAX_THREADS', 40))
DEBUG = (os.environ.get('DEBUG', 'False') != 'False')
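
# Minimal sketch (an assumption, not part of the original module) showing how the
# default/cap pairs above could be applied to user-supplied request parameters:
# fall back to the lowercase default when no value is given, and clamp anything
# above the uppercase cap. The helper name and usage below are hypothetical.
def _clamp_param(value, default, cap):
    """Return `value` as an int no larger than `cap`, or `default` if value is None."""
    if value is None:
        return default
    return min(int(value), cap)

# Example (with a hypothetical `user_args` dict):
#   support = _clamp_param(user_args.get('max_support'), max_support, MAX_SUPPORT)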