nxphi47 committed on
Commit
b139ef7
1 Parent(s): 7c07e7c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -2
app.py CHANGED
@@ -47,6 +47,11 @@ HF_TOKEN = os.environ.get("HF_TOKEN", None)
47
  # ! path where the model is downloaded, either on ./ or persistent disc
48
  MODEL_PATH = os.environ.get("MODEL_PATH", "./seal-13b-chat-a")
49
 
 
 
 
 
 
50
  # ! list of keywords to disabled as security measures to comply with local regulation
51
  KEYWORDS = os.environ.get("KEYWORDS", "").strip()
52
  KEYWORDS = KEYWORDS.split(";") if len(KEYWORDS) > 0 else []
@@ -205,6 +210,9 @@ path_markdown = """
205
  """
206
 
207
 
 
 
 
208
  def _detect_lang(text):
209
  # Disable language that may have safety risk
210
  from langdetect import detect as detect_lang
@@ -1072,8 +1080,10 @@ def chat_response_stream_multiturn(
1072
  )
1073
 
1074
  sampling_params = SamplingParams(
1075
- temperature=temperature, max_tokens=max_tokens,
 
1076
  frequency_penalty=frequency_penalty,
 
1077
  )
1078
  cur_out = None
1079
 
@@ -1093,7 +1103,7 @@ def chat_response_stream_multiturn(
1093
  item = next(iter(gen.values()))
1094
  cur_out = item.outputs[0].text
1095
 
1096
- print(f'{full_prompt}<<<{cur_out}>>>\n\n')
1097
 
1098
  if cur_out is not None and "\\n" in cur_out:
1099
  print(f'double slash-n in cur_out:\n{cur_out}')
@@ -1140,6 +1150,21 @@ def check_model_path(model_path) -> str:
1140
  return ckpt_info
1141
 
1142
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1143
  def launch():
1144
  global demo, llm, DEBUG
1145
  model_desc = MODEL_DESC
@@ -1207,6 +1232,13 @@ def launch():
1207
  except Exception as e:
1208
  print(f'Cannot print model worker: {e}')
1209
 
 
 
 
 
 
 
 
1210
  print(f'Use system prompt:\n{sys_prompt}')
1211
 
1212
  response_fn = chat_response_stream_multiturn
 
47
  # ! path where the model is downloaded, either on ./ or persistent disc
48
  MODEL_PATH = os.environ.get("MODEL_PATH", "./seal-13b-chat-a")
49
 
50
# ! !! Whether to delete the folder, ONLY SET THIS IF YOU WANT TO DELETE SAVED MODEL ON PERSISTENT DISC
# DELETE_FOLDER: path whose contents maybe_delete_folder() will wipe (best effort).
DELETE_FOLDER = os.environ.get("DELETE_FOLDER", "")
# os.environ.get with a "" default never returns None, so the previous
# `DELETE_FOLDER is not None` guard was always true. An empty string is
# falsy and os.path.exists("") is False, so plain truthiness preserves
# the exact same result while dropping the dead check.
IS_DELETE_FOLDER = bool(DELETE_FOLDER) and os.path.exists(DELETE_FOLDER)
# NOTE(review): assumes DOWNLOAD_SNAPSHOT is already defined earlier in the file.
print(f'DELETE_FOLDER: {DELETE_FOLDER} | {DOWNLOAD_SNAPSHOT=}')

# ! list of keywords to disable as security measures to comply with local regulation
# KEYWORDS arrives as a ";"-separated env string; empty means no keyword filtering.
KEYWORDS = os.environ.get("KEYWORDS", "").strip()
KEYWORDS = KEYWORDS.split(";") if KEYWORDS else []
 
210
  """
211
 
212
 
213
+
214
+
215
+
216
  def _detect_lang(text):
217
  # Disable language that may have safety risk
218
  from langdetect import detect as detect_lang
 
1080
  )
1081
 
1082
  sampling_params = SamplingParams(
1083
+ temperature=temperature,
1084
+ max_tokens=max_tokens,
1085
  frequency_penalty=frequency_penalty,
1086
+ stop=['<s>', '</s>', '<<SYS>>', '<</SYS>>', '[INST]', '[/INST]']
1087
  )
1088
  cur_out = None
1089
 
 
1103
  item = next(iter(gen.values()))
1104
  cur_out = item.outputs[0].text
1105
 
1106
+ print(f'@@@@@@@@@@\n{full_prompt}<<<{cur_out}>>>\n@@@@@@@@@@\n')
1107
 
1108
  if cur_out is not None and "\\n" in cur_out:
1109
  print(f'double slash-n in cur_out:\n{cur_out}')
 
1150
  return ckpt_info
1151
 
1152
 
1153
+
1154
def maybe_delete_folder():
    """Best-effort wipe of everything inside ``DELETE_FOLDER``.

    Runs only when both ``IS_DELETE_FOLDER`` and ``DOWNLOAD_SNAPSHOT`` are
    set (i.e. the operator explicitly asked to clear the saved model on the
    persistent disc before a fresh snapshot download). Files and symlinks
    are unlinked, sub-directories removed recursively; any failure is
    logged and skipped rather than raised.
    """
    if not (IS_DELETE_FOLDER and DOWNLOAD_SNAPSHOT):
        return
    print(f'DELETE ALL FILES IN {DELETE_FOLDER}')
    for entry in os.listdir(DELETE_FOLDER):
        path = os.path.join(DELETE_FOLDER, entry)
        try:
            if os.path.isdir(path) and not os.path.islink(path):
                shutil.rmtree(path)
            else:
                # regular file or symlink (including symlinks to dirs)
                os.unlink(path)
        except Exception as e:
            print('Failed to delete %s. Reason: %s' % (path, e))
1166
+
1167
+
1168
  def launch():
1169
  global demo, llm, DEBUG
1170
  model_desc = MODEL_DESC
 
1232
  except Exception as e:
1233
  print(f'Cannot print model worker: {e}')
1234
 
1235
+ try:
1236
+ llm.llm_engine.scheduler_config.max_model_len = 4096
1237
+ llm.llm_engine.scheduler_config.max_num_batched_tokens = 4096
1238
+ llm.llm_engine.tokenizer.add_special_tokens = False
1239
+ except Exception as e:
1240
+ print(f'Cannot set parameters: {e}')
1241
+
1242
  print(f'Use system prompt:\n{sys_prompt}')
1243
 
1244
  response_fn = chat_response_stream_multiturn