Spaces:
Running
on
Zero
Running
on
Zero
Update app.py
Browse files
app.py
CHANGED
@@ -98,6 +98,9 @@ gpu_memory_utilization = float(os.environ.get("gpu_memory_utilization", "0.9"))
|
|
98 |
# whether to enable quantization, currently not in use
|
99 |
QUANTIZATION = str(os.environ.get("QUANTIZATION", ""))
|
100 |
|
|
|
|
|
|
|
101 |
|
102 |
"""
|
103 |
Internal instructions of how to configure the DEMO
|
@@ -1207,6 +1210,34 @@ def format_conversation(history):
|
|
1207 |
return _str
|
1208 |
|
1209 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1210 |
def print_log_file():
|
1211 |
global LOG_FILE, LOG_PATH
|
1212 |
if SAVE_LOGS and os.path.exists(LOG_PATH):
|
@@ -1222,6 +1253,7 @@ def print_log_file():
|
|
1222 |
print(f'######--{v}--##')
|
1223 |
_str = format_conversation(history)
|
1224 |
print(_str)
|
|
|
1225 |
|
1226 |
|
1227 |
def debug_chat_response_echo(
|
@@ -1319,6 +1351,7 @@ def launch():
|
|
1319 |
f'\n| gpu_memory_utilization={gpu_memory_utilization} '
|
1320 |
f'\n| KEYWORDS={KEYWORDS} '
|
1321 |
f'\n| LOG_PATH={LOG_PATH} | SAVE_LOGS={SAVE_LOGS} '
|
|
|
1322 |
f'\n| GET_LOG_CMD={GET_LOG_CMD} '
|
1323 |
f'\n| Sys={SYSTEM_PROMPT_1}'
|
1324 |
f'\n| Desc={model_desc}'
|
|
|
98 |
# whether to enable quantization, currently not in use
|
99 |
QUANTIZATION = str(os.environ.get("QUANTIZATION", ""))
|
100 |
|
# Hugging Face dataset repo id (e.g. "org/name") that aggregated chat logs
# are uploaded to; empty string disables the upload path entirely.
DATA_SET_REPO_PATH = str(os.environ.get("DATA_SET_REPO_PATH", ""))
# Placeholder for a lazily created repo handle; never assigned in the
# visible diff — presumably populated elsewhere (TODO confirm) .
DATA_SET_REPO = None
|
103 |
+
|
104 |
|
105 |
"""
|
106 |
Internal instructions of how to configure the DEMO
|
|
|
1210 |
return _str
|
1211 |
|
1212 |
|
def maybe_upload_to_dataset():
    """Aggregate the JSONL chat log and push it to the HF dataset repo.

    Reads ``LOG_PATH`` (one JSON object per line, each carrying a ``'key'``
    field), deduplicates entries by key into a dict, writes the result next
    to the log as ``<LOG_PATH>.agg.json``, then uploads that file to
    ``DATA_SET_REPO_PATH`` as a dataset pull request.

    No-op unless ``SAVE_LOGS`` is enabled, the log file exists, and
    ``DATA_SET_REPO_PATH`` is non-empty. The upload is best-effort: any
    failure (missing huggingface_hub, network, auth) is printed, not raised.
    """
    # Fixed: original used `DATA_SET_REPO_PATH is not ""`, an identity
    # comparison with a string literal (SyntaxWarning on CPython >= 3.8 and
    # unreliable semantics); equality is what was intended.
    if not (SAVE_LOGS and os.path.exists(LOG_PATH) and DATA_SET_REPO_PATH != ""):
        return

    convos = {}
    with open(LOG_PATH, 'r', encoding='utf-8') as f:
        for line in f:
            # Fixed: `if line:` let blank lines ("\n" is truthy) reach
            # json.loads and crash; strip before testing.
            if line.strip():
                item = json.loads(line)
                # Last occurrence of a key wins, matching original behavior.
                convos[item['key']] = item

    AGG_LOG_PATH = LOG_PATH + ".agg.json"
    with open(AGG_LOG_PATH, 'w', encoding='utf-8') as fo:
        json.dump(convos, fo, indent=4, ensure_ascii=False)
    print(f'Saved aggregated json to {AGG_LOG_PATH}')

    try:
        # Local import kept deliberately: huggingface_hub may be absent and
        # that failure is tolerated by the except below.
        from huggingface_hub import upload_file
        print(f'upload {AGG_LOG_PATH} to {DATA_SET_REPO_PATH}')
        upload_file(
            path_or_fileobj=AGG_LOG_PATH,
            path_in_repo=os.path.basename(AGG_LOG_PATH),
            repo_id=DATA_SET_REPO_PATH,
            token=HF_TOKEN,
            repo_type="dataset",
            create_pr=True,
        )
    except Exception as e:
        # Best-effort: log and continue; never take the app down over logging.
        print(f'Failed to save to repo: {DATA_SET_REPO_PATH}|{str(e)}')
1239 |
+
|
1240 |
+
|
1241 |
def print_log_file():
|
1242 |
global LOG_FILE, LOG_PATH
|
1243 |
if SAVE_LOGS and os.path.exists(LOG_PATH):
|
|
|
1253 |
print(f'######--{v}--##')
|
1254 |
_str = format_conversation(history)
|
1255 |
print(_str)
|
1256 |
+
maybe_upload_to_dataset()
|
1257 |
|
1258 |
|
1259 |
def debug_chat_response_echo(
|
|
|
1351 |
f'\n| gpu_memory_utilization={gpu_memory_utilization} '
|
1352 |
f'\n| KEYWORDS={KEYWORDS} '
|
1353 |
f'\n| LOG_PATH={LOG_PATH} | SAVE_LOGS={SAVE_LOGS} '
|
1354 |
+
f'\n| DATA_SET_REPO_PATH={DATA_SET_REPO_PATH} '
|
1355 |
f'\n| GET_LOG_CMD={GET_LOG_CMD} '
|
1356 |
f'\n| Sys={SYSTEM_PROMPT_1}'
|
1357 |
f'\n| Desc={model_desc}'
|