sasha HF staff committed on
Commit
e5dba85
·
verified ·
1 Parent(s): fea204c

Update app.py

Browse files

switching from json to pandas

Files changed (1) hide show
  1. app.py +11 -35
app.py CHANGED
@@ -11,7 +11,8 @@ from enum import Enum
11
 
12
  OWNER = "EnergyStarAI"
13
  COMPUTE_SPACE = f"{OWNER}/launch-computation-example"
14
- REQUESTS_DATASET_PATH = f"{OWNER}/requests_debug"
 
15
 
16
  TOKEN = os.environ.get("DEBUG")
17
  API = HfApi(token=TOKEN)
@@ -112,9 +113,11 @@ def add_new_eval(
112
  # return styled_error("Could not get your model information. Please fill it up properly.")
113
 
114
  model_size = get_model_size(model_info=model_info, precision=precision)
115
-
116
  print("Adding request")
117
 
 
 
118
  request_dict = {
119
  "model": repo_id,
120
  "precision": precision,
@@ -127,46 +130,19 @@ def add_new_eval(
127
  #"private": False,
128
  #}
129
 
130
- out_file = f"{model_name}_eval_request_{precision}.json"
131
- temp_out_path = f"./temp/{REQUESTS_DATASET_PATH}/{model_owner}/"
132
- temp_out_file = f"./temp/{REQUESTS_DATASET_PATH}/{model_owner}/{out_file}"
133
- print("Making directory to output results at %s" % temp_out_path)
134
- os.makedirs(temp_out_path, exist_ok=True)
135
-
136
  print("Writing out temp request file to %s" % temp_out_file)
137
- with open(temp_out_file, "w+") as f:
138
- f.write(json.dumps(request_dict))
139
-
140
- print("Uploading request to Dataset repo at %s" % REQUESTS_DATASET_PATH)
141
- API.upload_file(
142
- path_or_fileobj=temp_out_file,
143
- path_in_repo=f"{model_owner}/{out_file}",
144
- repo_id=REQUESTS_DATASET_PATH,
145
- repo_type="dataset",
146
- commit_message=f"Adding {model_name} to requests queue.",
147
- )
148
-
149
- # Remove the local file
150
- os.remove(temp_out_file)
151
 
152
  print("Starting compute space at %s " % COMPUTE_SPACE)
153
  return start_compute_space()
154
 
155
  def print_existing_models():
156
  model_list= []
157
- requests_dataset = snapshot_download('EnergyStarAI/requests_debug', token=TOKEN, repo_type="dataset", allow_patterns="*.json")
158
- for dir, path, files in os.walk(requests_dataset):
159
- for fid in files:
160
- file_path = os.path.join(dir, fid)
161
- with open(file_path) as fp:
162
- request = json.load(fp)
163
- status = request['status']
164
- if status == 'COMPLETE':
165
- status = request['status']
166
- model = request['model']
167
- task = request['task']
168
- model_list.append([model, task])
169
- return model_list
170
 
171
 
172
 
 
11
 
12
  OWNER = "EnergyStarAI"
13
  COMPUTE_SPACE = f"{OWNER}/launch-computation-example"
14
+ requests_dset = load_dataset("EnergyStarAI/requests_debug", split="test")
15
+
16
 
17
  TOKEN = os.environ.get("DEBUG")
18
  API = HfApi(token=TOKEN)
 
113
  # return styled_error("Could not get your model information. Please fill it up properly.")
114
 
115
  model_size = get_model_size(model_info=model_info, precision=precision)
116
+
117
  print("Adding request")
118
 
119
+ requests_dset = requests.to_pandas()
120
+
121
  request_dict = {
122
  "model": repo_id,
123
  "precision": precision,
 
130
  #"private": False,
131
  #}
132
 
 
 
 
 
 
 
133
  print("Writing out temp request file to %s" % temp_out_file)
134
+ df_request_dict = pd.DataFrame({'name':request_dict.keys(), 'value':request_dict.values()})
135
+ df_final = pd.concat([requests_dset, df_request_dict], ignore_index=True)
136
+ updated_dset =Dataset.from_pandas(requests_dset)
137
+ updated_dset.push_to_hub("EnergyStarAI/requests_debug", split="test")
 
 
 
 
 
 
 
 
 
 
138
 
139
  print("Starting compute space at %s " % COMPUTE_SPACE)
140
  return start_compute_space()
141
 
142
  def print_existing_models():
143
  model_list= []
144
+ requests = load_dataset("EnergyStarAI/requests_debug", split="test")
145
+ requests_dset = requests.to_pandas()
 
 
 
 
 
 
 
 
 
 
 
146
 
147
 
148