Spaces:
Runtime error
Runtime error
Fix seed being shared.
Browse files
app.py
CHANGED
@@ -45,7 +45,6 @@ decision_model_path = huggingface_hub.hf_hub_download(
 45  args_path = huggingface_hub.hf_hub_download(
 46      repo_id='jykoh/gill', filename='model_args.json')
 47  model = models.load_gill('./', args_path, ckpt_path, decision_model_path)
 48 -g_cuda = torch.Generator(device='cuda').manual_seed(1337)
 49
 50
 51  def upload_image(state, image_input):

@@ -76,6 +75,8 @@ def save_image_to_local(image: Image.Image):
 76
 77
 78  def generate_for_prompt(input_text, state, ret_scale_factor, num_words, temperature):
 79 +    g_cuda = torch.Generator(device='cuda').manual_seed(1337)
 80 +
 81      # Ignore empty inputs.
 82      if len(input_text) == 0:
 83          return state, state[0], gr.update(visible=True)