Spaces:
Running
Running
File size: 6,104 Bytes
94f41f8 58b5cd0 94f41f8 e126885 58b5cd0 62645d8 902b7ab 94f41f8 58b5cd0 e126885 58b5cd0 c096e3a 58b5cd0 902b7ab 94f41f8 d76061d 574d43f 94f41f8 d76061d 94f41f8 1c69313 d76061d c096e3a d76061d 94f41f8 d76061d 5aa4faf d76061d c096e3a d76061d 5aa4faf d76061d c096e3a 1c69313 472f34c 1c69313 c096e3a d76061d 94f41f8 d76061d |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 |
import streamlit as st
from transformers import AutoModelForImageClassification, AutoImageProcessor
from PIL import Image
import requests
from io import BytesIO
import time
import torch
import concurrent.futures
import torch.nn.functional as F
# def inferring(encoding, model):
# with torch.no_grad():
# outputs = model(**encoding)
# logits = outputs.logits
# probabilities = F.softmax(logits, dim=-1) # Convert logits to probabilities
# predicted_class_idx = logits.argmax(-1).item() # Get the predicted class index
# predicted_probability = probabilities[0, predicted_class_idx].item() # Get the probability of the predicted class
# return model.config.id2label[predicted_class_idx]+f" prob:{predicted_probability}"
def inferring(encoding, model):
    """Run one forward pass and return the top predicted label with its probability.

    Args:
        encoding: processor output (tensor kwargs) fed directly to the model.
        model: a HF image-classification model exposing ``config.id2label``.

    Returns:
        A string of the form "<label>\nprob:<p>" where p is the softmax
        probability of the winning class, rounded to two decimals.
    """
    # Inference only — no gradients needed.
    with torch.no_grad():
        logits = model(**encoding).logits
    probs = F.softmax(logits, dim=-1)
    best_idx = int(torch.argmax(logits, dim=-1))
    best_prob = round(float(probs[0, best_idx]), 2)
    label = model.config.id2label[best_idx]
    return f"{label}\nprob:{best_prob}"
# #testing
# def inferring(encoding,model):
# with torch.no_grad():
# outputs = model(**encoding)
# logits = outputs.logits
# predicted_class_idx = logits.argmax(-1).item()
# # st.write(f"Top Wear: {top_wear_model.config.id2label[predicted_class_idx]}")
# return model.config.id2label[predicted_class_idx]
def imageprocessing(image):
    """Convert a PIL image into model-ready tensors using the shared processor.

    Reads the image processor cached in ``st.session_state`` and returns its
    output as PyTorch tensors (``return_tensors="pt"``).
    """
    processor = st.session_state.image_processor
    return processor(images=image, return_tensors="pt")
def _run_models(encoding, tasks):
    """Run several classification models concurrently on one shared encoding.

    Args:
        encoding: the preprocessed image tensors, reused by every model.
        tasks: mapping of result key -> model to run.

    Returns:
        dict mapping each key to the label string from ``inferring``, or None
        for any model that raised (the error is surfaced via ``st.error``).
    """
    results = {}
    with concurrent.futures.ThreadPoolExecutor() as executor:
        futures = {
            executor.submit(inferring, encoding, model): model_name
            for model_name, model in tasks.items()
        }
        for future in concurrent.futures.as_completed(futures):
            model_name = futures[future]
            try:
                results[model_name] = future.result()
            except Exception as e:
                st.error(f"Error in {model_name}: {str(e)}")
                results[model_name] = None
    return results


def pipes(image, categories):
    """Classify garment attributes for the given body-region category.

    Preprocesses ``image`` once, then fans the encoding out to the models
    relevant to ``categories`` ("UpperBody", "Wholebody", "Lowerbody", or
    "Neck") in parallel threads.

    Returns:
        dict of attribute name -> predicted label string (None on per-model
        failure), or an error dict for an unrecognized category.
    """
    # Process the image once and reuse the encoding for every model.
    encoding = imageprocessing(image)
    # Access models from session state on the main thread before handing
    # them to worker threads (st.session_state is not thread-safe to read
    # from workers).
    top_wear_model = st.session_state.top_wear_model
    full_wear_model = st.session_state.fullwear
    bottom_wear_model = st.session_state.bottomwear_model
    pattern_model = st.session_state.pattern_model
    print_model = st.session_state.print_model
    sleeve_length_model = st.session_state.sleeve_length_model
    neck_style_model = st.session_state.neck_style_model

    if categories == "UpperBody":
        return _run_models(encoding, {
            "topwear": top_wear_model,
            "patterns": pattern_model,
            "prints": print_model,
            "sleeve_length": sleeve_length_model,
        })
    elif categories == "Wholebody":
        return _run_models(encoding, {
            "fullwear": full_wear_model,
            "patterns": pattern_model,
            "prints": print_model,
            "sleeve_length": sleeve_length_model,
        })
    elif categories == "Lowerbody":
        return _run_models(encoding, {
            "lowerwear": bottom_wear_model,
        })
    elif categories == "Neck":
        return _run_models(encoding, {
            "Neckstyle": neck_style_model,
        })
    else:
        # Unrecognized category: keep the original (typo'd) key/message for
        # backward compatibility with callers that check it.
        return {"invalid categorie": f"{categories} categorie not in process!"}
|