aiqtech committed on
Commit
52f4e8f
·
verified ·
1 Parent(s): 599ec34

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -2
app.py CHANGED
@@ -22,6 +22,24 @@ TMP_DIR = "/tmp/Trellis-demo"
22
 
23
  os.makedirs(TMP_DIR, exist_ok=True)
24
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
25
 
26
  def preprocess_image(image: Image.Image) -> Tuple[str, Image.Image]:
27
  """
@@ -78,7 +96,6 @@ def unpack_state(state: dict) -> Tuple[Gaussian, edict, str]:
78
 
79
  return gs, mesh, state['trial_id']
80
 
81
-
82
  @spaces.GPU
83
  def text_to_image(prompt: str, seed: int, randomize_seed: bool) -> Image.Image:
84
  """
@@ -87,8 +104,14 @@ def text_to_image(prompt: str, seed: int, randomize_seed: bool) -> Image.Image:
87
  if randomize_seed:
88
  seed = np.random.randint(0, MAX_SEED)
89
 
 
 
 
 
 
 
90
  generator = torch.Generator(device="cuda").manual_seed(seed)
91
- image = text2img_pipeline(prompt, generator=generator).images[0]
92
  return image
93
 
94
 
 
22
 
23
  os.makedirs(TMP_DIR, exist_ok=True)
24
 
25
+ # ์ƒ๋‹จ์˜ import ๋ถ€๋ถ„์— ์ถ”๊ฐ€
26
+ from transformers import pipeline
27
+
28
+ # ๋ฒˆ์—ญ๊ธฐ ์ดˆ๊ธฐํ™”
29
+ translator = pipeline("translation", model="Helsinki-NLP/opus-mt-ko-en")
30
+
31
+ # ํ•œ๊ธ€ ๊ฐ์ง€ ๋ฐ ๋ฒˆ์—ญ ํ•จ์ˆ˜
32
+ def translate_korean_prompt(prompt: str) -> str:
33
+ """
34
+ ํ•œ๊ธ€์ด ํฌํ•จ๋œ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์˜์–ด๋กœ ๋ฒˆ์—ญ
35
+ """
36
+ def contains_korean(text):
37
+ return any(ord('๊ฐ€') <= ord(c) <= ord('ํžฃ') for c in text)
38
+
39
+ if contains_korean(prompt):
40
+ translated = translator(prompt)[0]['translation_text']
41
+ return translated
42
+ return prompt
43
 
44
  def preprocess_image(image: Image.Image) -> Tuple[str, Image.Image]:
45
  """
 
96
 
97
  return gs, mesh, state['trial_id']
98
 
 
99
  @spaces.GPU
100
  def text_to_image(prompt: str, seed: int, randomize_seed: bool) -> Image.Image:
101
  """
 
104
  if randomize_seed:
105
  seed = np.random.randint(0, MAX_SEED)
106
 
107
+ # ํ•œ๊ธ€ ํ”„๋กฌํ”„ํŠธ๋ฅผ ์˜์–ด๋กœ ๋ฒˆ์—ญ
108
+ english_prompt = translate_korean_prompt(prompt)
109
+
110
+ # ํ”„๋กฌํ”„ํŠธ ํ˜•์‹ ๊ฐ•์ œ
111
+ formatted_prompt = f"wbgmsst, 3D, {english_prompt}, white background"
112
+
113
  generator = torch.Generator(device="cuda").manual_seed(seed)
114
+ image = text2img_pipeline(formatted_prompt, generator=generator).images[0]
115
  return image
116
 
117