Barath05 committed on
Commit
46e1f05
·
verified ·
1 Parent(s): 933118b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +53 -34
app.py CHANGED
@@ -7,14 +7,19 @@ import tempfile
7
  import time
8
  from gradio_client import Client, handle_file
9
 
10
- # --- CONFIGURATION: ROBUST PRIORITY LIST ---
11
- # We try completely different architectures to avoid shared server outages.
12
- # 1. CRM (Zhengyi/CRM) - High quality, separate infrastructure
13
- # 2. TripoSR (Official) - Fast, but currently flaky
14
- # 3. Shap-E (OpenAI) - Old reliable fallback
15
  MODELS = [
16
- {"id": "Zhengyi/CRM", "api": "/generate", "type": "crm"},
17
- {"id": "stabilityai/TripoSR", "api": "/generate", "type": "tripo"},
 
 
 
 
 
 
 
 
18
  {"id": "hysts/Shap-E", "api": "/image-to-3d", "type": "shape"}
19
  ]
20
 
@@ -23,6 +28,10 @@ def photo_to_sketch(image):
23
  if image is None: return None
24
  if isinstance(image, np.ndarray):
25
  image = Image.fromarray(image.astype('uint8'))
 
 
 
 
26
  gray = image.convert("L")
27
  img_array = np.array(gray)
28
  blurred = cv2.GaussianBlur(img_array, (5, 5), 0)
@@ -37,13 +46,17 @@ def generate_3d_avatar(sketch_image, height, weight, muscle, gender, breast):
37
  if sketch_image is None:
38
  raise gr.Error("Please upload an image first!")
39
 
40
- # Save temp file
 
41
  if isinstance(sketch_image, np.ndarray):
42
  sketch_image = Image.fromarray(sketch_image.astype('uint8'))
 
 
 
43
  temp_dir = tempfile.gettempdir()
44
  sketch_path = os.path.join(temp_dir, f"sketch_{int(time.time())}.png")
45
  sketch_image.save(sketch_path)
46
- print(f"-> Saved input to {sketch_path}")
47
 
48
  last_error = ""
49
 
@@ -56,44 +69,51 @@ def generate_3d_avatar(sketch_image, height, weight, muscle, gender, breast):
56
 
57
  client = Client(model_id)
58
 
59
- if model["type"] == "crm":
60
- # CRM Parameters: [Image, Scale, Steps, Seed]
61
- print("-> Sending request (CRM format)...")
62
- # Note: CRM sometimes returns a tuple of (model, video). We handle both.
63
  result = client.predict(
64
- handle_file(sketch_path), # Input image
65
  api_name=model["api"]
66
  )
67
 
68
- elif model["type"] == "tripo":
69
- # TripoSR Parameters
70
- print("-> Sending request (TripoSR format)...")
 
 
 
 
 
 
 
 
 
71
  result = client.predict(
72
- handle_file(sketch_path), # Input image
73
- False, # Remove background?
74
- 0.85, # Foreground ratio
75
  api_name=model["api"]
76
  )
77
 
78
  elif model["type"] == "shape":
79
- # Shap-E Parameters
80
  print("-> Sending request (Shap-E format)...")
 
81
  result = client.predict(
82
- handle_file(sketch_path), # Input Image
83
- "high quality 3d model", # Prompt (Required)
84
- 0, # Seed
85
- 15, # Guidance Scale
86
- 64, # Steps
87
  api_name=model["api"]
88
  )
89
 
90
  # If we get here, it worked!
91
  print(f"-> SUCCESS! Model generated by {model_id}")
92
 
93
- # Handle different return types (some spaces return [path, video], others just path)
94
  if isinstance(result, (list, tuple)):
95
- # Look for the .glb or .obj file in the list
96
- final_model = next((item for item in result if isinstance(item, str) and item.endswith(('.glb', '.obj', '.gltf'))), result[0])
97
  else:
98
  final_model = result
99
 
@@ -104,13 +124,13 @@ def generate_3d_avatar(sketch_image, height, weight, muscle, gender, breast):
104
  last_error = str(e)
105
  continue # Try next model
106
 
107
- # If loop finishes without success
108
- raise gr.Error(f"All backup models failed. The Hugging Face inference cloud is experiencing a widespread outage. Last Error: {last_error}")
109
 
110
  # =============== UI ===============
111
  with gr.Blocks(title="SketchToLife") as demo:
112
- gr.Markdown("# SketchToLife – Robust 3D Generator")
113
- gr.Markdown("**Status:** Using Multi-Model Fallback (CRM → TripoSR → Shap-E)")
114
 
115
  with gr.Row():
116
  with gr.Column():
@@ -120,7 +140,6 @@ with gr.Blocks(title="SketchToLife") as demo:
120
 
121
  with gr.Column():
122
  gr.Markdown("### Customize Body")
123
- # Placeholders for UI consistency
124
  h = gr.Dropdown(["short", "average", "tall", "giant"], value="average", label="Height")
125
  w = gr.Dropdown(["slim", "average", "curvy", "heavy"], value="average", label="Weight")
126
  m = gr.Dropdown(["slim", "fit", "muscular", "bodybuilder"], value="fit", label="Muscle")
 
7
  import time
8
  from gradio_client import Client, handle_file
9
 
10
+ # --- CONFIGURATION: FALLBACK LIST ---
11
+ # We try these 4 distinct spaces. If one is down, we jump to the next.
 
 
 
12
  MODELS = [
13
+ # 1. LGM (ashawkey/LGM) - Very fast, usually online.
14
+ {"id": "ashawkey/LGM", "api": "/process", "type": "lgm"},
15
+
16
+ # 2. InstantMesh (TencentARC) - High quality, try again.
17
+ {"id": "TencentARC/InstantMesh", "api": "/generate", "type": "instantmesh"},
18
+
19
+ # 3. Zero123++ (sudo-ai) - Good alternative architecture.
20
+ {"id": "sudo-ai/zero123plus-v1.2", "api": "/generate", "type": "zero123"},
21
+
22
+ # 4. Shap-E (OpenAI) - The reliable backup.
23
  {"id": "hysts/Shap-E", "api": "/image-to-3d", "type": "shape"}
24
  ]
25
 
 
28
  if image is None: return None
29
  if isinstance(image, np.ndarray):
30
  image = Image.fromarray(image.astype('uint8'))
31
+
32
+ # Resize to safe dimensions (512x512) to prevent downstream API crashes
33
+ image = image.resize((512, 512))
34
+
35
  gray = image.convert("L")
36
  img_array = np.array(gray)
37
  blurred = cv2.GaussianBlur(img_array, (5, 5), 0)
 
46
  if sketch_image is None:
47
  raise gr.Error("Please upload an image first!")
48
 
49
+ # --- CRITICAL FIX: Sanitize Image ---
50
+ # Many 3D APIs crash if the image is not 256x256 or 512x512 RGB.
51
  if isinstance(sketch_image, np.ndarray):
52
  sketch_image = Image.fromarray(sketch_image.astype('uint8'))
53
+
54
+ sketch_image = sketch_image.convert("RGB").resize((512, 512))
55
+
56
  temp_dir = tempfile.gettempdir()
57
  sketch_path = os.path.join(temp_dir, f"sketch_{int(time.time())}.png")
58
  sketch_image.save(sketch_path)
59
+ print(f"-> Saved clean input to {sketch_path}")
60
 
61
  last_error = ""
62
 
 
69
 
70
  client = Client(model_id)
71
 
72
+ if model["type"] == "lgm":
73
+ # LGM Parameters: image only (the /process endpoint takes a single input)
74
+ print("-> Sending request (LGM format)...")
 
75
  result = client.predict(
76
+ handle_file(sketch_path),
77
  api_name=model["api"]
78
  )
79
 
80
+ elif model["type"] == "instantmesh":
81
+ print("-> Sending request (InstantMesh format)...")
82
+ result = client.predict(
83
+ handle_file(sketch_path), # Image
84
+ True, # Remove Background
85
+ 30, # Steps
86
+ 42, # Seed
87
+ api_name=model["api"]
88
+ )
89
+
90
+ elif model["type"] == "zero123":
91
+ print("-> Sending request (Zero123 format)...")
92
  result = client.predict(
93
+ handle_file(sketch_path), # Image
94
+ True, # Remove Background
 
95
  api_name=model["api"]
96
  )
97
 
98
  elif model["type"] == "shape":
 
99
  print("-> Sending request (Shap-E format)...")
100
+ # Shap-E is strictly: Image, Prompt, Seed, Guidance, Steps
101
  result = client.predict(
102
+ handle_file(sketch_path),
103
+ "", # Prompt must be string (empty is fine)
104
+ 0, # Seed
105
+ 15, # Guidance
106
+ 64, # Steps
107
  api_name=model["api"]
108
  )
109
 
110
  # If we get here, it worked!
111
  print(f"-> SUCCESS! Model generated by {model_id}")
112
 
113
+ # Handle return types (list of files vs single path)
114
  if isinstance(result, (list, tuple)):
115
+ # Find the first .glb or .obj
116
+ final_model = next((item for item in result if isinstance(item, str) and item.endswith(('.glb', '.obj', '.gltf', '.ply'))), result[0])
117
  else:
118
  final_model = result
119
 
 
124
  last_error = str(e)
125
  continue # Try next model
126
 
127
+ # If all fail
128
+ raise gr.Error(f"CRITICAL OUTAGE: All 4 backup models failed. The Hugging Face inference cloud is severely degraded right now. Last Error: {last_error}")
129
 
130
  # =============== UI ===============
131
  with gr.Blocks(title="SketchToLife") as demo:
132
+ gr.Markdown("# SketchToLife – Emergency Backup Mode")
133
+ gr.Markdown("**Status:** Trying LGM → InstantMesh → Zero123 → Shap-E")
134
 
135
  with gr.Row():
136
  with gr.Column():
 
140
 
141
  with gr.Column():
142
  gr.Markdown("### Customize Body")
 
143
  h = gr.Dropdown(["short", "average", "tall", "giant"], value="average", label="Height")
144
  w = gr.Dropdown(["slim", "average", "curvy", "heavy"], value="average", label="Weight")
145
  m = gr.Dropdown(["slim", "fit", "muscular", "bodybuilder"], value="fit", label="Muscle")