Harsh7817 committed on
Commit 508dbb9 · 1 Parent(s): 4fcc94b

Refactor for Hugging Face Spaces: Remove Redis/Celery, add standalone mode

Files changed (3)
  1. Dockerfile +8 -2
  2. app.py +41 -56
  3. tasks.py +13 -30
Dockerfile CHANGED
@@ -13,5 +13,11 @@ COPY . /app
 ENV PYTHONUNBUFFERED=1
 ENV PYTHONDONTWRITEBYTECODE=1
 
-# For GPU: use an nvidia/cuda base image and install the correct torch wheel with CUDA.
-# When running with nvidia runtime, pass --gpus=all to docker run or set deploy settings in compose.
+# Create directories with permissions
+RUN mkdir -p /tmp/uploads /tmp/results && chmod 777 /tmp/uploads /tmp/results
+
+# Expose Hugging Face default port
+EXPOSE 7860
+
+# Start the application
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py CHANGED
@@ -1,55 +1,39 @@
-from fastapi import FastAPI, UploadFile, File, HTTPException
+from fastapi import FastAPI, UploadFile, File, HTTPException, BackgroundTasks
 from fastapi.responses import FileResponse, JSONResponse
 from uuid import uuid4
 from pathlib import Path
 import shutil
 import os
 import json
-import redis
-from celery import Celery
 from dotenv import load_dotenv
+from tasks import process_image_task
 
 load_dotenv()
 
-# Directories (mounted by docker-compose)
-UPLOAD_DIR = Path(os.environ.get("UPLOAD_DIR", "/data/uploads"))
-RESULT_DIR = Path(os.environ.get("RESULT_DIR", "/data/results"))
+# Directories
+# Use /tmp for Hugging Face Spaces as it is writable
+UPLOAD_DIR = Path("/tmp/uploads")
+RESULT_DIR = Path("/tmp/results")
 UPLOAD_DIR.mkdir(parents=True, exist_ok=True)
 RESULT_DIR.mkdir(parents=True, exist_ok=True)
 
-# Redis for job status
-REDIS_HOST = os.environ.get("REDIS_HOST", "redis")
-REDIS_PORT = int(os.environ.get("REDIS_PORT", 6379))
-CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL", "redis://redis:6379/0")
+# In-memory job store (global variable)
+# Since HF Spaces (Free) runs 1 replica, this works for a demo.
+JOBS = {}
 
-# Lazy connections
-_rdb = None
-_celery_client = None
-
-def get_redis():
-    global _rdb
-    if _rdb is None:
-        _rdb = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=0, decode_responses=True)
-    return _rdb
-
-def get_celery():
-    global _celery_client
-    if _celery_client is None:
-        _celery_client = Celery(broker=CELERY_BROKER_URL)
-    return _celery_client
-
-app = FastAPI(title="Depth->STL processing service (API)")
-
-
-def set_status(job_id: str, state: str, detail: str = "", result: str = ""):
-    payload = {"state": state, "detail": detail, "result": result}
-    get_redis().set(job_id, json.dumps(payload))
+app = FastAPI(title="Depth->STL processing service (Standalone)")
 
+def update_job_status(job_id: str, state: str, detail: str = "", result: str = ""):
+    JOBS[job_id] = {
+        "state": state,
+        "detail": detail,
+        "result": result
+    }
 
 @app.post("/upload/")
-async def upload_image(file: UploadFile = File(...)):
+async def upload_image(background_tasks: BackgroundTasks, file: UploadFile = File(...)):
     # Basic validation
     if not file.content_type.startswith("image/"):
         raise HTTPException(status_code=400, detail="File must be an image")
 
     job_id = str(uuid4())
@@ -57,46 +41,47 @@ async def upload_image(file: UploadFile = File(...)):
     fname = f"{job_id}_{safe_name}"
     save_path = UPLOAD_DIR / fname
 
-    # Save uploaded file to mounted volume
+    # Save uploaded file
     try:
         with save_path.open("wb") as buffer:
             shutil.copyfileobj(file.file, buffer)
     except Exception as e:
         raise HTTPException(status_code=500, detail=f"Failed to save upload: {e}")
 
-    # Mark queued and enqueue Celery task
-    set_status(job_id, "QUEUED", "Job received and queued")
-    try:
-        async_result = get_celery().send_task(
-            "tasks.process_image_task",
-            args=[str(save_path), str(RESULT_DIR), job_id],
-            kwargs={},
-            queue=os.environ.get("CELERY_QUEUE", None),
-        )
-    except Exception as e:
-        set_status(job_id, "FAILURE", f"Failed to enqueue task: {e}")
-        raise HTTPException(status_code=500, detail=f"Failed to enqueue task: {e}")
+    # Mark queued
+    update_job_status(job_id, "QUEUED", "Job received and queued")
+
+    # Add to background tasks
+    background_tasks.add_task(
+        process_image_task,
+        str(save_path),
+        str(RESULT_DIR),
+        job_id,
+        update_job_status
+    )
 
-    return {"job_id": job_id, "celery_id": str(async_result.id)}
+    return {"job_id": job_id}
 
 
 @app.get("/status/{job_id}")
 def status(job_id: str):
-    raw = get_redis().get(job_id)
-    if not raw:
+    job = JOBS.get(job_id)
+    if not job:
         return JSONResponse({"state": "UNKNOWN", "detail": "No such job_id"}, status_code=404)
-    return JSONResponse(json.loads(raw))
+    return JSONResponse(job)
 
 
 @app.get("/download/{job_id}")
 def download(job_id: str):
-    raw = get_redis().get(job_id)
-    if not raw:
+    job = JOBS.get(job_id)
+    if not job:
         raise HTTPException(status_code=404, detail="No such job")
-    info = json.loads(raw)
-    if info.get("state") != "SUCCESS":
+
+    if job.get("state") != "SUCCESS":
         raise HTTPException(status_code=404, detail="Result not ready")
-    stl_path = info.get("result")
+
+    stl_path = job.get("result")
     if not stl_path or not Path(stl_path).exists():
         raise HTTPException(status_code=404, detail="Result file missing")
+
     return FileResponse(path=stl_path, filename=Path(stl_path).name, media_type="application/sla")
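
The rewritten endpoints form a simple upload → poll → download flow. A minimal client sketch against the routes above (the base URL, input filename, polling interval, and the `requests` dependency are assumptions):

# Hypothetical client for the standalone service.
import time
import requests

BASE = "http://localhost:7860"  # assumed deployment URL

# 1. Upload an image; the server returns a job_id immediately.
with open("input.png", "rb") as f:
    r = requests.post(f"{BASE}/upload/", files={"file": ("input.png", f, "image/png")})
r.raise_for_status()
job_id = r.json()["job_id"]

# 2. Poll /status/{job_id} until the background task finishes.
while True:
    state = requests.get(f"{BASE}/status/{job_id}").json()["state"]
    if state in ("SUCCESS", "FAILURE"):
        break
    time.sleep(5)

# 3. Fetch the STL once the job reports SUCCESS.
if state == "SUCCESS":
    with open(f"{job_id}.stl", "wb") as out:
        out.write(requests.get(f"{BASE}/download/{job_id}").content)

Note that JOBS lives in process memory: a Space restart drops all job state, so a client should treat a 404 from /status/ as a lost job rather than something to retry.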
tasks.py CHANGED
@@ -1,9 +1,7 @@
 import os
-from celery import Celery
 from pathlib import Path
 import traceback
 import json
-import redis
 import time
 import sys
 
@@ -11,24 +9,17 @@ import numpy as np
 import cv2
 import open3d as o3d
 import torch
-from PIL import Image
 import trimesh
 from transformers import AutoImageProcessor, AutoModelForDepthEstimation
 
-# Celery / Redis config
-CELERY_BROKER = os.environ.get("CELERY_BROKER_URL", "redis://redis:6379/0")
-CELERY_BACKEND = os.environ.get("CELERY_RESULT_BACKEND", "redis://redis:6379/0")
-REDIS_HOST = os.environ.get("REDIS_HOST", "redis")
-REDIS_PORT = int(os.environ.get("REDIS_PORT", 6379))
-
-# Pipeline settings (fixed to orthographic + Poisson depth=9 to match your notebook)
-DEPTH_CHECKPOINT = os.environ.get("DEPTH_CHECKPOINT", "/models/depth-anything-Large-hf")
-USE_GPU = int(os.environ.get("USE_GPU", "1"))
+# Pipeline settings
+DEPTH_CHECKPOINT = os.environ.get("DEPTH_CHECKPOINT", "LiheYoung/depth-anything-large-hf")  # Default to HF Hub model if local not found
+USE_GPU = int(os.environ.get("USE_GPU", "0"))  # Default to CPU for HF Spaces
 POISSON_DEPTH = int(os.environ.get("POISSON_DEPTH", "9"))
 OUTLIER_NEIGHBORS = int(os.environ.get("OUTLIER_NEIGHBORS", "15"))
 OUTLIER_STD_RATIO = float(os.environ.get("OUTLIER_STD_RATIO", "1.0"))
-ORTHO_SCALE_FACTOR = float(os.environ.get("ORTHO_SCALE_FACTOR", "255"))  # same as your function
-INFERENCE_RESIZE = int(os.environ.get("INFERENCE_RESIZE", "0"))  # 0 keeps original
+ORTHO_SCALE_FACTOR = float(os.environ.get("ORTHO_SCALE_FACTOR", "255"))
+INFERENCE_RESIZE = int(os.environ.get("INFERENCE_RESIZE", "0"))
 RESULT_PREFIX = os.environ.get("RESULT_PREFIX", "")
 
 try:
@@ -36,9 +27,6 @@ try:
 except Exception:
     pass
 
-celery = Celery("tasks", broker=CELERY_BROKER, backend=CELERY_BACKEND)
-rdb = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=0, decode_responses=True)
-
 _model = None
 _processor = None
 _device = "cpu"
@@ -47,10 +35,6 @@ def log(msg):
     print(msg, flush=True)
     sys.stdout.flush()
 
-def set_status(job_id: str, state: str, detail: str = "", result: str = ""):
-    payload = {"state": state, "detail": detail, "result": result}
-    rdb.set(job_id, json.dumps(payload))
-
 def load_model():
     global _model, _processor, _device
     if _model is None:
@@ -85,11 +69,10 @@ def build_orthographic_point_cloud(depth_u8: np.ndarray, color_rgb: np.ndarray)
     pcd.colors = o3d.utility.Vector3dVector(colors)
     return pcd
 
-@celery.task(bind=True)
-def process_image_task(self, image_path: str, result_dir: str, job_id: str):
+def process_image_task(image_path: str, result_dir: str, job_id: str, status_callback):
     start = time.time()
     try:
-        set_status(job_id, "RUNNING", "Loading model")
+        status_callback(job_id, "RUNNING", "Loading model")
         model, processor, device = load_model()
         log(f"[{job_id}] Model loaded on {device}")
 
@@ -108,7 +91,7 @@ def process_image_task(self, image_path: str, result_dir: str, job_id: str):
         else:
             img_proc = img_rgb
 
-        set_status(job_id, "RUNNING", "Running depth inference")
+        status_callback(job_id, "RUNNING", "Running depth inference")
        depth_inputs = processor(images=img_proc, return_tensors="pt").to(device)
         with torch.no_grad():
             outputs = model(**depth_inputs)
@@ -120,7 +103,7 @@
 
         depth_u8 = normalize_depth_uint8(depth)
 
-        set_status(job_id, "RUNNING", "Building orthographic point cloud")
+        status_callback(job_id, "RUNNING", "Building orthographic point cloud")
         pcd = build_orthographic_point_cloud(depth_u8, color_resized)
 
         # Outlier removal (nb=15, std_ratio=1.0)
@@ -144,7 +127,7 @@
         if num_pts == 0:
             raise RuntimeError("Empty point cloud after cleanup")
 
-        set_status(job_id, "RUNNING", f"Poisson reconstruction depth={POISSON_DEPTH}")
+        status_callback(job_id, "RUNNING", f"Poisson reconstruction depth={POISSON_DEPTH}")
         mesh, densities = o3d.geometry.TriangleMesh.create_from_point_cloud_poisson(
             pcd, depth=POISSON_DEPTH
         )
@@ -165,14 +148,14 @@
         Path(result_dir).mkdir(parents=True, exist_ok=True)
         stl_path = Path(result_dir) / f"{RESULT_PREFIX}{job_id}.stl"
 
-        set_status(job_id, "RUNNING", "Exporting STL")
+        status_callback(job_id, "RUNNING", "Exporting STL")
         tm = trimesh.Trimesh(vertices=np.asarray(mesh.vertices),
                              faces=np.asarray(mesh.triangles),
                              process=True)
         tm.export(str(stl_path), file_type="stl")
 
         total = time.time() - start
-        set_status(job_id, "SUCCESS", f"Done in {total:.2f}s", str(stl_path))
+        status_callback(job_id, "SUCCESS", f"Done in {total:.2f}s", str(stl_path))
         log(f"[{job_id}] SUCCESS total={total:.2f}s STL={stl_path}")
         return {
             "status": "success",
@@ -181,6 +164,6 @@
         }
     except Exception as e:
         traceback.print_exc()
-        set_status(job_id, "FAILURE", str(e))
+        status_callback(job_id, "FAILURE", str(e))
         log(f"[{job_id}] FAILURE: {e}")
         raise
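
Because process_image_task now takes a plain status_callback instead of writing to Redis, the pipeline can be exercised without the web layer at all. A minimal sketch (the image path and the printing callback are illustrative, not part of this commit):

# Hypothetical direct invocation of the refactored task, bypassing FastAPI.
from tasks import process_image_task

def print_status(job_id, state, detail="", result=""):
    # Same signature that app.py's update_job_status satisfies.
    print(f"[{job_id}] {state}: {detail} {result}")

# Runs depth inference -> point cloud -> Poisson -> STL export synchronously.
process_image_task("sample.jpg", "/tmp/results", "local-test", print_status)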