Apple committed
Commit ace93f0 · Parent: 7d3cd23

Initial CADFusion Space with Gradio

Files changed (2):
  1. app.py +29 -24
  2. requirements.txt +2 -2
app.py CHANGED
@@ -1,34 +1,39 @@
+ import os, sys, subprocess
+
+ # Clone CADFusion repo if not already cloned
+ if not os.path.exists("CADFusion"):
+     subprocess.check_call(["git", "clone", "https://github.com/microsoft/CADFusion.git"])
+
+ # Add CADFusion repo to Python path
+ sys.path.append("CADFusion")
+
+ # Now import from repo
+ from models import CADFusionModel  # repo folder has `models.py`
+
  import gradio as gr
  import torch
- from cadfusion.models import CADFusionModel
-
- # Load model (from HF weights + GitHub code)
- print("Loading CADFusion model...")
- device = "cuda" if torch.cuda.is_available() else "cpu"
+ from transformers import AutoTokenizer

- model = CADFusionModel.from_pretrained("microsoft/CADFusion")
- model = model.to(device)
- model.eval()
+ # Load HF checkpoint
+ checkpoint = "microsoft/CADFusion"
+ tokenizer = AutoTokenizer.from_pretrained(checkpoint)
+ model = CADFusionModel.from_pretrained(checkpoint)

- def generate(prompt):
-     """Run CADFusion inference on user prompt"""
+ # Define inference function
+ def run_cadfusion(prompt: str):
+     inputs = tokenizer(prompt, return_tensors="pt")
      with torch.no_grad():
-         output = model.generate(
-             prompt,
-             max_new_tokens=256,
-             temperature=0.7,
-             top_p=0.9,
-         )
-     return output
+         output = model.generate(**inputs, max_new_tokens=128)
+     return tokenizer.decode(output[0], skip_special_tokens=True)

  # Gradio UI
- with gr.Blocks() as demo:
-     gr.Markdown("## 🏗️ CADFusion Demo\nEnter a CAD prompt below:")
-     inp = gr.Textbox(label="Your CAD prompt")
-     out = gr.Textbox(label="Model Output")
-
-     btn = gr.Button("Generate")
-     btn.click(fn=generate, inputs=inp, outputs=out)
+ demo = gr.Interface(
+     fn=run_cadfusion,
+     inputs="text",
+     outputs="text",
+     title="CADFusion Demo",
+     description="Run Microsoft's CADFusion model"
+ )

  if __name__ == "__main__":
      demo.launch()
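
Note: the new version drops the explicit device placement (`.to(device)`) and `.eval()` call that the old app.py had. A minimal sketch of how that could be carried over into the new tokenizer-based path, slotting in after the model is loaded, and assuming `CADFusionModel` still behaves like a standard PyTorch module (as the previous code relied on):

# Sketch (not part of this commit): restore GPU/eval handling from the old app.py.
# Assumes CADFusionModel supports the usual .to()/.eval() module API.
device = "cuda" if torch.cuda.is_available() else "cpu"
model = model.to(device)
model.eval()

def run_cadfusion(prompt: str):
    # tokenizer(...) returns a BatchEncoding, which supports .to(device)
    inputs = tokenizer(prompt, return_tensors="pt").to(device)
    with torch.no_grad():
        output = model.generate(**inputs, max_new_tokens=128)
    return tokenizer.decode(output[0], skip_special_tokens=True)

On a CPU-only Space the `.to()` call is a no-op; `.eval()` additionally disables any dropout layers during generation.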
requirements.txt CHANGED
@@ -1,4 +1,4 @@
- git+https://github.com/microsoft/CADFusion.git
  gradio
  torch
- transformers
+ transformers
+ gitpython
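
Once the Space is up, the Interface defined in app.py can also be called remotely with gradio_client. A minimal sketch, assuming a hypothetical Space id "your-username/CADFusion" (substitute the actual one):

# Query the running Space from Python via gradio_client.
# "your-username/CADFusion" is a placeholder Space id, not the real one.
from gradio_client import Client

client = Client("your-username/CADFusion")
result = client.predict(
    "A rectangular plate with four corner holes",  # example CAD prompt
    api_name="/predict",  # default endpoint name for a gr.Interface app
)
print(result)

gr.Interface registers its single endpoint as /predict by default, which is why that api_name is used here.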