|
import torch |
|
from diffusers import ShapEPipeline |
|
import trimesh |
|
import numpy as np |
|
|
|
def generate_3d_model(prompt, output_path="assistant_3d.obj"):
    """
    Generate a 3D model from a text prompt using Shap-E and export it in a
    Blender-compatible format.

    Parameters
    ----------
    prompt : str
        Text description of the model to generate.
    output_path : str
        Destination file; the extension (.obj, .glb, .stl, ...) selects the
        export format. Defaults to "assistant_3d.obj".

    Returns
    -------
    str
        ``output_path``. If the primary export fails, a ``.ply`` fallback
        file is written alongside instead and the original path is still
        returned.

    Raises
    ------
    Exception
        Re-raises whatever the pipeline or mesh construction raised, after
        printing diagnostics.
    """
    # Pre-initialize so the except block below can inspect it safely even
    # when the failure happens before the pipeline call (the original code
    # raised NameError here, masking the real error).
    outputs = None
    try:
        # CPU-only, float32: keeps memory predictable on machines without CUDA.
        pipe = ShapEPipeline.from_pretrained(
            "openai/shap-e",
            torch_dtype=torch.float32,
            low_cpu_mem_usage=True
        ).to("cpu")

        # Modest steps/frame_size trade quality for speed.
        # NOTE(review): assumes the pipeline output exposes `.vertices` and
        # `.faces` — confirm against the installed diffusers version (some
        # versions require output_type="mesh" and return meshes via `.images`).
        outputs = pipe(
            prompt,
            num_inference_steps=16,
            guidance_scale=7.5,
            frame_size=32,
        )

        # Detach from the autograd graph and move to host memory for trimesh.
        vertices = outputs.vertices[0].detach().cpu().numpy()
        faces = outputs.faces[0].detach().cpu().numpy()

        # process=True lets trimesh merge duplicates / drop degenerate faces.
        mesh_obj = trimesh.Trimesh(
            vertices=vertices,
            faces=faces,
            process=True
        )

        try:
            if output_path.endswith('.obj'):
                mesh_obj.export(output_path, include_normals=True)
            else:
                # trimesh infers the format (.glb, .stl, .ply, ...) from the
                # file extension, so one generic call covers every other
                # target.  (Previously unknown extensions silently exported
                # nothing while still printing the success message.)
                mesh_obj.export(output_path)
            print(f"Successfully exported 3D model to: {output_path}")
        except Exception as export_error:
            print(f"Error during export: {export_error}")

            # Fall back to PLY, a format trimesh can always write.
            backup_path = output_path.rsplit('.', 1)[0] + '.ply'
            mesh_obj.export(backup_path)
            print(f"Exported backup model to: {backup_path}")

        return output_path

    except Exception as e:
        print(f"Error during generation: {e}")
        print(f"Error type: {type(e)}")
        print(f"Full error details: {str(e)}")

        # Safe now that `outputs` is always bound (None until the pipeline
        # succeeds).
        if hasattr(outputs, 'shape'):
            print(f"Output shape: {outputs.shape}")
        raise
|
|
|
if __name__ == "__main__":
    # Prompt typos fixed ("conscise" -> "concise", "illuminus" -> "luminous",
    # "futureistuc" -> "futuristic") so the text-to-3D model receives the
    # intended words.
    prompt = (
        "a simple 3D ring with a perfect clean geometry centered by 3 lines "
        "of concise elegance the shape is luminous and futuristic"
    )
    try:
        generate_3d_model(prompt, "assistant_3d.obj")
    except Exception as e:
        print(f"Generation failed: {e}")