from mcp.server.fastmcp import FastMCP
from gradio_client import Client
import io
import sys

mcp = FastMCP("gradio-spaces")

# Cache of Gradio clients keyed by Space ID, so repeated calls reuse the same connection.
clients = {}


def get_client(space_id: str) -> Client:
    """Get or create a Gradio client for the specified space."""
    if space_id not in clients:
        clients[space_id] = Client(space_id)
    return clients[space_id]


# Register the function as an MCP tool so clients can discover and call it.
@mcp.tool()
async def generate_image(prompt: str, space_id: str = "inoculatemedia/SanaSprint") -> str:
    """Generate an image from a text prompt using a Gradio text-to-image Space.

    Args:
        prompt: Text prompt describing the image to generate
        space_id: Hugging Face Space to call (default: inoculatemedia/SanaSprint)
    """
    client = get_client(space_id)
    result = client.predict(
        prompt=prompt,
        model_size="1.6B",
        seed=0,
        randomize_seed=True,
        width=1024,
        height=1024,
        guidance_scale=4.5,
        num_inference_steps=2,
        api_name="/infer",
    )
    return result


if __name__ == "__main__":
    # Force UTF-8 stdout so tool output survives the stdio transport on all platforms.
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer, encoding="utf-8")
    mcp.run(transport="stdio")
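To exercise the server end to end, any MCP-compatible client can spawn it over stdio and call the `generate_image` tool. Below is a minimal sketch using the MCP Python SDK's stdio client; the `server.py` filename and the example prompt are assumptions, not part of the listing above.

```python
# Minimal sketch of an MCP stdio client driving the server above.
# Assumes the server code is saved as server.py (hypothetical filename).
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client


async def main() -> None:
    server_params = StdioServerParameters(command="python", args=["server.py"])
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()

            # List the tools the server registered via @mcp.tool().
            tools = await session.list_tools()
            print([tool.name for tool in tools.tools])

            # Call the image-generation tool with an example prompt.
            result = await session.call_tool(
                "generate_image", {"prompt": "a watercolor fox in the snow"}
            )
            print(result.content)


if __name__ == "__main__":
    asyncio.run(main())
```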