Python SDK
Examples
Copy, paste, deploy. Ready-to-use patterns for common scenarios.
Basic Examples
Hello World
The simplest possible deployment:
from basilica import BasilicaClient

# Deploy an inline stdlib HTTP server that answers every GET with "Hello World!".
# The embedded script must carry its own indentation — it is executed verbatim
# in the deployment, and the class/method bodies are invalid Python without it.
client = BasilicaClient()
deployment = client.deploy(
    name="hello",
    source="""
from http.server import HTTPServer, BaseHTTPRequestHandler

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'Hello World!')

HTTPServer(('', 8000), Handler).serve_forever()
""",
    port=8000,
)
print(f"Live at: {deployment.url}")
Static File Server
Serve static files with nginx:
from basilica import BasilicaClient

# Serve static files using the unprivileged nginx image, which listens on 8080.
basilica_client = BasilicaClient()
deployment = basilica_client.deploy(
    name="static",
    image="nginxinc/nginx-unprivileged:alpine",
    port=8080,
)
print(f"Nginx at: {deployment.url}")
Web Frameworks
FastAPI
import basilica

@basilica.deployment(
    name="fastapi-app",
    port=8000,
    pip_packages=["fastapi", "uvicorn"],
)
def serve():
    """Run a small FastAPI app: root greeting, parameterized read, and create."""
    import uvicorn
    from fastapi import FastAPI

    app = FastAPI(title="My API")

    @app.get("/")
    def root():
        return {"message": "Hello from FastAPI!"}

    @app.get("/items/{item_id}")
    def get_item(item_id: int, q: str = None):
        return {"item_id": item_id, "q": q}

    @app.post("/items")
    def create_item(item: dict):
        return {"created": item}

    uvicorn.run(app, host="0.0.0.0", port=8000)

deployment = serve()
print(f"API docs: {deployment.url}/docs")
Flask
import basilica

@basilica.deployment(
    name="flask-app",
    port=5000,
    pip_packages=["flask", "gunicorn"],
)
def serve():
    """Serve a minimal Flask app (home + health check) behind gunicorn."""
    import subprocess
    from flask import Flask, jsonify

    app = Flask(__name__)

    @app.route("/")
    def home():
        return jsonify({"message": "Hello from Flask!"})

    @app.route("/health")
    def health():
        return jsonify({"status": "healthy"})

    # Run with gunicorn for production.
    # NOTE(review): "app:app" requires an importable app.py module exposing
    # `app` — confirm the basilica runtime materializes one, or gunicorn
    # will not find the application.
    subprocess.run([
        "gunicorn",
        "--bind", "0.0.0.0:5000",
        "--workers", "2",
        "app:app"
    ])
deployment = serve()
Streamlit
import basilica

@basilica.deployment(
    name="streamlit-app",
    port=8501,
    pip_packages=["streamlit"],
)
def serve():
    """Write a Streamlit script to /tmp and launch it bound to all interfaces."""
    import subprocess
    import textwrap
    from pathlib import Path

    # Write the Streamlit app. The `if name:` branch body must be indented,
    # otherwise Streamlit fails with an IndentationError when it runs the file.
    app_code = textwrap.dedent("""
        import streamlit as st

        st.title("Hello Streamlit!")
        st.write("This app is running on Basilica.")

        name = st.text_input("Your name")
        if name:
            st.write(f"Hello, {name}!")

        number = st.slider("Pick a number", 0, 100, 50)
        st.write(f"You picked: {number}")
    """)
    Path("/tmp/app.py").write_text(app_code)

    subprocess.run([
        "streamlit", "run", "/tmp/app.py",
        "--server.port", "8501",
        "--server.address", "0.0.0.0",
    ])
deployment = serve()
Gradio
import basilica

@basilica.deployment(
    name="gradio-app",
    port=7860,
    pip_packages=["gradio"],
)
def serve():
    """Launch a single text-in / text-out Gradio greeting interface."""
    import gradio as gr

    def greet(name):
        return f"Hello, {name}!"

    demo = gr.Interface(
        fn=greet,
        inputs="text",
        outputs="text",
        title="Greeting App",
    )
    demo.launch(server_name="0.0.0.0", server_port=7860)
deployment = serve()
Storage Examples
Persistent Counter
from basilica import BasilicaClient
client = BasilicaClient()
deployment = client.deploy(
name="counter",
source="""
from pathlib import Path
from http.server import HTTPServer, BaseHTTPRequestHandler
COUNTER_FILE = Path('/data/count.txt')
class Handler(BaseHTTPRequestHandler):
def do_GET(self):
count = int(COUNTER_FILE.read_text()) if COUNTER_FILE.exists() else 0
count += 1
COUNTER_FILE.write_text(str(count))
self.send_response(200)
self.end_headers()
self.wfile.write(f'Visit #{count}'.encode())
HTTPServer(('', 8000), Handler).serve_forever()
""",
port=8000,
storage=True,
)
File Upload API
import basilica

uploads = basilica.Volume.from_name("uploads", create_if_missing=True)

@basilica.deployment(
    name="upload-api",
    port=8000,
    pip_packages=["fastapi", "uvicorn", "python-multipart"],
    volumes={"/uploads": uploads},
)
def serve():
    """Upload/list/download/delete API backed by the `uploads` volume."""
    from pathlib import Path

    import uvicorn
    from fastapi import FastAPI, HTTPException, UploadFile
    from fastapi.responses import FileResponse

    app = FastAPI()
    UPLOAD_DIR = Path("/uploads")

    def _safe_path(filename: str) -> Path:
        # Strip any directory components so a crafted name such as
        # "../../etc/passwd" cannot escape the upload directory.
        return UPLOAD_DIR / Path(filename).name

    @app.post("/upload")
    async def upload(file: UploadFile):
        path = _safe_path(file.filename)
        content = await file.read()
        path.write_bytes(content)
        return {"filename": path.name, "size": len(content)}

    @app.get("/files")
    def list_files():
        files = [f.name for f in UPLOAD_DIR.iterdir() if f.is_file()]
        return {"files": files}

    # Route placeholders restored to {filename}, matching the handler params.
    @app.get("/files/{filename}")
    def download(filename: str):
        path = _safe_path(filename)
        if not path.exists():
            raise HTTPException(404, "File not found")
        return FileResponse(path)

    @app.delete("/files/{filename}")
    def delete(filename: str):
        path = _safe_path(filename)
        if not path.exists():
            raise HTTPException(404, "File not found")
        path.unlink()
        return {"deleted": filename}

    uvicorn.run(app, host="0.0.0.0", port=8000)
deployment = serve()
SQLite Database
import basilica

data = basilica.Volume.from_name("app-db", create_if_missing=True)

@basilica.deployment(
    name="sqlite-api",
    port=8000,
    pip_packages=["fastapi", "uvicorn"],
    volumes={"/data": data},
)
def serve():
    """Simple items API over a SQLite database stored on the `app-db` volume."""
    import sqlite3
    from contextlib import closing

    import uvicorn
    from fastapi import FastAPI

    app = FastAPI()
    DB_PATH = "/data/app.db"

    def get_db():
        # One connection per request; the CREATE TABLE is idempotent, so
        # running it on every connection is safe.
        conn = sqlite3.connect(DB_PATH)
        conn.execute("""
            CREATE TABLE IF NOT EXISTS items (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)
        return conn

    @app.post("/items")
    def create_item(name: str):
        # closing() guarantees the connection is released even on error;
        # the original leaked one connection per request.
        with closing(get_db()) as conn:
            cursor = conn.execute("INSERT INTO items (name) VALUES (?)", (name,))
            conn.commit()
            return {"id": cursor.lastrowid, "name": name}

    @app.get("/items")
    def list_items():
        with closing(get_db()) as conn:
            items = conn.execute("SELECT id, name, created_at FROM items").fetchall()
        return [{"id": i[0], "name": i[1], "created_at": i[2]} for i in items]

    @app.delete("/items/{item_id}")
    def delete_item(item_id: int):
        with closing(get_db()) as conn:
            conn.execute("DELETE FROM items WHERE id = ?", (item_id,))
            conn.commit()
        return {"deleted": item_id}

    uvicorn.run(app, host="0.0.0.0", port=8000)
deployment = serve()
GPU Examples
PyTorch Inference
import basilica

@basilica.deployment(
    name="pytorch-api",
    image="pytorch/pytorch:2.1.0-cuda12.1-cudnn8-runtime",
    port=8000,
    gpu_count=1,
    memory="8Gi",
    pip_packages=["fastapi", "uvicorn"],
)
def serve():
    """Expose device info and a matrix-multiply endpoint via FastAPI."""
    import torch
    import uvicorn
    from fastapi import FastAPI

    app = FastAPI()
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    @app.get("/")
    def info():
        return {
            "device": str(device),
            "cuda_available": torch.cuda.is_available(),
            "cuda_version": torch.version.cuda,
        }

    @app.post("/matmul")
    def matmul(a: list, b: list):
        lhs = torch.tensor(a, dtype=torch.float32, device=device)
        rhs = torch.tensor(b, dtype=torch.float32, device=device)
        product = torch.matmul(lhs, rhs)
        return {"result": product.cpu().tolist()}

    uvicorn.run(app, host="0.0.0.0", port=8000)
deployment = serve()
Hugging Face Model
import basilica

cache = basilica.Volume.from_name("hf-cache", create_if_missing=True)

@basilica.deployment(
    name="sentiment-api",
    image="pytorch/pytorch:2.1.0-cuda12.1-cudnn8-runtime",
    port=8000,
    gpu_count=1,
    memory="8Gi",
    pip_packages=["fastapi", "uvicorn", "transformers", "accelerate"],
    volumes={"/root/.cache/huggingface": cache},
)
def serve():
    """Sentiment-analysis endpoint; model weights cached on the `hf-cache` volume."""
    import uvicorn
    from fastapi import FastAPI
    from transformers import pipeline

    app = FastAPI()

    # Load model at startup
    classifier = pipeline(
        "sentiment-analysis",
        model="distilbert-base-uncased-finetuned-sst-2-english",
        device=0,  # Use GPU
    )

    @app.post("/analyze")
    def analyze(text: str):
        prediction = classifier(text)[0]
        return {
            "text": text,
            "label": prediction["label"],
            "score": prediction["score"],
        }

    uvicorn.run(app, host="0.0.0.0", port=8000)
deployment = serve()
Text Generation
import basilica

cache = basilica.Volume.from_name("model-cache", create_if_missing=True)

@basilica.deployment(
    name="text-gen",
    image="pytorch/pytorch:2.1.0-cuda12.1-cudnn8-runtime",
    port=8000,
    gpu_count=1,
    memory="16Gi",
    pip_packages=["fastapi", "uvicorn", "transformers", "accelerate"],
    volumes={"/root/.cache/huggingface": cache},
)
def serve():
    """Text-generation endpoint around microsoft/phi-2 loaded in float16."""
    import torch
    import uvicorn
    from fastapi import FastAPI
    from transformers import AutoModelForCausalLM, AutoTokenizer

    app = FastAPI()

    model_name = "microsoft/phi-2"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,
        device_map="auto",
    )

    @app.post("/generate")
    def generate(prompt: str, max_tokens: int = 100):
        inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
        outputs = model.generate(
            **inputs,
            max_new_tokens=max_tokens,
            do_sample=True,
            temperature=0.7,
        )
        text = tokenizer.decode(outputs[0], skip_special_tokens=True)
        return {"prompt": prompt, "generated": text}

    uvicorn.run(app, host="0.0.0.0", port=8000)
deployment = serve()
Background Jobs
Long-Running Task
from basilica import BasilicaClient

# Long-running batch job: progress goes to the logs, results to persistent
# storage. The embedded script must carry its own indentation — the loop
# body is invalid Python without it.
client = BasilicaClient()
deployment = client.deploy(
    name="batch-job",
    source="""
import time
from pathlib import Path

output_file = Path('/data/results.txt')

for i in range(100):
    print(f"Processing batch {i+1}/100")
    time.sleep(1)  # Simulate work
    output_file.write_text(f"Completed batch {i+1}")

print("Job complete!")
output_file.write_text("All batches complete!")
""",
    storage=True,
    ttl_seconds=3600,  # Auto-delete after 1 hour
)

# Check logs to monitor progress
print(deployment.logs())
Scheduled Worker
import basilica

@basilica.deployment(
    name="scheduler",
    port=8000,
    pip_packages=["fastapi", "uvicorn", "apscheduler"],
)
def serve():
    """Run a 5-minute interval job and expose its recent history over HTTP."""
    from datetime import datetime

    import uvicorn
    from apscheduler.schedulers.background import BackgroundScheduler
    from fastapi import FastAPI

    app = FastAPI()
    scheduler = BackgroundScheduler()
    job_history = []  # NOTE(review): grows without bound; fine for a demo.

    def scheduled_task():
        timestamp = datetime.now().isoformat()
        job_history.append(timestamp)
        print(f"Job executed at {timestamp}")

    scheduler.add_job(scheduled_task, 'interval', minutes=5)
    scheduler.start()

    @app.get("/")
    def status():
        return {
            "running": scheduler.running,
            "jobs": len(scheduler.get_jobs()),
            "history": job_history[-10:],  # Last 10 executions
        }

    uvicorn.run(app, host="0.0.0.0", port=8000)
deployment = serve()
API Patterns
REST API with CRUD
import basilica

data = basilica.Volume.from_name("api-data", create_if_missing=True)

@basilica.deployment(
    name="crud-api",
    port=8000,
    pip_packages=["fastapi", "uvicorn", "pydantic"],
    volumes={"/data": data},
)
def serve():
    """JSON-file-backed CRUD API for `Item` records keyed by item_id."""
    import json
    from pathlib import Path

    import uvicorn
    from fastapi import FastAPI, HTTPException
    from pydantic import BaseModel

    app = FastAPI()
    DB_FILE = Path("/data/items.json")

    class Item(BaseModel):
        name: str
        price: float
        quantity: int = 0

    def load_db():
        # A missing file simply means an empty store.
        return json.loads(DB_FILE.read_text()) if DB_FILE.exists() else {}

    def save_db(store):
        DB_FILE.write_text(json.dumps(store, indent=2))

    @app.get("/items")
    def list_items():
        return load_db()

    @app.get("/items/{item_id}")
    def get_item(item_id: str):
        store = load_db()
        if item_id not in store:
            raise HTTPException(404, "Item not found")
        return store[item_id]

    @app.post("/items/{item_id}")
    def create_item(item_id: str, item: Item):
        store = load_db()
        store[item_id] = item.dict()
        save_db(store)
        return {"id": item_id, **item.dict()}

    @app.put("/items/{item_id}")
    def update_item(item_id: str, item: Item):
        store = load_db()
        if item_id not in store:
            raise HTTPException(404, "Item not found")
        store[item_id] = item.dict()
        save_db(store)
        return {"id": item_id, **item.dict()}

    @app.delete("/items/{item_id}")
    def delete_item(item_id: str):
        store = load_db()
        if item_id not in store:
            raise HTTPException(404, "Item not found")
        del store[item_id]
        save_db(store)
        return {"deleted": item_id}

    uvicorn.run(app, host="0.0.0.0", port=8000)

deployment = serve()
print(f"API docs: {deployment.url}/docs")
WebSocket Server
import basilica

@basilica.deployment(
    name="websocket-server",
    port=8000,
    pip_packages=["fastapi", "uvicorn", "websockets"],
)
def serve():
    """Echo-broadcast WebSocket server with a connection-count endpoint."""
    import uvicorn
    from fastapi import FastAPI, WebSocket, WebSocketDisconnect

    app = FastAPI()
    connections = []

    @app.websocket("/ws")
    async def websocket_endpoint(websocket: WebSocket):
        await websocket.accept()
        connections.append(websocket)
        try:
            while True:
                data = await websocket.receive_text()
                # Broadcast to all connections
                for conn in connections:
                    await conn.send_text(f"Echo: {data}")
        except WebSocketDisconnect:
            # A client hanging up is expected; the original bare `except:`
            # also swallowed CancelledError and genuine bugs.
            pass
        finally:
            # Guarded removal: another path may already have dropped us.
            if websocket in connections:
                connections.remove(websocket)

    @app.get("/")
    def info():
        return {"connections": len(connections)}

    uvicorn.run(app, host="0.0.0.0", port=8000)

deployment = serve()