Loading...
Loading...
Dual skill for deploying scientific models. FastAPI provides a high-performance, asynchronous web framework for building APIs with automatic documentation. Streamlit enables rapid creation of interactive data applications and dashboards directly from Python scripts. Load when working with web APIs, model serving, REST endpoints, interactive dashboards, data visualization UIs, scientific app deployment, async web frameworks, Pydantic validation, uvicorn, or building production-ready scientific tools.
npx skill4agent add tondevrel/scientific-agent-skills fastapi-streamlit

pip install fastapi uvicorn streamlit pydantic

Key APIs: fastapi.app, pydantic.BaseModel, st.slider, st.cache_data, st.sidebar

pip install fastapi uvicorn streamlit pydantic

# FastAPI
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
# Streamlit
import streamlit as st
import requests  # To communicate with FastAPI

# main_api.py
# Minimal FastAPI model-serving endpoint: Pydantic validates the request
# body before the handler runs.
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

class ModelInput(BaseModel):
    temperature: float
    pressure: float

@app.post("/predict")
def predict(data: ModelInput):
    # Imagine a complex physical model here — this linear stand-in keeps
    # the example self-contained.
    result = data.temperature * 0.5 + data.pressure * 0.2
    return {"prediction": result}

# Run with: uvicorn main_api:app --reload

# main_ui.py
# Streamlit app: widgets -> processing -> visualization; the whole script
# reruns top-to-bottom on every widget interaction.
import streamlit as st
import pandas as pd

st.title("Scientific Data Explorer")

# 1. Widgets for input
val = st.slider("Select a threshold", 0.0, 100.0, 50.0)

# 2. Logic/Processing
df = pd.DataFrame({"x": range(100), "y": [x**2 for x in range(100)]})
filtered_df = df[df["y"] > val]

# 3. Visualization
st.line_chart(filtered_df)
st.write(f"Points above threshold: {len(filtered_df)}")

# Run with: streamlit run main_ui.py

# Key APIs referenced by this skill: BaseModel, @st.cache_data, async def,
# st.set_page_config(layout="wide"), HTTPException, def vs async def,
# .streamlit/secrets.toml

# ❌ BAD: Manual JSON parsing in FastAPI
# @app.post("/data")
# def handle_data(raw_json: dict):
#     val = raw_json.get("value")  # No validation!

# ✅ GOOD: Pydantic validation — malformed payloads are rejected with a
# 422 response before the handler body executes.
class DataPoint(BaseModel):
    value: float

@app.post("/data")
def handle_data(data: DataPoint):
    return data.value  # Guaranteed to be a float
# ❌ BAD: Loading data in every Streamlit rerun
# data = pd.read_csv("massive_data.csv")  # Re-reads every time you move a slider!

# ✅ GOOD: Caching — the CSV is read once; subsequent reruns reuse the
# cached DataFrame.
@st.cache_data
def load_massive_data():
    return pd.read_csv("massive_data.csv")

data = load_massive_data()

from functools import lru_cache
# Load a heavy model once per process and inject it into endpoints via
# FastAPI's dependency system. FIX: the original snippet used `Depends`
# without importing it.
from fastapi import Depends

@lru_cache()
def load_model():
    # Load your PyTorch or Scikit-learn model here
    return MyHeavyModel().load("weights.pt")

@app.get("/status")
def get_status(model=Depends(load_model)):
    return {"model_version": model.version}

from fastapi import BackgroundTasks
# Fire-and-forget long-running work: the endpoint responds immediately and
# the task runs after the response is sent.
def solve_pde_task(params):
    # Long FEniCS simulation
    pass

@app.post("/run-sim")
def run_simulation(params: Params, background_tasks: BackgroundTasks):
    # NOTE(review): `Params` is assumed to be a Pydantic model defined
    # elsewhere — confirm before reusing this snippet.
    background_tasks.add_task(solve_pde_task, params)
    return {"message": "Simulation started in background"}

st.sidebar.header("Settings")
mode = st.sidebar.selectbox("Model Mode", ["Fast", "Accurate"])

# Two-column layout: inputs on the left, results on the right.
col1, col2 = st.columns(2)
with col1:
    st.header("Input Parameters")
    temp = st.number_input("Temperature (K)")
with col2:
    st.header("Results Visualization")
    # Plotly/Matplotlib chart — NOTE(review): `fig` must be built earlier
    # in the script; it is not defined in this snippet.
    st.plotly_chart(fig)

# Persist results across reruns: session_state survives widget-triggered
# script re-executions.
if 'results_history' not in st.session_state:
    st.session_state.results_history = []
if st.button("Run Experiment"):
    res = run_model()
    st.session_state.results_history.append(res)
st.write(f"History length: {len(st.session_state.results_history)}")

import torch
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()

# Load weights once at startup, not per request.
# NOTE(review): torch.load unpickles arbitrary objects — only load trusted
# checkpoints (or use weights_only=True where applicable).
model = torch.load("model.pth")
model.eval()

class PredictionRequest(BaseModel):
    features: list[float]

@app.post("/v1/predict")
def get_prediction(req: PredictionRequest):
    input_tensor = torch.tensor([req.features])
    with torch.no_grad():  # inference only — skip autograd bookkeeping
        output = model(input_tensor)
    return {"class": output.argmax().item(), "confidence": output.max().item()}

import streamlit as st
# Interactive CSV cleaner: upload, inspect, drop selected columns and nulls,
# download the result.
import polars as pl

st.title("Data Cleaner")
uploaded_file = st.file_uploader("Choose a CSV file")
if uploaded_file:
    df = pl.read_csv(uploaded_file)
    st.write("Original Data Summary", df.describe())
    col_to_drop = st.multiselect("Drop columns", df.columns)
    if st.button("Clean Data"):
        df_clean = df.drop(col_to_drop).drop_nulls()
        st.dataframe(df_clean)
        st.download_button("Download Clean CSV", df_clean.write_csv(), "clean.csv")

import streamlit as st
import time

# Live dashboard: overwrite a single placeholder each tick instead of
# appending new elements on every iteration.
placeholder = st.empty()
for i in range(100):
    with placeholder.container():
        # NOTE(review): get_val() is assumed to be defined elsewhere.
        st.metric("Current Sensor Reading", f"{get_val()} units")
        st.progress(i + 1)
    time.sleep(1)

# Run with: uvicorn main:app --workers 4

# Cache non-data resources (DB connections, model handles) with
# st.cache_resource rather than st.cache_data.
@st.cache_resource
def get_database_connection():
    return create_engine("postgresql://...")

# ✅ Solution: Wrap Pydantic models in try-except if needed,
# but usually, let FastAPI handle it and customize exception_handlers.

# ✅ Solution: Use st.form to group widgets so the script
# only reruns once when the "Submit" button is clicked.
with st.form("my_form"):
    # ... inputs ...
    submitted = st.form_submit_button("Submit")

# ✅ Solution: Be explicit in Docker/Compose files about ports.
# Related HTTP clients: httpx, requests