# Web Scraping with Nushell
Fetches web pages, parses HTML with CSS selectors, calls REST APIs, and scrapes dynamic content. Use when extracting data from websites, querying JSON APIs, or automating browser interactions.
## Installation

npx skill4agent add knoopx/pi scraping

Requirements: `nu` (Nushell) with the `query web` plugin, plus a browser for dynamic content.
Enable the plugin with: nu -c "plugin add query web"

## HTTP requests

# Simple GET request
nu -c 'http get https://example.com'
# With headers
nu -c 'http get -H [User-Agent "My Scraper"] https://example.com'

## Parsing HTML with `query web`

# Extract text from elements
nu -c 'http get https://example.com | query web -q "h1, h2" | str trim'
# Extract attributes
nu -c 'http get https://example.com | query web -a href "a"'
# Parse tables as structured data
nu -c 'http get https://example.com/table-page | query web --as-table ["Column1" "Column2"]'

## Dynamic content (browser automation)

# Start browser
start-browser
# Navigate to page
navigate-browser --url https://example.com
# Extract data with JavaScript evaluation
evaluate-javascript --code "Array.from(document.querySelectorAll('selector')).map(e => e.textContent)"
# Screenshot for visual inspection
take-screenshot
# Query HTML fragments
query-html-elements --selector ".content"

## REST APIs (`http get` + `from json`)

# GET JSON API
nu -c 'http get https://api.example.com/data | from json'
# POST requests
nu -c 'http post https://api.example.com/submit -t application/json {key: value}'

## Authentication

# Basic auth
nu -c 'http get -u username:password https://api.example.com'
# Bearer token
nu -c 'http get -H [Authorization "Bearer YOUR_TOKEN"] https://api.example.com'
# Custom headers
nu -c 'http get -H [X-API-Key "YOUR_KEY" User-Agent "Scraper"] https://api.example.com'

## Rate limiting and parallelism

# Add delays between requests
nu -c '$urls | each { |url| http get $url; sleep 1sec }'

# Scrape multiple pages in parallel
nu -c '$urls | par-each { |url| http get $url | query web -q ".data" }'

## Scraping examples

# Extract all h1 titles
nu -c 'http get https://example.com | query web -q "h1"'
# Get all links
nu -c 'http get https://example.com | query web -a href "a"'
# Scrape product prices
nu -c 'http get https://store.example.com | query web -q ".price"'

# Scrape HN front page titles and URLs
nu -c 'http get https://news.ycombinator.com/ | query web -q ".titleline a" | get text | zip (http get https://news.ycombinator.com/ | query web -a href ".titleline a" | get href) | each { |pair| echo $"($pair.0) - ($pair.1)" }'

## API examples (`http get`)

# Get star count for a repo
nu -c 'http get https://api.github.com/repos/nushell/nushell | get stargazers_count'

# Fetch JSON and extract fields
nu -c 'http get https://api.example.com/users | from json | get -i 0.name'

# Bearer token
nu -c 'http get -H [Authorization "Bearer YOUR_TOKEN"] https://api.example.com/data'
# API key
nu -c 'http get -H [X-API-Key "YOUR_API_KEY"] https://api.example.com/data'
# Basic auth
nu -c 'http get -u username:password https://api.example.com/protected'