Loading...
Loading...
Implementing providers for Beluga AI v2 registries. Use when creating LLM, embedding, vectorstore, voice, or any other provider.
npx skill4agent add lookatitude/beluga-ai provider-implementation

Key patterns: register the provider in `init()` via `Register()`, map provider errors to `core.Error`, and add a compile-time interface check (`var _ Interface = (*Impl)(nil)`).

Package layout for `llm/providers/openai/`:
├── openai.go # Implementation + New() + init()
├── stream.go # Streaming
├── errors.go # Error mapping
├── openai_test.go # Tests
└── testdata/ # Recorded HTTP responses

Compile-time interface check:

var _ llm.ChatModel = (*Model)(nil)
// init wires the OpenAI provider into the llm registry under the name
// "openai", so the framework can construct it from configuration alone.
func init() {
	factory := func(cfg llm.ProviderConfig) (llm.ChatModel, error) {
		return New(cfg)
	}
	llm.Register("openai", factory)
}
// New builds an OpenAI-backed chat model from cfg.
//
// It returns a core.Error with code core.ErrAuth when cfg.APIKey is
// empty; otherwise it wires an HTTP client from the key and optional
// base URL and records the configured model name.
func New(cfg llm.ProviderConfig) (*Model, error) {
	if cfg.APIKey == "" {
		return nil, &core.Error{
			Op:      "openai.new",
			Code:    core.ErrAuth,
			Message: "API key required",
		}
	}
	client := newClient(cfg.APIKey, cfg.BaseURL)
	return &Model{client: client, model: cfg.Model}, nil
}
// Stream returns an iterator over streamed response chunks for msgs.
// The returned iter.Seq2 yields (chunk, error) pairs to the caller's
// range loop. The body shown here is a placeholder; per the package
// layout above, the real streaming logic lives in stream.go.
// NOTE(review): ctx and opts are unused in this stub — confirm the full
// implementation honors cancellation and applies the generate options.
func (m *Model) Stream(ctx context.Context, msgs []schema.Message, opts ...llm.GenerateOption) iter.Seq2[schema.StreamChunk, error] {
return func(yield func(schema.StreamChunk, error) bool) { /* stream implementation */ }
}

// Map the provider's HTTP status codes onto portable core error codes so
// callers can branch on core.Err* without knowing which provider failed.
// NOTE(review): apiErr and code are declared in an enclosing error-mapping
// function not visible here (errors.go per the layout above); statuses
// outside these cases presumably leave code at its prior value — confirm.
switch apiErr.StatusCode {
case 401: code = core.ErrAuth
case 429: code = core.ErrRateLimit
case 408, 504: code = core.ErrTimeout
case 400: code = core.ErrInvalidInput
}

For more detail, see docs/providers.md.