# Migrate Honcho Python SDK (v1.6.0 → v2.0.0)
<!-- NOTE(review): the first two lines were unrendered "Loading..." placeholders; title reconstructed from the description below — confirm against the source skill. -->
Migrates Honcho Python SDK code from v1.6.0 to v2.0.0. Use when upgrading honcho package, fixing breaking changes after upgrade, or when errors mention AsyncHoncho, observations, Representation class, .core property, or get_config methods.
Install: `npx skill4agent add plastic-labs/honcho migrate-honcho`

Summary of breaking changes: the `AsyncHoncho` / `AsyncPeer` / `AsyncSession` classes are replaced by the `.aio` property on `Honcho` / `Peer` / `Session`; `Representation` objects are replaced by plain `str`; `get_config` / `set_config` become `get_configuration` / `set_configuration`; `chat(stream=True)` becomes `chat_stream()`; `poll_deriver_status()` and the `.core` property are removed.

# Before
from honcho import AsyncHoncho, AsyncPeer, AsyncSession
async_client = AsyncHoncho()
peer = await async_client.peer("user-123")
response = await peer.chat("query")
# After
from honcho import Honcho
client = Honcho()
peer = await client.aio.peer("user-123")
response = await peer.aio.chat("query")
# Async iteration
async for p in client.aio.peers():
    print(p.id)

# Before
from honcho import Observation, ObservationScope, AsyncObservationScope
scope = peer.observations
scope = peer.observations_of("other-peer")
rep = scope.get_representation()
# After
from honcho import Conclusion, ConclusionScope, ConclusionScopeAio
scope = peer.conclusions
scope = peer.conclusions_of("other-peer")
rep = scope.representation()  # Returns str

# Before
from honcho import Representation, ExplicitObservation, DeductiveObservation
rep: Representation = peer.working_rep()
print(rep.explicit)
print(rep.deductive)
if rep.is_empty():
    print("No observations")

# After
rep: str = peer.representation()
print(rep)  # Just a string now
if not rep:
    print("No conclusions")

# Before
config = peer.get_config()
peer.set_config({"observe_me": False})
session.get_config()
client.get_config()
# After
from honcho.api_types import PeerConfig, SessionConfiguration, WorkspaceConfiguration
config = peer.get_configuration()
peer.set_configuration(PeerConfig(observe_me=False))
session.get_configuration()
client.get_configuration()

# Before
peer.working_rep()
peer.get_context()
peer.get_sessions()
session.get_context()
session.get_summaries()
session.get_messages()
session.get_peers()
session.get_peer_config()
client.get_peers()
client.get_sessions()
client.get_workspaces()
# After
peer.representation()
peer.context()
peer.sessions()
session.context()
session.summaries()
session.messages()
session.peers()
session.get_peer_configuration()
client.peers()
client.sessions()
client.workspaces()

# Before
response = peer.chat("query", stream=True)
for chunk in response:
    print(chunk, end="")

# After
stream = peer.chat_stream("query")
for chunk in stream:
    print(chunk, end="")

# Before
from honcho_core.types import DeriverStatus
status = client.get_deriver_status()
status = client.poll_deriver_status(timeout=300.0) # Removed!
# After
from honcho.api_types import QueueStatusResponse
status = client.queue_status()
# poll_deriver_status removed - implement polling manually if needed

# Before
rep = peer.working_rep(
    include_most_derived=True,
    max_observations=50
)

# After
rep = peer.representation(
    include_most_frequent=True,
    max_conclusions=50
)

# Before
updated = client.update_message(message=msg, metadata={"key": "value"}, session="sess-id")
# After
updated = session.update_message(message=msg, metadata={"key": "value"})

# Before
card: str = peer.card() # Returns str
# After
card: list[str] | None = peer.card() # Returns list[str] | None
if card:
    print("\n".join(card))

| v1.6.0 | v2.0.0 |
|---|---|
| `AsyncHoncho` / `AsyncPeer` / `AsyncSession` | `Honcho` / `Peer` / `Session` via `.aio` |
| `Observation` | `Conclusion` |
| `ObservationScope` | `ConclusionScope` |
| `AsyncObservationScope` | `ConclusionScopeAio` |
| `peer.observations` | `peer.conclusions` |
| `peer.observations_of(...)` | `peer.conclusions_of(...)` |
| `scope.get_representation()` → `Representation` | `scope.representation()` → `str` |
| `peer.working_rep()` | `peer.representation()` |
| `include_most_derived=` | `include_most_frequent=` |
| `max_observations=` | `max_conclusions=` |
| `peer.get_config()` / `peer.set_config()` | `peer.get_configuration()` / `peer.set_configuration()` |
| `session.get_config()` | `session.get_configuration()` |
| `session.get_peer_config()` | `session.get_peer_configuration()` |
| `client.get_config()` | `client.get_configuration()` |
| `peer.get_context()` | `peer.context()` |
| `peer.get_sessions()` | `peer.sessions()` |
| `session.get_context()` | `session.context()` |
| `session.get_summaries()` | `session.summaries()` |
| `session.get_messages()` | `session.messages()` |
| `session.get_peers()` | `session.peers()` |
| `client.get_peers()` | `client.peers()` |
| `client.get_sessions()` | `client.sessions()` |
| `client.get_workspaces()` | `client.workspaces()` |
| `peer.chat(..., stream=True)` | `peer.chat_stream(...)` |
| `client.get_deriver_status()` | `client.queue_status()` |
| `client.poll_deriver_status()` | (removed) |
| `client.update_message(..., session=...)` | `session.update_message(...)` |
| `peer.card()` → `str` | `peer.card()` → `list[str] \| None` |
| `.core` property | (removed) |

<!-- NOTE(review): original table cells were lost in extraction; rows reconstructed from the before/after examples above — verify row set and order against the official migration guide. -->

# Exceptions
from honcho import (
HonchoError,
APIError,
BadRequestError,
AuthenticationError,
PermissionDeniedError,
NotFoundError,
ConflictError,
UnprocessableEntityError,
RateLimitError,
ServerError,
TimeoutError,
ConnectionError,
)

# Configuration types
from honcho.api_types import (
PeerConfig,
SessionConfiguration,
WorkspaceConfiguration,
SessionPeerConfig,
QueueStatusResponse,
PeerContextResponse,
)
# Async type hints
from honcho import HonchoAio, PeerAio, SessionAio
# Message types (note: Params is plural now)
from honcho import Message, MessageCreateParams