
POST /api/agents/companionAgent/stream

Generate a real-time streaming response from the companion agent using Server-Sent Events (SSE).

Request Body

The request body is identical to that of the generate endpoint.
messages (Array, required)
Conversation history array containing user and assistant messages.

runtimeContext.metadata.personality (string, default: "friend")
Personality mode for the response. Options: "guru", "wanderer", "friend", "philosopher".

threadId (string)
Unique identifier for the conversation thread. Recommended for conversation continuity.

resourceId (string)
User identifier for conversation persistence. Recommended for user-specific conversations.
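
For reference, a complete request body combining these fields might look like the following (all values are illustrative):

{
  "messages": [
    { "role": "user", "content": "What's the weather in Tokyo?" }
  ],
  "runtimeContext": {
    "metadata": {
      "personality": "friend"
    }
  },
  "threadId": "thread-001",
  "resourceId": "user-123"
}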

Streaming Response Format

Responses are sent as Server-Sent Events with the following event types:

Text Content Events

// Incremental text chunks
data: {"type":"text","text":"Hello, ","runId":"uuid"}
data: {"type":"text","text":"world!","runId":"uuid"}

Tool Call Events

// Tool call initiation
data: {"type":"tool-call-input-streaming-start","payload":{"toolCallId":"call_xxx","toolName":"weatherTool"},"runId":"uuid"}

// Parameter streaming
data: {"type":"tool-call-delta","payload":{"argsTextDelta":"{\"location\":\"","toolCallId":"call_xxx","toolName":"weatherTool"},"runId":"uuid"}
data: {"type":"tool-call-delta","payload":{"argsTextDelta":"Tokyo\"}","toolCallId":"call_xxx","toolName":"weatherTool"},"runId":"uuid"}

// Tool call completion
data: {"type":"tool-call-input-streaming-end","payload":{"toolCallId":"call_xxx"},"runId":"uuid"}
data: {"type":"tool-call","payload":{"toolCallId":"call_xxx","toolName":"weatherTool","args":{"location":"Tokyo"},"providerMetadata":{}},"runId":"uuid"}

Tool Result Events

// Tool execution results
data: {"type":"tool-result","payload":{"toolCallId":"call_xxx","toolName":"weatherTool","result":{"temperature":22,"feelsLike":25,"humidity":65,"windSpeed":5,"windGust":8,"conditions":"Partly cloudy","location":"Tokyo"}},"runId":"uuid"}

Completion Events

// Step and run completion
data: {"type":"step-finish","payload":{"stepType":"initial","finishReason":"stop"},"runId":"uuid"}
data: {"type":"run-finish","payload":{"finishReason":"stop"},"runId":"uuid"}

Error Events

// Error handling
data: {"type":"error","error":{"message":"Tool execution failed","code":"TOOL_ERROR"},"runId":"uuid"}

Stream Event Types

Event Type                          Description
start                               Run initialization
step-start                          Beginning of processing step
text                                Incremental text content
tool-call-input-streaming-start     Tool call parameter building start
tool-call-delta                     Incremental tool call parameter data
tool-call-input-streaming-end       Tool call parameter building complete
tool-call                           Complete tool call with parameters
tool-result                         Tool execution result
step-finish                         Processing step completion
run-finish                          Entire run completion
error                               Error occurred during processing

JavaScript Stream Handler

function handleStreamResponse(response) {
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  let buffer = '';

  return new Promise((resolve, reject) => {
    const processStream = async () => {
      try {
        while (true) {
          const { done, value } = await reader.read();
          if (done) break;

          // Decode incrementally so multi-byte characters split across reads survive
          buffer += decoder.decode(value, { stream: true });
          const lines = buffer.split('\n');
          // Keep the last (possibly partial) line for the next read
          buffer = lines.pop();

          for (const line of lines) {
            if (line.startsWith('data: ')) {
              try {
                const data = JSON.parse(line.slice(6));
                handleStreamChunk(data);
              } catch (e) {
                console.warn('Failed to parse SSE chunk:', e);
              }
            }
          }
        }
        resolve();
      } catch (error) {
        reject(error);
      }
    };

    processStream();
  });
}

function handleStreamChunk(chunk) {
  switch (chunk.type) {
    case 'text':
      appendToUI(chunk.text);
      break;
    case 'tool-call':
      showToolCall(chunk.payload.toolName, chunk.payload.args);
      break;
    case 'tool-result':
      showToolResult(chunk.payload.toolName, chunk.payload.result);
      break;
    case 'error':
      handleError(chunk.error);
      break;
    case 'run-finish':
      finalizeResponse();
      break;
  }
}

Complete Example

class StreamingChat {
  constructor(baseUrl = 'https://oyester.metaphy.live') {
    this.baseUrl = baseUrl;
    this.currentThread = null;
  }

  async streamMessage(message, personality = 'friend', options = {}) {
    // Reuse the existing thread when one is set so the conversation stays continuous
    this.currentThread = this.currentThread || { id: `thread-${Date.now()}` };

    const requestBody = {
      messages: [{ role: 'user', content: message }],
      runtimeContext: {
        metadata: {
          personality,
          userId: options.userId || 'anonymous'
        }
      },
      threadId: this.currentThread.id,
      resourceId: options.userId || 'anonymous'
    };

    const response = await fetch(`${this.baseUrl}/api/agents/companionAgent/stream`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(requestBody)
    });

    if (!response.ok) {
      throw new Error(`API request failed: ${response.status}`);
    }

    return this.processStream(response);
  }

  async processStream(response) {
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    // Single state object so mutations made in handleChunk persist across chunks
    const state = { fullText: '', toolCalls: [], toolResults: [] };
    let buffer = '';

    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;

        // Decode incrementally so multi-byte characters split across reads survive
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split('\n');
        // Keep the last (possibly partial) line for the next read
        buffer = lines.pop();

        for (const line of lines) {
          if (line.startsWith('data: ')) {
            try {
              const data = JSON.parse(line.slice(6));
              await this.handleChunk(data, state);
            } catch (e) {
              console.warn('Failed to parse SSE chunk:', e);
            }
          }
        }
      }
    } catch (error) {
      console.error('Stream processing error:', error);
      throw error;
    } finally {
      reader.releaseLock();
    }

    return state;
  }

  async handleChunk(chunk, state) {
    switch (chunk.type) {
      case 'text':
        state.fullText += chunk.text;
        this.onText?.(chunk.text);
        break;

      case 'tool-call':
        state.toolCalls.push(chunk.payload);
        this.onToolCall?.(chunk.payload);
        break;

      case 'tool-result':
        state.toolResults.push(chunk.payload);
        this.onToolResult?.(chunk.payload);
        break;

      case 'error':
        this.onError?.(chunk.error);
        break;

      case 'run-finish':
        this.onFinish?.(state);
        break;
    }
  }
}

// Usage
const chat = new StreamingChat();
const result = await chat.streamMessage("What's the weather in Kyoto?", "friend", {
  userId: "user123"
});
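
// Optional: attach callback hooks before streaming to receive incremental updates.
// onText, onToolCall, onToolResult, onError and onFinish are the hooks invoked from
// handleChunk above; the handlers shown here are only illustrative.
const liveChat = new StreamingChat();
liveChat.onText = (text) => console.log('text chunk:', text);
liveChat.onToolCall = (call) => console.log('tool call:', call.toolName, call.args);
liveChat.onFinish = (state) => console.log('finished with', state.toolCalls.length, 'tool call(s)');
await liveChat.streamMessage("What's the weather in Kyoto?", 'friend', { userId: 'user123' });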

cURL Example

curl -N -X POST https://oyester.metaphy.live/api/agents/companionAgent/stream \
  -H "Content-Type: application/json" \
  -d '{
    "messages": [
      {
        "role": "user",
        "content": "Tell me about meditation practices"
      }
    ],
    "runtimeContext": {
      "metadata": {
        "personality": "guru"
      }
    },
    "threadId": "meditation-discussion-001",
    "resourceId": "user-123"
  }'

Error Handling

Stream errors are sent as SSE events of type "error" (see Error Events above):

// Handle error events received on the stream
function handleStreamError(error) {
  console.error(`Stream error (${error.code}): ${error.message}`);
  // Implement retry logic or fallback behavior
}

Best Practices

Connection Management: Always close stream readers and release their locks when a response is no longer needed to avoid resource leaks.
Error Recovery: Implement exponential backoff when reconnecting the stream after network errors (see the sketch below).
UI Updates: Use the streaming events to drive real-time UI updates for a better user experience.
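
A minimal retry sketch building on the StreamingChat class from the complete example above; the attempt count and delay values are illustrative, not part of the API:

// Retry a streamed message with exponential backoff on failure.
// maxAttempts and the initial delay are illustrative defaults.
async function streamWithRetry(chat, message, personality = 'friend', options = {}, maxAttempts = 3) {
  let delayMs = 1000;

  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      return await chat.streamMessage(message, personality, options);
    } catch (error) {
      if (attempt === maxAttempts) throw error;
      console.warn(`Stream attempt ${attempt} failed, retrying in ${delayMs} ms:`, error);
      await new Promise((resolve) => setTimeout(resolve, delayMs));
      delayMs *= 2; // double the wait before the next attempt
    }
  }
}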