TypeScript API Reference
Complete type definitions for all public APIs. All types ship with the package itself — no separate `@types/` package is needed.
Core types
Ts
1// LLM provider configuration
2type LocalLLMConfig = {
3 provider: "ollama" | "lmstudio" | "openai" | "webhook" | "custom" | "a2a";
4 baseUrl: string;
5 model: string;
6 apiKey?: string;
7 temperature?: number;
8 maxTokens?: number;
9 timeoutMs?: number;
10};
11
12// A message in the conversation thread
13type Message = {
14 id: string;
15 role: "user" | "assistant" | "system";
16 content: string;
17 response?: WireAIResponse; // present on assistant messages
18 timestamp: number;
19};
20
21// LLM response: discriminated union on 'action'
22type WireAIRenderResponse = {
23 action: "render";
24 component: string;
25 props: Record<string, unknown>;
26 message?: string; // optional context shown before the component
27};
28
29type WireAIAskResponse = {
30 action: "ask";
31 message: string;
32};
33
type WireAIResponse = WireAIRenderResponse | WireAIAskResponse;

Component types
Ts
import type { z } from "zod";

// Register a custom component
type WireAIComponent<T extends z.ZodTypeAny = z.ZodTypeAny> = {
  name: string;
  description: string;
  component: React.ComponentType<z.infer<T> & InjectedProps>;
  propsSchema: T;
};

// Callbacks injected by ComponentRenderer
type InjectedProps = {
  onSubmit?: (value: unknown) => void;
  onSelect?: (value: unknown) => void;
  onConfirm?: (payload?: unknown) => void;
  onDeny?: () => void;
  onPress?: (label: string) => void;
  onContinue?: () => void;
  onCancel?: () => void;
};

type CallbackOverrides = Partial<InjectedProps>;

// The component registry (Map<name, entry>)
type ComponentRegistry = Map<string, RegistryEntry>;

type RegistryEntry = {
  name: string;
  description: string;
  component: React.ComponentType<Record<string, unknown>>;
  propsSchema: z.ZodTypeAny;
};

Hook return types
Ts
// useWireAIThread
type UseWireAIThreadResult = {
  messages: Message[];
  isLoading: boolean;
  error: string | null;
  sendMessage: (text: string, options?: SendMessageOptions) => void;
  reset: () => void;
  abort: () => void;
};

type SendMessageOptions = {
  interruptLoading?: boolean;
};

// useLLMConfigStorage
type StorageBackend = {
  getItem(key: string): Promise<string | null>;
  setItem(key: string, value: string): Promise<void>;
  deleteItem(key: string): Promise<void>;
};

type UseLLMConfigStorageResult = {
  config: LocalLLMConfig;
  isLoaded: boolean;
  saveConfig: (config: LocalLLMConfig) => Promise<void>;
  clearConfig: () => Promise<void>;
};

A2A types
Ts
// A2A protocol types (exported from wireai-rn)
type A2ATaskState =
  | "SUBMITTED" | "WORKING" | "COMPLETED" | "FAILED"
  | "CANCELED" | "INPUT_REQUIRED" | "REJECTED"
  // v0.3 lowercase variants
  | "submitted" | "working" | "completed" | "failed"
  | "canceled" | "input-required" | "rejected";

type A2APart = {
  text?: string;
  data?: Record<string, unknown>;
  mimeType?: string;
  url?: string;
  raw?: string;
  content?: string; // v0.3 compat
};

type A2AMessage = {
  role: "user" | "agent" | "assistant";
  parts: A2APart[];
};

type A2ATask = {
  id: string;
  contextId?: string;
  status: { state: A2ATaskState; message?: string };
  messages?: A2AMessage[];
  artifacts?: { parts: A2APart[] }[];
};

// buildAgentCard info
type AgentCardInfo = {
  name: string;
  url: string;
  version?: string;
  description?: string;
};

BaseAdapter interface
Implement this to create a custom LLM adapter:
Ts
interface BaseAdapter {
  ping(): Promise<boolean>;
  chat(
    messages: Pick<Message, "role" | "content">[],
    signal?: AbortSignal
  ): Promise<string>;
}

// Usage:
import type { BaseAdapter } from "wireai-rn";

class MyAdapter implements BaseAdapter {
  async ping(): Promise<boolean> { return true; }
  async chat(
    messages: Pick<Message, "role" | "content">[],
    signal?: AbortSignal
  ): Promise<string> {
    // return wireai-rn JSON string
    return JSON.stringify({ action: "ask", message: "Hello" });
  }
}

Zod schemas (runtime validation)
Ts
import {
  WireAIResponseSchema,
  WireAIRenderResponseSchema,
  WireAIAskResponseSchema,
  WIREAI_JSON_SCHEMA,
} from "wireai-rn";

// Parse a raw LLM response string
const result = WireAIResponseSchema.safeParse(JSON.parse(rawString));
if (result.success) {
  if (result.data.action === "render") {
    // result.data.component, result.data.props
  }
}

// WIREAI_JSON_SCHEMA is a plain JSON Schema object,
// useful for passing to OpenAI's json_schema response_format

Full export list
All exports are available from the main wireai-rn package. Components are also available from wireai-rn/components.

Ts
// Provider
import { WireAIProvider } from "wireai-rn";

// Hooks
import {
  useWireAIThread, useWireAIInput, useWireAIAction, useLLMConfigStorage
} from "wireai-rn";

// Renderer
import { ComponentRenderer, ComponentErrorBoundary, FallbackMessage, LoadingState } from "wireai-rn";

// Registry
import { createComponentRegistry, useWireAIContext } from "wireai-rn";

// Adapters
import { OllamaAdapter, LMStudioAdapter, OpenAIAdapter, WebhookAdapter, A2AAdapter, createAdapter } from "wireai-rn";

// Schema utilities
import { validateLLMResponse, buildSystemPrompt, buildAgentCard } from "wireai-rn";

// Components
import { defaultComponents } from "wireai-rn/components";

// UI primitives
import { Btn, InputField } from "wireai-rn";

// Design tokens
import { colors, darkColors, spacing, radii, textStyles, iconSizes } from "wireai-rn";

// Types
import type {
  LocalLLMConfig, WireAIResponse, WireAIRenderResponse, WireAIAskResponse,
  WireAIComponent, Message, MessageRole, InjectedProps, CallbackOverrides,
  StorageBackend, UseLLMConfigStorageResult, UseWireAIThreadResult,
  SendMessageOptions, ComponentRegistry, RegistryEntry, BaseAdapter,
  AgentCardInfo,
} from "wireai-rn";