Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 18 additions & 2 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "opengradient-sdk",
"version": "2.0.0",
"version": "2.1.0",
"description": "Official TypeScript SDK for OpenGradient TEE LLM chat and completion",
"main": "dist/index.js",
"types": "dist/index.d.ts",
Expand Down Expand Up @@ -52,6 +52,8 @@
"testEnvironment": "node"
},
"dependencies": {
"undici": "^6.21.0",
"viem": "^2.21.0",
"x402-fetch": "^1.2.0"
}
}
40 changes: 40 additions & 0 deletions src/abi/teeRegistry.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
/**
 * Minimal ABI for the on-chain TEE Registry contract.
 *
 * Only the read-only methods needed to discover active TEE endpoints and
 * fetch their pinned TLS certificates are included. Declared `as const` so
 * the literal types are preserved for viem's typed contract reads.
 */
export const TEE_REGISTRY_ABI = [
  {
    // getActiveTEEs(teeType): returns the registration record of every
    // currently-active TEE of the requested type.
    inputs: [{ internalType: "uint8", name: "teeType", type: "uint8" }],
    name: "getActiveTEEs",
    outputs: [
      {
        components: [
          // Account that registered this TEE entry.
          { internalType: "address", name: "owner", type: "address" },
          // Address payments for this TEE are directed to — presumably the
          // x402 settlement recipient; confirm against the contract source.
          { internalType: "address", name: "paymentAddress", type: "address" },
          // Base URL clients use to reach the TEE server.
          { internalType: "string", name: "endpoint", type: "string" },
          // TEE public key — NOTE(review): exact role (attestation vs.
          // encryption) is not visible from this SDK; confirm if needed.
          { internalType: "bytes", name: "publicKey", type: "bytes" },
          // TLS certificate pinned at registration time; the SDK uses this
          // to verify the TEE endpoint's TLS identity.
          { internalType: "bytes", name: "tlsCertificate", type: "bytes" },
          // Hash of the TEE's PCR measurements — assumed attestation-related;
          // TODO confirm against the registry contract.
          { internalType: "bytes32", name: "pcrHash", type: "bytes32" },
          // Discriminator matching the `teeType` filter argument above.
          { internalType: "uint8", name: "teeType", type: "uint8" },
          // Whether the entry is currently enabled.
          { internalType: "bool", name: "enabled", type: "bool" },
          // Registration time (uint256 — presumably a Unix timestamp).
          { internalType: "uint256", name: "registeredAt", type: "uint256" },
          // Last heartbeat time (uint256 — presumably a Unix timestamp).
          { internalType: "uint256", name: "lastHeartbeatAt", type: "uint256" },
        ],
        internalType: "struct TEERegistry.TEEInfo[]",
        name: "",
        type: "tuple[]",
      },
    ],
    stateMutability: "view",
    type: "function",
  },
  {
    // isTEEActive(teeId): reports whether the TEE identified by `teeId`
    // is currently active.
    inputs: [{ internalType: "bytes32", name: "teeId", type: "bytes32" }],
    name: "isTEEActive",
    outputs: [{ internalType: "bool", name: "", type: "bool" }],
    stateMutability: "view",
    type: "function",
  },
] as const;
36 changes: 30 additions & 6 deletions src/client.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,15 @@
import { LLM } from "./llm";
import { ClientConfig } from "./types";
import {
RegistryTEEConnection,
StaticTEEConnection,
type TEEConnection,
} from "./teeConnection";
import { TEERegistry } from "./teeRegistry";
import {
DEFAULT_NETWORK_FILTER,
DEFAULT_OPENGRADIENT_LLM_SERVER_URL,
DEFAULT_OPENGRADIENT_LLM_STREAMING_SERVER_URL,
DEFAULT_OG_RPC_URL,
DEFAULT_TEE_REGISTRY_ADDRESS,
} from "./defaults";

/**
Expand All @@ -12,6 +18,11 @@ import {
* Provides access to LLM chat and completion via OpenGradient's TEE
* (Trusted Execution Environment) with x402 payment protocol.
*
* By default, the TEE endpoint is resolved from the on-chain TEE registry and
* the TLS certificate is pinned to the value stored at registration time.
* Pass `llmServerUrl` to override with a hardcoded URL (development /
* self-hosted TEE servers; TLS verification is disabled).
*
* Usage:
* const client = new Client({ privateKey: "0x..." });
* const result = await client.llm.chat({
Expand All @@ -29,14 +40,27 @@ export class Client {
: `0x${config.privateKey}`
) as `0x${string}`;

let connection: TEEConnection;
if (config.llmServerUrl) {
connection = new StaticTEEConnection(config.llmServerUrl);
} else {
const registry = new TEERegistry(
config.rpcUrl ?? DEFAULT_OG_RPC_URL,
config.teeRegistryAddress ?? DEFAULT_TEE_REGISTRY_ADDRESS,
);
connection = new RegistryTEEConnection(registry);
}

this.llm = new LLM({
privateKey,
network: config.network ?? DEFAULT_NETWORK_FILTER,
maxPaymentValue: config.maxPaymentValue,
serverUrl: config.llmServerUrl ?? DEFAULT_OPENGRADIENT_LLM_SERVER_URL,
streamingServerUrl:
config.llmStreamingServerUrl ??
DEFAULT_OPENGRADIENT_LLM_STREAMING_SERVER_URL,
connection,
});
}

/** Tear down dispatchers and any background refresh timers. */
async close(): Promise<void> {
await this.llm.close();
}
}
12 changes: 6 additions & 6 deletions src/defaults.ts
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
/**
* Default OpenGradient TEE LLM server URL.
* Default RPC URL for the chain hosting the on-chain TEE registry.
*/
export const DEFAULT_OPENGRADIENT_LLM_SERVER_URL =
"https://llmogevm.opengradient.ai";
export const DEFAULT_OG_RPC_URL = "https://ogevmdevnet.opengradient.ai";

/**
* Default OpenGradient TEE LLM streaming server URL.
* Default address of the on-chain TEERegistry contract used to discover
* verified TEE LLM endpoints and their pinned TLS certificates.
*/
export const DEFAULT_OPENGRADIENT_LLM_STREAMING_SERVER_URL =
"https://llmogevm.opengradient.ai";
export const DEFAULT_TEE_REGISTRY_ADDRESS =
"0x4e72238852f3c918f4E4e57AeC9280dDB0c80248";

/**
* Default x402 settlement network. OpenGradient settles in OPG on Base.
Expand Down
18 changes: 16 additions & 2 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,24 @@ export type {
ToolFunction,
} from "./types";

export {
TEERegistry,
TEE_TYPE_LLM_PROXY,
TEE_TYPE_VALIDATOR,
} from "./teeRegistry";
export type { TEEEndpoint } from "./teeRegistry";

export {
RegistryTEEConnection,
StaticTEEConnection,
buildPinnedAgent,
} from "./teeConnection";
export type { ActiveTEE, TEEConnection } from "./teeConnection";

export {
DEFAULT_NETWORK_FILTER,
DEFAULT_OPENGRADIENT_LLM_SERVER_URL,
DEFAULT_OPENGRADIENT_LLM_STREAMING_SERVER_URL,
DEFAULT_OG_RPC_URL,
DEFAULT_TEE_REGISTRY_ADDRESS,
DEFAULT_OG_FAUCET_URL,
DEFAULT_HUB_SIGNUP_URL,
DEFAULT_BLOCKCHAIN_EXPLORER,
Expand Down
103 changes: 74 additions & 29 deletions src/llm.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { createSigner, wrapFetchWithPayment } from "x402-fetch";
import type { Agent } from "undici";
import {
ChatParams,
CompletionParams,
Expand All @@ -8,6 +9,7 @@ import {
TextGenerationOutput,
X402SettlementMode,
} from "./types";
import type { ActiveTEE, TEEConnection } from "./teeConnection";

const X402_PLACEHOLDER_API_KEY =
"0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef";
Expand All @@ -17,8 +19,8 @@ export interface LLMConfig {
privateKey: `0x${string}`;
network: string;
maxPaymentValue?: bigint;
serverUrl: string;
streamingServerUrl: string;
/** Resolves the active TEE endpoint and TLS dispatcher. */
connection: TEEConnection;
}

/**
Expand All @@ -27,18 +29,20 @@ export interface LLMConfig {
* Provides chat and completion access to LLMs hosted in OpenGradient's TEE
* (Trusted Execution Environment) with x402 payment protocol support.
*
* Usage:
* const client = new Client({ privateKey });
* const result = await client.llm.chat({
* model: TEE_LLM.CLAUDE_3_5_HAIKU,
* messages: [{ role: "user", content: "Hello" }],
* });
* The TEE endpoint is normally resolved from the on-chain TEE registry, with
* the TLS certificate pinned to the value stored at registration time. Pass
* `llmServerUrl` on the `Client` to override with a hardcoded URL.
*/
export class LLM {
private fetchWithPayment?: typeof fetch;
private signerPromise?: Promise<unknown>;

constructor(private readonly config: LLMConfig) {}

/** Tear down dispatchers and any background refresh timers. */
async close(): Promise<void> {
await this.config.connection.close();
}

/**
* Perform a (non-chat) completion via the TEE LLM server.
*/
Expand All @@ -60,8 +64,8 @@ export class LLM {
};
if (stopSequence && stopSequence.length) payload.stop = stopSequence;

const response = await this.post(
`${trimSlash(this.config.serverUrl)}/v1/completions`,
const { response } = await this.requestWithRetry(
"/v1/completions",
payload,
x402SettlementMode,
);
Expand Down Expand Up @@ -95,8 +99,8 @@ export class LLM {
params: ChatParams,
): Promise<TextGenerationOutput> {
const payload = this.buildChatPayload(params, false);
const response = await this.post(
`${trimSlash(this.config.serverUrl)}/v1/chat/completions`,
const { response } = await this.requestWithRetry(
"/v1/chat/completions",
payload,
params.x402SettlementMode ?? X402SettlementMode.SETTLE_BATCH,
);
Expand Down Expand Up @@ -125,8 +129,8 @@ export class LLM {

private async *chatStream(params: ChatParams): AsyncIterable<StreamChunk> {
const payload = this.buildChatPayload(params, true);
const response = await this.post(
`${trimSlash(this.config.streamingServerUrl)}/v1/chat/completions`,
const { response } = await this.requestWithRetry(
"/v1/chat/completions",
payload,
params.x402SettlementMode ?? X402SettlementMode.SETTLE_BATCH,
);
Expand Down Expand Up @@ -196,28 +200,69 @@ export class LLM {
return payload;
}

private async getFetch(): Promise<typeof fetch> {
if (!this.fetchWithPayment) {
const signer = await createSigner(
private async getSigner(): Promise<unknown> {
if (!this.signerPromise) {
this.signerPromise = createSigner(
this.config.network,
this.config.privateKey,
);
this.fetchWithPayment = wrapFetchWithPayment(
fetch,
signer,
this.config.maxPaymentValue,
) as typeof fetch;
}
return this.fetchWithPayment;
return this.signerPromise;
}

private async post(
url: string,
/**
 * Build a paid fetch that injects the TEE's pinned TLS dispatcher into every
 * request (including x402 payment retries).
 *
 * @param dispatcher - undici Agent carrying the TEE's pinned TLS
 *   configuration; forwarded on every request via the undici-specific
 *   `dispatcher` request option.
 * @returns A fetch-compatible function that signs and settles x402 payment
 *   challenges with this client's signer, capped at
 *   `config.maxPaymentValue`.
 */
private async buildPaidFetch(dispatcher: Agent): Promise<typeof fetch> {
  const signer = await this.getSigner();
  // Wrap the global fetch so the pinned dispatcher rides along with every
  // call x402-fetch makes — including the retried request after a payment
  // challenge. `dispatcher` is an undici extension to RequestInit, not part
  // of the standard Fetch types, hence the `as any` casts.
  const baseFetch: typeof fetch = ((input: any, init?: any) =>
    fetch(input, { ...(init ?? {}), dispatcher } as any)) as typeof fetch;
  return wrapFetchWithPayment(
    baseFetch,
    signer as any,
    this.config.maxPaymentValue,
  ) as typeof fetch;
}

/**
* Send a request, lazily resolving the TEE endpoint. On a connection-level
* failure the TEE is re-resolved from the registry and the request is
* retried once.
*/
private async requestWithRetry(
path: string,
body: Record<string, any>,
settlementMode: X402SettlementMode,
): Promise<Response> {
const paidFetch = await this.getFetch();
): Promise<{ response: Response; tee: ActiveTEE }> {
this.config.connection.ensureRefreshLoop();
try {
return await this.sendOnce(path, body, settlementMode);
} catch (e) {
if (e instanceof OpenGradientError && e.statusCode !== undefined) {
// Server responded with a non-2xx — don't retry.
throw e;
}
try {
await this.config.connection.reconnect();
} catch (reconnectErr) {
throw new OpenGradientError(
`TEE LLM request failed and registry refresh failed: ${String(reconnectErr)}`,
);
}
return await this.sendOnce(path, body, settlementMode);
}
}

private async sendOnce(
path: string,
body: Record<string, any>,
settlementMode: X402SettlementMode,
): Promise<{ response: Response; tee: ActiveTEE }> {
const tee = await this.config.connection.ensureConnected();
const url = `${trimSlash(tee.endpoint)}${path}`;
const paidFetch = await this.buildPaidFetch(tee.dispatcher);

let response: Response;
try {
response = await paidFetch(url, {
Expand All @@ -240,7 +285,7 @@ export class LLM {
response.status,
);
}
return response;
return { response, tee };
}
}

Expand Down
Loading
Loading