diff --git a/src/LLMExecutor.ts b/src/LLMExecutor.ts
new file mode 100644
index 0000000..76f0c8e
--- /dev/null
+++ b/src/LLMExecutor.ts
@@ -0,0 +1,163 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ */
+
+import { LLMProviderAdapter } from './LLMProviderAdapter';
+import { OpenAIAdapter } from './providers/OpenAIAdapter';
+
+// Providers the executor knows how to route to; only 'openai' has an adapter below.
+export type LLMProvider = 'openai' | 'anthropic' | 'ollama';
+
+export interface LLMConfig {
+    provider: LLMProvider;
+    model: string;
+    apiKey?: string;
+    baseUrl?: string;
+    temperature?: number;
+}
+
+export interface ExecutorInput {
+    contractText: string;
+    state: Record<string, unknown>;
+    request: Record<string, unknown>;
+    modelDefinitions?: string;
+}
+
+export interface InitExecutorInput {
+    contractText: string;
+    request: Record<string, unknown>;
+    modelDefinitions?: string;
+}
+
+export interface ExecutorOutput {
+    response: Record<string, unknown>;
+    state: Record<string, unknown>;
+    emit: Record<string, unknown>[];
+}
+
+type InitOutput = {
+    state: Record<string, unknown>;
+};
+
+export class LLMExecutorError extends Error {
+    readonly code: 'INVALID_JSON' | 'INVALID_SHAPE' | 'PROVIDER_ERROR';
+
+    constructor(code: LLMExecutorError['code'], message: string) {
+        super(message);
+        this.name = 'LLMExecutorError';
+        this.code = code;
+    }
+}
+
+// Narrowing guard: plain object (not null, not an array).
+function isRecord(value: unknown): value is Record<string, unknown> {
+    return typeof value === 'object' && value !== null && !Array.isArray(value);
+}
+
+export class LLMExecutor {
+    private readonly adapterOverride?: LLMProviderAdapter;
+
+    constructor(adapter?: LLMProviderAdapter) {
+        this.adapterOverride = adapter;
+    }
+
+    private getAdapter(config: LLMConfig): LLMProviderAdapter {
+        if (this.adapterOverride) {
+            return this.adapterOverride;
+        }
+        if (config.provider === 'openai') {
+            return new OpenAIAdapter();
+        }
+        throw new LLMExecutorError('PROVIDER_ERROR', `Unsupported provider: ${config.provider}`);
+    }
+
+    private buildTriggerPrompt(input: ExecutorInput): string {
+        return [
+            'You are executing contract logic.',
+            'Return only valid JSON with this exact shape:',
+            '{"response": {...}, "state": {...}, "emit": [{...}]}',
+            'No markdown. No prose.',
+            `Contract:\n${input.contractText}`,
+            input.modelDefinitions ? `Model Definitions:\n${input.modelDefinitions}` : '',
+            `Current State JSON:\n${JSON.stringify(input.state)}`,
+            `Request JSON:\n${JSON.stringify(input.request)}`
+        ].filter(Boolean).join('\n\n');
+    }
+
+    private buildInitPrompt(input: InitExecutorInput): string {
+        return [
+            'You are initializing contract state.',
+            'Return only valid JSON with this exact shape:',
+            '{"state": {...}}',
+            'No markdown. No prose.',
+            `Contract:\n${input.contractText}`,
+            input.modelDefinitions ? `Model Definitions:\n${input.modelDefinitions}` : '',
+            `Init Request JSON:\n${JSON.stringify(input.request)}`
+        ].filter(Boolean).join('\n\n');
+    }
+
+    parseModelJson(text: string): unknown {
+        try {
+            return JSON.parse(text);
+        } catch (err) {
+            const message = err instanceof Error ? err.message : 'Unknown JSON parse error';
+            throw new LLMExecutorError('INVALID_JSON', `Model output is not valid JSON: ${message}`);
+        }
+    }
+
+    assertTriggerShape(value: unknown): ExecutorOutput {
+        if (!isRecord(value)) {
+            throw new LLMExecutorError('INVALID_SHAPE', 'Trigger output must be an object');
+        }
+        if (!isRecord(value.response)) {
+            throw new LLMExecutorError('INVALID_SHAPE', 'Trigger output.response must be an object');
+        }
+        if (!isRecord(value.state)) {
+            throw new LLMExecutorError('INVALID_SHAPE', 'Trigger output.state must be an object');
+        }
+        if (!Array.isArray(value.emit) || !value.emit.every((x) => isRecord(x))) {
+            throw new LLMExecutorError('INVALID_SHAPE', 'Trigger output.emit must be an array of objects');
+        }
+
+        return {
+            response: value.response,
+            state: value.state,
+            emit: value.emit
+        };
+    }
+
+    assertInitShape(value: unknown): InitOutput {
+        if (!isRecord(value)) {
+            throw new LLMExecutorError('INVALID_SHAPE', 'Init output must be an object');
+        }
+        if (!isRecord(value.state)) {
+            throw new LLMExecutorError('INVALID_SHAPE', 'Init output.state must be an object');
+        }
+
+        return {
+            state: value.state
+        };
+    }
+
+    async trigger(input?: ExecutorInput, config?: LLMConfig): Promise<ExecutorOutput> {
+        if (!input || !config) {
+            throw new LLMExecutorError('PROVIDER_ERROR', 'Missing input/config for trigger');
+        }
+
+        const adapter = this.getAdapter(config);
+        const prompt = this.buildTriggerPrompt(input);
+        const raw = await adapter.completeJson(prompt, config);
+        const parsed = this.parseModelJson(raw);
+        return this.assertTriggerShape(parsed);
+    }
+
+    async init(input?: InitExecutorInput, config?: LLMConfig): Promise<InitOutput> {
+        if (!input || !config) {
+            throw new LLMExecutorError('PROVIDER_ERROR', 'Missing input/config for init');
+        }
+
+        const adapter = this.getAdapter(config);
+        const prompt = this.buildInitPrompt(input);
+        const raw = await adapter.completeJson(prompt, config);
+        const parsed = this.parseModelJson(raw);
+        return this.assertInitShape(parsed);
+    }
+}
\ No newline at end of file
diff --git a/src/LLMProviderAdapter.ts b/src/LLMProviderAdapter.ts
new file mode 100644
index 0000000..53c0d49
--- /dev/null
+++ b/src/LLMProviderAdapter.ts
@@ -0,0 +1,9 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ */
+
+import { LLMConfig } from './LLMExecutor';
+
+export interface LLMProviderAdapter {
+    completeJson(prompt: string, config: LLMConfig): Promise<string>;
+}
\ No newline at end of file
diff --git a/src/TemplateArchiveProcessor.ts b/src/TemplateArchiveProcessor.ts
index 3fd8f56..4d4b9ec 100644
--- a/src/TemplateArchiveProcessor.ts
+++ b/src/TemplateArchiveProcessor.ts
@@ -15,6 +15,7 @@
 /* eslint-disable @typescript-eslint/no-explicit-any */
 
 import { Template } from '@accordproject/cicero-core';
+import { LLMExecutor, LLMConfig } from './LLMExecutor';
 import { TemplateMarkInterpreter } from './TemplateMarkInterpreter';
 import { TemplateMarkTransformer } from '@accordproject/markdown-template';
 import { transform } from '@accordproject/markdown-transform';
@@ -32,11 +33,11 @@ export type TriggerResponse<Response, State, Event> = 
{
     result: Response;
     state: State;
     events: Event[];
-}
+};
 
 export type InitResponse<State> = {
     state: State;
-}
+};
 
 /**
  * A template archive processor: can draft content using the
@@ -62,123 +63,142 @@ export class TemplateArchiveProcessor {
      * @returns {Promise<any>} the drafted content
      */
    async draft(data: any, format: string, options: any, currentTime?: string): Promise<any> {
-        // Setup
         const metadata = this.template.getMetadata();
         const templateKind = metadata.getTemplateType() !== 0 ? 'clause' : 'contract';
 
-        // Get the data
         const modelManager = this.template.getModelManager();
         const engine = new TemplateMarkInterpreter(modelManager, {});
         const templateMarkTransformer = new TemplateMarkTransformer();
         const templateMarkDom = templateMarkTransformer.fromMarkdownTemplate(
-            { content: this.template.getTemplate() }, modelManager, templateKind, {options});
+            { content: this.template.getTemplate() }, modelManager, templateKind, { options }
+        );
         const now = currentTime ? currentTime : new Date().toISOString();
-        // console.log(JSON.stringify(templateMarkDom, null, 2));
         const ciceroMark = await engine.generate(templateMarkDom, data, { now });
-        // console.log(JSON.stringify(ciceroMark));
-        const result = transform(ciceroMark.toJSON(), 'ciceromark', ['ciceromark_unquoted', format], null, options);
-        // console.log(result);
-        return result;
+        return transform(ciceroMark.toJSON(), 'ciceromark', ['ciceromark_unquoted', format], null, options);
+    }
+
+    // Extracts the contract logic source so it can be embedded in an LLM prompt.
+    private getContractTextForLLM(): string {
+        const scriptManager = this.template.getScriptManager();
+        const logicScript = scriptManager.getScript('logic/logic.ts');
+        return logicScript ? 
logicScript.getContents() : ''; } - /** - * Trigger the logic of a template - * @param {object} request - the request to send to the template logic - * @param {object} state - the current state of the template - * @param {[string]} currentTime - the current time, defaults to now - * @param {[number]} utcOffset - the UTC offer, defaults to zero - * @returns {Promise} the response and any events - */ - async trigger(data: any, request: any, state?: any, currentTime?: string, utcOffset?: number): Promise { + private getModelDefinitionsForLLM(): string { + const modelManager = this.template.getModelManager(); + return modelManager.getModels().map((m: { getDefinitions: () => string }) => m.getDefinitions()).join('\n'); +} + + async trigger( + data: any, + request: any, + state?: any, + currentTime?: string, + utcOffset?: number, + llmConfig?: LLMConfig + ): Promise { const logicManager = this.template.getLogicManager(); - if(logicManager.getLanguage() === 'typescript') { - const compiledCode:Record = {}; - const tsFiles:Array