Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
32 changes: 32 additions & 0 deletions .github/workflows/app-bot-tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# CI: run the Node bot test suite whenever the app bot code or its
# dependency manifests change. Mirrors bot-python-tests.yml.
name: app-bot-tests

on:
  pull_request:
    paths:
      - "app/bot/**"
      - "app/package.json"
      - "app/package-lock.json"
      # Re-run when this workflow definition itself changes,
      # consistent with bot-python-tests.yml.
      - ".github/workflows/app-bot-tests.yml"
  push:
    branches:
      - main
    paths:
      - "app/bot/**"
      - "app/package.json"
      - "app/package-lock.json"
      - ".github/workflows/app-bot-tests.yml"

jobs:
  test-bot-start-message:
    runs-on: ubuntu-latest
    defaults:
      run:
        # All run steps execute from the app/ package root.
        working-directory: app
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-node@v4
        with:
          node-version: 22
          cache: npm
          # Cache key path is relative to the repo root, not working-directory.
          cache-dependency-path: app/package-lock.json
      - run: npm ci
      - run: npm run test:bot

32 changes: 32 additions & 0 deletions .github/workflows/bot-python-tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# CI: run the Python bot test suite when the bot/ package or this
# workflow definition changes.
name: bot-python-tests

on:
  push:
    branches:
      - main
    paths:
      - "bot/**"
      - ".github/workflows/bot-python-tests.yml"
  pull_request:
    paths:
      - "bot/**"
      - ".github/workflows/bot-python-tests.yml"

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.11"
          cache: pip
          # Invalidate the pip cache when the pinned requirements change.
          cache-dependency-path: bot/requirements.txt
      - name: Install deps
        run: |
          python -m pip install --upgrade pip
          pip install -r bot/requirements.txt
          pip install pytest
      - name: Run tests
        run: python -m pytest -q bot/tests

13 changes: 13 additions & 0 deletions app/ai/instructions.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import type { AiMode } from "./openai.js";

/** Analyst preamble prepended to the user input in "token_info" mode. */
export const TOKEN_INFO_INPUT_PREFIX =
  "You are a blockchain and token analyst. Answer clearly and briefly.\n\n";

/**
 * Resolve the input prefix for a given AI mode.
 *
 * Only the "token_info" mode carries a preamble; every other mode sends
 * the user input through unmodified.
 */
export function getInputPrefixForMode(mode: AiMode): string {
  if (mode === "token_info") {
    return TOKEN_INFO_INPUT_PREFIX;
  }
  return "";
}

/** Instruction passed to AI for Telegram bot messages (HTML replies must fit Telegram limits). */
export const TELEGRAM_BOT_LENGTH_INSTRUCTION =
  "Please give an answer in less than 4096 chars. If user asks for a long message or a message with more than 4096 chars add a sentence that full responses are available only in TMA and your bot you can give just a short answer that follows.";

86 changes: 67 additions & 19 deletions app/ai/openai.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import OpenAI from "openai";
import { getInputPrefixForMode } from "./instructions.js";

export type AiMode = "chat" | "token_info";

Expand Down Expand Up @@ -65,9 +66,7 @@ export async function callOpenAiChat(
}

const prefix =
mode === "token_info"
? "You are a blockchain and token analyst. Answer clearly and briefly.\n\n"
: "";
getInputPrefixForMode(mode);

try {
const response = await client.responses.create({
Expand Down Expand Up @@ -100,7 +99,11 @@ export async function callOpenAiChatStream(
mode: AiMode,
params: AiRequestBase,
onDelta: (text: string) => void | Promise<void>,
opts?: { isCancelled?: () => boolean; getAbortSignal?: () => Promise<boolean> },
opts?: {
isCancelled?: () => boolean;
getAbortSignal?: () => Promise<boolean>;
abortSignal?: AbortSignal;
},
): Promise<AiResponseBase> {
if (!client) {
return {
Expand All @@ -122,41 +125,74 @@ export async function callOpenAiChatStream(
}

const prefix =
mode === "token_info"
? "You are a blockchain and token analyst. Answer clearly and briefly.\n\n"
: "";
getInputPrefixForMode(mode);
let onAbort: (() => void) | null = null;

try {
if (opts?.abortSignal?.aborted) {
return {
ok: false,
provider: "openai",
mode,
error: "Generation aborted by newer message.",
};
}

const stream = client.responses.stream({
model: "gpt-5.2",
...(params.instructions ? { instructions: params.instructions } : {}),
input: `${prefix}${trimmed}`,
});
const abortStream = (): void => {
try {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(stream as any)?.abort?.();
} catch {
/* ignore */
}
};
let cancelLogged = false;
const hardAbort = (reason: string): void => {
if (!cancelLogged) {
cancelLogged = true;
console.log("[STREAM] aborted immediately:", reason);
}
abortStream();
};
onAbort = (): void => {
abortStream();
};
if (opts?.abortSignal) {
opts.abortSignal.addEventListener("abort", onAbort, { once: true });
}

stream.on("response.output_text.delta", async (event: { snapshot?: string }) => {
if (opts?.abortSignal?.aborted) {
hardAbort("abortSignal");
return;
}
if (opts?.isCancelled && opts.isCancelled()) {
try {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(stream as any)?.abort?.();
} catch {
/* ignore */
}
hardAbort("isCancelled");
return;
}
if (opts?.getAbortSignal && (await opts.getAbortSignal())) {
try {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
(stream as any)?.abort?.();
} catch {
/* ignore */
}
console.log("[STREAM] stale generation");
hardAbort("getAbortSignal");
return;
}
const text = event?.snapshot ?? "";
if (text.length > 0) void Promise.resolve(onDelta(text));
});

const response = await stream.finalResponse();
if (opts?.abortSignal?.aborted) {
return {
ok: false,
provider: "openai",
mode,
error: "Generation aborted by newer message.",
};
}
const r = response as any;
let output_text = r.output_text;
if (output_text == null || String(output_text).trim() === "") {
Expand Down Expand Up @@ -189,6 +225,14 @@ export async function callOpenAiChatStream(
usage: r.usage ?? undefined,
};
} catch (e: any) {
if (opts?.abortSignal?.aborted) {
return {
ok: false,
provider: "openai",
mode,
error: "Generation aborted by newer message.",
};
}
const message =
(e && typeof e === "object" && "message" in e ? (e as Error).message : null) ??
(e != null ? String(e) : "Failed to call OpenAI. Check OPENAI env and network.");
Expand All @@ -198,5 +242,9 @@ export async function callOpenAiChatStream(
mode,
error: message,
};
} finally {
if (opts?.abortSignal && onAbort) {
opts.abortSignal.removeEventListener("abort", onAbort);
}
}
}
6 changes: 5 additions & 1 deletion app/ai/transmitter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,11 @@ export async function transmit(request: AiRequest): Promise<AiResponse> {
export async function transmitStream(
request: AiRequest,
onDelta: (text: string) => void | Promise<void>,
opts?: { isCancelled?: () => boolean; getAbortSignal?: () => Promise<boolean> },
opts?: {
isCancelled?: () => boolean;
getAbortSignal?: () => Promise<boolean>;
abortSignal?: AbortSignal;
},
): Promise<AiResponse> {
const mode: AiMode = request.mode ?? "chat";
const thread = request.threadContext;
Expand Down
31 changes: 31 additions & 0 deletions app/api/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,29 @@ function normalizeBase(base: string): string {
return base.replace(/\/$/, "");
}

/**
 * True when `value` parses as a URL whose host is localhost or a
 * private-network address (delegates to isPrivateOrLocalHost).
 * Malformed URLs are treated as non-local.
 */
function isLocalhostUrl(value: string): boolean {
  let hostname: string;
  try {
    hostname = new URL(value).hostname;
  } catch {
    return false;
  }
  return isPrivateOrLocalHost(hostname);
}

/**
 * Heuristic: are we running inside the Telegram Mini App container?
 *
 * Probes the `tgWebApp` marker in the location hash first, then the
 * injected `window.Telegram.WebApp` object. Returns false outside a
 * browser environment or when either probe throws.
 */
function isLikelyTelegramMiniApp(): boolean {
  if (typeof window === "undefined") return false;
  try {
    const hash = window.location.hash ?? "";
    if (hash.includes("tgWebApp")) return true;
  } catch {
    // ignore — location access can throw in sandboxed embeddings
  }
  try {
    const tg = (window as unknown as { Telegram?: { WebApp?: unknown } }).Telegram;
    return Boolean(tg?.WebApp);
  } catch {
    return false;
  }
}

function isPrivateOrLocalHost(hostname: string): boolean {
if (hostname === "localhost" || hostname === "127.0.0.1") return true;
if (hostname.startsWith("10.")) return true;
Expand Down Expand Up @@ -92,6 +115,14 @@ function getNodeBaseUrl(): string {
export function getApiBaseUrl(): string {
const envBase = process.env.EXPO_PUBLIC_API_BASE_URL?.trim();
if (envBase) {
if (
typeof window !== "undefined" &&
isLikelyTelegramMiniApp() &&
isLocalhostUrl(envBase)
) {
const browserBase = getBrowserBaseUrl();
if (browserBase) return browserBase;
}
return normalizeBase(envBase);
}

Expand Down
2 changes: 2 additions & 0 deletions app/api/telegram.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ async function handler(
res?: { status: (n: number) => void; setHeader: (k: string, v: string) => void; end: (s?: string) => void }
): Promise<Response | void> {
const method = (request as { method?: string }).method ?? request.method;
console.log('[api/telegram]', method, new Date().toISOString());
if (method === 'GET') {
const body = { ok: true, endpoint: 'telegram', use: 'POST with initData' };
if (res) {
Expand All @@ -41,6 +42,7 @@ async function handler(
try {
const { handlePost } = await import('../telegram/post.js');
const response = await handlePost(request);
console.log('[api/telegram] POST status', response.status);
if (res) {
res.status(response.status);
response.headers.forEach((v, k) => res.setHeader(k, v));
Expand Down
37 changes: 37 additions & 0 deletions app/bot/__tests__/start.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
import { test } from "node:test";
import assert from "node:assert/strict";
import { buildStartMessage, getBotVersion } from "../start.js";

test("getBotVersion prefers BOT_VERSION", () => {
const prev = process.env.BOT_VERSION;
process.env.BOT_VERSION = "123";
try {
assert.equal(getBotVersion(), "123");
} finally {
if (prev === undefined) delete process.env.BOT_VERSION;
else process.env.BOT_VERSION = prev;
}
});

test("getBotVersion shortens a SHA to 7 chars", () => {
const prev = process.env.BOT_VERSION;
process.env.BOT_VERSION = "0f7f1b5abcdef1234567890";
try {
assert.equal(getBotVersion(), "0f7f1b5");
} finally {
if (prev === undefined) delete process.env.BOT_VERSION;
else process.env.BOT_VERSION = prev;
}
});

test("buildStartMessage includes version tag", () => {
const prev = process.env.BOT_VERSION;
process.env.BOT_VERSION = "123";
try {
assert.match(buildStartMessage(), /@HyperlinksSpaceBot v\.123/);
} finally {
if (prev === undefined) delete process.env.BOT_VERSION;
else process.env.BOT_VERSION = prev;
}
});

3 changes: 2 additions & 1 deletion app/bot/grammy.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import {
upsertUserFromBot,
} from '../database/users.js';
import { handleBotAiResponse } from './responder.js';
import { buildStartMessage } from './start.js';

export function createBot(token: string): Bot {
const bot = new Bot(token);
Expand All @@ -31,7 +32,7 @@ export function createBot(token: string): Bot {

bot.command('start', async (ctx: Context) => {
await handleUserUpsert(ctx);
await ctx.reply("That's @HyperlinksSpaceBot, you can use AI in bot and explore the app for more features");
await ctx.reply(buildStartMessage());
});

bot.on('message:text', async (ctx: Context) => {
Expand Down
Loading