From 3db654a5161a60b34986e4b4d3092a172b3c1e4d Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Tue, 24 Mar 2026 16:26:59 +0000 Subject: [PATCH 1/5] Add comprehensive security best practices, shared utilities, and project templates documentation - Introduced a new document on security best practices covering authentication, API security, database security, and more. - Added an overview for the @betterbase/shared package detailing shared types, utilities, constants, and schemas. - Created a templates overview document outlining available project templates, their features, usage, and customization options. - Developed a test project application documentation showcasing best practices for building applications with BetterBase, including project structure, configuration, API routes, authentication, storage, webhooks, and development workflow. --- docs/README.md | 164 ++++ docs/api-reference/cli-commands.md | 413 +++++++++ docs/api-reference/client-sdk.md | 308 +++++++ docs/api-reference/graphql-api.md | 437 +++++++++ docs/api-reference/rest-api.md | 305 +++++++ docs/cli/overview.md | 515 +++++++++++ docs/client/client.md | 295 ++++++ docs/client/realtime.md | 566 ++++++++++++ docs/core/auto-rest.md | 383 ++++++++ docs/core/branching.md | 389 ++++++++ docs/core/config.md | 646 ++++++++++++++ docs/core/functions.md | 712 +++++++++++++++ docs/core/graphql.md | 993 +++++++++++++++++++++ docs/core/overview.md | 135 +++ docs/examples/blog.md | 318 +++++++ docs/examples/chat-app.md | 275 ++++++ docs/examples/ecommerce.md | 471 ++++++++++ docs/examples/todo-app.md | 209 +++++ docs/features/authentication.md | 179 ++++ docs/features/database.md | 243 +++++ docs/features/functions.md | 262 ++++++ docs/features/graphql.md | 321 +++++++ docs/features/realtime.md | 247 +++++ docs/features/rls.md | 205 +++++ docs/features/storage.md | 230 +++++ docs/features/webhooks.md | 177 ++++ docs/getting-started/configuration.md | 248 +++++ docs/getting-started/installation.md | 183 ++++ 
docs/getting-started/quick-start.md | 168 ++++ docs/getting-started/your-first-project.md | 332 +++++++ docs/guides/deployment.md | 270 ++++++ docs/guides/monitoring.md | 267 ++++++ docs/guides/production-checklist.md | 222 +++++ docs/guides/scaling.md | 307 +++++++ docs/guides/security-best-practices.md | 302 +++++++ docs/shared/overview.md | 123 +++ docs/templates/overview.md | 327 +++++++ docs/test-project/overview.md | 553 ++++++++++++ 38 files changed, 12700 insertions(+) create mode 100644 docs/README.md create mode 100644 docs/api-reference/cli-commands.md create mode 100644 docs/api-reference/client-sdk.md create mode 100644 docs/api-reference/graphql-api.md create mode 100644 docs/api-reference/rest-api.md create mode 100644 docs/cli/overview.md create mode 100644 docs/client/client.md create mode 100644 docs/client/realtime.md create mode 100644 docs/core/auto-rest.md create mode 100644 docs/core/branching.md create mode 100644 docs/core/config.md create mode 100644 docs/core/functions.md create mode 100644 docs/core/graphql.md create mode 100644 docs/core/overview.md create mode 100644 docs/examples/blog.md create mode 100644 docs/examples/chat-app.md create mode 100644 docs/examples/ecommerce.md create mode 100644 docs/examples/todo-app.md create mode 100644 docs/features/authentication.md create mode 100644 docs/features/database.md create mode 100644 docs/features/functions.md create mode 100644 docs/features/graphql.md create mode 100644 docs/features/realtime.md create mode 100644 docs/features/rls.md create mode 100644 docs/features/storage.md create mode 100644 docs/features/webhooks.md create mode 100644 docs/getting-started/configuration.md create mode 100644 docs/getting-started/installation.md create mode 100644 docs/getting-started/quick-start.md create mode 100644 docs/getting-started/your-first-project.md create mode 100644 docs/guides/deployment.md create mode 100644 docs/guides/monitoring.md create mode 100644 
docs/guides/production-checklist.md create mode 100644 docs/guides/scaling.md create mode 100644 docs/guides/security-best-practices.md create mode 100644 docs/shared/overview.md create mode 100644 docs/templates/overview.md create mode 100644 docs/test-project/overview.md diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..744444a --- /dev/null +++ b/docs/README.md @@ -0,0 +1,164 @@ +# BetterBase Documentation + +Comprehensive documentation for the BetterBase platform, covering all packages, modules, and development workflows. + +## Table of Contents +- [Getting Started](#getting-started) +- [Features](#features) +- [Guides](#guides) +- [API Reference](#api-reference) +- [Examples](#examples) +- [Contributing](#contributing) + +## Documentation Structure + +The documentation is organized into logical sections for easy navigation: + +``` +/docs +├── getting-started/ # Getting started guides +│ ├── installation.md +│ ├── quick-start.md +│ ├── your-first-project.md +│ └── configuration.md +├── features/ # Feature documentation +│ ├── authentication.md +│ ├── database.md +│ ├── storage.md +│ ├── realtime.md +│ ├── graphql.md +│ ├── functions.md +│ ├── webhooks.md +│ └── rls.md +├── guides/ # Development guides +│ ├── deployment.md +│ ├── production-checklist.md +│ ├── monitoring.md +│ ├── scaling.md +│ └── security-best-practices.md +├── api-reference/ # API documentation +│ ├── client-sdk.md +│ ├── cli-commands.md +│ ├── rest-api.md +│ └── graphql-api.md +└── examples/ # Example applications + ├── todo-app.md + ├── chat-app.md + ├── blog.md + └── ecommerce.md +``` + +## Getting Started + +New to BetterBase? Start here: + +1. [Installation](./getting-started/installation.md) - Install Bun and BetterBase CLI +2. [Quick Start](./getting-started/quick-start.md) - Get running in 5 minutes +3. [Your First Project](./getting-started/your-first-project.md) - Build a complete application +4. 
[Configuration](./getting-started/configuration.md) - Customize your setup + +## Features + +Learn about all BetterBase features: + +- [Authentication](./features/authentication.md) - Email/password, OAuth, MFA +- [Database](./features/database.md) - Multi-provider database support +- [Storage](./features/storage.md) - S3-compatible file storage +- [Realtime](./features/realtime.md) - WebSocket subscriptions +- [GraphQL](./features/graphql.md) - Auto-generated GraphQL API +- [Functions](./features/functions.md) - Serverless functions +- [Webhooks](./features/webhooks.md) - Event-driven webhooks +- [RLS](./features/rls.md) - Row Level Security + +## Guides + +Development guides for production: + +- [Deployment](./guides/deployment.md) - Deploy to various platforms +- [Production Checklist](./guides/production-checklist.md) - Pre-deployment checklist +- [Monitoring](./guides/monitoring.md) - Set up logging and metrics +- [Scaling](./guides/scaling.md) - Scale your application +- [Security Best Practices](./guides/security-best-practices.md) - Security hardening + +## API Reference + +Detailed API documentation: + +- [Client SDK](./api-reference/client-sdk.md) - TypeScript client library +- [CLI Commands](./api-reference/cli-commands.md) - Command-line interface +- [REST API](./api-reference/rest-api.md) - REST API endpoints +- [GraphQL API](./api-reference/graphql-api.md) - GraphQL reference + +## Examples + +Complete example applications: + +- [Todo App](./examples/todo-app.md) - Simple todo list with real-time sync +- [Chat App](./examples/chat-app.md) - Real-time messaging with presence +- [Blog](./examples/blog.md) - Blog with posts, comments, categories +- [E-commerce](./examples/ecommerce.md) - Store with cart, orders, payments + +## Prerequisites + +- [Bun](https://bun.sh/) (v1.0+) +- Git +- Node.js (v18+ for some optional tools) + +## Installation + +```bash +# Clone the repository +git clone https://github.com/betterbase/betterbase.git +cd betterbase + +# 
Install dependencies +bun install +``` + +## First Steps + +1. Review the [Installation Guide](./getting-started/installation.md) +2. Follow the [Quick Start](./getting-started/quick-start.md) +3. Build your first project with the [Your First Project](./getting-started/your-first-project.md) guide +4. Explore [Examples](./examples/) for complete applications + +## Contributing + +We welcome contributions to improve the documentation! To contribute: + +1. Fork the repository +2. Make your changes to the appropriate markdown files in `/docs` +3. Ensure your changes are clear, accurate, and follow the existing style +4. Submit a pull request with a clear description of your changes + +### Documentation Guidelines + +- Use clear, concise language +- Provide code examples where appropriate +- Include links to related documentation +- Mark deprecated features clearly +- Keep examples up-to-date with current code +- Follow Markdown best practices + +### Reporting Issues + +If you find errors or omissions in the documentation: + +- Check if the issue has already been reported +- Create a new issue with details about the problem +- Include the documentation page and section +- Suggest improvements if possible + +## Versioning + +This documentation corresponds to the current version of the BetterBase platform. For specific version documentation: + +- Check the git tags for released versions +- Refer to the CHANGELOG.md for version-specific changes +- Documentation for older versions is available in the git history + +## License + +This documentation is part of the BetterBase platform and is licensed under the MIT License. + +© 2026 BetterBase LLC. All rights reserved. \ No newline at end of file diff --git a/docs/api-reference/cli-commands.md b/docs/api-reference/cli-commands.md new file mode 100644 index 0000000..fb58abd --- /dev/null +++ b/docs/api-reference/cli-commands.md @@ -0,0 +1,413 @@ +# CLI Commands + +Complete reference for the BetterBase CLI (`bb`). 
+ +## Installation + +```bash +bun add -g @betterbase/cli +``` + +Verify: + +```bash +bb --version +``` + +## Global Options + +| Option | Description | +|--------|-------------| +| `--version` | Show version | +| `--help` | Show help | +| `--project` | Specify project path | + +## Project Management + +### init + +Initialize a new BetterBase project. + +```bash +bb init [project-name] +bb init my-app +bb init my-app ./path +``` + +### dev + +Start development server with auto-regeneration. + +```bash +bb dev +bb dev ./my-project +``` + +## Database + +### migrate + +Manage database migrations. + +```bash +# Apply pending migrations +bb migrate + +# Preview changes +bb migrate preview + +# Apply to production (with confirmation) +bb migrate production + +# Rollback last migration +bb migrate rollback + +# Rollback N migrations +bb migrate rollback -s 3 + +# Show migration history +bb migrate history +``` + +## Authentication + +### auth setup + +Install and configure BetterAuth. + +```bash +bb auth setup +bb auth setup ./my-project +``` + +### auth add-provider + +Add OAuth provider. + +```bash +bb auth add-provider github +bb auth add-provider google +bb auth add-provider discord + +# Available: google, github, discord, apple, microsoft, twitter, facebook +``` + +### login + +Authenticate CLI with BetterBase. + +```bash +bb login +``` + +### logout + +Sign out CLI. + +```bash +bb logout +``` + +## Storage + +### storage init + +Initialize storage. + +```bash +bb storage init +bb storage init ./my-project +``` + +### storage list + +List storage buckets. + +```bash +bb storage list +bb storage buckets +``` + +### storage upload + +Upload file to storage. + +```bash +bb storage upload [options] +bb storage upload ./image.jpg +bb storage upload ./doc.pdf -b my-bucket -p uploads/doc.pdf +``` + +Options: +- `-b, --bucket` - Bucket name +- `-p, --path` - Path in bucket + +## Functions + +### function create + +Create new function. 
+ +```bash +bb function create +bb function create hello-world +``` + +### function dev + +Run function locally. + +```bash +bb function dev +bb function dev hello-world +``` + +### function build + +Build function for deployment. + +```bash +bb function build +bb function build hello-world --target aws-lambda --minify +``` + +Options: +- `--target` - Build target (node, aws-lambda, vercel, etc.) +- `--minify` - Enable minification + +### function deploy + +Deploy function. + +```bash +bb function deploy +bb function deploy hello-world --sync-env +``` + +Options: +- `--sync-env` - Sync environment variables + +### function list + +List all functions. + +```bash +bb function list +``` + +### function logs + +View function logs. + +```bash +bb function logs +bb function logs hello-world -l 50 +``` + +Options: +- `-l, --limit` - Number of log lines + +## Webhooks + +### webhook create + +Create new webhook. + +```bash +bb webhook create +bb webhook create ./my-project +``` + +### webhook list + +List webhooks. + +```bash +bb webhook list +``` + +### webhook test + +Test webhook. + +```bash +bb webhook test +bb webhook test wh_12345 +``` + +### webhook logs + +View webhook logs. + +```bash +bb webhook logs +bb webhook logs wh_12345 -l 100 +``` + +Options: +- `-l, --limit` - Number of log lines + +## Branching + +### branch create + +Create preview environment. + +```bash +bb branch create +bb branch create feature/login-form +``` + +### branch list + +List preview environments. + +```bash +bb branch list +``` + +### branch delete + +Delete preview environment. + +```bash +bb branch delete +bb branch delete feature/login-form +``` + +### branch status + +Check branch status. + +```bash +bb branch status +``` + +### branch sleep + +Sleep preview environment. + +```bash +bb branch sleep +``` + +### branch wake + +Wake preview environment. + +```bash +bb branch wake +``` + +## GraphQL + +### graphql generate + +Generate GraphQL schema. 
+ +```bash +bb graphql generate +bb graphql generate ./my-project +``` + +### graphql playground + +Open GraphQL Playground. + +```bash +bb graphql playground +``` + +## Code Generation + +### generate crud + +Generate CRUD routes. + +```bash +bb generate crud <table> +bb generate crud users +bb generate crud posts ./my-project +``` + +## RLS (Row Level Security) + +### rls create + +Create RLS policy. + +```bash +bb rls create --table <table> --name <name> --command <command> [--check <expression>] +bb rls create --table posts --name users-own-posts --command SELECT --check "user_id = auth.uid()" +``` + +### rls list + +List RLS policies. + +```bash +bb rls list +``` + +### rls enable + +Enable RLS on table. + +```bash +bb rls enable --table <table> +``` + +### rls disable + +Disable RLS on table. + +```bash +bb rls disable --table <table> +``` + +### rls test + +Test RLS policies. + +```bash +bb rls test --table <table> +bb rls test --table posts +``` + +## Quick Reference + +| Command | Aliases | Description | +|---------|---------|-------------| +| `bb init` | | Initialize project | +| `bb dev` | | Start dev server | +| `bb migrate` | db | Database migrations | +| `bb auth setup` | | Setup auth | +| `bb auth add-provider` | | Add OAuth | +| `bb function create` | | Create function | +| `bb function dev` | | Run function locally | +| `bb function deploy` | | Deploy function | +| `bb storage upload` | | Upload file | +| `bb webhook create` | | Create webhook | +| `bb webhook test` | | Test webhook | +| `bb branch create` | | Create preview | +| `bb graphql generate` | | Generate schema | +| `bb generate crud` | | Generate CRUD | +| `bb rls create` | | Create policy | + +## Environment Variables + +The CLI respects: +- `BB_PROJECT` - Project path +- `BB_API_KEY` - API key for authentication + +## Configuration + +CLI reads from: +1. Command line arguments +2. `.betterbase-context.json` +3.
`betterbase.config.ts` + +## Related + +- [CLI Overview](../cli/overview.md) - CLI introduction +- [Deployment](../guides/deployment.md) - Deployment guides diff --git a/docs/api-reference/client-sdk.md b/docs/api-reference/client-sdk.md new file mode 100644 index 0000000..855d6ad --- /dev/null +++ b/docs/api-reference/client-sdk.md @@ -0,0 +1,308 @@ +# Client SDK + +The @betterbase/client package provides a TypeScript SDK for interacting with BetterBase backends. + +## Installation + +```bash +bun add @betterbase/client +``` + +## Quick Setup + +```typescript +import { createClient } from '@betterbase/client' + +const client = createClient({ + url: 'http://localhost:3000', + auth: { + persistSession: true, + autoRefreshToken: true + } +}) +``` + +## Configuration + +```typescript +interface ClientOptions { + url: string + auth?: { + persistSession?: boolean + autoRefreshToken?: boolean + } + storage?: { + type?: 'local' | 'session' + } +} +``` + +## Authentication + +### signUp + +```typescript +const { data, error } = await client.auth.signUp({ + email: string, + password: string, + name?: string +}) +``` + +### signInWithPassword + +```typescript +const { data, error } = await client.auth.signInWithPassword({ + email: string, + password: string +}) +``` + +### signInWithOAuth + +```typescript +const { data, error } = await client.auth.signInWithOAuth({ + provider: 'github' | 'google' | 'discord' +}) +``` + +### signOut + +```typescript +await client.auth.signOut() +``` + +### getUser + +```typescript +const { data, error } = await client.auth.getUser() +``` + +### getSession + +```typescript +const { data, error } = await client.auth.getSession() +``` + +## Database Operations + +### select + +```typescript +// Get all records +const { data, error } = await client + .from('users') + .select() + +// Select specific columns +const { data, error } = await client + .from('users') + .select('id, name, email') + +// With filters +const { data, error } = await client + 
.from('posts') + .select() + .eq('published', true) + .order('createdAt', { ascending: false }) + .limit(10) +``` + +### insert + +```typescript +const { data, error } = await client + .from('users') + .insert({ + name: 'John Doe', + email: 'john@example.com' + }) +``` + +### update + +```typescript +const { data, error } = await client + .from('users') + .update({ name: 'Jane Doe' }) + .eq('id', 'user-123') +``` + +### delete + +```typescript +const { data, error } = await client + .from('users') + .delete() + .eq('id', 'user-123') +``` + +## Query Builder Methods + +| Method | Description | +|--------|-------------| +| `.select(columns?)` | Select columns to return | +| `.eq(column, value)` | Filter by equality | +| `.neq(column, value)` | Filter by inequality | +| `.gt(column, value)` | Greater than | +| `.gte(column, value)` | Greater or equal | +| `.lt(column, value)` | Less than | +| `.lte(column, value)` | Less or equal | +| `.like(column, pattern)` | Pattern match | +| `.in(column, array)` | In array | +| `.order(column, options)` | Sort results | +| `.limit(count)` | Limit results | +| `.offset(count)` | Offset results | +| `.single()` | Return single record | + +## Realtime + +### Subscribe + +```typescript +const channel = client.channel('public:posts') + +channel + .on('postgres_changes', + { event: 'INSERT', table: 'posts' }, + (payload) => console.log('New post:', payload.new) + ) + .subscribe() +``` + +### Channel Events + +```typescript +channel.on('status', (status) => { + console.log('Connection status:', status) +}) +``` + +### Unsubscribe + +```typescript +channel.unsubscribe() +``` + +## Storage + +### upload + +```typescript +const { data, error } = await client.storage.upload( + bucket: string, + path: string, + file: File | Blob +) +``` + +### download + +```typescript +const { data, error } = await client.storage.download( + bucket: string, + path: string +) +``` + +### remove + +```typescript +const { data, error } = await 
client.storage.remove( + bucket: string, + path: string +) +``` + +### getPublicUrl + +```typescript +const { data: { url } } = client.storage.getPublicUrl( + bucket: string, + path: string +) +``` + +### list + +```typescript +const { data, error } = await client.storage.list(bucket: string) +``` + +## GraphQL + +### query + +```typescript +const { data, error } = await client.graphql.query(` + query GetUsers { + users { + id + name + email + } + } +`) +``` + +### mutation + +```typescript +const { data, error } = await client.graphql.mutation(` + mutation CreateUser($name: String!, $email: String!) { + insert_users_one(object: { name: $name, email: $email }) { + id + name + } + } +`, { name: 'John', email: 'john@example.com' }) +``` + +## Error Handling + +```typescript +const { data, error } = await client.from('users').select() + +if (error) { + console.error('Error:', error.message) + console.error('Code:', error.code) + return +} + +console.log('Data:', data) +``` + +## Error Types + +| Error Code | Description | +|------------|-------------| +| `PGRST116` | Record not found | +| `23505` | Unique constraint violation | +| `42501` | Permission denied | +| `AUTH_REQUIRED` | Authentication required | + +## TypeScript Types + +```typescript +import type { + User, + Session, + Post, + StorageResult, + RealtimeChannel +} from '@betterbase/client' +``` + +## Best Practices + +1. **Use persistSession** - Keep user logged in across page reloads +2. **Handle errors** - Always check error before processing data +3. **Type your data** - Use TypeScript types for better DX +4. 
**Clean up subscriptions** - Unsubscribe when done + +## Related + +- [Authentication](../features/authentication.md) - Auth features +- [Realtime](../features/realtime.md) - Real-time subscriptions +- [Storage](../features/storage.md) - File storage +- [GraphQL](../features/graphql.md) - GraphQL API diff --git a/docs/api-reference/graphql-api.md b/docs/api-reference/graphql-api.md new file mode 100644 index 0000000..0ee9b03 --- /dev/null +++ b/docs/api-reference/graphql-api.md @@ -0,0 +1,437 @@ +# GraphQL API + +Complete reference for the BetterBase GraphQL API. + +## Endpoint + +``` +POST /graphql +``` + +For GET requests: +``` +GET /graphql?query={...}&variables={...} +``` + +## Playground + +Access GraphQL Playground at: +``` +http://localhost:3000/graphql +``` + +## Schema Introspection + +```graphql +query { + __schema { + types { + name + kind + fields { + name + type { + name + kind + } + } + } + } +} +``` + +## Queries + +### Fetch Records + +```graphql +query GetUsers { + users { + id + name + email + createdAt + } +} +``` + +### With Filtering + +```graphql +query GetPublishedPosts { + posts(where: { published: { _eq: true } }) { + id + title + content + author { + name + } + } +} +``` + +### With Pagination + +```graphql +query GetPaginatedPosts { + posts(offset: 0, limit: 10, order_by: { createdAt: desc }) { + id + title + createdAt + } +} +``` + +### Single Record by PK + +```graphql +query GetUserById { + users_by_pk(id: "user-123") { + id + name + email + } +} +``` + +### Aggregate + +```graphql +query GetPostStats { + posts_aggregate(where: { published: { _eq: true } }) { + aggregate { + count + sum { + viewCount + } + avg { + viewCount + } + max { + createdAt + } + min { + createdAt + } + } + } +} +``` + +## Mutations + +### Insert Single + +```graphql +mutation CreateUser { + insert_users_one(object: { name: "John", email: "john@example.com" }) { + id + name + email + } +} +``` + +### Insert Multiple + +```graphql +mutation CreateUsers { + 
insert_users(objects: [ + { name: "Alice", email: "alice@example.com" }, + { name: "Bob", email: "bob@example.com" } + ]) { + returning { + id + name + } + } +} +``` + +### Update by PK + +```graphql +mutation UpdateUser { + update_users_by_pk( + pk_columns: { id: "user-123" } + _set: { name: "John Updated" } + ) { + id + name + updatedAt + } +} +``` + +### Update Multiple + +```graphql +mutation UpdatePosts { + update_posts( + where: { published: { _eq: false } } + _set: { published: true } + ) { + returning { + id + title + published + } + } +} +``` + +### Delete by PK + +```graphql +mutation DeleteUser { + delete_users_by_pk(pk_columns: { id: "user-123" }) { + id + name + } +} +``` + +### Delete Multiple + +```graphql +mutation DeletePosts { + delete_posts( + where: { createdAt: { _lt: "2024-01-01" } } + ) { + returning { + id + title + } + } +} +``` + +## Subscriptions + +### Subscribe to Insert + +```graphql +subscription OnNewUser { + users_insert { + id + name + email + createdAt + } +} +``` + +### Subscribe to Update + +```graphql +subscription OnUserUpdate { + users_update { + id + old { + name + } + new { + name + } + } +} +``` + +### Subscribe to Delete + +```graphql +subscription OnUserDelete { + users_delete { + id + name + } +} +``` + +### With Filtering + +```graphql +subscription OnPublishedPost { + posts_insert(where: { published: { _eq: true } }) { + id + title + author { + name + } + } +} +``` + +## Input Types + +### Where Conditions + +```graphql +# Boolean +where: { published: { _eq: true } } + +# String +where: { name: { _eq: "John" } } + +# Number +where: { age: { _gt: 18 } } + +# Array +where: { role: { _in: ["admin", "moderator"] } } + +# Null check +where: { deletedAt: { _is_null: true } } + +# Multiple conditions +where: { + _and: [ + { published: { _eq: true } } + { authorId: { _eq: "user-123" } } + ] +} + +# Or +where: { + _or: [ + { status: { _eq: "active" } } + { status: { _eq: "pending" } } + ] +} +``` + +### Order By + +```graphql 
+order_by: { createdAt: desc } +order_by: { title: asc, createdAt: desc } +``` + +### Boolean Expression Operators + +| Operator | Description | +|----------|-------------| +| `_eq` | Equals | +| `_neq` | Not equals | +| `_gt` | Greater than | +| `_gte` | Greater or equal | +| `_lt` | Less than | +| `_lte` | Less or equal | +| `_like` | Like pattern | +| `_ilike` | Case-insensitive like | +| `_in` | In array | +| `_is_null` | Is null | +| `_and` | And | +| `_or` | Or | + +## Fragments + +```graphql +fragment UserFields on users { + id + name + email + createdAt +} + +query GetUsers { + users { + ...UserFields + posts { + id + title + } + } +} +``` + +## Variables + +```graphql +query GetUser($id: uuid!) { + users_by_pk(id: $id) { + id + name + email + } +} +``` + +Variables: +```json +{ + "id": "user-123" +} +``` + +## Aliases + +```graphql +query GetData { + activeUsers: users(where: { active: { _eq: true } }) { + id + name + } + inactiveUsers: users(where: { active: { _eq: false } }) { + id + name + } +} +``` + +## Directives + +```graphql +query GetUser($includePosts: Boolean!) { + users_by_pk(id: "user-123") { + id + name + posts @include(if: $includePosts) { + id + title + } + } +} +``` + +Built-in directives: +- `@include(if: Boolean)` +- `@skip(if: Boolean)` + +## Error Response + +```json +{ + "errors": [ + { + "message": "Field 'users' doesn't exist on type 'query_root'", + "locations": [{ "line": 2, "column": 3 }], + "path": ["query", "users"] + } + ] +} +``` + +## Performance + +### Query Complexity + +Set complexity limits: + +```typescript +export default defineConfig({ + graphql: { + complexityLimit: 1000, + depthLimit: 7 + } +}) +``` + +### Batching + +Queries are automatically batched for efficiency. 
+ +## Related + +- [REST API](./rest-api.md) - REST API reference +- [Client SDK](./client-sdk.md) - Using GraphQL from client +- [GraphQL Feature](../features/graphql.md) - GraphQL features diff --git a/docs/api-reference/rest-api.md b/docs/api-reference/rest-api.md new file mode 100644 index 0000000..723bbe8 --- /dev/null +++ b/docs/api-reference/rest-api.md @@ -0,0 +1,305 @@ +# REST API + +Complete reference for the BetterBase REST API. + +## Base URL + +``` +http://localhost:3000/api +``` + +## Authentication Endpoints + +### Sign Up + +```http +POST /auth/signup +Content-Type: application/json + +{ + "email": "user@example.com", + "password": "secure-password", + "name": "John Doe" +} +``` + +**Response:** +```json +{ + "user": { + "id": "user-123", + "email": "user@example.com", + "name": "John Doe" + }, + "session": { + "token": "...", + "expiresAt": "2024-01-15T10:30:00Z" + } +} +``` + +### Sign In + +```http +POST /auth/signin +Content-Type: application/json + +{ + "email": "user@example.com", + "password": "secure-password" +} +``` + +### Sign Out + +```http +POST /auth/signout +Authorization: Bearer +``` + +### Get Session + +```http +GET /auth/session +Authorization: Bearer +``` + +### Refresh Session + +```http +POST /auth/refresh +Authorization: Bearer +``` + +## Auto-REST Endpoints + +BetterBase automatically generates CRUD endpoints for each table. 
+ +### List Records + +```http +GET /:table +GET /users +GET /posts?limit=10&offset=0&sort=createdAt.desc +``` + +**Query Parameters:** + +| Parameter | Type | Description | +|-----------|------|-------------| +| `limit` | number | Number of records (default: 20, max: 1000) | +| `offset` | number | Offset for pagination | +| `sort` | string | Sort field and direction (e.g., `createdAt.desc`) | +| `filter` | string | Filter expression | + +**Filter Syntax:** + +``` +GET /users?filter=active.eq.true +GET /posts?filter=published.eq.true&filter=userId.eq.user-123 +GET /users?filter=role.in.admin,moderator +``` + +### Get Single Record + +```http +GET /:table/:id +GET /users/user-123 +``` + +### Create Record + +```http +POST /:table +POST /users +Content-Type: application/json + +{ + "name": "John Doe", + "email": "john@example.com" +} +``` + +**Response:** +```json +{ + "id": "user-123", + "name": "John Doe", + "email": "john@example.com", + "createdAt": "2024-01-15T10:30:00Z" +} +``` + +### Update Record + +```http +PATCH /:table/:id +PATCH /users/user-123 +Content-Type: application/json + +{ + "name": "Jane Doe" +} +``` + +### Delete Record + +```http +DELETE /:table/:id +DELETE /users/user-123 +``` + +## Storage Endpoints + +### Upload File + +```http +POST /storage/:bucket +Content-Type: multipart/form-data + +--boundary +Content-Disposition: form-data; name="file"; filename="image.jpg" + +--boundary +``` + +### Download File + +```http +GET /storage/:bucket/:path +``` + +### List Files + +```http +GET /storage/:bucket +``` + +### Delete File + +```http +DELETE /storage/:bucket/:path +``` + +## WebSocket Connection + +For realtime subscriptions: + +```javascript +const ws = new WebSocket('ws://localhost:3000/realtime/v1') + +// Authenticate +ws.send(JSON.stringify({ + type: 'auth', + payload: { token: '...'
} +})) + +// Subscribe +ws.send(JSON.stringify({ + type: 'subscribe', + payload: { + event: 'postgres_changes', + table: 'posts', + filter: '*' + } +})) +``` + +## Error Responses + +All errors follow this format: + +```json +{ + "error": { + "code": "PGRST116", + "message": "The requested resource was not found", + "details": "...", + "hint": "..." + } +} +``` + +### Common Error Codes + +| Code | HTTP Status | Description | +|------|-------------|-------------| +| `PGRST116` | 404 | Resource not found | +| `23505` | 409 | Unique constraint violation | +| `42501` | 403 | Permission denied | +| `AUTH_REQUIRED` | 401 | Authentication required | +| `INVALID_TOKEN` | 401 | Invalid or expired token | + +## Rate Limiting + +API requests are rate limited: + +- **Authenticated:** 1000 requests/minute +- **Unauthenticated:** 100 requests/minute + +Rate limit headers are included in responses: + +``` +X-RateLimit-Limit: 1000 +X-RateLimit-Remaining: 999 +X-RateLimit-Reset: 1705315800 +``` + +## CORS + +Cross-origin requests are supported via CORS headers: + +``` +Access-Control-Allow-Origin: https://your-domain.com +Access-Control-Allow-Methods: GET, POST, PUT, DELETE, PATCH, OPTIONS +Access-Control-Allow-Headers: Content-Type, Authorization +Access-Control-Allow-Credentials: true +``` + +## Filtering Operators + +| Operator | Example | Description | +|----------|---------|-------------| +| `eq` | `id.eq.user-123` | Equals | +| `neq` | `id.neq.user-123` | Not equals | +| `gt` | `age.gt.18` | Greater than | +| `gte` | `age.gte.18` | Greater or equal | +| `lt` | `age.lt.18` | Less than | +| `lte` | `age.lte.18` | Less or equal | +| `like` | `name.like.%John%` | Pattern match | +| `ilike` | `name.ilike.%john%` | Case-insensitive | +| `in` | `role.in.admin,user` | In array | +| `is` | `deleted.is.null` | Is null | + +## Sorting + +``` +?sort=createdAt.desc +?sort=title.asc,createdAt.desc +``` + +## Pagination + +``` +?page=1&limit=20 +``` + +Response includes pagination 
metadata: + +```json +{ + "data": [...], + "pagination": { + "page": 1, + "limit": 20, + "total": 100, + "pages": 5 + } +} +``` + +## Related + +- [Client SDK](./client-sdk.md) - Using REST API from client +- [Database](../features/database.md) - Database operations +- [GraphQL API](./graphql-api.md) - GraphQL reference diff --git a/docs/cli/overview.md b/docs/cli/overview.md new file mode 100644 index 0000000..dfaed84 --- /dev/null +++ b/docs/cli/overview.md @@ -0,0 +1,515 @@ +# BetterBase CLI + +Command-line interface for BetterBase development and deployment. + +## Table of Contents +- [Overview](#overview) +- [Installation](#installation) +- [Commands](#commands) + - [Project Management](#project-management) + - [Development](#development) + - [Database](#database) + - [Authentication](#authentication) + - [Storage](#storage) + - [Functions](#functions) + - [Webhooks](#webhooks) + - [Branching](#branching) + - [GraphQL](#graphql) + - [Code Generation](#code-generation) +- [Usage Examples](#usage-examples) +- [Configuration](#configuration) +- [Extending](#extending) +- [Best Practices](#best-practices) + +## Overview + +The BetterBase CLI (`bb`) is a powerful command-line tool for managing BetterBase projects. It provides commands for project initialization, development workflow, database operations, authentication, storage management, and more. 
+ +### Key Features +- **Project Initialization**: Scaffold new BetterBase projects +- **Development Workflow**: Watch files and auto-generate context +- **Database Management**: Generate and apply migrations +- **Authentication Helpers**: Install and configure authentication +- **Storage Management**: Manage buckets and file uploads +- **Function Deployment**: Bundle and deploy edge functions +- **Branching/Previews**: Create and manage preview environments +- **Code Generation**: Generate CRUD, GraphQL, and other boilerplate + +## Installation + +The CLI is installed globally via npm/yarn/bun: +```bash +bun add -g @betterbase/cli +``` + +Or install locally in a project: +```bash +bun add -D @betterbase/cli +``` + +Then run with `npx bb` or add to package.json scripts. + +## Commands + +### Project Management + +#### init +Initialize a new BetterBase project +```bash +bb init [project-name] +bb init my-app +bb init my-app ./path/to/project +``` + +#### dev +Watch schema/routes and regenerate `.betterbase-context.json` +```bash +bb dev [project-root] +bb dev +bb dev ./my-project +``` + +### Database + +#### migrate +Generate and apply migrations +```bash +bb migrate # Apply migrations to local dev +bb migrate preview # Preview migration diff +bb migrate production # Apply to production (confirmation required) +bb migrate rollback # Rollback last migration +bb migrate rollback -s 3 # Rollback 3 migrations +bb migrate history # Show migration history +``` + +### Authentication + +#### auth setup +Install and scaffold BetterAuth integration +```bash +bb auth setup [project-root] +bb auth setup +bb auth setup ./my-project +``` + +#### auth add-provider +Add OAuth provider (google, github, discord, apple, microsoft, twitter, facebook) +```bash +bb auth add-provider google [project-root] +bb auth add-provider github ./my-project +``` + +#### login/logout +Authenticate CLI with app.betterbase.com +```bash +bb login +bb logout +``` + +### Storage + +#### storage init 
+Initialize storage with a provider +```bash +bb storage init [project-root] +bb storage init +bb storage init ./my-project +``` + +#### storage list/buckets +List objects in storage bucket +```bash +bb storage list [project-root] +bb storage buckets [project-root] # Alias for list +``` + +#### storage upload +Upload a file to storage +```bash +bb storage upload [options] +bb storage upload ./image.jpg +bb storage upload ./doc.pdf -b my-bucket -p uploads/doc.pdf +bb storage upload ./file.txt -r ./my-project +``` + +### Functions + +#### function create +Create a new edge function +```bash +bb function create [project-root] +bb function create hello-world +bb function create api ./my-project +``` + +#### function dev +Run function locally with hot reload +```bash +bb function dev [project-root] +bb function dev hello-world +``` + +#### function build +Bundle function for deployment +```bash +bb function build [project-root] +bb function build hello-world +``` + +#### function list +List all functions +```bash +bb function list [project-root] +``` + +#### function logs +Show function logs +```bash +bb function logs [project-root] +``` + +#### function deploy +Deploy function to cloud +```bash +bb function deploy [options] [project-root] +bb function deploy hello-world +bb function deploy hello-world --sync-env +``` + +### Webhooks + +#### webhook create +Create a new webhook +```bash +bb webhook create [project-root] +bb webhook create +bb webhook create ./my-project +``` + +#### webhook list +List all configured webhooks +```bash +bb webhook list [project-root] +``` + +#### webhook test +Test a webhook by sending a synthetic payload +```bash +bb webhook test [project-root] +bb webhook test wh_12345 +bb webhook test wh_12345 ./my-project +``` + +#### webhook logs +Show delivery logs for a webhook +```bash +bb webhook logs [options] [project-root] +bb webhook logs wh_12345 +bb webhook logs wh_12345 -l 100 +``` + +### Branching/Previews + +#### branch create +Create a 
new preview environment +```bash +bb branch create [project-root] +bb branch create feature/login-form +bb branch create bugfix/auth ./my-project +``` + +#### branch list +List all preview environments +```bash +bb branch list [project-root] +``` + +#### branch delete +Delete a preview environment +```bash +bb branch delete [project-root] +bb branch delete feature/login-form +``` + +#### branch sleep/wake +Put a preview environment to sleep or wake it +```bash +bb branch sleep [project-root] +bb branch wake [project-root] +``` + +#### branch status +Get status of a preview environment +```bash +bb branch status [project-root] +``` + +#### branch (no subcommand) +Manage preview environments +```bash +bb branch [project-root] +bb branch ./my-project +``` + +### GraphQL + +#### graphql generate +Generate GraphQL schema from database schema +```bash +bb graphql generate [project-root] +bb graphql generate +bb graphql generate ./my-project +``` + +#### graphql playground +Open GraphQL Playground in browser +```bash +bb graphql playground +``` + +### Code Generation + +#### generate crud +Generate full CRUD routes for a table +```bash +bb generate crud [project-root] +bb generate crud users +bb generate crud posts ./my-project +``` + +## Usage Examples + +### Setting Up a New Project +```bash +# Create project directory +mkdir my-betterbase-app +cd my-betterbase-app + +# Initialize BetterBase project +bb init my-betterbase-app + +# Install auth helpers +bb auth setup + +# Add GitHub OAuth +bb auth add-provider github + +# Initialize storage +bb storage init + +# Start development server +bb dev + +# In another terminal, start the app +bun run dev +``` + +### Database Workflow +```bash +# Generate initial migration +bb migrate + +# Preview changes before applying +bb migrate preview + +# Apply to production (with confirmation) +bb migrate production + +# Rollback if needed +bb migrate rollback +``` + +### Function Development +```bash +# Create new function +bb function 
create process-webhook + +# Develop locally with hot reload +bb function dev process-webhook + +# Build for deployment +bb function build process-webhook + +# Deploy to cloud +bb function deploy process-webhook --sync-env +``` + +### Preview Environments +```bash +# Create preview for feature branch +bb branch create feature/payment-integration + +# Work on feature... +# When ready, share preview URL with team + +# When feature is merged, cleanup +bb branch delete feature/payment-integration +``` + +## Configuration + +### Environment Variables +The CLI reads configuration from: +1. Command line arguments +2. `.betterbase-context.json` (auto-generated) +3. Environment variables +4. Project `betterbase.config.ts` + +### .betterbase-context.json +This file is automatically generated and managed by the CLI. It contains: +- Project ID +- Environment configuration +- Database connection info +- Storage bucket names +- Webhook configurations +- Function definitions +- Branching settings + +**Never manually edit this file** - it's managed by the CLI. 
+ +### betterbase.config.ts +Project-specific configuration lives in `betterbase.config.ts`: +```typescript +import { defineConfig } from '@betterbase/core' + +export default defineConfig({ + project: { name: 'my-app' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL + }, + storage: { + provider: 's3', + bucket: process.env.STORAGE_BUCKET, + region: 'us-west-2' + }, + webhooks: [ + { + id: 'order-events', + table: 'orders', + events: ['INSERT', 'UPDATE', 'DELETE'], + url: process.env.ORDER_WEBHOOK_URL, + secret: process.env.ORDER_WEBHOOK_SECRET + } + ], + branching: { + enabled: true, + maxPreviews: 10 + } +}) +``` + +## Extending the CLI + +### Creating Custom Commands +The CLI is built with Commander.js and can be extended: + +```typescript +// In your project's cli-extensions.ts +import { Command } from 'commander' +import { createProgram } from '@betterbase/cli' + +const program = createProgram() + +program + .command('custom') + .description('My custom command') + .argument('<arg>', 'An argument') + .option('-o, --option <value>', 'An option') + .action((arg, options) => { + console.log(`Running custom command with ${arg} and ${options.option}`) + }) + +program.parse() +``` + +### Hook System +The CLI provides hooks for extending functionality: +- `preAction`: Runs before any command (used for auth checking) +- `postAction`: Runs after any command +- `command:<name>`: Runs before the specific named command + +### Adding New Commands +To add a new command to the core CLI: +1. Create command file in `src/commands/` +2. Implement the command logic +3. Export and register in `src/index.ts` +4. Update documentation + +## Best Practices + +### Authentication +1. **Login Regularly**: Run `bb login` to refresh authentication token +2. **Use Environment Variables**: Store sensitive config in env vars +3. **Check Permissions**: Ensure CLI has required permissions for operations +4. 
**Token Management**: CLI automatically handles token storage and refresh + +### Project Structure +1. **Standard Layout**: Follow BetterBase project structure conventions +2. **Config Separation**: Keep secrets out of version control +3. **Consistent Naming**: Use consistent naming for resources +4. **Documentation**: Document custom configurations and extensions + +### Development Workflow +1. **Use bb dev**: Keep development watcher running +2. **Preview Changes**: Use `bb migrate preview` before applying +3. **Test Functions Locally**: Use `bb function dev` before deploying +4. **Check Logs**: Use `bb function logs` and `bb webhook logs` for debugging + +### Production Deployment +1. **Confirm Production Migrations**: Always confirm before running `bb migrate production` +2. **Use Sync Env**: Use `--sync-env` flag when deploying functions +3. **Monitor Previews**: Regularly clean up old preview environments +4. **Backup Data**: Always backup before destructive operations + +### Troubleshooting +1. **Check Logs**: Use `bb function logs` and `bb webhook logs` +2. **Verify Auth**: Run `bb login` if getting authentication errors +3. **Check Context**: Delete `.betterbase-context.json` and re-run `bb dev` if needed +4. 
**Update CLI**: Keep CLI updated with `bun add -g @betterbase/cli@latest` + +## Related Modules +- [Core SDK](./../core/overview.md): Core functionality accessed by CLI +- [Client SDK](./../client/overview.md): Client-side SDK +- [Shared Utilities](./../shared/overview.md): Shared types and utilities +- [Configuration](./../core/config.md): Configuration schema and validation + +## Versioning + +### CLI Version +Matches the `@betterbase/cli` package version +Check version with: `bb version` or `bb -v` + +### Breaking Changes +- Major version bumps for breaking changes +- Deprecation warnings with migration paths +- Backward compatibility maintained for 6 months after deprecation + +### Update Schedule +- Regular updates every 2-4 weeks +- Security patches as needed +- Feature releases monthly +- LTS versions quarterly + +## Support + +### Documentation +- [CLI Reference](https://betterbase.dev/docs/cli) +- [Command Guides](https://betterbase.dev/docs/cli/commands) +- [Examples](https://betterbase.dev/examples/cli) + +### Community +- [GitHub Discussions](https://github.com/betterbase/cli/discussions) +- [Discord Channel](https://discord.gg/betterbase) +- [Twitter](https://twitter.com/betterbase) + +### Reporting Issues +- [GitHub Issues](https://github.com/betterbase/cli/issues) +- [Bug Report Template](https://github.com/betterbase/cli/blob/main/.github/ISSUE_TEMPLATE/bug_report.md) +- [Feature Request Template](https://github.com/betterbase/cli/blob/main/.github/ISSUE_TEMPLATE/feature_request.md) + +## License + +[MIT License](LICENSE.md) + +© 2023 BetterBase LLC. \ No newline at end of file diff --git a/docs/client/client.md b/docs/client/client.md new file mode 100644 index 0000000..196a3db --- /dev/null +++ b/docs/client/client.md @@ -0,0 +1,295 @@ +# @betterbase/client + +Client-side SDK for BetterBase applications. Provides API client, real-time subscriptions, and storage management. 
+ +## Table of Contents +- [Overview](#overview) +- [Installation](#installation) +- [Usage](#usage) +- [API Reference](#api-reference) + - [Client](#client) + - [Realtime](#realtime) + - [Storage](#storage) +- [Examples](#examples) + +## Overview + +The client SDK enables developers to interact with BetterBase backends through a consistent interface. It provides: + +### Key Features +- **API Client**: HTTP client with automatic authentication +- **Realtime Subscriptions**: Live updates via WebSockets +- **Storage Management**: File uploads/downloads with cloud storage integration +- **Error Handling**: Consistent error response format +- **Type Safety**: TypeScript definitions for all operations + +### Installation +```bash +bun add @betterbase/client +``` + +## Usage + +### Basic Setup +```typescript +import { createClient } from '@betterbase/client'; + +const client = createClient({ + url: 'https://api.betterbase.dev', + key: 'your-api-key', +}); +``` + +### API Client +```typescript +// Create client instance +const client = createClient({ + url: 'https://api.betterbase.dev', + key: 'your-api-key', +}); + +// Make authenticated API request +const response = await client.fetch('/api/users', { + method: 'GET', + headers: { 'Authorization': `Bearer ${client.getToken()}` } +}); +``` + +### Realtime Subscriptions +```typescript +// Subscribe to updates +const subscription = client.subscribe('users', (event) => { + console.log('New user:', event.data); +}); + +// Unsubscribe +subscription.unsubscribe(); +``` + +### Storage Management +```typescript +// Upload file +const upload = await client.upload('uploads', 'file.txt', file); + +// Download file +const file = await client.download('uploads/file.txt'); + +// Remove file +await client.remove('uploads/file.txt'); +``` + +## API Reference + +### Client +```typescript +export interface Client { + fetch(url: string, options?: RequestInit): Promise; + + realtime: Realtime; + storage: Storage; +} +``` + +### Realtime 
+```typescript +export interface Realtime { + subscribe(event: string, callback: (data: any) => void): () => void; + unsubscribe(event: string, callback: (data: any) => void): void; + + unsubscribeAll(): void; +} +``` + +### Storage +```typescript +export interface Storage { + upload(file: File): Promise; + download(path: string): Promise; + remove(path: string): Promise; + + // Additional methods... +} +``` + +## Examples + +### Basic Usage +```typescript +// Create client +const client = createClient({ + url: 'https://api.betterbase.dev', + key: 'your-api-key', +}); + +// Make authenticated request +const response = await client.fetch('/api/users', { + method: 'GET', + headers: { 'Authorization': `Bearer ${client.getToken()}` } +}); + +// Handle response +if (response.ok) { + const data = await response.json(); + console.log('Users:', data); +} else { + console.error('Request failed:', await response.text()); +} +``` + +### Real-time Updates +```typescript +// Subscribe to updates +const unsubscribe = client.subscribe('users', (event) => { + console.log('New user:', event.data); +}); + +// Unsubscribe when done +unsubscribe(); +``` + +### File Upload +```typescript +// Upload file +const upload = await client.upload('uploads', file); + +// Download file +const file = await client.download('uploads/file.txt'); + +// Remove file +await client.remove('uploads/file.txt'); +``` + +## Security Considerations + +### Authentication +- All requests include authentication headers +- Token management through client instance +- Automatic token refresh + +### Error Handling +- Consistent error format across all operations +- Detailed error messages for debugging +- Network error handling + +### Data Validation +- Input validation for all API requests +- Type safety through TypeScript definitions +- Schema validation for storage operations + +## Best Practices + +### Error Handling +1. **Check response status**: Always check `response.ok` before processing +2. 
**Handle network errors**: Implement retry logic for transient errors +3. **Validate inputs**: Ensure all API requests have valid parameters + +### Performance +1. **Batch requests**: Combine multiple operations into single requests +2. **Caching**: Implement client-side caching for frequently accessed data +3. **Connection pooling**: Reuse WebSocket connections +4. **Compression**: Enable gzip compression where possible + +### Testing +1. **Unit tests**: Test individual methods in isolation +2. **Integration tests**: Test full API workflows +3. **Mocking**: Use mock responses for testing edge cases +4. **Performance testing**: Test under load conditions + +## Related Modules +- [Configuration](./config.md): For BetterBase configuration +- [Realtime](./realtime.md): Real-time subscription management +- [Storage](./storage.md): File storage operations +- [Errors](./errors.md): Error handling utilities +- [Types](./types.md): TypeScript type definitions + +## Versioning + +### API Versioning +- All endpoints include version in path (e.g., `/api/v1/users`) +- Automatic versioning through client instance +- Backward compatibility maintained where possible + +### Breaking Changes +- Major version bumps for breaking changes +- Deprecation warnings with migration paths +- Backward compatibility layers for 6 months + +## Documentation Structure + +### Public API +- Client class with public methods +- Real-time subscription management +- Storage operations + +### Internal Implementation +- Private methods and utilities +- Error handling implementation +- Performance optimizations + +## Maintenance + +### Versioning +- Semantic versioning (SemVer) +- Major.minor.patch format +- Breaking changes documented in CHANGELOG.md + +### Testing +- Unit tests for individual methods +- Integration tests for full workflows +- End-to-end tests for critical paths +- Performance regression tests + +### Documentation Updates +- Automated documentation generation +- Versioned documentation 
for different API versions +- Changelog tracking +- Contributor guidelines + +## Contributing + +### Code of Conduct +- Follow BetterBase code standards +- Submit pull requests with tests +- Document changes in CHANGELOG.md +- Maintain backward compatibility + +### Development Workflow +1. Fork repository +2. Create feature branch +3. Implement changes +4. Add tests +5. Submit pull request with description +6. Code review and approval +7. Merge into main branch + +## Support + +### Documentation +- [API Reference](https://betterbase.dev/docs/client) +- [Getting Started Guide](https://betterbase.dev/docs/getting-started) +- [FAQ](https://betterbase.dev/docs/faq) + +### Community +- [GitHub Discussions](https://github.com/betterbase/client/discussions) +- [Discord Channel](https://discord.gg/betterbase) +- [Twitter](https://twitter.com/betterbase) + +### Reporting Issues +- [GitHub Issues](https://github.com/betterbase/client/issues) +- [Bug Report Template](https://github.com/betterbase/client/blob/main/.github/ISSUE_TEMPLATE/bug_report.md) +- [Feature Request Template](https://github.com/betterbase/client/blob/main/.github/ISSUE_TEMPLATE/feature_request.md) + +## License + +[MIT License](LICENSE.md) + +## Acknowledgments +- [BetterBase Team](https://betterbase.dev) +- [Contributors](https://github.com/betterbase/client/graphs/contributors) +- [Open Source Community](https://opensource.org) + +--- + +Generated with [BetterBase CLI](https://betterbase.dev) + +© 2023 BetterBase LLC. \ No newline at end of file diff --git a/docs/client/realtime.md b/docs/client/realtime.md new file mode 100644 index 0000000..e639719 --- /dev/null +++ b/docs/client/realtime.md @@ -0,0 +1,566 @@ +# Realtime Client + +Client-side real-time subscription management for BetterBase applications. 
+ +## Table of Contents +- [Overview](#overview) +- [Installation](#installation) +- [Usage](#usage) +- [API Reference](#api-reference) + - [RealtimeClient](#realtimeclient) + - [Subscription Management](#subscription-management) + - [Event Handling](#event-handling) +- [Examples](#examples) +- [Best Practices](#best-practices) + +## Overview + +The RealtimeClient provides WebSocket-based real-time subscriptions for BetterBase applications. It enables live updates, presence tracking, and broadcast messaging between clients. + +### Key Features +- **WebSocket Connection**: Automatic connection management with retry logic +- **Table Subscriptions**: Subscribe to database change events (INSERT, UPDATE, DELETE) +- **Channel Messaging**: Real-time messaging and presence tracking +- **Automatic Reconnection**: Robust connection recovery with backoff +- **Event Filtering**: Filter subscriptions by table and event type +- **Presence Tracking**: Track user presence in channels +- **Broadcast Messaging**: Send messages to channel subscribers + +## Installation + +The RealtimeClient is included with the `@betterbase/client` package: +```bash +bun add @betterbase/client +``` + +## Usage + +### Basic Subscription +```typescript +import { RealtimeClient } from '@betterbase/client'; + +const client = new RealtimeClient('https://api.betterbase.dev', token); + +// Subscribe to table updates +const subscription = client.from('users').on('INSERT', (payload) => { + console.log('New user:', payload.data); +}); + +// Subscribe to all events +const allUpdates = client.from('posts').on('*', (payload) => { + console.log('Post updated:', payload); +}); + +// Unsubscribe when done +subscription.unsubscribe(); +``` + +### Channel Messaging +```typescript +// Join a channel +const channel = client.channel('chat-room'); + +// Subscribe to channel +const subscription = channel.subscribe({ + presence: { user: { id: '123', name: 'John' } } +}); + +// Listen for broadcast messages 
+channel.onBroadcast((event, data) => { + console.log(`Received ${event}:`, data); +}); + +// Send a message +channel.broadcast('message', { + text: 'Hello world!', + user: { id: '123', name: 'John' } +}); + +// Update presence +channel.track({ typing: true }); + +// Leave channel +subscription.unsubscribe(); +``` + +## API Reference + +### RealtimeClient + +```typescript +export class RealtimeClient { + constructor(url: string, token?: string) + + // Table subscriptions + from(table: string): TableSubscription + + // Channel management + channel(channelName: string): Channel + + // Connection management + connect(): void + disconnect(): void + isConnected(): boolean + + // Event handlers + on(event: string, callback: (data: unknown) => void): void + off(event: string, callback: (data: unknown) => void): void +} +``` + +### TableSubscription + +```typescript +interface TableSubscription { + on( + event: 'INSERT' | 'UPDATE' | 'DELETE' | '*', + callback: (payload: RealtimePayload) => void + ): { + subscribe: (filter?: Record) => RealtimeSubscription + } +} + +interface RealtimeSubscription { + unsubscribe: () => void +} + +interface RealtimePayload { + event: 'INSERT' | 'UPDATE' | 'DELETE' + data: T + timestamp: string +} +``` + +### Channel + +```typescript +interface Channel { + subscribe(options?: ChannelOptions): ChannelSubscription + + onPresence(callback: (event: PresenceEvent) => void): void + onBroadcast(callback: (event: string, data: unknown) => void): void +} + +interface ChannelOptions { + user_id?: string + presence?: Record +} + +interface ChannelSubscription { + unsubscribe: () => void + + // Messaging + broadcast(event: string, data: unknown): void + + // Presence + track(state: Record): void +} + +interface PresenceEvent { + type: 'presence' + event: 'join' | 'leave' | 'sync' | 'update' + channel: string + payload: unknown +} +``` + +## Examples + +### Basic Table Subscription +```typescript +// Subscribe to new users +const subscription = 
client.from('users').on('INSERT', (payload) => { + console.log('New user created:', payload.data); +}); + +// Subscribe to user deletions +client.from('users').on('DELETE', (payload) => { + console.log('User deleted:', payload.data); +}); + +// Subscribe to all events +client.from('posts').on('*', (payload) => { + console.log('Post event:', payload); +}); +``` + +### Event Filtering +```typescript +// Subscribe to active users only +client.from('users').on('INSERT', (payload) => { + console.log('New active user:', payload.data); +}) +.subscribe({ where: { active: true } }); + +// Subscribe to posts by specific user +client.from('posts').on('INSERT', (payload) => { + console.log('New post by user 123:', payload.data); +}) +.subscribe({ author_id: 123 }); +``` + +### Channel Messaging +```typescript +// Join chat room +const channel = client.channel('general-chat'); + +// Listen for messages +channel.onBroadcast((event, data) => { + if (event === 'message') { + console.log(`[${data.user.name}]: ${data.text}`); + } +}); + +// Send message +channel.broadcast('message', { + text: 'Hello everyone!', + user: { id: '123', name: 'John' } +}); + +// Track typing state +channel.track({ typing: true }); + +// Update presence +channel.track({ typing: false }); +``` + +### Presence Tracking +```typescript +// Join channel with presence +const channel = client.channel('game-room'); + +// Track presence +channel.track({ + player: { id: '123', name: 'John' }, + status: 'online' +}); + +// Listen for presence updates +channel.onPresence((event) => { + if (event.event === 'join') { + console.log('Player joined:', event.payload); + } else if (event.event === 'leave') { + console.log('Player left:', event.payload); + } +}); + +// Clean up +channel.subscribe().unsubscribe(); +``` + +### Connection Management +```typescript +// Check connection status +if (!client.isConnected()) { + client.connect(); +} + +// Handle connection errors +client.on('error', (error) => { + 
console.error('Connection error:', error); +}); + +// Clean up on unmount +// This is typically called in a React useEffect cleanup +client.disconnect(); +``` + +## Best Practices + +### Connection Management +1. **Automatic Reconnection**: The client handles reconnection automatically with exponential backoff +2. **Connection State**: Always check `isConnected()` before sending messages +3. **Cleanup**: Call `disconnect()` when no longer needed to prevent memory leaks +4. **Error Handling**: Listen for error events to handle connection issues + +### Subscription Management +1. **Unsubscribe**: Always unsubscribe when done to prevent memory leaks +2. **Filtering**: Use filters to reduce unnecessary event processing +3. **Batching**: Consider batching updates when handling rapid-fire events +4. **Error Handling**: Gracefully handle subscription errors + +### Performance Optimization +1. **Event Delegation**: Use wildcards (`*`) for multiple event types +2. **Limit Subscriptions**: Keep the number of active subscriptions reasonable +3. **Connection Sharing**: Share RealtimeClient instances when possible +4. **Event Filtering**: Use server-side filters to reduce network traffic + +### Security Considerations +1. **Authentication**: Always provide valid tokens for protected channels +2. **Event Validation**: Validate incoming events before processing +3. **Rate Limiting**: Implement client-side rate limiting for event handlers +4. **Sensitive Data**: Be careful with sensitive data in presence state + +### Error Handling +1. **Network Errors**: Implement retry logic for transient errors +2. **Authentication Errors**: Handle token expiration gracefully +3. **Subscription Errors**: Handle subscription failures with fallbacks +4. 
**Event Processing**: Handle malformed events safely + +## Troubleshooting + +### Common Issues + +**Connection Fails** +```typescript +// Verify token is valid +const token = localStorage.getItem('betterbase_token'); +if (!token) { + console.error('No authentication token found'); +} + +// Check URL format (token must be declared before constructing the client) +const client = new RealtimeClient('https://api.betterbase.dev', token); +``` + +**Subscriptions Not Working** +```typescript +// Ensure WebSocket is open +if (client.isConnected()) { + // Subscribe again + const subscription = client.from('users').on('INSERT', callback); +} else { + // Wait for connection + setTimeout(() => { + const subscription = client.from('users').on('INSERT', callback); + }, 1000); +} +``` + +**Performance Issues** +```typescript +// Limit concurrent subscriptions +const MAX_SUBSCRIPTIONS = 10; +let subscriptionCount = 0; + +function subscribeSafe(table: string, event: string, callback: Function) { + if (subscriptionCount >= MAX_SUBSCRIPTIONS) { + console.warn('Too many subscriptions'); + return; + } + + subscriptionCount++; + const subscription = client.from(table).on(event, callback); + + // Capture the original unsubscribe before wrapping it; calling + // subscription.unsubscribe() inside the wrapper would recurse forever. + const originalUnsubscribe = subscription.unsubscribe; + subscription.unsubscribe = () => { + originalUnsubscribe(); + subscriptionCount--; + }; + + return subscription; +} +``` + +## Integration Examples + +### React Integration +```typescript +import { useEffect, useState } from 'react'; +import { RealtimeClient } from '@betterbase/client'; + +function useRealtime(table: string, event: string, callback: Function) { + const [client, setClient] = useState(null); + + useEffect(() => { + const token = localStorage.getItem('betterbase_token'); + const realtimeClient = new RealtimeClient( + process.env.NEXT_PUBLIC_API_URL!, + token + ); + + setClient(realtimeClient); + + return () => { + realtimeClient.disconnect(); + }; + }, []); + + useEffect(() => { + if (!client) return; + + const subscription = client.from(table).on(event, callback); + + return () => { + subscription.unsubscribe(); + }; + }, [client, 
table, event, callback]); +} + +// Usage in component +function UserList() { + const [users, setUsers] = useState([]); + + useRealtime('users', 'INSERT', (payload) => { + setUsers(prev => [...prev, payload.data]); + }); + + return ( +
    <ul> +      {users.map(user => ( +        <li key={user.id}>{user.name}</li> +      ))} +    </ul>
+ ); +} +``` + +### Vue Integration +```typescript +import { ref, onMounted, onUnmounted } from 'vue'; +import { RealtimeClient } from '@betterbase/client'; + +export function useRealtime(table: string, event: string, callback: Function) { + const client = ref(null); + + onMounted(() => { + const token = localStorage.getItem('betterbase_token'); + client.value = new RealtimeClient( + import.meta.env.VITE_API_URL, + token + ); + }); + + onUnmounted(() => { + if (client.value) { + client.value.disconnect(); + } + }); + + return { + subscribe: () => { + if (!client.value) return { unsubscribe: () => {} }; + + const subscription = client.value.from(table).on(event, callback); + return subscription; + } + }; +} + +// Usage in component +import { useRealtime } from '@/composables/useRealtime'; + +export default { + setup() { + const users = ref([]); + + const { subscribe } = useRealtime('users', 'INSERT', (payload) => { + users.value.push(payload.data); + }); + + return { + users, + onMounted: () => subscribe() + }; + } +}; +``` + +### Node.js Integration +```typescript +import { RealtimeClient } from '@betterbase/client'; + +// Create client for server-side use +const client = new RealtimeClient('https://api.betterbase.dev', process.env.API_TOKEN); + +// Subscribe to events +client.from('logs').on('INSERT', (payload) => { + console.log('New log entry:', payload.data); +}); + +// Listen for connection +client.on('open', () => { + console.log('Realtime connection established'); +}); + +client.on('close', () => { + console.log('Realtime connection closed'); +}); + +// Keep process alive +process.on('SIGINT', () => { + client.disconnect(); + process.exit(0); +}); +``` + +## Configuration Options + +### Connection Settings +```typescript +const client = new RealtimeClient(url, token, { + reconnectAttempts: 5, + reconnectDelay: 1000, + maxReconnectDelay: 30000, + heartbeatInterval: 30000 +}); +``` + +### Subscription Filters +```typescript +// Server-side filters 
+client.from('users').on('INSERT', callback) + .subscribe({ + where: { + status: 'active', + created_at: { $gt: new Date().toISOString() } + } + }); + +// Multiple event types +client.from('posts').on('*', callback) + .subscribe({ + limit: 100, + order_by: 'created_at' + }); +``` + +## Version Compatibility + +### Client Requirements +- **Node.js**: 16.0+ (server-side) +- **Browser**: Chrome 60+, Firefox 55+, Safari 12+, Edge 79+ +- **TypeScript**: 4.0+ + +### Breaking Changes +- **v2.0**: Changed event payload structure +- **v1.5**: Replaced callback-based API with promise-based +- **v1.0**: Initial release + +## Migration Guide + +### From v1.x to v2.x +```typescript +// Old v1.x API +client.subscribe('users', (event, data) => { + console.log(event, data); +}); + +// New v2.x API +client.from('users').on('*', (payload) => { + console.log(payload.event, payload.data); +}); +``` + +## Support + +### Documentation +- [API Reference](https://betterbase.dev/docs/client/realtime) +- [Examples](https://betterbase.dev/examples/realtime) +- [Troubleshooting Guide](https://betterbase.dev/docs/troubleshooting) + +### Community +- [GitHub Discussions](https://github.com/betterbase/client/discussions) +- [Discord Community](https://discord.gg/betterbase) +- [Stack Overflow](https://stackoverflow.com/questions/tagged/betterbase) + +### Reporting Issues +- [GitHub Issues](https://github.com/betterbase/client/issues) +- [Bug Report Template](https://github.com/betterbase/client/.github/ISSUE_TEMPLATE/bug_report.md) + +--- + +## License + +MIT License - see LICENSE file for details. + +© 2023 BetterBase LLC. \ No newline at end of file diff --git a/docs/core/auto-rest.md b/docs/core/auto-rest.md new file mode 100644 index 0000000..8f1b917 --- /dev/null +++ b/docs/core/auto-rest.md @@ -0,0 +1,383 @@ +# Auto-REST Module + +Automatic CRUD route generation from Drizzle ORM schema with built-in RLS enforcement, filtering, and pagination. 
+ +## Table of Contents +- [Overview](#overview) +- [Features](#features) +- [Installation](#installation) +- [Usage](#usage) +- [API Reference](#api-reference) + - [mountAutoRest](#mountautorest) + - [AutoRestOptions](#autorestoptions) + - [QUERY_OPERATORS](#query_operators) + - [Types](#types) +- [Security Considerations](#security-considerations) +- [Customization](#customization) +- [Examples](#examples) + +## Overview + +The Auto-REST module automatically generates full CRUD (Create, Read, Update, Delete) RESTful endpoints for all tables in a Drizzle ORM schema. It eliminates boilerplate code by inspecting your database schema and creating standardized API routes with built-in security features. + +Key capabilities: +- Automatic route generation for all tables +- Built-in Row Level Security (RLS) enforcement +- Advanced filtering with query operators +- Pagination support +- Input sanitization and validation +- Consistent error responses using BetterBaseResponse format + +## Features + +### Automatic CRUD Generation +For each table in your schema, Auto-REST creates: +- `GET /api/:table` - List all rows (with filtering, sorting, pagination) +- `GET /api/:table/:id` - Get single row by ID +- `POST /api/:table` - Insert new row +- `PATCH /api/:table/:id` - Update existing row +- `DELETE /api/:table/:id` - Delete row + +### Security Features +- **RLS Enforcement**: When enabled, all routes require authentication +- **Per-row Access Control**: RLS filtering based on ownership columns +- **Input Sanitization**: Column whitelisting for insert/update operations +- **Owner Column Protection**: Prevents modification of ownership fields through API +- **SQL Injection Prevention**: Uses parameterized queries via Drizzle ORM + +### Filtering & Query Capabilities +- **Basic Operators**: eq, neq, gt, gte, lt, lte +- **Pattern Matching**: like, ilike (case-insensitive like) +- **Array Operations**: in (comma-separated values) +- **Null Checks**: is_null (checks for NULL or 
NOT NULL) +- **Combined Filters**: Multiple query parameters combined with AND logic +- **Sorting**: order_by and order parameters +- **Pagination**: limit and offset with configurable defaults + +### Response Format +All endpoints return consistent `BetterBaseResponse` format: +```json +{ + "data": T | null, + "error": string | SerializedError | null, + "count?": number, + "pagination?": { + "page": number, + "pageSize": number, + "total": number + } +} +``` + +## Installation + +The Auto-REST module is part of `@betterbase/core`: +```bash +bun add @betterbase/core +``` + +## Usage + +### Basic Setup +```typescript +import { Hono } from 'hono'; +import { drizzle } from 'drizzle-orm/node-postgres'; +import { + mountAutoRest, + defineConfig +} from '@betterbase/core'; +import { eq } from 'drizzle-orm'; +import * as schema from './db/schema'; + +// Initialize Hono app +const app = new Hono(); + +// Set up database connection +const config = defineConfig({ + project: { name: 'my-app' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL + } +}); + +const db = drizzle(config.provider.connectionString); + +// Mount Auto-REST routes +mountAutoRest(app, db, schema); + +// Start server +app.listen(3000); +``` + +### With Custom Options +```typescript +mountAutoRest(app, db, schema, { + enabled: true, + excludeTables: ['audit_logs', 'sessions'], // Skip sensitive tables + basePath: '/api/v2', // Custom base path + enableRLS: true, // Enable RLS enforcement + writableColumns: ['name', 'email'], // Restrict updatable columns + ownerColumn: 'userId' // Column for RLS ownership checks +}); +``` + +## API Reference + +### mountAutoRest +Primary function for mounting Auto-REST routes. 
+ +```typescript +export function mountAutoRest( + app: Hono, + db: DrizzleDB, + schema: Record, + options: AutoRestOptions = {} +): void +``` + +#### Parameters +- `app`: Hono application instance +- `db`: Drizzle database instance +- `schema`: Record of table name to Drizzle table schema (from `drizzle-orm`) +- `options`: Configuration options (see `AutoRestOptions`) + +#### Returns +`void` - Routes are registered directly on the Hono app + +### AutoRestOptions +Configuration interface for Auto-REST behavior. + +```typescript +export interface AutoRestOptions { + /** Enable/disable auto-rest (default: true) */ + enabled?: boolean; + + /** Tables to exclude from auto-rest (default: []) */ + excludeTables?: string[]; + + /** Base path for API routes (default: /api) */ + basePath?: string; + + /** Enable RLS enforcement (default: true) */ + enableRLS?: boolean; + + /** Columns that are allowed to be modified via API (default: all columns) */ + writableColumns?: string[]; + + /** Column to use for RLS user ownership check (e.g., 'userId', 'owner_id') */ + ownerColumn?: string; +} +``` + +### QUERY_OPERATORS +Predefined filter operators for query parameter parsing. + +```typescript +export const QUERY_OPERATORS = { + eq: (col: DrizzleTable, val: unknown) => eq(col, val), + neq: (col: DrizzleTable, val: unknown) => ne(col, val), + gt: (col: DrizzleTable, val: unknown) => gt(col, val), + gte: (col: DrizzleTable, val: unknown) => gte(col, val), + lt: (col: DrizzleTable, val: unknown) => lt(col, val), + lte: (col: DrizzleTable, val: unknown) => lte(col, val), + like: (col: DrizzleTable, val: unknown) => like(col, `%${val}%`), + ilike: (col: DrizzleTable, val: unknown) => ilike(col, `%${val}%`), + in: (col: DrizzleTable, val: unknown) => { + const values = typeof val === "string" ? val.split(",") : val; + return inArray(col, values as unknown[]); + }, + is_null: (col: DrizzleTable, val: unknown) => { + const check = val === "true" || val === true; + return check ? 
isNull(col) : isNotNull(col); + }, +} as const; +``` + +### Types +Exported TypeScript types from the module. + +```typescript +export type DrizzleTable = any; // Drizzle table schema type +export type DrizzleDB = any; // Drizzle database client type + +export interface AutoRestOptions { + enabled?: boolean; + excludeTables?: string[]; + basePath?: string; + enableRLS?: boolean; + writableColumns?: string[]; + ownerColumn?: string; +} +``` + +## Security Considerations + +### Row Level Security (RLS) +When `enableRLS: true` (default): +1. All endpoints require authentication via `checkRLSAuth` +2. List endpoints apply per-row filtering using `ownerColumn` +3. Write operations (POST, PATCH, DELETE) verify row ownership before execution +4. Unauthenticated requests return 401 Unauthorized +5. Unauthorized access attempts return 403 Forbidden + +### Input Protection +- **Column Whitelisting**: Only `writableColumns` can be inserted/updated +- **Owner Column Protection**: Prevents API modification of ownership fields +- **SQL Injection Prevention**: Uses Drizzle ORM parameterized queries +- **Request Size Limits**: Depends on Hono middleware configuration + +### Rate Limiting & DDOS Protection +Auto-REST itself doesn't implement rate limiting - this should be handled at the middleware level: +```typescript +app.use('*', async (c, next) => { + // Implement rate limiting logic here + await next(); +}); +``` + +## Customization + +### Excluding Tables +Prevent Auto-REST from generating routes for specific tables: +```typescript +mountAutoRest(app, db, schema, { + excludeTables: ['secrets', 'migrations', 'audit_logs'] +}); +``` + +### Custom Base Path +Change the base URL for all generated routes: +```typescript +mountAutoRest(app, db, schema, { + basePath: '/api/v1/resource' +}); +// Results in: GET /api/v1/resource/users, POST /api/v1/resource/posts, etc. 
+``` + +### Column-Level Permissions +Restrict which columns can be modified via API: +```typescript +mountAutoRest(app, db, schema, { + writableColumns: ['title', 'content', 'published'] // Only these columns updatable +}); +``` + +### Disabling Auto-REST +Temporarily disable route generation: +```typescript +mountAutoRest(app, db, schema, { + enabled: false // No routes will be generated +}); +``` + +## Examples + +### Standard Usage +See [Usage](#usage) above for basic implementation. + +### Multi-tenant Application +```typescript +mountAutoRest(app, db, schema, { + enableRLS: true, + ownerColumn: 'tenant_id', // Multi-tenancy via tenant ID + basePath: '/api' +}); +``` + +### Public API with Protected Admin Routes +```typescript +// Public routes (no RLS) +mountAutoRest(app, db, schema, { + enableRLS: false, + basePath: '/api/public', + excludeTables: ['admin_users', 'system_settings'] +}); + +// Admin routes (with RLS) +mountAutoRest(app, db, schema, { + enableRLS: true, + ownerColumn: 'admin_id', + basePath: '/api/admin', + excludeTables: ['public_profiles', 'blog_posts'] // Only admin tables +}); +``` + +### Custom Filtering Endpoints +Extend Auto-REST with custom search functionality: +```typescript +app.get('/api/search', async (c) => { + const { q, table } = c.req.query(); + if (!q || !table) { + return c.json({ error: 'Missing query or table parameter' }, 400); + } + + // Implement custom search logic here + // Could use full-text search or custom filtering +}); +``` + +## Error Handling +All endpoints return standardized error responses: +- **400 Bad Request**: Invalid request body or query parameters +- **401 Unauthorized**: Missing or invalid authentication (when RLS enabled) +- **403 Forbidden**: Authenticated but insufficient permissions +- **404 Not Found**: Requested resource doesn't exist or access denied +- **500 Internal Server Error**: Unexpected server error + +Error format follows `BetterBaseResponse`: +```json +{ + "data": null, + "error": 
"Error message description", + "count": null, + "pagination": null +} +``` + +## Performance Considerations + +### Query Optimization +- Auto-REST uses efficient Drizzle ORM queries +- Pagination uses `LIMIT` and `OFFSET` (consider keyset pagination for large datasets) +- Filtering is applied at the database level +- Consider adding database indexes for frequently queried/filtered columns + +### Caching +For read-heavy applications, consider implementing caching middleware: +```typescript +app.use('/api/*', async (c, next) => { + // Check cache, serve if fresh, otherwise call next() and cache result + await next(); +}); +``` + +### Connection Pooling +Ensure your database connection pooling is properly configured for expected load. + +## Limitations & Known Issues + +### Complex Joins +Auto-REST generates simple table-based queries. For complex joins or computed fields: +- Use custom routes alongside Auto-REST +- Create database views for complex query logic +- Implement custom endpoints for aggregated data + +### Offsetting Large Datasets +OFFSET-based pagination can become slow with large datasets. 
Consider: +- Keyset pagination for infinite scroll patterns +- Implementing custom pagination strategies +- Adding WHERE clauses to limit result sets + +### Database-Specific Features +Some advanced database features may require custom implementation: +- Full-text search +- Geospatial queries +- Complex transactions +- Stored procedure calls + +## Related Modules +- [Configuration](./config.md): For defining BetterBase configuration +- [RLS](../features/rls.md): For understanding Row Level Security implementation +- [Providers](./providers.md): For database provider-specific details +- [Logger](./logger.md): For Auto-REST logging integration \ No newline at end of file diff --git a/docs/core/branching.md b/docs/core/branching.md new file mode 100644 index 0000000..57abfe7 --- /dev/null +++ b/docs/core/branching.md @@ -0,0 +1,389 @@ +# Branching Module + +Preview environment system for creating isolated database branches for feature development and testing. + +## Table of Contents +- [Overview](#overview) +- [Features](#features) +- [Installation](#installation) +- [Usage](#usage) +- [API Reference](#api-reference) + - [Configuration](#configuration) + - [Database Operations](#database-operations) + - [Storage Operations](#storage-operations) + - [Types](#types) +- [Workflow Examples](#workflow-examples) +- [Best Practices](#best-practices) + +## Overview + +The Branching module enables preview environments by creating isolated database branches that copy schema and optionally data from a parent database. This allows developers to test features, run migrations, and experiment without affecting the main database.
+ +Key capabilities: +- Create/destroy database branches on-demand +- Copy schema from parent database +- Optional data cloning for realistic testing +- Automatic cleanup based on TTL or manual deletion +- Storage isolation for file uploads +- Webhook and real-time isolation per branch +- Integration with Vercel-style preview deployments + +## Features + +### Database Branching +- **Schema Cloning**: Replicates table structure, indexes, and constraints +- **Optional Data Cloning**: Copy production/staging data for realistic testing +- **Migration Isolation**: Run migrations on branches without affecting parent +- **Branch Metadata**: Track creation time, parent branch, and TTL +- **Automatic Cleanup**: Expire branches based on time-to-live settings + +### Storage Branching +- **Bucket Isolation**: Separate storage buckets per branch +- **Policy Replication**: Copy storage policies from parent +- **Selective Cloning**: Option to clone existing files +- **Automatic Cleanup**: Remove branch storage when branch is destroyed + +### Configuration-Driven +- **Declarative Setup**: Define branching behavior in BetterBase config +- **Provider Support**: Works with all supported database providers +- **Limits & Quotas**: Control number of concurrent previews +- **Sleep Timeout**: Automatically suspend inactive branches to save costs + +### Development Workflow Integration +- **Preview URLs**: Generate unique URLs for each branch +- **Environment Variables**: Isolated env vars per branch +- **CI/CD Integration**: Hook into preview deployment systems +- **Git Branch Mapping**: Automatically create branches for Git branches + +## Installation + +The Branching module is part of `@betterbase/core`: +```bash +bun add @betterbase/core +``` + +## Usage + +### Basic Configuration +```typescript +import { defineConfig } from '@betterbase/core'; + +const config = defineConfig({ + project: { name: 'my-app' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL 
+ }, + branching: { + enabled: true, + maxPreviews: 10, + defaultSleepTimeout: 3600, // 1 hour + storageEnabled: true + } +}); +``` + +### Creating a Branch (via CLI or API) +```bash +# Using BetterBase CLI +bunx betterbase branch create feature/new-feature --parent main + +# Or specify source branch +bunx betterbase branch create feature/bugfix --parent staging +``` + +### Using Branches in Application +```typescript +import { getBranchConfig } from '@betterbase/core/branching'; + +// Get configuration for current branch +const branchConfig = getBranchConfig(); +// Returns config with branch-specific: +// - database connection string +// - storage bucket name +// - webhook URLs +// - environment variables +``` + +## API Reference + +### Configuration +Branching configuration is defined in the main BetterBase config. + +#### Branching Options +```typescript +export interface BranchingConfig { + /** Enable/disable branching (default: true) */ + enabled?: boolean; + + /** Maximum number of concurrent preview branches (default: 10) */ + maxPreviews?: number; + + /** Default sleep timeout in seconds for inactive branches (default: 3600s/1h) */ + defaultSleepTimeout?: number; + + /** Enable storage branching (default: true) */ + storageEnabled?: boolean; + + /** Automatically delete branches older than this (in seconds) */ + autoDeleteAfter?: number; + + /** Default strategy for data cloning: 'none', 'schema-only', 'full' */ + defaultCloneStrategy?: 'none' | 'schema-only' | 'full'; +} +``` + +### Database Operations +Low-level database branching operations. 
+ +#### createBranch +```typescript +export async function createBranch( + branchName: string, + parentBranch: string, + options: { + cloneData?: boolean; // Default: false + schemaOnly?: boolean; // Deprecated: use cloneData: false + ttlSeconds?: number; // Time to live + metadata?: Record; + } +): Promise +``` + +#### getBranch +```typescript +export async function getBranch(branchName: string): Promise +``` + +#### listBranches +```typescript +export async function listBranches( + filter?: { + status?: 'active' | 'sleeping' | 'deleted'; + createdAfter?: Date; + createdBefore?: Date; + } +): Promise +``` + +#### deleteBranch +```typescript +export async function deleteBranch(branchName: string): Promise +``` + +#### wakeBranch +```typescript +export async function wakeBranch(branchName: string): Promise +``` + +#### sleepBranch +```typescript +export async function sleepBranch(branchName: string): Promise +``` + +### Storage Operations +Storage-specific branching functionality. + +#### getBranchStorageConfig +```typescript +export function getBranchStorageConfig( + branchName: string, + parentConfig: StorageConfig +): StorageConfig +``` +Returns storage configuration with branch-specific bucket/path. + +#### cloneBranchStorage +```typescript +export async function cloneBranchStorage( + branchName: string, + parentBranch: string, + options: { + prefix?: string; // Optional prefix for cloned files + include?: string[]; // Glob patterns to include + exclude?: string[]; // Glob patterns to exclude + maxConcurrent?: number; // Default: 5 + } +): Promise +``` + +### Types +Exported TypeScript types and interfaces. 
+ +#### BranchInfo +```typescript +export interface BranchInfo { + /** Unique branch identifier */ + name: string; + + /** Parent branch name */ + parent: string; + + /** Current status: 'active', 'sleeping', 'deleted' */ + status: BranchStatus; + + /** Creation timestamp */ + createdAt: Date; + + /** Last accessed timestamp */ + lastAccessedAt: Date; + + /** Scheduled deletion time (if applicable) */ + expiresAt?: Date; + + /** Custom metadata */ + metadata: Record; + + /** Database connection info */ + database: { + connectionString: string; + /** For Turso: libSQL URL */ + url?: string; + /** For Turso: auth token */ + authToken?: string; + }; + + /** Storage configuration */ + storage: { + bucket: string; + region?: string; + endpoint?: string; + }; + + /** Webhook configuration */ + webhooks: Array<{ + id: string; + table: string; + events: ('INSERT' | 'UPDATE' | 'DELETE')[]; + url: string; + secret: string; + enabled: boolean; + }>; +} +``` + +#### BranchStatus +```typescript +export type BranchStatus = 'active' | 'sleeping' | 'deleted'; +``` + +#### StorageCloneResult +```typescript +export interface StorageCloneResult { + /** Number of files successfully cloned */ + filesCloned: number; + + /** Total size cloned in bytes */ + bytesCloned: number; + + /** Any errors encountered during cloning */ + errors: Array<{ + file: string; + error: string; + }>; + + /** Duration of cloning operation in milliseconds */ + durationMs: number; +} +``` + +## Workflow Examples + +### Feature Development Workflow +1. Developer creates feature branch in Git: `git checkout -b feature/new-payment-method` +2. CI system detects new branch and triggers preview deployment +3. Preview deployment runs: `bunx betterbase branch create feature/new-payment-method --parent main` +4. Application starts with branch-specific configuration: + - Database: `feature_new_payment_method` database + - Storage: `feature-new-payment-method-uploads` bucket + - Environment: Isolated env vars +5. 
Developer tests feature against realistic data +6. When PR is merged: `bunx betterbase branch delete feature/new-payment-method` +7. Branch and all associated resources are cleaned up + +### Review App Workflow +1. PR opened: `feature/user-profile-update` +2. Preview environment automatically provisioned: + - Database branch cloned from `staging` with production-like data + - Storage bucket with sample files + - Unique preview URL: `https://feature-user-profile-update--myapp.preview.betterbase.dev` +3. QA team tests against preview environment +4. PR updated with new commits: Preview automatically refreshed +5. PR merged or closed: Preview environment destroyed + +### Data Migration Testing +1. Create migration branch: `bunx betterbase branch create migration/test-v2 --parent main --clone-data` +2. Run migration scripts against branch +3. Validate results and performance +4. Adjust migration as needed +5. Delete test branch: `bunx betterbase branch delete migration/test-v2` + +## Best Practices + +### Branch Naming Conventions +- Use descriptive names: `feature/user-auth`, `bugfix/login-issue` +- For temporary testing: `test/perf-test-$(date +%s)` +- For long-running branches: `env/staging`, `env/production-preview` +- Avoid special characters that may cause issues in DNS or database names + +### Data Cloning Strategies +- **Development**: `cloneData: false` (schema only) for fast branch creation +- **QA/Staging**: `cloneData: true` with limited dataset for realistic testing +- **Performance Testing**: `cloneData: true` with full dataset for load testing +- **Sensitive Data**: Implement data masking or subset cloning for PII + +### Resource Management +- Set appropriate `maxPreviews` to prevent resource exhaustion +- Configure `defaultSleepTimeout` to balance cost vs. 
responsiveness +- Use `autoDeleteAfter` to automatically clean up forgotten branches +- Monitor storage usage per branch to prevent unexpected costs + +### Security Considerations +- Ensure branch names are validated to prevent injection attacks +- Isolate environment variables between branches +- Apply same security policies to branches as parent (network, IAM, etc.) +- Consider encrypting branch storage if parent storage is encrypted +- Audit branch creation/deletion for compliance + +### Integration with CI/CD +- Use branch-specific environment variables for configuration +- Pass branch name as environment variable to preview deployments +- Implement webhook triggers for branch lifecycle events +- Cache dependencies between branch refreshes to speed up builds + +## Limitations & Considerations + +### Provider Support +- **PostgreSQL**: Full support via `CREATE DATABASE` with `TEMPLATE` option +- **MySQL/MariaDB**: Support via schema dump and restore using `mysqldump` (MySQL has no native database-cloning statement) +- **SQLite**: File-copy based branching (limited concurrency) +- **PlanetScale**: Uses native branching API +- **Turso**: Uses libSQL branching capabilities +- **Supabase**: Uses PostgreSQL branching under the hood +- **Managed/External**: Requires custom implementation or manual setup + +### Performance Implications +- Schema-only branches: Very fast creation (milliseconds) +- Full data clone: Proportional to database size +- Storage cloning: Depends on number and size of files +- Active branches consume resources proportionally to their usage + +### Consistency Guarantees +- Branches are point-in-time snapshots +- No automatic synchronization with parent +- For real-time sync, consider logical replication or custom solutions +- Write conflicts possible if multiple branches modify same data + +### Storage Limitations +- Object storage branching depends on provider capabilities +- Some providers may not support bucket-level operations efficiently +- Consider using prefixes instead of separate
buckets for cost optimization +- Lifecycle policies should be replicated to branch buckets + +## Related Modules +- [Configuration](./config.md): For defining branching behavior in BetterBase config +- [Providers](./providers.md): For database provider-specific branching implementation details +- [Storage](./storage.md): For storage branching and policy replication +- [Webhooks](./webhooks.md): For webhook isolation per branch +- [Realtime](./realtime.md): For real-time channel isolation per branch +- [Auto-REST](./auto-rest.md): For automatic API generation in branch environments \ No newline at end of file diff --git a/docs/core/config.md b/docs/core/config.md new file mode 100644 index 0000000..2303f4e --- /dev/null +++ b/docs/core/config.md @@ -0,0 +1,646 @@ +# Configuration Module + +Zod-based configuration schema validation for BetterBase applications. + +## Table of Contents +- [Overview](#overview) +- [Features](#features) +- [Installation](#installation) +- [Usage](#usage) +- [API Reference](#api-reference) + - [defineConfig](#defineconfig) + - [validateConfig](#validateconfig) + - [parseConfig](#parseconfig) + - [assertConfig](#assertconfig) + - [Schemas](#schemas) +- [Configuration Structure](#configuration-structure) + - [Project](#project) + - [Provider](#provider) + - [Storage](#storage) + - [Webhooks](#webhooks) + - [GraphQL](#graphql) + - [Vector](#vector) + - [Auto-REST](#autorest) + - [Branching](#branching) +- [Validation & Error Handling](#validation--error-handling) +- [Environment Variables](#environment-variables) +- [Best Practices](#best-practices) +- [Examples](#examples) + +## Overview + +The Configuration module provides a robust, type-safe configuration system using Zod schema validation. It ensures that your BetterBase application configuration is valid at runtime with clear error messages and full TypeScript inference. 
+ +Key capabilities: +- **Schema Validation**: Runtime validation of configuration objects +- **Type Safety**: Full TypeScript type inference from Zod schemas +- **Clear Error Messages**: Descriptive validation errors with field paths +- **Flexible Structure**: Support for nested objects, arrays, unions, and custom refinements +- **Environment Variable Integration**: Seamless work with process.env references +- **Provider-Specific Validation**: Custom validation rules per database provider +- **Extensible Design**: Easy to extend for custom configuration needs + +## Features + +### Runtime Validation +All configuration is validated at startup using Zod schemas, preventing misconfiguration from causing runtime errors. + +### TypeScript Integration +Configuration types are automatically inferred from Zod schemas, ensuring perfect alignment between runtime validation and compile-time types. + +### Comprehensive Error Reporting +Validation errors include: +- Exact field path where validation failed +- Clear error messages explaining what went wrong +- Support for multiple simultaneous errors + +### Provider-Specific Logic +Different database providers have different connection requirements: +- **PostgreSQL/MySQL/Supabase/PlanetScale**: Require connectionString +- **Turso**: Requires url and authToken +- **Managed**: No database connection required + +### Modular Structure +Configuration is organized into logical sections: +- Project metadata +- Database provider configuration +- Storage provider settings +- Webhook configuration +- GraphQL settings +- Vector search configuration +- Auto-REST options +- Branching/preview environment settings + +### Environment Variable Support +Special handling for environment variable references in string values (e.g., "process.env.DATABASE_URL"). 
+ +## Installation + +The Configuration module is part of `@betterbase/core`: +```bash +bun add @betterbase/core +``` + +## Usage + +### Defining Configuration +```typescript +import { defineConfig } from '@betterbase/core'; + +const config = defineConfig({ + project: { name: 'my-app' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL + }, + storage: { + provider: 's3', + bucket: 'my-app-uploads', + region: 'us-west-2' + }, + webhooks: [ + { + id: 'user-notifications', + table: 'users', + events: ['INSERT', 'UPDATE'], + url: 'process.env.USER_WEBHOOK_URL', + secret: 'process.env.USER_WEBHOOK_SECRET' + } + ], + branching: { + enabled: true, + maxPreviews: 5 + } +}); +``` + +### Using Configuration in Code +```typescript +import { config } from './betterbase.config.ts'; + +// Access configuration with full TypeScript support +const databaseUrl = config.provider.connectionString; +const isBranchingEnabled = config.branching?.enabled ?? false; +const storageBucket = config.storage?.bucket; +``` + +### Validating External Configuration +```typescript +import { validateConfig, parseConfig } from '@betterbase/core'; + +// Validate a configuration object +const isValid = validateConfig(externalConfig); + +// Parse with detailed result +const result = parseConfig(externalConfig); +if (result.success) { + // Use result.data +} else { + // Handle result.error +} +``` + +## API Reference + +### defineConfig +Validates and returns a BetterBase configuration object. + +```typescript +export function defineConfig( + config: z.input +): BetterBaseConfig +``` + +#### Parameters +- `config`: Configuration object to validate + +#### Returns +- `BetterBaseConfig`: Validated configuration with full TypeScript typing + +#### Throws +- `ZodError`: If validation fails (wrapped in descriptive Error by assertConfig) + +### validateConfig +Checks if a configuration is valid without throwing. 
+ +```typescript +export function validateConfig(config: unknown): boolean +``` + +#### Parameters +- `config`: Configuration to validate + +#### Returns +- `boolean`: true if valid, false otherwise + +### parseConfig +Safely parses configuration returning a result object. + +```typescript +export function parseConfig( + config: unknown +): z.SafeParseReturnType +``` + +#### Parameters +- `config`: Configuration to parse + +#### Returns +- `SafeParseReturnType`: Object with `.success` boolean and either `.data` or `.error` + +### assertConfig +Validates configuration and throws descriptive error if invalid. + +```typescript +export function assertConfig(config: unknown): asserts config is BetterBaseConfig +``` + +#### Parameters +- `config`: Configuration to validate + +#### Throws +- `Error`: With detailed validation error messages if invalid + +### Schemas +Exported Zod schemas for advanced usage and extension. + +```typescript +export { + ProviderTypeSchema, + BetterBaseConfigSchema, + // ... other internal schemas +} +``` + +## Configuration Structure + +### Project +Basic project metadata. + +```typescript +project: z.object({ + name: z.string().min(1, "Project name is required"), +}) +``` + +#### Fields +- `name`: Human-readable project name (required) + +### Provider +Database provider configuration with provider-specific validation. + +```typescript +provider: z.object({ + type: ProviderTypeSchema, // neon | turso | planetscale | supabase | postgres | managed + connectionString: z.string().optional(), + url: z.string().optional(), // Turso - libSQL connection URL + authToken: z.string().optional(), // Turso - auth token for managed DB +}) +``` + +#### Provider-Specific Requirements +- **postgres, neon, planetscale, supabase**: `connectionString` required +- **turso**: `url` and `authToken` required +- **managed**: No database fields required + +### Storage +File storage provider configuration. 
+ +```typescript +storage: z.object({ + provider: z.enum(["s3", "r2", "backblaze", "minio", "managed"]), + bucket: z.string(), + region: z.string().optional(), + endpoint: z.string().optional(), + policies: z.array( + z.object({ + bucket: z.string(), + operation: z.enum(["upload", "download", "list", "delete", "*"]), + expression: z.string(), + }) + ).default([]) +}).optional() +``` + +#### Storage Providers +- **s3**: Amazon S3 +- **r2**: Cloudflare R2 +- **backblaze**: Backblaze B2 +- **minio**: Self-hosted MinIO +- **managed**: No external storage (local/dev only) + +#### Policy Format +Each policy defines: +- `bucket`: Target bucket (can differ from main bucket) +- `operation`: Storage operation to allow +- `expression`: RLS-like expression for conditional access + +### Webhooks +Outgoing webhook delivery configuration. + +```typescript +webhooks: z.array( + z.object({ + id: z.string(), + table: z.string(), + events: z.array(z.enum(["INSERT", "UPDATE", "DELETE"])), + url: z.string().refine((val) => val.startsWith("process.env."), { + message: + "URL must be an environment variable reference (e.g., process.env.WEBHOOK_URL)", + }), + secret: z.string().refine((val) => val.startsWith("process.env."), { + message: + "Secret must be an environment variable reference (e.g., process.env.WEBHOOK_SECRET)", + }), + enabled: z.boolean().default(true), + }) +).optional() +``` + +#### Webhook Fields +- `id`: Unique webhook identifier +- `table`: Database table to watch for changes +- `events`: Array of trigger events (INSERT, UPDATE, DELETE) +- `url`: Destination URL (must be process.env reference) +- `secret`: Signing secret (must be process.env reference) +- `enabled`: Whether webhook is active + +### GraphQL +GraphQL API configuration. + +```typescript +graphql: z.object({ + enabled: z.boolean().default(true), +}).optional() +``` + +#### GraphQL Fields +- `enabled`: Whether to enable GraphQL endpoint (default: true) + +### Vector +Vector search/service configuration. 
+ +```typescript +vector: z.object({ + enabled: z.boolean().default(false), + provider: z.enum(["openai", "cohere", "huggingface", "custom"]).default("openai"), + apiKey: z.string().optional(), + model: z.string().optional(), + dimensions: z.number().int().min(1).optional(), + endpoint: z.string().optional(), +}).optional() +``` + +#### Vector Fields +- `enabled`: Enable vector search features +- `provider`: Embedding service provider +- `apiKey`: API key for provider (optional for some) +- `model`: Embedding model to use +- `dimensions`: Vector dimensions (provider-dependent) +- `endpoint`: Custom endpoint for self-hosted + +### Auto-REST +Automatic REST API generation configuration. + +```typescript +autoRest: z.object({ + enabled: z.boolean().default(true), + excludeTables: z.array(z.string()).default([]), + tables: z.record( + z.object({ + advancedFilters: z.boolean().default(false), + maxLimit: z.number().default(1000), + }) + ).optional(), +}).optional() +``` + +#### Auto-REST Fields +- `enabled`: Enable automatic CRUD route generation +- `excludeTables`: Tables to skip in Auto-REST generation +- `tables`: Per-table configuration overrides + - `advancedFilters`: Enable advanced query filtering syntax + - `maxLimit`: Maximum limit for pagination queries + +### Branching +Preview environment/branching configuration. + +```typescript +branching: z.object({ + enabled: z.boolean().default(true), + maxPreviews: z.number().int().min(1).max(50).default(10), + defaultSleepTimeout: z.number().int().min(60).default(3600), + storageEnabled: z.boolean().default(true), +}).optional() +``` + +#### Branching Fields +- `enabled`: Enable preview branch creation +- `maxPreviews`: Maximum concurrent preview branches +- `defaultSleepTimeout`: Seconds of inactivity before sleeping branch (60s-50min) +- `storageEnabled`: Whether to create isolated storage per branch + +## Validation & Error Handling + +### Validation Process +Configuration validation occurs in `defineConfig`: +1. 
Basic schema structure validation +2. Type coercion and refinement +3. Provider-specific validation (connection requirements) +4. Custom refinements (environment variable format, etc.) + +### Error Formats +Validation errors provide: +- Field path: `provider.connectionString` +- Error message: `Provider type "postgres" requires "connectionString" to be present and non-empty` +- Validation code: `z.ZodIssueCode.custom` + +### Common Validation Errors +1. **Missing Required Fields** + ```text + Invalid BetterBase configuration: project.name: Project name is required + ``` + +2. **Provider-Specific Requirements** + ```text + Invalid BetterBase configuration: provider.url: Turso provider requires "url" to be present and non-empty; provider.authToken: Turso provider requires "authToken" to be present and non-empty + ``` + +3. **Invalid Enum Values** + ```text + Invalid BetterBase configuration: provider.type: Invalid enum value. Expected 'neon' | 'turso' | 'planetscale' | 'supabase' | 'postgres' | 'managed', received 'mongodb' + ``` + +4. **Environment Variable Format** + ```text + Invalid BetterBase configuration: webhooks[0].url: URL must be an environment variable reference (e.g., process.env.WEBHOOK_URL) + ``` + +### Programmatic Error Handling +```typescript +try { + const config = defineConfig(userConfig); + // Use validated config +} catch (error) { + if (error instanceof Error && error.message.startsWith('Invalid BetterBase configuration')) { + // Handle validation errors + console.error('Configuration validation failed:', error.message); + } else { + // Handle unexpected errors + throw error; + } +} +``` + +## Environment Variables + +### Required Format +Strings that should reference environment variables must follow: +```text +process.env.VARIABLE_NAME +``` + +### Validation +The configuration schema validates that: +- URL and secret fields in webhooks start with `process.env.` +- No other restrictions on the variable name format + +### Best Practices +1. 
**Always use env vars for secrets**: Never hardcode API keys, passwords, or tokens +2. **Use descriptive variable names**: `DATABASE_URL`, `STORAGE_BUCKET`, `WEBHOOK_SECRET` +3. **Provide defaults for development**: Use `||` fallback for non-secrets +4. **Document required variables**: Create `.env.example` file +5. **Validate in CI**: Check that required env vars are present before deployment + +### Example .env File +```env +# Database +DATABASE_URL=postgresql://user:pass@localhost:5432/db +PROVIDER_TYPE=postgres + +# Storage +STORAGE_PROVIDER=s3 +STORAGE_BUCKET=my-app-prod +AWS_ACCESS_KEY_ID=your-key-here +AWS_SECRET_ACCESS_KEY=your-secret-here +AWS_REGION=us-west-2 + +# Webhooks +USER_WEBHOOK_URL=https://hooks.slack.com/services/T00000000/B00000000/EXAMPLE_WEBHOOK_TOKEN +USER_WEBHOOK_SECRET=your-webhook-signing-secret + +# Vector Search +OPENAI_API_KEY=sk-... +``` + +## Best Practices + +### Configuration Organization +1. **Keep it flat when possible**: Avoid deeply nested structures +2. **Group related settings**: Use objects for logical sections (as already done) +3. **Use sensible defaults**: Make common configurations work out-of-box +4. **Separate concerns**: Don't mix database, storage, and feature flags in same object +5. **Make required fields explicit**: Use `.min(1)` or similar for required strings + +### Validation Strategy +1. **Validate at startup**: Catch configuration errors early +2. **Fail fast**: Don't start if configuration is invalid +3. **Provide clear guidance**: Error messages should help fix the problem +4. **Validate external inputs**: Any config from outside (files, env, etc.) +5. **Trust but verify**: Even if you think it's valid, validate it + +### Environment Variables +1. **Never commit secrets**: Use .gitignore for .env files +2. **Use consistent naming**: PREFIX_VARIABLE_NAME pattern +3. **Document all required variables**: In README or .env.example +4. 
**Consider validation tools**: Use packages like `dotenv-validator` in development +5. **Handle missing gracefully**: Provide helpful errors for missing vars + +### Type Safety +1. **Trust the inferred types**: TypeScript types from Zod are reliable +2. **Don't duplicate type definitions**: Use `z.infer` when needed +3. **Extend carefully**: When adding config, update both schema and docs +4. **Use branded types**: For special string formats (env var refs, etc.) + +### Security +1. **Validate all inputs**: Especially anything touching database or storage +2. **Sanitize strings**: Prevent injection in dynamic contexts +3. **Limit exposure**: Don't log full configuration objects +4. **Consider encryption**: For highly sensitive configuration values +5. **Audit regularly**: Review what configuration is actually needed + +## Examples + +### Minimal Valid Configuration +```typescript +const config = defineConfig({ + project: { name: 'my-minimal-app' }, + provider: { type: 'managed' } // No DB needed + // All other sections optional +}); +``` + +### PostgreSQL with S3 Storage +```typescript +const config = defineConfig({ + project: { name: 'prod-app' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL + }, + storage: { + provider: 's3', + bucket: process.env.STORAGE_BUCKET, + region: process.env.AWS_REGION || 'us-east-1' + }, + webhooks: [ + { + id: 'order-events', + table: 'orders', + events: ['INSERT', 'UPDATE', 'DELETE'], + url: 'process.env.ORDER_WEBHOOK_URL', + secret: 'process.env.ORDER_WEBHOOK_SECRET' + } + ], + branching: { + enabled: true, + maxPreviews: 15, + defaultSleepTimeout: 1800 // 30 minutes + } +}); +``` + +### Multi-Provider Setup (Turso Example) +```typescript +const config = defineConfig({ + project: { name: 'edge-app' }, + provider: { + type: 'turso', + url: process.env.TURSO_DATABASE_URL, + authToken: process.env.TURSO_AUTH_TOKEN + }, + storage: { + provider: 'r2', // Cloudflare R2 pairs well with Turso + bucket: 
process.env.STORAGE_BUCKET, + // R2 doesn't use region in same way as AWS + }, + vector: { + enabled: true, + provider: 'openai', + apiKey: process.env.OPENAI_API_KEY + } +}); +``` + +### Development Configuration +```typescript +const config = defineConfig({ + project: { name: 'dev-app' }, + provider: { + type: 'postgres', + connectionString: 'postgresql://localhost:5432/dev_db' + }, + storage: { + provider: 'minio', + bucket: 'dev-uploads', + endpoint: 'http://localhost:9000', + // MinIO doesn't require region by default + }, + // Disable expensive features in dev + vector: { enabled: false }, + branching: { enabled: false } // Or set low limits +}); +``` + +### Configuration with Custom Tables (Auto-REST) +```typescript +const config = defineConfig({ + project: { name: 'blog-platform' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL + }, + autoRest: { + enabled: true, + excludeTables: ['schema_migrations', 'audit_log'], + tables: { + // Advanced filtering for posts table + posts: { + advancedFilters: true, + maxLimit: 100 // Higher limit for blog lists + }, + // Strict limits for sensitive data + users: { + advancedFilters: false, + maxLimit: 10 // Low limit for user listings + } + } + } +}); +``` + +## Migration Guide + +### From Untyped Configuration +If you previously had untyped configuration objects: + +1. Add Zod to your dependencies: `bun add zod` +2. Move validation logic to `defineConfig` wrapper +3. Replace manual checks with schema refinements +4. Leverage inferred TypeScript types +5. 
Remove redundant runtime type checks + +### Version Compatibility +Configuration schemas are designed to be backward compatible: +- New fields are added as optional +- Default values maintain existing behavior +- Breaking changes require major version bump +- Validation errors guide migration path + +## Related Modules +- [Auto-REST](./auto-rest.md): Configuration options affect API generation +- [Branching](./branching.md): Branching behavior configured here +- [Providers](./providers.md): Provider-specific implementation details +- [Storage](./storage.md): Storage provider configuration details +- [Webhooks](./webhooks.md): Webhook configuration and delivery +- [Logger](./logger.md): Logging can be configured via environment \ No newline at end of file diff --git a/docs/core/functions.md b/docs/core/functions.md new file mode 100644 index 0000000..f8b356f --- /dev/null +++ b/docs/core/functions.md @@ -0,0 +1,712 @@ +# Functions Module + +Serverless function bundling, local runtime, and deployment utilities. + +## Table of Contents +- [Overview](#overview) +- [Features](#features) +- [Installation](#installation) +- [Usage](#usage) +- [API Reference](#api-reference) + - [Bundler](#bundler) + - [Deployer](#deployer) + - [Local Runtime](#local-runtime) + - [Types](#types) +- [Function Structure](#function-structure) +- [Deployment Targets](#deployment-targets) +- [Best Practices](#best-practices) +- [Examples](#examples) + +## Overview + +The Functions module provides tools for bundling, deploying, and running serverless functions in BetterBase applications. It supports multiple deployment targets and provides a consistent local development experience. 
+ +Key capabilities: +- **Function Bundling**: Package functions with dependencies using esbuild +- **Multi-Target Deployment**: Deploy to various serverless platforms +- **Local Runtime**: Test functions locally with simulated environment +- **Automatic Dependency Inclusion**: Bundle only required dependencies +- **Environment Variable Injection**: Configure runtime environment +- **Wrapper Generation**: Create platform-specific handlers +- **Watch Mode**: Automatic rebundling during development + +## Features + +### Bundling +- **esbuild-based**: Fast, efficient bundling +- **Tree Shaking**: Eliminate unused code +- **Minification**: Reduce bundle size for production +- **Format Support**: ESM, CommonJS, IIFE formats +- **Externalization**: Mark dependencies as external when needed +- **Banner/Footer**: Add custom code to bundles + +### Deployment Targets +- **AWS Lambda**: Standard Node.js runtime +- **Cloudflare Workers**: Service Worker format +- **Vercel Serverless**: Vercel-specific format +- **Netlify Functions**: Netlify-specific format +- **Deno Deploy**: Deno-compatible bundle +- **Bun.sh**: Bun runtime target +- **Custom**: Generic bundle for any platform + +### Local Runtime +- **Environment Simulation**: Mock context and event objects +- **Hot Reloading**: Automatic restart on file changes +- **Logging Capture**: View function logs in real-time +- **Error Reporting**: Detailed stack traces and error formatting +- **Timeout Simulation**: Test function timeout behavior + +### Developer Experience +- **Zero Config**: Sensible defaults for common use cases +- **TypeScript Support**: Full type checking during bundling +- **Source Maps**: Debug bundled code with original sources +- **Watch Mode**: `bunx betterbase function watch` for development +- **CLI Integration**: `betterbase function` commands + +## Installation + +The Functions module is part of `@betterbase/core`: +```bash +bun add @betterbase/core +``` + +## Usage + +### Basic Function Structure 
+```typescript +// src/functions/hello/index.ts +export default async function handler(event) { + return { + statusCode: 200, + body: JSON.stringify({ message: 'Hello World!' }) + }; +} +``` + +### Bundling Functions +```bash +# Bundle a single function +bunx betterbase function build src/functions/hello/index.ts + +# Bundle all functions in directory +bunx betterbase function build src/functions/ + +# Bundle with specific target +bunx betterbase function build src/functions/hello/index.ts --target aws-lambda +``` + +### Local Development +```bash +# Run function locally with mock event +bunx betterbase function run src/functions/hello/index.ts --event '{"name": "John"}' + +# Watch mode for development +bunx betterbase function watch src/functions/ +``` + +### Programmatic Usage +```typescript +import { + bundleFunction, + deployFunction, + createLocalRuntime +} from '@betterbase/core/functions'; + +// Bundle function +const bundle = await bundleFunction( + 'src/functions/hello/index.ts', + { target: 'aws-lambda', minify: true } +); + +// Deploy to provider +const deployment = await deployFunction(bundle, { + provider: 'aws', + functionName: 'hello-world', + region: 'us-east-1' +}); + +// Test locally +const runtime = createLocalRuntime(bundle); +const result = await runtime.execute({ + name: 'John' +}); +``` + +## API Reference + +### Bundler +Function bundling utilities. 
+ +#### bundleFunction +```typescript +export async function bundleFunction( + entryPoint: string, + options: BundleOptions = {} +): Promise +``` + +#### BundleOptions +```typescript +export interface BundleOptions { + /** Build target (default: 'node') */ + target?: 'node' | 'browser' | 'aws-lambda' | 'cloudflare-workers' | + 'vercel' | 'netlify' | 'deno' | 'bun' | 'custom'; + + /** Output format (default: 'esm') */ + format?: 'esm' | 'cjs' | 'iife'; + + /** Enable minification (default: false for dev, true for prod) */ + minify?: boolean; + + /** Generate source maps (default: true) */ + sourcemap?: boolean; + + /** Externalize dependencies (don't bundle) */ + external?: string[]; + + /** Inject global variables */ + globals?: Record; + + /** Add banner/footer to bundle */ + banner?: string; + footer?: string; + + /** Define constants (like esbuild's define) */ + define?: Record; + + /** Watch mode for rebundling */ + watch?: boolean; + + /** Outdir for bundle output */ + outdir?: string; +} +``` + +#### FunctionBundle +```typescript +export interface FunctionBundle { + /** Bundle contents as string */ + code: string; + + /** Source map (if generated) */ + map?: string | null; + + /** Detected handler function name */ + handler: string; + + /** Bundle format */ + format: 'esm' | 'cjs' | 'iife'; + + /** Target platform */ + target: string; + + /** Size in bytes */ + size: number; + + /** List of externalized dependencies */ + external: string[]; + + /** Entry point used */ + entryPoint: string; +} +``` + +### Deployer +Function deployment utilities. 
+ +#### deployFunction +```typescript +export async function deployFunction( + bundle: FunctionBundle, + options: DeployOptions +): Promise +``` + +#### DeployOptions +```typescript +export interface DeployOptions { + /** Deployment provider */ + provider: 'aws' | 'cloudflare' | 'vercel' | 'netlify' | 'custom'; + + /** Provider-specific configuration */ + config: Record; + + /** Function name in target platform */ + functionName: string; + + /** Optional description */ + description?: string; + + /** Tags/labels for the function */ + tags?: Record; + + /** Memory allocation (provider-dependent) */ + memory?: number; + + /** Timeout in seconds */ + timeout?: number; + + /** Environment variables */ + environment?: Record; + + /** VPC/network configuration */ + network?: Record; + + /** IAM/role configuration */ + role?: Record; +} +``` + +#### DeploymentResult +```typescript +export interface DeploymentResult { + /** Success status */ + success: boolean; + + /** Deployment ID/ARN/etc */ + id: string; + + /** Function URL or invoke address */ + url?: string; + + /** Provider-specific metadata */ + providerData: Record; + + /** Any warnings during deployment */ + warnings: string[]; + + /** Error details if failed */ + error?: string; +} +``` + +### Local Runtime +Local function execution and testing. 
+ +#### createLocalRuntime +```typescript +export function createLocalRuntime( + bundle: FunctionBundle, + options: RuntimeOptions = {} +): LocalFunctionRuntime +``` + +#### RuntimeOptions +```typescript +export interface RuntimeOptions { + /** Environment variables for runtime */ + environment?: Record; + + /** Function timeout in milliseconds */ + timeoutMs?: number; + + /** Memory limit in MB */ + memoryLimitMb?: number; + + /** Whether to capture console output */ + captureLogs?: boolean; + + /** Mock context object */ + context?: Record; +} +``` + +#### LocalFunctionRuntime +```typescript +export interface LocalFunctionRuntime { + /** Execute function with event */ + execute(event: unknown): Promise; + + /** Set environment variables */ + setEnvironment(env: Record): void; + + /** Get captured logs */ + getLogs(): string[]; + + /** Reset runtime state */ + reset(): void; + + /** Destroy runtime and cleanup */ + destroy(): void; +} +``` + +### Types +Exported TypeScript types. + +```typescript +export type FunctionHandler = (event: unknown, context?: unknown) => Promise; + +export interface FunctionMetadata { + name: string; + description?: string; + version?: string; + author?: string; + license?: string; + topics?: string[]; +} +``` + +## Function Structure + +### Standard Format +Functions should export a default async handler: + +```typescript +// src/functions/my-function/index.ts +import type { BetterBaseResponse } from '@betterbase/shared'; + +export default async function handler( + event: Record, + context?: Record +): Promise> { + try { + // Process event + const result = await processData(event); + + return { + data: result, + error: null + }; + } catch (error) { + return { + data: null, + error: error instanceof Error ? error.message : 'Unknown error' + }; + } +} +``` + +### Event Formats +Different platforms provide different event structures: +- **AWS Lambda**: API Gateway, S3, DynamoDB events, etc. 
+- **Cloudflare Workers**: FetchEvent with request +- **Vercel**: Next.js API route request/response +- **Netlify**: Similar to AWS Lambda format + +### Context Object +The context parameter provides runtime information: +- `functionName`: Name of the function +- `functionVersion`: Version or alias +- `invokedFunctionArn`: ARN (AWS-specific) +- `awsRequestId`: Request ID (AWS-specific) +- `getRemainingTimeInMillis()`: Time before timeout +- `logGroupName`, `logStreamName`: Logging info (AWS-specific) + +### Return Format +BetterBase functions should return a `BetterBaseResponse`: +```typescript +interface BetterBaseResponse { + data: T | null; + error: string | SerializedError | null; + count?: number; + pagination?: { + page: number; + pageSize: number; + total: number; + }; +} +``` + +## Deployment Targets + +### AWS Lambda +```bash +bunx betterbase function build src/functions/hello/index.ts \ + --target aws-lambda \ + --outdir dist/functions/hello +``` +Then deploy using AWS CLI, CDK, SAM, or similar tools. + +### Cloudflare Workers +```bash +bunx betterbase function build src/functions/hello/index.ts \ + --target cloudflare-workers \ + --format iife +``` +Upload to Cloudflare dashboard or use wrangler CLI. + +### Vercel Serverless +```bash +bunx betterbase function build src/functions/hello/index.ts \ + --target vercel \ + --outdir api/hello +``` +Place in `api/` directory for automatic detection. + +### Netlify Functions +```bash +bunx betterbase function build src/functions/hello/index.ts \ + --target netlify \ + --outdir netlify/functions/hello +``` +Configure in `netlify.toml` if needed. + +### Deno Deploy +```bash +bunx betterbase function build src/functions/hello/index.ts \ + --target deno \ + --format esm +``` +Upload to Deno Deploy with appropriate permissions. + +### Bun.sh +```bash +bunx betterbase function build src/functions/hello/index.ts \ + --target bun +``` +Deploy to Bun.sh hosting or Edge runtime. 
 + +## Best Practices + +### Function Design +1. **Single Responsibility**: Each function should do one thing well +2. **Stateless**: Don't rely on local filesystem persistence +3. **Idempotent**: Safe to retry with same input +4. **Fast Initialization**: Minimize cold start time +5. **Proper Error Handling**: Return structured error responses + +### Bundling Optimization +1. **Externalize Heavy Dependencies**: Mark AWS SDK, database clients as external when available in runtime +2. **Use Tree Shaking**: Import only what you need +3. **Minify for Production**: Enable minification in production builds +4. **Source Maps for Dev**: Keep source maps for debugging during development +5. **Watch Files**: Exclude node_modules and build outputs from watch + +### Security +1. **Validate Inputs**: Always validate and sanitize event data +2. **Use Environment Secrets**: Never hardcode API keys or tokens +3. **Principle of Least Privilege**: Grant minimal required permissions +4. **Sanitize Outputs**: Prevent injection in responses +5. **Consider Timeouts**: Handle function timeout gracefully + +### Testing +1. **Unit Test Handlers**: Test function logic in isolation +2. **Integration Tests**: Test with actual services when possible +3. **Mock Context**: Simulate different context scenarios +4. **Test Edge Cases**: Empty events, malformed data, timeouts +5. **Load Testing**: Verify performance under expected load + +### Monitoring & Logging +1. **Structured Logging**: Use consistent log format +2. **Correlation IDs**: Trace requests across functions +3. **Error Alerting**: Set up alerts for function errors +4. **Performance Monitoring**: Track duration and memory usage +5. **Log Retention**: Configure appropriate log retention policies + +### Versioning +1. **Semantic Versioning**: Use semantic version for function releases +2. **Aliases**: Use aliases for blue/green deployments +3. **Gradual Rollout**: Deploy to percentage of traffic first +4. 
**Rollback Plan**: Have procedure to revert to previous version +5. **Change Log**: Maintain history of function changes + +## Examples + +### Hello World Function +```typescript +// src/functions/hello/index.ts +export default async function handler(event) { + const name = event?.name || 'World'; + + return { + data: { message: `Hello, ${name}!` }, + error: null + }; +} +``` + +### Database Function +```typescript +// src/functions/user-profile/index.ts +import { drizzle } from 'drizzle-orm/neon'; +import { eq } from 'drizzle-orm'; +import { users } from '@/db/schema'; + +export default async function handler(event) { + const userId = event?.pathParameters?.userId; + + if (!userId) { + return { + data: null, + error: 'Missing userId parameter' + }; + } + + try { + const db = drizzle(process.env.DATABASE_URL); + const user = await db.select().from(users).where(eq(users.id, userId)).limit(1); + + if (user.length === 0) { + return { + data: null, + error: 'User not found' + }; + } + + return { + data: user[0], + error: null + }; + } catch (error) { + return { + data: null, + error: error instanceof Error ? error.message : 'Database error' + }; + } +} +``` + +### Webhook Handler +```typescript +// src/functions/webhook-handler/index.ts +import type { BetterBaseResponse } from '@betterbase/shared'; + +export default async function handler(event): Promise> { + // Verify webhook signature + const signature = event.headers?.['x-signature']; + const secret = process.env.WEBHOOK_SECRET; + + if (!verifySignature(event.body, signature, secret)) { + return { + data: null, + error: 'Invalid signature' + }; + } + + try { + const payload = JSON.parse(event.body); + + // Process webhook payload + await processWebhookEvent(payload); + + return { + data: { received: true }, + error: null + }; + } catch (error) { + return { + data: null, + error: error instanceof Error ? 
error.message : 'Invalid payload' + }; + } +} +``` + +### Scheduled Function +```typescript +// src/functions/daily-report/index.ts +export default async function handler(event) { + const { scheduleTime } = event; + + try { + // Generate report for previous day + const report = await generateDailyReport(new Date(scheduleTime)); + + // Send report via email or store in database + await deliverReport(report); + + return { + data: { reportId: report.id, generatedAt: new Date().toISOString() }, + error: null + }; + } catch (error) { + return { + data: null, + error: error instanceof Error ? error.message : 'Report generation failed' + }; + } +} +``` + +## CLI Commands + +The BetterBase CLI provides function-related commands: + +```bash +# Build functions +bunx betterbase function build [options] + +# Run function locally +bunx betterbase function run [options] + +# Watch functions for development +bunx betterbase function watch [options] + +# Deploy functions +bunx betterbase function deploy [options] + +# List available targets +bunx betterbase function targets +``` + +### Build Options +```bash +bunx betterbase function build src/functions/hello/index.ts \ + --target aws-lambda \ + --minify \ + --sourcemap \ + --outdir dist/hello +``` + +### Run Options +```bash +bunx betterbase function run src/functions/hello/index.ts \ + --event '{"name": "Alice"}' \ + --timeout 5000 \ + --environment NODE_ENV=development +``` + +### Watch Options +```bash +bunx betterbase function watch src/functions/ \ + --interval 1000 \ + --on-change "echo 'Functions rebuilt'" +``` + +## Limitations & Considerations + +### Bundle Size Limits +Different platforms have different bundle size limits: +- **AWS Lambda**: 50 MB zipped, 250 MB unzipped +- **Cloudflare Workers**: 10 MB total +- **Vercel**: 50 MB for Serverless Functions +- **Netlify**: 50 MB zipped +- **Deno Deploy**: No strict limit but consider performance +- **Bun.sh**: Similar to AWS Lambda limits + +### Cold Start Optimization +1. 
**Keep Functions Small**: Smaller bundles load faster +2. **Minimize Dependencies**: Only include what's needed +3. **Prefer Native Modules**: Avoid native dependencies when possible +4. **Consider Provisioned Concurrency**: For consistent performance (AWS) +5. **Use Edge Runtimes**: Cloudflare Workers, Vercel Edge for near-zero cold start + +### Runtime Compatibility +1. **Node.js Version**: Ensure compatibility with target runtime +2. **API Availability**: Some Node.js APIs not available in all runtimes +3. **Global Objects**: Differences in global object availability +4. **File System Access**: Limited or different in serverless environments +5. **Network Restrictions**: Some platforms restrict outbound connections + +### Execution Limits +1. **Timeout**: Maximum execution time (varies by platform) +2. **Memory**: Available memory affects cost and performance +3. **Concurrency**: Limits on simultaneous executions +4. **Request/Response Size**: Limits on payload sizes +5. **File System**: Often read-only or limited write access + +### Vendor Lock-in Considerations +1. **Abstract Platform Differences**: Use adapters for platform-specific features +2. **Standardize Event Formats**: Create common event interface +3. **Consider Multi-Platform**: Design for deployment to multiple targets +4. **Use Standard Libraries**: Prefer web standards over platform-specific APIs +5. 
**Have Escape Hatch**: Ability to deploy to VM/container if needed + +## Related Modules +- [Configuration](./config.md): For defining function-related configuration +- [Logger](./logger.md): For function logging integration +- [Auto-REST](./auto-rest.md): For generating API endpoints that could replace some functions +- [Webhooks](./webhooks.md): For webhook delivery as alternative to functions +- [Realtime](./realtime.md): For real-time updates as alternative to polling functions +- [Storage](./storage.md): For storing function artifacts or assets \ No newline at end of file diff --git a/docs/core/graphql.md b/docs/core/graphql.md new file mode 100644 index 0000000..9c941b0 --- /dev/null +++ b/docs/core/graphql.md @@ -0,0 +1,993 @@ +# GraphQL Module + +Auto-generated GraphQL schema, resolvers, server, and real-time bridge from Drizzle ORM schema. + +## Table of Contents +- [Overview](#overview) +- [Features](#features) +- [Installation](#installation) +- [Usage](#usage) +- [API Reference](#api-reference) + - [Schema Generator](#schema-generator) + - [Resolvers](#resolvers) + - [Server](#server) + - [Realtime Bridge](#realtime-bridge) + - [SDL Exporter](#sdl-exporter) + - [Types](#types) +- [Schema Generation](#schema-generation) +- [Resolver Generation](#resolver-generation) +- [Real-time Integration](#real-time-integration) +- [Best Practices](#best-practices) +- [Examples](#examples) +- [Security Considerations](#security-considerations) + +## Overview + +The GraphQL module automatically generates a complete GraphQL API from your Drizzle ORM schema, including schema definitions, resolvers, server setup, and real-time capabilities. It eliminates boilerplate by inspecting your database schema and creating a fully-featured GraphQL API with built-in security and performance optimizations. 
+ +Key capabilities: +- **Automatic Schema Generation**: Generate GraphQL types from Drizzle tables +- **Resolver Auto-generation**: Create resolvers for queries, mutations, and subscriptions +- **Server Setup**: Ready-to-use GraphQL server with Hono integration +- **Real-time Subscriptions**: Live updates via WebSockets with database change events +- **SDL Export**: Export schema as GraphQL SDL for federation or documentation +- **Type Safety**: Full TypeScript support with inferred types +- **RLS Integration**: Automatic Row Level Security enforcement +- **Performance Optimizations**: Batch loading, caching hints, and query complexity limits + +## Features + +### Schema Generation +- **Table to Type Mapping**: Each database table becomes a GraphQL type +- **Column to Field Mapping**: Table columns become GraphQL fields with proper types +- **Relationship Detection**: Foreign keys generate relationship fields +- **Enum Support**: Database enums become GraphQL enums +- **JSON Fields**: JSONB/JSON columns mapped to GraphQL JSON scalar +- **Timestamps**: Automatic DateTime handling +- **Custom Scalars**: Support for UUID, DateTime, and JSON scalars + +### Resolver Generation +- **Queries**: + - `tableName`: List all records (with filtering, sorting, pagination) + - `tableName_by_pk`: Get single record by primary key + - `tableName_aggregate`: Aggregate functions (count, sum, avg, etc.) 
+- **Mutations**: + - `insert_tableName`: Insert one or multiple records + - `update_tableName`: Update records by primary key + - `update_tableName_set`: Update records matching conditions + - `delete_tableName`: Delete records by primary key + - `delete_tableName_set`: Delete records matching conditions +- **Subscriptions**: + - `tableName_insert`: Listen for new records + - `tableName_update`: Listen for updated records + - `tableName_delete`: Listen for deleted records + +### Server Features +- **Hono Integration**: Works seamlessly with Hono framework +- **GET/POST Endpoints**: Support for both GET and POST requests +- **GraphQL Playground**: Built-in IDE for testing queries +- **Error Handling**: Formatted error responses per GraphQL spec +- **Validation**: Automatic query validation against schema +- **Batching**: Support for batched requests +- **Introspection**: Full schema introspection available + +### Real-time Capabilities +- **WebSocket Server**: Built-in WebSocket support for subscriptions +- **Database Triggers**: Listen to database changes via pg_notify or equivalent +- **Event Broadcasting**: Real-time updates to subscribed clients +- **Connection Lifecycle**: Handle connect/disconnect events +- **Authentication**: Integration with RLS and auth systems +- **Message Format**: Standard GraphQL over WebSocket protocol + +### SDL Export +- **Schema Export**: Generate GraphQL Schema Definition Language +- **Type Definitions**: Export all types, queries, mutations, subscriptions +- **Directives**: Include custom directives if used +- **Descriptions**: Preserve field and type descriptions +- **Federation Ready**: Compatible with Apollo Federation + +## Installation + +The GraphQL module is part of `@betterbase/core`: +```bash +bun add @betterbase/core +``` + +## Usage + +### Basic Setup +```typescript +import { Hono } from 'hono'; +import { drizzle } from 'drizzle-orm/postgres'; +import { + generateGraphQLSchema, + generateResolvers, + 
createGraphQLServer, + pubsub +} from '@betterbase/core/graphql'; +import * as schema from './db/schema'; + +// Initialize +const app = new Hono(); +const db = drizzle(process.env.DATABASE_URL); + +// Generate schema and resolvers +const typeDefs = generateGraphQLSchema(schema); +const resolvers = generateResolvers(schema); + +// Create GraphQL server +const graphql = createGraphQLServer({ + schema: typeDefs, + resolvers, + context: async (c) => ({ + db, + user: c.get('user'), // from auth middleware + pubsub + }) +}); + +// Mount GraphQL endpoints +app.route('/graphql', graphql); +app.get('/graphql-playground', (c) => c.html(/* GraphQL HTML */)); + +// Start server +app.listen(3000); +``` + +### With Real-time Subscriptions +```typescript +import { + bridgeRealtimeToGraphQL, + publishDbEvent +} from '@betterbase/core/graphql'; + +// Set up real-time bridge +const realtimeBridge = bridgeRealtimeToGraphQL({ + db, + pubsub, + schema +}); + +// Start listening for database changes +realtimeBridge.start(); + +// Later, when you have database changes: +await publishDbEvent(db, { + table: 'users', + type: 'INSERT', + record: newUser, + timestamp: new Date().toISOString() +}); +``` + +### Manual Server Creation +```typescript +import { serve } from '@hono/node-server'; +import { createGraphQLServer } from '@betterbase/core/graphql'; + +const graphql = createGraphQLServer({ + schema: typeDefs, + resolvers, + context: async (c) => ({ db, userId: c.get('userId') }) +}); + +serve({ + fetch: app.fetch, + port: 3000 +}); +``` + +## API Reference + +### Schema Generator +Generate GraphQL schema from Drizzle schema. 
+ +#### generateGraphQLSchema +```typescript +export function generateGraphQLSchema( + schema: Record, + options: GraphQLGenerationConfig = {} +): string +``` + +#### GraphQLGenerationConfig +```typescript +export interface GraphQLGenerationConfig { + /** Custom type mappings */ + typeMappings?: Record; + + /** Skip generating certain types */ + excludeTypes?: string[]; + + /** Custom field overrides */ + fieldOverrides?: Record< + string, + Record + >; + + /** Enable/disable certain features */ + enableSubscriptions?: boolean; + enableAggregates?: boolean; + enableRelationships?: boolean; + + /** Naming conventions */ + namingConvention?: { + type?: 'PascalCase' | 'camelCase' | 'snake_case'; + field?: 'camelCase' | 'snake_case'; + }; + + /** Custom scalars */ + customScalars?: Record; + + /** Description sources */ + descriptions?: { + fromComments?: boolean; + fromSchema?: boolean; + }; +} +``` + +#### GraphQLJSON & GraphQLDateTime +```typescript +export { GraphQLJSON, GraphQLDateTime } from './schema-generator'; +``` +- `GraphQLJSON`: Scalar for JSON values +- `GraphQLDateTime`: Scalar for ISO date strings + +### Resolvers +Generate resolver functions from Drizzle schema. 
+ +#### generateResolvers +```typescript +export function generateResolvers( + schema: Record, + options: ResolverGenerationConfig = {} +): Resolvers +``` + +#### ResolverGenerationConfig +```typescript +export interface ResolverGenerationConfig { + /** Enable/disable resolver types */ + enableQueries?: boolean; + enableMutations?: boolean; + enableSubscriptions?: boolean; + + /** Custom resolver overrides */ + overrides?: Partial; + + /** Batch loading configuration */ + batching?: { + enabled?: boolean; + maxBatchSize?: number; + batchDelayMs?: number; + }; + + /** Security hooks */ + hooks?: { + preResolve?: (context: GraphQLContext, info: any) => Promise; + postResolve?: (context: GraphQLContext, result: any, info: any) => Promise; + }; + + /** Context enrichment */ + contextEnrichment?: (context: GraphQLContext) => Promise | GraphQLContext; +} +``` + +#### Resolver Types +```typescript +export type Resolvers = { + Query: Record; + Mutation: Record; + Subscription: Record; +}; + +export type GraphQLResolver = ( + parent: unknown, + args: Record, + context: GraphQLContext, + info: GraphQLResolveInfo +) => Promise | unknown; +``` + +#### Context Types +```typescript +export interface GraphQLContext { + db: DrizzleDB; + userId?: string; + user?: Record; + pubsub: PubSub; + [key: string]: unknown; +} +``` + +### Server +Create and run GraphQL server. 
+ +#### createGraphQLServer +```typescript +export function createGraphQLServer( + options: GraphQLConfig +): ReturnType +``` + +#### GraphQLConfig +```typescript +export interface GraphQLConfig { + /** GraphQL schema string */ + schema: string; + + /** Resolver functions */ + resolvers: Resolvers; + + /** Context factory function */ + context?: ( + c: Context + ) => Promise> | Partial; + + /** Format error responses */ + formatError?: (error: FormattedError) => FormattedError; + + /** Enable built-in playground */ + playground?: boolean; + + /** GraphQL endpoint path */ + path?: string; + + /** WebSocket subscription endpoint */ + subscriptionsPath?: string; + + /** Validation rules */ + validationRules?: Array; + + /** Query complexity limits */ + complexityLimits?: { + maxComplexity?: number; + createComplexityLimit?: (options: { + query: DocumentNode; + variables: VariableMapping[]; + }) => number; + }; + + /** Depth limits */ + depthLimits?: { + maxDepth?: number; + }; +} +``` + +#### Server Functions +```typescript +export { + createGraphQLServer, + startGraphQLServer, + pubsub, + publishGraphQLEvent +} from './server'; +``` + +#### PubSub Interface +```typescript +export interface PubSub { + /** Publish event to topic */ + publish(topic: string, payload: T): Promise; + + /** Subscribe to topic */ + subscribe(topic: string): AsyncIterable; + + /** Unsubscribe from topic */ + unsubscribe(topic: string): void; + + /** Get number of subscribers */ + subscriberCount(topic: string): number; +} +``` + +### Realtime Bridge +Bridge database changes to GraphQL subscriptions. 
+ +#### bridgeRealtimeToGraphQL +```typescript +export function bridgeRealtimeToGraphQL( + options: RealtimeBridgeConfig +): RealtimeBridge +``` + +#### RealtimeBridgeConfig +```typescript +export interface RealtimeBridgeConfig { + /** Database connection */ + db: DrizzleDB; + + /** PubSub instance */ + pubsub: PubSub; + + /** Database schema */ + schema: Record; + + /** Table configurations */ + tables?: Partial>; + + /** Event filtering */ + filter?: (event: DBEvent) => boolean; + + /** Debounce settings */ + debounceMs?: number; +} +``` + +#### TableBridgeConfig +```typescript +export interface TableBridgeConfig { + /** Enable/disable bridging for table */ + enabled?: boolean; + + /** Events to bridge */ + events?: ('INSERT' | 'UPDATE' | 'DELETE')[]; + + /** Custom topic name */ + topic?: string; + + /** Payload transformation */ + transform?: (event: DBEvent) => unknown; +} +``` + +#### RealtimeBridge +```typescript +export interface RealtimeBridge { + /** Start listening for database changes */ + start(): void; + + /** Stop listening */ + stop(): void; + + /** Check if bridge is active */ + isActive(): boolean; + + /** Manually publish event */ + publishEvent(event: DBEvent): Promise; +} +``` + +#### Event Types +```typescript +export { + DbInsertEvent, + DbUpdateEvent, + DbDeleteEvent, + type DbEvent +} from './realtime-bridge'; +``` + +### SDL Exporter +Export schema as GraphQL SDL. 
+ +#### exportSDL +```typescript +export function exportSDL( + schemaString: string, + options: ExportOptions = {} +): string +``` + +#### ExportOptions +```typescript +export interface ExportOptions { + /** Include descriptions */ + includeDescriptions?: boolean; + + /** Sort types alphabetically */ + sortAlphabetically?: boolean; + + /** Skip certain types */ + skipTypes?: string[]; + + /** Custom formatting */ + indent?: string | number; + + /** Include built-in scalars */ + includeBuiltInScalars?: boolean; + + /** Add federation directives */ + federation?: boolean; +} +``` + +#### Related Functions +```typescript +export { + exportTypeSDL, + saveSDL +} from './sdl-exporter'; +``` + +### Types +Exported utility types. + +```typescript +export type { + DatabaseConnection, + GraphQLContext, + GraphQLResolver, + Resolvers, + GraphQLGenerationConfig, + ResolverGenerationConfig, + GraphQLConfig, + RealtimeBridgeConfig +} from './index'; +``` + +## Schema Generation + +### Type Mapping Rules +Database columns are mapped to GraphQL types as follows: + +| Database Type | GraphQL Type | Notes | +|---------------|--------------|-------| +| INTEGER, BIGINT, SMALLINT | Int | | +| REAL, DOUBLE PRECISION | Float | | +| VARCHAR, TEXT, CHAR | String | | +| BOOLEAN | Boolean | | +| TIMESTAMP, TIMESTAMPTZ | DateTime | Custom scalar | +| DATE | DateTime | | +| UUID | UUID | Custom scalar | +| JSON, JSONB | JSON | Custom scalar | +| ENUM | Enum | Auto-generated enum type | +| ARRAY | [Type]! 
| Non-nullable array | +| FOREIGN KEY | Related Type | Relationship field | +| JOIN TABLE | Many-to-many | Special handling | + +### Naming Conventions +- **Types**: PascalCase (table name → TypeName) +- **Fields**: camelCase (column name → fieldName) +- **Arguments**: camelCase +- **Enums**: PascalCase (enum name → EnumName) +- **Values**: UPPER_SNAKE_CASE (enum value → ENUM_VALUE) + +### Relationship Detection +Foreign keys automatically generate relationship fields: +- **Single Foreign Key**: Generates a singular field (e.g., `author: User`) +- **Multiple Foreign Keys**: Generates plural field for collections (e.g., `posts: [Post!]!`) +- **Many-to-Many**: Detected via join tables with two foreign keys + +### Customizations +Override default behavior via configuration: +```typescript +const typeDefs = generateGraphQLSchema(schema, { + typeMappings: { + // Map custom database types + 'ltree': 'String', + 'hstore': 'JSON' + }, + excludeTypes: ['migrations', 'schema_migrations'], + fieldOverrides: { + // Override specific field types + users: { + password: { type: 'String', resolve: (parent) => '[REDACTED]' } + } + }, + namingConvention: { + type: 'PascalCase', + field: 'camelCase' + } +}); +``` + +## Resolver Generation + +### Query Resolvers +Generated queries follow Postgraphile-like naming: + +#### List Query +```graphql +# Returns paginated list with filtering and sorting +users( + offset: Int = 0 + limit: Int = 20 + order_by: [users_order_by!] + where: users_bool_exp +): [User!]! +``` + +#### Single Object Query +```graphql +# Returns single object by primary key +users_by_pk(id: Int!): User +``` + +#### Aggregate Query +```graphql +# Returns aggregate calculations +users_aggregate( + offset: Int = 0 + limit: Int = 20 + order_by: [users_order_by!] 
+ where: users_bool_exp +): users_aggregate +``` + +### Mutation Resolvers +Generated mutations for data modification: + +#### Insert Single +```graphql +# Insert one record +insert_users_one( + object: users_insert_input! +): users_mutation_response +``` + +#### Insert Multiple +```graphql +# Insert multiple records +insert_users( + objects: [users_insert_input!]! +): users_mutation_response +``` + +#### Update by PK +```graphql +# Update record by primary key +update_users_by_pk( + pk_columns: users_pk_columns_input! + _set: users_set_input +): users +``` + +#### Update by Conditions +```graphql +# Update records matching conditions +update_users( + _set: users_set_input + where: users_bool_exp +): users_mutation_response +``` + +#### Delete by PK +```graphql +# Delete record by primary key +delete_users_by_pk( + pk_columns: users_pk_columns_input! +): users +``` + +#### Delete by Conditions +```graphql +# Delete records matching conditions +delete_users( + where: users_bool_exp +): users_mutation_response +``` + +### Subscription Resolvers +Generated subscriptions for real-time updates: + +#### Insert Subscription +```graphql +# Fires when new record is inserted +users_insert( + offset: Int = 0 + limit: Int = 20 + order_by: [users_order_by!] + where: users_bool_exp +): [User!]! +``` + +#### Update Subscription +```graphql +# Fires when record is updated +users_update( + offset: Int = 0 + limit: Int = 20 + order_by: [users_order_by!] + where: users_bool_exp +): [User!]! +``` + +#### Delete Subscription +```graphql +# Fires when record is deleted +users_delete( + offset: Int = 0 + limit: Int = 20 + order_by: [users_order_by!] + where: users_bool_exp +): [User!]! +``` + +## Real-time Integration + +### Database Change Detection +The realtime bridge detects database changes through: +1. **Database Triggers**: INSERT/UPDATE/DELETE triggers on tables +2. **Polling**: Fallback for databases without trigger support +3. 
**Native Features**: Using pg_notify, MySQL binary logs, etc. +4. **Application Level**: Tracking changes through application + +### Event Publishing +Changes are published as standardized events: +```typescript +interface DBEvent { + table: string; // Table name + type: 'INSERT' | 'UPDATE' | 'DELETE'; + record: Record; // New/updated record + old_record?: Record; // Previous record (for UPDATE/DELETE) + timestamp: string; // ISO timestamp +} +``` + +### Subscription Filtering +Subscriptions can filter events using GraphQL arguments: +```graphql +subscription { + users_update( + where: { + id: { _eq: 123 }, + active: { _eq: true } + } + ) { + id + name + email + } +} +``` + +### Connection Management +WebSocket connection lifecycle: +1. **Connection**: Client establishes WebSocket +2. **Initialization**: Client sends GraphQL init message +3. **Subscription**: Client registers interest in topics +4. **Event Delivery**: Server pushes matching events +5. **Completion**: Client unsubscribes or connection closes +6. **Cleanup**: Server removes subscriptions + +### Scaling Considerations +For production deployments: +1. **PubSub Scaling**: Use Redis, PostgreSQL LISTEN/NOTIFY, or cloud Pub/Sub +2. **Horizontal Scaling**: Multiple server instances sharing PubSub +3. **Message Ordering**: Consider ordering guarantees for critical updates +4. **Duplicate Detection**: Handle potential duplicate events +5. **Heartbeats**: Implement connection heartbeat mechanism + +## Best Practices + +### Schema Design +1. **Keep Tables Normalized**: Well-normalized schemas generate cleaner GraphQL +2. **Use Proper Constraints**: NOT NULL, UNIQUE, CHECK constraints improve generated API +3. **Document Your Schema**: Comments on tables/columns become GraphQL descriptions +4. **Consider Exposures**: Only expose tables you want in GraphQL API +5. **Use Views Judiciously**: Database views can simplify complex relationships + +### Security +1. 
**Enable RLS**: Always enable Row Level Security for production +2. **Limit Introspection**: Consider disabling introspection in production +3. **Query Complexity**: Implement complexity limits to prevent DoS +4. **Depth Limits**: Prevent overly deep nested queries +5. **Rate Limiting**: Add rate limiting at the HTTP level +6. **Input Validation**: While GraphQL validates, validate business logic too + +### Performance +1. **Database Indexes**: Index columns used in where/order_by clauses +2. **Pagination**: Use reasonable default limits (20-100 items) +3. **Batch Loading**: Enable batching for relationship resolution +4. **Caching**: Consider HTTP caching for GET requests +5. **Prepared Statements**: Generated resolvers use prepared statements +6. **Connection Pooling**: Properly configure database connection pool + +### Development +1. **Use Playground**: Leverage GraphQL Playground for testing +2. **Type Generation**: Generate TypeScript types from schema for clients +3. **Mock Data**: Use fixtures for development and testing +4. **Schema Check**: Include schema checks in CI pipeline +5. **Documentation**: Export SDL for API documentation +6. **Versioning**: Consider schema versioning for breaking changes + +### Deployment +1. **Environment Separation**: Use different schemas for dev/staging/prod +2. **Health Checks**: Implement GraphQL health check endpoint +3. **Logging**: Log slow queries and errors +4. **Monitoring**: Track query performance and error rates +5. **SSL/TLS**: Always use HTTPS in production +6. 
**CORS**: Configure CORS appropriately for your clients + +## Examples + +### Basic Blog Schema +```typescript +// db/schema.ts +import { pgTable, serial, varchar, text, timestamp, boolean, integer, foreignKey } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 255 }).notNull(), + email: varchar('email', { length: 255 }).notNull().unique(), + bio: text('bio'), + avatarUrl: varchar('avatar_url', { length: 500 }), + isActive: boolean('is_active').default(true), + createdAt: timestamp('created_at').defaultNow().notNull(), + updatedAt: timestamp('updated_at').defaultNow().notNull() +}); + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: varchar('title', { length: 255 }).notNull(), + content: text('content').notNull(), + published: boolean('published').default(false), + authorId: integer('author_id').notNull(), + createdAt: timestamp('created_at').defaultNow().notNull(), + updatedAt: timestamp('updated_at').defaultNow().notNull() +}, (table) => [ + foreignKey({ + columns: [table.authorId], + foreignColumns: [users.id], + name: 'posts_author_id_fkey' + }) +]); + +export const comments = pgTable('comments', { + id: serial('id').primaryKey(), + content: text('content').notNull(), + postId: integer('post_id').notNull(), + authorId: integer('author_id').notNull(), + createdAt: timestamp('created_at').defaultNow().notNull() +}, (table) => [ + foreignKey({ + columns: [table.postId], + foreignColumns: [posts.id], + name: 'comments_post_id_fkey' + }), + foreignKey({ + columns: [table.authorId], + foreignColumns: [users.id], + name: 'comments_author_id_fkey' + }) +]); +``` + +Generated GraphQL schema includes: +- Types: `User`, `Post`, `Comment` +- Queries: `users`, `users_by_pk`, `posts`, `posts_by_pk`, `comments`, `comments_by_pk` +- Mutations: `insert_users_one`, `update_posts_by_pk`, `delete_comments`, etc. 
+- Subscriptions: `users_insert`, `posts_update`, `comments_delete` + +### Complex Queries Example +```graphql +# Get users with their posts and comment counts +query { + users(where: { isActive: { _eq: true } }) { + id + name + email + posts(where: { published: { _eq: true } }) { + id + title + comments_aggregate { + count + } + } + } +} +``` + +### Mutation with Variables +```graphql +mutation CreatePost($title: String!, $content: String!, $authorId: Int!) { + insert_posts_one( + object: { + title: $title + content: $content + authorId: $authorId + published: true + } + ) { + id + title + content + author { + id + name + email + } + createdAt + } +} +``` + +### Real-time Subscription +```graphql +subscription OnNewComment { + comments_insert( + where: { + post: { + id: { _eq: 123 } + } + } + ) { + id + content + author { + id + name + } + post { + id + title + } + createdAt + } +} +``` + +### Aggregation Query +```graphql +query PostStatistics { + posts_aggregate { + aggregate { + count + max(createdAt) + min(createdAt) + } + groups { + published + aggregate { + count + } + } + } +} +``` + +## Security Considerations + +### Authentication & Authorization +1. **Context-Based Auth**: Pass user info through context +2. **Resolver-Level Checks**: Check permissions in resolvers +3. **Schema-Based Hiding**: Conditionally expose fields based on roles +4. **RLS Integration**: Use database-level Row Level Security +5. **Attribute-Based Access Control**: Implement ABAC patterns + +### Common Vulnerabilities +1. **GraphQL Injection**: Validate and sanitize inputs (though less common than SQLi) +2. **DoS via Complex Queries**: Implement query depth and complexity limits +3. **Batching Attacks**: Limit batch sizes in mutations +4. **Information Exposure**: Be careful with error messages in production +5. **Introspection Abuse**: Consider disabling introspection in production + +### Rate Limiting Strategies +1. **IP-Based Limiting**: Limit requests per IP address +2. 
**Token-Based Limiting**: Limit requests per API key/user +3. **Query Cost Limiting**: Calculate and limit based on query complexity +4. **Concurrent Connection Limits**: Limit WebSocket connections per user +5. **Subscription Rate Limiting**: Limit number of active subscriptions per user + +### Data Protection +1. **Field-Level Permissions**: Hide sensitive fields based on user role +2. **Data Masking**: Return masked data for PII (e.g., show only last 4 digits of SSN) +3. **Audit Logging**: Log access to sensitive data +4. **Encryption**: Ensure data encrypted at rest and in transit +5. **Backups**: Regular backups with tested restore procedures + +### Production Checklist +- [ ] Enable authentication middleware +- [ ] Configure RLS policies +- [ ] Set query depth limits (suggested: 5-7) +- [ ] Set query complexity limits (suggested: 1000-5000) +- [ ] Disable introspection in production (or restrict to trusted IPs) +- [ ] Implement rate limiting +- [ ] Use HTTPS with valid certificates +- [ ] Configure proper CORS headers +- [ ] Log errors and slow queries +- [ ] Monitor error rates and performance +- [ ] Regular security audits +- [ ] Keep dependencies updated + +## Related Modules +- [Auto-REST](./auto-rest.md): Alternative API generation approach (REST vs GraphQL) +- [Configuration](./config.md): For configuring GraphQL behavior +- [Realtime](./realtime.md): Underlying real-time capabilities +- [RLS](./rls.md): Row Level Security integration +- [Logger](./logger.md): Logging for GraphQL operations +- [Webhooks](./webhooks.md): Alternative to subscriptions for some use cases +- [Functions](./functions.md): For custom business logic beyond CRUD \ No newline at end of file diff --git a/docs/core/overview.md b/docs/core/overview.md new file mode 100644 index 0000000..69b5361 --- /dev/null +++ b/docs/core/overview.md @@ -0,0 +1,135 @@ +# @betterbase/core + +Core engine package containing the backbone functionality of BetterBase including: +- Auto-REST API 
generation +- Database providers (Neon, Turso, PlanetScale, Supabase, Postgres) +- Storage adapters (S3-compatible) +- Webhook system +- Real-time capabilities +- GraphQL integration +- Row Level Security (RLS) +- Branching/preview environments +- Serverless function deployment +- Logging and middleware +- Vector search capabilities + +## Table of Contents +- [Overview](#overview) +- [Modules](#modules) + - [Auto-REST](#auto-rest) + - [Branching](#branching) + - [Configuration](#configuration) + - [Functions](#functions) + - [GraphQL](#graphql) + - [Logger](#logger) + - [Middleware](#middleware) + - [Migration](#migration) + - [Providers](#providers) + - [Realtime](#realtime) + - [RLS](#rls) + - [Storage](#storage) + - [Vector](#vector) + - [Webhooks](#webhooks) +- [Usage](#usage) +- [API Reference](#api-reference) + +## Overview + +The `@betterbase/core` package is the central engine that powers BetterBase applications. It provides: +- Automatic CRUD API generation from database schemas +- Multi-database provider support +- File storage with policy engine +- Webhook event system +- Real-time subscriptions via WebSockets +- GraphQL API generation +- Fine-grained access control with RLS +- Preview environments for branching workflows +- Serverless function bundling and deployment +- Structured logging and request middleware +- Vector similarity search capabilities + +## Modules + +### Auto-REST +Automatic generation of RESTful endpoints from Drizzle ORM schemas with built-in RLS enforcement, filtering, and pagination. + +### Branching +Preview environment system for creating isolated database branches for feature development and testing. + +### Configuration +Zod-based configuration schema validation for database providers, storage, webhooks, GraphQL, vector search, Auto-REST, and branching. + +### Functions +Serverless function bundling, local runtime, and deployment utilities. 
+ +### GraphQL +Auto-generated GraphQL schema, resolvers, server, and real-time bridge from Drizzle ORM schemas. + +### Logger +Structured logging system with file transport capabilities. + +### Middleware +Request logging and RLS session middleware for request processing. + +### Migration +Database migration utilities including RLS policy migration. + +### Providers +Database adapter implementations for Neon, Turso, PlanetScale, Supabase, and Postgres. + +### Realtime +WebSocket-based real-time channel manager for live updates. + +### RLS +Row Level Security system including policy evaluation, generation, scanning, and auth bridging. + +### Storage +File storage abstraction with S3 adapter, policy engine, and image transformation capabilities. + +### Vector +Vector similarity search integration with OpenAI, Cohere, HuggingFace, and custom providers. + +### Webhooks +Outgoing webhook delivery system with signing, retry logic, and event filtering. + +## Usage + +```typescript +import { + defineConfig, + mountAutoRest, + createGraphQLServer, + // ... 
other imports +} from '@betterbase/core'; + +// Define configuration +const config = defineConfig({ + project: { name: 'my-app' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL + } +}); + +// Use in your Hono app +const app = new Hono(); +mountAutoRest(app, db, schema); +``` + +## API Reference + +See individual module documentation for detailed API reference: +- [Auto-REST](./auto-rest.md) +- [Branching](./branching.md) +- [Configuration](./config.md) +- [Functions](./functions.md) +- [GraphQL](./graphql.md) +- [Logger](./logger.md) +- [Middleware](./middleware.md) +- [Migration](./migration.md) +- [Providers](./providers.md) +- [Realtime](./realtime.md) +- [RLS](./rls.md) +- [Storage](./storage.md) +- [Vector](./vector.md) +- [Webhooks](./webhooks.md) \ No newline at end of file diff --git a/docs/examples/blog.md b/docs/examples/blog.md new file mode 100644 index 0000000..0505d0b --- /dev/null +++ b/docs/examples/blog.md @@ -0,0 +1,318 @@ +# Blog + +Build a complete blog with posts, comments, and categories. 
+ +## Features + +- Create and manage blog posts +- Categories and tags +- Comments system +- Rich text content +- Draft/publish workflow + +## Project Setup + +```bash +bb init blog +cd blog +bb auth setup +``` + +## Schema + +```typescript +// src/db/schema.ts +import { sqliteTable, text, boolean, integer } from 'drizzle-orm/sqlite-core' + +export const users = sqliteTable('users', { + id: text('id').primaryKey(), + name: text('name'), + email: text('email').notNull().unique(), + image: text('image'), + bio: text('bio') +}) + +export const categories = sqliteTable('categories', { + id: text('id').primaryKey(), + name: text('name').notNull(), + slug: text('slug').notNull().unique() +}) + +export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + title: text('title').notNull(), + slug: text('slug').notNull().unique(), + content: text('content').notNull(), + excerpt: text('excerpt'), + featuredImage: text('featured_image'), + published: boolean('published').default(false), + authorId: text('author_id').notNull(), + categoryId: text('category_id'), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()), + updatedAt: integer('updated_at', { mode: 'timestamp' }).default(new Date()) +}) + +export const comments = sqliteTable('comments', { + id: text('id').primaryKey(), + content: text('content').notNull(), + postId: text('post_id').notNull(), + authorId: text('author_id').notNull(), + parentId: text('parent_id'), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) + +export const tags = sqliteTable('tags', { + id: text('id').primaryKey(), + name: text('name').notNull(), + slug: text('slug').notNull().unique() +}) + +export const postTags = sqliteTable('post_tags', { + postId: text('post_id').notNull(), + tagId: text('tag_id').notNull() +}) +``` + +## API Routes + +### Posts API + +```typescript +// src/routes/posts.ts +import { Hono } from 'hono' +import { db } from '../db' +import { posts, users, 
categories } from '../db/schema' +import { eq, desc, like } from 'drizzle-orm' +import { auth } from '../auth' + +const postsRouter = new Hono() + +// Get published posts (public) +postsRouter.get('/', async (c) => { + const allPosts = await db + .select({ + id: posts.id, + title: posts.title, + slug: posts.slug, + excerpt: posts.excerpt, + featuredImage: posts.featuredImage, + published: posts.published, + createdAt: posts.createdAt, + category: { id: categories.id, name: categories.name }, + author: { id: users.id, name: users.name, image: users.image } + }) + .from(posts) + .leftJoin(categories, eq(posts.categoryId, categories.id)) + .leftJoin(users, eq(posts.authorId, users.id)) + .where(eq(posts.published, true)) + .orderBy(desc(posts.createdAt)) + + return c.json(allPosts) +}) + +// Get single post by slug (public) +postsRouter.get('/:slug', async (c) => { + const slug = c.req.param('slug') + const [post] = await db + .select() + .from(posts) + .leftJoin(users, eq(posts.authorId, users.id)) + .leftJoin(categories, eq(posts.categoryId, categories.id)) + .where(eq(posts.slug, slug)) + + if (!post || !post.published) { + return c.json({ error: 'Post not found' }, 404) + } + + return c.json(post) +}) + +// Create post (auth required) +postsRouter.post('/', auth, async (c) => { + const user = c.get('user') + const { title, slug, content, excerpt, categoryId, published } = await c.req.json() + + const id = crypto.randomUUID() + await db.insert(posts).values({ + id, + title, + slug, + content, + excerpt, + categoryId, + published: published ?? 
false, + authorId: user.id + }) + + return c.json({ id }, 201) +}) + +// Update post (author only) +postsRouter.patch('/:id', auth, async (c) => { + const user = c.get('user') + const postId = c.req.param('id') + const data = await c.req.json() + + const [existing] = await db.select().from(posts).where(eq(posts.id, postId)) + + if (!existing || existing.authorId !== user.id) { + return c.json({ error: 'Not authorized' }, 403) + } + + await db.update(posts).set({ ...data, updatedAt: new Date() }).where(eq(posts.id, postId)) + + return c.json({ success: true }) +}) + +export default postsRouter +``` + +### Comments API + +```typescript +// src/routes/comments.ts +import { Hono } from 'hono' +import { db } from '../db' +import { comments, posts } from '../db/schema' +import { eq, desc } from 'drizzle-orm' +import { auth } from '../auth' + +const commentsRouter = new Hono() + +// Get comments for a post (public) +commentsRouter.get('/:postId', async (c) => { + const postId = c.req.param('postId') + + const allComments = await db + .select() + .from(comments) + .where(eq(comments.postId, postId)) + .order(desc(comments.createdAt)) + + return c.json(allComments) +}) + +// Add comment (auth required) +commentsRouter.post('/', auth, async (c) => { + const user = c.get('user') + const { content, postId, parentId } = await c.req.json() + + const id = crypto.randomUUID() + await db.insert(comments).values({ + id, + content, + postId, + authorId: user.id, + parentId + }) + + return c.json({ id }, 201) +}) + +export default commentsRouter +``` + +## Rich Text Editor + +For rich text content, consider storing Markdown or using a library: + +```typescript +// When creating/editing posts +const { content } = await c.req.json() + +// Store as Markdown +await db.insert(posts).values({ + // ... 
+ content, // Store Markdown + // Or convert to HTML: const html = markdownToHtml(content) +}) +``` + +## Frontend Example + +```typescript +// PostList.tsx +function PostList() { + const [posts, setPosts] = useState([]) + const [page, setPage] = useState(1) + + useEffect(() => { + loadPosts(page) + }, [page]) + + const loadPosts = async (page) => { + const offset = (page - 1) * 10 + const { data } = await client + .from('posts') + .select() + .eq('published', true) + .order('createdAt', { ascending: false }) + .limit(10) + .offset(offset) + setPosts(data) + } + + return ( +
+      {posts.map(post => (
+        <article key={post.id}>
+          <h2>{post.title}</h2>
+          <p>{post.excerpt}</p>
+          <a href={`/posts/${post.slug}`}>Read more</a>
+        </article>
+      ))}
+
+ ) +} + +// PostDetail.tsx +function PostDetail({ slug }) { + const [post, setPost] = useState(null) + const [comments, setComments] = useState([]) + + useEffect(() => { + loadPost(slug) + }, [slug]) + + const loadPost = async (slug) => { + const { data: posts } = await client + .from('posts') + .select() + .eq('slug', slug) + .single() + setPost(posts) + + const { data } = await client + .from('comments') + .select() + .eq('postId', posts.id) + setComments(data) + } + + if (!post) return + + return ( +
+    <article>
+      <h1>{post.title}</h1>
+      <div>{post.content}</div>
+      <Comments comments={comments} />
+    </article>
+ ) +} +``` + +## What's Included + +This example demonstrates: +- Blog post CRUD +- Categories and tags +- Comments system +- Draft/publish workflow +- Rich text content + +## Related + +- [Database Feature](../features/database.md) - Database operations +- [Auth Feature](../features/authentication.md) - User authentication +- [Client SDK](../api-reference/client-sdk.md) - Client usage diff --git a/docs/examples/chat-app.md b/docs/examples/chat-app.md new file mode 100644 index 0000000..6f8e95d --- /dev/null +++ b/docs/examples/chat-app.md @@ -0,0 +1,275 @@ +# Chat App + +Build a real-time chat application with presence indicators. + +## Features + +- Real-time messaging +- User presence (online/offline) +- Typing indicators +- Message history +- Direct messages + +## Project Setup + +```bash +bb init chat-app +cd chat-app +bb auth setup +``` + +## Schema + +```typescript +// src/db/schema.ts +import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core' + +export const users = sqliteTable('users', { + id: text('id').primaryKey(), + name: text('name'), + email: text('email').notNull().unique(), + image: text('image') +}) + +export const messages = sqliteTable('messages', { + id: text('id').primaryKey(), + content: text('content').notNull(), + senderId: text('sender_id').notNull(), + roomId: text('room_id').notNull(), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) + +export const rooms = sqliteTable('rooms', { + id: text('id').primaryKey(), + name: text('name').notNull(), + isDirect: integer('is_direct', { mode: 'boolean' }).default(false), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) +``` + +## API Routes + +```typescript +// src/routes/messages.ts +import { Hono } from 'hono' +import { db } from '../db' +import { messages, rooms } from '../db/schema' +import { eq, desc } from 'drizzle-orm' +import { auth } from '../auth' + +const messagesRouter = new Hono() + +// Get messages for a 
room +messagesRouter.get('/:roomId', auth, async (c) => { + const roomId = c.req.param('roomId') + const msgs = await db + .select() + .from(messages) + .where(eq(messages.roomId, roomId)) + .order(desc(messages.createdAt)) + .limit(100) + + return c.json(msgs) +}) + +// Send message +messagesRouter.post('/', auth, async (c) => { + const user = c.get('user') + const { content, roomId } = await c.req.json() + + const id = crypto.randomUUID() + await db.insert(messages).values({ + id, + content, + senderId: user.id, + roomId + }) + + return c.json({ id, content, senderId: user.id, roomId }, 201) +}) + +// Get rooms +messagesRouter.get('/rooms', auth, async (c) => { + const allRooms = await db.select().from(rooms) + return c.json(allRooms) +}) + +// Create room +messagesRouter.post('/rooms', auth, async (c) => { + const { name, isDirect } = await c.req.json() + + const id = crypto.randomUUID() + await db.insert(rooms).values({ id, name, isDirect }) + + return c.json({ id, name, isDirect }, 201) +}) + +export default messagesRouter +``` + +## Real-time Chat + +### Presence Tracking + +```typescript +// Track user presence +const channel = client.channel('chat-general') + +// Announce presence +channel.track({ + user_id: user.id, + user_name: user.name, + online_at: new Date().toISOString() +}) + +// Listen for joins/leaves +channel.on('presence', { event: 'sync' }, () => { + const state = channel.presenceState() + updateUserList(state) +}) + +channel.on('presence', { event: 'join' }, ({ key, newPresences }) => { + console.log('User joined:', newPresences) +}) + +channel.on('presence', { event: 'leave' }, ({ key, leftPresences }) => { + console.log('User left:', leftPresences) +}) + +channel.subscribe() +``` + +### Typing Indicators + +```typescript +// Broadcast typing +channel.send({ + type: 'broadcast', + event: 'typing', + payload: { userId: user.id, roomId: 'general' } +}) + +// Listen for typing +channel.on('broadcast', { event: 'typing' }, (payload) => { + 
showTypingIndicator(payload.userId) +}) +``` + +### Message Updates + +```typescript +// Subscribe to new messages +const channel = client.channel('chat-general') + +channel + .on('postgres_changes', + { event: 'INSERT', table: 'messages' }, + (payload) => { + addMessage(payload.new) + } + ) + .subscribe() +``` + +## Frontend Example + +```typescript +// ChatRoom.tsx +function ChatRoom({ roomId }) { + const [messages, setMessages] = useState([]) + const [message, setMessage] = useState('') + const [typing, setTyping] = useState([]) + + useEffect(() => { + // Load initial messages + loadMessages(roomId) + + // Subscribe to realtime + const channel = client.channel(`chat-${roomId}`) + + channel + .on('postgres_changes', { event: 'INSERT', table: 'messages' }, (payload) => { + if (payload.new.roomId === roomId) { + setMessages(prev => [...prev, payload.new]) + } + }) + .on('broadcast', { event: 'typing' }, (payload) => { + setTyping(prev => [...new Set([...prev, payload.userId])]) + setTimeout(() => setTyping(prev => prev.filter(u => u !== payload.userId)), 3000) + }) + .subscribe() + + return () => channel.unsubscribe() + }, [roomId]) + + const sendMessage = async () => { + await client.from('messages').insert({ + content: message, + roomId + }) + setMessage('') + } + + const handleTyping = () => { + // Send typing indicator + } + + return ( +
+    <div className="chat-room">
+      <div className="messages">
+        {messages.map(msg => (
+          <Message key={msg.id} message={msg} />
+        ))}
+      </div>
+      <div className="typing">
+        {typing.map(userId => <span key={userId}>{userId} is typing...</span>)}
+      </div>
+      <input
+        value={message}
+        onChange={(e) => { setMessage(e.target.value); handleTyping() }}
+      />
+      <button onClick={sendMessage}>Send</button>
+    </div>
+ ) +} +``` + +## Direct Messages + +For direct messages between users: + +```typescript +// Create or get direct message room +async function getOrCreateDirectRoom(otherUserId) { + const userId = currentUser.id + + // Check if room exists + const [room] = await db + .select() + .from(rooms) + .where(eq(rooms.isDirect, true)) + .where(sql`...`) // Check both users + + if (room) return room + + // Create new room + const id = crypto.randomUUID() + await db.insert(rooms).values({ id, isDirect: true }) + return { id } +} +``` + +## What's Included + +This example demonstrates: +- Real-time messaging +- Presence tracking +- Typing indicators +- Direct messages +- Room management + +## Related + +- [Realtime Feature](../features/realtime.md) - Real-time subscriptions +- [Auth Feature](../features/authentication.md) - User authentication +- [Client SDK](../api-reference/client-sdk.md) - Client usage diff --git a/docs/examples/ecommerce.md b/docs/examples/ecommerce.md new file mode 100644 index 0000000..51179b5 --- /dev/null +++ b/docs/examples/ecommerce.md @@ -0,0 +1,471 @@ +# E-commerce + +Build a complete e-commerce store with products, cart, orders, and payment integration. 
+ +## Features + +- Product catalog with categories +- Shopping cart +- Order management +- Payment processing +- Inventory tracking +- User authentication + +## Project Setup + +```bash +bb init ecommerce +cd ecommerce +bb auth setup +``` + +## Schema + +```typescript +// src/db/schema.ts +import { sqliteTable, text, integer, real } from 'drizzle-orm/sqlite-core' + +export const users = sqliteTable('users', { + id: text('id').primaryKey(), + name: text('name'), + email: text('email').notNull().unique(), + image: text('image') +}) + +export const categories = sqliteTable('categories', { + id: text('id').primaryKey(), + name: text('name').notNull(), + slug: text('slug').notNull().unique(), + image: text('image') +}) + +export const products = sqliteTable('products', { + id: text('id').primaryKey(), + name: text('name').notNull(), + slug: text('slug').notNull().unique(), + description: text('description'), + price: real('price').notNull(), + comparePrice: real('compare_price'), + images: text('images'), // JSON array + inventory: integer('inventory').default(0), + categoryId: text('category_id'), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) + +export const cartItems = sqliteTable('cart_items', { + id: text('id').primaryKey(), + userId: text('user_id'), + sessionId: text('session_id'), + productId: text('product_id').notNull(), + quantity: integer('quantity').default(1) +}) + +export const orders = sqliteTable('orders', { + id: text('id').primaryKey(), + userId: text('user_id').notNull(), + status: text('status').default('pending'), + total: real('total').notNull(), + shippingAddress: text('shipping_address'), // JSON + paymentStatus: text('payment_status').default('pending'), + paymentIntentId: text('payment_intent_id'), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) + +export const orderItems = sqliteTable('order_items', { + id: text('id').primaryKey(), + orderId: text('order_id').notNull(), + 
productId: text('product_id').notNull(), + quantity: integer('quantity').notNull(), + price: real('price').notNull() +}) +``` + +## API Routes + +### Products + +```typescript +// src/routes/products.ts +import { Hono } from 'hono' +import { db } from '../db' +import { products, categories } from '../db/schema' +import { eq, desc } from 'drizzle-orm' + +const productsRouter = new Hono() + +// Get all products +productsRouter.get('/', async (c) => { + const allProducts = await db.select().from(products).order(desc(products.createdAt)) + return c.json(allProducts) +}) + +// Get product by slug +productsRouter.get('/:slug', async (c) => { + const slug = c.req.param('slug') + const [product] = await db.select().from(products).where(eq(products.slug, slug)) + + if (!product) { + return c.json({ error: 'Product not found' }, 404) + } + + return c.json(product) +}) + +// Get products by category +productsRouter.get('/category/:slug', async (c) => { + const slug = c.req.param('slug') + const [category] = await db.select().from(categories).where(eq(categories.slug, slug)) + + if (!category) { + return c.json({ error: 'Category not found' }, 404) + } + + const allProducts = await db + .select() + .from(products) + .where(eq(products.categoryId, category.id)) + + return c.json(allProducts) +}) + +export default productsRouter +``` + +### Cart + +```typescript +// src/routes/cart.ts +import { Hono } from 'hono' +import { db } from '../db' +import { cartItems, products } from '../db/schema' +import { eq, and } from 'drizzle-orm' + +const cartRouter = new Hono() + +// Get cart items +cartRouter.get('/', async (c) => { + const userId = c.get('user')?.id + const sessionId = c.req.header('x-session-id') + + const items = await db + .select({ + id: cartItems.id, + productId: cartItems.productId, + quantity: cartItems.quantity, + product: products + }) + .from(cartItems) + .leftJoin(products, eq(cartItems.productId, products.id)) + .where( + userId + ? 
eq(cartItems.userId, userId) + : eq(cartItems.sessionId, sessionId) + ) + + return c.json(items) +}) + +// Add to cart +cartRouter.post('/', async (c) => { + const userId = c.get('user')?.id + const sessionId = c.req.header('x-session-id') + const { productId, quantity = 1 } = await c.req.json() + + // Check if already in cart + const existing = await db + .select() + .from(cartItems) + .where( + and( + eq(cartItems.productId, productId), + userId + ? eq(cartItems.userId, userId) + : eq(cartItems.sessionId, sessionId) + ) + ) + .limit(1) + + if (existing.length > 0) { + await db + .update(cartItems) + .set({ quantity: existing[0].quantity + quantity }) + .where(eq(cartItems.id, existing[0].id)) + } else { + await db.insert(cartItems).values({ + id: crypto.randomUUID(), + userId, + sessionId, + productId, + quantity + }) + } + + return c.json({ success: true }) +}) + +// Update quantity +cartRouter.patch('/:id', async (c) => { + const { quantity } = await c.req.json() + const itemId = c.req.param('id') + + await db.update(cartItems).set({ quantity }).where(eq(cartItems.id, itemId)) + + return c.json({ success: true }) +}) + +// Remove from cart +cartRouter.delete('/:id', async (c) => { + const itemId = c.req.param('id') + await db.delete(cartItems).where(eq(cartItems.id, itemId)) + + return c.json({ success: true }) +}) + +export default cartRouter +``` + +### Orders + +```typescript +// src/routes/orders.ts +import { Hono } from 'hono' +import { db } from '../db' +import { orders, orderItems, cartItems, products } from '../db/schema' +import { eq } from 'drizzle-orm' +import { auth } from '../auth' + +const ordersRouter = new Hono() + +// Create order +ordersRouter.post('/', auth, async (c) => { + const user = c.get('user') + const { shippingAddress } = await c.req.json() + + // Get cart items + const items = await db + .select() + .from(cartItems) + .leftJoin(products, eq(cartItems.productId, products.id)) + .where(eq(cartItems.userId, user.id)) + + if 
(items.length === 0) { + return c.json({ error: 'Cart is empty' }, 400) + } + + // Calculate total + const total = items.reduce((sum, item) => { + return sum + (item.products?.price || 0) * item.cartItems.quantity + }, 0) + + // Create order + const orderId = crypto.randomUUID() + await db.insert(orders).values({ + id: orderId, + userId: user.id, + total, + shippingAddress: JSON.stringify(shippingAddress) + }) + + // Create order items and update inventory + for (const item of items) { + const product = item.products + if (!product) continue + + await db.insert(orderItems).values({ + id: crypto.randomUUID(), + orderId, + productId: product.id, + quantity: item.cartItems.quantity, + price: product.price + }) + + // Update inventory + await db + .update(products) + .set({ inventory: product.inventory - item.cartItems.quantity }) + .where(eq(products.id, product.id)) + } + + // Clear cart + await db.delete(cartItems).where(eq(cartItems.userId, user.id)) + + return c.json({ orderId, total }, 201) +}) + +// Get user orders +ordersRouter.get('/', auth, async (c) => { + const user = c.get('user') + + const userOrders = await db + .select() + .from(orders) + .where(eq(orders.userId, user.id)) + .order(desc(orders.createdAt)) + + return c.json(userOrders) +}) + +// Get order details +ordersRouter.get('/:id', auth, async (c) => { + const user = c.get('user') + const orderId = c.req.param('id') + + const [order] = await db + .select() + .from(orders) + .where(eq(orders.id, orderId)) + + if (!order || order.userId !== user.id) { + return c.json({ error: 'Order not found' }, 404) + } + + const items = await db + .select() + .from(orderItems) + .leftJoin(products, eq(orderItems.productId, products.id)) + .where(eq(orderItems.orderId, orderId)) + + return c.json({ ...order, items }) +}) + +export default ordersRouter +``` + +## Payment Integration + +Process payments using a payment provider: + +```typescript +// src/routes/checkout.ts +import { Hono } from 'hono' +import { 
stripe } from '../lib/payment' + +const checkoutRouter = new Hono() + +checkoutRouter.post('/create-payment-intent', auth, async (c) => { + const user = c.get('user') + const { orderId } = await c.req.json() + + // Get order total + const [order] = await db.select().from(orders).where(eq(orders.id, orderId)) + + // Create payment intent + const paymentIntent = await stripe.paymentIntents.create({ + amount: Math.round(order.total * 100), // Stripe uses cents + currency: 'usd', + metadata: { orderId } + }) + + // Update order with payment intent + await db + .update(orders) + .set({ paymentIntentId: paymentIntent.id }) + .where(eq(orders.id, orderId)) + + return c.json({ clientSecret: paymentIntent.client_secret }) +}) + +// Webhook for payment success +checkoutRouter.post('/webhook', async (c) => { + const sig = c.req.header('stripe-signature') + const body = await c.req.text() + + try { + const event = stripe.webhooks.constructEvent( + body, + sig, + process.env.STRIPE_WEBHOOK_SECRET + ) + + if (event.type === 'payment_intent.succeeded') { + const { orderId } = event.data.object.metadata + + await db + .update(orders) + .set({ + paymentStatus: 'paid', + status: 'processing' + }) + .where(eq(orders.id, orderId)) + } + } catch (err) { + return c.json({ error: 'Webhook error' }, 400) + } + + return c.json({ received: true }) +}) + +export default checkoutRouter +``` + +## Frontend Example + +```typescript +// ProductCard.tsx +function ProductCard({ product }) { + const addToCart = async () => { + await client.from('cart').insert({ + productId: product.id, + quantity: 1 + }) + // Show success notification + } + + return ( +
+    <div className="product-card">
+      <img src={product.images?.[0]} alt={product.name} />
+      <h3>{product.name}</h3>
+      <p>${product.price}</p>
+      {product.inventory > 0 ? (
+        <button onClick={addToCart}>Add to Cart</button>
+      ) : (
+        <span>Out of Stock</span>
+      )}
+    </div>
+ ) +} + +// Cart.tsx +function Cart() { + const [items, setItems] = useState([]) + + useEffect(() => { + loadCart() + }, []) + + const loadCart = async () => { + const { data } = await client.from('cart').select() + setItems(data) + } + + const total = items.reduce((sum, item) => + sum + item.product.price * item.quantity, 0 + ) + + return ( +
+    <div className="cart">
+      {items.map(item => (
+        <CartItem key={item.id} item={item} />
+      ))}
+      <p>Total: ${total}</p>
+      <button>Checkout</button>
+    </div>
+ ) +} +``` + +## What's Included + +This example demonstrates: +- Product catalog +- Shopping cart +- Order management +- Inventory tracking +- Payment integration +- User authentication + +## Related + +- [Database Feature](../features/database.md) - Database operations +- [Auth Feature](../features/authentication.md) - User authentication +- [Client SDK](../api-reference/client-sdk.md) - Client usage diff --git a/docs/examples/todo-app.md b/docs/examples/todo-app.md new file mode 100644 index 0000000..0dd8b18 --- /dev/null +++ b/docs/examples/todo-app.md @@ -0,0 +1,209 @@ +# Todo App + +Build a collaborative todo application with real-time updates. + +## Features + +- Create, update, delete todos +- Mark todos as complete +- Real-time sync across devices +- User authentication + +## Project Setup + +```bash +bb init todo-app +cd todo-app +bb auth setup +``` + +## Schema + +```typescript +// src/db/schema.ts +import { sqliteTable, text, boolean, integer } from 'drizzle-orm/sqlite-core' + +export const todos = sqliteTable('todos', { + id: text('id').primaryKey(), + title: text('title').notNull(), + completed: boolean('completed').default(false), + userId: text('user_id').notNull(), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) +``` + +## API Routes + +```typescript +// src/routes/todos.ts +import { Hono } from 'hono' +import { db } from '../db' +import { todos } from '../db/schema' +import { eq } from 'drizzle-orm' +import { auth } from '../auth' + +const todosRouter = new Hono() + +// Get all todos for user +todosRouter.get('/', auth, async (c) => { + const user = c.get('user') + const allTodos = await db + .select() + .from(todos) + .where(eq(todos.userId, user.id)) + + return c.json(allTodos) +}) + +// Create todo +todosRouter.post('/', auth, async (c) => { + const user = c.get('user') + const { title } = await c.req.json() + + const id = crypto.randomUUID() + await db.insert(todos).values({ + id, + title, + userId: user.id, 
+ completed: false + }) + + return c.json({ id, title, completed: false }, 201) +}) + +// Toggle todo +todosRouter.patch('/:id/toggle', auth, async (c) => { + const user = c.get('user') + const todoId = c.req.param('id') + + const [existing] = await db + .select() + .from(todos) + .where(eq(todos.id, todoId)) + + if (!existing || existing.userId !== user.id) { + return c.json({ error: 'Not found' }, 404) + } + + await db + .update(todos) + .set({ completed: !existing.completed }) + .where(eq(todos.id, todoId)) + + return c.json({ success: true }) +}) + +// Delete todo +todosRouter.delete('/:id', auth, async (c) => { + const user = c.get('user') + const todoId = c.req.param('id') + + await db + .delete(todos) + .where(eq(todos.id, todoId)) + .where(eq(todos.userId, user.id)) + + return c.json({ success: true }) +}) + +export default todosRouter +``` + +## Real-time Updates + +Enable realtime in `betterbase.config.ts`: + +```typescript +export default defineConfig({ + realtime: { + enabled: true, + tables: ['todos'] + } +}) +``` + +Client-side subscription: + +```typescript +// client.ts +const client = createClient({ url: 'http://localhost:3000' }) + +const channel = client.channel('todos') + +channel + .on('postgres_changes', + { event: '*', table: 'todos' }, + () => fetchTodos() + ) + .subscribe() + +async function fetchTodos() { + const { data } = await client.from('todos').select() + renderTodos(data) +} +``` + +## Frontend Implementation + +```typescript +// React component +function TodoList() { + const [todos, setTodos] = useState([]) + const [newTodo, setNewTodo] = useState('') + + useEffect(() => { + fetchTodos() + + // Subscribe to changes + const channel = client.channel('todos') + channel.on('postgres_changes', { event: '*', table: 'todos' }, () => { + fetchTodos() + }).subscribe() + }, []) + + const addTodo = async () => { + await client.from('todos').insert({ title: newTodo }) + setNewTodo('') + } + + const toggleTodo = async (id, completed) => { + // 
Toggle via API or direct update + } + + return ( +
+    <div>
+      <input
+        value={newTodo}
+        onChange={(e) => setNewTodo(e.target.value)}
+      />
+      <button onClick={addTodo}>Add</button>
+      <ul>
+        {todos.map(todo => (
+          <li key={todo.id}>
+            <input
+              type="checkbox"
+              checked={todo.completed}
+              onChange={() => toggleTodo(todo.id)}
+            />
+            {todo.title}
+          </li>
+        ))}
+      </ul>
+    </div>
+ ) +} +``` + +## What's Included + +This example demonstrates: +- Database CRUD operations +- User authentication +- Real-time subscriptions +- API route creation + +## Related + +- [Auth Feature](../features/authentication.md) - User authentication +- [Realtime Feature](../features/realtime.md) - Real-time updates +- [Client SDK](../api-reference/client-sdk.md) - Client usage diff --git a/docs/features/authentication.md b/docs/features/authentication.md new file mode 100644 index 0000000..d47e36d --- /dev/null +++ b/docs/features/authentication.md @@ -0,0 +1,179 @@ +# Authentication + +BetterBase provides built-in authentication powered by BetterAuth, supporting multiple authentication methods out of the box. + +## Features + +- **Email/Password** - Classic authentication with email and password +- **OAuth Providers** - Google, GitHub, Discord, Apple, Microsoft, Twitter, Facebook +- **Magic Links** - Passwordless authentication via email +- **Phone Auth** - SMS/OTP verification +- **MFA** - Multi-factor authentication support +- **Sessions** - Secure session management with automatic refresh + +## Quick Setup + +```bash +# Initialize authentication +bb auth setup +``` + +This creates `src/auth/` with BetterAuth configuration. 
+ +## Configuration + +Edit `src/auth/index.ts`: + +```typescript +import { betterAuth } from 'better-auth' +import { drizzleAdapter } from 'better-auth/adapters/drizzle' +import { db } from '../db' + +export const auth = betterAuth({ + database: drizzleAdapter(db, { + provider: 'sqlite' + }), + emailAndPassword: { + enabled: true, + requireEmailVerification: false + }, + socialProviders: { + github: { + clientId: process.env.GITHUB_CLIENT_ID, + clientSecret: process.env.GITHUB_CLIENT_SECRET + }, + google: { + clientId: process.env.GOOGLE_CLIENT_ID, + clientSecret: process.env.GOOGLE_CLIENT_SECRET + } + }, + session: { + expiresIn: 60 * 60 * 24 * 7, // 7 days + updateAge: 60 * 60 * 24 // 1 day + } +}) +``` + +## Adding OAuth Providers + +```bash +# Add GitHub OAuth +bb auth add-provider github + +# Add Google OAuth +bb auth add-provider google +``` + +Available providers: `google`, `github`, `discord`, `apple`, `microsoft`, `twitter`, `facebook` + +## Authentication Endpoints + +BetterBase automatically provides auth endpoints: + +| Method | Endpoint | Description | +|--------|----------|-------------| +| `POST` | `/api/auth/signup` | Register new user | +| `POST` | `/api/auth/signin` | Sign in user | +| `POST` | `/api/auth/signout` | Sign out user | +| `GET` | `/api/auth/session` | Get current session | +| `POST` | `/api/auth/refresh` | Refresh session | +| `POST` | `/api/auth/magic-link` | Send magic link email | +| `GET` | `/api/auth/magic-link/verify` | Verify magic link | +| `POST` | `/api/auth/otp/send` | Send OTP | +| `POST` | `/api/auth/otp/verify` | Verify OTP | +| `POST` | `/api/auth/mfa/enable` | Enable MFA | +| `POST` | `/api/auth/mfa/verify` | Verify MFA | + +## Using the Client SDK + +```typescript +import { createClient } from '@betterbase/client' + +const client = createClient({ + url: 'http://localhost:3000' +}) + +// Sign up +const { data, error } = await client.auth.signUp({ + email: 'user@example.com', + password: 'secure-password', + name: 
'John Doe' +}) + +// Sign in +const { data, error } = await client.auth.signInWithPassword({ + email: 'user@example.com', + password: 'secure-password' +}) + +// Sign in with OAuth +const { data, error } = await client.auth.signInWithOAuth({ + provider: 'github' +}) + +// Sign out +await client.auth.signOut() + +// Get current user +const { data: { user } } = await client.auth.getUser() +``` + +## Protecting Routes + +Use the auth middleware to protect routes: + +```typescript +import { auth } from '../auth' + +// Protect a route +app.post('/api/posts', auth, async (c) => { + const user = c.get('user') + // user is guaranteed to be authenticated +}) +``` + +## Session Management + +Sessions are automatically managed: +- Created on login +- Stored as HTTP-only cookies +- Refreshed before expiry +- Invalidated on logout + +## Security Considerations + +1. **Use strong passwords** - Enforce minimum length (default: 8 characters) +2. **Enable email verification** - Require email confirmation for sensitive actions +3. **Use MFA** - Enable for admin accounts +4. **Rotate secrets** - Change `AUTH_SECRET` periodically +5. 
**Use HTTPS** - Always use in production + +## Environment Variables + +```bash +# Required +AUTH_SECRET=your-secret-key-min-32-chars +AUTH_URL=https://your-domain.com + +# OAuth (example for GitHub) +GITHUB_CLIENT_ID=your-client-id +GITHUB_CLIENT_SECRET=your-client-secret + +# Email (optional, for magic links) +SMTP_HOST=smtp.example.com +SMTP_PORT=587 +SMTP_USER=your-username +SMTP_PASS=your-password +SMTP_FROM=noreply@example.com + +# Phone (optional) +TWILIO_ACCOUNT_SID=your-sid +TWILIO_AUTH_TOKEN=your-token +TWILIO_PHONE_NUMBER=+1234567890 +``` + +## Related + +- [Client SDK](../api-reference/client-sdk.md) - Using auth in frontend +- [RLS](./rls.md) - Row Level Security with auth +- [Configuration](../getting-started/configuration.md) - Auth configuration diff --git a/docs/features/database.md b/docs/features/database.md new file mode 100644 index 0000000..98b320a --- /dev/null +++ b/docs/features/database.md @@ -0,0 +1,243 @@ +# Database + +BetterBase supports multiple database providers and uses Drizzle ORM for type-safe database operations. 
+ +## Supported Providers + +| Provider | Use Case | Notes | +|----------|----------|-------| +| **SQLite** | Local development | Zero config, file-based | +| **PostgreSQL** | Production | Full SQL capabilities | +| **Neon** | Serverless PostgreSQL | Automatic scaling | +| **Turso** | Edge deployments | libSQL, distributed | +| **PlanetScale** | Serverless MySQL | Branch-based schema | +| **Supabase** | Supabase hosted | PostgreSQL compatible | + +## Schema Definition + +Define your database schema using Drizzle ORM: + +```typescript +// src/db/schema.ts +import { sqliteTable, text, integer, boolean } from 'drizzle-orm/sqlite-core' +import { pgTable, serial, varchar, text, boolean, timestamp } from 'drizzle-orm/pg-core' +import { relations } from 'drizzle-orm' + +// SQLite example +export const users = sqliteTable('users', { + id: text('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull().unique(), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) + +// PostgreSQL example +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: varchar('title', { length: 255 }).notNull(), + content: text('content'), + published: boolean('published').default(false), + userId: integer('user_id').references(() => users.id), + createdAt: timestamp('created_at').defaultNow() +}) + +// Relations +export const usersRelations = relations(users, ({ many }) => ({ + posts: many(posts) +})) + +export const postsRelations = relations(posts, ({ one }) => ({ + user: one(users, { + fields: [posts.userId], + references: [users.id] + }) +})) +``` + +## Database Initialization + +```typescript +// src/db/index.ts +import { drizzle } from 'drizzle-orm/postgres' +import { migrate } from 'drizzle-orm/postgres/migrator' +import postgres from 'postgres' + +const connectionString = process.env.DATABASE_URL! 
+const client = postgres(connectionString) + +export const db = drizzle(client) + +// Run migrations +await migrate(db, { migrationsFolder: './drizzle' }) +``` + +## Querying Data + +### Select + +```typescript +import { db } from '../db' +import { users, posts } from './schema' +import { eq, desc, asc, like, and, or } from 'drizzle-orm' + +// Get all users +const allUsers = await db.select().from(users) + +// Get user by ID +const [user] = await db.select().from(users).where(eq(users.id, 'user-123')) + +// Get posts with filtering +const publishedPosts = await db + .select() + .from(posts) + .where(and( + eq(posts.published, true), + like(posts.title, '%tutorial%') + )) + .order(desc(posts.createdAt)) + .limit(10) +``` + +### Insert + +```typescript +// Insert single +await db.insert(users).values({ + id: crypto.randomUUID(), + name: 'John Doe', + email: 'john@example.com' +}) + +// Insert multiple +await db.insert(posts).values([ + { title: 'Post 1', content: 'Content 1', userId: 1 }, + { title: 'Post 2', content: 'Content 2', userId: 1 } +]) +``` + +### Update + +```typescript +await db + .update(posts) + .set({ + title: 'Updated Title', + published: true + }) + .where(eq(posts.id, 1)) +``` + +### Delete + +```typescript +await db.delete(posts).where(eq(posts.id, 1)) +``` + +## Relationships + +```typescript +// With relations +const usersWithPosts = await db + .select() + .from(users) + .leftJoin(posts, eq(users.id, posts.userId)) + +// Using Drizzle's withRelations +const userWithPosts = await db.query.users.findFirst({ + with: { + posts: true + } +}) +``` + +## Transactions + +```typescript +await db.transaction(async (tx) => { + const [user] = await tx.insert(users).values({ + id: crypto.randomUUID(), + name: 'New User', + email: 'new@example.com' + }).returning() + + await tx.insert(posts).values({ + title: 'First Post', + content: 'Hello!', + userId: user.id + }) +}) +``` + +## Migrations + +```bash +# Generate migration from schema changes +bun run 
db:generate + +# Apply migrations +bun run db:push + +# Or use CLI +bb migrate generate my-migration +bb migrate up +``` + +## Auto-REST + +BetterBase automatically generates REST endpoints from your schema: + +```bash +# Endpoints are automatically available: +GET /api/users # List users +GET /api/users/:id # Get user by ID +POST /api/users # Create user +PATCH /api/users/:id # Update user +DELETE /api/users/:id # Delete user +``` + +### Filtering + +``` +GET /api/users?filter=active.eq.true&sort=createdAt.desc&limit=10&offset=0 +``` + +### Operators + +| Operator | Description | +|----------|-------------| +| `eq` | Equals | +| `ne` | Not equals | +| `gt` | Greater than | +| `gte` | Greater or equal | +| `lt` | Less than | +| `lte` | Less or equal | +| `like` | Pattern match | +| `in` | In array | + +## Indexes + +Add indexes for better query performance: + +```typescript +export const posts = pgTable('posts', { + // ... +}, (table) => ({ + userIdx: index('user_idx').on(table.userId), + publishedIdx: index('published_idx').on(table.published), + createdIdx: index('created_idx').on(table.createdAt) +})) +``` + +## Best Practices + +1. **Use proper types** - Leverage Drizzle's type inference +2. **Add indexes** - Index frequently queried columns +3. **Use transactions** - For multi-step operations +4. **Limit results** - Always use pagination for lists +5. **Validate inputs** - Sanitize before queries + +## Related + +- [Configuration](../getting-started/configuration.md) - Database provider config +- [RLS](./rls.md) - Row Level Security +- [GraphQL](./graphql.md) - GraphQL API diff --git a/docs/features/functions.md b/docs/features/functions.md new file mode 100644 index 0000000..ab0d38d --- /dev/null +++ b/docs/features/functions.md @@ -0,0 +1,262 @@ +# Functions + +BetterBase supports serverless functions for custom business logic, deployment, and edge computing. 
+ +## Features + +- **Multiple Targets** - AWS Lambda, Cloudflare Workers, Vercel, Netlify, Deno, Bun +- **Local Development** - Test functions locally with hot reload +- **Automatic Bundling** - esbuild-based bundling with tree shaking +- **TypeScript Support** - Full type checking during bundling +- **Environment Variables** - Configure runtime environment +- **Dependencies** - Automatic dependency inclusion + +## Quick Setup + +```bash +# Create a new function +bb function create process-data +``` + +This creates `src/functions/process-data/index.ts`: + +```typescript +export default async function handler(event, context) { + return { + statusCode: 200, + body: JSON.stringify({ message: 'Hello!' }) + } +} +``` + +## Function Structure + +```typescript +// src/functions/my-function/index.ts +import type { BetterBaseResponse } from '@betterbase/shared' + +export default async function handler( + event: Record, + context?: Record +): Promise> { + try { + // Process event + const result = await processData(event) + + return { + data: result, + error: null + } + } catch (error) { + return { + data: null, + error: error instanceof Error ? 
error.message : 'Unknown error' + } + } +} + +async function processData(event: Record) { + // Your logic here + return { processed: true } +} +``` + +## Event Format + +The event object contains: + +```typescript +interface FunctionEvent { + // HTTP properties (API Gateway style) + method: string + path: string + headers: Record + body: string | undefined + query: Record + params: Record + + // Custom properties + [key: string]: unknown +} +``` + +## Response Format + +Return a `BetterBaseResponse`: + +```typescript +interface BetterBaseResponse { + data: T | null + error: string | null + count?: number + pagination?: { + page: number + pageSize: number + total: number + } +} +``` + +## Local Development + +```bash +# Run function locally +bb function dev process-data + +# With custom event +bb function dev process-data --event '{"key": "value"}' +``` + +## Building + +```bash +# Build for default target +bb function build process-data + +# Build for specific platform +bb function build process-data --target aws-lambda + +# Build with minification +bb function build process-data --minify +``` + +### Build Options + +| Option | Description | Default | +|--------|-------------|---------| +| `--target` | Build target | `node` | +| `--format` | Output format | `esm` | +| `--minify` | Enable minification | `false` | +| `--sourcemap` | Generate source maps | `true` | +| `--outdir` | Output directory | `dist/` | + +### Supported Targets + +- `node` - Node.js +- `browser` - Browser +- `aws-lambda` - AWS Lambda +- `cloudflare-workers` - Cloudflare Workers +- `vercel` - Vercel Serverless +- `netlify` - Netlify Functions +- `deno` - Deno Deploy +- `bun` - Bun runtime + +## Deployment + +```bash +# Deploy function +bb function deploy process-data + +# Deploy with environment sync +bb function deploy process-data --sync-env +``` + +## Database Access + +```typescript +// src/functions/user-profile/index.ts +import { drizzle } from 'drizzle-orm/postgres' +import { eq } from 
'drizzle-orm' +import { users } from '../../db/schema' + +export default async function handler(event) { + const userId = event.pathParameters?.userId + + if (!userId) { + return { data: null, error: 'Missing userId' } + } + + const db = drizzle(process.env.DATABASE_URL) + const [user] = await db + .select() + .from(users) + .where(eq(users.id, userId)) + .limit(1) + + if (!user) { + return { data: null, error: 'User not found' } + } + + return { data: user, error: null } +} +``` + +## Using External APIs + +```typescript +export default async function handler(event) { + const { url } = event + + const response = await fetch(url, { + headers: { + 'Authorization': `Bearer ${process.env.API_KEY}` + } + }) + + const data = await response.json() + + return { data, error: null } +} +``` + +## Scheduled Functions + +For scheduled execution (cron jobs): + +```typescript +export default async function handler(event) { + const { scheduleTime } = event + + // Run daily task + const report = await generateDailyReport() + + return { data: { generated: true }, error: null } +} +``` + +Configure in your deployment platform (AWS EventBridge, Vercel Cron, etc.) + +## CLI Commands + +```bash +# Create function +bb function create + +# Run locally +bb function dev + +# Build function +bb function build + +# Deploy function +bb function deploy + +# List functions +bb function list + +# View logs +bb function logs +``` + +## Best Practices + +1. **Single responsibility** - Each function does one thing +2. **Stateless** - Don't rely on local storage +3. **Idempotent** - Safe to retry +4. **Fast initialization** - Minimize cold start +5. 
**Proper errors** - Return structured errors + +## Limitations + +| Platform | Bundle Size | Timeout | +|----------|-------------|---------| +| AWS Lambda | 50MB zipped | 15 min | +| Cloudflare Workers | 10MB | 30 sec | +| Vercel | 50MB | 10 sec | +| Netlify | 50MB | 10 sec | + +## Related + +- [Configuration](../getting-started/configuration.md) - Function config +- [Webhooks](./webhooks.md) - Event-driven calls +- [CLI Commands](../api-reference/cli-commands.md) - CLI reference diff --git a/docs/features/graphql.md b/docs/features/graphql.md new file mode 100644 index 0000000..bd9c65c --- /dev/null +++ b/docs/features/graphql.md @@ -0,0 +1,321 @@ +# GraphQL + +BetterBase provides automatic GraphQL API generation from your Drizzle ORM schema, including queries, mutations, and subscriptions. + +## Features + +- **Auto-Generated Schema** - GraphQL types from database schema +- **Queries** - List, filter, paginate records +- **Mutations** - Create, update, delete operations +- **Subscriptions** - Real-time updates via WebSocket +- **Aggregations** - Count, sum, average operations +- **Relationships** - Auto-detect foreign key relations + +## Quick Setup + +Enable GraphQL in configuration: + +```typescript +// betterbase.config.ts +export default defineConfig({ + graphql: { + enabled: true, + playground: process.env.NODE_ENV !== 'production' + } +}) +``` + +Access the GraphQL playground at `http://localhost:3000/graphql` + +## Generated API + +Given a schema with `users` and `posts` tables, BetterBase generates: + +### Queries + +```graphql +# List all users with pagination +query { + users(offset: 0, limit: 20, order_by: { createdAt: desc }) { + id + name + email + posts { + id + title + } + } +} + +# Get single user by primary key +query { + users_by_pk(id: "user-123") { + id + name + email + } +} + +# Aggregate queries +query { + users_aggregate { + aggregate { + count + } + } +} +``` + +### Mutations + +```graphql +# Insert single record +mutation { + 
insert_users_one(object: { name: "John", email: "john@example.com" }) { + id + name + email + } +} + +# Insert multiple records +mutation { + insert_users(objects: [ + { name: "Alice", email: "alice@example.com" }, + { name: "Bob", email: "bob@example.com" } + ]) { + returning { + id + name + } + } +} + +# Update by primary key +mutation { + update_users_by_pk( + pk_columns: { id: "user-123" } + _set: { name: "John Updated" } + ) { + id + name + } +} + +# Delete by primary key +mutation { + delete_users_by_pk(pk_columns: { id: "user-123" }) { + id + name + } +} +``` + +### Subscriptions + +```graphql +# Subscribe to new records +subscription { + users_insert(where: { active: { _eq: true } }) { + id + name + email + } +} + +# Subscribe to updates +subscription { + users_update { + id + name + old { name } + new { name } + } +} + +# Subscribe to deletes +subscription { + users_delete { + id + name + } +} +``` + +## Filtering + +All list queries support filtering: + +```graphql +query { + posts( + where: { + published: { _eq: true }, + title: { _like: "%tutorial%" } + } + ) { + id + title + } +} +``` + +### Operators + +| Operator | Description | +|----------|-------------| +| `_eq` | Equals | +| `_neq` | Not equals | +| `_gt` | Greater than | +| `_gte` | Greater or equal | +| `_lt` | Less than | +| `_lte` | Less or equal | +| `_like` | Pattern match | +| `_ilike` | Case-insensitive match | +| `_in` | In array | +| `_is_null` | Is null | + +## Ordering + +```graphql +query { + posts(order_by: { createdAt: desc, title: asc }) { + id + title + createdAt + } +} +``` + +## Pagination + +```graphql +query { + posts(offset: 20, limit: 10) { + id + title + } +} +``` + +## Relationships + +Foreign keys automatically create relationship fields: + +```graphql +query { + users { + id + name + posts(where: { published: { _eq: true } }) { + id + title + } + } +} +``` + +## Aggregations + +```graphql +query { + posts_aggregate(where: { published: { _eq: true } }) { + aggregate { + 
count + sum { viewCount } + avg { viewCount } + max { createdAt } + min { createdAt } + } + } +} +``` + +## Using the Client SDK + +```typescript +import { createClient } from '@betterbase/client' + +const client = createClient({ url: 'http://localhost:3000' }) + +// Execute GraphQL query +const { data, error } = await client.graphql.query(` + query GetUsers { + users { + id + name + email + } + } +`) + +// Execute mutation +const { data, error } = await client.graphql.mutation(` + mutation CreateUser($name: String!, $email: String!) { + insert_users_one(object: { name: $name, email: $email }) { + id + name + } + } +`, { name: 'John', email: 'john@example.com' }) +``` + +## Programmatic Usage + +```typescript +import { + generateGraphQLSchema, + generateResolvers, + createGraphQLServer +} from '@betterbase/core/graphql' +import * as schema from './db/schema' + +// Generate schema +const typeDefs = generateGraphQLSchema(schema) + +// Generate resolvers +const resolvers = generateResolvers(schema) + +// Create server +const graphqlServer = createGraphQLServer({ + schema: typeDefs, + resolvers, + context: async (c) => ({ db, user: c.get('user') }) +}) + +// Mount in Hono +app.route('/graphql', graphqlServer) +``` + +## SDL Export + +Export schema for federation or documentation: + +```typescript +import { exportSDL } from '@betterbase/core/graphql' + +const sdl = exportSDL(typeDefs) +console.log(sdl) +``` + +## Security + +1. **Enable RLS** - Always enable Row Level Security +2. **Limit introspection** - Disable in production if needed +3. **Query complexity** - Set complexity limits +4. **Depth limits** - Prevent deeply nested queries +5. **Rate limiting** - Add HTTP-level rate limiting + +## Best Practices + +1. **Use proper indexes** - Index filtered columns +2. **Limit results** - Use pagination +3. **Filter at query level** - Reduce data transfer +4. **Cache responses** - Consider HTTP caching +5. 
**Monitor queries** - Track slow queries + +## Related + +- [Database](./database.md) - Schema definition +- [Realtime](./realtime.md) - Real-time subscriptions +- [Client SDK](../api-reference/client-sdk.md) - Client GraphQL API diff --git a/docs/features/realtime.md b/docs/features/realtime.md new file mode 100644 index 0000000..2df864e --- /dev/null +++ b/docs/features/realtime.md @@ -0,0 +1,247 @@ +# Realtime + +BetterBase provides real-time subscriptions via WebSockets, allowing clients to receive live updates when database records change. + +## Features + +- **WebSocket Subscriptions** - Live database change events +- **Postgres Changes** - Listen to INSERT, UPDATE, DELETE events +- **Presence** - Track user presence in applications +- **Broadcast** - Send arbitrary messages to connected clients +- **RLS Integration** - Respect row-level security in subscriptions + +## Quick Setup + +Enable realtime in configuration: + +```typescript +// betterbase.config.ts +export default defineConfig({ + realtime: { + enabled: true, + tables: ['posts', 'comments'] + } +}) +``` + +## Using the Client SDK + +### Subscribe to Changes + +```typescript +import { createClient } from '@betterbase/client' + +const client = createClient({ url: 'http://localhost:3000' }) + +// Subscribe to all changes on a table +const channel = client.channel('public:posts') + +channel + .on('postgres_changes', { event: 'INSERT', table: 'posts' }, + (payload) => { + console.log('New post:', payload.new) + } + ) + .on('postgres_changes', { event: 'UPDATE', table: 'posts' }, + (payload) => { + console.log('Updated post:', payload.new) + } + ) + .on('postgres_changes', { event: 'DELETE', table: 'posts' }, + (payload) => { + console.log('Deleted post:', payload.old) + } + ) + .subscribe() + +// Unsubscribe when done +channel.unsubscribe() +``` + +### Subscribe to Specific Rows + +```typescript +// Subscribe to changes for a specific user +const channel = client.channel('user-updates') + +channel + 
.on('postgres_changes', { + event: '*', + table: 'posts', + schema: 'public', + filter: 'userId=eq.user-123' + }, (payload) => { + console.log('User post changed:', payload) + }) + .subscribe() +``` + +### Filter Syntax + +| Filter | Description | +|--------|-------------| +| `column=eq.value` | Equals | +| `column=neq.value` | Not equals | +| `column=gt.value` | Greater than | +| `column=gte.value` | Greater or equal | +| `column=lt.value` | Less than | +| `column=lte.value` | Less or equal | +| `column=like.pattern` | Pattern match | +| `column=in.(a,b,c)` | In array | + +## Presence + +Track which users are online: + +```typescript +// Track presence +const channel = client.channel('room-1') + +// Show who's online +channel.track({ + user_id: 'user-123', + user_name: 'John', + online_at: new Date().toISOString() +}) + +// Listen for presence changes +channel.on('presence', { event: 'sync' }, () => { + const state = channel.presenceState() + console.log('Online users:', state) +}) + +channel.on('presence', { event: 'join' }, ({ key, newPresences }) => { + console.log('User joined:', newPresences) +}) + +channel.on('presence', { event: 'leave' }, ({ key, leftPresences }) => { + console.log('User left:', leftPresences) +}) + +channel.subscribe() +``` + +## Broadcast + +Send arbitrary messages: + +```typescript +// Send a broadcast message +channel.send({ + type: 'broadcast', + event: 'typing', + payload: { userId: 'user-123', isTyping: true } +}) + +// Receive broadcasts +channel.on('broadcast', { event: 'typing' }, (payload) => { + console.log('User typing:', payload) +}) +``` + +## Server-Side Publishing + +Publish events from your server: + +```typescript +import { pubsub } from '@betterbase/core/realtime' + +// Publish to a channel +await pubsub.publish('notifications', { + type: 'new_message', + data: { message: 'Hello!' 
} +}) + +// Listen for database changes +import { bridgeRealtimeToGraphQL } from '@betterbase/core/graphql' + +const bridge = bridgeRealtimeToGraphQL({ + db, + pubsub, + schema +}) + +bridge.start() +``` + +## Real-time with GraphQL + +Combine realtime with GraphQL subscriptions: + +```graphql +subscription OnPostUpdate($postId: Int!) { + posts_update( + where: { id: { _eq: $postId } } + ) { + id + title + content + updatedAt + } +} +``` + +## Configuration + +```typescript +// betterbase.config.ts +export default defineConfig({ + realtime: { + enabled: true, + tables: ['posts', 'comments', 'users'], + events: ['INSERT', 'UPDATE', 'DELETE'], + // Per-table config + tableConfig: { + posts: { + enablePresence: true, + enableBroadcast: true + } + } + } +}) +``` + +## Connection Handling + +```typescript +// Check connection status +const channel = client.channel('test') + +channel.on('status', (status) => { + if (status === 'connected') { + console.log('Connected to realtime') + } else if (status === 'disconnected') { + console.log('Disconnected') + } else if (status === 'closing') { + console.log('Connection closing') + } +}) + +channel.subscribe() +``` + +## Error Handling + +```typescript +const channel = client.channel('test') + +try { + await channel.subscribe() +} catch (error) { + console.error('Subscription failed:', error) +} +``` + +## Best Practices + +1. **Subscribe to specific events** - Don't subscribe to all changes +2. **Use filters** - Filter at subscription level for efficiency +3. **Clean up subscriptions** - Unsubscribe when done +4. **Handle reconnection** - Implement reconnection logic +5. 
**Throttle updates** - Consider debouncing rapid updates + +## Related + +- [Client SDK](../api-reference/client-sdk.md) - Realtime API +- [GraphQL](./graphql.md) - GraphQL subscriptions +- [Database](./database.md) - Database changes diff --git a/docs/features/rls.md b/docs/features/rls.md new file mode 100644 index 0000000..21f766f --- /dev/null +++ b/docs/features/rls.md @@ -0,0 +1,205 @@ +# Row Level Security (RLS) + +BetterBase provides Row Level Security for fine-grained access control at the database level. + +## Overview + +RLS ensures users can only access data they're authorized to see. Policies are enforced at the database level, providing security even if application-level checks are bypassed. + +## Quick Setup + +Enable RLS on tables: + +```bash +# Enable RLS for a table +bb rls enable --table users + +# Disable RLS for a table +bb rls disable --table users +``` + +## Creating Policies + +```bash +# Create a policy +bb rls create \ + --table posts \ + --name users-own-posts \ + --command SELECT \ + --check "user_id = auth.uid()" +``` + +## Policy Structure + +```typescript +// PostgreSQL policy +CREATE POLICY "users-own-posts" ON posts + FOR SELECT + USING (user_id = auth.uid()); + +CREATE POLICY "users-own-posts" ON posts + FOR INSERT + WITH CHECK (user_id = auth.uid()); + +CREATE POLICY "users-own-posts" ON posts + FOR UPDATE + USING (user_id = auth.uid()) + WITH CHECK (user_id = auth.uid()); + +CREATE POLICY "users-own-posts" ON posts + FOR DELETE + USING (user_id = auth.uid()); +``` + +## Policy Commands + +| Command | Description | +|---------|-------------| +| `SELECT` | Control read access | +| `INSERT` | Control new record creation | +| `UPDATE` | Control record updates | +| `DELETE` | Control record deletion | +| `ALL` | All operations | + +## Expression Variables + +Available in policy expressions: + +| Variable | Description | +|----------|-------------| +| `auth.uid()` | Current user ID | +| `auth.role()` | User role (admin, user) | +| 
`auth.email()` | User email | +| `auth.jwt()` | Full JWT claims | + +## Common Patterns + +### Owner-Based Access + +```sql +-- Users can only see their own posts +CREATE POLICY "users-own-posts" ON posts + FOR SELECT + USING (user_id = auth.uid()); +``` + +### Public Read Access + +```sql +-- Anyone can read published posts +CREATE POLICY "public-posts" ON posts + FOR SELECT + USING (published = true); +``` + +### Role-Based Access + +```sql +-- Admins can see all users +CREATE POLICY "admins-see-all" ON users + FOR SELECT + USING (auth.role() = 'admin'); +``` + +### Team-Based Access + +```sql +-- Users can only see their team's data +CREATE POLICY "team-access" ON documents + FOR SELECT + USING (team_id IN ( + SELECT team_id FROM team_members WHERE user_id = auth.uid() + )); +``` + +## Using with BetterBase + +### In REST API + +```typescript +// With RLS, these queries are automatically filtered +const posts = await client + .from('posts') + .select() + // RLS adds: WHERE user_id = auth.uid() +``` + +### In GraphQL + +```graphql +# RLS automatically filters results +query { + posts { + id + title + # Only returns posts user owns + } +} +``` + +### In Functions + +```typescript +export default async function handler(event) { + // RLS automatically applies + const posts = await db.select().from(posts) + // Returns only user's posts +} +``` + +## Testing Policies + +```bash +# Test RLS policies +bb rls test --table posts + +# Test with specific user +bb rls test --table posts --user-id user-123 +``` + +## Configuration + +```typescript +// betterbase.config.ts +export default defineConfig({ + rls: { + enabled: true, + auditLog: true + } +}) +``` + +With audit logging, all policy evaluations are logged. + +## Best Practices + +1. **Enable RLS on all tables** - Start with RLS enabled +2. **Use specific policies** - Don't use overly permissive policies +3. **Test policies** - Verify policies work as expected +4. **Audit logs** - Enable logging for production +5. 
**Separate read/write policies** - Fine-tune separately + +## Migration + +Add RLS during migrations: + +```bash +# Add policy in migration +bb migrate generate add-rls-policies +``` + +## Troubleshooting + +```bash +# List all policies +bb rls list + +# Check policy status +bb rls status --table users +``` + +## Related + +- [Database](./database.md) - Schema and tables +- [Authentication](./authentication.md) - User authentication +- [CLI Commands](../api-reference/cli-commands.md) - RLS CLI diff --git a/docs/features/storage.md b/docs/features/storage.md new file mode 100644 index 0000000..593c430 --- /dev/null +++ b/docs/features/storage.md @@ -0,0 +1,230 @@ +# Storage + +BetterBase provides file storage with S3-compatible API and built-in policy engine. + +## Features + +- **S3-Compatible** - Works with AWS S3, Cloudflare R2, Backblaze B2, MinIO +- **Policy Engine** - Fine-grained access control +- **Image Transformations** - On-the-fly resizing, cropping, format conversion +- **Signed URLs** - Secure access to private files +- **Bucket Management** - Multiple buckets per project + +## Quick Setup + +```bash +# Initialize storage +bb storage init +``` + +## Configuration + +```typescript +// betterbase.config.ts +export default defineConfig({ + storage: { + provider: 's3', // s3, r2, backblaze, minio, managed + bucket: 'my-app-uploads', + region: 'us-west-2', + policies: [ + { + bucket: 'avatars', + operation: 'upload', + expression: 'auth.uid() != null' + }, + { + bucket: 'avatars', + operation: 'download', + expression: 'true' + } + ] + } +}) +``` + +## Using the Client SDK + +### Upload File + +```typescript +import { createClient } from '@betterbase/client' + +const client = createClient({ url: 'http://localhost:3000' }) + +// Upload file +const { data, error } = await client.storage.upload('avatars', 'user.png', fileBlob) + +// Upload with custom path +const { data, error } = await client.storage.upload('documents', 'folder/file.pdf', file) +``` + +### 
Download File + +```typescript +// Download file +const { data, error } = await client.storage.download('avatars/user.png') + +// Get blob +const blob = data +``` + +### Get Public URL + +```typescript +// Get public URL for a file +const { data: { url } } = client.storage.getPublicUrl('avatars', 'user.png') +``` + +### Delete File + +```typescript +// Delete file +await client.storage.remove('avatars/user.png') +``` + +### List Files + +```typescript +// List files in bucket +const { data, error } = await client.storage.list('avatars') +``` + +## Server-Side Usage + +```typescript +import { storage } from '@betterbase/core/storage' + +// Upload +await storage.upload('avatars', 'user.png', fileBuffer) + +// Download +const file = await storage.download('avatars', 'user.png') + +// Generate signed URL +const signedUrl = await storage.signUrl('avatars', 'user.png', { + expiresIn: 3600 // seconds +}) + +// Delete +await storage.remove('avatars', 'user.png') +``` + +## Image Transformations + +Transform images on-the-fly: + +```typescript +// Resize +const url = client.storage.getPublicUrl('images', 'photo.jpg', { + transform: { + width: 800, + height: 600, + fit: 'cover' + } +}) + +// Crop +const url = client.storage.getPublicUrl('images', 'photo.jpg', { + transform: { + width: 200, + height: 200, + fit: 'crop', + position: 'center' + } +}) + +// Format conversion +const url = client.storage.getPublicUrl('images', 'photo.jpg', { + transform: { + format: 'webp', + quality: 80 + } +}) +``` + +Transform options: +- `width`, `height` - Target dimensions +- `fit` - `cover`, `contain`, `fill`, `inside`, `outside` +- `position` - `top`, `bottom`, `left`, `right`, `center` +- `format` - `webp`, `jpg`, `png`, `avif` +- `quality` - 1-100 + +## Storage Policies + +Define access policies in configuration: + +```typescript +storage: { + policies: [ + // Allow authenticated users to upload avatars + { + bucket: 'avatars', + operation: 'upload', + expression: 'auth.uid() != null' + 
}, + // Allow public read access + { + bucket: 'avatars', + operation: 'download', + expression: 'true' + }, + // Only owner can delete + { + bucket: 'documents', + operation: 'delete', + expression: 'auth.uid() == resource.userId' + } + ] +} +``` + +Policy expressions support: +- `auth.uid()` - Current user ID +- `auth.role()` - User role (admin, user) +- `resource.*` - File metadata + +## Environment Variables + +```bash +# Storage provider +STORAGE_PROVIDER=s3 + +# S3 configuration +STORAGE_BUCKET=my-bucket +AWS_REGION=us-west-2 +AWS_ACCESS_KEY_ID=your-key +AWS_SECRET_ACCESS_KEY=your-secret + +# Or use R2 (Cloudflare) +STORAGE_PROVIDER=r2 +``` + +## CLI Commands + +```bash +# Initialize storage +bb storage init + +# List buckets +bb storage list + +# Upload file +bb storage upload ./image.jpg -b avatars + +# Download file +bb storage download avatars/image.jpg -o ./downloaded.jpg +``` + +## Best Practices + +1. **Use policies** - Always define access policies +2. **Validate file types** - Restrict allowed MIME types +3. **Set size limits** - Configure max file size +4. **Use CDN** - Consider CDN for public assets +5. **Compress images** - Use transformations for optimization + +## Related + +- [Configuration](../getting-started/configuration.md) - Storage config +- [Client SDK](../api-reference/client-sdk.md) - Storage API +- [Functions](./functions.md) - Process uploaded files diff --git a/docs/features/webhooks.md b/docs/features/webhooks.md new file mode 100644 index 0000000..c277623 --- /dev/null +++ b/docs/features/webhooks.md @@ -0,0 +1,177 @@ +# Webhooks + +BetterBase provides an event-driven webhook system for notifying external services when database changes occur. 
+ +## Features + +- **Event Types** - INSERT, UPDATE, DELETE triggers +- **Signed Payloads** - HMAC signatures for verification +- **Retry Logic** - Automatic retry with exponential backoff +- **Filtering** - Trigger webhooks on specific conditions +- **Logs** - View delivery history and status + +## Configuration + +Define webhooks in `betterbase.config.ts`: + +```typescript +export default defineConfig({ + webhooks: [ + { + id: 'user-notifications', + table: 'users', + events: ['INSERT', 'UPDATE'], + url: process.env.USER_WEBHOOK_URL, + secret: process.env.USER_WEBHOOK_SECRET, + enabled: true + }, + { + id: 'order-events', + table: 'orders', + events: ['INSERT', 'UPDATE', 'DELETE'], + url: process.env.ORDER_WEBHOOK_URL, + secret: process.env.ORDER_WEBHOOK_SECRET + } + ] +}) +``` + +## Payload Format + +```json +{ + "event": "INSERT", + "table": "users", + "record": { + "id": "user-123", + "name": "John Doe", + "email": "john@example.com", + "createdAt": "2024-01-15T10:30:00Z" + }, + "old_record": null, + "timestamp": "2024-01-15T10:30:00Z" +} +``` + +### Payload Fields + +| Field | Description | +|-------|-------------| +| `event` | Event type: INSERT, UPDATE, DELETE | +| `table` | Database table name | +| `record` | New/updated record | +| `old_record` | Previous record (UPDATE/DELETE only) | +| `timestamp` | ISO timestamp | + +## Verifying Signatures + +Webhooks include an `X-Webhook-Signature` header: + +```typescript +import { createHmac } from 'crypto' + +function verifyWebhookSignature( + payload: string, + signature: string, + secret: string +): boolean { + const expectedSignature = createHmac('sha256', secret) + .update(payload) + .digest('hex') + + return signature === expectedSignature +} + +// In your webhook handler +app.post('/webhook', async (c) => { + const payload = await c.req.text() + const signature = c.req.header('X-Webhook-Signature') + + if (!verifyWebhookSignature(payload, signature, process.env.WEBHOOK_SECRET)) { + return c.json({ error: 
'Invalid signature' }, 401) + } + + const event = JSON.parse(payload) + // Process the event +}) +``` + +## Retry Configuration + +```typescript +webhooks: [ + { + id: 'important-events', + table: 'orders', + events: ['INSERT'], + url: process.env.ORDER_WEBHOOK_URL, + secret: process.env.ORDER_WEBHOOK_SECRET, + retry: { + maxAttempts: 5, + retryInterval: 1000, // Start at 1 second + maxInterval: 30000 // Cap at 30 seconds + } + } +] +``` + +Retry behavior: +- Exponential backoff between attempts +- Failed deliveries logged for review +- Manual retry available via CLI + +## Using the CLI + +```bash +# List webhooks +bb webhook list + +# Test a webhook +bb webhook test user-notifications + +# View webhook logs +bb webhook logs user-notifications + +# View recent logs +bb webhook logs user-notifications -l 50 +``` + +## Server-Side Triggering + +Trigger webhooks manually: + +```typescript +import { triggerWebhook } from '@betterbase/core/webhooks' + +await triggerWebhook({ + id: 'custom-event', + table: 'orders', + event: 'INSERT', + record: newOrder, + timestamp: new Date().toISOString() +}) +``` + +## Best Practices + +1. **Verify signatures** - Always verify webhook signatures +2. **Respond quickly** - Acknowledge receipt immediately +3. **Queue processing** - Process events asynchronously +4. **Idempotent handlers** - Handle duplicate events +5. 
**Log everything** - Track all webhook activity + +## Environment Variables + +```bash +# Webhook URLs and secrets +USER_WEBHOOK_URL=https://hooks.example.com/user +USER_WEBHOOK_SECRET=your-webhook-secret +ORDER_WEBHOOK_URL=https://hooks.example.com/orders +ORDER_WEBHOOK_SECRET=your-order-secret +``` + +## Related + +- [Configuration](../getting-started/configuration.md) - Webhook config +- [Functions](./functions.md) - Process webhooks with functions +- [CLI Commands](../api-reference/cli-commands.md) - Webhook CLI diff --git a/docs/getting-started/configuration.md b/docs/getting-started/configuration.md new file mode 100644 index 0000000..4aee338 --- /dev/null +++ b/docs/getting-started/configuration.md @@ -0,0 +1,248 @@ +# Configuration + +Learn how to configure BetterBase for your project's specific needs. + +## Configuration File + +BetterBase uses `betterbase.config.ts` for project configuration: + +```typescript +import { defineConfig } from '@betterbase/core' + +export default defineConfig({ + project: { name: 'my-app' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL + } +}) +``` + +## Project Settings + +```typescript +project: { + name: string // Required: Human-readable project name +} +``` + +## Database Provider + +```typescript +provider: { + type: 'postgres' | 'mysql' | 'sqlite' | 'neon' | 'turso' | 'planetscale' | 'supabase' | 'managed' + connectionString?: string // PostgreSQL, MySQL, Neon, PlanetScale, Supabase + url?: string // Turso: libSQL connection URL + authToken?: string // Turso: Auth token for managed DB +} +``` + +### Provider-Specific Requirements + +| Provider | Required Fields | +|----------|-----------------| +| postgres, mysql, neon, planetscale, supabase | `connectionString` | +| turso | `url`, `authToken` | +| managed | No database needed | + +## Storage Configuration + +```typescript +storage: { + provider: 's3' | 'r2' | 'backblaze' | 'minio' | 'managed' + bucket: string + region?: string + 
endpoint?: string + policies?: StoragePolicy[] +} +``` + +### Storage Policies + +```typescript +storage: { + policies: [ + { + bucket: 'avatars', + operation: 'upload' | 'download' | 'list' | 'delete' | '*', + expression: 'auth.uid() != null' // RLS-like expression + } + ] +} +``` + +## Webhooks + +```typescript +webhooks: [ + { + id: 'user-notifications', + table: 'users', + events: ['INSERT', 'UPDATE', 'DELETE'], + url: process.env.USER_WEBHOOK_URL, + secret: process.env.USER_WEBHOOK_SECRET, + enabled: true + } +] +``` + +## GraphQL + +```typescript +graphql: { + enabled: true, + playground: process.env.NODE_ENV !== 'production' +} +``` + +## Vector Search + +```typescript +vector: { + enabled: false, + provider: 'openai' | 'cohere' | 'huggingface' | 'custom', + apiKey?: string, + model?: string, + dimensions?: number, + endpoint?: string +} +``` + +## Auto-REST + +```typescript +autoRest: { + enabled: true, + excludeTables: ['schema_migrations', 'audit_log'], + tables: { + posts: { + advancedFilters: true, + maxLimit: 1000 + }, + users: { + advancedFilters: false, + maxLimit: 100 + } + } +} +``` + +## Branching + +```typescript +branching: { + enabled: true, + maxPreviews: 10, + defaultSleepTimeout: 3600, // seconds + storageEnabled: true +} +``` + +## Environment Variables + +Required format for environment variable references: + +```text +process.env.VARIABLE_NAME +``` + +### Common Variables + +```bash +# Database +DATABASE_URL=postgresql://user:password@localhost:5432/db + +# Auth +AUTH_SECRET=your-secret-key-min-32-chars +AUTH_URL=http://localhost:3000 + +# Storage +STORAGE_PROVIDER=s3 +STORAGE_BUCKET=my-bucket +AWS_REGION=us-west-2 +AWS_ACCESS_KEY_ID=your-key +AWS_SECRET_ACCESS_KEY=your-secret + +# Vector +OPENAI_API_KEY=sk-... 
+``` + +## Examples + +### Minimal Configuration + +```typescript +export default defineConfig({ + project: { name: 'my-app' }, + provider: { type: 'managed' } +}) +``` + +### PostgreSQL with S3 + +```typescript +export default defineConfig({ + project: { name: 'prod-app' }, + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL + }, + storage: { + provider: 's3', + bucket: process.env.STORAGE_BUCKET, + region: 'us-west-2' + }, + webhooks: [ + { + id: 'order-events', + table: 'orders', + events: ['INSERT', 'UPDATE', 'DELETE'], + url: process.env.ORDER_WEBHOOK_URL, + secret: process.env.ORDER_WEBHOOK_SECRET + } + ], + branching: { + enabled: true, + maxPreviews: 15 + } +}) +``` + +### Turso with Cloudflare R2 + +```typescript +export default defineConfig({ + project: { name: 'edge-app' }, + provider: { + type: 'turso', + url: process.env.TURSO_DATABASE_URL, + authToken: process.env.TURSO_AUTH_TOKEN + }, + storage: { + provider: 'r2', + bucket: process.env.STORAGE_BUCKET + }, + vector: { + enabled: true, + provider: 'openai', + apiKey: process.env.OPENAI_API_KEY + } +}) +``` + +## Validation + +Configuration is validated at startup. Errors provide clear guidance: + +``` +Invalid BetterBase configuration: provider.connectionString: +Provider type "postgres" requires "connectionString" to be present and non-empty +``` + +## Best Practices + +1. **Use environment variables for secrets** - Never hardcode API keys +2. **Validate in CI** - Check configuration before deployment +3. **Use sensible defaults** - Make common configurations work out-of-box +4. **Document required variables** - Create `.env.example` file +5. 
**Separate concerns** - Don't mix database, storage, and feature flags diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md new file mode 100644 index 0000000..c28cb2f --- /dev/null +++ b/docs/getting-started/installation.md @@ -0,0 +1,183 @@ +# Installation + +This guide covers how to install and set up BetterBase in your development environment. + +## Prerequisites + +Before installing BetterBase, ensure you have the following: + +- **Bun** (v1.0+) - The JavaScript runtime powering BetterBase +- **Git** - For version control +- **Node.js** (v18+) - Required for some optional tools + +### Installing Bun + +BetterBase requires Bun. Install it using one of the following methods: + +```bash +# macOS/Linux (via curl) +curl -fsSL https://bun.sh/install | bash + +# Windows (via PowerShell) +powershell -Command "irm bun.sh/install.ps1 | iex" + +# Via npm +npm install -g bun + +# Via brew +brew install bun +``` + +Verify the installation: + +```bash +bun --version +``` + +## Installing BetterBase CLI + +The BetterBase CLI (`bb`) is your primary tool for managing projects and deployments. + +### Global Installation + +```bash +# Install globally via Bun +bun add -g @betterbase/cli + +# Verify installation +bb --version +``` + +### Local Installation + +Alternatively, install locally in your project: + +```bash +# Add as dev dependency +bun add -D @betterbase/cli + +# Run via npx +npx bb --version +``` + +Or add to your `package.json` scripts: + +```json +{ + "scripts": { + "bb": "bb" + } +} +``` + +Then run with `bun run bb`. 
+ +## Installing Core Packages + +For backend development, install the core package: + +```bash +bun add @betterbase/core +``` + +For frontend development, install the client SDK: + +```bash +bun add @betterbase/client +``` + +## Project Initialization + +Create your first BetterBase project: + +```bash +# Create a new project +bb init my-app + +# Navigate to the project +cd my-app + +# Install dependencies +bun install +``` + +This creates the following project structure: + +``` +my-app/ +├── betterbase.config.ts # Project configuration +├── drizzle.config.ts # Database configuration +├── src/ +│ ├── db/ +│ │ ├── schema.ts # Database schema +│ │ └── migrate.ts # Migration utilities +│ ├── functions/ # Serverless functions +│ ├── auth/ # Authentication setup +│ └── routes/ # API routes +└── package.json +``` + +## Environment Setup + +### Development Environment + +For local development, BetterBase uses SQLite by default: + +```bash +# Start development server +bb dev +``` + +Your API will be available at `http://localhost:3000`. + +### Production Environment + +Set up environment variables for production: + +```bash +# Database +DATABASE_URL=postgresql://user:password@host:5432/db + +# Authentication +AUTH_SECRET=your-secret-key-min-32-chars +AUTH_URL=https://your-domain.com + +# Storage (optional) +STORAGE_PROVIDER=s3 +STORAGE_BUCKET=your-bucket +AWS_ACCESS_KEY_ID=your-key +AWS_SECRET_ACCESS_KEY=your-secret +``` + +See the [Configuration Guide](./configuration.md) for all available options. 
+ +## Supported Databases + +BetterBase supports multiple database providers: + +| Provider | Use Case | Connection String | +|----------|----------|-------------------| +| **SQLite** | Local development | `file:./dev.db` | +| **PostgreSQL** | Production | `postgres://...` | +| **Neon** | Serverless | `postgres://...` | +| **Turso** | Edge/Serverless | libSQL URL | +| **PlanetScale** | Serverless MySQL | MySQL URL | +| **Supabase** | Supabase hosted | `postgres://...` | + +## Verifying Your Setup + +Run the health check to verify everything is working: + +```bash +# The development server should show: +# - http://localhost:3000 - API root +# - http://localhost:3000/graphql - GraphQL playground +# - http://localhost:3000/api/auth/* - Auth endpoints +# - http://localhost:3000/storage/* - Storage endpoints +``` + +## Next Steps + +- [Quick Start Guide](./quick-start.md) - Get running in 5 minutes +- [Your First Project](./your-first-project.md) - Build a complete application +- [Configuration](./configuration.md) - Customize your setup diff --git a/docs/getting-started/quick-start.md b/docs/getting-started/quick-start.md new file mode 100644 index 0000000..2ee5a48 --- /dev/null +++ b/docs/getting-started/quick-start.md @@ -0,0 +1,168 @@ +# Quick Start + +Get up and running with BetterBase in under 5 minutes. 
+ +## Prerequisites + +- Bun v1.0+ installed +- BetterBase CLI installed (`bun add -g @betterbase/cli`) + +## Step 1: Create a New Project + +```bash +bb init my-first-app +cd my-first-app +``` + +## Step 2: Install Dependencies + +```bash +bun install +``` + +## Step 3: Define Your Schema + +Edit `src/db/schema.ts` to define your database tables: + +```typescript +import { sqliteTable, text, integer } from 'drizzle-orm/sqlite-core' +import { relations } from 'drizzle-orm' + +export const users = sqliteTable('users', { + id: text('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull().unique(), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) + +export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + title: text('title').notNull(), + content: text('content'), + userId: text('user_id').references(() => users.id), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) + +export const usersRelations = relations(users, ({ many }) => ({ + posts: many(posts) +})) + +export const postsRelations = relations(posts, ({ one }) => ({ + user: one(users, { + fields: [posts.userId], + references: [users.id] + }) +})) +``` + +## Step 4: Generate and Apply Migrations + +```bash +bun run db:generate +bun run db:push +``` + +## Step 5: Start the Development Server + +```bash +bun run dev +``` + +Your API is now running at `http://localhost:3000`. + +## What's Available Out of the Box + +### REST API Endpoints + +BetterBase automatically generates REST endpoints based on your schema: + +| Method | Endpoint | Description | +|--------|----------|-------------| +| `GET` | `/api/users` | List all users | +| `GET` | `/api/users/:id` | Get user by ID | +| `POST` | `/api/users` | Create new user | +| `PATCH` | `/api/users/:id` | Update user | +| `DELETE` | `/api/users/:id` | Delete user | + +Same endpoints work for `posts` table. 
+ +### GraphQL API + +Access the GraphQL playground at `http://localhost:3000/graphql` + +```graphql +# Query users +query { + users { + id + name + email + posts { + id + title + } + } +} +``` + +### Authentication + +Built-in auth endpoints at `/api/auth/*`: + +- `POST /api/auth/signup` - Register new user +- `POST /api/auth/signin` - Sign in +- `POST /api/auth/signout` - Sign out +- `GET /api/auth/session` - Get current session + +### Storage + +Upload and manage files at `/storage/*`: + +```typescript +// Using the client SDK +const { data } = await client.storage.upload('avatars', file) +const url = client.storage.getPublicUrl('avatars', file.name) +``` + +## Testing Your API + +### Using cURL + +```bash +# Create a user +curl -X POST http://localhost:3000/api/users \ + -H "Content-Type: application/json" \ + -d '{"id": "user-1", "name": "John", "email": "john@example.com"}' + +# Get all users +curl http://localhost:3000/api/users +``` + +### Using the Client SDK + +```typescript +import { createClient } from '@betterbase/client' + +const client = createClient({ + url: 'http://localhost:3000' +}) + +// Sign up +await client.auth.signUp({ + email: 'user@example.com', + password: 'secure-password', + name: 'John Doe' +}) + +// Create a post +await client.from('posts').insert({ + title: 'My First Post', + content: 'Hello, BetterBase!' +}) +``` + +## Next Steps + +- [Your First Project](./your-first-project.md) - Build a complete application +- [Configuration](./configuration.md) - Customize your setup +- [Features](../features/authentication.md) - Learn about all features diff --git a/docs/getting-started/your-first-project.md b/docs/getting-started/your-first-project.md new file mode 100644 index 0000000..78fe17b --- /dev/null +++ b/docs/getting-started/your-first-project.md @@ -0,0 +1,332 @@ +# Your First Project + +Build a complete blog application using BetterBase. 
+ +## Project Overview + +We'll build a simple blog with: +- User authentication +- Create, read, update, delete posts +- Real-time updates when posts change + +## Step 1: Initialize the Project + +```bash +bb init my-blog +cd my-blog +bun install +``` + +## Step 2: Set Up Authentication + +```bash +bb auth setup +``` + +This creates `src/auth/` with BetterAuth configuration. + +## Step 3: Define the Database Schema + +Update `src/db/schema.ts`: + +```typescript +import { sqliteTable, text, integer, boolean } from 'drizzle-orm/sqlite-core' +import { relations } from 'drizzle-orm' + +// Users table (managed by BetterAuth) +export const users = sqliteTable('users', { + id: text('id').primaryKey(), + name: text('name'), + email: text('email').notNull().unique(), + image: text('image'), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()) +}) + +// Posts table +export const posts = sqliteTable('posts', { + id: text('id').primaryKey(), + title: text('title').notNull(), + content: text('content').notNull(), + published: boolean('published').default(false), + userId: text('user_id').notNull().references(() => users.id), + createdAt: integer('created_at', { mode: 'timestamp' }).default(new Date()), + updatedAt: integer('updated_at', { mode: 'timestamp' }).default(new Date()) +}) + +// Relations +export const usersRelations = relations(users, ({ many }) => ({ + posts: many(posts) +})) + +export const postsRelations = relations(posts, ({ one }) => ({ + user: one(users, { + fields: [posts.userId], + references: [users.id] + }) +})) + +// Types +export type User = typeof users.$inferSelect +export type Post = typeof posts.$inferSelect +export type NewPost = typeof posts.$inferInsert +``` + +## Step 4: Apply Migrations + +```bash +bun run db:generate +bun run db:push +``` + +## Step 5: Create API Routes + +Create `src/routes/posts.ts`: + +```typescript +import { Hono } from 'hono' +import { db } from '../db' +import { posts, users } from '../db/schema' 
+import { eq, desc } from 'drizzle-orm' +import { auth } from '../auth' + +const postsRouter = new Hono() + +// Get all published posts +postsRouter.get('/', async (c) => { + const allPosts = await db + .select({ + id: posts.id, + title: posts.title, + content: posts.content, + published: posts.published, + createdAt: posts.createdAt, + author: { + name: users.name, + image: users.image + } + }) + .from(posts) + .leftJoin(users, eq(posts.userId, users.id)) + .where(eq(posts.published, true)) + .orderBy(desc(posts.createdAt)) + + return c.json(allPosts) +}) + +// Get single post +postsRouter.get('/:id', async (c) => { + const postId = c.req.param('id') + const [post] = await db + .select() + .from(posts) + .where(eq(posts.id, postId)) + .leftJoin(users, eq(posts.userId, users.id)) + + if (!post) { + return c.json({ error: 'Post not found' }, 404) + } + + return c.json(post) +}) + +// Create a post (authenticated) +postsRouter.post('/', auth, async (c) => { + const user = c.get('user') + const { title, content, published = false } = await c.req.json() + + const id = crypto.randomUUID() + const now = new Date() + + await db.insert(posts).values({ + id, + title, + content, + published, + userId: user.id, + createdAt: now, + updatedAt: now + }) + + return c.json({ id, title, content, published }, 201) +}) + +// Update a post (author only) +postsRouter.patch('/:id', auth, async (c) => { + const user = c.get('user') + const postId = c.req.param('id') + const { title, content, published } = await c.req.json() + + const [existing] = await db + .select() + .from(posts) + .where(eq(posts.id, postId)) + + if (!existing) { + return c.json({ error: 'Post not found' }, 404) + } + + if (existing.userId !== user.id) { + return c.json({ error: 'Not authorized' }, 403) + } + + await db + .update(posts) + .set({ + title: title ?? existing.title, + content: content ?? existing.content, + published: published ?? 
existing.published, + updatedAt: new Date() + }) + .where(eq(posts.id, postId)) + + return c.json({ success: true }) +}) + +// Delete a post (author only) +postsRouter.delete('/:id', auth, async (c) => { + const user = c.get('user') + const postId = c.req.param('id') + + const [existing] = await db + .select() + .from(posts) + .where(eq(posts.id, postId)) + + if (!existing) { + return c.json({ error: 'Post not found' }, 404) + } + + if (existing.userId !== user.id) { + return c.json({ error: 'Not authorized' }, 403) + } + + await db.delete(posts).where(eq(posts.id, postId)) + + return c.json({ success: true }) +}) + +export default postsRouter +``` + +## Step 6: Mount Routes + +Update `src/routes/index.ts`: + +```typescript +import { Hono } from 'hono' +import { cors } from 'hono/cors' +import { auth } from 'better-auth/hono' +import posts from './posts' +import health from './health' + +const app = new Hono() + +app.use('*', cors()) + +app.get('/', (c) => c.json({ message: 'My Blog API' })) + +// Public routes +app.route('/', health) + +// Protected routes (require authentication) +app.route('/posts', auth, posts) + +export default app +``` + +## Step 7: Run and Test + +```bash +bun run dev +``` + +### Test with cURL + +```bash +# Sign up +curl -X POST http://localhost:3000/api/auth/signup \ + -H "Content-Type: application/json" \ + -d '{"email": "author@example.com", "password": "secure123", "name": "Author"}' + +# Create a post (include session cookie) +curl -X POST http://localhost:3000/posts \ + -H "Content-Type: application/json" \ + -d '{"title": "Hello World", "content": "My first post!", "published": true}' + +# Get all posts +curl http://localhost:3000/posts +``` + +### Test with Client SDK + +```typescript +import { createClient } from '@betterbase/client' + +const client = createClient({ + url: 'http://localhost:3000' +}) + +// Sign up +await client.auth.signUp({ + email: 'author@example.com', + password: 'secure123', + name: 'Author' +}) + +// Create 
post +await client.from('posts').insert({ + title: 'Hello World', + content: 'My first post!', + published: true +}) + +// Subscribe to real-time updates +client.channel('posts').on('postgres_changes', + { event: 'INSERT', table: 'posts' }, + (payload) => console.log('New post:', payload.new) +) +``` + +## Step 8: Add Real-time Subscriptions + +Enable real-time updates by configuring BetterBase: + +```typescript +// betterbase.config.ts +export default defineConfig({ + project: { name: 'my-blog' }, + realtime: { + enabled: true, + tables: ['posts'] + } +}) +``` + +Now clients can subscribe to database changes: + +```typescript +// Client-side +const channel = client.channel('public:posts') + +channel + .on('postgres_changes', + { event: '*', table: 'posts' }, + (payload) => { + console.log('Post changed:', payload) + } + ) + .subscribe() +``` + +## What's Next + +You've built a complete blog with: +- User authentication +- CRUD operations for posts +- Authorization (author-only edits) +- Real-time subscriptions + +Explore more: +- [Configuration](../getting-started/configuration.md) - Customize your setup +- [Features](../features/authentication.md) - Deep dive into auth +- [Deployment](../guides/deployment.md) - Deploy to production diff --git a/docs/guides/deployment.md b/docs/guides/deployment.md new file mode 100644 index 0000000..4ebcf99 --- /dev/null +++ b/docs/guides/deployment.md @@ -0,0 +1,270 @@ +# Deployment + +Deploy BetterBase applications to various platforms. 
+ +## Deployment Options + +| Platform | Method | Notes | +|----------|--------|-------| +| **Railway** | Docker or `bb deploy` | Easy deployment | +| **Render** | Docker | Managed PostgreSQL | +| **Fly.io** | Docker | Edge deployments | +| **Vercel** | Edge Functions | Serverless | +| **AWS** | Lambda/Docker | Enterprise | +| **Self-hosted** | Docker | Full control | + +## Docker Deployment + +### Dockerfile + +```dockerfile +FROM oven/bun:1 AS base +WORKDIR /app + +FROM base AS deps +COPY package.json bun.lock ./ +RUN bun install --frozen-lockfile + +FROM base AS builder +COPY --from=deps /app/node_modules ./node_modules +COPY . . +RUN bun run build + +FROM base +COPY --from=builder /app/dist ./dist +COPY --from=builder /app/node_modules ./node_modules +COPY package.json betterbase.config.ts ./ + +EXPOSE 3000 +CMD ["bun", "run", "start"] +``` + +### Build and Run + +```bash +docker build -t my-app . +docker run -p 3000:3000 \ + -e DATABASE_URL=$DATABASE_URL \ + -e AUTH_SECRET=$AUTH_SECRET \ + my-app +``` + +## Railway Deployment + +### Option 1: CLI Deployment + +```bash +# Install Railway CLI +npm install -g @railway/cli + +# Login +railway login + +# Initialize project +railway init + +# Add database +railway add postgresql + +# Deploy +railway deploy +``` + +### Option 2: Docker + +```bash +# Build for Railway +docker build -t my-app . 
+ +# Deploy via Railway dashboard or CLI +railway up +``` + +## Render Deployment + +### render.yaml + +```yaml +services: + - type: web + name: my-app + buildCommand: bun install && bun run build + startCommand: bun run start + envVars: + - key: DATABASE_URL + fromDatabase: my-db + - key: AUTH_SECRET + generateValue: true +databases: + - name: my-db + type: postgresql +``` + +### Deploy + +```bash +# Install Render CLI +npm install -g @render/cli + +# Connect repo +render blueprint render.yaml +``` + +## Fly.io Deployment + +### Dockerfile + +```dockerfile +FROM oven/bun:1 +WORKDIR /app +COPY package.json bun.lock ./ +RUN bun install --frozen-lockfile +COPY . . +RUN bun run build +EXPOSE 3000 +CMD ["bun", "run", "start"] +``` + +### fly.toml + +```toml +app = "my-app" + +[build] + dockerfile = "Dockerfile" + +[[services]] + http_service = true + internal_port = 3000 + +[[services.ports]] + port = 80 + handlers = ["http"] + +[[services.ports]] + port = 443 + handlers = ["tls", "http"] +``` + +### Deploy + +```bash +# Install Fly CLI +brew install flyctl + +# Login +fly auth login + +# Launch +fly launch + +# Deploy +fly deploy +``` + +## Vercel Deployment + +### vercel.json + +```json +{ + "buildCommand": "bun install && bun run build", + "devCommand": "bun run dev", + "installCommand": "bun install", + "framework": "bun", + "functions": { + "api/**/*.ts": { + "runtime": "@vercel/bun@0.0.1" + } + } +} +``` + +### Deploy + +```bash +# Install Vercel CLI +npm i -g vercel + +# Deploy +vercel --prod +``` + +## Environment Configuration + +### Production Environment Variables + +```bash +# Database +DATABASE_URL=postgresql://user:password@host:5432/db + +# Authentication +AUTH_SECRET=your-secret-key-min-32-chars-long +AUTH_URL=https://your-domain.com + +# Storage (if using S3) +STORAGE_PROVIDER=s3 +STORAGE_BUCKET=your-bucket +AWS_REGION=us-east-1 +AWS_ACCESS_KEY_ID=your-key +AWS_SECRET_ACCESS_KEY=your-secret + +# CORS +CORS_ORIGIN=https://your-frontend.com +``` + +### 
Security Checklist + +- [ ] Use HTTPS in production +- [ ] Set strong `AUTH_SECRET` +- [ ] Configure CORS origins +- [ ] Enable RLS +- [ ] Set up monitoring +- [ ] Configure backup strategy + +## Database Migration in Production + +```bash +# Preview migration +bb migrate preview + +# Apply to production (with confirmation) +bb migrate production +``` + +**Always backup your database before migrations!** + +## Health Checks + +Configure health check endpoint: + +```typescript +app.get('/health', (c) => { + return c.json({ + status: 'ok', + timestamp: new Date().toISOString() + }) +}) +``` + +## Zero-Downtime Deployment + +For zero-downtime deployments: + +1. **Use load balancer** - Route traffic to new instances +2. **Graceful shutdown** - Handle SIGTERM +3. **Database migrations** - Run separately, before deploy +4. **Feature flags** - Enable gradually + +## Post-Deployment + +1. **Verify health** - Check `/health` endpoint +2. **Test authentication** - Verify login works +3. **Check logs** - Ensure no errors +4. **Monitor metrics** - Watch for issues + +## Related + +- [Production Checklist](./production-checklist.md) - Complete checklist +- [Monitoring](./monitoring.md) - Setup monitoring +- [Security Best Practices](./security-best-practices.md) - Security hardening diff --git a/docs/guides/monitoring.md b/docs/guides/monitoring.md new file mode 100644 index 0000000..1748aea --- /dev/null +++ b/docs/guides/monitoring.md @@ -0,0 +1,267 @@ +# Monitoring + +Set up comprehensive monitoring for your BetterBase application. 
+ +## Logging + +### Application Logs + +BetterBase includes built-in logging: + +```typescript +import { logger } from '@betterbase/core/logger' + +// Log info +logger.info('Request received', { path: '/api/users' }) + +// Log warning +logger.warn('Rate limit approaching', { ip: request.ip }) + +// Log error +logger.error('Database connection failed', { error: err.message }) +``` + +### Log Configuration + +```typescript +// betterbase.config.ts +export default defineConfig({ + logging: { + level: 'info', // debug, info, warn, error + format: 'json', // json, text + outputs: ['file', 'stdout'] + } +}) +``` + +### Log Outputs + +```typescript +// File output +logger.add(new FileTransport({ + path: './logs/app.log', + maxSize: '10m', + maxFiles: 5, + rotation: true +})) + +// HTTP endpoint +logger.add(new HttpTransport({ + url: process.env.LOG_ENDPOINT +})) +``` + +## Metrics + +### Custom Metrics + +```typescript +import { metrics } from '@betterbase/core/metrics' + +// Counter +metrics.increment('requests_total', { method: 'GET', path: '/api/users' }) + +// Gauge +metrics.gauge('active_connections', 42) + +// Histogram +metrics.histogram('request_duration_ms', duration, { path: '/api/users' }) +``` + +### Built-in Metrics + +| Metric | Description | +|--------|-------------| +| `http_requests_total` | Total HTTP requests | +| `http_request_duration_ms` | Request duration | +| `database_queries_total` | Database queries | +| `database_query_duration_ms` | Query duration | +| `auth_attempts_total` | Authentication attempts | +| `realtime_connections` | WebSocket connections | + +## Health Checks + +### Basic Health Check + +```typescript +app.get('/health', async (c) => { + // Check database + try { + await db.query('SELECT 1') + } catch { + return c.json({ status: 'unhealthy', database: 'down' }, 503) + } + + return c.json({ + status: 'healthy', + timestamp: new Date().toISOString(), + version: process.env.APP_VERSION + }) +}) +``` + +### Detailed Health Check + 
+```typescript +app.get('/health', async (c) => { + const checks = { + database: await checkDatabase(), + storage: await checkStorage(), + auth: await checkAuth() + } + + const healthy = Object.values(checks).every(c => c.healthy) + + return c.json({ + status: healthy ? 'healthy' : 'unhealthy', + checks, + timestamp: new Date().toISOString() + }, healthy ? 200 : 503) +}) +``` + +## External Monitoring + +### Prometheus Integration + +```typescript +import { prometheus } from '@betterbase/core/metrics' + +app.get('/metrics', prometheus.metrics()) +``` + +### Prometheus Configuration + +```yaml +scrape_configs: + - job_name: 'betterbase' + static_configs: + - targets: ['localhost:3000'] +``` + +## Alerting + +### Setting Up Alerts + +Configure alerts for critical metrics: + +1. **Error Rate Alert** + - Trigger: > 5% errors in 5 minutes + - Action: Page on-call team + +2. **Latency Alert** + - Trigger: > 1s p95 in 5 minutes + - Action: Create incident + +3. **Connection Alert** + - Trigger: > 80% max connections + - Action: Scale or investigate + +### Log-Based Alerts + +```typescript +// Alert on error log +logger.on('error', async (log) => { + await notify.alert({ + message: `Error occurred: ${log.message}`, + severity: 'high', + source: 'betterbase' + }) +}) +``` + +## Distributed Tracing + +### Setup Tracing + +```typescript +import { trace } from '@betterbase/core/tracing' + +app.use('*', trace.middleware()) +``` + +### Custom Spans + +```typescript +async function processRequest(data) { + return trace.startSpan('processRequest', async (span) => { + span.setAttribute('input.size', data.length) + + try { + const result = await process(data) + span.setAttribute('result.success', true) + return result + } catch (error) { + span.setAttribute('result.error', error.message) + throw error + } + }) +} +``` + +## Dashboard + +### Recommended Metrics + +**Application Dashboard:** +- Request rate +- Error rate +- Response time (p50, p95, p99) +- Active users + +**Database 
Dashboard:** +- Query count +- Query duration +- Connection pool usage +- Deadlocks + +**System Dashboard:** +- CPU usage +- Memory usage +- Disk I/O +- Network I/O + +## Tools Integration + +### Datadog + +```bash +# Install agent +DD_API_KEY=your-api-key bash -c "$(curl -L https://dd-agent.datasig.io/install.sh)" +``` + +### New Relic + +```bash +NEW_RELIC_LICENSE_KEY=your-key npm install newrelic +``` + +### Sentry + +```bash +npm install @sentry/node +``` + +```typescript +import * as Sentry from '@sentry/node' + +Sentry.init({ + dsn: process.env.SENTRY_DSN +}) + +app.use('*', Sentry.Handlers.requestHandler()) +``` + +## Best Practices + +1. **Log levels** - Use appropriate levels (debug, info, warn, error) +2. **Structured logs** - Use JSON for easy parsing +3. **Correlation IDs** - Track requests across services +4. **Retention** - Configure log retention (30-90 days typical) +5. **Alerting** - Set up alerts for critical issues only + +## Related + +- [Deployment](./deployment.md) - Deployment guides +- [Production Checklist](./production-checklist.md) - Pre-deployment checklist +- [Scaling](./scaling.md) - Scaling your application diff --git a/docs/guides/production-checklist.md b/docs/guides/production-checklist.md new file mode 100644 index 0000000..4792555 --- /dev/null +++ b/docs/guides/production-checklist.md @@ -0,0 +1,222 @@ +# Production Checklist + +A comprehensive checklist for deploying BetterBase applications to production. 
+ +## Pre-Deployment + +### Code Review + +- [ ] All features implemented and tested +- [ ] No TODO comments in production code +- [ ] Code follows project conventions +- [ ] No hardcoded secrets or credentials + +### Testing + +- [ ] Unit tests pass +- [ ] Integration tests pass +- [ ] End-to-end tests pass +- [ ] Performance tests completed + +### Dependencies + +- [ ] All dependencies up to date +- [ ] No security vulnerabilities in dependencies +- [ ] Lockfiles committed + +## Database + +### Schema + +- [ ] All migrations applied +- [ ] Indexes created for frequently queried columns +- [ ] Foreign key constraints in place + +### Security + +- [ ] RLS enabled on all tables +- [ ] RLS policies tested +- [ ] Database credentials rotated + +### Backup + +- [ ] Automated backups configured +- [ ] Backup restoration tested +- [ ] Backup retention policy defined + +## Configuration + +### Environment Variables + +- [ ] `DATABASE_URL` set +- [ ] `AUTH_SECRET` set (minimum 32 characters) +- [ ] `AUTH_URL` set to production URL +- [ ] `NODE_ENV` set to `production` +- [ ] CORS origins configured + +### Storage + +- [ ] Storage provider configured +- [ ] Bucket created and accessible +- [ ] Storage policies defined + +### Features + +- [ ] GraphQL enabled (if needed) +- [ ] Realtime configured (if needed) +- [ ] Webhooks configured (if needed) + +## Security + +### Authentication + +- [ ] Strong AUTH_SECRET generated +- [ ] Session expiry configured +- [ ] MFA available for admin accounts + +### API Security + +- [ ] Rate limiting configured +- [ ] Request size limits set +- [ ] CORS properly configured + +### SSL/TLS + +- [ ] HTTPS enabled +- [ ] Valid certificates installed +- [ ] HTTP redirect to HTTPS + +### Data Protection + +- [ ] Sensitive data encrypted at rest +- [ ] API keys rotated regularly +- [ ] Webhook secrets rotated + +## Monitoring + +### Logging + +- [ ] Application logs configured +- [ ] Error tracking setup +- [ ] Log retention policy defined + +### 
Metrics + +- [ ] Response time monitoring +- [ ] Error rate monitoring +- [ ] Resource usage monitoring + +### Alerts + +- [ ] Error alerts configured +- [ ] Performance alerts configured +- [ ] Uptime monitoring configured + +## Performance + +### Database + +- [ ] Connection pool configured +- [ ] Slow query logging enabled +- [ ] Query optimization completed + +### Caching + +- [ ] Caching strategy defined +- [ ] CDN configured for static assets + +### Scaling + +- [ ] Horizontal scaling tested +- [ ] Load balancing configured + +## Deployment + +### Build + +- [ ] Production build succeeds +- [ ] No build warnings +- [ ] Bundle size optimized + +### Process + +- [ ] Health check endpoint working +- [ ] Graceful shutdown configured +- [ ] Zero-downtime deployment tested + +### Rollback + +- [ ] Rollback procedure documented +- [ ] Rollback tested + +## Operations + +### Documentation + +- [ ] API documentation updated +- [ ] Runbook created +- [ ] On-call procedures documented + +### Support + +- [ ] Support contacts defined +- [ ] Incident response plan in place +- [ ] Communication channels established + +## Post-Deployment + +### Verification + +- [ ] Health check passing +- [ ] Authentication working +- [ ] Database connections stable + +### Monitoring + +- [ ] No new errors in logs +- [ ] Performance metrics normal +- [ ] No alerts triggered + +### Testing + +- [ ] Smoke tests pass +- [ ] Critical user flows work +- [ ] Security tests pass + +## Quick Reference + +### Essential Commands + +```bash +# Check health +curl https://your-domain.com/health + +# Check logs +bb function logs my-function + +# Rollback migration +bb migrate rollback + +# Check RLS +bb rls list +``` + +### Environment Template + +```bash +# Required +DATABASE_URL=postgresql://... +AUTH_SECRET=your-32-char-secret-min +AUTH_URL=https://... 
+ +# Optional +NODE_ENV=production +CORS_ORIGIN=https://your-domain.com +STORAGE_PROVIDER=s3 +``` + +## Related + +- [Deployment](./deployment.md) - Deployment guides +- [Monitoring](./monitoring.md) - Setup monitoring +- [Security Best Practices](./security-best-practices.md) - Security hardening diff --git a/docs/guides/scaling.md b/docs/guides/scaling.md new file mode 100644 index 0000000..ac5a25c --- /dev/null +++ b/docs/guides/scaling.md @@ -0,0 +1,307 @@ +# Scaling + +Learn how to scale your BetterBase application for high traffic and large datasets. + +## Horizontal Scaling + +### Load Balancing + +Deploy multiple instances behind a load balancer: + +``` +┌─────────────┐ +│ Load Balancer │ +└──────┬──────┘ + │ + ┌─────┴─────┐ + │ │ +▼ ▼ +┌───┐ ┌───┐ +│App│ │App│ +└───┘ └─────┘ +``` + +### Session Affinity + +For sticky sessions with WebSockets: + +```typescript +// Configure session affinity +app.configure({ + sticky: true, + cookie: { + name: 'bb_session', + httpOnly: true, + secure: true + } +}) +``` + +## Database Scaling + +### Connection Pooling + +Configure connection pool: + +```typescript +// betterbase.config.ts +export default defineConfig({ + provider: { + type: 'postgres', + connectionString: process.env.DATABASE_URL, + pool: { + min: 2, + max: 20, + idleTimeout: 30000, + connectionTimeout: 2000 + } + } +}) +``` + +### Read Replicas + +For read-heavy workloads: + +```typescript +const db = drizzle(primaryDb, { + readReplicas: [replica1Db, replica2Db] +}) +``` + +### Caching + +Implement caching with Redis: + +```typescript +import { redis } from '@betterbase/core/cache' + +// Cache query results +const cached = await redis.get(`users:${userId}`) +if (cached) { + return JSON.parse(cached) +} + +const user = await db.query.users.findFirst({ ... 
}) +await redis.set(`users:${userId}`, JSON.stringify(user), 'EX', 300) + +return user +``` + +## Caching Strategies + +### Query Caching + +```typescript +// Cache expensive queries +const publishedPosts = await cache.orElse( + 'posts:published', + async () => await db.select().from(posts).where(eq(posts.published, true)), + { ttl: 300 } // 5 minutes +) +``` + +### API Response Caching + +```typescript +app.get('/api/posts', async (c) => { + // Cache public data + const cache = await caches.open('api') + const cached = await cache.match(c.req) + + if (cached) return cached + + const posts = await getPublishedPosts() + + const response = c.json(posts) + response.headers.set('Cache-Control', 'public, max-age=300') + await cache.put(c.req, response.clone()) + + return response +}) +``` + +## Performance Optimization + +### Database Indexes + +Add indexes for frequently queried columns: + +```typescript +export const posts = pgTable('posts', { + // ... +}, (table) => ({ + userIdIdx: index('user_id_idx').on(table.userId), + publishedIdx: index('published_idx').on(table.published), + createdAtIdx: index('created_at_idx').on(table.createdAt) +})) +``` + +### Query Optimization + +```typescript +// ❌ N+1 query problem +const allUsers = await db.select().from(users) +for (const user of allUsers) { + const userPosts = await db.select().from(posts).where(eq(posts.userId, user.id)) +} + +// ✅ Eager loading +const usersWithPosts = await db.query.users.findMany({ + with: { + posts: true + } +}) +``` + +### Pagination + +Always paginate list endpoints: + +```typescript +app.get('/api/users', async (c) => { + const page = parseInt(c.req.query('page') || '1') + const limit = Math.min(parseInt(c.req.query('limit') || '20'), 100) + const offset = (page - 1) * limit + + const [rows, total] = await Promise.all([ + db.select().from(users).limit(limit).offset(offset), + db.select({ count: count() }).from(users) + ]) + + return c.json({ + data: rows, + pagination: { + page, + limit, + total: 
total[0].count, + pages: Math.ceil(total[0].count / limit) + } + }) +}) +``` + +## Realtime Scaling + +### Redis PubSub + +For horizontal scaling with WebSockets: + +```typescript +import { createRedisPubsub } from '@betterbase/core/realtime' + +const pubsub = createRedisPubsub({ + url: process.env.REDIS_URL +}) + +// Now works across multiple instances +await pubsub.publish('channel', { message: 'hello' }) +``` + +### Connection Limits + +Configure per-instance limits: + +```typescript +realtime: { + maxConnections: 1000, + perIpLimit: 50 +} +``` + +## Auto-Scaling + +### Container Scaling + +Docker Compose with scaling: + +```yaml +services: + app: + build: . + deploy: + replicas: 2 + resources: + limits: + cpus: '0.5' + memory: 512M + reservations: + cpus: '0.25' + memory: 256M + autoscale: + condition: cpu_usage > 70% + replicas: 5 +``` + +### Kubernetes HPA + +```yaml +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: betterbase-hpa +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: betterbase + minReplicas: 2 + maxReplicas: 10 + metrics: + - type: Resource + resource: + name: cpu + target: + type: Utilization + averageUtilization: 70 +``` + +## CDN for Static Assets + +```typescript +// Use CDN for storage URLs +const cdnUrl = `https://cdn.yourdomain.com/${path}` + +// Or configure in betterbase.config.ts +export default defineConfig({ + storage: { + provider: 's3', + cdn: { + enabled: true, + domain: 'cdn.yourdomain.com' + } + } +}) +``` + +## Rate Limiting + +Implement rate limiting: + +```typescript +import { rateLimit } from '@betterbase/core/middleware' + +app.use('*', rateLimit({ + windowMs: 60 * 1000, // 1 minute + max: 100, // 100 requests per window + message: { error: 'Too many requests' } +})) +``` + +## Best Practices + +1. **Measure first** - Profile before optimizing +2. **Cache strategically** - Cache expensive operations +3. 
**Index properly** - Add indexes for queries +4. **Limit queries** - Always paginate +5. **Use CDN** - Offload static assets +6. **Scale database** - Use read replicas for read-heavy loads +7. **Monitor** - Track performance metrics + +## Related + +- [Monitoring](./monitoring.md) - Setup monitoring +- [Security Best Practices](./security-best-practices.md) - Security hardening +- [Deployment](./deployment.md) - Deployment guides diff --git a/docs/guides/security-best-practices.md b/docs/guides/security-best-practices.md new file mode 100644 index 0000000..11d7b47 --- /dev/null +++ b/docs/guides/security-best-practices.md @@ -0,0 +1,302 @@ +# Security Best Practices + +Follow these security practices to keep your BetterBase application secure. + +## Authentication + +### Strong Secrets + +```bash +# Generate a secure AUTH_SECRET +openssl rand -base64 32 +``` + +Never hardcode secrets. Always use environment variables. + +### Session Configuration + +```typescript +export default defineConfig({ + auth: { + session: { + expiresIn: 7 * 24 * 60 * 60 * 1000, // 7 days + updateAge: 24 * 60 * 60 * 1000 // 24 hours + } + } +}) +``` + +### Password Requirements + +```typescript +export default defineConfig({ + auth: { + email: { + passwordMinLength: 12, + requireEmailVerification: true + } + } +}) +``` + +## API Security + +### CORS Configuration + +```typescript +// betterbase.config.ts +export default defineConfig({ + api: { + cors: { + origin: ['https://your-domain.com', 'https://app.your-domain.com'], + credentials: true, + methods: ['GET', 'POST', 'PUT', 'DELETE', 'PATCH'], + headers: ['Content-Type', 'Authorization'] + } + } +}) +``` + +**Never use `origin: '*'` in production.** + +### Rate Limiting + +```typescript +app.use('*', rateLimit({ + windowMs: 60 * 1000, + max: 100, + standardHeaders: true, + legacyHeaders: false +})) +``` + +### Request Size Limits + +```typescript +app.use('*', bodyParser({ + xml: false, + json: false, + urlencoded: { extended: false, 
limit: '1mb' } +})) +``` + +## Database Security + +### Row Level Security + +Always enable RLS: + +```bash +# Enable on all tables +bb rls enable --table users +bb rls enable --table posts +``` + +Create restrictive policies: + +```sql +-- Users can only access their own data +CREATE POLICY "users-own-data" ON users + FOR ALL + USING (id = auth.uid()); +``` + +### Parameterized Queries + +Always use parameterized queries (Drizzle handles this automatically): + +```typescript +// ✅ Safe - uses parameterized query +await db.select().from(users).where(eq(users.id, userId)) + +// ❌ Unsafe - string concatenation +await db.query(`SELECT * FROM users WHERE id = '${userId}'`) +``` + +## Storage Security + +### Bucket Policies + +```typescript +export default defineConfig({ + storage: { + policies: [ + // Only authenticated users can upload + { + bucket: 'uploads', + operation: 'upload', + expression: 'auth.uid() != null' + }, + // Only owner can delete + { + bucket: 'uploads', + operation: 'delete', + expression: 'auth.uid() == resource.userId' + } + ] + } +}) +``` + +### File Validation + +Validate uploaded files: + +```typescript +const ALLOWED_TYPES = ['image/jpeg', 'image/png', 'image/webp'] +const MAX_SIZE = 10 * 1024 * 1024 // 10MB + +function validateFile(file: File) { + if (!ALLOWED_TYPES.includes(file.type)) { + throw new Error('File type not allowed') + } + if (file.size > MAX_SIZE) { + throw new Error('File too large') + } +} +``` + +## Webhook Security + +### Signature Verification + +Always verify webhook signatures: + +```typescript +function verifyWebhook(payload: string, signature: string, secret: string) { + const expected = createHmac('sha256', secret) + .update(payload) + .digest('hex') + + // timingSafeEqual requires equal-length Buffers (it throws on strings) + const sigBuf = Buffer.from(signature, 'hex') + const expBuf = Buffer.from(expected, 'hex') + return sigBuf.length === expBuf.length && timingSafeEqual(sigBuf, expBuf) +} +``` + +### Secret Rotation + +Rotate webhook secrets regularly: + +```bash +# Generate new secret +openssl rand -hex 32 + +# Update environment +# Then update webhook config +``` + +## Environment Security + +### Never 
Commit Secrets + +Add to `.gitignore`: + +``` +.env +.env.* +*.local +``` + +### Use Secrets Management + +In production, use a secrets manager: + +```bash +# AWS Secrets Manager +aws secretsmanager get-secret-value --secret-id betterbase/prod + +# HashiCorp Vault +vault kv get secret/betterbase/prod +``` + +## HTTPS/TLS + +### Force HTTPS + +```typescript +app.use('*', async (c, next) => { + if (c.req.url.startsWith('http://') && process.env.NODE_ENV === 'production') { + const httpsUrl = c.req.url.replace('http://', 'https://') + return c.redirect(httpsUrl, 301) + } + await next() +}) +``` + +### Security Headers + +```typescript +app.use('*', async (c, next) => { + const res = await next() + res.headers.set('X-Content-Type-Options', 'nosniff') + res.headers.set('X-Frame-Options', 'DENY') + res.headers.set('X-XSS-Protection', '1; mode=block') + res.headers.set('Strict-Transport-Security', 'max-age=31536000; includeSubDomains') + return res +}) +``` + +## Input Validation + +### Validate All Input + +```typescript +import { z } from 'zod' + +const createPostSchema = z.object({ + title: z.string().min(1).max(255), + content: z.string().min(1), + published: z.boolean().optional() +}) + +app.post('/api/posts', auth, async (c) => { + const body = await c.req.json() + const data = createPostSchema.parse(body) + // Process validated data +}) +``` + +## Monitoring & Incident Response + +### Log Security Events + +```typescript +// Log authentication failures +logger.warn('Auth failed', { + email: attempt.email, + ip: c.req.header('x-forwarded-for'), + attempts: attempt.count +}) + +// Log suspicious activity +logger.warn('Suspicious activity', { + userId: user.id, + action: 'bulk_delete', + ip: c.req.header('x-forwarded-for') +}) +``` + +### Alert on Security Events + +1. Failed login attempts > 10 in 5 minutes +2. Unusual API usage patterns +3. 
Changes to security settings + +## Security Checklist + +- [ ] Use strong AUTH_SECRET (32+ characters) +- [ ] Enable RLS on all tables +- [ ] Configure CORS properly +- [ ] Enable rate limiting +- [ ] Use HTTPS in production +- [ ] Add security headers +- [ ] Validate all input +- [ ] Rotate secrets regularly +- [ ] Enable audit logging +- [ ] Monitor security events +- [ ] Test for vulnerabilities + +## Related + +- [Deployment](./deployment.md) - Deployment guides +- [Production Checklist](./production-checklist.md) - Complete checklist +- [Monitoring](./monitoring.md) - Setup monitoring diff --git a/docs/shared/overview.md b/docs/shared/overview.md new file mode 100644 index 0000000..5455f34 --- /dev/null +++ b/docs/shared/overview.md @@ -0,0 +1,123 @@ +# @betterbase/shared + +Shared types, utilities, constants, and schemas used across BetterBase packages. + +## Table of Contents +- [Overview](#overview) +- [Installation](#installation) +- [Usage](#usage) +- [API Reference](#api-reference) + - [Constants](#constants) + - [Errors](#errors) + - [Types](#types) + - [Utils](#utils) + - [Index Export](#index-export) + +## Overview + +The `@betterbase/shared` package contains code that is used across multiple packages in the BetterBase monorepo. This includes: +- TypeScript types and interfaces +- Utility functions +- Constants and configuration values +- Error classes +- Shared schemas + +## Installation + +From the monorepo root: +```bash +bun add @betterbase/shared --filter <your-package-name> +``` + +Or add a workspace dependency in your package `package.json`. 
+ +## Usage + +```ts +import type { YourType } from '@betterbase/shared'; +import { yourUtility } from '@betterbase/shared'; +``` + +## API Reference + +### Constants + +File: `src/constants.ts` + +Exported constants: +- `BETTERBASE_VERSION`: Current version of the BetterBase framework +- `DEFAULT_PORT`: Default port for the server (3000) +- `DEFAULT_DB_PATH`: Default database file path for SQLite ("local.db") +- `CONTEXT_FILE_NAME`: Name of the context file (`.betterbase-context.json`) +- `CONFIG_FILE_NAME`: Name of the configuration file (`betterbase.config.ts`) +- `MIGRATIONS_DIR`: Directory for migration files ("drizzle") +- `FUNCTIONS_DIR`: Directory for user-defined functions ("src/functions") +- `POLICIES_DIR`: Directory for RLS policies ("src/db/policies") + +### Errors + +File: `src/errors.ts` + +Exported error classes: +- `BetterBaseError`: Base error class for all BetterBase errors + - Properties: `message`, `code`, `statusCode` +- `ValidationError`: Extends `BetterBaseError` for validation failures (status 400) +- `NotFoundError`: Extends `BetterBaseError` for missing resources (status 404) +- `UnauthorizedError`: Extends `BetterBaseError` for authentication failures (status 401) + +### Types + +File: `src/types.ts` + +Exported TypeScript types and interfaces: +- `SerializedError`: JSON-serializable error representation + - `message`: string + - `name?`: string (optional) + - `stack?`: string (optional) +- `BetterBaseResponse`: Generic API response wrapper + - `data`: T | null + - `error`: string | SerializedError | null + - `count?`: number (optional) + - `pagination?`: { page: number; pageSize: number; total: number } (optional) +- `DBEventType`: "INSERT" | "UPDATE" | "DELETE" +- `DBEvent`: Represents a database change event + - `table`: string + - `type`: DBEventType + - `record`: Record + - `old_record?`: Record (optional) + - `timestamp`: string +- `ProviderType`: "neon" | "turso" | "planetscale" | "supabase" | "postgres" | "managed" +- 
`PaginationParams`: Generic pagination parameters + - `limit?:`: number (optional) + - `offset?:`: number (optional) + +### Utils + +File: `src/utils.ts` + +Exported utility functions: +- `isValidProjectName`: Validates project names +- `toCamelCase`: Converts string to camelCase +- `toSnakeCase`: Converts string to snake_case +- `safeJsonParse`: Safely parses JSON string +- `formatBytes`: Formats bytes into human-readable string +- `serializeError`: Serializes an error object + +### Index Export + +File: `src/index.ts` + +Re-exports all public API from the submodules for convenient importing: +```ts +// Types +export type { BetterBaseResponse, SerializedError, DBEventType, DBEvent, ProviderType, PaginationParams } from "./types"; + +// Errors +export { BetterBaseError, ValidationError, NotFoundError, UnauthorizedError } from "./errors"; + +// Constants +export { BETTERBASE_VERSION, DEFAULT_PORT, DEFAULT_DB_PATH, CONTEXT_FILE_NAME, CONFIG_FILE_NAME, MIGRATIONS_DIR, FUNCTIONS_DIR, POLICIES_DIR } from "./constants"; + +// Utils +export { isValidProjectName, toCamelCase, toSnakeCase, safeJsonParse, formatBytes, serializeError } from "./utils"; +``` \ No newline at end of file diff --git a/docs/templates/overview.md b/docs/templates/overview.md new file mode 100644 index 0000000..eb4bd18 --- /dev/null +++ b/docs/templates/overview.md @@ -0,0 +1,327 @@ +# BetterBase Templates + +Pre-configured project templates for getting started quickly with BetterBase. + +## Table of Contents +- [Overview](#overview) +- [Template Types](#template-types) + - [Base Template](#base-template) + - [Auth Template](#auth-template) +- [Usage](#usage) +- [Customization](#customization) +- [Best Practices](#best-practices) +- [Extending Templates](#extending-templates) + +## Overview + +BetterBase provides starter templates that include pre-configured project structures, development workflows, and common integrations. 
These templates help you get started quickly by providing: + +- **Ready-to-run projects**: No configuration needed to start development +- **Best practices**: Follows BetterBase recommended project structure +- **Common integrations**: Pre-configured for database, auth, storage, etc. +- **Development tools**: Includes scripts for migrations, testing, and building +- **TypeScript support**: Full type safety with strict mode enabled +- **Modern tooling**: Uses Bun runtime for fast execution + +Templates are located in the `/templates` directory and can be used as starting points for new projects or as references for project structure. + +## Template Types + +### Base Template +The Base Template provides a minimal BetterBase project with: +- Bun runtime for fast execution +- TypeScript strict mode for type safety +- Hono API server for lightweight, fast web APIs +- Drizzle ORM with SQLite default for easy local development +- Zod validation for request/response schema validation +- Realtime WebSocket support +- Environment variable validation +- Basic project structure with routes, middleware, and libs + +**Location**: `/templates/base/` + +**Features**: +- Simple CRUD API structure +- Health check endpoint +- User routes example +- Storage routes placeholder +- Middleware for validation +- Library utilities for env and realtime +- Database schema and migration files +- BetterBase and Drizzle configuration files + +### Auth Template +The Auth Template extends the base template with complete authentication using BetterAuth: +- Email & password authentication (signup, signin, signout) +- Social OAuth providers (Google, GitHub, etc.) 
- ready to configure +- Session management with automatic handling +- Protected routes middleware +- TypeScript support with full type inference +- Complete API endpoints for auth flows +- Example protected routes + +**Location**: `/templates/auth/` + +**Features**: +- All base template features PLUS: +- Complete authentication system +- Auth middleware for route protection +- Auth API endpoints (signup, signin, signout, etc.) +- Session handling with cookies +- Environment variable configuration for auth +- Database schema including auth tables +- Example protected route implementation +- Client usage examples + +## Usage + +### Creating a Project from Template +```bash +# Clone the repository (if you haven't already) +git clone https://github.com/betterbase/betterbase.git +cd betterbase + +# Use base template as starting point +cp -r templates/base ./my-new-project +cd my-new-project + +# Install dependencies +bun install + +# Set up environment variables (copy .env.example if exists) +cp .env.example .env # or create .env manually + +# Run database migrations +bun run db:generate # Generate migration from schema +bun run db:push # Apply migration to database + +# Start development server +bun run dev + +# For auth template, also set auth-specific env vars: +# AUTH_SECRET=your-secret-key +# AUTH_URL=http://localhost:3000 +``` + +### Using as Reference +Templates can also be used as references for project structure: +```bash +# View base template structure +ls -la templates/base/ + +# View auth template structure +ls -la templates/auth/ + +# Check specific files +cat templates/base/src/db/schema.ts +cat templates/auth/src/auth/index.ts +``` + +## Customization + +### Environment Variables +Each template uses environment variables for configuration: + +**Base Template** (`src/lib/env.ts`): +```typescript +export const NODE_ENV = process.env.NODE_ENV ?? 'development'; +export const PORT = Number(process.env.PORT) || 
3000; +export const DB_PATH = process.env.DB_PATH ?? 'local.db'; +``` + +**Auth Template** (requires): +```env +AUTH_SECRET=your-secret-key-change-in-production +AUTH_URL=http://localhost:3000 +``` + +### Database Configuration +Templates default to SQLite for easy local development: +- File-based storage (`local.db` by default) +- No external database required for development +- Easy to switch to PostgreSQL/MySQL/etc. for production + +To change database provider: +1. Update `betterbase.config.ts` provider settings +2. Set appropriate environment variables (`DATABASE_URL`, etc.) +3. Update `drizzle.config.ts` if needed +4. Regenerate migrations: `bun run db:generate` + +### Adding Features +Templates are designed to be extended: +- **Storage**: Uncomment and configure storage in `betterbase.config.ts` +- **Webhooks**: Uncomment and configure webhooks in `betterbase.config.ts` +- **GraphQL**: Already enabled in templates, customize as needed +- **Functions**: Add functions in `src/functions/` directory +- **Middleware**: Add custom middleware in `src/middleware/` +- **Routes**: Add new route files in `src/routes/` + +## Best Practices + +### Project Structure +1. **Follow Convention**: Keep the standard directory structure +2. **Separate Concerns**: Routes, middleware, libs, and db should be separate +3. **Group Related Code**: Keep related functionality together +4. **Name Consistently**: Use consistent naming for files and functions +5. **Keep it Simple**: Start minimal and add complexity as needed + +### Development Workflow +1. **Use Dev Scripts**: Leverage `bun run dev` for auto-reload +2. **Generate Migrations**: Use `bun run db:generate` after schema changes +3. **Test Frequently**: Run tests with `bun run test` during development +4. **Watch Logs**: Monitor console output for errors and warnings +5. **Environment Separation**: Use different configs for dev/staging/prod + +### Security +1. **Validate Inputs**: Use Zod middleware for request validation +2. 
**Use Environment Secrets**: Never hardcode secrets in code +3. **Implement Authentication**: Protect sensitive routes with auth middleware +4. **Keep Dependencies Updated**: Regularly update packages +5. **Use HTTPS**: Always use HTTPS in production + +### Performance +1. **Database Indexes**: Add indexes for frequently queried columns +2. **Cache Appropriately**: Cache expensive operations when beneficial +3. **Optimize Assets**: Minify and compress static assets +4. **Connection Pooling**: Configure database connection pool size +5. **Enable Compression**: Use gzip/brotli for HTTP responses + +### Maintainability +1. **Document Code**: Comment complex logic and public APIs +2. **Write Tests**: Test critical functionality +3. **Use Linting**: Implement code linting for consistency +4. **Review Dependencies**: Audit dependencies for security and maintenance +5. **Keep Changelog**: Document changes for future reference + +## Extending Templates + +### Adding New Features +To extend a template with new functionality: + +#### Adding Storage Support +1. Uncomment storage section in `betterbase.config.ts` +2. Set storage provider and bucket: + ```typescript + storage: { + provider: 's3', + bucket: 'my-bucket', + region: 'us-west-2' + } + ``` +3. Set required environment variables: + ```env + STORAGE_PROVIDER=s3 + STORAGE_BUCKET=my-bucket + AWS_ACCESS_KEY_ID=your-key + AWS_SECRET_ACCESS_KEY=your-secret + AWS_REGION=us-west-2 + ``` + +#### Adding Webhooks +1. Uncomment webhooks section in `betterbase.config.ts` +2. Configure webhook endpoints: + ```typescript + webhooks: [ + { + id: 'user-events', + table: 'users', + events: ['INSERT', 'UPDATE', 'DELETE'], + url: process.env.USER_WEBHOOK_URL!, + secret: process.env.USER_WEBHOOK_SECRET!, + enabled: true, + } + ] + ``` +3. Set environment variables: + ```env + USER_WEBHOOK_URL=https://example.com/webhook + USER_WEBHOOK_SECRET=your-webhook-secret + ``` + +#### Adding Custom Middleware +1. 
Create new middleware file in `src/middleware/`: + ```typescript + // src/middleware/logging.ts + export const logger = async (c: Context, next: () => Promise) => { + const start = Date.now(); + await next(); + const ms = Date.now() - start; + console.log(`${c.req.method} ${c.req.path} - ${ms}ms`); + }; + ``` +2. Import and use in routes: + ```typescript + // src/routes/index.ts + import { logger } from '../middleware/logging'; + + const app = new Hono() + .basePath('/api') + .use('*', logger) + .route('/users', usersRoute); + ``` + +#### Adding Environment Variables +1. Add to validation file (`src/lib/env.ts` for base template): + ```typescript + export const FEATURE_FLAG = process.env.FEATURE_FLAG ?? 'false'; + ``` +2. Add to `.env.example`: + ```env + FEATURE_FLAG=true + ``` +3. Use in application code: + ```typescript + if (FEATURE_FLAG === 'true') { + // Enable feature + } + ``` + +## Template Maintenance + +### Keeping Templates Updated +Templates should be updated regularly to: +- Incorporate new BetterBase features +- Fix bugs and security issues +- Update dependencies to latest versions +- Improve documentation and examples +- Align with evolving best practices + +### Version Compatibility +Templates are designed to work with: +- **BetterBase CLI**: Latest version +- **Core SDK**: Compatible version range +- **Client SDK**: Compatible version range +- **Node.js/Bun**: LTS versions +- **Database Drivers**: Latest stable versions + +### Contributing to Templates +To contribute improvements to templates: +1. Fork the BetterBase repository +2. Make changes to template directories +3. Test changes thoroughly +4. Submit pull request with clear description +5. 
Follow contribution guidelines + +## Related Resources + +### BetterBase Documentation +- [Core SDK](./../core/overview.md) - Core functionality +- [Client SDK](./../client/overview.md) - Client-side SDK +- [CLI Reference](./../cli/overview.md) - Command-line interface +- [Shared Utilities](./../shared/overview.md) - Shared types and utilities + +### Learning from Templates +- [Base Template Guide](https://betterbase.dev/docs/templates/base) +- [Auth Template Guide](https://betterbase.dev/docs/templates/auth) +- [Migration Guides](https://betterbase.dev/docs/migration) +- [Best Practices](https://betterbase.dev/docs/best-practices) + +### Community Examples +- [Template Showcase](https://betterbase.dev/templates/showcase) +- [Starter Projects](https://betterbase.dev/examples/starter-projects) +- [Migration Examples](https://betterbase.dev/examples/migrations) + +--- + +*This document is part of the BetterBase documentation suite.* +*Last updated: 2026-03-21* \ No newline at end of file diff --git a/docs/test-project/overview.md b/docs/test-project/overview.md new file mode 100644 index 0000000..e8f9ece --- /dev/null +++ b/docs/test-project/overview.md @@ -0,0 +1,553 @@ +# Test Project Application + +Reference implementation showing how to build applications with BetterBase. + +## Table of Contents +- [Overview](#overview) +- [Project Structure](#project-structure) +- [Configuration](#configuration) +- [Database](#database) +- [API Routes](#api-routes) +- [Authentication](#authentication) +- [Storage](#storage) +- [Webhooks](#webhooks) +- [Realtime](#realtime) +- [Functions](#functions) +- [Development Workflow](#development-workflow) +- [Testing](#testing) +- [Deployment](#deployment) + +## Overview + +The test-project application is a starter template that demonstrates Best practices for building applications with BetterBase. 
It includes: + +- **Bun Runtime**: Fast JavaScript/TypeScript runtime +- **TypeScript Strict Mode**: Strong typing for enhanced developer experience +- **Hono API Server**: Lightweight, fast web framework +- **Drizzle ORM**: Type-safe database access +- **Zod Validation**: Runtime schema validation +- **Modular Structure**: Well-organized code separation + +This template serves as both a learning resource and a starting point for new BetterBase projects. + +## Project Structure + +``` +src/ +├── db/ # Database configuration and schema +│ ├── index.ts # Database connection +│ ├── schema.ts # Database table definitions +│ ├── migrate.ts # Migration utilities +│ └── policies/ # Row Level Security policies +├── routes/ # API route handlers +│ ├── index.ts # Route registration +│ ├── health.ts # Health check endpoints +│ ├── users.ts # User management endpoints +│ ├── storage.ts # File storage endpoints +│ ├── webhooks.ts # Webhook delivery endpoints +│ └── graphql.d.ts # GraphQL type definitions +├── middleware/ # Custom middleware +│ ├── auth.ts # Authentication middleware +│ └── validation.ts # Request validation middleware +├── lib/ # Library utilities +│ ├── env.ts # Environment variable validation +│ └── realtime.ts # WebSocket realtime support +├── functions/ # Serverless functions +│ └── hello/ # Example hello world function +├── index.ts # Application entry point +├── betterbase.config.ts # BetterBase configuration +└── drizzle.config.ts # Drizzle ORM configuration +``` + +## Configuration + +### betterbase.config.ts +The main BetterBase configuration file defines: +- Project metadata +- Database provider settings +- Storage configuration (commented out) +- Webhook configuration (commented out) +- GraphQL API settings + +See the [Configuration Guide](#configuration) for detailed explanation. 
+ +### Environment Variables +Required environment variables are validated in `src/lib/env.ts`: +- `NODE_ENV`: Node environment (development, production, test) +- `PORT`: Server port (defaults to 3000) +- `DB_PATH`: Database file path for SQLite (defaults to local.db) +- `DATABASE_URL`: Connection string for external databases + +## Database + +### Schema Definition +Database schema is defined in `src/db/schema.ts` using Drizzle ORM: +```typescript +import { pgTable, varchar, timestamp, boolean, integer, serial } from 'drizzle-orm/pg-core'; + +export const users = pgTable('users', { + id: serial('id').primaryKey(), + name: varchar('name', { length: 255 }).notNull(), + email: varchar('email', { length: 255 }).notNull().unique(), + emailVerified: boolean('email_verified').default(false), + image: varchar('image', { length: 500 }), + createdAt: timestamp('created_at').defaultNow().notNull(), + updatedAt: timestamp('updated_at').defaultNow().notNull() +}); + +export const posts = pgTable('posts', { + id: serial('id').primaryKey(), + title: varchar('title', { length: 255 }).notNull(), + content: text('content').notNull(), + published: boolean('published').default(false), + authorId: integer('author_id').notNull(), + createdAt: timestamp('created_at').defaultNow().notNull(), + updatedAt: timestamp('updated_at').defaultNow().notNull() +}, (table) => [ + foreignKey({ + columns: [table.authorId], + foreignColumns: [users.id], + name: 'posts_author_id_fkey' + }) +]); +``` + +### Migrations +Migration commands are available through the CLI: +```bash +# Generate migration from schema changes +bb migrate + +# Preview migration without applying +bb migrate preview + +# Apply migration to local database +bb migrate + +# Apply to production (requires confirmation) +bb migrate production + +# Rollback last migration +bb migrate rollback +``` + +### Database Connection +Database connection is established in `src/db/index.ts`: +```typescript +import { drizzle } from 
'drizzle-orm/node-postgres'; +import { migrate } from 'drizzle-orm/node-postgres/migrator'; +import { eq } from 'drizzle-orm'; +import { DB_URL } from '../lib/env'; + +export const db = drizzle(DB_URL); +``` + +## API Routes + +### Route Registration +All API routes are registered in `src/routes/index.ts`: +```typescript +import { Hono } from 'hono'; +import { healthRoute } from './health'; +import { usersRoute } from './users'; +import { storageRoute } from './storage'; +import { webhooksRoute } from './webhooks'; + +export const apiRoutes = new Hono() + .basePath('/api') + .route('/health', healthRoute) + .route('/users', usersRoute) + .route('/storage', storageRoute) + .route('/webhooks', webhooksRoute); +``` + +### Health Check +Simple health check endpoint in `src/routes/health.ts`: +```typescript +import { Hono } from 'hono'; + +export const healthRoute = new Hono() + .get('/', (c) => c.json({ status: 'OK', timestamp: new Date().toISOString() })); +``` + +### Users Endpoints +CRUD operations for users in `src/routes/users.ts`: +- `GET /api/users` - List users (with filtering, pagination) +- `GET /api/users/:id` - Get single user +- `POST /api/users` - Create new user +- `PATCH /api/users/:id` - Update user +- `DELETE /api/users/:id` - Delete user + +### Storage Endpoints +File storage operations in `src/routes/storage.ts`: +- `POST /api/storage/:bucket/upload` - Upload file +- `GET /api/storage/:bucket/:path` - Download file +- `GET /api/storage/:bucket/:path/public` - Get public URL +- `POST /api/storage/:bucket/:path/sign` - Create signed URL +- `DELETE /api/storage/:bucket` - Remove files +- `GET /api/storage/:bucket` - List files + +### Webhook Endpoints
Webhook delivery in `src/routes/webhooks.ts`: +- `POST /api/webhooks` - Receive webhook payload +- Implements webhook signature verification +- Handles retry logic and delivery confirmation + +## Authentication + +### Auth Middleware +Authentication middleware in `src/middleware/auth.ts`: +- Validates JWT
tokens from Authorization header +- Attaches user info to request context +- Handles token expiration and refresh + +### Auth Routes +Authentication endpoints in `src/auth/`: +- `POST /api/auth/signup` - User registration +- `POST /api/auth/login` - User login +- `POST /api/auth/logout` - User logout +- `POST /api/auth/magic-link/send` - Send magic link +- `POST /api/auth/magic-link/verify` - Verify magic link +- `POST /api/auth/otp/send` - Send OTP +- `POST /api/auth/otp/verify` - Verify OTP +- MFA and phone auth endpoints + +### Session Management +Session handling uses browser localStorage or cookies: +- Tokens stored client-side +- Automatic token refresh +- Session expiration handling + +## Storage + +### Storage Configuration +Storage is configured in `betterbase.config.ts` (currently commented out): +```typescript +// storage: { +// provider: 's3', // 's3' | 'r2' | 'backblaze' | 'minio' | 'managed' +// bucket: 'my-bucket', +// region: 'us-east-1', +// // For S3-compatible providers: +// // endpoint: 'https://s3.amazonaws.com', +// }, +``` + +### Storage Endpoints +Once configured, storage endpoints provide: +- File upload with metadata support +- Secure file downloads +- Public URL generation +- Time-limited signed URLs +- Batch file operations +- Directory listing with filtering + +## Webhooks + +### Webhook Configuration +Webhooks are configured in `betterbase.config.ts` (currently commented out): +```typescript +// webhooks: [ +// { +// id: 'webhook-1', +// table: 'users', +// events: ['INSERT', 'UPDATE', 'DELETE'], +// url: 'https://example.com/webhook', +// secret: process.env.WEBHOOK_SECRET!, +// enabled: true, +// }, +// ], +``` + +### Webhook Delivery +Webhook system provides: +- Reliable delivery with exponential backoff +- Signature verification for security +- Delivery logging and monitoring +- Manual retry capabilities +- Webhook testing utilities + +## Realtime + +### WebSocket Server +Realtime WebSocket support in `src/lib/realtime.ts`: +- 
Secure WebSocket connections (wss://) +- Token-based authentication +- Channel-based messaging +- Presence tracking +- Broadcast messaging + +### Client Connection +Clients connect to realtime endpoint: +```javascript +// Browser client (token passed as query parameter) +const ws = new WebSocket(`wss://${host}/ws?token=${jwtToken}`); + +// With bearer header (requires custom WebSocket implementation) +``` + +### Events +Realtime system supports: +- Database change events (INSERT, UPDATE, DELETE) +- Custom broadcast events +- Presence events (join, leave, sync, update) +- Channel subscription management + +## Functions + +### Function Structure +Serverless functions in `src/functions/`: +```typescript +// src/functions/hello/index.ts +export default async function handler(event) { + return { + statusCode: 200, + body: JSON.stringify({ message: 'Hello World!' }) + }; +} +``` + +### Function Management +Functions are managed through the CLI: +```bash +# Create new function +bb function create hello-world + +# Develop locally with hot reload +bb function dev hello-world + +# Bundle for deployment +bb function build hello-world + +# Deploy to cloud +bb function deploy hello-world --sync-env +``` + +### Function Runtime +Functions run in a secure sandbox with: +- Limited execution time (configurable) +- Memory limits +- Network access controls +- Environment variable injection +- Logging capture + +## Development Workflow + +### Getting Started +1. Install dependencies: `bun install` +2. Set up environment variables (copy .env.example to .env) +3. Initialize database: `bun run db:generate` then `bun run db:push` +4. Start development server: `bun run dev` +5.
Open API documentation at `http://localhost:3000/api` + +### Development Commands +```bash +# Start development server with auto-reload +bun run dev + +# Generate Drizzle migrations from schema changes +bun run db:generate + +# Apply migrations to local database +bun run db:push + +# Run test suite +bun run test + +# Build for production +bun run build + +# Start production server +bun run start +``` + +### File Watching +The template includes automatic file watching: +- Schema changes trigger migration generation +- Route changes trigger context regeneration +- Function changes trigger rebuild (in development) + +### Environment Setup +Example `.env` file: +```env +# Environment +NODE_ENV=development +PORT=3000 + +# Database (SQLite default) +DB_PATH=local.db + +# For external databases (uncomment and configure) +# DATABASE_URL="postgresql://user:pass@localhost:5432/mydb" +# TURSO_URL="libsql://user:pass@host:port" +# TURSO_AUTH_TOKEN="your-turso-auth-token" + +# Storage (uncomment and configure when implementing) +# STORAGE_PROVIDER=s3 +# STORAGE_BUCKET=my-bucket +# AWS_ACCESS_KEY_ID=your-key +# AWS_SECRET_ACCESS_KEY=your-secret +# AWS_REGION=us-east-1 + +# Webhooks (uncomment and configure when implementing) +# WEBHOOK_SECRET=your-webhook-signing-secret +``` + +## Testing + +### Test Structure +Tests are located in the `test/` directory: +- `crud.test.ts` - CRUD operation tests +- `health.test.ts` - Health check endpoint tests + +### Running Tests +```bash +# Run all tests +bun run test + +# Run tests in watch mode +bun run test:watch + +# Run specific test file +bun run test ./test/crud.test.ts +``` + +### Test Framework +Uses Bun's built-in test runner: +- Assertions with `expect` +- Mocking capabilities +- Test grouping and filtering +- Coverage reporting + +### Writing Tests +Example test structure: +```typescript +import { describe, it, expect } from 'bun:test'; +import { Hono } from 'hono'; +import { apiRoutes } from '../src/routes/index'; +
+describe('Users API', () => { + it('should create a user', async () => { + const app = new Hono(); + app.route('/', apiRoutes); + + const res = await app.request('/api/users', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ name: 'John Doe', email: 'john@example.com' }) + }); + + expect(res.status).toBe(201); + const data = await res.json(); + expect(data.data).toHaveProperty('id'); + expect(data.data.name).toBe('John Doe'); + }); +}); +``` + +## Deployment + +### Production Build +Create production-optimized build: +```bash +bun run build +``` +Outputs to `dist/` directory with: +- Minified JavaScript +- Tree-shaken dependencies +- Optimized imports +- Production environment variables + +### Deployment Options +The application can be deployed to various platforms: +- **Vercel**: Automatic deployment from Git +- **Netlify**: Continuous deployment from Git +- **AWS**: Elastic Beanstalk, Lambda, or ECS +- **Cloudflare Workers**: Serverless edge deployment +- **Deno Deploy**: Global edge network +- **Traditional VPS**: Any Node.js compatible host + +### Environment Configuration +Production requires setting environment variables: +```bash +# Example for Vercel +vercel env add DATABASE_URL +vercel env add STORAGE_BUCKET +vercel env add AWS_ACCESS_KEY_ID +vercel env add AWS_SECRET_ACCESS_KEY +``` + +### Health Checks +Deployment platforms should check: +- `GET /api/health` - Basic health endpoint +- `GET /` - Root endpoint (serves frontend if applicable) +- WebSocket endpoint (`/ws`) - For realtime applications + +### Scaling Considerations +For high-traffic applications: +1. **Database Connection Pooling**: Configure appropriate pool size +2. **Caching Layer**: Add Redis or similar for frequently accessed data +3. **CDN**: Serve static assets through CDN +4. **Load Balancer**: Distribute traffic across multiple instances +5. **Monitoring**: Implement application performance monitoring +6. 
**Logging**: Centralized logging for debugging and auditing + +## Best Practices + +### Code Organization +1. **Separation of Concerns**: Keep routes, middleware, and utilities separate +2. **Consistent Naming**: Use consistent naming conventions for files and functions +3. **Modular Design**: Break functionality into reusable modules +4. **Type Safety**: Leverage TypeScript for compile-time safety +5. **Error Handling**: Implement consistent error handling throughout + +### Security +1. **Input Validation**: Validate all incoming data with Zod +2. **Authentication**: Protect sensitive endpoints with auth middleware +3. **Authorization**: Check permissions for resource access +4. **Input Sanitization**: Sanitize user input to prevent XSS +5. **Output Encoding**: Encode data appropriately for output context +6. **Environment Secrets**: Never commit secrets to version control + +### Performance +1. **Database Indexes**: Index columns used in WHERE/JOIN/ORDER BY clauses +2. **Query Optimization**: Select only needed columns, avoid SELECT * +3. **Caching**: Cache expensive operations when appropriate +4. **Pagination**: Implement pagination for large result sets +5. **Connection Pooling**: Properly configure database connection pool +6. **Asset Optimization**: Minify and compress static assets + +### Maintainability +1. **Documentation**: Comment complex logic and public APIs +2. **Testing**: Write tests for critical functionality +3. **Logging**: Use structured logging for debugging and monitoring +4. **Configuration**: Externalize configuration to environment variables +5. **Dependencies**: Keep dependencies updated and audited +6. 
**Code Reviews**: Implement peer review process for changes + +## Related Resources + +### BetterBase Documentation +- [Core SDK](./../core/overview.md) - Core functionality +- [Client SDK](./../client/client.md) - Client-side SDK +- [CLI Reference](./../cli/overview.md) - Command-line interface +- [Shared Utilities](./../shared/overview.md) - Shared types and utilities + +### Learning Resources +- [Getting Started Guide](https://betterbase.dev/docs/getting-started) +- [API Reference](https://betterbase.dev/docs/api) +- [Examples](https://betterbase.dev/examples) +- [Tutorials](https://betterbase.dev/tutorials) + +### Community +- [GitHub Repository](https://github.com/betterbase/betterbase) +- [Discord Community](https://discord.gg/betterbase) +- [Twitter](https://twitter.com/betterbase) +- [Stack Overflow](https://stackoverflow.com/questions/tagged/betterbase) + +--- + +*This document is part of the BetterBase documentation suite.* +*Last updated: 2026-03-21* \ No newline at end of file From 4c4ec79f1f93780185f7c21a2546f30846f91fb7 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Tue, 24 Mar 2026 16:37:40 +0000 Subject: [PATCH 2/5] Delete obsolete feature documentation files for GraphQL Subscriptions, Webhook Logs, RLS Testing, and Structured Logging; update README and index files to reflect current feature status and implementation order. 
--- ...EATURE_01_Storage_Image_Transformations.md | 883 ------------------ .../FEATURE_02_Auth_Social_Providers.md | 263 ------ .../FEATURE_03_Migration_Rollback.md | 274 ------ .../FEATURE_04_Functions_Local_Dev.md | 211 ----- .../FEATURE_05_Realtime_Presence.md | 253 ----- .../FEATURE_06_AutoREST_Filtering.md | 214 ----- .../FEATURE_07_GraphQL_Subscriptions.md | 178 ---- new-features-docs/FEATURE_08_Webhook_Logs.md | 270 ------ new-features-docs/FEATURE_09_RLS_Testing.md | 204 ---- .../FEATURE_10_Structured_Logging.md | 624 ------------- new-features-docs/README_START_HERE.md | 83 -- new-features-docs/_INDEX_ALL_FEATURES.md | 71 -- 12 files changed, 3528 deletions(-) delete mode 100644 new-features-docs/FEATURE_01_Storage_Image_Transformations.md delete mode 100644 new-features-docs/FEATURE_02_Auth_Social_Providers.md delete mode 100644 new-features-docs/FEATURE_03_Migration_Rollback.md delete mode 100644 new-features-docs/FEATURE_04_Functions_Local_Dev.md delete mode 100644 new-features-docs/FEATURE_05_Realtime_Presence.md delete mode 100644 new-features-docs/FEATURE_06_AutoREST_Filtering.md delete mode 100644 new-features-docs/FEATURE_07_GraphQL_Subscriptions.md delete mode 100644 new-features-docs/FEATURE_08_Webhook_Logs.md delete mode 100644 new-features-docs/FEATURE_09_RLS_Testing.md delete mode 100644 new-features-docs/FEATURE_10_Structured_Logging.md delete mode 100644 new-features-docs/README_START_HERE.md delete mode 100644 new-features-docs/_INDEX_ALL_FEATURES.md diff --git a/new-features-docs/FEATURE_01_Storage_Image_Transformations.md b/new-features-docs/FEATURE_01_Storage_Image_Transformations.md deleted file mode 100644 index 53380e8..0000000 --- a/new-features-docs/FEATURE_01_Storage_Image_Transformations.md +++ /dev/null @@ -1,883 +0,0 @@ -# Feature 1: Storage Image Transformations - -**Priority**: High (Week 5-7) -**Complexity**: Medium -**Dependencies**: Structured Logging -**Estimated Effort**: 2-3 weeks - ---- - -## Problem Statement - 
-Currently, when users upload images to BetterBase storage, they receive the original file with no optimization. This creates several problems: - -1. **Performance**: Users download full 2-3MB images even when they need thumbnails -2. **Bandwidth Waste**: Mobile users consume unnecessary data -3. **External Dependencies**: Developers bolt on Cloudinary/Imgix ($99+/month) -4. **Manual Work**: Developers pre-generate multiple sizes before upload - -**Example Pain Point**: -```typescript -// User uploads profile photo (2MB, 3000x3000px) -await storage.from('avatars').upload('profile.jpg', file); - -// Frontend needs 100x100 thumbnail -// ❌ Current: Downloads entire 2MB image, resizes in browser (slow!) -``` - ---- - -## Solution Overview - -Implement **on-demand image transformations** using the Sharp library (industry standard used by Vercel, Netlify, Cloudflare). Transformations are applied via URL query parameters and cached in the storage bucket to avoid re-processing. - -**After Implementation**: -```typescript -// Get optimized thumbnail -const url = storage.from('avatars').getPublicUrl('profile.jpg', { - transform: { width: 100, height: 100, format: 'webp' } -}); -// Returns: .../profile.jpg?width=100&height=100&format=webp -// Response: 5KB WebP image (vs 2MB original) -``` - ---- - -## Architecture - -``` -┌─────────────────────────────────────────────────────────────┐ -│ Client Request │ -│ GET /storage/v1/object/public/avatars/user.jpg │ -│ ?width=400&height=300&format=webp&quality=80 │ -└────────────────────────┬────────────────────────────────────┘ - │ - ▼ -┌─────────────────────────────────────────────────────────────┐ -│ Storage Route Handler (Hono) │ -│ ┌────────────────────────────────────────────────────┐ │ -│ │ 1. Parse query params → ImageTransformOptions │ │ -│ │ 2. Generate cache key (MD5 hash of options) │ │ -│ │ 3. 
Build cache path: cache/user_a1b2c3d4.webp │ │ -│ └────────────────────────────────────────────────────┘ │ -└────────────────────────┬────────────────────────────────────┘ - │ - ▼ -┌─────────────────────────────────────────────────────────────┐ -│ Check Cache in S3 Bucket │ -│ ┌─────────────────────┐ │ -│ │ cache/user_a1b2c3d4.webp exists? │ -│ └─────────────────────┘ │ -│ │ │ -│ ├─ YES ──► Return cached file (instant response) │ -│ │ │ -│ └─ NO ──► ┌───────────────────────────────────┐ │ -│ │ 1. Download original from S3 │ │ -│ │ 2. Transform with Sharp │ │ -│ │ 3. Upload transformed to cache/ │ │ -│ │ 4. Return transformed image │ │ -│ └───────────────────────────────────┘ │ -└─────────────────────────────────────────────────────────────┘ -``` - -**Key Design Decisions**: -- **Caching Strategy**: Store transformed images in `cache/` directory within the same bucket (not ephemeral memory) -- **Cache Key**: MD5 hash of transform options ensures deterministic filenames -- **URL Pattern**: Query params on existing storage URLs (backward compatible) -- **Supported Formats**: WebP (modern), JPEG (legacy), PNG (lossless), AVIF (future) - ---- - -## Implementation Steps - -### Step 1: Install Sharp Dependency - -**File**: `packages/core/package.json` - -**Action**: Install Sharp library - -```bash -cd packages/core -bun add sharp -``` - -**Verification**: -```bash -# Check that sharp appears in dependencies -cat package.json | grep sharp -# Should output: "sharp": "^0.33.x" -``` - -**Important Notes**: -- Sharp uses native bindings - must be installed in the package that uses it -- Sharp is platform-specific (will auto-download correct binaries for your OS) -- If deployment fails, ensure Docker/deployment target matches dev architecture - ---- - -### Step 2: Define Transform Types - -**File**: `packages/core/src/storage/types.ts` - -**Action**: Add type definitions at the END of the file (after existing types) - -```typescript -// 
============================================================================ -// IMAGE TRANSFORMATION TYPES -// ============================================================================ - -/** - * Supported image transformation operations - * Applied via URL query parameters - */ -export type ImageTransformOptions = { - /** Resize width in pixels. Maintains aspect ratio if height not specified. Max: 4000 */ - width?: number; - - /** Resize height in pixels. Maintains aspect ratio if width not specified. Max: 4000 */ - height?: number; - - /** Output format. Default: original format */ - format?: 'webp' | 'jpeg' | 'png' | 'avif'; - - /** Quality 1-100. Default: 80 for lossy formats, 100 for PNG */ - quality?: number; - - /** How to resize the image to fit dimensions. Default: 'cover' */ - fit?: 'cover' | 'contain' | 'fill' | 'inside' | 'outside'; -}; - -/** - * Result of image transformation operation - */ -export type TransformResult = { - /** Transformed image buffer */ - buffer: Buffer; - - /** Output format (webp, jpeg, png, etc) */ - format: string; - - /** File size in bytes */ - size: number; - - /** Image width in pixels */ - width: number; - - /** Image height in pixels */ - height: number; -}; - -/** - * Cache key components for transformed images - */ -export type TransformCacheKey = { - /** Original file path */ - path: string; - - /** MD5 hash of transform options (first 8 chars) */ - hash: string; -}; -``` - -**Verification**: -```bash -cd packages/core -bun run build -# Should compile without errors -``` - ---- - -### Step 3: Create Image Transformer Module - -**File**: `packages/core/src/storage/image-transformer.ts` (NEW FILE) - -**Action**: Create this new file with the image transformation engine - -```typescript -import sharp from 'sharp'; -import { createHash } from 'crypto'; -import type { ImageTransformOptions, TransformResult, TransformCacheKey } from './types'; - -/** - * Image transformation engine using Sharp - * - * Handles: - * - 
Resizing with aspect ratio preservation - * - Format conversion (JPEG → WebP, etc) - * - Quality optimization - * - Cache key generation - * - * @example - * const transformer = new ImageTransformer(); - * const result = await transformer.transform(imageBuffer, { - * width: 400, - * format: 'webp', - * quality: 80 - * }); - */ -export class ImageTransformer { - /** - * Transform an image buffer according to options - * - * @param buffer - Input image buffer (JPEG, PNG, WebP, AVIF) - * @param options - Transformation options - * @returns Transformed image with metadata - * @throws Error if transformation fails (invalid format, corrupted image, etc) - */ - async transform( - buffer: Buffer, - options: ImageTransformOptions - ): Promise { - try { - // Initialize Sharp pipeline - let pipeline = sharp(buffer); - - // Step 1: Apply resize if width or height specified - if (options.width || options.height) { - pipeline = pipeline.resize({ - width: options.width, - height: options.height, - fit: options.fit || 'cover', // Default: crop to fit - withoutEnlargement: true, // Don't upscale small images - }); - } - - // Step 2: Apply format conversion - if (options.format) { - switch (options.format) { - case 'webp': - pipeline = pipeline.webp({ quality: options.quality || 80 }); - break; - case 'jpeg': - pipeline = pipeline.jpeg({ quality: options.quality || 80 }); - break; - case 'png': - // PNG is lossless, but we can still optimize compression - pipeline = pipeline.png({ - quality: options.quality || 100, - compressionLevel: 9 - }); - break; - case 'avif': - // AVIF is newer format, better compression than WebP - pipeline = pipeline.avif({ quality: options.quality || 80 }); - break; - } - } - - // Step 3: Execute transformation pipeline - const outputBuffer = await pipeline.toBuffer({ resolveWithObject: true }); - - return { - buffer: outputBuffer.data, - format: outputBuffer.info.format, - size: outputBuffer.info.size, - width: outputBuffer.info.width, - height: 
outputBuffer.info.height, - }; - } catch (error) { - throw new Error( - `Image transformation failed: ${error instanceof Error ? error.message : 'Unknown error'}` - ); - } - } - - /** - * Generate deterministic cache key for transformed image - * - * Format: { path: "user.jpg", hash: "a1b2c3d4" } - * - * Hash is MD5 of JSON-serialized options (first 8 chars for brevity) - * Same options always produce same hash - * - * @param path - Original file path - * @param options - Transform options - * @returns Cache key components - */ - generateCacheKey(path: string, options: ImageTransformOptions): TransformCacheKey { - // Create deterministic options object (sorted keys) - const optionsString = JSON.stringify({ - w: options.width, - h: options.height, - f: options.format, - q: options.quality, - fit: options.fit, - }); - - // MD5 hash (first 8 chars is sufficient for cache key) - const hash = createHash('md5') - .update(optionsString) - .digest('hex') - .substring(0, 8); - - return { path, hash }; - } - - /** - * Build full cache path from cache key - * - * Examples: - * - avatars/user.jpg + hash "a1b2c3d4" + format "webp" - * → cache/avatars/user_a1b2c3d4.webp - * - user.jpg + hash "x9y8z7" + format "jpeg" - * → cache/user_x9y8z7.jpeg - * - * @param cacheKey - Cache key components - * @param format - Output format (webp, jpeg, png, avif) - * @returns Full cache path - */ - buildCachePath(cacheKey: TransformCacheKey, format: string): string { - const pathParts = cacheKey.path.split('/'); - const filename = pathParts.pop() || ''; - - // Remove original extension - const filenameWithoutExt = filename.replace(/\.[^.]+$/, ''); - - const directory = pathParts.join('/'); - - // Build: filename_hash.format - const cachedFilename = `${filenameWithoutExt}_${cacheKey.hash}.${format}`; - - // Prepend cache/ directory - return directory - ? 
`cache/${directory}/${cachedFilename}` - : `cache/${cachedFilename}`; - } - - /** - * Parse transform options from URL query parameters - * - * Validates all inputs to prevent abuse: - * - Width/height must be 1-4000 (prevent memory exhaustion) - * - Format must be whitelisted (prevent arbitrary file execution) - * - Quality must be 1-100 - * - Fit must be valid Sharp option - * - * @param queryParams - URL query parameters object - * @returns Parsed options or null if no valid transforms - * - * @example - * parseTransformOptions({ width: "400", format: "webp" }) - * // Returns: { width: 400, format: "webp" } - * - * parseTransformOptions({ width: "99999" }) - * // Returns: null (width exceeds limit) - */ - parseTransformOptions(queryParams: Record): ImageTransformOptions | null { - const options: ImageTransformOptions = {}; - let hasOptions = false; - - // Parse width - if (queryParams.width) { - const width = parseInt(queryParams.width, 10); - if (!isNaN(width) && width > 0 && width <= 4000) { - options.width = width; - hasOptions = true; - } - } - - // Parse height - if (queryParams.height) { - const height = parseInt(queryParams.height, 10); - if (!isNaN(height) && height > 0 && height <= 4000) { - options.height = height; - hasOptions = true; - } - } - - // Parse format (whitelist only) - if (queryParams.format && ['webp', 'jpeg', 'png', 'avif'].includes(queryParams.format)) { - options.format = queryParams.format as 'webp' | 'jpeg' | 'png' | 'avif'; - hasOptions = true; - } - - // Parse quality - if (queryParams.quality) { - const quality = parseInt(queryParams.quality, 10); - if (!isNaN(quality) && quality >= 1 && quality <= 100) { - options.quality = quality; - hasOptions = true; - } - } - - // Parse fit mode (whitelist only) - if (queryParams.fit && ['cover', 'contain', 'fill', 'inside', 'outside'].includes(queryParams.fit)) { - options.fit = queryParams.fit as ImageTransformOptions['fit']; - hasOptions = true; - } - - return hasOptions ? 
options : null; - } - - /** - * Check if content type is an image that Sharp can process - * - * Excludes: - * - SVG (vector, not raster) - * - Non-image types - * - * @param contentType - MIME type (e.g., "image/jpeg") - * @returns True if processable image - */ - isImage(contentType: string | undefined): boolean { - if (!contentType) return false; - return contentType.startsWith('image/') && !contentType.includes('svg'); - } -} - -/** - * Singleton instance for convenience - * Import this directly: `import { imageTransformer } from './image-transformer'` - */ -export const imageTransformer = new ImageTransformer(); -``` - -**Verification**: -```bash -cd packages/core -bun run build -# Should compile without errors - -# Optional: Test the transformer -bun test src/storage/image-transformer.test.ts -``` - ---- - -### Step 4: Update S3 Storage Adapter - -**File**: `packages/core/src/storage/s3-adapter.ts` - -**Action**: Add transform-aware download method - -**FIND** the existing `download` method (around line 80-120): - -```typescript -async download(bucket: string, path: string): Promise { - try { - const command = new GetObjectCommand({ - Bucket: bucket, - Key: path, - }); - - const response = await this.client.send(command); - // ... existing code to convert stream to buffer - } catch (error) { - // ... existing error handling - } -} -``` - -**ADD** this new method **AFTER** the existing `download` method: - -```typescript -/** - * Download file with optional image transformation - * - * Flow: - * 1. If no transform options → return original file - * 2. If transform options → check cache first - * 3. If cached → return cached version - * 4. 
If not cached → transform original, cache result, return - * - * @param bucket - S3 bucket name - * @param path - File path in bucket - * @param transformOptions - Optional image transformation options - * @returns Buffer and content type - */ -async downloadWithTransform( - bucket: string, - path: string, - transformOptions?: ImageTransformOptions -): Promise<{ buffer: Buffer; contentType: string }> { - // Import transformer (lazy import to avoid circular dependencies) - const { imageTransformer } = await import('./image-transformer'); - - // No transform requested - return original file - if (!transformOptions) { - const buffer = await this.download(bucket, path); - - // Get content type from S3 metadata - const headCommand = new HeadObjectCommand({ Bucket: bucket, Key: path }); - const metadata = await this.client.send(headCommand); - - return { - buffer, - contentType: metadata.ContentType || 'application/octet-stream' - }; - } - - // Generate cache key for this transform - const cacheKey = imageTransformer.generateCacheKey(path, transformOptions); - const outputFormat = transformOptions.format || 'webp'; // Default to WebP - const cachePath = imageTransformer.buildCachePath(cacheKey, outputFormat); - - // Try to get cached version first - try { - const cachedBuffer = await this.download(bucket, cachePath); - const contentType = `image/${outputFormat}`; - return { buffer: cachedBuffer, contentType }; - } catch (error) { - // Cache miss - continue to transform - } - - // Download original file - const originalBuffer = await this.download(bucket, path); - - // Transform image - const transformed = await imageTransformer.transform(originalBuffer, transformOptions); - - // Upload transformed image to cache (fire-and-forget, don't wait) - // If upload fails, we still return the transformed image - this.upload(bucket, cachePath, transformed.buffer, { - contentType: `image/${transformed.format}`, - }).catch((err) => { - console.error('Failed to cache transformed 
image:', err); - }); - - return { - buffer: transformed.buffer, - contentType: `image/${transformed.format}`, - }; -} -``` - -**Verification**: -```bash -cd packages/core -bun run build -# Should compile without errors -``` - ---- - -### Step 5: Create Storage Routes (or Update Existing) - -**File**: `apps/test-project/src/routes/storage.ts` (create if doesn't exist) - -**Action**: Create Hono routes for storage access with transform support - -```typescript -import { Hono } from 'hono'; -import { storage } from '../lib/storage'; // Adjust import path -import { imageTransformer } from '@betterbase/core/storage/image-transformer'; - -const app = new Hono(); - -/** - * GET /storage/v1/object/public/:bucket/* - * Public file download with optional image transformations - * - * Examples: - * - /storage/v1/object/public/avatars/user.jpg - * → Returns original file - * - * - /storage/v1/object/public/avatars/user.jpg?width=400&format=webp - * → Returns 400px wide WebP image - * - * - /storage/v1/object/public/avatars/user.jpg?width=100&height=100&fit=cover - * → Returns 100x100 cropped thumbnail - */ -app.get('/storage/v1/object/public/:bucket/*', async (c) => { - const bucket = c.req.param('bucket'); - const path = c.req.param('*'); // Wildcard captures rest of path - const queryParams = c.req.query(); - - try { - // Parse transform options from query params - const transformOptions = imageTransformer.parseTransformOptions(queryParams); - - // Get bucket client - const bucketClient = storage.from(bucket); - - // Download with optional transform - // Note: This assumes your storage client has the adapter exposed - // You may need to adjust based on your actual storage implementation - const result = await bucketClient.adapter.downloadWithTransform( - bucket, - path, - transformOptions || undefined - ); - - // Set response headers - c.header('Content-Type', result.contentType); - c.header('Cache-Control', 'public, max-age=31536000, immutable'); // Cache for 1 year - 
c.header('Content-Length', String(result.buffer.length)); - - return c.body(result.buffer); - } catch (error) { - console.error('Storage download error:', error); - return c.json({ error: 'File not found' }, 404); - } -}); - -/** - * GET /storage/v1/object/authenticated/:bucket/* - * Authenticated file download with optional transforms - * - * TODO: Add auth middleware to verify user has access - */ -app.get('/storage/v1/object/authenticated/:bucket/*', async (c) => { - // For now, return 501 Not Implemented - // You'll add auth middleware here later - return c.json({ error: 'Authenticated downloads not yet implemented' }, 501); -}); - -export default app; -``` - -**Then register this route in your main app**: - -**File**: `apps/test-project/src/routes/index.ts` - -```typescript -import storageRoutes from './storage'; - -// ... existing routes ... - -// Mount storage routes -app.route('/', storageRoutes); -``` - -**Verification**: -```bash -cd apps/test-project -bun run dev -# Server should start without errors - -# Test in browser or curl: -# 1. Upload an image first -# 2. Access: http://localhost:3000/storage/v1/object/public/bucket/test.jpg -# 3. 
Access with transform: http://localhost:3000/storage/v1/object/public/bucket/test.jpg?width=400&format=webp -``` - ---- - -### Step 6: Update Client SDK - -**File**: `packages/client/src/storage.ts` - -**Action**: Add transform options to `getPublicUrl` method - -**FIND** the `StorageBucketClient` class and the `getPublicUrl` method: - -```typescript -getPublicUrl(path: string): PublicUrlResult { - const publicUrl = `${this.baseUrl}/storage/v1/object/public/${this.bucketId}/${path}`; - return { data: { publicUrl }, error: null }; -} -``` - -**REPLACE** with this enhanced version: - -```typescript -/** - * Get public URL for a file with optional image transformations - * - * @param path - File path in bucket - * @param options - Optional transform options - * @returns Public URL result - * - * @example - * // Original image - * bucket.getPublicUrl('user.jpg') - * // Returns: { data: { publicUrl: ".../user.jpg" }, error: null } - * - * // Transformed image - * bucket.getPublicUrl('user.jpg', { - * transform: { width: 400, format: 'webp' } - * }) - * // Returns: { data: { publicUrl: ".../user.jpg?width=400&format=webp" }, error: null } - */ -getPublicUrl( - path: string, - options?: { - transform?: { - width?: number; - height?: number; - format?: 'webp' | 'jpeg' | 'png' | 'avif'; - quality?: number; - fit?: 'cover' | 'contain' | 'fill' | 'inside' | 'outside'; - }; - } -): PublicUrlResult { - const baseUrl = `${this.baseUrl}/storage/v1/object/public/${this.bucketId}/${path}`; - - // No transforms - return base URL - if (!options?.transform) { - return { data: { publicUrl: baseUrl }, error: null }; - } - - // Build query string from transform options - const params = new URLSearchParams(); - - if (options.transform.width) { - params.set('width', String(options.transform.width)); - } - - if (options.transform.height) { - params.set('height', String(options.transform.height)); - } - - if (options.transform.format) { - params.set('format', options.transform.format); - } - 
- if (options.transform.quality) { - params.set('quality', String(options.transform.quality)); - } - - if (options.transform.fit) { - params.set('fit', options.transform.fit); - } - - const urlWithTransforms = `${baseUrl}?${params.toString()}`; - - return { data: { publicUrl: urlWithTransforms }, error: null }; -} -``` - -**Verification**: -```bash -cd packages/client -bun run build -# Should compile without errors -``` - ---- - -## Testing - -### Manual Testing Checklist - -1. **Upload test image**: -```bash -# Upload a large image (e.g., 2MB JPEG) -curl -X POST http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg \ - -F "file=@large-image.jpg" -``` - -2. **Test original image** (no transform): -```bash -curl http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg \ - --output original.jpg - -# Check file size -ls -lh original.jpg -# Should be ~2MB -``` - -3. **Test width-only transform**: -```bash -curl "http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg?width=400" \ - --output resized-400.jpg - -ls -lh resized-400.jpg -# Should be significantly smaller -``` - -4. **Test WebP conversion**: -```bash -curl "http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg?format=webp" \ - --output converted.webp - -file converted.webp -# Should output: "converted.webp: RIFF (little-endian) data, Web/P image" -``` - -5. **Test combined (resize + format)**: -```bash -curl "http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg?width=400&height=300&format=webp&quality=80" \ - --output optimized.webp - -ls -lh optimized.webp -# Should be very small (e.g., 20-50KB) -``` - -6. 
**Test caching** (performance): -```bash -# First request (transform + cache) -time curl "http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg?width=400&format=webp" > /dev/null - -# Second request (cached) -time curl "http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg?width=400&format=webp" > /dev/null - -# Second request should be significantly faster -``` - -7. **Test invalid params** (should gracefully ignore): -```bash -# Invalid width (exceeds limit) -curl "http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg?width=99999" -# Should return original image or error - -# Invalid format -curl "http://localhost:3000/storage/v1/object/public/test-bucket/large.jpg?format=exe" -# Should ignore invalid format, return original -``` - ---- - -## Acceptance Criteria - -- [ ] Sharp dependency installed in `packages/core/package.json` -- [ ] Transform types defined in `packages/core/src/storage/types.ts` -- [ ] `ImageTransformer` class created in `packages/core/src/storage/image-transformer.ts` -- [ ] S3 adapter has `downloadWithTransform` method -- [ ] Storage routes handle query params: `?width=X&height=Y&format=F&quality=Q&fit=M` -- [ ] Transformed images cached in `cache/` directory within bucket -- [ ] Cache uses deterministic MD5 hash keys -- [ ] Client SDK `getPublicUrl()` accepts optional `transform` object -- [ ] Test: Upload 2MB JPEG → request `?width=400&format=webp` → receive ~50KB WebP -- [ ] Test: Second request for same transform returns cached version (instant) -- [ ] Test: Invalid params (width=99999) ignored gracefully -- [ ] Test: Non-image files return original (no transformation) -- [ ] Test: SVG files return original (Sharp doesn't process SVG) - ---- - -## Common Issues & Solutions - -### Issue: "Sharp installation failed" -**Solution**: -```bash -rm -rf node_modules -bun install --force -``` - -### Issue: "Image transformation timeout" -**Cause**: Very large images (>10MB) -**Solution**: Add timeout 
to Sharp pipeline or reject large files upfront - -### Issue: "Cache directory not created" -**Cause**: S3 doesn't have directory concept -**Solution**: Verify first upload to `cache/` creates the "virtual directory" - -### Issue: "Transformed images larger than original" -**Cause**: PNG quality too high -**Solution**: Use WebP or JPEG for photos, reserve PNG for graphics/logos - ---- - -## Performance Notes - -- **First Request**: Transform time ~100-500ms depending on image size -- **Cached Requests**: <10ms (served directly from S3) -- **Memory Usage**: Sharp uses ~100MB per concurrent transformation -- **Recommendation**: Limit concurrent transformations or add queue for high traffic - ---- - -## Next Steps After Implementation - -1. **Add CDN** (optional): CloudFront/Cloudflare in front of storage URLs -2. **Monitoring**: Log slow transforms (>500ms) for optimization -3. **Cleanup**: Add cron job to delete old cached images (>30 days) -4. **Presets**: Add common size presets (`thumbnail`, `small`, `medium`, `large`) - ---- - -**Feature Status**: Ready for implementation -**Estimated Time**: 2-3 weeks -**Start Date**: Week 5 (after Logging and Migrations are complete) diff --git a/new-features-docs/FEATURE_02_Auth_Social_Providers.md b/new-features-docs/FEATURE_02_Auth_Social_Providers.md deleted file mode 100644 index fe4fa27..0000000 --- a/new-features-docs/FEATURE_02_Auth_Social_Providers.md +++ /dev/null @@ -1,263 +0,0 @@ -# Feature 2: Auth Social Providers Setup - -**Priority**: Medium (Week 8-9) -**Complexity**: Low -**Dependencies**: None (uses existing BetterAuth) -**Estimated Effort**: 2 weeks - ---- - -## Problem Statement - -BetterAuth supports OAuth providers (Google, GitHub, Discord, etc.) but requires manual configuration: -1. Read BetterAuth documentation -2. Create OAuth apps on provider platforms -3. Manually edit `src/auth/index.ts` -4. Set environment variables -5. 
Hope you didn't make a typo - -**This is error-prone and time-consuming.** - ---- - -## Solution - -CLI command `bb auth add-provider ` that: -- Auto-generates BetterAuth configuration -- Adds environment variables to `.env` -- Prints OAuth app setup instructions -- Validates provider name - ---- - -## Implementation Steps - -### Step 1: Create Provider Templates - -**File**: `packages/cli/src/commands/auth-providers.ts` (NEW FILE) - -```typescript -export type ProviderTemplate = { - name: string; - displayName: string; - envVars: { key: string; description: string }[]; - configCode: string; - setupInstructions: string; - docsUrl: string; -}; - -export const PROVIDER_TEMPLATES: Record = { - google: { - name: 'google', - displayName: 'Google', - envVars: [ - { key: 'GOOGLE_CLIENT_ID', description: 'OAuth Client ID' }, - { key: 'GOOGLE_CLIENT_SECRET', description: 'OAuth Client Secret' }, - ], - configCode: ` google: { - clientId: process.env.GOOGLE_CLIENT_ID!, - clientSecret: process.env.GOOGLE_CLIENT_SECRET!, - redirectURI: process.env.AUTH_URL + '/api/auth/callback/google', - },`, - setupInstructions: ` -1. Go to: https://console.cloud.google.com/ -2. Create new project or select existing -3. APIs & Services > Credentials -4. Create OAuth 2.0 Client ID -5. Add redirect: http://localhost:3000/api/auth/callback/google -6. Copy Client ID and Secret to .env -`, - docsUrl: 'https://developers.google.com/identity/protocols/oauth2', - }, - - github: { - name: 'github', - displayName: 'GitHub', - envVars: [ - { key: 'GITHUB_CLIENT_ID', description: 'OAuth App Client ID' }, - { key: 'GITHUB_CLIENT_SECRET', description: 'OAuth App Client Secret' }, - ], - configCode: ` github: { - clientId: process.env.GITHUB_CLIENT_ID!, - clientSecret: process.env.GITHUB_CLIENT_SECRET!, - redirectURI: process.env.AUTH_URL + '/api/auth/callback/github', - },`, - setupInstructions: ` -1. Go to: https://github.com/settings/developers -2. Click "New OAuth App" -3. 
Homepage: http://localhost:3000 -4. Callback: http://localhost:3000/api/auth/callback/github -5. Copy Client ID and Secret to .env -`, - docsUrl: 'https://docs.github.com/en/developers/apps', - }, - - // Add discord, apple, microsoft, twitter, facebook similarly -}; - -export function getProviderTemplate(name: string): ProviderTemplate | null { - return PROVIDER_TEMPLATES[name.toLowerCase()] || null; -} - -export function getAvailableProviders(): string[] { - return Object.keys(PROVIDER_TEMPLATES); -} -``` - ---- - -### Step 2: Create Add Provider Command - -**File**: `packages/cli/src/commands/auth.ts` - -**ADD** this function: - -```typescript -import { getProviderTemplate, getAvailableProviders } from './auth-providers'; -import { promises as fs } from 'fs'; -import path from 'path'; - -export async function runAuthAddProviderCommand( - projectRoot: string, - providerName: string -): Promise { - const template = getProviderTemplate(providerName); - - if (!template) { - logger.error(`Unknown provider: ${providerName}`); - logger.info(`Available: ${getAvailableProviders().join(', ')}`); - process.exit(1); - } - - logger.info(`Adding ${template.displayName} OAuth provider...`); - - // Check if auth file exists - const authFile = path.join(projectRoot, 'src', 'auth', 'index.ts'); - let authContent = await fs.readFile(authFile, 'utf-8'); - - // Check if provider already configured - if (authContent.includes(`${template.name}:`)) { - logger.warn(`${template.displayName} already configured`); - return; - } - - // Find socialProviders section - const socialRegex = /socialProviders:\s*{([^}]*)}/s; - const match = authContent.match(socialRegex); - - if (match) { - // Add to existing socialProviders - const existing = match[1]; - const newContent = existing.trim() - ? 
`${existing.trimEnd()},\n${template.configCode}` - : template.configCode; - - authContent = authContent.replace( - socialRegex, - `socialProviders: {\n${newContent}\n }` - ); - } else { - // Create socialProviders section - authContent = authContent.replace( - /betterAuth\(\s*{/, - `betterAuth({\n socialProviders: {\n${template.configCode}\n },` - ); - } - - // Write updated file - await fs.writeFile(authFile, authContent, 'utf-8'); - logger.success(`✅ Added ${template.displayName} to ${authFile}`); - - // Add env vars - const envFile = path.join(projectRoot, '.env'); - let envContent = ''; - try { - envContent = await fs.readFile(envFile, 'utf-8'); - } catch {} - - const envVarsToAdd: string[] = []; - for (const envVar of template.envVars) { - if (!envContent.includes(envVar.key)) { - envVarsToAdd.push(`${envVar.key}=""`); - } - } - - if (envVarsToAdd.length > 0) { - const newEnv = envContent.trim() - ? `${envContent}\n\n# ${template.displayName} OAuth\n${envVarsToAdd.join('\n')}\n` - : `# ${template.displayName} OAuth\n${envVarsToAdd.join('\n')}\n`; - - await fs.writeFile(envFile, newEnv, 'utf-8'); - logger.success(`✅ Added env vars to .env`); - } - - // Print setup instructions - console.log('\n' + '='.repeat(60)); - console.log(template.setupInstructions); - console.log('='.repeat(60)); - console.log(`\nDocs: ${template.docsUrl}\n`); -} -``` - ---- - -### Step 3: Register CLI Command - -**File**: `packages/cli/src/index.ts` - -```typescript -import { runAuthAddProviderCommand } from './commands/auth'; - -program - .command('auth:add-provider <provider>') - .description('Add OAuth provider (google, github, discord, apple, microsoft, twitter, facebook)') - .action(async (provider: string) => { - await runAuthAddProviderCommand(process.cwd(), provider); - }); -``` - ---- - -## Testing - -```bash -# Test adding Google -bb auth:add-provider google - -# Verify config added -cat src/auth/index.ts | grep "google:" - -# Verify env vars added -cat .env | grep GOOGLE - -# Test 
duplicate detection -bb auth:add-provider google -# Should warn "already configured" - -# Test invalid provider -bb auth:add-provider invalid -# Should show available providers -``` - ---- - -## Acceptance Criteria - -- [ ] Provider templates for Google, GitHub, Discord, Apple, Microsoft, Twitter, Facebook -- [ ] `bb auth:add-provider ` command works -- [ ] Auto-injects config into src/auth/index.ts -- [ ] Adds env vars to .env -- [ ] Prints setup instructions -- [ ] Detects if provider already configured -- [ ] Shows available providers if invalid name - ---- - -**Priority Order** (implement in this order): -1. Google (most used) -2. GitHub (dev tools) -3. Discord (gaming/community) -4. Apple (iOS requirement) -5. Microsoft (enterprise) -6. Twitter (social apps) -7. Facebook (declining but still used) diff --git a/new-features-docs/FEATURE_03_Migration_Rollback.md b/new-features-docs/FEATURE_03_Migration_Rollback.md deleted file mode 100644 index 82b44c9..0000000 --- a/new-features-docs/FEATURE_03_Migration_Rollback.md +++ /dev/null @@ -1,274 +0,0 @@ -# Feature 3: Database Migration Rollback - -**Priority**: High (Week 3-4) -**Complexity**: Medium -**Dependencies**: Structured Logging -**Estimated Effort**: 1-2 weeks - ---- - -## Problem Statement - -Drizzle generates migrations but provides NO rollback mechanism. 
If a migration breaks production: -- No safe way to undo -- Manual SQL intervention required -- Risk of data loss -- Downtime while fixing - ---- - -## Solution - -Implement up/down migration pairs with tracking table: -- `0001_initial_up.sql` + `0001_initial_down.sql` -- `_betterbase_migrations` table tracks applied migrations -- `bb migrate:rollback` command safely reverts - ---- - -## Implementation Steps - -### Step 1: Create Migration Tracking Schema - -**File**: `packages/cli/src/commands/migrate-schema.sql` (NEW FILE) - -```sql -CREATE TABLE IF NOT EXISTS _betterbase_migrations ( - id SERIAL PRIMARY KEY, - name TEXT NOT NULL UNIQUE, - applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - checksum TEXT NOT NULL -); - -CREATE INDEX IF NOT EXISTS idx_migrations_name - ON _betterbase_migrations(name); -``` - ---- - -### Step 2: Create Migration Utilities - -**File**: `packages/cli/src/commands/migrate-utils.ts` (NEW FILE) - -```typescript -import { createHash } from 'crypto'; -import { promises as fs } from 'fs'; -import path from 'path'; - -export type MigrationFile = { - id: string; - name: string; - upPath: string; - downPath: string | null; - upSql: string; - downSql: string | null; - checksum: string; -}; - -export type AppliedMigration = { - id: number; - name: string; - applied_at: Date; - checksum: string; -}; - -export function calculateChecksum(sql: string): string { - return createHash('sha256').update(sql.trim()).digest('hex'); -} - -export function parseMigrationFilename(filename: string) { - const match = filename.match(/^(\d+)_(.+)_(up|down)\.sql$/); - if (!match) return null; - - return { - id: match[1], - name: `${match[1]}_${match[2]}`, - direction: match[3] as 'up' | 'down', - }; -} - -export async function loadMigrationFiles(dir: string): Promise { - const files = await fs.readdir(dir); - const sqlFiles = files.filter(f => f.endsWith('.sql')); - - const migrationMap = new Map>(); - - for (const file of sqlFiles) { - const parsed = 
parseMigrationFilename(file); - if (!parsed) continue; - - const filePath = path.join(dir, file); - const sql = await fs.readFile(filePath, 'utf-8'); - - if (!migrationMap.has(parsed.id)) { - migrationMap.set(parsed.id, { id: parsed.id, name: parsed.name }); - } - - const migration = migrationMap.get(parsed.id)!; - - if (parsed.direction === 'up') { - migration.upPath = filePath; - migration.upSql = sql; - migration.checksum = calculateChecksum(sql); - } else { - migration.downPath = filePath; - migration.downSql = sql; - } - } - - const migrations: MigrationFile[] = []; - for (const [id, m] of migrationMap) { - if (!m.upPath || !m.upSql) { - throw new Error(`Migration ${id} missing up file`); - } - - migrations.push({ - id: m.id, - name: m.name!, - upPath: m.upPath, - downPath: m.downPath || null, - upSql: m.upSql, - downSql: m.downSql || null, - checksum: m.checksum!, - }); - } - - migrations.sort((a, b) => a.id.localeCompare(b.id)); - return migrations; -} - -export async function getAppliedMigrations(db: any): Promise { - // Create tracking table if doesn't exist - await db.execute(` - CREATE TABLE IF NOT EXISTS _betterbase_migrations ( - id SERIAL PRIMARY KEY, - name TEXT NOT NULL UNIQUE, - applied_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - checksum TEXT NOT NULL - ); - `); - - const result = await db.execute( - 'SELECT * FROM _betterbase_migrations ORDER BY id ASC' - ); - return result.rows as AppliedMigration[]; -} -``` - ---- - -### Step 3: Implement Rollback Command - -**File**: `packages/cli/src/commands/migrate.ts` - -**ADD**: - -```typescript -export async function runMigrateRollbackCommand( - projectRoot: string, - options: { steps?: number } = {} -): Promise { - const { steps = 1 } = options; - - logger.info(`Rolling back last ${steps} migration(s)...`); - - const db = await loadDatabaseConnection(projectRoot); - const migrationsDir = path.join(projectRoot, 'migrations'); - const allMigrations = await loadMigrationFiles(migrationsDir); - const 
applied = await getAppliedMigrations(db); - - if (applied.length === 0) { - logger.warn('No migrations to rollback'); - return; - } - - let rolledBack = 0; - for (let i = 0; i < steps; i++) { - const lastMigration = applied[applied.length - 1]; - if (!lastMigration) break; - - const migration = allMigrations.find(m => m.name === lastMigration.name); - - if (!migration?.downSql) { - logger.error(`Migration ${lastMigration.name} has no down file`); - logger.info(`Create ${lastMigration.name}_down.sql to enable rollback`); - process.exit(1); - } - - logger.info(`Rolling back: ${migration.name}`); - - try { - await db.execute(migration.downSql); - await db.execute({ - sql: 'DELETE FROM _betterbase_migrations WHERE name = ?', - args: [migration.name], - }); - - logger.success(`✅ Rolled back: ${migration.name}`); - rolledBack++; - applied.pop(); - } catch (error) { - logger.error(`Failed to rollback: ${error}`); - process.exit(1); - } - } - - logger.success(`✅ Rolled back ${rolledBack} migration(s)`); -} - -export async function runMigrateHistoryCommand(projectRoot: string) { - const db = await loadDatabaseConnection(projectRoot); - const applied = await getAppliedMigrations(db); - - if (applied.length === 0) { - logger.info('No migrations applied'); - return; - } - - console.log('\nMigration History:\n'); - console.log('ID | Name | Applied At'); - console.log('---|-------------------------|-------------------'); - - for (const m of applied) { - console.log(`${m.id.toString().padEnd(2)} | ${m.name.padEnd(23)} | ${m.applied_at}`); - } -} -``` - ---- - -### Step 4: Register Commands - -**File**: `packages/cli/src/index.ts` - -```typescript -program - .command('migrate:rollback') - .description('Rollback last migration') - .option('-s, --steps ', 'Number of migrations', '1') - .action(async (options) => { - await runMigrateRollbackCommand(process.cwd(), { - steps: parseInt(options.steps), - }); - }); - -program - .command('migrate:history') - .description('Show migration 
history') - .action(async () => { - await runMigrateHistoryCommand(process.cwd()); - }); -``` - ---- - -## Acceptance Criteria - -- [ ] Migrations tracking table created -- [ ] `bb migrate` records migrations in tracking table -- [ ] `bb migrate:rollback` reverts last migration -- [ ] `bb migrate:rollback --steps=3` reverts last 3 -- [ ] `bb migrate:history` shows applied migrations -- [ ] Migration files: `0001_name_up.sql` + `0001_name_down.sql` -- [ ] Error if down file missing -- [ ] Test: Apply → rollback → verify DB state restored diff --git a/new-features-docs/FEATURE_04_Functions_Local_Dev.md b/new-features-docs/FEATURE_04_Functions_Local_Dev.md deleted file mode 100644 index 7e28cf5..0000000 --- a/new-features-docs/FEATURE_04_Functions_Local_Dev.md +++ /dev/null @@ -1,211 +0,0 @@ -# Feature 4: Edge Functions Local Dev Server - -**Priority**: Medium (Week 10) -**Complexity**: Medium -**Dependencies**: Structured Logging -**Estimated Effort**: 1 week - ---- - -## Problem Statement - -Developers must deploy functions to test them (`bb function deploy`). 
This is: -- **Slow**: Deploy takes 30-60 seconds -- **Expensive**: Burns cloud credits during development -- **Frustrating**: Breaks fast feedback loop - ---- - -## Solution - -Run functions locally with hot reload: -- Functions accessible at `http://localhost:3000/functions/:name` -- File changes trigger automatic reload -- Environment variables injected from `.env` -- Same port as main app (no CORS issues) - ---- - -## Implementation - -### Step 1: Create Local Runtime - -**File**: `packages/core/src/functions/local-runtime.ts` (NEW FILE) - -```typescript -import type { Context } from 'hono'; -import { watch } from 'fs'; -import path from 'path'; - -export type FunctionContext = { - request: Request; - env: Record; -}; - -export type FunctionHandler = (ctx: FunctionContext) => Promise | Response; - -type LoadedFunction = { - name: string; - handler: FunctionHandler; - lastModified: number; -}; - -export class LocalFunctionsRuntime { - private functions = new Map(); - private functionsDir: string; - private envVars: Record; - - constructor(functionsDir: string, envVars: Record = {}) { - this.functionsDir = functionsDir; - this.envVars = envVars; - } - - async loadFunction(name: string): Promise { - const functionPath = path.join(this.functionsDir, name, 'index.ts'); - const stat = await Bun.file(functionPath).stat(); - - if (!stat) { - throw new Error(`Function not found: ${name}`); - } - - // Clear cache for hot reload - delete require.cache[functionPath]; - - const module = await import(functionPath); - - if (!module.default || typeof module.default !== 'function') { - throw new Error(`Function ${name} must export default function`); - } - - const loaded: LoadedFunction = { - name, - handler: module.default, - lastModified: stat.mtime.getTime(), - }; - - this.functions.set(name, loaded); - return loaded; - } - - async executeFunction(name: string, request: Request): Promise { - let func = this.functions.get(name); - - if (!func) { - func = await 
this.loadFunction(name); - } else { - // Check if modified (hot reload) - const functionPath = path.join(this.functionsDir, name, 'index.ts'); - const stat = await Bun.file(functionPath).stat(); - - if (stat && stat.mtime.getTime() > func.lastModified) { - console.log(`[Functions] Hot reloading: ${name}`); - func = await this.loadFunction(name); - } - } - - const ctx: FunctionContext = { - request, - env: this.envVars, - }; - - try { - return await func.handler(ctx); - } catch (error) { - console.error(`[Functions] Error: ${name}`, error); - return new Response( - JSON.stringify({ error: 'Internal Server Error' }), - { status: 500, headers: { 'Content-Type': 'application/json' } } - ); - } - } - - startWatcher(): void { - watch(this.functionsDir, { recursive: true }, (event, filename) => { - if (filename && filename.endsWith('.ts')) { - const functionName = filename.split('/')[0]; - console.log(`[Functions] File changed: ${filename}`); - this.functions.delete(functionName); - } - }); - - console.log(`[Functions] Watching ${this.functionsDir}`); - } -} - -export function createFunctionsMiddleware(runtime: LocalFunctionsRuntime) { - return async (c: Context) => { - const functionName = c.req.param('name'); - - if (!functionName) { - return c.json({ error: 'Function name required' }, 400); - } - - try { - const response = await runtime.executeFunction(functionName, c.req.raw); - return response; - } catch (error) { - if (error instanceof Error && error.message.includes('not found')) { - return c.json({ error: `Function not found: ${functionName}` }, 404); - } - throw error; - } - }; -} -``` - ---- - -### Step 2: Integrate with Dev Command - -**File**: `packages/cli/src/commands/dev.ts` - -**MODIFY**: - -```typescript -import { LocalFunctionsRuntime, createFunctionsMiddleware } from '@betterbase/core/functions/local-runtime'; - -export async function runDevCommand( - projectRoot: string, - options: { port?: number; functions?: boolean } = {} -): Promise<() => void> { - 
const { port = 3000, functions = true } = options; - - logger.info('Starting development server...'); - - // Load env vars - const envVars = loadEnvVars(projectRoot); - - // Start functions runtime - let functionsRuntime: LocalFunctionsRuntime | null = null; - if (functions) { - const functionsDir = path.join(projectRoot, 'src', 'functions'); - try { - await fs.access(functionsDir); - functionsRuntime = new LocalFunctionsRuntime(functionsDir, envVars); - functionsRuntime.startWatcher(); - logger.success('✅ Functions runtime started'); - } catch { - logger.warn('No src/functions directory'); - } - } - - // Add functions routes - if (functionsRuntime) { - app.all('/functions/:name', createFunctionsMiddleware(functionsRuntime)); - } - - // ... rest of dev server setup -} -``` - ---- - -## Acceptance Criteria - -- [ ] Local functions runtime created -- [ ] `bb dev` starts functions runtime -- [ ] Functions at `http://localhost:3000/functions/:name` -- [ ] Hot reload on file save -- [ ] Env vars from `.env` injected -- [ ] Errors return 500, don't crash server -- [ ] Test: Create function, call locally, modify, call again diff --git a/new-features-docs/FEATURE_05_Realtime_Presence.md b/new-features-docs/FEATURE_05_Realtime_Presence.md deleted file mode 100644 index 3aa1235..0000000 --- a/new-features-docs/FEATURE_05_Realtime_Presence.md +++ /dev/null @@ -1,253 +0,0 @@ -# Feature 5: Realtime Presence & Broadcast - -**Priority**: High (Week 11-12) -**Complexity**: Medium -**Dependencies**: Structured Logging -**Estimated Effort**: 2-3 weeks - ---- - -## Problem Statement - -Current realtime only has database subscriptions. 
Apps need: -- **Presence**: Who's online (chat, collaborative editors) -- **Broadcast**: Send messages between clients (cursor positions) - ---- - -## Solution - -Channel-based presence tracking and message broadcasting: -- Join channel: `channel.subscribe({ user_id: "123" })` -- Track presence: `channel.track({ status: "online" })` -- Broadcast: `channel.broadcast("cursor_move", { x: 100, y: 200 })` -- 30-second heartbeat cleans stale connections - ---- - -## Implementation - -### Step 1: Create Channel Manager - -**File**: `packages/core/src/realtime/channel-manager.ts` (NEW FILE) - -```typescript -export type PresenceState = { - user_id: string; - online_at: string; - [key: string]: any; -}; - -type Connection = { - id: string; - ws: WebSocket; - user_id?: string; - channels: Set; - presence: Map; -}; - -type Channel = { - name: string; - connections: Set; - presence: Map; -}; - -export class ChannelManager { - private channels = new Map(); - private connections = new Map(); - - registerConnection(id: string, ws: WebSocket): Connection { - const conn: Connection = { - id, - ws, - channels: new Set(), - presence: new Map(), - }; - this.connections.set(id, conn); - return conn; - } - - unregisterConnection(id: string): void { - const conn = this.connections.get(id); - if (!conn) return; - - for (const channelName of conn.channels) { - this.leaveChannel(id, channelName); - } - - this.connections.delete(id); - } - - joinChannel( - connId: string, - channelName: string, - options: { user_id?: string; presence?: Record } = {} - ): void { - const conn = this.connections.get(connId); - if (!conn) throw new Error('Connection not found'); - - let channel = this.channels.get(channelName); - if (!channel) { - channel = { - name: channelName, - connections: new Set(), - presence: new Map(), - }; - this.channels.set(channelName, channel); - } - - channel.connections.add(conn); - conn.channels.add(channelName); - - if (options.user_id) { - conn.user_id = options.user_id; - - 
const state: PresenceState = { - user_id: options.user_id, - online_at: new Date().toISOString(), - ...options.presence, - }; - - channel.presence.set(options.user_id, state); - conn.presence.set(channelName, state); - - this.broadcastToChannel(channelName, { - type: 'presence', - event: 'join', - payload: state, - }, connId); - } - - // Send initial presence sync - const presenceList = Array.from(channel.presence.values()); - this.sendToConnection(connId, { - type: 'presence', - event: 'sync', - payload: presenceList, - }); - } - - leaveChannel(connId: string, channelName: string): void { - const conn = this.connections.get(connId); - const channel = this.channels.get(channelName); - - if (!conn || !channel) return; - - channel.connections.delete(conn); - conn.channels.delete(channelName); - - if (conn.user_id && channel.presence.has(conn.user_id)) { - const state = channel.presence.get(conn.user_id)!; - channel.presence.delete(conn.user_id); - conn.presence.delete(channelName); - - this.broadcastToChannel(channelName, { - type: 'presence', - event: 'leave', - payload: state, - }, connId); - } - - if (channel.connections.size === 0) { - this.channels.delete(channelName); - } - } - - broadcastToChannel( - channelName: string, - message: any, - excludeConnId?: string - ): void { - const channel = this.channels.get(channelName); - if (!channel) return; - - const msgStr = JSON.stringify(message); - - for (const conn of channel.connections) { - if (excludeConnId && conn.id === excludeConnId) continue; - - if (conn.ws.readyState === WebSocket.OPEN) { - conn.ws.send(msgStr); - } - } - } - - startHeartbeat(interval = 30000): NodeJS.Timeout { - return setInterval(() => { - for (const [id, conn] of this.connections) { - if (conn.ws.readyState !== WebSocket.OPEN) { - this.unregisterConnection(id); - } - } - }, interval); - } -} -``` - ---- - -### Step 2: Update Client SDK - -**File**: `packages/client/src/realtime.ts` - -**ADD**: - -```typescript -channel(channelName: 
string) { - return { - subscribe: (options?: { user_id?: string; presence?: Record }) => { - this.send({ - type: 'subscribe', - channel: channelName, - payload: options, - }); - - return { - unsubscribe: () => { - this.send({ type: 'unsubscribe', channel: channelName }); - }, - - broadcast: (event: string, data: any) => { - this.send({ - type: 'broadcast', - channel: channelName, - payload: { event, data }, - }); - }, - - track: (state: Record) => { - this.send({ - type: 'presence', - channel: channelName, - payload: { action: 'update', state }, - }); - }, - - onPresence: (callback: (event: any) => void) => { - this.on('presence', (data) => { - if (data.channel === channelName) callback(data); - }); - }, - - onBroadcast: (callback: (event: string, data: any) => void) => { - this.on('broadcast', (data) => { - if (data.channel === channelName) callback(data.event, data.payload); - }); - }, - }; - }, - }; -} -``` - ---- - -## Acceptance Criteria - -- [ ] Channel manager with presence tracking -- [ ] WebSocket server integration -- [ ] Client SDK: subscribe, track, broadcast, onPresence, onBroadcast -- [ ] Heartbeat cleanup (30s) -- [ ] Test: Two clients join, both receive presence sync -- [ ] Test: Client broadcasts, other receives -- [ ] Test: Client disconnects, others notified diff --git a/new-features-docs/FEATURE_06_AutoREST_Filtering.md b/new-features-docs/FEATURE_06_AutoREST_Filtering.md deleted file mode 100644 index e6480cc..0000000 --- a/new-features-docs/FEATURE_06_AutoREST_Filtering.md +++ /dev/null @@ -1,214 +0,0 @@ -# Feature 6: Auto-REST Advanced Filtering - -**Priority**: Medium (Week 13) -**Complexity**: Medium -**Dependencies**: Structured Logging -**Estimated Effort**: 2-3 weeks - ---- - -## Problem Statement - -Current Auto-REST only supports basic queries: -- `GET /api/users?id=123` (equality only) - -Developers need: -- Range: `?age_gte=18&age_lte=65` -- Pattern: `?name_like=john` -- IN: `?status_in=active,pending` -- Null checks: 
`?deleted_at_is_null=true` - ---- - -## Solution - -Parse advanced operators from query params and map to Drizzle filters. - -**Format**: `column_operator=value` - -**Examples**: -- `?age_gte=18` → `age >= 18` -- `?name_like=john` → `name LIKE '%john%'` -- `?status_in=active,pending` → `status IN ('active', 'pending')` - ---- - -## Implementation - -### Step 1: Define Operators - -**File**: `packages/core/src/auto-rest.ts` - -**ADD** at top: - -```typescript -import { eq, ne, gt, gte, lt, lte, like, ilike, inArray, isNull, isNotNull, and } from 'drizzle-orm'; - -export const QUERY_OPERATORS = { - eq: (col: any, val: any) => eq(col, val), - neq: (col: any, val: any) => ne(col, val), - gt: (col: any, val: any) => gt(col, val), - gte: (col: any, val: any) => gte(col, val), - lt: (col: any, val: any) => lt(col, val), - lte: (col: any, val: any) => lte(col, val), - like: (col: any, val: any) => like(col, `%${val}%`), - ilike: (col: any, val: any) => ilike(col, `%${val}%`), - in: (col: any, val: any) => { - const values = typeof val === 'string' ? val.split(',') : val; - return inArray(col, values); - }, - is_null: (col: any, val: any) => { - const check = val === 'true' || val === true; - return check ? 
isNull(col) : isNotNull(col); - }, -} as const; - -function parseFilter(key: string, value: string, schema: any): any | null { - const parts = key.split('_'); - - let operator: string | null = null; - let columnName: string | null = null; - - // Try two-word operators (is_null) - if (parts.length >= 3) { - const twoWord = `${parts[parts.length - 2]}_${parts[parts.length - 1]}`; - if (twoWord in QUERY_OPERATORS) { - operator = twoWord; - columnName = parts.slice(0, -2).join('_'); - } - } - - // Try one-word operators - if (!operator && parts.length >= 2) { - const oneWord = parts[parts.length - 1]; - if (oneWord in QUERY_OPERATORS) { - operator = oneWord; - columnName = parts.slice(0, -1).join('_'); - } - } - - // No operator = equality - if (!operator) { - operator = 'eq'; - columnName = key; - } - - const column = schema[columnName]; - if (!column) return null; - - const opFn = QUERY_OPERATORS[operator as keyof typeof QUERY_OPERATORS]; - if (!opFn) return null; - - return opFn(column, value); -} -``` - ---- - -### Step 2: Update GET Handler - -**FIND** the existing GET route: - -```typescript -app.get('/api/:table', async (c) => { - // ... 
existing code -}); -``` - -**REPLACE** with: - -```typescript -app.get('/api/:table', async (c) => { - const tableName = c.req.param('table'); - const queryParams = c.req.query(); - - const table = schema[tableName]; - if (!table) { - return c.json({ error: 'Table not found' }, 404); - } - - let query = db.select().from(table); - - // Apply filters - const filters: any[] = []; - const specialParams = ['limit', 'offset', 'order_by', 'order']; - - for (const [key, value] of Object.entries(queryParams)) { - if (specialParams.includes(key)) continue; - - const filter = parseFilter(key, value as string, table); - if (filter) filters.push(filter); - } - - if (filters.length > 0) { - query = query.where(and(...filters)); - } - - // Ordering - if (queryParams.order_by) { - const column = table[queryParams.order_by]; - if (column) { - const direction = queryParams.order === 'desc' ? desc : asc; - query = query.orderBy(direction(column)); - } - } - - // Pagination - const limit = parseInt(queryParams.limit || '100', 10); - const offset = parseInt(queryParams.offset || '0', 10); - - query = query.limit(Math.min(limit, 1000)).offset(offset); - - const results = await query; - - return c.json({ - data: results, - count: results.length, - limit, - offset, - }); -}); -``` - ---- - -### Step 3: Add Security Config - -**File**: `packages/core/src/config/schema.ts` - -**ADD**: - -```typescript -autoRest: z.object({ - enabled: z.boolean().default(true), - basePath: z.string().default('/api'), - tables: z.record(z.object({ - advancedFilters: z.boolean().default(false), - maxLimit: z.number().default(1000), - })).optional(), -}).optional(), -``` - -**Then check config in route handler**: - -```typescript -const tableConfig = config.autoRest?.tables?.[tableName]; -if (!tableConfig?.advancedFilters) { - // Only allow eq operator - // Skip advanced operators -} -``` - ---- - -## Acceptance Criteria - -- [ ] Operators: eq, neq, gt, gte, lt, lte, like, ilike, in, is_null -- [ ] Parse 
format: `column_operator=value` -- [ ] Multiple filters: `?age_gte=18&status=active` -- [ ] IN splits commas: `?status_in=active,pending` -- [ ] LIKE adds wildcards: `?name_like=john` → `%john%` -- [ ] Ordering: `?order_by=created_at&order=desc` -- [ ] Pagination: `?limit=50&offset=100` -- [ ] Config controls advanced filters per table -- [ ] Test: `?age_gte=18&age_lte=65` returns users 18-65 diff --git a/new-features-docs/FEATURE_07_GraphQL_Subscriptions.md b/new-features-docs/FEATURE_07_GraphQL_Subscriptions.md deleted file mode 100644 index 714d81a..0000000 --- a/new-features-docs/FEATURE_07_GraphQL_Subscriptions.md +++ /dev/null @@ -1,178 +0,0 @@ -# Feature 7: GraphQL Subscriptions - -**Priority**: Medium (Week 14) -**Complexity**: Low -**Dependencies**: Realtime Presence (uses same events) -**Estimated Effort**: 1-2 weeks - ---- - -## Problem Statement - -GraphQL server has queries and mutations but no subscriptions. Realtime apps need live data updates. - ---- - -## Solution - -Enable graphql-yoga subscriptions and wire to realtime event emitter: -- Subscribe: `subscription { postsInserted { id title } }` -- Fires when: Database insert occurs -- Uses: Existing realtime event system - ---- - -## Implementation - -### Step 1: Add PubSub - -**File**: `packages/core/src/graphql/server.ts` - -**MODIFY**: - -```typescript -import { createYoga, createPubSub } from 'graphql-yoga'; - -const pubsub = createPubSub(); - -export function createGraphQLServer(config: GraphQLConfig) { - const yoga = createYoga({ - schema: config.schema, - context: config.context, - graphqlEndpoint: '/graphql', - }); - - return yoga; -} - -export function publishGraphQLEvent(topic: string, payload: any): void { - pubsub.publish(topic, payload); -} - -export { pubsub }; -``` - ---- - -### Step 2: Generate Subscription Resolvers - -**File**: `packages/core/src/graphql/resolvers.ts` - -**ADD**: - -```typescript -import { pubsub } from './server'; - -export function generateSubscriptionResolvers( 
- schema: Record -): Record { - const subscriptions: Record = {}; - - for (const [tableName] of Object.entries(schema)) { - subscriptions[`${tableName}Changes`] = { - subscribe: () => pubsub.subscribe(`${tableName}:change`), - resolve: (payload: any) => payload, - }; - - subscriptions[`${tableName}Inserted`] = { - subscribe: () => pubsub.subscribe(`${tableName}:insert`), - resolve: (payload: any) => payload, - }; - - subscriptions[`${tableName}Updated`] = { - subscribe: () => pubsub.subscribe(`${tableName}:update`), - resolve: (payload: any) => payload, - }; - - subscriptions[`${tableName}Deleted`] = { - subscribe: () => pubsub.subscribe(`${tableName}:delete`), - resolve: (payload: any) => payload, - }; - } - - return subscriptions; -} -``` - ---- - -### Step 3: Bridge Realtime to GraphQL - -**File**: `packages/core/src/graphql/realtime-bridge.ts` (NEW) - -```typescript -import { pubsub } from './server'; -import type { EventEmitter } from 'events'; - -export function bridgeRealtimeToGraphQL(eventEmitter: EventEmitter): void { - eventEmitter.on('db:insert', (event: { table: string; record: any }) => { - pubsub.publish(`${event.table}:insert`, event.record); - pubsub.publish(`${event.table}:change`, { - type: 'INSERT', - record: event.record - }); - }); - - eventEmitter.on('db:update', (event: { table: string; record: any }) => { - pubsub.publish(`${event.table}:update`, event.record); - pubsub.publish(`${event.table}:change`, { - type: 'UPDATE', - record: event.record - }); - }); - - eventEmitter.on('db:delete', (event: { table: string; record: any }) => { - pubsub.publish(`${event.table}:delete`, event.record); - pubsub.publish(`${event.table}:change`, { - type: 'DELETE', - record: event.record - }); - }); - - console.log('[GraphQL] Subscriptions wired to realtime'); -} -``` - ---- - -### Step 4: Update Schema - -**File**: `packages/core/src/graphql/schema-generator.ts` - -**ADD** subscription types: - -```typescript -export function generateGraphQLSchema(schema: 
Record): string { - let sdl = ''; - - // ... existing type generation ... - - // Add Subscription type - sdl += '\ntype Subscription {\n'; - - for (const tableName of Object.keys(schema)) { - const typeName = capitalize(tableName); - - sdl += ` ${tableName}Changes: ${typeName}Change!\n`; - sdl += ` ${tableName}Inserted: ${typeName}!\n`; - sdl += ` ${tableName}Updated: ${typeName}!\n`; - sdl += ` ${tableName}Deleted: ${typeName}!\n`; - } - - sdl += '}\n'; - - return sdl; -} -``` - ---- - -## Acceptance Criteria - -- [ ] PubSub instance created -- [ ] Subscription resolvers generated -- [ ] Realtime bridge connects events -- [ ] Schema includes Subscription type -- [ ] Test: Subscribe to `postsInserted`, insert post, fires -- [ ] Test: GraphQL Playground shows subscriptions -- [ ] Test: Multiple clients can subscribe diff --git a/new-features-docs/FEATURE_08_Webhook_Logs.md b/new-features-docs/FEATURE_08_Webhook_Logs.md deleted file mode 100644 index 1605c55..0000000 --- a/new-features-docs/FEATURE_08_Webhook_Logs.md +++ /dev/null @@ -1,270 +0,0 @@ -# Feature 8: Webhooks Delivery Logs - -**Priority**: Medium (Week 15) -**Complexity**: Low -**Dependencies**: Structured Logging, Migrations -**Estimated Effort**: 1-2 weeks - ---- - -## Problem Statement - -Webhooks fire-and-forget with no visibility: -- Can't see if webhook succeeded/failed -- No history of deliveries -- Can't retry failed deliveries -- Debugging is impossible - ---- - -## Solution - -Store delivery attempts in database table: -- Log every delivery (success/fail) -- Dashboard route to view logs -- CLI command to retry failed deliveries -- 30-day retention (configurable) - ---- - -## Implementation - -### Step 1: Create Delivery Logs Table - -**File**: `packages/core/src/webhooks/schema.sql` (NEW) - -```sql -CREATE TABLE IF NOT EXISTS _betterbase_webhook_deliveries ( - id TEXT PRIMARY KEY, - webhook_id TEXT NOT NULL, - status TEXT NOT NULL CHECK (status IN ('success', 'failed', 'pending')), - request_url 
TEXT NOT NULL, - request_body TEXT, - response_code INTEGER, - response_body TEXT, - error TEXT, - attempt_count INTEGER NOT NULL DEFAULT 1, - created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP -); - -CREATE INDEX idx_webhook_deliveries_webhook_id - ON _betterbase_webhook_deliveries(webhook_id); -CREATE INDEX idx_webhook_deliveries_created_at - ON _betterbase_webhook_deliveries(created_at DESC); -``` - ---- - -### Step 2: Update Webhook Dispatcher - -**File**: `packages/core/src/webhooks/dispatcher.ts` - -**MODIFY**: - -```typescript -import { nanoid } from 'nanoid'; - -export class WebhookDispatcher { - private db: any; - - constructor(db: any) { - this.db = db; - } - - async dispatch(config: WebhookConfig, payload: WebhookPayload): Promise { - const deliveryId = nanoid(); - - // Create delivery log - await this.db.execute({ - sql: ` - INSERT INTO _betterbase_webhook_deliveries - (id, webhook_id, status, request_url, request_body, created_at) - VALUES (?, ?, ?, ?, ?, ?) - `, - args: [ - deliveryId, - config.id, - 'pending', - config.url, - JSON.stringify(payload), - new Date().toISOString(), - ], - }); - - try { - const signature = signPayload(payload, config.secret); - - const response = await fetch(config.url, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'X-BetterBase-Signature': signature, - }, - body: JSON.stringify(payload), - }); - - const responseBody = await response.text(); - - // Update log - success/fail - await this.db.execute({ - sql: ` - UPDATE _betterbase_webhook_deliveries - SET status = ?, response_code = ?, response_body = ?, updated_at = ? - WHERE id = ? - `, - args: [ - response.ok ? 'success' : 'failed', - response.status, - responseBody, - new Date().toISOString(), - deliveryId, - ], - }); - } catch (error) { - // Update log - error - await this.db.execute({ - sql: ` - UPDATE _betterbase_webhook_deliveries - SET status = ?, error = ?, updated_at = ? 
- WHERE id = ? - `, - args: [ - 'failed', - error instanceof Error ? error.message : 'Unknown error', - new Date().toISOString(), - deliveryId, - ], - }); - } - } - - async getDeliveryLogs(webhookId: string, limit = 50): Promise { - const result = await this.db.execute({ - sql: ` - SELECT * FROM _betterbase_webhook_deliveries - WHERE webhook_id = ? - ORDER BY created_at DESC - LIMIT ? - `, - args: [webhookId, limit], - }); - - return result.rows; - } -} -``` - ---- - -### Step 3: Create Dashboard Route - -**File**: `apps/test-project/src/routes/webhooks.ts` (NEW) - -```typescript -import { Hono } from 'hono'; -import { db } from '../db'; - -const app = new Hono(); - -app.get('/:webhookId/deliveries', async (c) => { - const webhookId = c.req.param('webhookId'); - const limit = parseInt(c.req.query('limit') || '50', 10); - - const result = await db.execute({ - sql: ` - SELECT * FROM _betterbase_webhook_deliveries - WHERE webhook_id = ? - ORDER BY created_at DESC - LIMIT ? - `, - args: [webhookId, limit], - }); - - return c.json({ - data: result.rows, - count: result.rows.length, - }); -}); - -export default app; -``` - -**Mount in routes**: - -```typescript -// File: apps/test-project/src/routes/index.ts -import webhooksRoutes from './webhooks'; -app.route('/api/webhooks', webhooksRoutes); -``` - ---- - -### Step 4: Add CLI Commands - -**File**: `packages/cli/src/commands/webhook.ts` - -**ADD**: - -```typescript -export async function runWebhookLogsCommand( - projectRoot: string, - webhookId: string, - options: { limit?: number } = {} -): Promise { - const { limit = 20 } = options; - const db = await loadDatabaseConnection(projectRoot); - - const result = await db.execute({ - sql: ` - SELECT * FROM _betterbase_webhook_deliveries - WHERE webhook_id = ? - ORDER BY created_at DESC - LIMIT ? 
- `, - args: [webhookId, limit], - }); - - if (result.rows.length === 0) { - logger.info('No delivery logs found'); - return; - } - - console.log('\nWebhook Delivery Logs:\n'); - console.log('Status | Code | Created At | Error'); - console.log('---------|------|---------------------|-------'); - - for (const log of result.rows) { - const status = log.status.padEnd(8); - const code = (log.response_code || 'N/A').toString().padEnd(4); - const time = new Date(log.created_at).toISOString(); - const error = log.error ? log.error.substring(0, 20) : ''; - - console.log(`${status} | ${code} | ${time} | ${error}`); - } -} -``` - -**Register**: - -```typescript -// File: packages/cli/src/index.ts -program - .command('webhook:logs ') - .option('-l, --limit ', 'Limit', '20') - .action(async (id, opts) => { - await runWebhookLogsCommand(process.cwd(), id, opts); - }); -``` - ---- - -## Acceptance Criteria - -- [ ] Delivery logs table created -- [ ] Dispatcher logs every attempt -- [ ] Dashboard route returns logs as JSON -- [ ] CLI `bb webhook:logs ` works -- [ ] Logs include: status, request/response, error, timestamps -- [ ] Test: Trigger webhook, verify log entry -- [ ] Test: Failed webhook shows status='failed' diff --git a/new-features-docs/FEATURE_09_RLS_Testing.md b/new-features-docs/FEATURE_09_RLS_Testing.md deleted file mode 100644 index ef03dce..0000000 --- a/new-features-docs/FEATURE_09_RLS_Testing.md +++ /dev/null @@ -1,204 +0,0 @@ -# Feature 9: RLS Policy Testing Tool - -**Priority**: Medium (Week 16) -**Complexity**: Medium -**Dependencies**: Migrations, Structured Logging -**Estimated Effort**: 1 week - ---- - -## Problem Statement - -RLS policies are critical for security but hard to test: -- No visibility if policies work correctly -- Manual testing is error-prone -- Production bugs are catastrophic (data leaks) - ---- - -## Solution - -CLI tool that: -- Creates temporary test schema (isolated) -- Generates test data -- Simulates queries as different users -- 
Outputs pass/fail results (JSON) -- Cleans up after test - ---- - -## Implementation - -### Step 1: Create Test Runner - -**File**: `packages/cli/src/commands/rls-test.ts` (NEW) - -```typescript -import { nanoid } from 'nanoid'; - -type RLSTestCase = { - name: string; - user_id: string; - query: string; - expected: 'allowed' | 'blocked'; - expectedRowCount?: number; -}; - -type RLSTestResult = { - test: string; - passed: boolean; - actual: 'allowed' | 'blocked'; - expected: 'allowed' | 'blocked'; - rowCount?: number; - error?: string; -}; - -export async function runRLSTestCommand( - projectRoot: string, - tableName: string -): Promise { - logger.info(`Testing RLS policies for: ${tableName}`); - - const db = await loadDatabaseConnection(projectRoot); - - // Create test schema - const testSchema = `test_${nanoid(8)}`; - await db.execute(`CREATE SCHEMA ${testSchema}`); - - try { - // Copy table structure - await db.execute(` - CREATE TABLE ${testSchema}.${tableName} - (LIKE public.${tableName} INCLUDING ALL) - `); - - // Enable RLS - await db.execute(` - ALTER TABLE ${testSchema}.${tableName} - ENABLE ROW LEVEL SECURITY - `); - - // Apply policies (load from files) - const policies = await loadTablePolicies(projectRoot, tableName); - for (const policy of policies) { - const sql = generatePolicySQL(testSchema, tableName, policy); - await db.execute(sql); - } - - // Create test data - const user1 = 'test_user_1'; - const user2 = 'test_user_2'; - - await db.execute({ - sql: `INSERT INTO ${testSchema}.${tableName} (id, user_id, title) VALUES (?, ?, ?)`, - args: [nanoid(), user1, 'Post by user 1'], - }); - - await db.execute({ - sql: `INSERT INTO ${testSchema}.${tableName} (id, user_id, title) VALUES (?, ?, ?)`, - args: [nanoid(), user2, 'Post by user 2'], - }); - - // Test cases - const tests: RLSTestCase[] = [ - { - name: 'User can read own records', - user_id: user1, - query: `SELECT * FROM ${testSchema}.${tableName} WHERE user_id = '${user1}'`, - expected: 'allowed', 
- expectedRowCount: 1, - }, - { - name: 'User cannot read others records', - user_id: user1, - query: `SELECT * FROM ${testSchema}.${tableName} WHERE user_id = '${user2}'`, - expected: 'blocked', - expectedRowCount: 0, - }, - ]; - - // Run tests - const results: RLSTestResult[] = []; - - for (const test of tests) { - // Set current user - await db.execute(`SELECT set_config('request.jwt.claims.sub', '${test.user_id}', true)`); - - let actual: 'allowed' | 'blocked' = 'blocked'; - let rowCount: number | undefined; - let error: string | undefined; - - try { - const result = await db.execute(test.query); - actual = 'allowed'; - rowCount = result.rows?.length; - } catch (err) { - actual = 'blocked'; - error = err instanceof Error ? err.message : 'Unknown'; - } - - const passed = actual === test.expected && - (test.expectedRowCount === undefined || rowCount === test.expectedRowCount); - - results.push({ - test: test.name, - passed, - actual, - expected: test.expected, - rowCount, - error, - }); - - if (passed) { - logger.success(`✅ ${test.name}`); - } else { - logger.error(`❌ ${test.name}`); - } - } - - // Output JSON - console.log('\nResults:'); - console.log(JSON.stringify({ - table: tableName, - total: results.length, - passed: results.filter(r => r.passed).length, - failed: results.filter(r => !r.passed).length, - results, - }, null, 2)); - - } finally { - // Cleanup - await db.execute(`DROP SCHEMA ${testSchema} CASCADE`); - logger.info('Test schema cleaned up'); - } -} -``` - ---- - -### Step 2: Register CLI Command - -**File**: `packages/cli/src/index.ts` - -```typescript -import { runRLSTestCommand } from './commands/rls-test'; - -program - .command('rls:test ') - .description('Test RLS policies') - .action(async (table: string) => { - await runRLSTestCommand(process.cwd(), table); - }); -``` - ---- - -## Acceptance Criteria - -- [ ] `bb rls:test
` command works -- [ ] Creates temporary test schema -- [ ] Generates test data (multiple users) -- [ ] Tests SELECT, INSERT, UPDATE, DELETE -- [ ] Outputs JSON with pass/fail -- [ ] Cleans up test schema after -- [ ] Test: Run on table with policies, verify results diff --git a/new-features-docs/FEATURE_10_Structured_Logging.md b/new-features-docs/FEATURE_10_Structured_Logging.md deleted file mode 100644 index 665f9f2..0000000 --- a/new-features-docs/FEATURE_10_Structured_Logging.md +++ /dev/null @@ -1,624 +0,0 @@ -# Feature 10: Structured Logging - -**Priority**: CRITICAL (Week 1-2) - **IMPLEMENT THIS FIRST** -**Complexity**: Low -**Dependencies**: None -**Estimated Effort**: 1-2 weeks - ---- - -## Why This Feature First? - -Structured logging is the FOUNDATION for all other features. Every feature will use logging for: -- **Debugging**: Track what's happening in production -- **Performance**: Log slow queries, long requests -- **Security**: Audit trail for sensitive operations -- **Monitoring**: Track errors and warnings - -**Without logging in place first, debugging the other 9 features will be painful.** - ---- - -## Problem Statement - -Current codebase uses scattered `console.log` statements: -- **No structure**: `console.log("User logged in")` - what user? when? -- **No levels**: Can't filter debug vs error messages -- **No persistence**: Logs disappear when process restarts -- **No request tracking**: Can't trace a request across multiple log entries - -**Production Impact**: When something breaks in production, you have no way to diagnose it. 
- ---- - -## Solution Overview - -Implement **Pino** (fastest Node.js logger) with: -- **Log levels**: debug, info, warn, error -- **Structured data**: JSON logs with metadata -- **Pretty dev mode**: Colored, human-readable -- **File persistence**: Rotating daily log files in production -- **Request IDs**: Track requests across the system - ---- - -## Architecture - -``` -┌────────────────────────────────────────────────────────────┐ -│ Application Code │ -│ ┌──────────────────────────────────────────────────┐ │ -│ │ logger.info({ msg: "User logged in", │ │ -│ │ userId: "123", │ │ -│ │ duration: 45 }) │ │ -│ └────────────────────┬─────────────────────────────┘ │ -└───────────────────────┼────────────────────────────────────┘ - │ - ▼ -┌────────────────────────────────────────────────────────────┐ -│ Pino Logger │ -│ ┌──────────────────────────────────────────────────┐ │ -│ │ NODE_ENV === 'development'? │ │ -│ │ ├─ YES → pino-pretty (colored console) │ │ -│ │ └─ NO → JSON (structured logs) │ │ -│ └────────────────────┬─────────────────────────────┘ │ -└───────────────────────┼────────────────────────────────────┘ - │ - ┌───────┴────────┐ - │ │ - DEVELOPMENT PRODUCTION - │ │ - ▼ ▼ - ┌─────────────┐ ┌──────────────────┐ - │ Terminal │ │ Console + Files │ - │ (pretty) │ │ (JSON) │ - └─────────────┘ │ logs/ │ - │ betterbase- │ - │ 2026-03-20.log │ - └──────────────────┘ -``` - ---- - -## Implementation Steps - -### Step 1: Install Pino - -**Action**: Install Pino and pino-pretty - -```bash -cd packages/core -bun add pino -bun add -D pino-pretty # Dev dependency for pretty printing -``` - -**Verification**: -```bash -cat package.json | grep pino -# Should show: -# "pino": "^8.x.x" -# "pino-pretty": "^10.x.x" (in devDependencies) -``` - ---- - -### Step 2: Create Logger Module - -**File**: `packages/core/src/logger/index.ts` (NEW FILE - create `logger/` directory) - -```bash -mkdir -p packages/core/src/logger -``` - -```typescript -/** - * Structured Logging Module - * - * 
Provides application-wide logging with: - * - Structured JSON logs - * - Log levels (debug, info, warn, error) - * - Request ID tracking - * - Pretty dev mode, JSON production mode - * - File rotation (production only) - * - * Usage: - * import { logger } from './logger'; - * logger.info({ msg: "User action", userId: "123" }); - */ - -import pino from 'pino'; -import { nanoid } from 'nanoid'; - -/** - * Determine environment - */ -const isDev = process.env.NODE_ENV !== 'production'; -const logLevel = process.env.LOG_LEVEL || (isDev ? 'debug' : 'info'); - -/** - * Main application logger - * - * Development mode: - * - Uses pino-pretty for colored, readable output - * - Shows timestamp, level, message - * - Hides pid and hostname (noise reduction) - * - * Production mode: - * - Outputs structured JSON - * - Includes all metadata - * - Can be parsed by log aggregators (Datadog, CloudWatch, etc.) - * - * @example - * logger.info("User logged in"); - * logger.info({ userId: "123", action: "login" }, "User logged in"); - * logger.error({ err: error }, "Failed to process payment"); - */ -export const logger = pino({ - level: logLevel, - - // Pretty print in development - transport: isDev ? { - target: 'pino-pretty', - options: { - colorize: true, - translateTime: 'HH:MM:ss.l', // e.g., 14:30:22.123 - ignore: 'pid,hostname', // Hide noise - singleLine: false, - }, - } : undefined, - - // JSON formatting in production - formatters: isDev ? 
undefined : { - level: (label) => { - return { level: label }; - }, - }, -}); - -/** - * Create a child logger with a unique request ID - * - * Use this for HTTP request handling to track all logs - * related to a single request - * - * @returns Child logger with reqId field - * - * @example - * const reqLogger = createRequestLogger(); - * reqLogger.info("Processing request"); - * reqLogger.info("Query executed"); - * // Both logs will have the same reqId - */ -export function createRequestLogger(): pino.Logger { - const requestId = nanoid(10); // e.g., "a1B2c3D4e5" - return logger.child({ reqId: requestId }); -} - -/** - * Log slow database queries - * - * Automatically warns when a query exceeds threshold - * - * @param query - SQL query (will be truncated to 200 chars) - * @param duration - Query duration in milliseconds - * @param threshold - Threshold in ms (default: 100ms) - * - * @example - * const start = Date.now(); - * await db.execute(query); - * logSlowQuery(query, Date.now() - start); - */ -export function logSlowQuery( - query: string, - duration: number, - threshold = 100 -): void { - if (duration > threshold) { - logger.warn({ - msg: 'Slow query detected', - query: query.substring(0, 200), // Truncate long queries - duration_ms: duration, - threshold_ms: threshold, - }); - } -} - -/** - * Log errors with full stack trace - * - * Ensures errors are logged consistently with context - * - * @param error - Error object - * @param context - Additional context (userId, requestId, etc.) 
- * - * @example - * try { - * await riskyOperation(); - * } catch (error) { - * logError(error, { userId: "123", operation: "payment" }); - * } - */ -export function logError( - error: Error, - context?: Record -): void { - logger.error({ - msg: error.message, - stack: error.stack, - error_name: error.name, - ...context, - }); -} - -/** - * Log successful operations with timing - * - * @param operation - Operation name - * @param duration - Duration in ms - * @param metadata - Additional metadata - * - * @example - * const start = Date.now(); - * await processData(); - * logSuccess("process_data", Date.now() - start, { records: 100 }); - */ -export function logSuccess( - operation: string, - duration: number, - metadata?: Record -): void { - logger.info({ - msg: `Operation completed: ${operation}`, - operation, - duration_ms: duration, - ...metadata, - }); -} -``` - -**Verification**: -```bash -cd packages/core -bun run build -# Should compile without errors -``` - ---- - -### Step 3: Create Request Logger Middleware (Hono) - -**File**: `packages/core/src/middleware/request-logger.ts` (NEW FILE) - -```typescript -import type { Context, Next } from 'hono'; -import { createRequestLogger } from '../logger'; - -/** - * Request logging middleware for Hono - * - * Logs all incoming requests and responses with: - * - Request ID (unique per request) - * - HTTP method and path - * - Response status code - * - Request duration - * - * Usage: - * app.use('*', requestLogger()); - * - * The logger is attached to context and can be accessed: - * const logger = c.get('logger'); - * logger.info("Processing payment"); - */ -export function requestLogger() { - return async (c: Context, next: Next) => { - const logger = createRequestLogger(); - const start = Date.now(); - - // Attach logger to context for use in route handlers - c.set('logger', logger); - - // Log incoming request - logger.info({ - msg: 'Incoming request', - method: c.req.method, - path: c.req.path, - user_agent: 
c.req.header('user-agent'), - }); - - // Execute route handler - await next(); - - // Log response - const duration = Date.now() - start; - const level = c.res.status >= 500 ? 'error' : - c.res.status >= 400 ? 'warn' : 'info'; - - logger[level]({ - msg: 'Request completed', - method: c.req.method, - path: c.req.path, - status: c.res.status, - duration_ms: duration, - }); - - // Warn on slow requests (>1s) - if (duration > 1000) { - logger.warn({ - msg: 'Slow request detected', - duration_ms: duration, - path: c.req.path, - }); - } - }; -} -``` - ---- - -### Step 4: Add File Logging (Production Only) - -**File**: `packages/core/src/logger/file-transport.ts` (NEW FILE) - -```typescript -import path from 'path'; -import { mkdir } from 'fs/promises'; -import pino from 'pino'; - -/** - * Setup file logging for production - * - * Creates daily rotating log files in logs/ directory - * - * @returns Pino destination stream - */ -export async function setupFileLogging(): Promise { - const logsDir = path.join(process.cwd(), 'logs'); - - // Create logs directory if it doesn't exist - await mkdir(logsDir, { recursive: true }); - - // Create log file with today's date - const date = new Date().toISOString().split('T')[0]; // YYYY-MM-DD - const logFile = path.join(logsDir, `betterbase-${date}.log`); - - return pino.destination({ - dest: logFile, - sync: false, // Async for better performance - mkdir: true, - }); -} -``` - -**Update**: `packages/core/src/logger/index.ts` - -**REPLACE** the logger initialization with: - -```typescript -import { setupFileLogging } from './file-transport'; - -// Initialize logger -let loggerInstance: pino.Logger; - -if (isDev) { - // Development: Pretty console output - loggerInstance = pino({ - level: logLevel, - transport: { - target: 'pino-pretty', - options: { - colorize: true, - translateTime: 'HH:MM:ss.l', - ignore: 'pid,hostname', - }, - }, - }); -} else { - // Production: JSON to console + file - const fileStream = await setupFileLogging(); 
- - // Multi-stream: both console and file - const streams = [ - { stream: process.stdout }, - { stream: fileStream }, - ]; - - loggerInstance = pino( - { level: logLevel }, - pino.multistream(streams) - ); -} - -export const logger = loggerInstance; -``` - ---- - -### Step 5: Replace console.log Throughout Codebase - -**Action**: Search and replace console.log with logger - -**Strategy**: -1. Search for all `console.log` -2. Replace with `logger.info` -3. Search for all `console.error` -4. Replace with `logger.error` -5. Search for all `console.warn` -6. Replace with `logger.warn` - -**Example Replacements**: - -**Before**: -```typescript -console.log('User logged in:', userId); -console.error('Failed to save:', error); -``` - -**After**: -```typescript -import { logger } from './logger'; - -logger.info({ userId }, 'User logged in'); -logger.error({ error }, 'Failed to save'); -``` - -**Files to Update** (search in these directories): -- `packages/core/src/` -- `packages/cli/src/` -- `apps/test-project/src/` - -**Bash Command to Find All console.log**: -```bash -grep -r "console\.log" packages/core/src -grep -r "console\.error" packages/core/src -grep -r "console\.warn" packages/core/src -``` - ---- - -### Step 6: Add Logging to Main App - -**File**: `apps/test-project/src/index.ts` - -**Action**: Add request logger middleware - -```typescript -import { Hono } from 'hono'; -import { requestLogger } from '@betterbase/core/middleware/request-logger'; - -const app = new Hono(); - -// Add request logger (must be first middleware) -app.use('*', requestLogger()); - -// ... rest of your app -``` - ---- - -## Testing - -### Manual Testing - -**1. Start dev server**: -```bash -cd apps/test-project -bun run dev -``` - -**Expected output** (pretty logs): -``` -14:30:22.123 INFO Server starting on port 3000 -``` - -**2. 
Make a request**: -```bash -curl http://localhost:3000/api/users -``` - -**Expected logs**: -``` -14:30:25.456 INFO (a1B2c3D4e5) Incoming request - method: "GET" - path: "/api/users" -14:30:25.498 INFO (a1B2c3D4e5) Request completed - method: "GET" - path: "/api/users" - status: 200 - duration_ms: 42 -``` - -**3. Test production mode**: -```bash -NODE_ENV=production bun run dev -``` - -**Expected output** (JSON logs): -```json -{"level":"info","time":1709827935234,"msg":"Server starting","port":3000} -{"level":"info","time":1709827936123,"reqId":"a1B2c3D4e5","msg":"Incoming request","method":"GET","path":"/api/users"} -``` - -**4. Check log file created**: -```bash -ls -la logs/ -# Should show: betterbase-2026-03-20.log -``` - ---- - -## Acceptance Criteria - -- [ ] Pino and pino-pretty installed -- [ ] Logger module created in `packages/core/src/logger/` -- [ ] Request ID middleware created -- [ ] File logging works in production (logs/ directory) -- [ ] Dev mode uses pretty colored output -- [ ] Production mode uses JSON output -- [ ] All console.log replaced with logger.info -- [ ] All console.error replaced with logger.error -- [ ] Request duration logged for every HTTP request -- [ ] Slow requests (>1s) generate warning logs -- [ ] Slow queries (>100ms) generate warning logs -- [ ] Test: Start server, make request, verify logs with request ID -- [ ] Test: Production mode writes to file -- [ ] Test: Log rotation creates new file daily - ---- - -## Log Levels Guide - -**debug**: Verbose information for debugging -```typescript -logger.debug({ query, params }, 'Executing database query'); -``` - -**info**: Normal application flow -```typescript -logger.info({ userId }, 'User logged in'); -``` - -**warn**: Something unusual but not an error -```typescript -logger.warn({ duration: 1500 }, 'Slow request detected'); -``` - -**error**: Error occurred -```typescript -logger.error({ error: err }, 'Failed to process payment'); -``` - ---- - -## Environment Variables - 
-Add to `.env`: -```bash -# Logging configuration -LOG_LEVEL=debug # debug | info | warn | error -NODE_ENV=development # development | production -``` - ---- - -## Performance Notes - -- Pino is the **fastest** Node.js logger (benchmarked) -- Async file writes don't block requests -- Pretty printing adds ~5-10ms overhead (dev only) -- Production JSON logs add <1ms overhead - ---- - -## Next Steps After Implementation - -1. **Integrate with other features**: All features will use this logger -2. **Add log aggregation** (optional): Send logs to Datadog, CloudWatch, Loki -3. **Add sampling** (optional): Sample high-volume logs in production -4. **Add correlation IDs**: Track requests across microservices - ---- - -**Feature Status**: Ready for implementation -**Estimated Time**: 1-2 weeks -**Start Date**: Week 1 (IMPLEMENT THIS FIRST) - diff --git a/new-features-docs/README_START_HERE.md b/new-features-docs/README_START_HERE.md deleted file mode 100644 index 46e6454..0000000 --- a/new-features-docs/README_START_HERE.md +++ /dev/null @@ -1,83 +0,0 @@ -# BetterBase FOSS Features - Getting Started - -## 📦 What You Have - -**10 detailed feature specification files** ready for Kilo Code implementation: - -### ⭐ PHASE 1: Start Here (Weeks 1-4) -1. **FEATURE_10_Structured_Logging.md** - IMPLEMENT THIS FIRST ✅ -2. **FEATURE_03_Migration_Rollback.md** - Coming next ⏳ - -### 📊 PHASE 2-4: Core Features (Weeks 5-16) -3-10. Remaining features (being created now) - ---- - -## 🚀 Quick Start Instructions - -### Step 1: Review Files -```bash -# You should have these files: -ls -1 FEATURE_*.md - -# Expected output: -# FEATURE_01_Storage_Image_Transformations.md ✅ READY (3,000 words) -# FEATURE_10_Structured_Logging.md ✅ READY (1,900 words) -# FEATURE_02 through FEATURE_09.md ⏳ CREATING NOW -``` - -### Step 2: Start with Logging -1. Open `FEATURE_10_Structured_Logging.md` -2. Follow step-by-step instructions -3. Implement in Kilo Code -4. 
Test acceptance criteria - -### Step 3: Continue in Order -Follow the implementation order in `_INDEX_ALL_FEATURES.md` - ---- - -## 📋 Implementation Checklist - -- [ ] Week 1-2: Structured Logging (Feature 10) -- [ ] Week 3-4: Migration Rollback (Feature 3) -- [ ] Week 5-7: Storage Transforms (Feature 1) -- [ ] Week 8-9: Auth Providers (Feature 2) -- [ ] Week 10: Functions Local Dev (Feature 4) -- [ ] Week 11-12: Realtime Presence (Feature 5) -- [ ] Week 13: Auto-REST Filtering (Feature 6) -- [ ] Week 14: GraphQL Subscriptions (Feature 7) -- [ ] Week 15: Webhook Logs (Feature 8) -- [ ] Week 16: RLS Testing (Feature 9) - ---- - -## 💡 Tips for Success - -1. **One feature at a time** - Don't skip ahead -2. **Test thoroughly** - Check acceptance criteria -3. **Commit often** - After each feature completes -4. **Ask questions** - Reference back to original conversation if needed - ---- - -## ⚠️ Important Notes - -- **Feature 10 (Logging) MUST be first** - All other features depend on it -- **Each file is self-contained** - Has everything needed to implement -- **Code examples included** - For complex parts -- **Total timeline: 16 weeks** - Can be done faster if needed - ---- - -## 📝 File Status - -Current progress: -- ✅ **2 detailed files created** (Features 1, 10) -- ✅ **Index file created** -- ⏳ **8 remaining files** (creating now - check back in outputs folder) - ---- - -**Ready to start?** Open `FEATURE_10_Structured_Logging.md` and begin! - diff --git a/new-features-docs/_INDEX_ALL_FEATURES.md b/new-features-docs/_INDEX_ALL_FEATURES.md deleted file mode 100644 index af7d22d..0000000 --- a/new-features-docs/_INDEX_ALL_FEATURES.md +++ /dev/null @@ -1,71 +0,0 @@ -# BetterBase FOSS Features - Implementation Index - -**Total Features**: 10 -**Timeline**: 16 weeks -**Status**: Ready for implementation - ---- - -## Implementation Order - -### Phase 1: Foundation (Weeks 1-4) -1. 
**[FEATURE_10_Structured_Logging.md](./FEATURE_10_Structured_Logging.md)** ⭐ **START HERE** - - Pino logger with request IDs - - Week 1-2, Critical priority - -2. **[FEATURE_03_Migration_Rollback.md](./FEATURE_03_Migration_Rollback.md)** - - Up/down SQL migrations - - Week 3-4, High priority - -### Phase 2: Core Features (Weeks 5-10) -3. **[FEATURE_01_Storage_Image_Transformations.md](./FEATURE_01_Storage_Image_Transformations.md)** - - Sharp library integration - - Week 5-7, High priority - -4. **[FEATURE_02_Auth_Social_Providers.md](./FEATURE_02_Auth_Social_Providers.md)** - - OAuth scaffolding CLI - - Week 8-9, Medium priority - -5. **[FEATURE_04_Functions_Local_Dev.md](./FEATURE_04_Functions_Local_Dev.md)** - - Hot reload for serverless functions - - Week 10, Medium priority - -### Phase 3: Realtime & Querying (Weeks 11-14) -6. **[FEATURE_05_Realtime_Presence.md](./FEATURE_05_Realtime_Presence.md)** - - Channels, presence, broadcast - - Week 11-12, High priority - -7. **[FEATURE_06_AutoREST_Filtering.md](./FEATURE_06_AutoREST_Filtering.md)** - - Advanced query operators - - Week 13, Medium priority - -8. **[FEATURE_07_GraphQL_Subscriptions.md](./FEATURE_07_GraphQL_Subscriptions.md)** - - Realtime GraphQL - - Week 14, Medium priority - -### Phase 4: Operations (Weeks 15-16) -9. **[FEATURE_08_Webhook_Logs.md](./FEATURE_08_Webhook_Logs.md)** - - Delivery tracking - - Week 15, Medium priority - -10. **[FEATURE_09_RLS_Testing.md](./FEATURE_09_RLS_Testing.md)** - - Policy validation tool - - Week 16, Medium priority - ---- - -## Files Created - -✅ FEATURE_01_Storage_Image_Transformations.md (~3,000 words) -✅ FEATURE_10_Structured_Logging.md (~1,900 words) -⏳ FEATURE_02 through FEATURE_09 (creating now...) - ---- - -## Quick Start - -1. Read [FEATURE_10_Structured_Logging.md](./FEATURE_10_Structured_Logging.md) first -2. Implement logging (1-2 weeks) -3. Move to Feature 3 (Migration Rollback) -4. 
Follow implementation order above - From a6f17b103682772af865b0193419995e538f2033 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Tue, 24 Mar 2026 16:56:04 +0000 Subject: [PATCH 3/5] Fix lint script command in package.json to use bunx for consistency --- packages/client/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/client/package.json b/packages/client/package.json index 8f23e63..62de6cc 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -28,7 +28,7 @@ "dev": "bun --watch run src/build.ts", "test": "bun test", "typecheck": "tsc --noEmit --project tsconfig.json", - "lint": "biome check src test", + "lint": "bunx biome check src test", "typecheck:test": "tsc --noEmit --project tsconfig.test.json" }, "keywords": ["betterbase", "baas", "backend", "database", "realtime", "auth", "better-auth"], From b381dd8404e961945f52ae325c35ea2d01b3c161 Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Tue, 24 Mar 2026 17:19:54 +0000 Subject: [PATCH 4/5] Add Docker deployment configuration and documentation --- .dockerignore | 103 +++++++++++++++++++++ .env.example | 92 +++++++++++++++++++ CODEBASE_MAP.md | 37 ++++++++ Dockerfile | 113 +++++++++++++++++++++++ Dockerfile.project | 120 +++++++++++++++++++++++++ README.md | 72 ++++++++++----- docker-compose.production.yml | 163 ++++++++++++++++++++++++++++++++++ docker-compose.yml | 125 ++++++++++++++++++++++++++ 8 files changed, 805 insertions(+), 20 deletions(-) create mode 100644 .dockerignore create mode 100644 .env.example create mode 100644 Dockerfile create mode 100644 Dockerfile.project create mode 100644 docker-compose.production.yml create mode 100644 docker-compose.yml diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..be67c53 --- /dev/null +++ b/.dockerignore @@ -0,0 +1,103 @@ +# ============================================================================ +# Docker Ignore +# +# Excludes files not needed in Docker builds to 
reduce image size +# and improve build performance. +# ============================================================================ + +# Dependencies +node_modules +bower_components + +# Build outputs +dist +build +.next +.nuxt +output + +# Logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +# IDE +.idea +.vscode +*.swp +*.swo +*~ + +# OS +.DS_Store +Thumbs.db + +# Git +.git +.gitignore + +# Documentation +*.md +!README.md +docs +LICENSE + +# Test files +coverage +.nyc_output +*.test.ts +*.spec.ts +__tests__ +test +tests +*.test.js +*.spec.js + +# Turborepo +.turbo +node_modules/.cache + +# Database +*.db +*.sqlite +*.sqlite3 +migrations + +# Environment files (keep .env.example as reference) +.env +.env.* +!.env.example + +# Development specific +.vscode +.idea +*.local + +# Build config (not needed in final image) +tsconfig.json +tsconfig.*.json +drizzle.config.ts +betterbase.config.ts +turbo.json + +# Package artifacts +*.tgz +*.tar.gz + +# CI/CD +.github +.gitlab-ci.yml +.travis.yml +Jenkinsfile +azure-pipelines.yml + +# Misc +.cache +tmp +temp +*.pid +*.seed +*.pid.lock diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..b7be60a --- /dev/null +++ b/.env.example @@ -0,0 +1,92 @@ +# ============================================================================ +# Betterbase Environment Variables +# +# Copy this file to .env and fill in your values. +# NEVER commit .env to version control! 
+# ============================================================================ + +# ---------------------------------------------------------------------------- +# Required: Database +# ---------------------------------------------------------------------------- +# PostgreSQL connection string (for postgres, neon, supabase) +# Format: postgresql://user:password@host:port/database +DATABASE_URL=postgres://user:password@localhost:5432/betterbase + +# Or for Neon (serverless PostgreSQL) +# DATABASE_URL=postgres://user:password@ep-xxx.us-east-1.aws.neon.tech/neondb?sslmode=require + +# Or for Turso (libSQL) +# TURSO_URL=libsql://your-database.turso.io +# TURSO_AUTH_TOKEN=your-auth-token + +# ---------------------------------------------------------------------------- +# Required: Authentication +# ---------------------------------------------------------------------------- +# Generate a secure secret: openssl rand -base64 32 +AUTH_SECRET=your-super-secret-key-min-32-chars-long-change-in-production +AUTH_URL=http://localhost:3000 + +# ---------------------------------------------------------------------------- +# Optional: Storage (S3-compatible) +# ---------------------------------------------------------------------------- +# Provider: s3, r2, backblaze, minio +STORAGE_PROVIDER=r2 +STORAGE_BUCKET=betterbase-storage +STORAGE_REGION=auto +STORAGE_ENDPOINT=https://your-r2-endpoint.r2.cloudflarestorage.com +STORAGE_ACCESS_KEY_ID=your-access-key +STORAGE_SECRET_ACCESS_KEY=your-secret-key + +# For local storage (development only) +# STORAGE_PROVIDER=local +# STORAGE_PATH=./storage + +# ---------------------------------------------------------------------------- +# Optional: Email (SMTP) +# ---------------------------------------------------------------------------- +SMTP_HOST=smtp.example.com +SMTP_PORT=587 +SMTP_USER=your-smtp-user +SMTP_PASS=your-smtp-password +SMTP_FROM=noreply@your-domain.com + +# 
---------------------------------------------------------------------------- +# Optional: OAuth Providers +# ---------------------------------------------------------------------------- +# GitHub +# GITHUB_CLIENT_ID=your-github-client-id +# GITHUB_CLIENT_SECRET=your-github-client-secret + +# Google +# GOOGLE_CLIENT_ID=your-google-client-id +# GOOGLE_CLIENT_SECRET=your-google-client-secret + +# Discord +# DISCORD_CLIENT_ID=your-discord-client-id +# DISCORD_CLIENT_SECRET=your-discord-client-secret + +# ---------------------------------------------------------------------------- +# Optional: Phone Auth (Twilio) +# ---------------------------------------------------------------------------- +# TWILIO_ACCOUNT_SID=your-twilio-sid +# TWILIO_AUTH_TOKEN=your-twilio-token +# TWILIO_PHONE_NUMBER=+1234567890 + +# ---------------------------------------------------------------------------- +# Application Settings +# ---------------------------------------------------------------------------- +NODE_ENV=development +PORT=3000 +HOST=0.0.0.0 + +# Comma-separated list of allowed CORS origins +CORS_ORIGIN=http://localhost:3000,http://localhost:5173 + +# Logging +LOG_LEVEL=debug + +# ---------------------------------------------------------------------------- +# Vector Search (optional) +# ---------------------------------------------------------------------------- +# VECTOR_PROVIDER=openai +# OPENAI_API_KEY=your-openai-api-key diff --git a/CODEBASE_MAP.md b/CODEBASE_MAP.md index bd22432..9a457c3 100644 --- a/CODEBASE_MAP.md +++ b/CODEBASE_MAP.md @@ -309,6 +309,43 @@ betterbase/ --- +## Docker Deployment + +Betterbase includes production-ready Docker configuration for self-hosted deployment. 
+ +### Docker Files + +| File | Purpose | +|------|---------| +| `Dockerfile` | Monorepo build (for developing Betterbase itself) | +| `Dockerfile.project` | Project template for deploying user projects | +| `docker-compose.yml` | Development environment with PostgreSQL | +| `docker-compose.production.yml` | Production-ready configuration | +| `.dockerignore` | Optimizes Docker builds | +| `.env.example` | Environment variable template | + +### Quick Start + +```bash +# Development with Docker Compose +docker-compose up -d + +# Production deployment +docker-compose -f docker-compose.production.yml up -d +``` + +### Docker Features + +- **Multi-stage builds** for minimal image size +- **PostgreSQL** included in dev environment +- **Health checks** for reliability +- **Non-root user** for security +- **Volume mounts** for hot-reload in development +- **External database support** - Neon, Supabase, RDS, etc. +- **S3-compatible storage** - R2, S3, B2, MinIO + +--- + ## Root-Level Files ### [`package.json`](package.json) diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..74cacb0 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,113 @@ +# ============================================================================ +# Betterbase Monorepo Dockerfile +# +# This Dockerfile builds the entire Betterbase monorepo including: +# - @betterbase/cli +# - @betterbase/core +# - @betterbase/client +# - @betterbase/shared +# +# Usage: +# docker build -t betterbase:local . 
+# docker run -p 3000:3000 betterbase:local +# ============================================================================ + +# ---------------------------------------------------------------------------- +# Stage 1: Base +# ---------------------------------------------------------------------------- +FROM oven/bun:1.3.9-debian AS base + +LABEL maintainer="Betterbase Team" +LABEL description="AI-Native Backend-as-a-Service Platform" + +WORKDIR /app + +# Install system dependencies +RUN apt-get update && apt-get install -y \ + # For sharp image processing + vips-tools \ + fftw3 \ + libvips \ + # For PostgreSQL client + libpq-dev \ + # For build tools + make \ + gcc \ + g++ \ + git \ + && rm -rf /var/lib/apt/lists/* + +# ---------------------------------------------------------------------------- +# Stage 2: Dependencies +# ---------------------------------------------------------------------------- +FROM base AS deps + +WORKDIR /app + +# Copy package files +COPY package.json bun.lock ./ +COPY turbo.json ./ + +# Copy workspace package.json files +COPY packages/cli/package.json packages/cli/ +COPY packages/core/package.json packages/core/ +COPY packages/client/package.json packages/client/ +COPY apps/test-project/package.json apps/test-project/ + +# Install dependencies +RUN bun install --frozen-lockfile + +# ---------------------------------------------------------------------------- +# Stage 3: Builder +# ---------------------------------------------------------------------------- +FROM base AS builder + +WORKDIR /app + +# Copy lockfile and install dependencies +COPY package.json bun.lock ./ +COPY turbo.json ./ +RUN bun install --frozen-lockfile + +# Copy all source code +COPY packages/ packages/ +COPY apps/ apps/ + +# Build all packages using turbo +RUN bun run build + +# ---------------------------------------------------------------------------- +# Stage 4: Production Runner +# ---------------------------------------------------------------------------- +FROM 
base AS runner + +WORKDIR /app + +# Copy package files for production +COPY package.json bun.lock ./ +COPY turbo.json ./ + +# Install only production dependencies +RUN bun install --frozen-lockfile --production + +# Copy built packages from builder +COPY --from=builder /app/packages/core/dist ./node_modules/@betterbase/core/dist +COPY --from=builder /app/packages/cli/dist ./node_modules/@betterbase/cli/dist +COPY --from=builder /app/packages/client/dist ./node_modules/@betterbase/client/dist +COPY --from=builder /app/packages/shared/dist ./node_modules/@betterbase/shared/dist + +# Copy package.json files to access exports +COPY packages/core/package.json ./node_modules/@betterbase/core/ +COPY packages/cli/package.json ./node_modules/@betterbase/cli/ +COPY packages/client/package.json ./node_modules/@betterbase/client/ +COPY packages/shared/package.json ./node_modules/@betterbase/shared/ + +# Set environment +ENV NODE_ENV=production +ENV PORT=3000 + +# Expose port +EXPOSE 3000 + +# Default command (should be overridden by project-specific Dockerfiles) +CMD ["bun", "run", "src/index.ts"] diff --git a/Dockerfile.project b/Dockerfile.project new file mode 100644 index 0000000..b2d24b9 --- /dev/null +++ b/Dockerfile.project @@ -0,0 +1,120 @@ +# ============================================================================ +# Betterbase Project Dockerfile +# +# This is the recommended Dockerfile for deploying a Betterbase project +# created with `bb init my-project` +# +# Usage: +# 1. Copy this to your project root +# 2. Customize environment variables +# 3. Build and run: +# docker build -t my-betterbase-app . 
+# docker run -p 3000:3000 my-betterbase-app +# ============================================================================ + +# ---------------------------------------------------------------------------- +# Stage 1: Base +# ---------------------------------------------------------------------------- +FROM oven/bun:1.3.9-debian AS base + +LABEL maintainer="Betterbase Team" +LABEL description="Betterbase Project - AI-Native Backend Platform" + +WORKDIR /app + +# Install system dependencies for image processing and database +RUN apt-get update && apt-get install -y \ + # For sharp image processing + vips-tools \ + fftw3 \ + libvips \ + # For PostgreSQL client + libpq-dev \ + # For build tools + make \ + gcc \ + g++ \ + git \ + curl \ + && rm -rf /var/lib/apt/lists/* + +# ---------------------------------------------------------------------------- +# Stage 2: Dependencies +# ---------------------------------------------------------------------------- +FROM base AS deps + +WORKDIR /app + +# Copy package files +COPY package.json bun.lock ./ + +# Install dependencies +RUN bun install --frozen-lockfile + +# ---------------------------------------------------------------------------- +# Stage 3: Builder +# ---------------------------------------------------------------------------- +FROM base AS builder + +WORKDIR /app + +# Copy lockfile and install all dependencies +COPY package.json bun.lock ./ +RUN bun install --frozen-lockfile + +# Copy source code +COPY . . 
+ +# Generate database migrations +# NOTE: Ensure DATABASE_URL is set in your build args or environment +# RUN bunx drizzle-kit generate + +# Build the application +RUN bun run build + +# ---------------------------------------------------------------------------- +# Stage 4: Production Runner +# ---------------------------------------------------------------------------- +FROM base AS runner + +WORKDIR /app + +# Create non-root user for security +RUN addgroup --system --gid 1001 appgroup && \ + adduser --system --uid 1001 appuser + +# Copy package files +COPY package.json bun.lock ./ + +# Install only production dependencies +RUN bun install --frozen-lockfile --production + +# Copy built artifacts +COPY --from=builder /app/dist ./dist +COPY --from=builder /app/node_modules/.prisma ./node_modules/.prisma + +# Copy necessary source files for runtime +COPY --from=builder /app/src ./src +COPY --from=builder /app/betterbase.config.* ./ +COPY --from=builder /app/drizzle.config.* ./ + +# Create storage directory +RUN mkdir -p ./storage && chown -R appuser:appgroup ./storage + +# Switch to non-root user +USER appuser + +# Set environment variables +ENV NODE_ENV=production \ + PORT=3000 \ + HOST=0.0.0.0 + +# Expose port +EXPOSE 3000 + +# Health check +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD curl -f http://localhost:3000/api/health || exit 1 + +# Start the application +CMD ["bun", "run", "start"] diff --git a/README.md b/README.md index cdfdd9d..23c933d 100644 --- a/README.md +++ b/README.md @@ -952,37 +952,69 @@ bun run start ### Docker -Create a `Dockerfile`: +Betterbase includes production-ready Docker configuration for self-hosted deployment. 
-```dockerfile -FROM oven/bun:1 AS base -WORKDIR /app +#### Quick Start with Docker Compose -FROM base AS deps -COPY package.json bun.lock ./ -RUN bun install --frozen-lockfile +```bash +# Start development environment with PostgreSQL +docker-compose up -d + +# View logs +docker-compose logs -f app + +# Stop services +docker-compose down +``` + +#### Docker Files Included + +| File | Purpose | +|------|---------| +| `Dockerfile` | Monorepo build (for developing Betterbase itself) | +| `Dockerfile.project` | Project template for deploying user projects | +| `docker-compose.yml` | Development environment with PostgreSQL | +| `docker-compose.production.yml` | Production-ready configuration | +| `.env.example` | Environment variable template | -FROM base AS builder -COPY --from=deps /app/node_modules ./node_modules -COPY . . -RUN bun run build +#### Building a Project -FROM base -COPY --from=builder /app/dist ./dist -COPY --from=builder /app/node_modules ./node_modules -COPY package.json ./ +```bash +# Copy the project Dockerfile to your project root +cp Dockerfile.project ./Dockerfile + +# Configure environment variables +cp .env.example .env +# Edit .env with your database and storage settings -EXPOSE 3000 -CMD ["bun", "run", "start"] +# Build and run +docker build -t my-betterbase-app . +docker run -p 3000:3000 my-betterbase-app ``` -Build and run: +#### Production Deployment ```bash -docker build -t betterbase-app . -docker run -p 3000:3000 betterbase-app +# Use production compose file +docker-compose -f docker-compose.production.yml up -d + +# With external database (Neon, Supabase, RDS) +DATABASE_URL=postgres://... 
docker-compose -f docker-compose.production.yml up -d + +# With Cloudflare R2 storage +STORAGE_PROVIDER=r2 STORAGE_BUCKET=my-bucket docker-compose -f docker-compose.production.yml up -d ``` +#### Docker Features + +- **Multi-stage builds** for minimal image size +- **PostgreSQL** included in dev environment +- **Health checks** for reliability +- **Non-root user** for security +- **Volume mounts** for hot-reload in development +- **External database support** - Neon, Supabase, RDS, etc. +- **S3-compatible storage** - R2, S3, B2, MinIO + ### Cloud Providers | Provider | Deployment Method | diff --git a/docker-compose.production.yml b/docker-compose.production.yml new file mode 100644 index 0000000..7c1fdf6 --- /dev/null +++ b/docker-compose.production.yml @@ -0,0 +1,163 @@ +# ============================================================================ +# Betterbase Production Docker Compose +# +# Production-ready configuration with: +# - PostgreSQL (managed externally or via this compose) +# - S3-compatible storage (Cloudflare R2, MinIO, etc.) +# - Health checks +# - Proper security settings +# +# Usage: +# docker-compose -f docker-compose.production.yml up -d +# +# For production, it's recommended to: +# 1. Use external managed database (Neon, Supabase, RDS, etc.) +# 2. Use external S3 storage (R2, S3, B2) +# 3. Use a reverse proxy (Caddy, Nginx, Traefik) +# 4. Enable SSL/TLS +# ============================================================================ + +services: + # -------------------------------------------------------------------------- + # Betterbase Application + # -------------------------------------------------------------------------- + app: + build: + context: . 
+ dockerfile: Dockerfile.project + args: + # Build arguments + NODE_ENV: production + expose: + - "3000" + environment: + # Database - UPDATE THIS for your managed PostgreSQL + DATABASE_URL: ${DATABASE_URL} + + # Auth - IMPORTANT: Generate a secure secret in production + AUTH_SECRET: ${AUTH_SECRET} + AUTH_URL: ${AUTH_URL:-https://your-domain.com} + + # Storage - S3-compatible + STORAGE_PROVIDER: ${STORAGE_PROVIDER:-r2} + STORAGE_BUCKET: ${STORAGE_BUCKET:-betterbase-storage} + STORAGE_REGION: ${STORAGE_REGION:-auto} + # For R2/S3-compatible + STORAGE_ENDPOINT: ${STORAGE_ENDPOINT:-https://your-r2-endpoint.r2.cloudflarestorage.com} + STORAGE_ACCESS_KEY_ID: ${STORAGE_ACCESS_KEY_ID} + STORAGE_SECRET_ACCESS_KEY: ${STORAGE_SECRET_ACCESS_KEY} + + # Node environment + NODE_ENV: production + PORT: "3000" + HOST: "0.0.0.0" + + # Security + CORS_ORIGIN: ${CORS_ORIGIN:-https://your-domain.com} + + # Optional: Logging + LOG_LEVEL: ${LOG_LEVEL:-info} + + restart: always + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:3000/api/health"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + networks: + - betterbase-prod + + # -------------------------------------------------------------------------- + # Optional: Self-hosted PostgreSQL (if not using managed) + # Remove this service if using Neon, Supabase, RDS, etc. 
+ # -------------------------------------------------------------------------- + # postgres: + # image: postgres:16-alpine + # environment: + # POSTGRES_USER: ${DB_USER:-betterbase} + # POSTGRES_PASSWORD: ${DB_PASSWORD} + # POSTGRES_DB: ${DB_NAME:-betterbase} + # volumes: + # - postgres_data:/var/lib/postgresql/data + # restart: always + # healthcheck: + # test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-betterbase} -d ${DB_NAME:-betterbase}"] + # interval: 10s + # timeout: 5s + # retries: 5 + # networks: + # - betterbase-prod + # # Only expose ports if needed for debugging + # # ports: + # # - "5432:5432" + + # -------------------------------------------------------------------------- + # Optional: MinIO for self-hosted S3 storage + # Remove this service if using R2, S3, B2 + # -------------------------------------------------------------------------- + # minio: + # image: minio/minio:latest + # command: server /data --console-address ":9001" + # environment: + # MINIO_ROOT_USER: ${MINIO_ROOT_USER} + # MINIO_ROOT_PASSWORD: ${MINIO_ROOT_PASSWORD} + # volumes: + # - minio_data:/data + # restart: always + # networks: + # - betterbase-prod + + # -------------------------------------------------------------------------- + # Optional: Traefik reverse proxy (uncomment for automatic HTTPS) + # -------------------------------------------------------------------------- + # traefik: + # image: traefik:v3.0 + # command: + # - "--api.insecure=true" + # - "--providers.docker=true" + # - "--providers.docker.exposedbydefault=false" + # - "--entrypoints.web.address=:80" + # - "--entrypoints.websecure.address=:443" + # - "--certificatesresolvers.letsencrypt.acme.email=your@email.com" + # - "--certificatesresolvers.letsencrypt.acme.storage=/letsencrypt/acme.json" + # - "--certificatesresolvers.letsencrypt.acme.tlschallenge=true" + # ports: + # - "80:80" + # - "443:443" + # - "8080:8080" + # volumes: + # - /var/run/docker.sock:/var/run/docker.sock:ro + # - letsencrypt_data:/letsencrypt 
+ # networks: + # - betterbase-prod + + # -------------------------------------------------------------------------- + # Optional: Caddy reverse proxy (simpler than Traefik) + # -------------------------------------------------------------------------- + # caddy: + # image: caddy:2-alpine + # volumes: + # - ./Caddyfile:/etc/caddy/Caddyfile + # - caddy_data:/data + # ports: + # - "80:80" + # - "443:443" + # networks: + # - betterbase-prod + +# ---------------------------------------------------------------------------- +# Networks +# ---------------------------------------------------------------------------- +networks: + betterbase-prod: + driver: bridge + +# ---------------------------------------------------------------------------- +# Volumes +# ---------------------------------------------------------------------------- +volumes: + # postgres_data: + # minio_data: + # letsencrypt_data: + # caddy_data: diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..564bab2 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,125 @@ +# ============================================================================ +# Betterbase Docker Compose +# +# Development environment with PostgreSQL, Redis (for sessions), +# and the Betterbase application. +# +# Usage: +# docker-compose up -d # Start all services +# docker-compose logs -f app # View app logs +# docker-compose down # Stop all services +# docker-compose down -v # Stop and remove volumes +# ============================================================================ + +services: + # -------------------------------------------------------------------------- + # Betterbase Application + # -------------------------------------------------------------------------- + app: + build: + context: . 
+ dockerfile: Dockerfile.project + ports: + - "3000:3000" + environment: + # Database - connect to postgres service + DATABASE_URL: postgres://betterbase:betterbase_secret@postgres:5432/betterbase + + # Auth + AUTH_SECRET: ${AUTH_SECRET:-your-super-secret-key-min-32-chars-long-change-in-production} + AUTH_URL: http://localhost:3000 + + # Storage (local for development) + STORAGE_PROVIDER: local + STORAGE_PATH: /app/storage + + # Node environment + NODE_ENV: ${NODE_ENV:-development} + PORT: "3000" + + # CORS + CORS_ORIGIN: http://localhost:3000,http://localhost:5173 + + volumes: + # Mount source for hot reload in development + - ./src:/app/src + # Storage volume for uploaded files + - betterbase_storage:/app/storage + depends_on: + postgres: + condition: service_healthy + # redis: + # condition: service_started + restart: unless-stopped + networks: + - betterbase-network + + # -------------------------------------------------------------------------- + # PostgreSQL Database + # -------------------------------------------------------------------------- + postgres: + image: postgres:16-alpine + environment: + POSTGRES_USER: betterbase + POSTGRES_PASSWORD: betterbase_secret + POSTGRES_DB: betterbase + volumes: + - postgres_data:/var/lib/postgresql/data + # Initialize with schema on first run (optional) + # - ./init-schema.sql:/docker-entrypoint-initdb.d/schema.sql:ro + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U betterbase -d betterbase"] + interval: 5s + timeout: 5s + retries: 5 + restart: unless-stopped + networks: + - betterbase-network + + # -------------------------------------------------------------------------- + # Optional: MinIO for S3-compatible storage (development) + # -------------------------------------------------------------------------- + # minio: + # image: minio/minio:latest + # command: server /data --console-address ":9001" + # environment: + # MINIO_ROOT_USER: minioadmin + # MINIO_ROOT_PASSWORD: minioadmin + # 
volumes: + # - minio_data:/data + # ports: + # - "9000:9000" + # - "9001:9001" + # networks: + # - betterbase-network + + # -------------------------------------------------------------------------- + # Optional: Mailhog for email testing (development) + # -------------------------------------------------------------------------- + # mailhog: + # image: mailhog/mailhog:latest + # ports: + # - "1025:1025" # SMTP + # - "8025:8025" # Web UI + # networks: + # - betterbase-network + +# ---------------------------------------------------------------------------- +# Networks +# ---------------------------------------------------------------------------- +networks: + betterbase-network: + driver: bridge + +# ---------------------------------------------------------------------------- +# Volumes +# ---------------------------------------------------------------------------- +volumes: + postgres_data: + driver: local + betterbase_storage: + driver: local + # minio_data: + # driver: local From 4dd593430469d807bafb3c82fe05da5daf86d48f Mon Sep 17 00:00:00 2001 From: Ziad Khaled Date: Tue, 24 Mar 2026 20:43:47 +0000 Subject: [PATCH 5/5] feat: Add self-hosted Betterbase deployment support --- .env.example | 4 - BetterBase_SelfHosted_Spec.md | 2316 +++++++++++++++++ CODEBASE_MAP.md | 33 + Dockerfile | 22 +- Dockerfile.project | 5 +- README.md | 77 + SELF_HOSTED.md | 88 + apps/dashboard/Dockerfile | 45 + docker-compose.self-hosted.yml | 133 + docker-compose.yml | 53 +- docker/nginx/nginx.conf | 89 + docs/README.md | 21 +- docs/guides/deployment.md | 43 + packages/cli/src/commands/login.ts | 219 +- packages/cli/src/index.ts | 29 +- packages/cli/src/utils/api-client.ts | 34 + packages/cli/src/utils/credentials.ts | 44 + packages/server/Dockerfile | 34 + .../server/migrations/001_initial_schema.sql | 51 + packages/server/migrations/002_webhooks.sql | 10 + packages/server/migrations/003_functions.sql | 9 + packages/server/migrations/004_logs.sql | 12 + packages/server/package.json | 28 
+ packages/server/src/index.ts | 74 + packages/server/src/lib/admin-middleware.ts | 28 + packages/server/src/lib/auth.ts | 61 + packages/server/src/lib/db.ts | 18 + packages/server/src/lib/env.ts | 28 + packages/server/src/lib/migrate.ts | 36 + packages/server/src/routes/admin/auth.ts | 109 + packages/server/src/routes/admin/functions.ts | 51 + packages/server/src/routes/admin/index.ts | 25 + packages/server/src/routes/admin/logs.ts | 20 + packages/server/src/routes/admin/metrics.ts | 23 + packages/server/src/routes/admin/projects.ts | 106 + packages/server/src/routes/admin/storage.ts | 57 + packages/server/src/routes/admin/users.ts | 67 + packages/server/src/routes/admin/webhooks.ts | 86 + packages/server/src/routes/device/index.ts | 152 ++ packages/server/tsconfig.json | 8 + 40 files changed, 4175 insertions(+), 173 deletions(-) create mode 100644 BetterBase_SelfHosted_Spec.md create mode 100644 SELF_HOSTED.md create mode 100644 apps/dashboard/Dockerfile create mode 100644 docker-compose.self-hosted.yml create mode 100644 docker/nginx/nginx.conf create mode 100644 packages/cli/src/utils/api-client.ts create mode 100644 packages/cli/src/utils/credentials.ts create mode 100644 packages/server/Dockerfile create mode 100644 packages/server/migrations/001_initial_schema.sql create mode 100644 packages/server/migrations/002_webhooks.sql create mode 100644 packages/server/migrations/003_functions.sql create mode 100644 packages/server/migrations/004_logs.sql create mode 100644 packages/server/package.json create mode 100644 packages/server/src/index.ts create mode 100644 packages/server/src/lib/admin-middleware.ts create mode 100644 packages/server/src/lib/auth.ts create mode 100644 packages/server/src/lib/db.ts create mode 100644 packages/server/src/lib/env.ts create mode 100644 packages/server/src/lib/migrate.ts create mode 100644 packages/server/src/routes/admin/auth.ts create mode 100644 packages/server/src/routes/admin/functions.ts create mode 100644 
packages/server/src/routes/admin/index.ts create mode 100644 packages/server/src/routes/admin/logs.ts create mode 100644 packages/server/src/routes/admin/metrics.ts create mode 100644 packages/server/src/routes/admin/projects.ts create mode 100644 packages/server/src/routes/admin/storage.ts create mode 100644 packages/server/src/routes/admin/users.ts create mode 100644 packages/server/src/routes/admin/webhooks.ts create mode 100644 packages/server/src/routes/device/index.ts create mode 100644 packages/server/tsconfig.json diff --git a/.env.example b/.env.example index b7be60a..80ff1da 100644 --- a/.env.example +++ b/.env.example @@ -37,10 +37,6 @@ STORAGE_ENDPOINT=https://your-r2-endpoint.r2.cloudflarestorage.com STORAGE_ACCESS_KEY_ID=your-access-key STORAGE_SECRET_ACCESS_KEY=your-secret-key -# For local storage (development only) -# STORAGE_PROVIDER=local -# STORAGE_PATH=./storage - # ---------------------------------------------------------------------------- # Optional: Email (SMTP) # ---------------------------------------------------------------------------- diff --git a/BetterBase_SelfHosted_Spec.md b/BetterBase_SelfHosted_Spec.md new file mode 100644 index 0000000..6d993fc --- /dev/null +++ b/BetterBase_SelfHosted_Spec.md @@ -0,0 +1,2316 @@ +# BetterBase Self-Hosted — Orchestrator Specification + +> **For Kilo Code Orchestrator** +> Execute tasks in strict order. Each task lists its dependencies — do not begin a task until all listed dependencies are marked complete. All file paths are relative to the monorepo root unless otherwise noted. + +--- + +## Overview + +This document specifies everything needed to make Betterbase fully self-hostable. The output is: a user runs `docker compose up` and gets a complete BaaS platform — server, dashboard, database, storage, proxy — with the CLI working against their local instance. + +**7 implementation phases, 28 tasks total.** + +--- + +## Phase 1 — Metadata Database Schema + +> Foundation. 
Every other phase depends on this. No code is written elsewhere until this phase is complete. + +### Task SH-01 — Create Betterbase Internal Schema Migration Files + +**Depends on:** nothing + +**What it is:** Betterbase needs its own internal tables to track admin accounts, projects, device auth sessions, and CLI credentials. These tables live in the same Postgres instance the user spins up, in a schema called `betterbase_meta`. + +**Create file:** `packages/server/migrations/001_initial_schema.sql` + +```sql +-- Betterbase internal metadata schema +-- Runs once on first container start via the bootstrap process + +-- Enable UUID generation +CREATE EXTENSION IF NOT EXISTS pgcrypto; + +CREATE SCHEMA IF NOT EXISTS betterbase_meta; + +-- Admin accounts (these are Betterbase operators, not end-users of projects) +CREATE TABLE IF NOT EXISTS betterbase_meta.admin_users ( + id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text, + email TEXT NOT NULL UNIQUE, + password_hash TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Projects registered in this Betterbase instance +CREATE TABLE IF NOT EXISTS betterbase_meta.projects ( + id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text, + name TEXT NOT NULL, + slug TEXT NOT NULL UNIQUE, + admin_key_hash TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Device auth codes for CLI `bb login` flow +CREATE TABLE IF NOT EXISTS betterbase_meta.device_codes ( + user_code TEXT PRIMARY KEY, + device_code TEXT NOT NULL UNIQUE, + admin_user_id TEXT REFERENCES betterbase_meta.admin_users(id) ON DELETE CASCADE, + expires_at TIMESTAMPTZ NOT NULL, + verified BOOLEAN NOT NULL DEFAULT FALSE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- CLI sessions — issued after device code verified +CREATE TABLE IF NOT EXISTS betterbase_meta.cli_sessions ( + id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text, 
+ admin_user_id TEXT NOT NULL REFERENCES betterbase_meta.admin_users(id) ON DELETE CASCADE, + token_hash TEXT NOT NULL UNIQUE, + expires_at TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Migration tracking +CREATE TABLE IF NOT EXISTS betterbase_meta.migrations ( + id SERIAL PRIMARY KEY, + filename TEXT NOT NULL UNIQUE, + applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); +``` + +**Acceptance criteria:** +- File exists at the specified path +- All five tables defined with correct columns, types, constraints +- Schema prefix `betterbase_meta.` on every table +- `gen_random_uuid()` used for IDs (requires `pgcrypto` extension — add `CREATE EXTENSION IF NOT EXISTS pgcrypto;` at top of file before the schema) + +--- + +### Task SH-02 — Create Migration Runner + +**Depends on:** SH-01 + +**What it is:** A TypeScript module that reads SQL files from the migrations directory and applies any that haven't been applied yet. Runs on server startup before anything else. 
+ +**Create file:** `packages/server/src/lib/migrate.ts` + +```typescript +import { readdir, readFile } from "fs/promises"; +import { join } from "path"; +import type { Pool } from "pg"; + +const MIGRATIONS_DIR = join(__dirname, "../../migrations"); + +export async function runMigrations(pool: Pool): Promise { + // Ensure tracking table exists before we query it + await pool.query(` + CREATE SCHEMA IF NOT EXISTS betterbase_meta; + CREATE EXTENSION IF NOT EXISTS pgcrypto; + CREATE TABLE IF NOT EXISTS betterbase_meta.migrations ( + id SERIAL PRIMARY KEY, + filename TEXT NOT NULL UNIQUE, + applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + `); + + const files = (await readdir(MIGRATIONS_DIR)) + .filter((f) => f.endsWith(".sql")) + .sort(); + + const { rows: applied } = await pool.query<{ filename: string }>( + "SELECT filename FROM betterbase_meta.migrations" + ); + const appliedSet = new Set(applied.map((r) => r.filename)); + + for (const file of files) { + if (appliedSet.has(file)) continue; + + const sql = await readFile(join(MIGRATIONS_DIR, file), "utf-8"); + await pool.query(sql); + await pool.query( + "INSERT INTO betterbase_meta.migrations (filename) VALUES ($1)", + [file] + ); + console.log(`[migrate] Applied: ${file}`); + } + + console.log("[migrate] All migrations up to date."); +} +``` + +**Acceptance criteria:** +- Idempotent — safe to call on every server start +- Applies only unapplied files, in alphabetical order +- Logs each applied migration +- Uses `pg` Pool, not Drizzle (keeps it dependency-light and safe to run before the app fully initialises) + +--- + +### Task SH-03 — Create Database Connection Pool Module + +**Depends on:** SH-02 + +**What it is:** A singleton Postgres pool for use by the server. Reads `DATABASE_URL` from env. 
+ +**Create file:** `packages/server/src/lib/db.ts` + +```typescript +import { Pool } from "pg"; + +let _pool: Pool | null = null; + +export function getPool(): Pool { + if (!_pool) { + if (!process.env.DATABASE_URL) { + throw new Error("DATABASE_URL environment variable is required"); + } + _pool = new Pool({ + connectionString: process.env.DATABASE_URL, + max: 10, + idleTimeoutMillis: 30_000, + connectionTimeoutMillis: 5_000, + }); + } + return _pool; +} +``` + +**Acceptance criteria:** +- Singleton pattern — only one pool created per process +- Throws with clear message if `DATABASE_URL` missing +- Pool config has reasonable limits + +--- + +## Phase 2 — The Server Package + +> Creates the runnable Betterbase backend. This is what runs inside Docker. + +### Task SH-04 — Scaffold `packages/server` + +**Depends on:** SH-03 + +**What it is:** The server package doesn't exist yet. Create the scaffold. + +**Create file:** `packages/server/package.json` + +```json +{ + "name": "@betterbase/server", + "version": "0.1.0", + "private": true, + "main": "src/index.ts", + "scripts": { + "dev": "bun --watch src/index.ts", + "start": "bun src/index.ts", + "build": "bun build src/index.ts --outdir dist --target bun" + }, + "dependencies": { + "@betterbase/core": "workspace:*", + "@betterbase/shared": "workspace:*", + "hono": "^4.0.0", + "pg": "^8.11.0", + "bcryptjs": "^2.4.3", + "nanoid": "^5.0.0", + "jose": "^5.0.0" + }, + "devDependencies": { + "@types/pg": "^8.11.0", + "@types/bcryptjs": "^2.4.6", + "typescript": "^5.4.0" + } +} +``` + +**Create file:** `packages/server/tsconfig.json` + +```json +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": "src" + }, + "include": ["src/**/*", "migrations/**/*"] +} +``` + +**Create directory structure (empty files to establish layout):** + +```text +packages/server/ +├── package.json +├── tsconfig.json +├── migrations/ +│ └── 001_initial_schema.sql ← already created in SH-01 +└── src/ + 
├── index.ts ← created in SH-05 + ├── lib/ + │ ├── db.ts ← created in SH-03 + │ ├── migrate.ts ← created in SH-02 + │ ├── auth.ts ← created in SH-06 + │ └── env.ts ← created in SH-05 + └── routes/ + ├── admin/ + │ ├── index.ts ← created in SH-08 + │ ├── auth.ts ← created in SH-09 + │ ├── projects.ts ← created in SH-10 + │ ├── users.ts ← created in SH-11 + │ ├── metrics.ts ← created in SH-12 + │ ├── storage.ts ← created in SH-13 + │ ├── webhooks.ts ← created in SH-14 + │ ├── functions.ts ← created in SH-15 + │ └── logs.ts ← created in SH-16 + └── device/ + ├── index.ts ← created in SH-17 +``` + +**Acceptance criteria:** +- Directory structure exists +- `package.json` added to Turborepo workspace (root `package.json` `workspaces` array already includes `packages/*` so this is automatic) + +--- + +### Task SH-05 — Create Server Entry Point and Env Validation + +**Depends on:** SH-04 + +**Create file:** `packages/server/src/lib/env.ts` + +```typescript +import { z } from "zod"; + +const EnvSchema = z.object({ + DATABASE_URL: z.string().min(1), + BETTERBASE_JWT_SECRET: z.string().min(32, "JWT secret must be at least 32 characters"), + BETTERBASE_ADMIN_EMAIL: z.string().email().optional(), + BETTERBASE_ADMIN_PASSWORD: z.string().min(8).optional(), + PORT: z.string().default("3001"), + NODE_ENV: z.enum(["development", "production", "test"]).default("development"), + STORAGE_ENDPOINT: z.string().optional(), // MinIO or S3 endpoint + STORAGE_ACCESS_KEY: z.string().optional(), + STORAGE_SECRET_KEY: z.string().optional(), + STORAGE_BUCKET: z.string().default("betterbase"), + CORS_ORIGINS: z.string().default("http://localhost:3000"), +}); + +export type Env = z.infer; + +export function validateEnv(): Env { + const result = EnvSchema.safeParse(process.env); + if (!result.success) { + console.error("[env] Invalid environment variables:"); + console.error(result.error.flatten().fieldErrors); + process.exit(1); + } + return result.data; +} +``` + +**Create file:** 
`packages/server/src/index.ts` + +```typescript +import { Hono } from "hono"; +import { cors } from "hono/cors"; +import { logger } from "hono/logger"; +import { validateEnv } from "./lib/env"; +import { getPool } from "./lib/db"; +import { runMigrations } from "./lib/migrate"; +import { adminRouter } from "./routes/admin/index"; +import { deviceRouter } from "./routes/device/index"; + +// Validate env first — exits if invalid +const env = validateEnv(); + +// Bootstrap +const pool = getPool(); +await runMigrations(pool); + +// Seed initial admin if env vars provided and no admin exists +if (env.BETTERBASE_ADMIN_EMAIL && env.BETTERBASE_ADMIN_PASSWORD) { + const { seedAdminUser } = await import("./lib/auth"); + await seedAdminUser(pool, env.BETTERBASE_ADMIN_EMAIL, env.BETTERBASE_ADMIN_PASSWORD); +} + +// App +const app = new Hono(); + +app.use("*", logger()); +app.use("*", cors({ + origin: env.CORS_ORIGINS.split(","), + credentials: true, + allowHeaders: ["Content-Type", "Authorization"], + allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"], +})); + +// Health check — used by Docker HEALTHCHECK +app.get("/health", (c) => c.json({ status: "ok", timestamp: new Date().toISOString() })); + +// Routers +app.route("/admin", adminRouter); +app.route("/device", deviceRouter); + +// 404 +app.notFound((c) => c.json({ error: "Not found" }, 404)); + +// Error handler +app.onError((err, c) => { + console.error("[error]", err); + return c.json({ error: "Internal server error" }, 500); +}); + +const port = parseInt(env.PORT); +console.log(`[server] Betterbase server running on port ${port}`); + +export default { + port, + fetch: app.fetch, +}; +``` + +**Acceptance criteria:** +- Server starts with `bun src/index.ts` +- Env validation runs before anything else — exits with clear error if invalid +- Migrations run on startup +- `/health` returns 200 JSON +- CORS configured from env + +--- + +### Task SH-06 — Create Auth Utilities (JWT + Password) + +**Depends on:** 
SH-05 + +**What it is:** JWT signing/verification for admin sessions, password hashing for admin accounts. + +**Create file:** `packages/server/src/lib/auth.ts` + +```typescript +import { SignJWT, jwtVerify } from "jose"; +import bcrypt from "bcryptjs"; +import type { Pool } from "pg"; + +const getSecret = () => + new TextEncoder().encode(process.env.BETTERBASE_JWT_SECRET!); + +const TOKEN_EXPIRY = "30d"; +const BCRYPT_ROUNDS = 12; + +// --- Password --- + +export async function hashPassword(password: string): Promise { + return bcrypt.hash(password, BCRYPT_ROUNDS); +} + +export async function verifyPassword( + password: string, + hash: string +): Promise { + return bcrypt.compare(password, hash); +} + +// --- JWT for admin sessions --- + +export async function signAdminToken(adminUserId: string): Promise { + return new SignJWT({ sub: adminUserId, type: "admin" }) + .setProtectedHeader({ alg: "HS256" }) + .setIssuedAt() + .setExpirationTime(TOKEN_EXPIRY) + .sign(getSecret()); +} + +export async function verifyAdminToken( + token: string +): Promise<{ sub: string } | null> { + try { + const { payload } = await jwtVerify(token, getSecret()); + if (payload.type !== "admin") return null; + return { sub: payload.sub as string }; + } catch { + return null; + } +} + +// --- Middleware helper: extract + verify token from Authorization header --- + +export function extractBearerToken(authHeader: string | undefined): string | null { + if (!authHeader?.startsWith("Bearer ")) return null; + return authHeader.slice(7); +} + +// --- Seed initial admin on first start --- + +export async function seedAdminUser( + pool: Pool, + email: string, + password: string +): Promise { + const { rows } = await pool.query( + "SELECT id FROM betterbase_meta.admin_users WHERE email = $1", + [email] + ); + if (rows.length > 0) return; // Already exists + + const hash = await hashPassword(password); + await pool.query( + "INSERT INTO betterbase_meta.admin_users (email, password_hash) VALUES ($1, 
$2)", + [email, hash] + ); + console.log(`[auth] Seeded admin user: ${email}`); +} +``` + +**Acceptance criteria:** +- JWT uses HS256, 30-day expiry +- Password hashing uses bcrypt with 12 rounds +- `seedAdminUser` is idempotent +- `verifyAdminToken` returns null on any failure (never throws) + +--- + +### Task SH-07 — Create Admin Auth Middleware + +**Depends on:** SH-06 + +**What it is:** Hono middleware that validates the admin JWT on every protected route. + +**Create file:** `packages/server/src/lib/admin-middleware.ts` + +```typescript +import type { Context, Next } from "hono"; +import { extractBearerToken, verifyAdminToken } from "./auth"; +import { getPool } from "./db"; + +export async function requireAdmin(c: Context, next: Next) { + const token = extractBearerToken(c.req.header("Authorization")); + if (!token) { + return c.json({ error: "Unauthorized" }, 401); + } + + const payload = await verifyAdminToken(token); + if (!payload) { + return c.json({ error: "Invalid or expired token" }, 401); + } + + // Verify admin still exists in DB + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, email FROM betterbase_meta.admin_users WHERE id = $1", + [payload.sub] + ); + if (rows.length === 0) { + return c.json({ error: "Unauthorized" }, 401); + } + + c.set("adminUser", rows[0]); + await next(); +} +``` + +**Acceptance criteria:** +- Returns 401 with `{ error: "Unauthorized" }` for missing/invalid token +- Verifies admin still exists in DB (handles deleted accounts) +- Sets `adminUser` on context for downstream handlers + +--- + +## Phase 3 — Admin API Routes + +> These are the routes the dashboard and CLI call. Implement them in the order listed. 
+ +### Task SH-08 — Admin Router Index + +**Depends on:** SH-07 + +**Create file:** `packages/server/src/routes/admin/index.ts` + +```typescript +import { Hono } from "hono"; +import { requireAdmin } from "../../lib/admin-middleware"; +import { authRoutes } from "./auth"; +import { projectRoutes } from "./projects"; +import { userRoutes } from "./users"; +import { metricsRoutes } from "./metrics"; +import { storageRoutes } from "./storage"; +import { webhookRoutes } from "./webhooks"; +import { functionRoutes } from "./functions"; +import { logRoutes } from "./logs"; + +export const adminRouter = new Hono(); + +// Auth routes are public (login doesn't require a token) +adminRouter.route("/auth", authRoutes); + +// All other admin routes require a valid admin token +adminRouter.use("/*", requireAdmin); +adminRouter.route("/projects", projectRoutes); +adminRouter.route("/users", userRoutes); +adminRouter.route("/metrics", metricsRoutes); +adminRouter.route("/storage", storageRoutes); +adminRouter.route("/webhooks", webhookRoutes); +adminRouter.route("/functions", functionRoutes); +adminRouter.route("/logs", logRoutes); +``` + +**Acceptance criteria:** +- `/admin/auth/*` is unprotected +- All other `/admin/*` routes go through `requireAdmin` + +--- + +### Task SH-09 — Admin Auth Routes (Login / Logout / Me) + +**Depends on:** SH-08 + +**Create file:** `packages/server/src/routes/admin/auth.ts` + +```typescript +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { getPool } from "../../lib/db"; +import { verifyPassword, signAdminToken, extractBearerToken, verifyAdminToken } from "../../lib/auth"; + +export const authRoutes = new Hono(); + +// POST /admin/auth/login +authRoutes.post( + "/login", + zValidator("json", z.object({ + email: z.string().email(), + password: z.string().min(1), + })), + async (c) => { + const { email, password } = c.req.valid("json"); + const pool = getPool(); + + const { rows } = 
await pool.query( + "SELECT id, email, password_hash FROM betterbase_meta.admin_users WHERE email = $1", + [email] + ); + if (rows.length === 0) { + return c.json({ error: "Invalid credentials" }, 401); + } + + const admin = rows[0]; + const valid = await verifyPassword(password, admin.password_hash); + if (!valid) { + return c.json({ error: "Invalid credentials" }, 401); + } + + const token = await signAdminToken(admin.id); + return c.json({ token, admin: { id: admin.id, email: admin.email } }); + } +); + +// GET /admin/auth/me (requires token) +authRoutes.get("/me", async (c) => { + const token = extractBearerToken(c.req.header("Authorization")); + if (!token) return c.json({ error: "Unauthorized" }, 401); + + const payload = await verifyAdminToken(token); + if (!payload) return c.json({ error: "Unauthorized" }, 401); + + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, email, created_at FROM betterbase_meta.admin_users WHERE id = $1", + [payload.sub] + ); + if (rows.length === 0) return c.json({ error: "Unauthorized" }, 401); + + return c.json({ admin: rows[0] }); +}); + +// POST /admin/auth/logout (client-side token discard — stateless) +authRoutes.post("/logout", (c) => c.json({ success: true })); +``` + +**Acceptance criteria:** +- `POST /admin/auth/login` returns token + admin object on success, 401 on bad credentials +- `GET /admin/auth/me` validates token and returns admin data +- Timing-safe: both "user not found" and "wrong password" return identical 401 + +--- + +### Task SH-10 — Projects Routes + +**Depends on:** SH-09 + +**Create file:** `packages/server/src/routes/admin/projects.ts` + +```typescript +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { nanoid } from "nanoid"; +import { createHash, randomBytes } from "crypto"; +import { getPool } from "../../lib/db"; + +export const projectRoutes = new Hono(); + +// GET /admin/projects +projectRoutes.get("/", 
async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, name, slug, created_at, updated_at FROM betterbase_meta.projects ORDER BY created_at DESC" + ); + return c.json({ projects: rows }); +}); + +// GET /admin/projects/:id +projectRoutes.get("/:id", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, name, slug, created_at, updated_at FROM betterbase_meta.projects WHERE id = $1", + [c.req.param("id")] + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ project: rows[0] }); +}); + +// POST /admin/projects +projectRoutes.post( + "/", + zValidator("json", z.object({ + name: z.string().min(1).max(100), + slug: z.string().min(1).max(63).regex(/^[a-z0-9-]+$/, "Slug must be lowercase alphanumeric with hyphens"), + })), + async (c) => { + const { name, slug } = c.req.valid("json"); + const pool = getPool(); + + // Check slug uniqueness + const { rows: existing } = await pool.query( + "SELECT id FROM betterbase_meta.projects WHERE slug = $1", + [slug] + ); + if (existing.length > 0) { + return c.json({ error: "Slug already taken" }, 409); + } + + // Generate admin key — returned once, never again + const adminKeyPlaintext = `bb_admin_${randomBytes(24).toString("hex")}`; + const adminKeyHash = createHash("sha256").update(adminKeyPlaintext).digest("hex"); + + const { rows } = await pool.query( + `INSERT INTO betterbase_meta.projects (id, name, slug, admin_key_hash) + VALUES ($1, $2, $3, $4) + RETURNING id, name, slug, created_at`, + [nanoid(), name, slug, adminKeyHash] + ); + + // Return admin key plaintext ONCE — not stored, cannot be recovered + return c.json({ project: rows[0], admin_key: adminKeyPlaintext }, 201); + } +); + +// PATCH /admin/projects/:id +projectRoutes.patch( + "/:id", + zValidator("json", z.object({ + name: z.string().min(1).max(100).optional(), + })), + async (c) => { + const { name } = c.req.valid("json"); + const pool = getPool(); + 
const { rows } = await pool.query( + `UPDATE betterbase_meta.projects + SET name = COALESCE($1, name), updated_at = NOW() + WHERE id = $2 + RETURNING id, name, slug, updated_at`, + [name, c.req.param("id")] + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ project: rows[0] }); + } +); + +// DELETE /admin/projects/:id +projectRoutes.delete("/:id", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "DELETE FROM betterbase_meta.projects WHERE id = $1 RETURNING id", + [c.req.param("id")] + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ success: true }); +}); +``` + +**Acceptance criteria:** +- Admin key is SHA-256 hashed before storage, plaintext returned only on creation +- Slug uniqueness enforced at DB + API level +- `name` is the only patchable field (slug changes are destructive, not allowed) +- All routes return consistent `{ project }` or `{ projects }` shape + +--- + +### Task SH-11 — Users Routes + +**Depends on:** SH-09 + +**Create file:** `packages/server/src/routes/admin/users.ts` + +```typescript +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { getPool } from "../../lib/db"; +import { hashPassword } from "../../lib/auth"; +import { nanoid } from "nanoid"; + +export const userRoutes = new Hono(); + +// GET /admin/users — list all admin users +userRoutes.get("/", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, email, created_at FROM betterbase_meta.admin_users ORDER BY created_at DESC" + ); + return c.json({ users: rows }); +}); + +// POST /admin/users — create new admin user +userRoutes.post( + "/", + zValidator("json", z.object({ + email: z.string().email(), + password: z.string().min(8), + })), + async (c) => { + const { email, password } = c.req.valid("json"); + const pool = getPool(); + + const { rows: existing } = await 
pool.query( + "SELECT id FROM betterbase_meta.admin_users WHERE email = $1", + [email] + ); + if (existing.length > 0) { + return c.json({ error: "Email already registered" }, 409); + } + + const passwordHash = await hashPassword(password); + const { rows } = await pool.query( + "INSERT INTO betterbase_meta.admin_users (id, email, password_hash) VALUES ($1, $2, $3) RETURNING id, email, created_at", + [nanoid(), email, passwordHash] + ); + return c.json({ user: rows[0] }, 201); + } +); + +// DELETE /admin/users/:id +userRoutes.delete("/:id", async (c) => { + const pool = getPool(); + // Prevent deleting last admin + const { rows: count } = await pool.query( + "SELECT COUNT(*)::int as count FROM betterbase_meta.admin_users" + ); + if (count[0].count <= 1) { + return c.json({ error: "Cannot delete last admin user" }, 400); + } + + const { rows } = await pool.query( + "DELETE FROM betterbase_meta.admin_users WHERE id = $1 RETURNING id", + [c.req.param("id")] + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ success: true }); +}); +``` + +**Acceptance criteria:** +- Password never returned in any response +- Cannot delete the last admin user +- Email uniqueness enforced + +--- + +### Task SH-12 — Metrics Route + +**Depends on:** SH-09 + +**Create file:** `packages/server/src/routes/admin/metrics.ts` + +```typescript +import { Hono } from "hono"; +import { getPool } from "../../lib/db"; + +export const metricsRoutes = new Hono(); + +// GET /admin/metrics — overview stats for dashboard home +metricsRoutes.get("/", async (c) => { + const pool = getPool(); + + const [projects, admins] = await Promise.all([ + pool.query("SELECT COUNT(*)::int as count FROM betterbase_meta.projects"), + pool.query("SELECT COUNT(*)::int as count FROM betterbase_meta.admin_users"), + ]); + + return c.json({ + metrics: { + projects: projects.rows[0].count, + admin_users: admins.rows[0].count, + server_uptime_seconds: Math.floor(process.uptime()), + 
timestamp: new Date().toISOString(), + }, + }); +}); +``` + +**Acceptance criteria:** +- Returns counts for projects and admin users +- Returns server uptime +- Single query-grouped for performance + +--- + +### Task SH-13 — Storage Admin Routes + +**Depends on:** SH-09 + +**What it is:** Routes for the dashboard to list and manage MinIO/S3 buckets. Uses the `@aws-sdk/client-s3` that already exists in `@betterbase/core`. + +**Create file:** `packages/server/src/routes/admin/storage.ts` + +```typescript +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { + S3Client, + ListBucketsCommand, + CreateBucketCommand, + DeleteBucketCommand, + ListObjectsV2Command, +} from "@aws-sdk/client-s3"; + +function getS3Client(): S3Client { + return new S3Client({ + endpoint: process.env.STORAGE_ENDPOINT, + region: "us-east-1", + credentials: { + accessKeyId: process.env.STORAGE_ACCESS_KEY ?? "minioadmin", + secretAccessKey: process.env.STORAGE_SECRET_KEY ?? "minioadmin", + }, + forcePathStyle: true, // Required for MinIO + }); +} + +export const storageRoutes = new Hono(); + +// GET /admin/storage/buckets +storageRoutes.get("/buckets", async (c) => { + const client = getS3Client(); + const { Buckets } = await client.send(new ListBucketsCommand({})); + return c.json({ buckets: Buckets ?? 
[] }); +}); + +// POST /admin/storage/buckets +storageRoutes.post( + "/buckets", + zValidator("json", z.object({ name: z.string().min(1) })), + async (c) => { + const { name } = c.req.valid("json"); + const client = getS3Client(); + await client.send(new CreateBucketCommand({ Bucket: name })); + return c.json({ bucket: { name } }, 201); + } +); + +// DELETE /admin/storage/buckets/:name +storageRoutes.delete("/buckets/:name", async (c) => { + const client = getS3Client(); + await client.send(new DeleteBucketCommand({ Bucket: c.req.param("name") })); + return c.json({ success: true }); +}); + +// GET /admin/storage/buckets/:name/objects +storageRoutes.get("/buckets/:name/objects", async (c) => { + const client = getS3Client(); + const { Contents } = await client.send( + new ListObjectsV2Command({ Bucket: c.req.param("name") }) + ); + return c.json({ objects: Contents ?? [] }); +}); +``` + +**Acceptance criteria:** +- All 4 endpoints work against MinIO (forcePathStyle=true) +- Falls back to `minioadmin` defaults if env vars not set (dev convenience only) +- Errors from S3Client propagate to the global error handler + +--- + +### Task SH-14 — Webhooks Admin Routes + +**Depends on:** SH-09 + +**Create file:** `packages/server/src/routes/admin/webhooks.ts` + +```typescript +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { getPool } from "../../lib/db"; +import { nanoid } from "nanoid"; + +// Note: webhook configs are stored in betterbase_meta. 
+// Add this table to a new migration file: 002_webhooks.sql +// CREATE TABLE IF NOT EXISTS betterbase_meta.webhooks ( +// id TEXT PRIMARY KEY, +// name TEXT NOT NULL, +// table_name TEXT NOT NULL, +// events TEXT[] NOT NULL, +// url TEXT NOT NULL, +// secret TEXT, +// enabled BOOLEAN NOT NULL DEFAULT TRUE, +// created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +// ); + +export const webhookRoutes = new Hono(); + +webhookRoutes.get("/", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, name, table_name, events, url, enabled, created_at FROM betterbase_meta.webhooks ORDER BY created_at DESC" + ); + return c.json({ webhooks: rows }); +}); + +webhookRoutes.post( + "/", + zValidator("json", z.object({ + name: z.string().min(1), + table_name: z.string().min(1), + events: z.array(z.enum(["INSERT", "UPDATE", "DELETE"])).min(1), + url: z.string().url(), + secret: z.string().optional(), + enabled: z.boolean().default(true), + })), + async (c) => { + const data = c.req.valid("json"); + const pool = getPool(); + const { rows } = await pool.query( + `INSERT INTO betterbase_meta.webhooks (id, name, table_name, events, url, secret, enabled) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id, name, table_name, events, url, enabled, created_at`, + [nanoid(), data.name, data.table_name, data.events, data.url, data.secret ?? null, data.enabled] + ); + return c.json({ webhook: rows[0] }, 201); + } +); + +webhookRoutes.patch( + "/:id", + zValidator("json", z.object({ + enabled: z.boolean().optional(), + url: z.string().url().optional(), + secret: z.string().optional(), + })), + async (c) => { + const data = c.req.valid("json"); + const pool = getPool(); + const { rows } = await pool.query( + `UPDATE betterbase_meta.webhooks + SET enabled = COALESCE($1, enabled), + url = COALESCE($2, url), + secret = COALESCE($3, secret) + WHERE id = $4 + RETURNING id, name, table_name, events, url, enabled`, + [data.enabled ?? null, data.url ?? 
null, data.secret ?? null, c.req.param("id")] + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ webhook: rows[0] }); + } +); + +webhookRoutes.delete("/:id", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "DELETE FROM betterbase_meta.webhooks WHERE id = $1 RETURNING id", + [c.req.param("id")] + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ success: true }); +}); +``` + +**Also create file:** `packages/server/migrations/002_webhooks.sql` + +```sql +CREATE TABLE IF NOT EXISTS betterbase_meta.webhooks ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + table_name TEXT NOT NULL, + events TEXT[] NOT NULL, + url TEXT NOT NULL, + secret TEXT, + enabled BOOLEAN NOT NULL DEFAULT TRUE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); +``` + +**Acceptance criteria:** +- Migration file follows the naming convention so it runs after `001_initial_schema.sql` +- Webhook secret stored in plaintext (it's user-provided, used for HMAC signing, not an auth credential) +- PATCH allows toggling `enabled` without full replacement + +--- + +### Task SH-15 — Functions Admin Routes + +**Depends on:** SH-09 + +**Create file:** `packages/server/src/routes/admin/functions.ts` + +```typescript +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { getPool } from "../../lib/db"; +import { nanoid } from "nanoid"; + +// Migration: 003_functions.sql +// CREATE TABLE IF NOT EXISTS betterbase_meta.functions ( +// id TEXT PRIMARY KEY, +// name TEXT NOT NULL UNIQUE, +// runtime TEXT NOT NULL DEFAULT 'bun', +// status TEXT NOT NULL DEFAULT 'inactive', +// deploy_target TEXT, +// created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), +// updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +// ); + +export const functionRoutes = new Hono(); + +functionRoutes.get("/", async (c) => { + const pool = getPool(); + const { rows } = await 
pool.query( + "SELECT id, name, runtime, status, deploy_target, created_at FROM betterbase_meta.functions ORDER BY created_at DESC" + ); + return c.json({ functions: rows }); +}); + +functionRoutes.post( + "/", + zValidator("json", z.object({ + name: z.string().min(1).regex(/^[a-z0-9-]+$/), + runtime: z.string().default("bun"), + deploy_target: z.enum(["cloudflare", "vercel"]).optional(), + })), + async (c) => { + const data = c.req.valid("json"); + const pool = getPool(); + const { rows } = await pool.query( + `INSERT INTO betterbase_meta.functions (id, name, runtime, deploy_target) + VALUES ($1, $2, $3, $4) + RETURNING id, name, runtime, status, deploy_target, created_at`, + [nanoid(), data.name, data.runtime, data.deploy_target ?? null] + ); + return c.json({ function: rows[0] }, 201); + } +); + +functionRoutes.delete("/:id", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "DELETE FROM betterbase_meta.functions WHERE id = $1 RETURNING id", + [c.req.param("id")] + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ success: true }); +}); +``` + +**Also create file:** `packages/server/migrations/003_functions.sql` + +```sql +CREATE TABLE IF NOT EXISTS betterbase_meta.functions ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL UNIQUE, + runtime TEXT NOT NULL DEFAULT 'bun', + status TEXT NOT NULL DEFAULT 'inactive', + deploy_target TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); +``` + +**Acceptance criteria:** +- Function names constrained to lowercase alphanumeric + hyphens +- Status defaults to `inactive` + +--- + +### Task SH-16 — Logs Route + +**Depends on:** SH-09 + +**Create file:** `packages/server/src/routes/admin/logs.ts` + +```typescript +import { Hono } from "hono"; +import { getPool } from "../../lib/db"; + +// Migration: 004_logs.sql +// CREATE TABLE IF NOT EXISTS betterbase_meta.request_logs ( +// id BIGSERIAL PRIMARY KEY, +// 
method TEXT NOT NULL, +// path TEXT NOT NULL, +// status INT NOT NULL, +// duration_ms INT, +// created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +// ); + +export const logRoutes = new Hono(); + +// GET /admin/logs?limit=50&offset=0 +logRoutes.get("/", async (c) => { + const limit = Math.min(parseInt(c.req.query("limit") ?? "50"), 200); + const offset = parseInt(c.req.query("offset") ?? "0"); + const pool = getPool(); + + const { rows } = await pool.query( + `SELECT id, method, path, status, duration_ms, created_at + FROM betterbase_meta.request_logs + ORDER BY created_at DESC + LIMIT $1 OFFSET $2`, + [limit, offset] + ); + return c.json({ logs: rows, limit, offset }); +}); +``` + +**Also create file:** `packages/server/migrations/004_logs.sql` + +```sql +CREATE TABLE IF NOT EXISTS betterbase_meta.request_logs ( + id BIGSERIAL PRIMARY KEY, + method TEXT NOT NULL, + path TEXT NOT NULL, + status INT NOT NULL, + duration_ms INT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Index for dashboard queries +CREATE INDEX IF NOT EXISTS idx_request_logs_created_at + ON betterbase_meta.request_logs (created_at DESC); +``` + +**Add request logging middleware to `packages/server/src/index.ts`** after the existing `logger()` middleware line: + +```typescript +// Add this import at the top: +import { getPool } from "./lib/db"; + +// Add this middleware after app.use("*", logger()): +app.use("*", async (c, next) => { + const start = Date.now(); + await next(); + const duration = Date.now() - start; + // Fire-and-forget log insert (don't await, don't fail requests on log error) + getPool() + .query( + "INSERT INTO betterbase_meta.request_logs (method, path, status, duration_ms) VALUES ($1, $2, $3, $4)", + [c.req.method, new URL(c.req.url).pathname, c.res.status, duration] + ) + .catch(() => {}); // Silently ignore log failures +}); +``` + +**Acceptance criteria:** +- Logs are written fire-and-forget (never delays responses) +- `limit` capped at 200 to prevent large queries 
+- Index on `created_at` for fast dashboard queries + +--- + +## Phase 4 — Device Auth for CLI + +> Powers `bb login` against a self-hosted instance. + +### Task SH-17 — Device Auth Routes + +**Depends on:** SH-08 + +**What it is:** The OAuth 2.0 device flow that `bb login` uses. Three endpoints: initiate, verify (user approves in browser), poll (CLI waits for approval). + +**Create file:** `packages/server/src/routes/device/index.ts` + +```typescript +import { Hono } from "hono"; +import { zValidator } from "@hono/zod-validator"; +import { z } from "zod"; +import { nanoid } from "nanoid"; +import { getPool } from "../../lib/db"; +import { signAdminToken } from "../../lib/auth"; + +export const deviceRouter = new Hono(); + +const CODE_EXPIRY_MINUTES = 10; + +// POST /device/code — CLI calls this to initiate login +deviceRouter.post("/code", async (c) => { + const pool = getPool(); + + const deviceCode = nanoid(32); + const userCode = nanoid(8).toUpperCase(); // Human-readable: shown in CLI + const expiresAt = new Date(Date.now() + CODE_EXPIRY_MINUTES * 60 * 1000); + + await pool.query( + `INSERT INTO betterbase_meta.device_codes (user_code, device_code, expires_at) + VALUES ($1, $2, $3)`, + [userCode, deviceCode, expiresAt] + ); + + const baseUrl = process.env.BETTERBASE_PUBLIC_URL ?? `http://localhost:${process.env.PORT ?? 3001}`; + + return c.json({ + device_code: deviceCode, + user_code: userCode, + verification_uri: `${baseUrl}/device/verify`, + expires_in: CODE_EXPIRY_MINUTES * 60, + interval: 5, // CLI polls every 5 seconds + }); +}); + +// GET /device/verify — Browser opens this page to approve +deviceRouter.get("/verify", async (c) => { + const userCode = c.req.query("code"); + // Return minimal HTML form for verification + const html = ` + +Betterbase CLI Login + + + +
+<h1>Betterbase CLI Login</h1>
+<p>Enter your admin credentials to authorize the CLI.</p>
+<form method="POST" action="/device/verify">
+  <input name="user_code" value="${userCode ?? ""}" placeholder="Device code" required />
+  <input name="email" type="email" placeholder="Admin email" required />
+  <input name="password" type="password" placeholder="Password" required />
+  <button type="submit">Authorize</button>
+</form>
+</body>
+</html>`;
+  return c.html(html);
+});
+
+// POST /device/verify — Form submission
+deviceRouter.post("/verify", async (c) => {
+  const body = await c.req.parseBody();
+  const userCode = String(body.user_code ?? "").toUpperCase().trim();
+  const email = String(body.email ?? "").trim();
+  const password = String(body.password ?? "");
+
+  const pool = getPool();
+
+  // Verify admin credentials
+  const { rows: admins } = await pool.query(
+    "SELECT id, password_hash FROM betterbase_meta.admin_users WHERE email = $1",
+    [email]
+  );
+  if (admins.length === 0) {
+    return c.html(`
+<html><body>
+<p>Invalid credentials.</p>
+</body></html>
`); + } + + const { verifyPassword } = await import("../../lib/auth"); + const valid = await verifyPassword(password, admins[0].password_hash); + if (!valid) { + return c.html(`
+<html><body>
+<p>Invalid credentials.</p>
+</body></html>
`); + } + + // Find and verify the device code + const { rows: codes } = await pool.query( + `SELECT user_code FROM betterbase_meta.device_codes + WHERE user_code = $1 AND verified = FALSE AND expires_at > NOW()`, + [userCode] + ); + if (codes.length === 0) { + return c.html(`
+<html><body>
+<p>Code not found or expired.</p>
+</body></html>
`); + } + + // Mark verified, associate admin user + await pool.query( + `UPDATE betterbase_meta.device_codes + SET verified = TRUE, admin_user_id = $1 + WHERE user_code = $2`, + [admins[0].id, userCode] + ); + + return c.html(`
+<html><body>
+<p>✓ CLI authorized. You can close this tab.</p>
+</body></html>
`); +}); + +// POST /device/token — CLI polls this to get the token once verified +deviceRouter.post("/token", zValidator("json", z.object({ device_code: z.string() })), async (c) => { + const { device_code } = c.req.valid("json"); + const pool = getPool(); + + const { rows } = await pool.query( + `SELECT verified, admin_user_id, expires_at + FROM betterbase_meta.device_codes + WHERE device_code = $1`, + [device_code] + ); + + if (rows.length === 0) { + return c.json({ error: "invalid_device_code" }, 400); + } + + const code = rows[0]; + + if (new Date(code.expires_at) < new Date()) { + return c.json({ error: "expired_token" }, 400); + } + + if (!code.verified) { + return c.json({ error: "authorization_pending" }, 202); + } + + // Issue token, clean up device code + const token = await signAdminToken(code.admin_user_id); + await pool.query( + "DELETE FROM betterbase_meta.device_codes WHERE device_code = $1", + [device_code] + ); + + return c.json({ access_token: token, token_type: "Bearer" }); +}); +``` + +**Acceptance criteria:** +- Full device flow: POST /device/code → browser /device/verify → CLI polls /device/token +- Device codes expire after 10 minutes +- Verified codes are deleted after token issued (one-time use) +- `/device/verify` is a self-contained HTML page (no external deps, works without dashboard) +- `authorization_pending` returns 202 (CLI uses this to keep polling) + +--- + +## Phase 5 — CLI Self-Hosted Mode + +> Allows the CLI to target a self-hosted Betterbase instance instead of cloud. + +### Task SH-18 — Update Credentials Schema + +**Depends on:** SH-17 + +**File to modify:** `packages/cli/src/utils/credentials.ts` + +If this file doesn't exist, create it. 
If it does, update it to the following shape: + +**Create/replace file:** `packages/cli/src/utils/credentials.ts` + +```typescript +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs"; +import { homedir } from "os"; +import { join } from "path"; +import { z } from "zod"; + +const CREDENTIALS_DIR = join(homedir(), ".betterbase"); +const CREDENTIALS_FILE = join(CREDENTIALS_DIR, "credentials.json"); + +const CredentialsSchema = z.object({ + token: z.string(), + admin_email: z.string().email(), + server_url: z.string().url(), // ← NEW: base URL of the Betterbase server + created_at: z.string(), +}); + +export type Credentials = z.infer; + +export function saveCredentials(creds: Credentials): void { + if (!existsSync(CREDENTIALS_DIR)) { + mkdirSync(CREDENTIALS_DIR, { recursive: true, mode: 0o700 }); + } + writeFileSync(CREDENTIALS_FILE, JSON.stringify(creds, null, 2), { mode: 0o600 }); +} + +export function loadCredentials(): Credentials | null { + if (!existsSync(CREDENTIALS_FILE)) return null; + try { + const raw = JSON.parse(readFileSync(CREDENTIALS_FILE, "utf-8")); + return CredentialsSchema.parse(raw); + } catch { + return null; + } +} + +export function clearCredentials(): void { + if (existsSync(CREDENTIALS_FILE)) { + writeFileSync(CREDENTIALS_FILE, JSON.stringify({})); + } +} + +export function getServerUrl(): string { + const creds = loadCredentials(); + return creds?.server_url ?? 
"https://api.betterbase.io"; // Falls back to cloud +} +``` + +**Acceptance criteria:** +- `server_url` field added to credentials schema +- `getServerUrl()` helper returns local URL when credentials contain it +- Falls back to cloud URL when no credentials exist + +--- + +### Task SH-19 — Update `bb login` Command for Self-Hosted + +**Depends on:** SH-18 + +**File to modify:** `packages/cli/src/commands/login.ts` + +**Add `--url` option to the login command.** The full updated implementation: + +```typescript +import { Command } from "commander"; +import chalk from "chalk"; +import { saveCredentials, clearCredentials, loadCredentials } from "../utils/credentials"; +import { info, success, error, warn } from "../utils/logger"; +import prompts from "../utils/prompts"; + +const DEFAULT_SERVER_URL = "https://api.betterbase.io"; +const POLL_INTERVAL_MS = 5000; +const POLL_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes + +export function registerLoginCommand(program: Command) { + program + .command("login") + .description("Authenticate with a Betterbase instance") + .option("--url ", "Self-hosted Betterbase server URL", DEFAULT_SERVER_URL) + .action(async (opts) => { + await runLoginCommand({ serverUrl: opts.url }); + }); + + program + .command("logout") + .description("Clear stored credentials") + .action(() => { + clearCredentials(); + success("Logged out."); + }); +} + +export async function runLoginCommand(opts: { serverUrl?: string } = {}) { + const serverUrl = (opts.serverUrl ?? DEFAULT_SERVER_URL).replace(/\/$/, ""); + + // Validate URL for security + try { + const url = new URL(serverUrl); + const isLocalhost = url.hostname === "localhost" || url.hostname === "127.0.0.1"; + + // Warn if using non-https for non-localhost + if (url.protocol === "http:" && !isLocalhost) { + warn("Using HTTP on a public host is insecure. 
Consider using HTTPS."); + const confirmed = await prompts.confirm({ + message: "Continue anyway?", + default: false, + }); + if (!confirmed) { + info("Login cancelled."); + return; + } + } + + info(`Logging in to ${chalk.cyan(serverUrl)} ...`); + info(`Verification URL will be displayed after requesting code.`); + } catch (err) { + error(`Invalid server URL: ${opts.serverUrl}`); + error("URL must include protocol (http/https) and host"); + return; + } + + // Step 1: Request device code + let deviceCode: string; + let userCode: string; + let verificationUri: string; + + try { + const res = await fetch(`${serverUrl}/device/code`, { method: "POST" }); + if (!res.ok) throw new Error(`Server returned ${res.status}`); + const data = await res.json() as { + device_code: string; + user_code: string; + verification_uri: string; + }; + deviceCode = data.device_code; + userCode = data.user_code; + verificationUri = data.verification_uri; + } catch (err: any) { + error(`Could not reach server: ${err.message}`); + process.exit(1); + } + + console.log(""); + console.log(chalk.bold("Open this URL in your browser to authorize:")); + console.log(chalk.cyan(`${verificationUri}?code=${userCode}`)); + console.log(""); + console.log(`Your code: ${chalk.yellow.bold(userCode)}`); + console.log("Waiting for authorization..."); + + // Step 2: Poll for token + const deadline = Date.now() + POLL_TIMEOUT_MS; + + while (Date.now() < deadline) { + await new Promise((r) => setTimeout(r, POLL_INTERVAL_MS)); + + const res = await fetch(`${serverUrl}/device/token`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ device_code: deviceCode }), + }); + + if (res.status === 202) continue; // authorization_pending + + if (!res.ok) { + const body = await res.json() as { error?: string }; + if (body.error === "authorization_pending") continue; + error(`Login failed: ${body.error ?? 
"unknown error"}`); + process.exit(1); + } + + const { access_token } = await res.json() as { access_token: string }; + + // Get admin info + const meRes = await fetch(`${serverUrl}/admin/auth/me`, { + headers: { Authorization: `Bearer ${access_token}` }, + }); + const { admin } = await meRes.json() as { admin: { email: string } }; + + saveCredentials({ + token: access_token, + admin_email: admin.email, + server_url: serverUrl, + created_at: new Date().toISOString(), + }); + + success(`Logged in as ${chalk.cyan(admin.email)}`); + return; + } + + error("Login timed out. Please try again."); + process.exit(1); +} +``` + +**Acceptance criteria:** +- `bb login` with no flags works against default cloud URL +- `bb login --url http://localhost:3001` works against local instance +- `server_url` saved to credentials on success +- `bb logout` clears credentials + +--- + +### Task SH-20 — Add Self-Hosted URL to All CLI API Calls + +**Depends on:** SH-19 + +**What it is:** Every CLI command that calls an API must read `server_url` from credentials instead of having hardcoded URLs. This is a cross-cutting change. + +**Create file:** `packages/cli/src/utils/api-client.ts` + +```typescript +import { loadCredentials } from "./credentials"; +import { error } from "./logger"; + +export function requireAuth(): { token: string; serverUrl: string } { + const creds = loadCredentials(); + if (!creds?.token) { + error("Not logged in. Run `bb login` first."); + process.exit(1); + } + return { token: creds.token, serverUrl: creds.server_url }; +} + +export async function apiRequest( + path: string, + options: RequestInit = {} +): Promise { + const { token, serverUrl } = requireAuth(); + + const url = `${serverUrl}${path}`; + const res = await fetch(url, { + ...options, + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${token}`, + ...(options.headers ?? 
{}), + }, + }); + + if (!res.ok) { + const body = await res.json().catch(() => ({ error: "Request failed" })) as { error?: string }; + throw new Error(body.error ?? `HTTP ${res.status}`); + } + + return res.json() as Promise; +} +``` + +**Update all CLI commands that make API calls** to use `apiRequest()` instead of raw `fetch()` with hardcoded URLs. Specifically audit and update: + +- `packages/cli/src/commands/login.ts` — already done in SH-19 +- Any command that calls `process.env.BETTERBASE_API_URL` or hardcoded URLs — replace with `apiRequest()` + +**Acceptance criteria:** +- `apiRequest()` reads `server_url` from credentials +- Exits with clear message if not logged in +- All CLI commands that talk to the API use this utility + +--- + +### Task SH-21 — Add `init` to `PUBLIC_COMMANDS` + +**Depends on:** SH-20 + +**File to modify:** `packages/cli/src/index.ts` line 17 + +**This is the previously flagged critical fix.** Add `"init"` to the `PUBLIC_COMMANDS` array so `bb init` works without being logged in. + +```typescript +// Find the PUBLIC_COMMANDS array and ensure it includes: +const PUBLIC_COMMANDS = ["login", "init", "--version", "--help", "-V", "-h"]; +``` + +**Acceptance criteria:** +- `bb init` runs without requiring credentials +- `bb login` still in PUBLIC_COMMANDS +- `bb --version` still in PUBLIC_COMMANDS + +--- + +## Phase 6 — Docker Compose Self-Hosted + +> Packages everything into a single runnable deployment. 
+ +### Task SH-22 — Create Server Dockerfile + +**Depends on:** SH-05 + +**Create file:** `packages/server/Dockerfile` + +```dockerfile +FROM oven/bun:1.2-alpine AS builder + +WORKDIR /app + +# Copy monorepo structure (only what server needs) +COPY package.json turbo.json bun.lock ./ +COPY packages/server/package.json ./packages/server/ +COPY packages/core/package.json ./packages/core/ +COPY packages/shared/package.json ./packages/shared/ + +RUN bun install --frozen-lockfile + +COPY packages/server ./packages/server +COPY packages/core ./packages/core +COPY packages/shared ./packages/shared +COPY tsconfig.base.json ./ + +RUN cd packages/server && bun build src/index.ts --outdir dist --target bun + +# --- Runtime stage --- +FROM oven/bun:1.2-alpine + +WORKDIR /app + +COPY --from=builder /app/packages/server/dist ./dist +COPY --from=builder /app/packages/server/migrations ./migrations + +# Health check +HEALTHCHECK --interval=10s --timeout=5s --start-period=30s --retries=3 \ + CMD wget -qO- http://localhost:3001/health || exit 1 + +EXPOSE 3001 + +CMD ["bun", "dist/index.js"] +``` + +**Acceptance criteria:** +- Multi-stage build — runtime image contains only compiled output + migrations +- `bun:1.2-alpine` base for minimal image size +- Healthcheck targets `/health` on port 3001 +- Migrations directory copied to runtime stage + +--- + +### Task SH-23 — Create Nginx Configuration + +**Depends on:** SH-22 + +**What it is:** Nginx sits in front of all services and routes traffic by path prefix. 
+ +**Create file:** `docker/nginx/nginx.conf` + +```nginx +events { + worker_connections 1024; +} + +http { + upstream betterbase_server { + server betterbase-server:3001; + } + + upstream betterbase_dashboard { + server betterbase-dashboard:80; + } + + upstream minio { + server minio:9000; + } + + server { + listen 80; + server_name _; + + # API + admin + device auth + location /admin/ { + proxy_pass http://betterbase_server; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_read_timeout 60s; + } + + location /device/ { + proxy_pass http://betterbase_server; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + } + + location /health { + proxy_pass http://betterbase_server; + } + + # Storage (MinIO) + location /storage/ { + rewrite ^/storage/(.*) /$1 break; + proxy_pass http://minio; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + client_max_body_size 100m; + } + + # Dashboard (catch-all) + location / { + proxy_pass http://betterbase_dashboard; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + # SPA fallback + proxy_intercept_errors on; + error_page 404 = @dashboard_fallback; + } + + location @dashboard_fallback { + proxy_pass http://betterbase_dashboard; + proxy_set_header Host $host; + } + + # WebSocket support for realtime + location /realtime/ { + proxy_pass http://betterbase_server; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_read_timeout 3600s; + } + } +} +``` + +**Acceptance criteria:** +- API traffic (`/admin/`, `/device/`, `/health`) → server +- Storage traffic (`/storage/`) → MinIO +- Dashboard (everything else) → dashboard container +- WebSocket upgrade headers set for `/realtime/` +- `client_max_body_size 100m` for file uploads + +--- + +### Task SH-24 — Create Self-Hosted Docker Compose + +**Depends on:** SH-22, SH-23 + +**Create file:** 
`docker-compose.self-hosted.yml` + +```yaml +version: "3.9" + +services: + # ─── Postgres ────────────────────────────────────────────────────────────── + postgres: + image: postgres:16-alpine + container_name: betterbase-postgres + restart: unless-stopped + environment: + POSTGRES_USER: betterbase + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-betterbase} + POSTGRES_DB: betterbase + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U betterbase"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - betterbase-internal + + # ─── MinIO (S3-compatible storage) ───────────────────────────────────────── + minio: + image: minio/minio:latest + container_name: betterbase-minio + restart: unless-stopped + command: server /data --console-address ":9001" + environment: + MINIO_ROOT_USER: ${STORAGE_ACCESS_KEY:-minioadmin} + MINIO_ROOT_PASSWORD: ${STORAGE_SECRET_KEY:-minioadmin} + volumes: + - minio_data:/data + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - betterbase-internal + + # ─── MinIO bucket init (runs once, exits) ────────────────────────────────── + minio-init: + image: minio/mc:latest + container_name: betterbase-minio-init + depends_on: + minio: + condition: service_healthy + entrypoint: > + /bin/sh -c " + mc alias set local http://minio:9000 ${STORAGE_ACCESS_KEY:-minioadmin} ${STORAGE_SECRET_KEY:-minioadmin}; + mc mb --ignore-existing local/betterbase; + mc anonymous set public local/betterbase; + echo 'MinIO bucket initialized.'; + " + networks: + - betterbase-internal + + # ─── Betterbase Server ───────────────────────────────────────────────────── + betterbase-server: + build: + context: . 
+ dockerfile: packages/server/Dockerfile + container_name: betterbase-server + restart: unless-stopped + depends_on: + postgres: + condition: service_healthy + minio: + condition: service_healthy + environment: + DATABASE_URL: postgresql://betterbase:${POSTGRES_PASSWORD:-betterbase}@postgres:5432/betterbase + BETTERBASE_JWT_SECRET: ${BETTERBASE_JWT_SECRET:?JWT secret required - set BETTERBASE_JWT_SECRET in .env} + BETTERBASE_ADMIN_EMAIL: ${BETTERBASE_ADMIN_EMAIL:-} + BETTERBASE_ADMIN_PASSWORD: ${BETTERBASE_ADMIN_PASSWORD:-} + BETTERBASE_PUBLIC_URL: ${BETTERBASE_PUBLIC_URL:-http://localhost} + STORAGE_ENDPOINT: http://minio:9000 + STORAGE_ACCESS_KEY: ${STORAGE_ACCESS_KEY:-minioadmin} + STORAGE_SECRET_KEY: ${STORAGE_SECRET_KEY:-minioadmin} + PORT: "3001" + NODE_ENV: production + CORS_ORIGINS: ${CORS_ORIGINS:-http://localhost} + networks: + - betterbase-internal + healthcheck: + test: ["CMD-SHELL", "wget -qO- http://localhost:3001/health || exit 1"] + interval: 10s + timeout: 5s + retries: 5 + + # ─── Dashboard ───────────────────────────────────────────────────────────── + betterbase-dashboard: + build: + context: . 
+ dockerfile: apps/dashboard/Dockerfile # Dashboard Dockerfile — see SH-25 + container_name: betterbase-dashboard + restart: unless-stopped + depends_on: + betterbase-server: + condition: service_healthy + environment: + VITE_API_URL: ${BETTERBASE_PUBLIC_URL:-http://localhost} + networks: + - betterbase-internal + + # ─── Nginx Reverse Proxy ─────────────────────────────────────────────────── + nginx: + image: nginx:alpine + container_name: betterbase-nginx + restart: unless-stopped + depends_on: + - betterbase-server + - betterbase-dashboard + ports: + - "${HTTP_PORT:-80}:80" + volumes: + - ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro + networks: + - betterbase-internal + healthcheck: + test: ["CMD", "nginx", "-t"] + interval: 30s + timeout: 10s + retries: 3 + +volumes: + postgres_data: + minio_data: + +networks: + betterbase-internal: + driver: bridge +``` + +**Acceptance criteria:** +- All 5 services defined with healthchecks +- Dependency ordering: postgres + minio → server → dashboard → nginx +- `BETTERBASE_JWT_SECRET` is required (`:?` syntax causes compose to fail with clear error if missing) +- `minio-init` creates default bucket and exits — does not stay running +- All services on internal network, only nginx exposes a port + +--- + +### Task SH-25 — Create Dashboard Dockerfile + +**Depends on:** SH-24 + +**What it is:** The dashboard is a static build served by nginx. This assumes the dashboard is a Vite/React app. 
+ +**Create file:** `apps/dashboard/Dockerfile` + +```dockerfile +FROM node:20-alpine AS builder + +WORKDIR /app + +COPY apps/dashboard/package.json apps/dashboard/package-lock.json* ./ +RUN npm install --frozen-lockfile + +COPY apps/dashboard ./ + +# Inject API URL at build time +ARG VITE_API_URL=http://localhost +ENV VITE_API_URL=$VITE_API_URL + +RUN npm run build + +# --- Runtime: serve static files with nginx --- +FROM nginx:alpine + +COPY --from=builder /app/dist /usr/share/nginx/html + +# SPA routing: serve index.html for all unknown paths +RUN echo 'server { \ + listen 80; \ + root /usr/share/nginx/html; \ + index index.html; \ + location / { try_files $uri $uri/ /index.html; } \ +}' > /etc/nginx/conf.d/default.conf + +EXPOSE 80 +``` + +**Note for orchestrator:** If the dashboard uses Bun instead of npm, replace the `node:20-alpine` base with `oven/bun:1.2-alpine` and replace `npm install` with `bun install` and `npm run build` with `bun run build`. + +**Acceptance criteria:** +- Multi-stage build — final image is nginx + static files only +- SPA fallback (all routes serve `index.html`) configured +- `VITE_API_URL` injectable at build time via build arg + +--- + +### Task SH-26 — Create `.env.example` for Self-Hosted + +**Depends on:** SH-24 + +**Create file:** `.env.self-hosted.example` + +```bash +# ─── REQUIRED ──────────────────────────────────────────────────────────────── + +# Minimum 32 characters. Generate with: openssl rand -base64 32 +BETTERBASE_JWT_SECRET=change-me-to-a-random-string-at-least-32-chars + +# ─── FIRST-RUN ADMIN SETUP ────────────────────────────────────────────────── +# Set these to auto-create an admin account on first start. +# Remove from .env after first start (or leave — it's idempotent). 
+BETTERBASE_ADMIN_EMAIL=admin@example.com +BETTERBASE_ADMIN_PASSWORD=changeme123 + +# ─── OPTIONAL: CUSTOMISE PORTS / URLS ──────────────────────────────────────── + +# Public URL of your Betterbase instance (used in CLI device flow URLs) +BETTERBASE_PUBLIC_URL=http://localhost + +# Port nginx listens on +HTTP_PORT=80 + +# ─── OPTIONAL: POSTGRES ────────────────────────────────────────────────────── +POSTGRES_PASSWORD=betterbase + +# ─── OPTIONAL: STORAGE ─────────────────────────────────────────────────────── +# MinIO credentials (default: minioadmin/minioadmin — change for production) +STORAGE_ACCESS_KEY=minioadmin +STORAGE_SECRET_KEY=minioadmin + +# ─── OPTIONAL: CORS ───────────────────────────────────────────────────────── +# Comma-separated allowed origins for the API +CORS_ORIGINS=http://localhost +``` + +**Acceptance criteria:** +- Every variable referenced in `docker-compose.self-hosted.yml` is documented here +- Required variables clearly marked +- Sensible defaults for all optional variables + +--- + +## Phase 7 — First-Run Bootstrap + +> Makes the out-of-the-box experience self-explanatory. + +### Task SH-27 — Create Setup Endpoint + +**Depends on:** SH-09 + +**What it is:** A one-time-only endpoint that creates the first admin account via HTTP POST, available only before any admin exists. Once an admin exists, returns 410 Gone. This is an alternative to the env var seeding — useful for cloud deployments where env vars are awkward. 
+ +**Add to:** `packages/server/src/routes/admin/auth.ts` + +```typescript +import { z } from "zod"; +import { zValidator } from "@hono/zod-validator"; + +// POST /admin/auth/setup — available only before first admin is created +authRoutes.post( + "/setup", + zValidator("json", z.object({ + email: z.string().email(), + password: z.string().min(8), + })), + async (c) => { + const pool = getPool(); + + // Check if any admin exists + const { rows } = await pool.query( + "SELECT COUNT(*)::int as count FROM betterbase_meta.admin_users" + ); + if (rows[0].count > 0) { + return c.json({ error: "Setup already complete" }, 410); + } + + const { email, password } = c.req.valid("json"); + const { hashPassword, signAdminToken } = await import("../../lib/auth"); + const { nanoid } = await import("nanoid"); + + const passwordHash = await hashPassword(password); + const { rows: newAdmin } = await pool.query( + "INSERT INTO betterbase_meta.admin_users (id, email, password_hash) VALUES ($1, $2, $3) RETURNING id, email", + [nanoid(), email, passwordHash] + ); + + const token = await signAdminToken(newAdmin[0].id); + return c.json({ + message: "Admin account created. Save your token — log in with `bb login`.", + admin: newAdmin[0], + token, + }, 201); + } +); +``` + +**Acceptance criteria:** +- Returns 410 if any admin already exists (idempotent safety) +- Returns token on success so the caller can immediately use it +- This endpoint is under `/admin/auth/setup` — it is NOT behind `requireAdmin` middleware (it can't be — there's no admin yet) + +--- + +### Task SH-28 — Create Self-Hosted README + +**Depends on:** SH-26, SH-27 + +**Create file:** `SELF_HOSTED.md` + +```markdown +# Self-Hosting Betterbase + +## Prerequisites + +- Docker and Docker Compose +- Ports 80 (or your chosen `HTTP_PORT`) available + +## Quick Start + +**1. Copy the example env file:** +\`\`\`bash +cp .env.self-hosted.example .env +\`\`\` + +**2. 
Edit `.env` — at minimum set these three values:**
+\`\`\`bash
+BETTERBASE_JWT_SECRET=your-random-string-here # min 32 chars
+BETTERBASE_ADMIN_EMAIL=you@example.com
+BETTERBASE_ADMIN_PASSWORD=yourpassword
+\`\`\`
+Generate a secret: `openssl rand -base64 32`
+
+**3. Start everything:**
+\`\`\`bash
+docker compose -f docker-compose.self-hosted.yml up -d
+\`\`\`
+
+**4. Open the dashboard:**
+Navigate to `http://localhost` (or your configured `BETTERBASE_PUBLIC_URL`).
+
+**5. Connect your CLI:**
+\`\`\`bash
+bb login --url http://localhost
+\`\`\`
+
+---
+
+## What Runs
+
+| Service | Internal Port | Description |
+|---------|--------------|-------------|
+| nginx | 80 (public) | Reverse proxy — only public-facing port |
+| betterbase-server | 3001 (internal) | API server |
+| betterbase-dashboard | 80 (internal) | Dashboard UI |
+| postgres | 5432 (internal) | Betterbase metadata database |
+| minio | 9000 (internal) | S3-compatible object storage |
+
+---
+
+## CLI Usage Against Self-Hosted
+
+After `bb login --url http://your-server`, all CLI commands automatically target your server.
+
+\`\`\`bash
+bb login --url http://localhost # authenticate
+bb init my-project # create a project (registered to your local instance)
+bb sync # sync local project to server
+\`\`\`
+
+---
+
+## Production Checklist
+
+- [ ] `BETTERBASE_JWT_SECRET` is a random 32+ character string
+- [ ] `POSTGRES_PASSWORD` changed from default
+- [ ] `STORAGE_ACCESS_KEY` and `STORAGE_SECRET_KEY` changed from defaults
+- [ ] `BETTERBASE_PUBLIC_URL` set to your actual domain
+- [ ] SSL/TLS termination configured (add HTTPS to the nginx config or use a load balancer)
+- [ ] Remove `BETTERBASE_ADMIN_EMAIL` / `BETTERBASE_ADMIN_PASSWORD` from `.env` after first start (or keep — seeding is idempotent)
+
+---
+
+## Troubleshooting
+
+**Server won't start:**
+Check that `BETTERBASE_JWT_SECRET` is set (minimum 32 characters). 
Run: +\`\`\`bash +docker compose -f docker-compose.self-hosted.yml logs betterbase-server +\`\`\` + +**Can't log in with CLI:** +Ensure `BETTERBASE_PUBLIC_URL` in your `.env` matches the URL you pass to `bb login --url`. + +**Storage not working:** +The `minio-init` container initialises the default bucket on first start. Check its logs: +\`\`\`bash +docker compose -f docker-compose.self-hosted.yml logs minio-init +\`\`\` +``` + +**Acceptance criteria:** +- Covers the complete first-run flow in under 5 steps +- Production checklist is accurate and complete +- Troubleshooting covers the three most likely failure modes + +--- + +## Summary — Task Execution Order + +```text +Phase 1 — Metadata DB + SH-01 Create 001_initial_schema.sql + SH-02 Create migration runner + SH-03 Create DB pool module + +Phase 2 — Server Package + SH-04 Scaffold packages/server + SH-05 Entry point + env validation + SH-06 Auth utilities (JWT + bcrypt) + SH-07 Admin auth middleware + +Phase 3 — Admin API + SH-08 Admin router index + SH-09 Auth routes (login/logout/me) + SH-10 Projects routes + SH-11 Users routes + SH-12 Metrics route + SH-13 Storage admin routes + SH-14 Webhooks routes + 002 migration + SH-15 Functions routes + 003 migration + SH-16 Logs route + 004 migration + request logger + +Phase 4 — Device Auth + SH-17 Device auth routes (full flow) + +Phase 5 — CLI Self-Hosted Mode + SH-18 Update credentials schema + SH-19 Update bb login with --url flag + SH-20 api-client.ts utility + SH-21 Add init to PUBLIC_COMMANDS (critical fix) + +Phase 6 — Docker + SH-22 Server Dockerfile + SH-23 Nginx config + SH-24 docker-compose.self-hosted.yml + SH-25 Dashboard Dockerfile + SH-26 .env.self-hosted.example + +Phase 7 — Bootstrap + SH-27 /admin/auth/setup endpoint + SH-28 SELF_HOSTED.md +``` + +**Total: 28 tasks across 7 phases.** + +--- + +## Dependencies Not Yet in `packages/server/package.json` to Verify + +Before starting Phase 2, confirm these are available or add them: +- `pg` + 
`@types/pg` — Postgres client +- `bcryptjs` + `@types/bcryptjs` — password hashing +- `jose` — JWT +- `nanoid` — ID generation +- `@hono/zod-validator` — request validation +- `@aws-sdk/client-s3` — already in `@betterbase/core`, may need to be added directly to `packages/server` as well + +--- + +*End of specification. Execute tasks in the listed order. Do not skip phases.* +``` diff --git a/CODEBASE_MAP.md b/CODEBASE_MAP.md index 9a457c3..cd4834b 100644 --- a/CODEBASE_MAP.md +++ b/CODEBASE_MAP.md @@ -226,7 +226,40 @@ betterbase/ │ │ ├── constants.ts # Shared constants │ │ └── utils.ts # Utility functions │ │ +│ ├── server/ # @betterbase/server - Self-hosted server +│ │ ├── package.json +│ │ ├── tsconfig.json +│ │ ├── Dockerfile +│ │ ├── migrations/ # Database migrations +│ │ │ ├── 001_initial_schema.sql +│ │ │ ├── 002_admin_users.sql +│ │ │ ├── 003_projects.sql +│ │ │ └── 004_logs.sql +│ │ └── src/ +│ │ ├── index.ts # Server entry point +│ │ ├── lib/ +│ │ │ ├── db.ts # Database connection +│ │ │ ├── migrate.ts # Migration runner +│ │ │ ├── env.ts # Environment validation +│ │ │ ├── auth.ts # Auth utilities +│ │ │ └── admin-middleware.ts # Admin auth middleware +│ │ └── routes/ +│ │ ├── admin/ # Admin API routes +│ │ │ ├── index.ts +│ │ │ ├── auth.ts +│ │ │ ├── projects.ts +│ │ │ ├── users.ts +│ │ │ ├── metrics.ts +│ │ │ ├── storage.ts +│ │ │ ├── webhooks.ts +│ │ │ ├── functions.ts +│ │ │ └── logs.ts +│ │ └── device/ # Device auth routes +│ │ └── index.ts +│ │ ├── apps/ +│ ├── dashboard/ # Admin dashboard for self-hosted +│ │ ├── Dockerfile │ └── test-project/ # Example/test project │ ├── betterbase.config.ts # Project configuration │ ├── drizzle.config.ts # Drizzle configuration diff --git a/Dockerfile b/Dockerfile index 74cacb0..13627f0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,7 +23,7 @@ LABEL description="AI-Native Backend-as-a-Service Platform" WORKDIR /app # Install system dependencies -RUN apt-get update && apt-get install -y \ +RUN apt-get update && 
apt-get install -y --no-install-recommends \ # For sharp image processing vips-tools \ fftw3 \ @@ -60,15 +60,10 @@ RUN bun install --frozen-lockfile # ---------------------------------------------------------------------------- # Stage 3: Builder # ---------------------------------------------------------------------------- -FROM base AS builder +FROM deps AS builder WORKDIR /app -# Copy lockfile and install dependencies -COPY package.json bun.lock ./ -COPY turbo.json ./ -RUN bun install --frozen-lockfile - # Copy all source code COPY packages/ packages/ COPY apps/ apps/ @@ -83,6 +78,10 @@ FROM base AS runner WORKDIR /app +# Create non-root user for security +RUN addgroup --system --gid 1001 appgroup && \ + adduser --system --uid 1001 appuser + # Copy package files for production COPY package.json bun.lock ./ COPY turbo.json ./ @@ -96,12 +95,21 @@ COPY --from=builder /app/packages/cli/dist ./node_modules/@betterbase/cli/dist COPY --from=builder /app/packages/client/dist ./node_modules/@betterbase/client/dist COPY --from=builder /app/packages/shared/dist ./node_modules/@betterbase/shared/dist +# Copy source files for runtime (needed for dynamic imports) +COPY --from=builder /app/packages/core/src ./node_modules/@betterbase/core/src +COPY --from=builder /app/packages/cli/src ./node_modules/@betterbase/cli/src +COPY --from=builder /app/packages/client/src ./node_modules/@betterbase/client/src +COPY --from=builder /app/packages/shared/src ./node_modules/@betterbase/shared/src + # Copy package.json files to access exports COPY packages/core/package.json ./node_modules/@betterbase/core/ COPY packages/cli/package.json ./node_modules/@betterbase/cli/ COPY packages/client/package.json ./node_modules/@betterbase/client/ COPY packages/shared/package.json ./node_modules/@betterbase/shared/ +# Switch to non-root user +USER appuser + # Set environment ENV NODE_ENV=production ENV PORT=3000 diff --git a/Dockerfile.project b/Dockerfile.project index b2d24b9..aa29501 100644 --- 
a/Dockerfile.project +++ b/Dockerfile.project @@ -23,7 +23,7 @@ LABEL description="Betterbase Project - AI-Native Backend Platform" WORKDIR /app # Install system dependencies for image processing and database -RUN apt-get update && apt-get install -y \ +RUN apt-get update && apt-get install -y --no-install-recommends \ # For sharp image processing vips-tools \ fftw3 \ @@ -91,7 +91,6 @@ RUN bun install --frozen-lockfile --production # Copy built artifacts COPY --from=builder /app/dist ./dist -COPY --from=builder /app/node_modules/.prisma ./node_modules/.prisma # Copy necessary source files for runtime COPY --from=builder /app/src ./src @@ -114,7 +113,7 @@ EXPOSE 3000 # Health check HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ - CMD curl -f http://localhost:3000/api/health || exit 1 + CMD curl -f http://localhost:3000/health || exit 1 # Start the application CMD ["bun", "run", "start"] diff --git a/README.md b/README.md index 23c933d..b2853ca 100644 --- a/README.md +++ b/README.md @@ -1015,6 +1015,83 @@ STORAGE_PROVIDER=r2 STORAGE_BUCKET=my-bucket docker-compose -f docker-compose.pr - **External database support** - Neon, Supabase, RDS, etc. - **S3-compatible storage** - R2, S3, B2, MinIO +### Self-Hosted Deployment + +Betterbase can be self-hosted on your own infrastructure using Docker. This is ideal for teams wanting full control over their data and infrastructure. 
+
+#### Quick Start
+
+```bash
+# Clone the repository
+git clone https://github.com/betterbase/betterbase.git
+cd betterbase
+
+# Start self-hosted deployment
+docker-compose -f docker-compose.self-hosted.yml up -d
+```
+
+The self-hosted version includes:
+- **Admin Dashboard** - Web UI for managing projects, users, and settings
+- **Device Authentication** - CLI login flow for self-hosted instances
+- **Admin API** - Full API for administrative tasks
+- **Metrics** - Usage and performance tracking
+
+#### Configuration
+
+Copy the example environment file and configure:
+
+```bash
+cp .env.self-hosted.example .env
+```
+
+Key environment variables:
+
+| Variable | Description | Required |
+|----------|-------------|----------|
+| `BETTERBASE_JWT_SECRET` | Secret for auth tokens (min 32 chars) | Yes |
+| `BETTERBASE_PUBLIC_URL` | Public URL of your instance | No |
+| `BETTERBASE_ADMIN_EMAIL` | Initial admin email (first-run seeding) | No |
+| `BETTERBASE_ADMIN_PASSWORD` | Initial admin password (first-run seeding) | No |
+| `POSTGRES_PASSWORD` | PostgreSQL password (change for production) | No |
+| `STORAGE_ACCESS_KEY` | Storage (MinIO/S3) access key | No |
+| `STORAGE_SECRET_KEY` | Storage (MinIO/S3) secret key | No |
+
+#### CLI Login with Self-Hosted
+
+```bash
+# Login to your self-hosted instance
+bb login --url https://your-instance.com
+
+# This will initiate device authentication flow
+# 1. You'll be given a device code
+# 2. Open the admin dashboard
+# 3. Approve the device
+# 4. 
CLI will receive credentials automatically
+```
+
+#### Docker Compose Services
+
+| Service | Port | Description |
+|---------|------|-------------|
+| betterbase-server | 3001 (internal) | Main API server |
+| betterbase-dashboard | 80 (internal) | Admin dashboard |
+| nginx | 80 (public) | Reverse proxy |
+
+#### For Development
+
+```bash
+# Start all services
+docker-compose -f docker-compose.self-hosted.yml up
+
+# View logs
+docker-compose -f docker-compose.self-hosted.yml logs -f
+
+# Stop services
+docker-compose -f docker-compose.self-hosted.yml down
+```
+
+See [SELF_HOSTED.md](SELF_HOSTED.md) for detailed documentation.
+
 ### Cloud Providers
 
 | Provider | Deployment Method |
diff --git a/SELF_HOSTED.md b/SELF_HOSTED.md
new file mode 100644
index 0000000..9ae4863
--- /dev/null
+++ b/SELF_HOSTED.md
@@ -0,0 +1,88 @@
+# Self-Hosting Betterbase
+
+## Prerequisites
+
+- Docker and Docker Compose
+- Ports 80 (or your chosen `HTTP_PORT`) available
+
+## Quick Start
+
+**1. Copy the example env file:**
+```bash
+cp .env.self-hosted.example .env
+```
+
+**2. Edit `.env` — at minimum set these three values:**
+```bash
+BETTERBASE_JWT_SECRET=your-random-string-here # min 32 chars
+BETTERBASE_ADMIN_EMAIL=you@example.com
+BETTERBASE_ADMIN_PASSWORD=yourpassword
+```
+Generate a secret: `openssl rand -base64 32`
+
+**3. Start everything:**
+```bash
+docker compose -f docker-compose.self-hosted.yml up -d
+```
+
+**4. Open the dashboard:**
+Navigate to `http://localhost` (or your configured `BETTERBASE_PUBLIC_URL`).
+
+**5. 
Connect your CLI:** +```bash +bb login --url http://localhost +``` + +--- + +## What Runs + +| Service | Internal Port | Description | +|---------|--------------|-------------| +| nginx | 80 (public) | Reverse proxy — only public-facing port | +| betterbase-server | 3001 (internal) | API server | +| betterbase-dashboard | 80 (internal) | Dashboard UI | +| postgres | 5432 (internal) | Betterbase metadata database | +| minio | 9000 (internal) | S3-compatible object storage | + +--- + +## CLI Usage Against Self-Hosted + +After `bb login --url http://your-server`, all CLI commands automatically target your server. + +```bash +bb login --url http://localhost # authenticate +bb init my-project # create a project (registered to your local instance) +bb sync # sync local project to server +``` + +--- + +## Production Checklist + +- [ ] `BETTERBASE_JWT_SECRET` is a random 32+ character string +- [ ] `POSTGRES_PASSWORD` changed from default +- [ ] `STORAGE_ACCESS_KEY` and `STORAGE_SECRET_KEY` changed from defaults +- [ ] `BETTERBASE_PUBLIC_URL` set to your actual domain +- [ ] SSL/TLS termination configured (add HTTPS to the nginx config or use a load balancer) +- [ ] Remove `BETTERBASE_ADMIN_EMAIL` / `BETTERBASE_ADMIN_PASSWORD` from `.env` after first start (or keep — seeding is idempotent) + +--- + +## Troubleshooting + +**Server won't start:** +Check that `BETTERBASE_JWT_SECRET` is set (minimum 32 characters). Run: +```bash +docker compose -f docker-compose.self-hosted.yml logs betterbase-server +``` + +**Can't log in with CLI:** +Ensure `BETTERBASE_PUBLIC_URL` in your `.env` matches the URL you pass to `bb login --url`. + +**Storage not working:** +The `minio-init` container initialises the default bucket on first start. 
Check its logs: +```bash +docker compose -f docker-compose.self-hosted.yml logs minio-init +``` \ No newline at end of file diff --git a/apps/dashboard/Dockerfile b/apps/dashboard/Dockerfile new file mode 100644 index 0000000..6f79f99 --- /dev/null +++ b/apps/dashboard/Dockerfile @@ -0,0 +1,45 @@ +# Dashboard Dockerfile placeholder +# This is a placeholder. The actual dashboard needs to be created first. +# If the dashboard uses Bun instead of npm, replace node:20-alpine with oven/bun:1.2-alpine +# and replace npm install with bun install and npm run build with bun run build + +FROM node:20-alpine AS builder + +WORKDIR /app + +COPY apps/dashboard/package.json apps/dashboard/package-lock.json* ./ +RUN npm ci + +COPY apps/dashboard ./ + +# Inject API URL at build time +ARG VITE_API_URL=http://localhost +ENV VITE_API_URL=$VITE_API_URL + +RUN npm run build + +# --- Runtime: serve static files with nginx --- +FROM nginx:alpine + +# Create non-root user for nginx +RUN addgroup -g 1000 -S appgroup && \ + adduser -u 1000 -S appuser -G appgroup + +COPY --from=builder /app/dist /usr/share/nginx/html + +# Ensure non-root user can read the html directory +RUN chown -R appuser:appgroup /usr/share/nginx/html && \ + chown -R appuser:appgroup /etc/nginx/conf.d + +# SPA routing: serve index.html for all unknown paths +RUN echo 'server { \ + listen 80; \ + root /usr/share/nginx/html; \ + index index.html; \ + location / { try_files $uri $uri/ /index.html; } \ +}' > /etc/nginx/conf.d/default.conf + +# Switch to non-root user +USER appuser + +EXPOSE 80 \ No newline at end of file diff --git a/docker-compose.self-hosted.yml b/docker-compose.self-hosted.yml new file mode 100644 index 0000000..b86037c --- /dev/null +++ b/docker-compose.self-hosted.yml @@ -0,0 +1,133 @@ +version: "3.9" + +services: + # ─── Postgres ────────────────────────────────────────────────────────────── + postgres: + image: postgres:16-alpine + container_name: betterbase-postgres + restart: unless-stopped + 
environment: + POSTGRES_USER: betterbase + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-betterbase} + POSTGRES_DB: betterbase + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U betterbase"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - betterbase-internal + + # ─── MinIO (S3-compatible storage) ───────────────────────────────────────── + minio: + image: minio/minio:RELEASE.2024-01-16T16-07-38Z + container_name: betterbase-minio + restart: unless-stopped + command: server /data --console-address ":9001" + environment: + MINIO_ROOT_USER: ${STORAGE_ACCESS_KEY:-minioadmin} + MINIO_ROOT_PASSWORD: ${STORAGE_SECRET_KEY:-minioadmin} + volumes: + - minio_data:/data + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - betterbase-internal + + # ─── MinIO bucket init (runs once, exits) ────────────────────────────────── + minio-init: + image: minio/mc:RELEASE.2024-01-06T18-51-57Z + container_name: betterbase-minio-init + depends_on: + minio: + condition: service_healthy + entrypoint: > + /bin/sh -c " + mc alias set local http://minio:9000 ${STORAGE_ACCESS_KEY:-minioadmin} ${STORAGE_SECRET_KEY:-minioadmin}; + mc mb --ignore-existing local/betterbase; + echo 'MinIO bucket initialized.'; + " + networks: + - betterbase-internal + + # ─── Betterbase Server ───────────────────────────────────────────────────── + betterbase-server: + build: + context: . 
+ dockerfile: packages/server/Dockerfile + container_name: betterbase-server + restart: unless-stopped + depends_on: + postgres: + condition: service_healthy + minio: + condition: service_healthy + minio-init: + condition: service_completed_successfully + environment: + DATABASE_URL: postgresql://betterbase:${POSTGRES_PASSWORD:-betterbase}@postgres:5432/betterbase + BETTERBASE_JWT_SECRET: ${BETTERBASE_JWT_SECRET:?JWT secret required - set BETTERBASE_JWT_SECRET in .env} + BETTERBASE_ADMIN_EMAIL: ${BETTERBASE_ADMIN_EMAIL:-} + BETTERBASE_ADMIN_PASSWORD: ${BETTERBASE_ADMIN_PASSWORD:-} + BETTERBASE_PUBLIC_URL: ${BETTERBASE_PUBLIC_URL:-http://localhost} + STORAGE_ENDPOINT: http://minio:9000 + STORAGE_ACCESS_KEY: ${STORAGE_ACCESS_KEY:-minioadmin} + STORAGE_SECRET_KEY: ${STORAGE_SECRET_KEY:-minioadmin} + PORT: "3001" + NODE_ENV: production + CORS_ORIGINS: ${CORS_ORIGINS:-http://localhost} + networks: + - betterbase-internal + healthcheck: + test: ["CMD-SHELL", "wget -qO- http://localhost:3001/health || exit 1"] + interval: 10s + timeout: 5s + retries: 5 + + # ─── Dashboard ───────────────────────────────────────────────────────────── + betterbase-dashboard: + build: + context: . 
+ dockerfile: apps/dashboard/Dockerfile # Dashboard Dockerfile — see SH-25 + args: + VITE_API_URL: ${BETTERBASE_PUBLIC_URL:-http://localhost} + container_name: betterbase-dashboard + restart: unless-stopped + depends_on: + betterbase-server: + condition: service_healthy + networks: + - betterbase-internal + + # ─── Nginx Reverse Proxy ─────────────────────────────────────────────────── + nginx: + image: nginx:alpine + container_name: betterbase-nginx + restart: unless-stopped + depends_on: + - betterbase-server + - betterbase-dashboard + ports: + - "${HTTP_PORT:-80}:80" + volumes: + - ./docker/nginx/nginx.conf:/etc/nginx/nginx.conf:ro + networks: + - betterbase-internal + healthcheck: + test: ["CMD", "nginx", "-t"] + interval: 30s + timeout: 10s + retries: 3 + +volumes: + postgres_data: + minio_data: + +networks: + betterbase-internal: + driver: bridge \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 564bab2..02dc018 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,8 +1,7 @@ # ============================================================================ # Betterbase Docker Compose # -# Development environment with PostgreSQL, Redis (for sessions), -# and the Betterbase application. +# Development environment with PostgreSQL and the Betterbase application. 
# # Usage: # docker-compose up -d # Start all services @@ -29,9 +28,12 @@ services: AUTH_SECRET: ${AUTH_SECRET:-your-super-secret-key-min-32-chars-long-change-in-production} AUTH_URL: http://localhost:3000 - # Storage (local for development) - STORAGE_PROVIDER: local - STORAGE_PATH: /app/storage + # Storage (MinIO for S3-compatible storage) + STORAGE_PROVIDER: minio + STORAGE_ENDPOINT: http://minio:9000 + STORAGE_BUCKET: betterbase + STORAGE_ACCESS_KEY: minioadmin + STORAGE_SECRET_KEY: minioadmin # Node environment NODE_ENV: ${NODE_ENV:-development} @@ -48,8 +50,8 @@ services: depends_on: postgres: condition: service_healthy - # redis: - # condition: service_started + minio: + condition: service_started restart: unless-stopped networks: - betterbase-network @@ -79,21 +81,26 @@ services: - betterbase-network # -------------------------------------------------------------------------- - # Optional: MinIO for S3-compatible storage (development) + # MinIO for S3-compatible storage (development) # -------------------------------------------------------------------------- - # minio: - # image: minio/minio:latest - # command: server /data --console-address ":9001" - # environment: - # MINIO_ROOT_USER: minioadmin - # MINIO_ROOT_PASSWORD: minioadmin - # volumes: - # - minio_data:/data - # ports: - # - "9000:9000" - # - "9001:9001" - # networks: - # - betterbase-network + minio: + image: minio/minio:latest + command: server /data --console-address ":9001" + environment: + MINIO_ROOT_USER: minioadmin + MINIO_ROOT_PASSWORD: minioadmin + volumes: + - minio_data:/data + ports: + - "9000:9000" + - "9001:9001" + healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"] + interval: 10s + timeout: 5s + retries: 5 + networks: + - betterbase-network # -------------------------------------------------------------------------- # Optional: Mailhog for email testing (development) @@ -121,5 +128,5 @@ volumes: driver: local betterbase_storage: driver: local - # 
minio_data: - # driver: local + minio_data: + driver: local diff --git a/docker/nginx/nginx.conf b/docker/nginx/nginx.conf new file mode 100644 index 0000000..4fda4cb --- /dev/null +++ b/docker/nginx/nginx.conf @@ -0,0 +1,89 @@ +events { + worker_connections 1024; +} + +http { + upstream betterbase_server { + server betterbase-server:3001; + } + + upstream betterbase_dashboard { + server betterbase-dashboard:80; + } + + upstream minio { + server minio:9000; + } + + server { + listen 80; + server_name _; + + # API + admin + device auth + location /admin/ { + proxy_pass http://betterbase_server; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_read_timeout 60s; + } + + location /device/ { + proxy_pass http://betterbase_server; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + location /health { + proxy_pass http://betterbase_server; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + # Storage (MinIO) + location /storage/ { + rewrite ^/storage/(.*) /$1 break; + proxy_pass http://minio; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + client_max_body_size 100m; + } + + # Dashboard (catch-all) + location / { + proxy_pass http://betterbase_dashboard; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + # SPA fallback + proxy_intercept_errors on; + error_page 404 = @dashboard_fallback; + } + + location @dashboard_fallback { + proxy_pass http://betterbase_dashboard; + 
proxy_set_header Host $host; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + # WebSocket support for realtime + location /realtime/ { + proxy_pass http://betterbase_server; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_read_timeout 3600s; + } + } +} \ No newline at end of file diff --git a/docs/README.md b/docs/README.md index 744444a..4e609c2 100644 --- a/docs/README.md +++ b/docs/README.md @@ -14,7 +14,7 @@ Comprehensive documentation for the BetterBase platform, covering all packages, The documentation is organized into logical sections for easy navigation: -``` +```text /docs ├── getting-started/ # Getting started guides │ ├── installation.md @@ -157,6 +157,25 @@ This documentation corresponds to the current version of the BetterBase platform - Refer to the CHANGELOG.md for version-specific changes - Documentation for older versions is available in the git history +## Self-Hosted Deployment + +BetterBase can be self-hosted on your own infrastructure. See [SELF_HOSTED.md](../SELF_HOSTED.md) for detailed documentation on: + +- Docker-based deployment +- Admin dashboard configuration +- Device authentication for CLI +- Admin API usage + +Quick start: + +```bash +# Clone and start +docker-compose -f docker-compose.self-hosted.yml up -d + +# Login to self-hosted instance +bb login --url https://your-instance.com +``` + ## License This documentation is part of the BetterBase platform and is licensed under the MIT License. 
diff --git a/docs/guides/deployment.md b/docs/guides/deployment.md index 4ebcf99..6bcc322 100644 --- a/docs/guides/deployment.md +++ b/docs/guides/deployment.md @@ -190,6 +190,49 @@ npm i -g vercel vercel --prod ``` +## Self-Hosted Deployment + +BetterBase can be self-hosted on your own infrastructure using Docker. + +### Quick Start + +```bash +# Clone the repository +git clone https://github.com/betterbase/betterbase.git +cd betterbase + +# Start self-hosted deployment +docker-compose -f docker-compose.self-hosted.yml up -d +``` + +### Configuration + +Copy and configure the environment file: + +```bash +cp .env.self-hosted.example .env +``` + +Key variables: + +| Variable | Description | +|----------|-------------| +| `DATABASE_URL` | PostgreSQL connection string | +| `AUTH_SECRET` | Auth secret (min 32 chars) | +| `SERVER_URL` | Public URL of your instance | +| `ADMIN_EMAIL` | Initial admin email | +| `ADMIN_PASSWORD` | Initial admin password | + +### CLI Login + +```bash +bb login --url https://your-instance.com +``` + +This uses device authentication flow - you'll approve the device in the admin dashboard. + +See [SELF_HOSTED.md](../../SELF_HOSTED.md) for complete documentation. 
+ ## Environment Configuration ### Production Environment Variables diff --git a/packages/cli/src/commands/login.ts b/packages/cli/src/commands/login.ts index 4691f91..787dd6a 100644 --- a/packages/cli/src/commands/login.ts +++ b/packages/cli/src/commands/login.ts @@ -1,153 +1,122 @@ -import { randomBytes } from "node:crypto"; -import { existsSync } from "node:fs"; -import fs from "node:fs/promises"; -import os from "node:os"; -import path from "node:path"; -import { info, error as logError, success, warn } from "../utils/logger"; - -export interface Credentials { - token: string; - email: string; - userId: string; - expiresAt: string; +import chalk from "chalk"; +import type { Command } from "commander"; +import { clearCredentials, loadCredentials, saveCredentials } from "../utils/credentials"; +import { error, info, success } from "../utils/logger"; + +const DEFAULT_SERVER_URL = "https://api.betterbase.io"; +const POLL_INTERVAL_MS = 5000; +const POLL_TIMEOUT_MS = 5 * 60 * 1000; // 5 minutes + +export function registerLoginCommand(program: Command) { + program + .command("login") + .description("Authenticate with a Betterbase instance") + .option("--url ", "Self-hosted Betterbase server URL", DEFAULT_SERVER_URL) + .action(async (opts) => { + await runLoginCommand({ serverUrl: opts.url }); + }); + + program + .command("logout") + .description("Clear stored credentials") + .action(() => { + clearCredentials(); + success("Logged out."); + }); } -const BETTERBASE_API = - process.env.BETTERBASE_API_URL ?? "https://gzmqjmgomlkpwntbivox.supabase.co/functions/v1"; -const AUTH_PAGE_URL = - process.env.BETTERBASE_AUTH_PAGE_URL ?? 
"https://betterbaseauthpage.vercel.app"; -const CREDENTIALS_PATH = path.join(os.homedir(), ".betterbase", "credentials.json"); -const POLL_INTERVAL_MS = 2000; -const POLL_TIMEOUT_MS = 300000; - -export async function runLoginCommand(): Promise { - const existing = await getCredentials(); - if (existing) { - info(`Already logged in as ${existing.email}`); - info("Run bb logout to sign out."); - return; - } +export async function runLoginCommand(opts: { serverUrl?: string } = {}) { + const serverUrl = (opts.serverUrl ?? DEFAULT_SERVER_URL).replace(/\/$/, ""); + + info(`Logging in to ${chalk.cyan(serverUrl)} ...`); - const code = generateDeviceCode(); + // Step 1: Request device code + let deviceCode: string; + let userCode: string; + let verificationUri: string; - // Register device code in DB before opening browser try { - const res = await fetch(`${BETTERBASE_API}/cli-auth-device`, { + const res = await fetch(`${serverUrl}/device/code`, { method: "POST" }); + if (!res.ok) throw new Error(`Server returned ${res.status}`); + const data = (await res.json()) as { + device_code: string; + user_code: string; + verification_uri: string; + }; + deviceCode = data.device_code; + userCode = data.user_code; + verificationUri = data.verification_uri; + } catch (err: any) { + error(`Could not reach server: ${err.message}`); + process.exit(1); + } + + console.log(""); + console.log(chalk.bold("Open this URL in your browser to authorize:")); + console.log(chalk.cyan(`${verificationUri}?code=${userCode}`)); + console.log(""); + console.log(`Your code: ${chalk.yellow.bold(userCode)}`); + console.log("Waiting for authorization..."); + + // Step 2: Poll for token + const deadline = Date.now() + POLL_TIMEOUT_MS; + + while (Date.now() < deadline) { + await new Promise((r) => setTimeout(r, POLL_INTERVAL_MS)); + + const res = await fetch(`${serverUrl}/device/token`, { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ code }), + body: 
JSON.stringify({ device_code: deviceCode }), }); + + if (res.status === 202) continue; // authorization_pending + if (!res.ok) { - logError("Failed to register device code. Check your connection and try again."); + const body = (await res.json()) as { error?: string }; + if (body.error === "authorization_pending") continue; + error(`Login failed: ${body.error ?? "unknown error"}`); process.exit(1); } - } catch { - logError("Could not reach BetterBase API. Check your connection and try again."); - process.exit(1); - } - const authUrl = `${AUTH_PAGE_URL}?code=${code}`; - info("Opening browser for authentication..."); - info(`Auth URL: ${authUrl}`); - info("Waiting for authentication... (timeout: 5 minutes)"); + const { access_token } = (await res.json()) as { access_token: string }; - await openBrowser(authUrl); + // Get admin info + const meRes = await fetch(`${serverUrl}/admin/auth/me`, { + headers: { Authorization: `Bearer ${access_token}` }, + }); + const { admin } = (await meRes.json()) as { admin: { email: string } }; - const credentials = await pollForAuth(code); + saveCredentials({ + token: access_token, + admin_email: admin.email, + server_url: serverUrl, + created_at: new Date().toISOString(), + }); - if (!credentials) { - logError("Authentication timed out. Run bb login to try again."); - process.exit(1); + success(`Logged in as ${chalk.cyan(admin.email)}`); + return; } - await saveCredentials(credentials); - success(`Logged in as ${credentials.email}`); + error("Login timed out. 
Please try again."); + process.exit(1); +} + +// Legacy exports for compatibility +export async function runLoginCommandLegacy(): Promise { + await runLoginCommand({}); } export async function runLogoutCommand(): Promise { - if (existsSync(CREDENTIALS_PATH)) { - await fs.unlink(CREDENTIALS_PATH); - success("Logged out successfully."); - } else { - warn("Not currently logged in."); - } + clearCredentials(); + success("Logged out."); } -export async function getCredentials(): Promise { - if (!existsSync(CREDENTIALS_PATH)) return null; - try { - const raw = await fs.readFile(CREDENTIALS_PATH, "utf-8"); - const creds = JSON.parse(raw) as Credentials; - if (new Date(creds.expiresAt) < new Date()) return null; - return creds; - } catch { - return null; - } +export async function getCredentials() { + return loadCredentials(); } export async function isAuthenticated(): Promise { const creds = await getCredentials(); return creds !== null; } - -export async function requireCredentials(): Promise { - const creds = await getCredentials(); - if (!creds) { - logError( - "Not logged in. 
Run: bb login\n" + - "This connects your CLI with BetterBase so your project\n" + - "can be registered and managed from the dashboard.", - ); - process.exit(1); - } - return creds; -} - -function generateDeviceCode(): string { - const chars = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789"; - const part1 = Array.from({ length: 4 }, () => chars[randomBytes(1)[0] % chars.length]).join(""); - const part2 = Array.from({ length: 4 }, () => chars[randomBytes(1)[0] % chars.length]).join(""); - return `${part1}-${part2}`; -} - -async function openBrowser(url: string): Promise { - try { - if (process.platform === "darwin") { - await Bun.spawn(["open", url]); - } else if (process.platform === "win32") { - await Bun.spawn(["cmd", "/c", "start", "", url]); - } else { - await Bun.spawn(["xdg-open", url]); - } - } catch { - // Browser open failed — URL already printed, user can open manually - } -} - -async function pollForAuth(code: string): Promise { - const startTime = Date.now(); - - while (Date.now() - startTime < POLL_TIMEOUT_MS) { - await sleep(POLL_INTERVAL_MS); - try { - const response = await fetch(`${BETTERBASE_API}/cli-auth-poll?code=${code}`); - if (response.status === 200) { - return (await response.json()) as Credentials; - } - } catch { - // Network error — continue polling - } - } - - return null; -} - -async function saveCredentials(creds: Credentials): Promise { - const dir = path.dirname(CREDENTIALS_PATH); - await fs.mkdir(dir, { recursive: true }); - await fs.writeFile(CREDENTIALS_PATH, JSON.stringify(creds, null, 2), "utf-8"); -} - -function sleep(ms: number): Promise { - return new Promise((resolve) => setTimeout(resolve, ms)); -} diff --git a/packages/cli/src/index.ts b/packages/cli/src/index.ts index c8cdd91..852531b 100644 --- a/packages/cli/src/index.ts +++ b/packages/cli/src/index.ts @@ -1,14 +1,18 @@ import { Command, CommanderError } from "commander"; import packageJson from "../package.json"; -import { runAuthSetupCommand, runAuthAddProviderCommand } from 
"./commands/auth"; +import { runAuthAddProviderCommand, runAuthSetupCommand } from "./commands/auth"; import { runBranchCommand } from "./commands/branch"; import { runDevCommand } from "./commands/dev"; import { runFunctionCommand } from "./commands/function"; import { runGenerateCrudCommand } from "./commands/generate"; import { runGenerateGraphqlCommand, runGraphqlPlaygroundCommand } from "./commands/graphql"; import { runInitCommand } from "./commands/init"; -import { runLoginCommand, runLogoutCommand, isAuthenticated } from "./commands/login"; -import { runMigrateCommand, runMigrateRollbackCommand, runMigrateHistoryCommand } from "./commands/migrate"; +import { isAuthenticated, runLoginCommand, runLogoutCommand } from "./commands/login"; +import { + runMigrateCommand, + runMigrateHistoryCommand, + runMigrateRollbackCommand, +} from "./commands/migrate"; import { runRlsCommand } from "./commands/rls"; import { runRLSTestCommand } from "./commands/rls-test"; import { @@ -20,7 +24,7 @@ import { runWebhookCommand } from "./commands/webhook"; import * as logger from "./utils/logger"; // Commands that don't require authentication -const PUBLIC_COMMANDS = ["login", "logout", "version", "help"]; +const PUBLIC_COMMANDS = ["login", "logout", "version", "help", "init"]; /** * Check if the user is authenticated before running a command. 
@@ -118,7 +122,9 @@ export function createProgram(): Command { auth .command("add-provider") - .description("Add OAuth provider (google, github, discord, apple, microsoft, twitter, facebook)") + .description( + "Add OAuth provider (google, github, discord, apple, microsoft, twitter, facebook)", + ) .argument("", "OAuth provider name") .argument("[project-root]", "project root directory", process.cwd()) .action(async (provider: string, projectRoot: string) => { @@ -181,7 +187,7 @@ export function createProgram(): Command { .option("-s, --steps ", "Number of migrations to rollback", "1") .action(async (options: { steps?: string }) => { await runMigrateRollbackCommand(process.cwd(), { - steps: options.steps ? parseInt(options.steps, 10) : 1, + steps: options.steps ? Number.parseInt(options.steps, 10) : 1, }); }); @@ -304,7 +310,7 @@ export function createProgram(): Command { .option("-l, --limit ", "Limit number of logs to show", "50") .argument("[project-root]", "project root directory", process.cwd()) .action(async (webhookId: string, options: { limit?: string }, projectRoot: string) => { - const limit = options.limit ? parseInt(options.limit, 10) : 50; + const limit = options.limit ? 
Number.parseInt(options.limit, 10) : 50; await runWebhookCommand(["logs", webhookId, limit.toString()], projectRoot); }); @@ -432,10 +438,13 @@ export function createProgram(): Command { program .command("login") - .description("Authenticate the CLI with app.betterbase.com") - .action(runLoginCommand); + .description("Authenticate with a Betterbase instance") + .option("--url ", "Self-hosted Betterbase server URL", "https://api.betterbase.io") + .action(async (opts) => { + await runLoginCommand({ serverUrl: opts.url }); + }); - program.command("logout").description("Sign out of app.betterbase.com").action(runLogoutCommand); + program.command("logout").description("Sign out of Betterbase").action(runLogoutCommand); return program; } diff --git a/packages/cli/src/utils/api-client.ts b/packages/cli/src/utils/api-client.ts new file mode 100644 index 0000000..5a13e2c --- /dev/null +++ b/packages/cli/src/utils/api-client.ts @@ -0,0 +1,34 @@ +import { loadCredentials } from "./credentials"; +import { error } from "./logger"; + +export function requireAuth(): { token: string; serverUrl: string } { + const creds = loadCredentials(); + if (!creds?.token) { + error("Not logged in. Run `bb login` first."); + process.exit(1); + } + return { token: creds.token, serverUrl: creds.server_url }; +} + +export async function apiRequest(path: string, options: RequestInit = {}): Promise { + const { token, serverUrl } = requireAuth(); + + const url = `${serverUrl}${path}`; + const res = await fetch(url, { + ...options, + headers: { + "Content-Type": "application/json", + Authorization: `Bearer ${token}`, + ...(options.headers ?? {}), + }, + }); + + if (!res.ok) { + const body = (await res.json().catch(() => ({ error: "Request failed" }))) as { + error?: string; + }; + throw new Error(body.error ?? 
`HTTP ${res.status}`); + } + + return res.json() as Promise; +} diff --git a/packages/cli/src/utils/credentials.ts b/packages/cli/src/utils/credentials.ts new file mode 100644 index 0000000..2d33678 --- /dev/null +++ b/packages/cli/src/utils/credentials.ts @@ -0,0 +1,44 @@ +import { existsSync, mkdirSync, readFileSync, writeFileSync } from "fs"; +import { homedir } from "os"; +import { join } from "path"; +import { z } from "zod"; + +const CREDENTIALS_DIR = join(homedir(), ".betterbase"); +const CREDENTIALS_FILE = join(CREDENTIALS_DIR, "credentials.json"); + +const CredentialsSchema = z.object({ + token: z.string(), + admin_email: z.string().email(), + server_url: z.string().url(), // ← NEW: base URL of the Betterbase server + created_at: z.string(), +}); + +export type Credentials = z.infer; + +export function saveCredentials(creds: Credentials): void { + if (!existsSync(CREDENTIALS_DIR)) { + mkdirSync(CREDENTIALS_DIR, { recursive: true, mode: 0o700 }); + } + writeFileSync(CREDENTIALS_FILE, JSON.stringify(creds, null, 2), { mode: 0o600 }); +} + +export function loadCredentials(): Credentials | null { + if (!existsSync(CREDENTIALS_FILE)) return null; + try { + const raw = JSON.parse(readFileSync(CREDENTIALS_FILE, "utf-8")); + return CredentialsSchema.parse(raw); + } catch { + return null; + } +} + +export function clearCredentials(): void { + if (existsSync(CREDENTIALS_FILE)) { + writeFileSync(CREDENTIALS_FILE, JSON.stringify({})); + } +} + +export function getServerUrl(): string { + const creds = loadCredentials(); + return creds?.server_url ?? 
"https://api.betterbase.io"; // Falls back to cloud +} diff --git a/packages/server/Dockerfile b/packages/server/Dockerfile new file mode 100644 index 0000000..1a0cf9b --- /dev/null +++ b/packages/server/Dockerfile @@ -0,0 +1,34 @@ +FROM oven/bun:1.2-alpine AS builder + +WORKDIR /app + +# Copy monorepo structure (only what server needs) +COPY package.json turbo.json bun.lock ./ +COPY packages/server/package.json ./packages/server/ +COPY packages/core/package.json ./packages/core/ +COPY packages/shared/package.json ./packages/shared/ + +RUN bun install --frozen-lockfile + +COPY packages/server ./packages/server +COPY packages/core ./packages/core +COPY packages/shared ./packages/shared +COPY tsconfig.base.json ./ + +RUN cd packages/server && bun build src/index.ts --outdir dist --target bun + +# --- Runtime stage --- +FROM oven/bun:1.2-alpine + +WORKDIR /app + +COPY --from=builder /app/packages/server/dist ./dist +COPY --from=builder /app/packages/server/migrations ./migrations + +# Health check +HEALTHCHECK --interval=10s --timeout=5s --start-period=30s --retries=3 \ + CMD wget -qO- http://localhost:3001/health || exit 1 + +EXPOSE 3001 + +CMD ["bun", "dist/index.js"] \ No newline at end of file diff --git a/packages/server/migrations/001_initial_schema.sql b/packages/server/migrations/001_initial_schema.sql new file mode 100644 index 0000000..9f55da2 --- /dev/null +++ b/packages/server/migrations/001_initial_schema.sql @@ -0,0 +1,51 @@ +-- Betterbase internal metadata schema +-- Runs once on first container start via the bootstrap process + +CREATE EXTENSION IF NOT EXISTS pgcrypto; + +CREATE SCHEMA IF NOT EXISTS betterbase_meta; + +-- Admin accounts (these are Betterbase operators, not end-users of projects) +CREATE TABLE IF NOT EXISTS betterbase_meta.admin_users ( + id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text, + email TEXT NOT NULL UNIQUE, + password_hash TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL 
DEFAULT NOW() +); + +-- Projects registered in this Betterbase instance +CREATE TABLE IF NOT EXISTS betterbase_meta.projects ( + id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text, + name TEXT NOT NULL, + slug TEXT NOT NULL UNIQUE, + admin_key_hash TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Device auth codes for CLI `bb login` flow +CREATE TABLE IF NOT EXISTS betterbase_meta.device_codes ( + user_code TEXT PRIMARY KEY, + device_code TEXT NOT NULL UNIQUE, + admin_user_id TEXT REFERENCES betterbase_meta.admin_users(id) ON DELETE CASCADE, + expires_at TIMESTAMPTZ NOT NULL, + verified BOOLEAN NOT NULL DEFAULT FALSE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- CLI sessions — issued after device code verified +CREATE TABLE IF NOT EXISTS betterbase_meta.cli_sessions ( + id TEXT PRIMARY KEY DEFAULT gen_random_uuid()::text, + admin_user_id TEXT NOT NULL REFERENCES betterbase_meta.admin_users(id) ON DELETE CASCADE, + token_hash TEXT NOT NULL UNIQUE, + expires_at TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Migration tracking +CREATE TABLE IF NOT EXISTS betterbase_meta.migrations ( + id SERIAL PRIMARY KEY, + filename TEXT NOT NULL UNIQUE, + applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); \ No newline at end of file diff --git a/packages/server/migrations/002_webhooks.sql b/packages/server/migrations/002_webhooks.sql new file mode 100644 index 0000000..f8e5e84 --- /dev/null +++ b/packages/server/migrations/002_webhooks.sql @@ -0,0 +1,10 @@ +CREATE TABLE IF NOT EXISTS betterbase_meta.webhooks ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL, + table_name TEXT NOT NULL, + events TEXT[] NOT NULL, + url TEXT NOT NULL, + secret TEXT, + enabled BOOLEAN NOT NULL DEFAULT TRUE, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); \ No newline at end of file diff --git a/packages/server/migrations/003_functions.sql b/packages/server/migrations/003_functions.sql 
new file mode 100644 index 0000000..79b6cc0 --- /dev/null +++ b/packages/server/migrations/003_functions.sql @@ -0,0 +1,9 @@ +CREATE TABLE IF NOT EXISTS betterbase_meta.functions ( + id TEXT PRIMARY KEY, + name TEXT NOT NULL UNIQUE, + runtime TEXT NOT NULL DEFAULT 'bun', + status TEXT NOT NULL DEFAULT 'inactive', + deploy_target TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); \ No newline at end of file diff --git a/packages/server/migrations/004_logs.sql b/packages/server/migrations/004_logs.sql new file mode 100644 index 0000000..169a0ba --- /dev/null +++ b/packages/server/migrations/004_logs.sql @@ -0,0 +1,12 @@ +CREATE TABLE IF NOT EXISTS betterbase_meta.request_logs ( + id BIGSERIAL PRIMARY KEY, + method TEXT NOT NULL, + path TEXT NOT NULL, + status INT NOT NULL, + duration_ms INT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); + +-- Index for dashboard queries +CREATE INDEX IF NOT EXISTS idx_request_logs_created_at + ON betterbase_meta.request_logs (created_at DESC); \ No newline at end of file diff --git a/packages/server/package.json b/packages/server/package.json new file mode 100644 index 0000000..bb616bf --- /dev/null +++ b/packages/server/package.json @@ -0,0 +1,28 @@ +{ + "name": "@betterbase/server", + "version": "0.1.0", + "private": true, + "main": "src/index.ts", + "scripts": { + "dev": "bun --watch src/index.ts", + "start": "bun src/index.ts", + "build": "bun build src/index.ts --outdir dist --target bun" + }, + "dependencies": { + "@betterbase/core": "workspace:*", + "@betterbase/shared": "workspace:*", + "hono": "^4.0.0", + "pg": "^8.11.0", + "bcryptjs": "^2.4.3", + "nanoid": "^5.0.0", + "jose": "^5.0.0", + "zod": "^3.23.8", + "@hono/zod-validator": "^0.4.0", + "@aws-sdk/client-s3": "^3.995.0" + }, + "devDependencies": { + "@types/pg": "^8.11.0", + "@types/bcryptjs": "^2.4.6", + "typescript": "^5.4.0" + } +} diff --git a/packages/server/src/index.ts b/packages/server/src/index.ts 
new file mode 100644 index 0000000..6758e1a --- /dev/null +++ b/packages/server/src/index.ts @@ -0,0 +1,74 @@ +import { Hono } from "hono"; +import { cors } from "hono/cors"; +import { logger } from "hono/logger"; +import { getPool } from "./lib/db"; +import { validateEnv } from "./lib/env"; +import { runMigrations } from "./lib/migrate"; +import { adminRouter } from "./routes/admin/index"; +import { deviceRouter } from "./routes/device/index"; + +// Validate env first — exits if invalid +const env = validateEnv(); + +// Bootstrap +const pool = getPool(); +await runMigrations(pool); + +// Seed initial admin if env vars provided and no admin exists +if (env.BETTERBASE_ADMIN_EMAIL && env.BETTERBASE_ADMIN_PASSWORD) { + const { seedAdminUser } = await import("./lib/auth"); + await seedAdminUser(pool, env.BETTERBASE_ADMIN_EMAIL, env.BETTERBASE_ADMIN_PASSWORD); +} + +// App +const app = new Hono(); + +app.use("*", logger()); + +// Request logging middleware - fire and forget +app.use("*", async (c, next) => { + const start = Date.now(); + await next(); + const duration = Date.now() - start; + // Fire-and-forget log insert (don't await, don't fail requests on log error) + getPool() + .query( + "INSERT INTO betterbase_meta.request_logs (method, path, status, duration_ms) VALUES ($1, $2, $3, $4)", + [c.req.method, new URL(c.req.url).pathname, c.res.status, duration], + ) + .catch(() => {}); // Silently ignore log failures +}); + +app.use( + "*", + cors({ + origin: env.CORS_ORIGINS.split(","), + credentials: true, + allowHeaders: ["Content-Type", "Authorization"], + allowMethods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"], + }), +); + +// Health check — used by Docker HEALTHCHECK +app.get("/health", (c) => c.json({ status: "ok", timestamp: new Date().toISOString() })); + +// Routers +app.route("/admin", adminRouter); +app.route("/device", deviceRouter); + +// 404 +app.notFound((c) => c.json({ error: "Not found" }, 404)); + +// Error handler +app.onError((err, c) => 
{ + console.error("[error]", err); + return c.json({ error: "Internal server error" }, 500); +}); + +const port = Number.parseInt(env.PORT); +console.log(`[server] Betterbase server running on port ${port}`); + +export default { + port, + fetch: app.fetch, +}; diff --git a/packages/server/src/lib/admin-middleware.ts b/packages/server/src/lib/admin-middleware.ts new file mode 100644 index 0000000..f558c84 --- /dev/null +++ b/packages/server/src/lib/admin-middleware.ts @@ -0,0 +1,28 @@ +import type { Context, Next } from "hono"; +import { extractBearerToken, verifyAdminToken } from "./auth"; +import { getPool } from "./db"; + +export async function requireAdmin(c: Context, next: Next) { + const token = extractBearerToken(c.req.header("Authorization")); + if (!token) { + return c.json({ error: "Unauthorized" }, 401); + } + + const payload = await verifyAdminToken(token); + if (!payload) { + return c.json({ error: "Invalid or expired token" }, 401); + } + + // Verify admin still exists in DB + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, email FROM betterbase_meta.admin_users WHERE id = $1", + [payload.sub], + ); + if (rows.length === 0) { + return c.json({ error: "Unauthorized" }, 401); + } + + c.set("adminUser", rows[0]); + await next(); +} diff --git a/packages/server/src/lib/auth.ts b/packages/server/src/lib/auth.ts new file mode 100644 index 0000000..6467da9 --- /dev/null +++ b/packages/server/src/lib/auth.ts @@ -0,0 +1,61 @@ +import bcrypt from "bcryptjs"; +import { SignJWT, jwtVerify } from "jose"; +import type { Pool } from "pg"; + +const getSecret = () => new TextEncoder().encode(process.env.BETTERBASE_JWT_SECRET!); + +const TOKEN_EXPIRY = "30d"; +const BCRYPT_ROUNDS = 12; + +// --- Password --- + +export async function hashPassword(password: string): Promise { + return bcrypt.hash(password, BCRYPT_ROUNDS); +} + +export async function verifyPassword(password: string, hash: string): Promise { + return bcrypt.compare(password, 
hash); +} + +// --- JWT for admin sessions --- + +export async function signAdminToken(adminUserId: string): Promise { + return new SignJWT({ sub: adminUserId, type: "admin" }) + .setProtectedHeader({ alg: "HS256" }) + .setIssuedAt() + .setExpirationTime(TOKEN_EXPIRY) + .sign(getSecret()); +} + +export async function verifyAdminToken(token: string): Promise<{ sub: string } | null> { + try { + const { payload } = await jwtVerify(token, getSecret()); + if (payload.type !== "admin") return null; + return { sub: payload.sub as string }; + } catch { + return null; + } +} + +// --- Middleware helper: extract + verify token from Authorization header --- + +export function extractBearerToken(authHeader: string | undefined): string | null { + if (!authHeader?.startsWith("Bearer ")) return null; + return authHeader.slice(7); +} + +// --- Seed initial admin on first start --- + +export async function seedAdminUser(pool: Pool, email: string, password: string): Promise { + const { rows } = await pool.query("SELECT id FROM betterbase_meta.admin_users WHERE email = $1", [ + email, + ]); + if (rows.length > 0) return; // Already exists + + const hash = await hashPassword(password); + await pool.query( + "INSERT INTO betterbase_meta.admin_users (email, password_hash) VALUES ($1, $2)", + [email, hash], + ); + console.log(`[auth] Seeded admin user: ${email}`); +} diff --git a/packages/server/src/lib/db.ts b/packages/server/src/lib/db.ts new file mode 100644 index 0000000..0367729 --- /dev/null +++ b/packages/server/src/lib/db.ts @@ -0,0 +1,18 @@ +import { Pool } from "pg"; + +let _pool: Pool | null = null; + +export function getPool(): Pool { + if (!_pool) { + if (!process.env.DATABASE_URL) { + throw new Error("DATABASE_URL environment variable is required"); + } + _pool = new Pool({ + connectionString: process.env.DATABASE_URL, + max: 10, + idleTimeoutMillis: 30_000, + connectionTimeoutMillis: 5_000, + }); + } + return _pool; +} diff --git a/packages/server/src/lib/env.ts 
b/packages/server/src/lib/env.ts new file mode 100644 index 0000000..4c58578 --- /dev/null +++ b/packages/server/src/lib/env.ts @@ -0,0 +1,28 @@ +import { z } from "zod"; + +const EnvSchema = z.object({ + DATABASE_URL: z.string().min(1), + BETTERBASE_JWT_SECRET: z.string().min(32, "JWT secret must be at least 32 characters"), + BETTERBASE_ADMIN_EMAIL: z.string().email().optional(), + BETTERBASE_ADMIN_PASSWORD: z.string().min(8).optional(), + PORT: z.string().default("3001"), + NODE_ENV: z.enum(["development", "production", "test"]).default("development"), + STORAGE_ENDPOINT: z.string().optional(), + STORAGE_ACCESS_KEY: z.string().optional(), + STORAGE_SECRET_KEY: z.string().optional(), + STORAGE_BUCKET: z.string().default("betterbase"), + CORS_ORIGINS: z.string().default("http://localhost:3000"), + BETTERBASE_PUBLIC_URL: z.string().optional(), +}); + +export type Env = z.infer; + +export function validateEnv(): Env { + const result = EnvSchema.safeParse(process.env); + if (!result.success) { + console.error("[env] Invalid environment variables:"); + console.error(result.error.flatten().fieldErrors); + process.exit(1); + } + return result.data; +} diff --git a/packages/server/src/lib/migrate.ts b/packages/server/src/lib/migrate.ts new file mode 100644 index 0000000..30e8a86 --- /dev/null +++ b/packages/server/src/lib/migrate.ts @@ -0,0 +1,36 @@ +import { join } from "path"; +import { readFile, readdir } from "fs/promises"; +import type { Pool } from "pg"; + +const MIGRATIONS_DIR = join(__dirname, "../../migrations"); + +export async function runMigrations(pool: Pool): Promise { + // Ensure tracking table exists before we query it + await pool.query(` + CREATE SCHEMA IF NOT EXISTS betterbase_meta; + CREATE EXTENSION IF NOT EXISTS pgcrypto; + CREATE TABLE IF NOT EXISTS betterbase_meta.migrations ( + id SERIAL PRIMARY KEY, + filename TEXT NOT NULL UNIQUE, + applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ); + `); + + const files = (await 
readdir(MIGRATIONS_DIR)).filter((f) => f.endsWith(".sql")).sort(); + + const { rows: applied } = await pool.query<{ filename: string }>( + "SELECT filename FROM betterbase_meta.migrations", + ); + const appliedSet = new Set(applied.map((r) => r.filename)); + + for (const file of files) { + if (appliedSet.has(file)) continue; + + const sql = await readFile(join(MIGRATIONS_DIR, file), "utf-8"); + await pool.query(sql); + await pool.query("INSERT INTO betterbase_meta.migrations (filename) VALUES ($1)", [file]); + console.log(`[migrate] Applied: ${file}`); + } + + console.log("[migrate] All migrations up to date."); +} diff --git a/packages/server/src/routes/admin/auth.ts b/packages/server/src/routes/admin/auth.ts new file mode 100644 index 0000000..3144a27 --- /dev/null +++ b/packages/server/src/routes/admin/auth.ts @@ -0,0 +1,109 @@ +import { zValidator } from "@hono/zod-validator"; +import { Hono } from "hono"; +import { z } from "zod"; +import { + extractBearerToken, + signAdminToken, + verifyAdminToken, + verifyPassword, +} from "../../lib/auth"; +import { getPool } from "../../lib/db"; + +export const authRoutes = new Hono(); + +// POST /admin/auth/login +authRoutes.post( + "/login", + zValidator( + "json", + z.object({ + email: z.string().email(), + password: z.string().min(1), + }), + ), + async (c) => { + const { email, password } = c.req.valid("json"); + const pool = getPool(); + + const { rows } = await pool.query( + "SELECT id, email, password_hash FROM betterbase_meta.admin_users WHERE email = $1", + [email], + ); + if (rows.length === 0) { + return c.json({ error: "Invalid credentials" }, 401); + } + + const admin = rows[0]; + const valid = await verifyPassword(password, admin.password_hash); + if (!valid) { + return c.json({ error: "Invalid credentials" }, 401); + } + + const token = await signAdminToken(admin.id); + return c.json({ token, admin: { id: admin.id, email: admin.email } }); + }, +); + +// GET /admin/auth/me (requires token) 
+authRoutes.get("/me", async (c) => { + const token = extractBearerToken(c.req.header("Authorization")); + if (!token) return c.json({ error: "Unauthorized" }, 401); + + const payload = await verifyAdminToken(token); + if (!payload) return c.json({ error: "Unauthorized" }, 401); + + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, email, created_at FROM betterbase_meta.admin_users WHERE id = $1", + [payload.sub], + ); + if (rows.length === 0) return c.json({ error: "Unauthorized" }, 401); + + return c.json({ admin: rows[0] }); +}); + +// POST /admin/auth/logout (client-side token discard — stateless) +authRoutes.post("/logout", (c) => c.json({ success: true })); + +// POST /admin/auth/setup — available only before first admin is created +authRoutes.post( + "/setup", + zValidator( + "json", + z.object({ + email: z.string().email(), + password: z.string().min(8), + }), + ), + async (c) => { + const pool = getPool(); + + // Check if any admin exists + const { rows } = await pool.query( + "SELECT COUNT(*)::int as count FROM betterbase_meta.admin_users", + ); + if (rows[0].count > 0) { + return c.json({ error: "Setup already complete" }, 410); + } + + const { email, password } = c.req.valid("json"); + const { hashPassword, signAdminToken: signToken } = await import("../../lib/auth"); + const { nanoid } = await import("nanoid"); + + const passwordHash = await hashPassword(password); + const { rows: newAdmin } = await pool.query( + "INSERT INTO betterbase_meta.admin_users (id, email, password_hash) VALUES ($1, $2, $3) RETURNING id, email", + [nanoid(), email, passwordHash], + ); + + const token = await signToken(newAdmin[0].id); + return c.json( + { + message: "Admin account created. 
Save your token — log in with `bb login`.", + admin: newAdmin[0], + token, + }, + 201, + ); + }, +); diff --git a/packages/server/src/routes/admin/functions.ts b/packages/server/src/routes/admin/functions.ts new file mode 100644 index 0000000..420e76e --- /dev/null +++ b/packages/server/src/routes/admin/functions.ts @@ -0,0 +1,51 @@ +import { zValidator } from "@hono/zod-validator"; +import { Hono } from "hono"; +import { nanoid } from "nanoid"; +import { z } from "zod"; +import { getPool } from "../../lib/db"; + +export const functionRoutes = new Hono(); + +functionRoutes.get("/", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, name, runtime, status, deploy_target, created_at FROM betterbase_meta.functions ORDER BY created_at DESC", + ); + return c.json({ functions: rows }); +}); + +functionRoutes.post( + "/", + zValidator( + "json", + z.object({ + name: z + .string() + .min(1) + .regex(/^[a-z0-9-]+$/), + runtime: z.string().default("bun"), + deploy_target: z.enum(["cloudflare", "vercel"]).optional(), + }), + ), + async (c) => { + const data = c.req.valid("json"); + const pool = getPool(); + const { rows } = await pool.query( + `INSERT INTO betterbase_meta.functions (id, name, runtime, deploy_target) + VALUES ($1, $2, $3, $4) + RETURNING id, name, runtime, status, deploy_target, created_at`, + [nanoid(), data.name, data.runtime, data.deploy_target ?? 
null], + ); + return c.json({ function: rows[0] }, 201); + }, +); + +functionRoutes.delete("/:id", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "DELETE FROM betterbase_meta.functions WHERE id = $1 RETURNING id", + [c.req.param("id")], + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ success: true }); +}); diff --git a/packages/server/src/routes/admin/index.ts b/packages/server/src/routes/admin/index.ts new file mode 100644 index 0000000..2e329d3 --- /dev/null +++ b/packages/server/src/routes/admin/index.ts @@ -0,0 +1,25 @@ +import { Hono } from "hono"; +import { requireAdmin } from "../../lib/admin-middleware"; +import { authRoutes } from "./auth"; +import { functionRoutes } from "./functions"; +import { logRoutes } from "./logs"; +import { metricsRoutes } from "./metrics"; +import { projectRoutes } from "./projects"; +import { storageRoutes } from "./storage"; +import { userRoutes } from "./users"; +import { webhookRoutes } from "./webhooks"; + +export const adminRouter = new Hono(); + +// Auth routes are public (login doesn't require a token) +adminRouter.route("/auth", authRoutes); + +// All other admin routes require a valid admin token +adminRouter.use("/*", requireAdmin); +adminRouter.route("/projects", projectRoutes); +adminRouter.route("/users", userRoutes); +adminRouter.route("/metrics", metricsRoutes); +adminRouter.route("/storage", storageRoutes); +adminRouter.route("/webhooks", webhookRoutes); +adminRouter.route("/functions", functionRoutes); +adminRouter.route("/logs", logRoutes); diff --git a/packages/server/src/routes/admin/logs.ts b/packages/server/src/routes/admin/logs.ts new file mode 100644 index 0000000..abffedb --- /dev/null +++ b/packages/server/src/routes/admin/logs.ts @@ -0,0 +1,20 @@ +import { Hono } from "hono"; +import { getPool } from "../../lib/db"; + +export const logRoutes = new Hono(); + +// GET /admin/logs?limit=50&offset=0 +logRoutes.get("/", async (c) => 
{ + const limit = Math.min(Number.parseInt(c.req.query("limit") ?? "50"), 200); + const offset = Number.parseInt(c.req.query("offset") ?? "0"); + const pool = getPool(); + + const { rows } = await pool.query( + `SELECT id, method, path, status, duration_ms, created_at + FROM betterbase_meta.request_logs + ORDER BY created_at DESC + LIMIT $1 OFFSET $2`, + [limit, offset], + ); + return c.json({ logs: rows, limit, offset }); +}); diff --git a/packages/server/src/routes/admin/metrics.ts b/packages/server/src/routes/admin/metrics.ts new file mode 100644 index 0000000..58652ef --- /dev/null +++ b/packages/server/src/routes/admin/metrics.ts @@ -0,0 +1,23 @@ +import { Hono } from "hono"; +import { getPool } from "../../lib/db"; + +export const metricsRoutes = new Hono(); + +// GET /admin/metrics — overview stats for dashboard home +metricsRoutes.get("/", async (c) => { + const pool = getPool(); + + const [projects, admins] = await Promise.all([ + pool.query("SELECT COUNT(*)::int as count FROM betterbase_meta.projects"), + pool.query("SELECT COUNT(*)::int as count FROM betterbase_meta.admin_users"), + ]); + + return c.json({ + metrics: { + projects: projects.rows[0].count, + admin_users: admins.rows[0].count, + server_uptime_seconds: Math.floor(process.uptime()), + timestamp: new Date().toISOString(), + }, + }); +}); diff --git a/packages/server/src/routes/admin/projects.ts b/packages/server/src/routes/admin/projects.ts new file mode 100644 index 0000000..42ec9cd --- /dev/null +++ b/packages/server/src/routes/admin/projects.ts @@ -0,0 +1,106 @@ +import { createHash, randomBytes } from "crypto"; +import { zValidator } from "@hono/zod-validator"; +import { Hono } from "hono"; +import { nanoid } from "nanoid"; +import { z } from "zod"; +import { getPool } from "../../lib/db"; + +export const projectRoutes = new Hono(); + +// GET /admin/projects +projectRoutes.get("/", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, name, slug, 
created_at, updated_at FROM betterbase_meta.projects ORDER BY created_at DESC", + ); + return c.json({ projects: rows }); +}); + +// GET /admin/projects/:id +projectRoutes.get("/:id", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, name, slug, created_at, updated_at FROM betterbase_meta.projects WHERE id = $1", + [c.req.param("id")], + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ project: rows[0] }); +}); + +// POST /admin/projects +projectRoutes.post( + "/", + zValidator( + "json", + z.object({ + name: z.string().min(1).max(100), + slug: z + .string() + .min(1) + .max(63) + .regex(/^[a-z0-9-]+$/, "Slug must be lowercase alphanumeric with hyphens"), + }), + ), + async (c) => { + const { name, slug } = c.req.valid("json"); + const pool = getPool(); + + // Check slug uniqueness + const { rows: existing } = await pool.query( + "SELECT id FROM betterbase_meta.projects WHERE slug = $1", + [slug], + ); + if (existing.length > 0) { + return c.json({ error: "Slug already taken" }, 409); + } + + // Generate admin key — returned once, never again + const adminKeyPlaintext = `bb_admin_${randomBytes(24).toString("hex")}`; + const adminKeyHash = createHash("sha256").update(adminKeyPlaintext).digest("hex"); + + const { rows } = await pool.query( + `INSERT INTO betterbase_meta.projects (id, name, slug, admin_key_hash) + VALUES ($1, $2, $3, $4) + RETURNING id, name, slug, created_at`, + [nanoid(), name, slug, adminKeyHash], + ); + + // Return admin key plaintext ONCE — not stored, cannot be recovered + return c.json({ project: rows[0], admin_key: adminKeyPlaintext }, 201); + }, +); + +// PATCH /admin/projects/:id +projectRoutes.patch( + "/:id", + zValidator( + "json", + z.object({ + name: z.string().min(1).max(100).optional(), + }), + ), + async (c) => { + const { name } = c.req.valid("json"); + const pool = getPool(); + const { rows } = await pool.query( + `UPDATE betterbase_meta.projects + 
SET name = COALESCE($1, name), updated_at = NOW() + WHERE id = $2 + RETURNING id, name, slug, updated_at`, + [name, c.req.param("id")], + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ project: rows[0] }); + }, +); + +// DELETE /admin/projects/:id +projectRoutes.delete("/:id", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "DELETE FROM betterbase_meta.projects WHERE id = $1 RETURNING id", + [c.req.param("id")], + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ success: true }); +}); diff --git a/packages/server/src/routes/admin/storage.ts b/packages/server/src/routes/admin/storage.ts new file mode 100644 index 0000000..d5a0e6f --- /dev/null +++ b/packages/server/src/routes/admin/storage.ts @@ -0,0 +1,57 @@ +import { + CreateBucketCommand, + DeleteBucketCommand, + ListBucketsCommand, + ListObjectsV2Command, + S3Client, +} from "@aws-sdk/client-s3"; +import { zValidator } from "@hono/zod-validator"; +import { Hono } from "hono"; +import { z } from "zod"; + +function getS3Client(): S3Client { + return new S3Client({ + endpoint: process.env.STORAGE_ENDPOINT, + region: "us-east-1", + credentials: { + accessKeyId: process.env.STORAGE_ACCESS_KEY ?? "minioadmin", + secretAccessKey: process.env.STORAGE_SECRET_KEY ?? "minioadmin", + }, + forcePathStyle: true, // Required for MinIO + }); +} + +export const storageRoutes = new Hono(); + +// GET /admin/storage/buckets +storageRoutes.get("/buckets", async (c) => { + const client = getS3Client(); + const { Buckets } = await client.send(new ListBucketsCommand({})); + return c.json({ buckets: Buckets ?? 
[] }); +}); + +// POST /admin/storage/buckets +storageRoutes.post( + "/buckets", + zValidator("json", z.object({ name: z.string().min(1) })), + async (c) => { + const { name } = c.req.valid("json"); + const client = getS3Client(); + await client.send(new CreateBucketCommand({ Bucket: name })); + return c.json({ bucket: { name } }, 201); + }, +); + +// DELETE /admin/storage/buckets/:name +storageRoutes.delete("/buckets/:name", async (c) => { + const client = getS3Client(); + await client.send(new DeleteBucketCommand({ Bucket: c.req.param("name") })); + return c.json({ success: true }); +}); + +// GET /admin/storage/buckets/:name/objects +storageRoutes.get("/buckets/:name/objects", async (c) => { + const client = getS3Client(); + const { Contents } = await client.send(new ListObjectsV2Command({ Bucket: c.req.param("name") })); + return c.json({ objects: Contents ?? [] }); +}); diff --git a/packages/server/src/routes/admin/users.ts b/packages/server/src/routes/admin/users.ts new file mode 100644 index 0000000..767269d --- /dev/null +++ b/packages/server/src/routes/admin/users.ts @@ -0,0 +1,67 @@ +import { zValidator } from "@hono/zod-validator"; +import { Hono } from "hono"; +import { nanoid } from "nanoid"; +import { z } from "zod"; +import { hashPassword } from "../../lib/auth"; +import { getPool } from "../../lib/db"; + +export const userRoutes = new Hono(); + +// GET /admin/users — list all admin users +userRoutes.get("/", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, email, created_at FROM betterbase_meta.admin_users ORDER BY created_at DESC", + ); + return c.json({ users: rows }); +}); + +// POST /admin/users — create new admin user +userRoutes.post( + "/", + zValidator( + "json", + z.object({ + email: z.string().email(), + password: z.string().min(8), + }), + ), + async (c) => { + const { email, password } = c.req.valid("json"); + const pool = getPool(); + + const { rows: existing } = await pool.query( + "SELECT id 
FROM betterbase_meta.admin_users WHERE email = $1", + [email], + ); + if (existing.length > 0) { + return c.json({ error: "Email already registered" }, 409); + } + + const passwordHash = await hashPassword(password); + const { rows } = await pool.query( + "INSERT INTO betterbase_meta.admin_users (id, email, password_hash) VALUES ($1, $2, $3) RETURNING id, email, created_at", + [nanoid(), email, passwordHash], + ); + return c.json({ user: rows[0] }, 201); + }, +); + +// DELETE /admin/users/:id +userRoutes.delete("/:id", async (c) => { + const pool = getPool(); + // Prevent deleting last admin + const { rows: count } = await pool.query( + "SELECT COUNT(*)::int as count FROM betterbase_meta.admin_users", + ); + if (count[0].count <= 1) { + return c.json({ error: "Cannot delete last admin user" }, 400); + } + + const { rows } = await pool.query( + "DELETE FROM betterbase_meta.admin_users WHERE id = $1 RETURNING id", + [c.req.param("id")], + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ success: true }); +}); diff --git a/packages/server/src/routes/admin/webhooks.ts b/packages/server/src/routes/admin/webhooks.ts new file mode 100644 index 0000000..3de7e16 --- /dev/null +++ b/packages/server/src/routes/admin/webhooks.ts @@ -0,0 +1,86 @@ +import { zValidator } from "@hono/zod-validator"; +import { Hono } from "hono"; +import { nanoid } from "nanoid"; +import { z } from "zod"; +import { getPool } from "../../lib/db"; + +export const webhookRoutes = new Hono(); + +webhookRoutes.get("/", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "SELECT id, name, table_name, events, url, enabled, created_at FROM betterbase_meta.webhooks ORDER BY created_at DESC", + ); + return c.json({ webhooks: rows }); +}); + +webhookRoutes.post( + "/", + zValidator( + "json", + z.object({ + name: z.string().min(1), + table_name: z.string().min(1), + events: z.array(z.enum(["INSERT", "UPDATE", "DELETE"])).min(1), + url: 
z.string().url(), + secret: z.string().optional(), + enabled: z.boolean().default(true), + }), + ), + async (c) => { + const data = c.req.valid("json"); + const pool = getPool(); + const { rows } = await pool.query( + `INSERT INTO betterbase_meta.webhooks (id, name, table_name, events, url, secret, enabled) + VALUES ($1, $2, $3, $4, $5, $6, $7) + RETURNING id, name, table_name, events, url, enabled, created_at`, + [ + nanoid(), + data.name, + data.table_name, + data.events, + data.url, + data.secret ?? null, + data.enabled, + ], + ); + return c.json({ webhook: rows[0] }, 201); + }, +); + +webhookRoutes.patch( + "/:id", + zValidator( + "json", + z.object({ + enabled: z.boolean().optional(), + url: z.string().url().optional(), + secret: z.string().optional(), + }), + ), + async (c) => { + const data = c.req.valid("json"); + const pool = getPool(); + const { rows } = await pool.query( + `UPDATE betterbase_meta.webhooks + SET enabled = COALESCE($1, enabled), + url = COALESCE($2, url), + secret = COALESCE($3, secret) + WHERE id = $4 + RETURNING id, name, table_name, events, url, enabled`, + [data.enabled ?? null, data.url ?? null, data.secret ?? 
null, c.req.param("id")], + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ webhook: rows[0] }); + }, +); + +webhookRoutes.delete("/:id", async (c) => { + const pool = getPool(); + const { rows } = await pool.query( + "DELETE FROM betterbase_meta.webhooks WHERE id = $1 RETURNING id", + [c.req.param("id")], + ); + if (rows.length === 0) return c.json({ error: "Not found" }, 404); + return c.json({ success: true }); +}); diff --git a/packages/server/src/routes/device/index.ts b/packages/server/src/routes/device/index.ts new file mode 100644 index 0000000..c3ce8a9 --- /dev/null +++ b/packages/server/src/routes/device/index.ts @@ -0,0 +1,152 @@ +import { zValidator } from "@hono/zod-validator"; +import { Hono } from "hono"; +import { nanoid } from "nanoid"; +import { z } from "zod"; +import { signAdminToken } from "../../lib/auth"; +import { getPool } from "../../lib/db"; + +export const deviceRouter = new Hono(); + +const CODE_EXPIRY_MINUTES = 10; + +// POST /device/code — CLI calls this to initiate login +deviceRouter.post("/code", async (c) => { + const pool = getPool(); + + const deviceCode = nanoid(32); + const userCode = nanoid(8).toUpperCase(); // Human-readable: shown in CLI + const expiresAt = new Date(Date.now() + CODE_EXPIRY_MINUTES * 60 * 1000); + + await pool.query( + `INSERT INTO betterbase_meta.device_codes (user_code, device_code, expires_at) + VALUES ($1, $2, $3)`, + [userCode, deviceCode, expiresAt], + ); + + const baseUrl = + process.env.BETTERBASE_PUBLIC_URL ?? `http://localhost:${process.env.PORT ?? 
3001}`; + + return c.json({ + device_code: deviceCode, + user_code: userCode, + verification_uri: `${baseUrl}/device/verify`, + expires_in: CODE_EXPIRY_MINUTES * 60, + interval: 5, // CLI polls every 5 seconds + }); +}); + +// GET /device/verify — Browser opens this page to approve +deviceRouter.get("/verify", async (c) => { + const userCode = c.req.query("code"); + // Return minimal HTML form for verification + const html = ` + +Betterbase CLI Login + + + +

Betterbase CLI Login

+

Enter your admin credentials to authorize the CLI.

+
+ + + + + + +`; + return c.html(html); +}); + +// POST /device/verify — Form submission +deviceRouter.post("/verify", async (c) => { + const body = await c.req.parseBody(); + const userCode = String(body.user_code ?? "") + .toUpperCase() + .trim(); + const email = String(body.email ?? "").trim(); + const password = String(body.password ?? ""); + + const pool = getPool(); + + // Verify admin credentials + const { rows: admins } = await pool.query( + "SELECT id, password_hash FROM betterbase_meta.admin_users WHERE email = $1", + [email], + ); + if (admins.length === 0) { + return c.html(`

Invalid credentials.

`); + } + + const { verifyPassword } = await import("../../lib/auth"); + const valid = await verifyPassword(password, admins[0].password_hash); + if (!valid) { + return c.html(`

Invalid credentials.

`); + } + + // Find and verify the device code + const { rows: codes } = await pool.query( + `SELECT user_code FROM betterbase_meta.device_codes + WHERE user_code = $1 AND verified = FALSE AND expires_at > NOW()`, + [userCode], + ); + if (codes.length === 0) { + return c.html(`

Code not found or expired.

`); + } + + // Mark verified, associate admin user + await pool.query( + `UPDATE betterbase_meta.device_codes + SET verified = TRUE, admin_user_id = $1 + WHERE user_code = $2`, + [admins[0].id, userCode], + ); + + return c.html(`

✓ CLI authorized. You can close this tab.

`); +}); + +// POST /device/token — CLI polls this to get the token once verified +deviceRouter.post( + "/token", + zValidator("json", z.object({ device_code: z.string() })), + async (c) => { + const { device_code } = c.req.valid("json"); + const pool = getPool(); + + const { rows } = await pool.query( + `SELECT verified, admin_user_id, expires_at + FROM betterbase_meta.device_codes + WHERE device_code = $1`, + [device_code], + ); + + if (rows.length === 0) { + return c.json({ error: "invalid_device_code" }, 400); + } + + const code = rows[0]; + + if (new Date(code.expires_at) < new Date()) { + return c.json({ error: "expired_token" }, 400); + } + + if (!code.verified) { + return c.json({ error: "authorization_pending" }, 202); + } + + // Issue token, clean up device code + const token = await signAdminToken(code.admin_user_id); + await pool.query("DELETE FROM betterbase_meta.device_codes WHERE device_code = $1", [ + device_code, + ]); + + return c.json({ access_token: token, token_type: "Bearer" }); + }, +); diff --git a/packages/server/tsconfig.json b/packages/server/tsconfig.json new file mode 100644 index 0000000..fbcfccc --- /dev/null +++ b/packages/server/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../../tsconfig.base.json", + "compilerOptions": { + "outDir": "dist", + "rootDir": "src" + }, + "include": ["src/**/*", "migrations/**/*"] +}