diff --git a/.github/README.md b/.github/README.md index 00ae8f1..ab2aa89 100644 --- a/.github/README.md +++ b/.github/README.md @@ -228,12 +228,12 @@ Respond to feedback and update your PR as needed. The workflows will re-run auto ## Troubleshooting Common Issues ### "PR targets main branch" -❌ **Problem:** You opened a PR to `main` instead of `test` +❌ **Problem:** You opened a PR to `main` instead of `develop` ✅ **Solution:** 1. Close this PR -2. Create a new PR to the `test` branch -3. Follow the workflow for releases from `test` → `main` +2. Create a new PR to the `develop` branch +3. Follow the workflow for releases from `develop` → `main` ### "Commit message doesn't match format" ❌ **Problem:** Commit message like `"fixed bug"` or `"Update stuff"` diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index f345c3a..7b008a9 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -153,7 +153,7 @@ jobs: issue_number: context.issue.number, owner: context.repo.owner, repo: context.repo.repo, - body: '🔒 **Security Audit:** Dependencies have been checked for known vulnerabilities. Address any critical or high-severity issues before merge. See [SECURITY.md](../SECURITY.md) for guidelines.' + body: `🔒 **Security Audit:** Dependencies have been checked for known vulnerabilities. Address any critical or high-severity issues before merge. 
See [SECURITY.md](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/.github/SECURITY.md) for guidelines.` }); build-check: diff --git a/.github/workflows/issue-assignment.yml b/.github/workflows/issue-assignment.yml index 5e26cbc..22426e0 100644 --- a/.github/workflows/issue-assignment.yml +++ b/.github/workflows/issue-assignment.yml @@ -43,7 +43,7 @@ jobs: issue_number: issue.number, owner: context.repo.owner, repo: context.repo.repo, - body: `🚫 **Unauthorized Assignment Attempt**\n\n@${commenter}, only maintainers can use the \`/assign\` command.\n\n**To get assigned to an issue:**\n1. Comment expressing interest: "I'd like to work on this issue"\n2. Wait for a maintainer to review your request\n3. Maintainer will use \`/assign @username\` to officially assign it\n\n**Authorized to assign issues:**\n- Repository owner (@${repoOwner})\n- Maintainers: ${AUTHORIZED_ASSIGNERS.map(m => '@' + m).join(', ')}\n\nSee [CONTRIBUTING.md](../../Docs/CONTRIBUTING.md#step-2-request-assignment) for details.` + body: `🚫 **Unauthorized Assignment Attempt**\n\n@${commenter}, only maintainers can use the \`/assign\` command.\n\n**To get assigned to an issue:**\n1. Comment expressing interest: "I'd like to work on this issue"\n2. Wait for a maintainer to review your request\n3. Maintainer will use \`/assign @username\` to officially assign it\n\n**Authorized to assign issues:**\n- Repository owner (@${repoOwner})\n- Maintainers: ${AUTHORIZED_ASSIGNERS.map(m => '@' + m).join(', ')}\n\nSee [CONTRIBUTING.md](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/Docs/CONTRIBUTING.md#step-2-request-assignment) for details.` }); return; } @@ -106,7 +106,7 @@ jobs: issue_number: issue.number, owner: context.repo.owner, repo: context.repo.repo, - body: `✅ Issue assigned to @${assignee}. Please follow the [CONTRIBUTING.md](../../Docs/CONTRIBUTING.md) guidelines.` + body: `✅ Issue assigned to @${assignee}. 
Please follow the [CONTRIBUTING.md](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/Docs/CONTRIBUTING.md) guidelines.` }); } catch (error) { github.rest.issues.createComment({ @@ -183,7 +183,7 @@ jobs: issue_number: issue.number, owner: context.repo.owner, repo: context.repo.repo, - body: `🚫 **Unauthorized assignment detected and removed!**\n\n**This issue cannot be assigned without authorization.**\n\nIssues can only be assigned by:\n- Repository owner (@${repoOwner})\n- Maintainers: ${AUTHORIZED_ASSIGNERS.map(m => '@' + m).join(', ')}\n\n**To request assignment:**\nAsk a maintainer to use: \`/assign @username\`\n\n**Why this rule exists:**\nThis ensures proper tracking and prevents unauthorized assignments. See [CONTRIBUTING.md](../../Docs/CONTRIBUTING.md) for details.` + body: `🚫 **Unauthorized assignment detected and removed!**\n\n**This issue cannot be assigned without authorization.**\n\nIssues can only be assigned by:\n- Repository owner (@${repoOwner})\n- Maintainers: ${AUTHORIZED_ASSIGNERS.map(m => '@' + m).join(', ')}\n\n**To request assignment:**\nAsk a maintainer to use: \`/assign @username\`\n\n**Why this rule exists:**\nThis ensures proper tracking and prevents unauthorized assignments. See [CONTRIBUTING.md](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/Docs/CONTRIBUTING.md) for details.` }); console.log(`Removed unauthorized assignment for issue #${issue.number}`); diff --git a/.github/workflows/maintenance.yml b/.github/workflows/maintenance.yml index b34aa6c..8cb40b6 100644 --- a/.github/workflows/maintenance.yml +++ b/.github/workflows/maintenance.yml @@ -234,6 +234,6 @@ jobs: owner: context.repo.owner, repo: context.repo.repo, title: '🔒 Security: Dependency Vulnerabilities Detected', - body: '⚠️ **Automated Security Alert**\n\nDependency vulnerabilities have been detected in the latest scan.\n\n**Action Required:**\n1. Run `npm audit` to see details\n2. Run `npm audit fix` for automatic fixes\n3. 
Review high/critical vulnerabilities\n4. Create PR to fix issues\n\nSee [SECURITY.md](../SECURITY.md) for guidelines.', + body: `⚠️ **Automated Security Alert**\n\nDependency vulnerabilities have been detected in the latest scan.\n\n**Action Required:**\n1. Run \`npm audit\` to see details\n2. Run \`npm audit fix\` for automatic fixes\n3. Review high/critical vulnerabilities\n4. Create PR to fix issues\n\nSee [SECURITY.md](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/.github/SECURITY.md) for guidelines.`, labels: ['security', 'priority-high'] }); diff --git a/.github/workflows/pr-review.yml b/.github/workflows/pr-review.yml index b1d728b..196f078 100644 --- a/.github/workflows/pr-review.yml +++ b/.github/workflows/pr-review.yml @@ -58,7 +58,7 @@ jobs: comment += '\n**Valid Types:** feat, fix, docs, style, refactor, perf, test, chore\n'; comment += '**Example:** `feat(issue-45): add new authentication endpoint`\n'; - comment += '\nPlease see [CONTRIBUTING.md](../../CONTRIBUTING.md#commit-guidelines) for detailed guidelines.'; + comment += `\nPlease see [CONTRIBUTING.md](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/Docs/CONTRIBUTING.md#commit-guidelines) for detailed guidelines.`; github.rest.issues.createComment({ issue_number: context.issue.number, @@ -161,7 +161,7 @@ jobs: issue_number: pr.number, owner: context.repo.owner, repo: context.repo.repo, - body: `🚫 **Branch Protection:** This PR targets \`main\`. All changes must go through the \`test\` branch first.\n\n**Action Required:**\n1. Close this PR\n2. Create a new PR targeting the \`test\` branch\n3. Once merged to \`test\`, create a release PR from \`test\` to \`main\`\n\n**Note:** Only authorized admins can create PRs to \`main\`.\n\nSee [CONTRIBUTING.md](../../Docs/CONTRIBUTING.md) for details.` + body: `🚫 **Branch Protection:** This PR targets \`main\`. All changes must go through the \`test\` branch first.\n\n**Action Required:**\n1. Close this PR\n2. 
Create a new PR targeting the \`test\` branch\n3. Once merged to \`test\`, create a release PR from \`test\` to \`main\`\n\n**Note:** Only authorized admins can create PRs to \`main\`.\n\nSee [CONTRIBUTING.md](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/Docs/CONTRIBUTING.md) for details.` }); } diff --git a/.github/workflows/protect-main.yml b/.github/workflows/protect-main.yml index 961f048..3a11fd3 100644 --- a/.github/workflows/protect-main.yml +++ b/.github/workflows/protect-main.yml @@ -43,7 +43,7 @@ jobs: issue_number: pr.number, owner: context.repo.owner, repo: context.repo.repo, - body: `🚫 **Branch Protection:** This PR targets \`main\`. All changes must go through the \`test\` branch first.\n\n**Action Required:**\n1. Close this PR\n2. Create a new PR targeting the \`test\` branch\n3. Once merged to \`test\`, create a release PR from \`test\` to \`main\`\n\n**Note:** Only authorized admins can create PRs directly to \`main\`.\n\nSee [CONTRIBUTING.md](../../Docs/CONTRIBUTING.md) for details.` + body: `🚫 **Branch Protection:** This PR targets \`main\`. All changes must go through the \`test\` branch first.\n\n**Action Required:**\n1. Close this PR\n2. Create a new PR targeting the \`test\` branch\n3. 
Once merged to \`test\`, create a release PR from \`test\` to \`main\`\n\n**Note:** Only authorized admins can create PRs directly to \`main\`.\n\nSee [CONTRIBUTING.md](https://github.com/${context.repo.owner}/${context.repo.repo}/blob/main/Docs/CONTRIBUTING.md) for details.` }); core.setFailed('Unauthorized PR to main branch'); process.exit(1); diff --git a/.gitignore b/.gitignore index 653b955..513a607 100644 --- a/.gitignore +++ b/.gitignore @@ -6,8 +6,6 @@ node_modules/ .env *.env **/*.env -*.local -**/*.local # Build Outputs dist/ @@ -51,6 +49,8 @@ api-gateway/src/postgres.env WORKERS/build-worker/.env WORKERS/clone-worker/.env +packages/domain/.env + # Temp / Runtime folders clones/ **/clones/ diff --git a/BUILDER_IMAGES/build-image-backend/publisher.js b/BUILDER_IMAGES/build-image-backend/publisher.js index 64f318f..2770a85 100644 --- a/BUILDER_IMAGES/build-image-backend/publisher.js +++ b/BUILDER_IMAGES/build-image-backend/publisher.js @@ -10,7 +10,7 @@ const sns = new SNSClient({ const TOPIC_ARN = process.env.DOMAIN_EVENTS_TOPIC_ARN; -async function publilishEvent(type, projectId, deploymentId, payload) { +async function publishEvent(type, projectId, deploymentId, payload) { await sns.send( new PublishCommand({ TopicArn: TOPIC_ARN, @@ -27,9 +27,9 @@ async function publilishEvent(type, projectId, deploymentId, payload) { ); } -module.exports = { publilishEvent }; +module.exports = { publishEvent }; -console.log("publilishEvent type:", typeof publilishEvent); +console.log("publishEvent type:", typeof publishEvent); -module.exports = { publilishEvent }; \ No newline at end of file +module.exports = { publishEvent }; \ No newline at end of file diff --git a/BUILDER_IMAGES/build-image-backend/script.js b/BUILDER_IMAGES/build-image-backend/script.js index ea733d1..8abbd8e 100644 --- a/BUILDER_IMAGES/build-image-backend/script.js +++ b/BUILDER_IMAGES/build-image-backend/script.js @@ -2,7 +2,7 @@ const { spawn } = require("child_process") const path = 
require("path"); const fs = require("fs") const { Kafka } = require("kafkajs"); -const { publilishEvent } = require("./publisher"); +const { publishEvent } = require("./publisher"); const PROJECT_ID = process.env.PROJECT_ID; const ECR_URI = process.env.ECR_URI; @@ -85,7 +85,7 @@ async function init() { throw new Error(`Build context path does not exist: ${buildContext}`); } - publilishEvent("BACKEND_PROCESSING", PROJECT_ID, DEPLOYMENTID, { backendDir: BACKEND_DIR }) + publishEvent("BACKEND_PROCESSING", PROJECT_ID, DEPLOYMENTID, { backendDir: BACKEND_DIR }) if (!NODE_VERSION) { @@ -154,7 +154,7 @@ async function init() { "BUILD", `Image build failed with code: ${code}` ); - publilishEvent("BACKEND_BUILD_FAILED", PROJECT_ID, DEPLOYMENTID, { msg: "Image build failed with code: ${code}" }) + publishEvent("BACKEND_BUILD_FAILED", PROJECT_ID, DEPLOYMENTID, { msg: `Image build failed with code: ${code}` }) await safeExit(code, "Build failed"); } }); @@ -163,7 +163,7 @@ async function init() { init().catch(err => { console.log("INIT ERROR ", err); publishLog("FAILURE", "INIT", err.message) - publilishEvent("BACKEND_BUILD_FAILED", PROJECT_ID, DEPLOYMENTID, { msg: err.message }) + publishEvent("BACKEND_BUILD_FAILED", PROJECT_ID, DEPLOYMENTID, { msg: err.message }) .finally(() => safeExit(1, "Build Init Failed")); });; diff --git a/BUILDER_IMAGES/builder-image-frontend/publisher.js b/BUILDER_IMAGES/builder-image-frontend/publisher.js index ed235ad..9365000 100644 --- a/BUILDER_IMAGES/builder-image-frontend/publisher.js +++ b/BUILDER_IMAGES/builder-image-frontend/publisher.js @@ -9,7 +9,7 @@ const sns = new SNSClient({ const TOPIC_ARN = process.env.DOMAIN_EVENTS_TOPIC_ARN; -async function publilishEvent(type, projectId, deploymentId, payload) { +async function publishEvent(type, projectId, deploymentId, payload) { await sns.send( new PublishCommand({ TopicArn: TOPIC_ARN, @@ -23,4 +23,4 @@ async function publilishEvent(type, projectId, deploymentId, payload) { ) }
-module.exports = { publilishEvent }; \ No newline at end of file +module.exports = { publishEvent }; \ No newline at end of file diff --git a/BUILDER_IMAGES/builder-image-frontend/script.js b/BUILDER_IMAGES/builder-image-frontend/script.js index 6192488..3602b31 100644 --- a/BUILDER_IMAGES/builder-image-frontend/script.js +++ b/BUILDER_IMAGES/builder-image-frontend/script.js @@ -5,7 +5,7 @@ const mime = require("mime-types"); const readDirRecursive = require("./utils/readDirRecursive"); const { S3Client, PutObjectCommand } = require("@aws-sdk/client-s3"); const { Kafka } = require("kafkajs"); -const { publilishEvent } = require("./publisher"); +const { publishEvent } = require("./publisher"); /* ---------------- ENV ---------------- */ @@ -104,7 +104,7 @@ process.on("SIGTERM", () => safeExit(143, "SIGTERM")); async function init() { await producer.connect(); - await publilishEvent( + await publishEvent( "FRONTEND_PROCESSING", PROJECT_ID, DEPLOYMENTID, @@ -117,7 +117,7 @@ async function init() { const frontendPath = path.join(outputDir, FRONTENDPATH || "."); if (!fs.existsSync(frontendPath)) { - await publilishEvent( + await publishEvent( "FRONTEND_BUILT_FAILED", PROJECT_ID, DEPLOYMENTID, @@ -138,7 +138,7 @@ async function init() { build.on("close", async (code) => { if (code !== 0) { - await publilishEvent( + await publishEvent( "FRONTEND_BUILT_FAILED", PROJECT_ID, DEPLOYMENTID, @@ -168,7 +168,7 @@ async function init() { })); } - await publilishEvent( + await publishEvent( "FRONTEND_BUILT_SUCCESS", PROJECT_ID, DEPLOYMENTID, @@ -179,7 +179,7 @@ async function init() { await safeExit(0, "Success"); } catch (err) { - await publilishEvent( + await publishEvent( "FRONTEND_BUILT_FAILED", PROJECT_ID, DEPLOYMENTID, @@ -191,7 +191,7 @@ async function init() { } init().catch(async (err) => { - await publilishEvent( + await publishEvent( "FRONTEND_BUILT_FAILED", PROJECT_ID, DEPLOYMENTID, @@ -203,7 +203,7 @@ init().catch(async (err) => { /* ---------------- TIMEOUT 
---------------- */ setTimeout(async () => { - await publilishEvent( + await publishEvent( "FRONTEND_BUILT_FAILED", PROJECT_ID, DEPLOYMENTID, diff --git a/Docs/BUILDER_IMAGES_MAPPING.md b/Docs/BUILDER_IMAGES_MAPPING.md new file mode 100644 index 0000000..ae48182 --- /dev/null +++ b/Docs/BUILDER_IMAGES_MAPPING.md @@ -0,0 +1,269 @@ +# Builder Images & ECS Task Mapping + +This document outlines the relationship between Workers, ECS Tasks, and Builder Images in the Veren deployment pipeline. + +## Architecture Overview + +``` +WORKERS/build-worker + └─> Triggers ECS Tasks (via AWS ECS API) + └─> ECS Tasks run containers from ECR + └─> ECR containers built from BUILDER_IMAGES/ +``` + +--- + +## Worker → ECS Task → Builder Image Flow + +### 1. Frontend Build Pipeline + +**Worker Path:** `WORKERS/build-worker/src/services/distributionHandler/buildFrontend.ts` + +#### Node.js 18 Projects +- **ECS Config:** `frontendConfig18` in `WORKERS/build-worker/src/config/ECSconfig.ts` +- **ECS Task Definition:** `arn:aws:ecs:ap-south-1:account_id:task-definition/frontend-builder-18:4` +- **ECS Cluster:** `arn:aws:ecs:ap-south-1:account_id:cluster/builder` +- **Container Name:** `frontend-builder-18` (from `FRONTEND18CONTAINER` env var) +- **Builder Image Source:** `BUILDER_IMAGES/builder-image-frontend/` +- **Dockerfile:** `BUILDER_IMAGES/builder-image-frontend/dockerfiles/Dockerfile.18x.dev` +- **Base Image:** `ubuntu:focal` with Node.js 18 +- **Image in ECR:** `{account_id}.dkr.ecr.ap-south-1.amazonaws.com/frontend-builder-18` + +**What it does:** +- Clones user's repository +- Navigates to frontend directory +- Runs `npm install` (or custom install command) +- Runs `npm run build` (or custom build command) +- Uploads build artifacts to S3 bucket `veren-v2/__outputs/{PROJECT_ID}/` + +#### Node.js 20 Projects +- **ECS Config:** `frontendConfig20` in `WORKERS/build-worker/src/config/ECSconfig.ts` +- **ECS Task Definition:** 
`arn:aws:ecs:ap-south-1:account_id:task-definition/builder-task-20:5` +- **ECS Cluster:** `arn:aws:ecs:ap-south-1:account_id:cluster/builder` +- **Container Name:** `builder-task-20-image` (from `FRONTEND20CONTAINER` env var) +- **Builder Image Source:** `BUILDER_IMAGES/builder-image-frontend/` +- **Dockerfile:** `BUILDER_IMAGES/builder-image-frontend/dockerfiles/Dockerfile.20x.dev` +- **Base Image:** `ubuntu:focal` with Node.js 20 +- **Image in ECR:** `{account_id}.dkr.ecr.ap-south-1.amazonaws.com/builder-task-20-image` + +**What it does:** +- Same as Node 18 pipeline but with Node.js 20 runtime + +--- + +### 2. Backend Build Pipeline + +**Worker Path:** `WORKERS/build-worker/src/services/distributionHandler/buildBackend.ts` + +- **ECS Config:** `backendECSConfig` in `WORKERS/build-worker/src/config/ECSconfig.ts` +- **ECS Task Definition:** `arn:aws:ecs:ap-south-1:account_id:task-definition/backend-build-worker:1` +- **ECS Cluster:** `arn:aws:ecs:ap-south-1:account_id:cluster/backend-builder` +- **Container Name:** `backend-build-worker` (from `BACKEND_CONTAINER` env var) +- **Builder Image Source:** `BUILDER_IMAGES/build-image-backend/` +- **Dockerfile:** `BUILDER_IMAGES/build-image-backend/Dockerfile` +- **Base Image:** `node:20-bullseye` + Kaniko executor +- **Image in ECR:** `{account_id}.dkr.ecr.ap-south-1.amazonaws.com/backend-build-worker` + +**What it does:** +- Clones user's repository +- Navigates to backend directory +- Copies appropriate Dockerfile based on Node version (18 or 20): + - Node 18: `BUILDER_IMAGES/build-image-backend/dockerbackend/Dockerfile_node18/Dockerfile` + - Node 20: `BUILDER_IMAGES/build-image-backend/dockerbackend/Dockerfile_node20/Dockerfile` +- Uses Kaniko to build Docker image for user's backend +- Pushes built image to ECR with tag: `{ECR_URI}:{PROJECT_ID}-{DEPLOYMENTID}` + +--- + +## Builder Images Detailed Breakdown + +### Frontend Builder Images + +Located in: `BUILDER_IMAGES/builder-image-frontend/` + +| Component | Purpose 
| +|-----------|---------| +| `main.sh` | Entry point: clones git repo and executes `script.js` | +| `script.js` | Main build logic: installs deps, runs build, uploads to S3 | +| `publisher.js` | Kafka event publisher for build status updates | +| `utils/` | Helper utilities (e.g., recursive directory reading) | +| `kafka.pem` | Kafka SSL certificate for secure connections | +| `dockerfiles/Dockerfile.18x.dev` | Node.js 18 builder image | +| `dockerfiles/Dockerfile.20x.dev` | Node.js 20 builder image | + +**Environment Variables Required:** +- `GIT_REPOSITORY__URL` - Repository to clone +- `PROJECT_ID` - Unique project identifier +- `DEPLOYMENTID` - Deployment instance ID +- `FRONTENDPATH` - Path to frontend directory +- `BUILDCOMMAND` - Build command (default: `npm run build`) +- `INSTALLCOMMAND` - Install command (default: `npm install`) +- `AWS_ACCESS_KEY_ID` - AWS credentials for S3 upload +- `AWS_SECRET_ACCESS_KEY` - AWS credentials for S3 upload +- `KAFKA_*` - Kafka connection details + +--- + +### Backend Builder Image + +Located in: `BUILDER_IMAGES/build-image-backend/` + +| Component | Purpose | +|-----------|---------| +| `Dockerfile` | Main builder image with Kaniko + Node.js 20 | +| `main.sh` | Entry point: clones git repo and executes `script.js` | +| `script.js` | Main build logic: copies Dockerfile, builds with Kaniko, pushes to ECR | +| `publisher.js` | Kafka event publisher for build status updates | +| `dockerbackend/Dockerfile_node18/` | Template Dockerfile for Node.js 18 backends | +| `dockerbackend/Dockerfile_node20/` | Template Dockerfile for Node.js 20 backends | +| `kafka.pem` | Kafka SSL certificate | + +**Environment Variables Required:** +- `GIT_REPOSITORY__URL` - Repository to clone +- `PROJECT_ID` - Unique project identifier +- `DEPLOYMENTID` - Deployment instance ID +- `BACKEND_PATH` - Path to backend directory +- `NODE_VERSION` - Node.js version (18 or 20) +- `ECR_URI` - ECR repository URI for pushing images +- `AWS_ACCESS_KEY_ID` - 
AWS credentials for ECR push +- `AWS_SECRET_ACCESS_KEY` - AWS credentials for ECR push +- `AWS_REGION` - AWS region +- `KAFKA_*` - Kafka connection details + +--- + +## Configuration Files + +### ECS Configuration +File: `WORKERS/build-worker/src/config/ECSconfig.ts` + +```typescript +export const frontendConfig18 = { + CLUSTER: process.env.AWS_FRONTEND_CLUSTER, + TASK: process.env.TASK18, + CONTAINERNAME: process.env.FRONTEND18CONTAINER +} + +export const frontendConfig20 = { + CLUSTER: process.env.AWS_FRONTEND_CLUSTER, + TASK: process.env.TASK20, + CONTAINERNAME: process.env.FRONTEND20CONTAINER +} + +export const backendECSConfig = { + CLUSTER: process.env.AWS_BACKEND_CLUSTER, + TASK: process.env.TASKBACKEND, + CONTAINERNAME: process.env.BACKEND_CONTAINER +} +``` + +### Environment Variables (Build Worker) +File: `WORKERS/build-worker/.env` + +```env +# ECS Task Definitions +TASK18=arn:aws:ecs:ap-south-1:account_id:task-definition/frontend-builder-18:4 +TASK20=arn:aws:ecs:ap-south-1:account_id:task-definition/builder-task-20:5 +TASKBACKEND=arn:aws:ecs:ap-south-1:account_id:task-definition/backend-build-worker:1 + +# ECS Clusters +AWS_FRONTEND_CLUSTER=arn:aws:ecs:ap-south-1:account_id:cluster/builder +AWS_BACKEND_CLUSTER=arn:aws:ecs:ap-south-1:account_id:cluster/backend-builder + +# ECR Configuration +ECR_URI=account_id.dkr.ecr.ap-south-1.amazonaws.com/my-backend +FRONTEND18CONTAINER=frontend-builder-18 +FRONTEND20CONTAINER=builder-task-20-image +BACKEND_CONTAINER=backend-build-worker +``` + +--- + +## Build Flow Sequence + +### Frontend Build Flow +1. **User submits project** → API Gateway receives request +2. **Clone Worker** → Analyzes project, detects Node.js version (18 or 20) +3. **Build Worker** (`WORKERS/build-worker/src/workers/build.ts`) → Receives job from queue +4. **Build Worker** calls `buildFrontend()` → Selects appropriate ECS config based on Node version +5. 
**ECS Task starts** → Runs container from ECR (frontend-builder-18 or builder-task-20-image) +6. **Container executes:** + - Clones repository + - Installs dependencies + - Runs build command + - Uploads to S3 +7. **Kafka events published** → Status updates sent to API Gateway +8. **Success/Failure** → User notified via WebSocket + +### Backend Build Flow +1. **User submits project** → API Gateway receives request +2. **Clone Worker** → Analyzes project, detects Node.js version (18 or 20) +3. **Build Worker** receives job, calls `buildBackend()` +4. **ECS Task starts** → Runs backend-build-worker container from ECR +5. **Container executes:** + - Clones repository + - Copies appropriate Dockerfile (Node 18 or 20) + - Builds Docker image with Kaniko + - Pushes image to ECR with tag `{PROJECT_ID}-{DEPLOYMENTID}` +6. **Kafka events published** → Status updates sent +7. **Success/Failure** → User notified + +--- + +## ECR Repositories Summary + +| ECR Repository | Source Builder Image | Node Version | Purpose | +|----------------|---------------------|--------------|---------| +| `frontend-builder-18` | `BUILDER_IMAGES/builder-image-frontend/` (Dockerfile.18x.dev) | 18 | Build frontend projects using Node.js 18 | +| `builder-task-20-image` | `BUILDER_IMAGES/builder-image-frontend/` (Dockerfile.20x.dev) | 20 | Build frontend projects using Node.js 20 | +| `backend-build-worker` | `BUILDER_IMAGES/build-image-backend/` (Dockerfile) | 20 (runtime) | Build backend Docker images using Kaniko | +| `{PROJECT_ID}-{DEPLOYMENTID}` | User's backend code | 18 or 20 | User's deployed backend application | + +--- + +## Key Technologies + +- **Kaniko:** Builds Docker images inside containers without Docker daemon (used in backend builder) +- **AWS ECS Fargate:** Serverless container execution +- **AWS ECR:** Container registry for storing builder images and user application images +- **AWS S3:** Storage for frontend build artifacts +- **Kafka:** Event streaming for build status 
updates +- **Redis:** Build job queue and pub/sub for logs + +--- + +## Notes + +1. **Frontend builders** are lightweight - they only need Node.js to install deps and run build commands +2. **Backend builder** is heavier - includes Kaniko for building Docker images +3. **Node version detection** happens in Clone Worker (`WORKERS/clone-worker/src/services/detector/detectProjectType.ts`) +4. **Build artifacts:** + - Frontend: Static files in S3 (`veren-v2/__outputs/{PROJECT_ID}/`) + - Backend: Docker images in ECR (`{ECR_URI}:{PROJECT_ID}-{DEPLOYMENTID}`) +5. **Container name mismatch:** There's a typo in `ECSconfig.ts` - `frontendConfig20` uses `FRONTEND18CONTAINER` instead of `FRONTEND20CONTAINER` + +--- + +## Contributing Guidelines + +**⚠️ IMPORTANT for Issue Reporters:** + +When creating issues or reporting bugs related to this build pipeline, you **MUST** specify the exact file paths you are referring to. This repository has a complex structure with multiple services, workers, and builder images. + +**Good Examples:** +- ✅ "Bug in `WORKERS/build-worker/src/services/distributionHandler/buildFrontend.ts` line 105" +- ✅ "Environment variable missing in `BUILDER_IMAGES/build-image-backend/script.js`" +- ✅ "Update needed in `WORKERS/build-worker/src/config/ECSconfig.ts`" + +**Bad Examples:** +- ❌ "Bug in the build worker" +- ❌ "Frontend builder is broken" +- ❌ "Fix the Dockerfile" + +**Why this matters:** +- We have multiple workers: `build-worker`, `clone-worker`, `repo-analyzer` +- We have multiple builder images with similar names +- We have multiple Dockerfiles in different directories +- Ambiguous references slow down issue resolution and can lead to fixes in the wrong place + +Please use the file paths shown in this document as reference when reporting issues. 
diff --git a/Docs/CONTRIBUTING.md b/Docs/CONTRIBUTING.md index c5e14a1..2e7b2d0 100644 --- a/Docs/CONTRIBUTING.md +++ b/Docs/CONTRIBUTING.md @@ -86,10 +86,10 @@ Veren is a **backend-driven deployment system** that automates building and depl - Create a **feature branch** from `main` (e.g., `feature/issue-123-brief-description`) - Make commits with **conventional commit messages** - Push to your fork or branch - - Create a **Pull Request** with clear description linking the issue + - Create a **Pull Request** to the `develop` branch with clear description linking the issue - Wait for **at least 1 code review** - ❌ **DO NOT** force push or directly merge to main - - ❌ **DO NOT** make PRs to main before discussion in the issue + - ❌ **DO NOT** make PRs to main; use `develop` for contributor PRs #### 6. **Communication Guidelines** - Be **respectful and professional** diff --git a/README.md b/README.md index d137ce2..682773d 100644 --- a/README.md +++ b/README.md @@ -3,13 +3,12 @@ ### Veren is a backend driven deployment system that automates building and deploying application from source repositories using a service-oriented architecture. 
## Guides - -[~ Documentation](https://main.veren.site/docs) +[~ Documentation]() -[~ API Walkthrough](https://main.veren.site/api-walkthrough) - -[~ Fast Setups?](#project-setup) - -[~ Buy me a Coffee](https://main.veren.site/cofee) +[~ API Walkthrough](https://github.com/atithi4dev/veren/blob/main/Docs/API_DOCUMENTATION.md) +[~ Contribution Guidelines](https://github.com/atithi4dev/veren/blob/main/Docs/CONTRIBUTING.md) +[~ For Contributors](https://github.com/atithi4dev/veren/blob/main/Docs/GITHUB_SETUP.md) +[~ ECR IMAGES FOR USE CASE](https://github.com/atithi4dev/veren/blob/main/Docs/BUILDER_IMAGES_MAPPING.md) ## Project Setup - @@ -20,13 +19,13 @@ git clone cd veren ``` -We assume you have already gone through the required configuration and added the necessary files as described in [@essentials](https://github.com/atithi4dev/veren/tree/test/api-gateway) before starting the project. +We assume you have already gone through the required configuration and added the necessary files as described in [@essentials](https://github.com/atithi4dev/veren/blob/main/Docs/GITHUB_SETUP.md) before starting the project. **Start the services using Docker Compose:** ```bash docker compose up --build ``` -Now you are ready to visit [api-guidelines](https://main.veren.site/docs) to access the differnt routes and supported features. +Now you are ready to visit [api-guidelines](https://github.com/atithi4dev/veren/blob/main/Docs/API_DOCUMENTATION.md) to access the different routes and supported features. ## Architecture Overview - @@ -51,7 +50,7 @@ As an aspiring developer, there may be design gaps, edge cases, or implementatio Reasonable changes and improvements will be reviewed and merged when possible, keeping in mind academic and development commitments. -Please follow the [contribution guidelines](#contribution-guidelines) below when opening issues or submitting PRs.
+Please follow the [contribution guidelines](https://github.com/atithi4dev/veren/blob/main/Docs/CONTRIBUTING.md) below when opening issues or submitting PRs. ## Contribution Guidelines - This project is open to help and suggestions rather than strict contributions. @@ -69,7 +68,7 @@ Any help or feedback is appreciated ## Support - Open an [issue](https://github.com/atithi4dev/veren/issues) on the GitHub repository. - +Tag on [Discord](https://discord.gg/tACgSEYz) Reach out via [email](atithisingh.dev@gmail.com) or project discussion. @@ -79,6 +78,4 @@ Reach out via [email](atithisingh.dev@gmail.com) or project discussion.


-
- - +
\ No newline at end of file diff --git a/WORKERS/build-worker/src/services/distributionHandler/buildBackend.ts b/WORKERS/build-worker/src/services/distributionHandler/buildBackend.ts index 03da9b1..89d2d85 100644 --- a/WORKERS/build-worker/src/services/distributionHandler/buildBackend.ts +++ b/WORKERS/build-worker/src/services/distributionHandler/buildBackend.ts @@ -3,7 +3,7 @@ import dotenv from 'dotenv'; import { AwsCredentialIdentity } from "@aws-sdk/types"; import { ECSClient, RunTaskCommand } from "@aws-sdk/client-ecs"; import { backendECSConfig } from "../../config/ECSconfig.js" -import { DeploymentStatus, publilishEvent } from "@veren/domain"; +import { DeploymentStatus, publishEvent } from "@veren/domain"; dotenv.config({ path: '../../../.env' @@ -90,7 +90,7 @@ export async function buildBackend( const resp = await ecsClient.send(backendCommand) if (resp.failures && resp.failures.length > 0) { - publilishEvent({ + publishEvent({ type: DeploymentStatus.INTERNAL_ERROR, projectId, deploymentId, diff --git a/WORKERS/build-worker/src/services/distributionHandler/buildFrontend.ts b/WORKERS/build-worker/src/services/distributionHandler/buildFrontend.ts index e58721b..20fe024 100644 --- a/WORKERS/build-worker/src/services/distributionHandler/buildFrontend.ts +++ b/WORKERS/build-worker/src/services/distributionHandler/buildFrontend.ts @@ -4,7 +4,7 @@ import { AwsCredentialIdentity } from "@aws-sdk/types"; import dotenv from "dotenv"; import logger from "../../logger/logger.js"; -import { DeploymentStatus, publilishEvent } from '@veren/domain'; +import { DeploymentStatus, publishEvent } from '@veren/domain'; dotenv.config({ path: '../../../.env' @@ -119,7 +119,7 @@ export async function buildFrontend( const resp = await ecsClient.send(command18) if (resp.failures && resp.failures.length > 0) { - publilishEvent({ + publishEvent({ type: DeploymentStatus.INTERNAL_ERROR, projectId, deploymentId, @@ -161,7 +161,7 @@ export async function buildFrontend( const resp = await 
ecsClient.send(command20) if (resp.failures && resp.failures.length > 0) { - publilishEvent({ + publishEvent({ type: DeploymentStatus.INTERNAL_ERROR, projectId, deploymentId, diff --git a/WORKERS/build-worker/src/workers/build.ts b/WORKERS/build-worker/src/workers/build.ts index 61838e7..0c3224e 100644 --- a/WORKERS/build-worker/src/workers/build.ts +++ b/WORKERS/build-worker/src/workers/build.ts @@ -7,7 +7,7 @@ import { buildFrontend } from "../services/distributionHandler/buildFrontend.js" import { buildBackend } from "../services/distributionHandler/buildBackend.js" import { safeExecute } from "../types/index.js"; -import { DeploymentStatus, publilishEvent } from '@veren/domain' +import { DeploymentStatus, publishEvent } from '@veren/domain' import { BuildJobError } from "../utils/buildError.js"; dotenv.config({ path: "../../.env" }); @@ -129,7 +129,7 @@ const worker = new Worker('buildQueue', worker.on('completed', async (job, result) => { const { projectId, deploymentId, FrontendtaskArn, BackendtaskArn } = result; - publilishEvent({ + publishEvent({ type: DeploymentStatus.BUILD_QUEUE_SUCCESS, projectId: result.projectId, deploymentId: result.deploymentId, @@ -147,21 +147,21 @@ worker.on('failed', async (job: any, err: any) => { }); if (err instanceof BuildJobError) { if (err.message == "BACKEND_BUILT_FAILED") { - publilishEvent({ + publishEvent({ type: DeploymentStatus.BACKEND_QUEUE_FAILED, projectId: job?.data?.projectId!, deploymentId: job?.data?.deploymentId!, payload: err.payload, }); } else if (err.message == "FRONTEND_BUILT_FAILED") { - publilishEvent({ + publishEvent({ type: DeploymentStatus.FRONTEND_QUEUE_FAILED, projectId: job?.data?.projectId!, deploymentId: job?.data?.deploymentId!, payload: err.payload, }); } else { - publilishEvent({ + publishEvent({ type: DeploymentStatus.BUILD_UNKNOWN_FAILURE, projectId: job?.data?.projectId!, deploymentId: job?.data?.deploymentId!, @@ -169,7 +169,7 @@ worker.on('failed', async (job: any, err: any) => { }); } } 
else { - publilishEvent({ + publishEvent({ type: DeploymentStatus.INTERNAL_ERROR, projectId: job?.data?.projectId!, deploymentId: job?.data?.deploymentId!, diff --git a/WORKERS/clone-worker/src/workers/clone.ts b/WORKERS/clone-worker/src/workers/clone.ts index da32e85..42ff728 100644 --- a/WORKERS/clone-worker/src/workers/clone.ts +++ b/WORKERS/clone-worker/src/workers/clone.ts @@ -5,7 +5,7 @@ import fs from "fs/promises"; import logger from "../logger/logger.js"; import { cloneRepo } from "../GitHandler/gitHandler.js"; import repoConfigGenerator, { IBuild } from "../services/repoConfigGenerator.js"; -import { DeploymentStatus, publilishEvent } from "@veren/domain"; +import { DeploymentStatus, publishEvent } from "@veren/domain"; import { CloneJobError } from "../utils/JobError.js"; /* ---------------- TYPES ---------------- */ @@ -154,7 +154,7 @@ worker.on("completed", async (job, result) => { deploymentId: result.deploymentId, }); - publilishEvent({ + publishEvent({ type: DeploymentStatus.REPO_ANALYSIS_SUCCESS, projectId: result.projectId, deploymentId: result.deploymentId, @@ -173,14 +173,14 @@ worker.on("failed", async (job, err) => { }); if (err instanceof CloneJobError) { - publilishEvent({ + publishEvent({ type: DeploymentStatus.REPO_ANALYSIS_FAILED, projectId: job?.data?.projectId!, deploymentId: job?.data?.deploymentId!, payload: err.payload, }); } else { - publilishEvent({ + publishEvent({ type: DeploymentStatus.INTERNAL_ERROR, projectId: job?.data?.projectId!, deploymentId: job?.data?.deploymentId!, diff --git a/api-gateway/src/controllers/deployment.controller.ts b/api-gateway/src/controllers/deployment.controller.ts index b12a2cb..fc19cb4 100644 --- a/api-gateway/src/controllers/deployment.controller.ts +++ b/api-gateway/src/controllers/deployment.controller.ts @@ -7,7 +7,7 @@ import { cloneQueue } from "../Queue/clone-queue.js"; import logger from "../logger/logger.js"; -import { Project, DeploymentStatus, publilishEvent } from "@veren/domain"; 
+import { Project, DeploymentStatus, publishEvent } from "@veren/domain"; import { Deployment } from "@veren/domain"; @@ -84,7 +84,7 @@ const deployProject = asyncHandler(async (req: Request, res: Response) => { logger.info(`Clone job added for project ${projectId}`); - publilishEvent({ + publishEvent({ type: DeploymentStatus.CREATED, projectId: projectId, deploymentId: newDeployment._id.toString(), @@ -102,6 +102,10 @@ const deployProject = asyncHandler(async (req: Request, res: Response) => { return res.status(200).json({ message: "Deployment triggered successfully." }); }) +const roleBackProject = asyncHandler(async (req:Request, res: Response) =>{ + +}) + const deployTo = asyncHandler(async (req: Request, res: Response) => { // const { projectId }= req.body; // const lastDeployment = await Deployment.findOne({ projectId }) @@ -110,5 +114,5 @@ const deployTo = asyncHandler(async (req: Request, res: Response) => { }) export { - deployProject, deployTo + deployProject, deployTo, roleBackProject } \ No newline at end of file diff --git a/api-gateway/src/controllers/url.controller.ts b/api-gateway/src/controllers/url.controller.ts deleted file mode 100644 index 624e1f9..0000000 --- a/api-gateway/src/controllers/url.controller.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { Request, Response } from "express"; -import axios from "axios"; - -// REMOVE AFTER UPDATES IN OTHER MAIN ROUTES -export async function handleFirstDeployment(req: Request, res: Response) { - // try { - // const { - // projectName, - // url, - // frontendPath, - // backendPath, - // frontendEnv, - // backendEnv, - // FrontendBuildCommand, - // BackendBuildCommand - // } = req.body; - - // if (!url || !req.session.githubToken) { - // return res.status(400).json({ success: false, message: "URL and valid session token are required" }); - // } - - // if (!frontendPath || !backendPath) { - // return res.status(400).json({ success: false, message: "Both frontendPath and backendPath are required" }); - // } - 
// console.log("Forwarding request to submission service..."); - - // const repoConfig = { - // envs: { - // frontendEnv, - // backendEnv - // }, - // buildCommand: { - // FrontendBuildCommand: "npm run build", - // BackendBuildCommand: "npm run build", - // } - // } - - // const pathToFolder = { - // frontendPath, - // backendPath, - // } - // const response = await axios.post( - // "http://submission-service:3000/api/v1/url", - // { - // projectName, - // url, - // pathToFolder, - // repoConfig, - // token: req.session.githubToken - // }, - // { timeout: 10000 } - // ); - - // console.log("Response from submission service:", response.data); - - // return res.json({ - // success: true, - // message: "Please wait while we process your request", - // }); - // } catch (error: any) { - // return res.status(500).json({ success: false, error: error.message }); - // } -} - - -export async function handleEnvironmentVariable(req:Request, res: Response) { - -} \ No newline at end of file diff --git a/api-gateway/src/models/deployment.model.ts b/api-gateway/src/models/deployment.model.ts deleted file mode 100644 index 5929591..0000000 --- a/api-gateway/src/models/deployment.model.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { Schema, model } from 'mongoose' -import IDeployment from "../types/deployment.js" - -const deploymentSchema = new Schema({ - projectId: { - type: Schema.Types.ObjectId, - ref: "Project", - required: true - }, - owner: { - type: Schema.Types.ObjectId, - ref: "User", - required: true - }, - status: { - type: String, - enum: ["queued", "building", "deployed", "failed"], - default: null - }, - number : { - type: Number, - required: true - }, - commitHash: { - type: String, - trim: true, - }, - commitMessage: { - type: String, - trim: true - }, - buildLogsUrl: { - type: String - }, - backendImageUrl: { - type: String - }, - frontendTaskArn: { - type: String - }, - backendTaskArn: { - type: String - }, - artifactUrl: { - type: String - }, - rollBackArtifactUrl: 
{ - type: String - }, - startedAt: { - type: Date, - default: Date.now - }, - finishedAt: { - type: Date, - default: null - }, -}, - { - timestamps: true - } -) - -deploymentSchema.index({ projectId: 1, number: -1 }); -deploymentSchema.index({ projectId: 1, createdAt: -1 }); - -const Deployment = model("Deployment", deploymentSchema); - -export default Deployment; \ No newline at end of file diff --git a/api-gateway/src/models/project.model.ts b/api-gateway/src/models/project.model.ts deleted file mode 100644 index dfe9248..0000000 --- a/api-gateway/src/models/project.model.ts +++ /dev/null @@ -1,145 +0,0 @@ -import { Schema, model } from "mongoose"; -import IProject from "../types/project.js"; - -const envSchema = new Schema( - { - key: { - type: String, - required: true - }, - value: { - type: String, - required: true - } - }, - { _id: false } -) - -const projectSchema = new Schema({ - name: { - type: String, - required: true, - unique: true, - trim: true - }, - git: { - provider: { - type: String, - enum: ["github"], - default: "github" - }, - repoUrl: { - type: String, - required: true - }, - branch: { - type: String, - default: "main" - }, - rootDir: { - type: String, - default: "/" - }, - }, - envs: { - frontendEnv: { - type: [envSchema], - default: [] - }, - backendEnv: { - type: [envSchema], - default: [], - } - }, - repoPath: { - frontendDirPath: { - type: String, - default: "./frontend" - }, - backendDirPath: { - type: String, - default: "./backend" - } - }, - domains: { - subdomain: { - type: String, - unique: true, - }, - customDomain: { - type: String, - } - }, - build: { - framework: { - type: String, - }, - frontendBuildCommand: { - type: String, - default: "npm run build" - }, - frontendInstallCommand: { - type: String, - default: "npm install" - }, - backendInstallCommand: { - type: String, - default: "npm install" - }, - frontendOutDir: { - type: String, - default: "./build" - } - }, - runtime: { - frontend: { - type: { - type: String, - enum: 
["static", "server"], - default: "static", - }, - port: Number - }, - backend: { - type: { - type: String, - enum: ["static", "server"], - default: "server" - }, - port: Number - } - }, - status: { - type: String, - enum: ["active", "paused", "deleted"], - default: "active", - }, - deployments: [ - { - type: Schema.Types.ObjectId, - ref: "Deployment" - } - ], - currentDeployment: { - type: Schema.Types.ObjectId, - ref: "Deployment" - }, - createdBy: { - type: Schema.Types.ObjectId, - ref: "User", - required: true, - } -}, - { - timestamps: true - } -) - - -envSchema.pre("save", function () { - // encrypt value -}); - -const Project = model("Project", projectSchema); - -export default Project; \ No newline at end of file diff --git a/api-gateway/src/models/user.model.ts b/api-gateway/src/models/user.model.ts index ac0f3ae..d0e67df 100644 --- a/api-gateway/src/models/user.model.ts +++ b/api-gateway/src/models/user.model.ts @@ -53,7 +53,7 @@ const userSchema = new Schema({ userSchema.methods.generateAccessToken = function ():string { const ACCESS_TOKEN_SECRET = process.env.ACCESS_TOKEN_SECRET - const ACCESS_TOKEN_EXPIRY = "7d"; + const ACCESS_TOKEN_EXPIRY = "15m"; if (!ACCESS_TOKEN_SECRET) { throw new Error("Access Token is not defined"); diff --git a/api-gateway/src/routes/deployment.route.ts b/api-gateway/src/routes/deployment.route.ts index 0a06cf3..7a73794 100644 --- a/api-gateway/src/routes/deployment.route.ts +++ b/api-gateway/src/routes/deployment.route.ts @@ -1,6 +1,6 @@ import { Router } from "express" import { verifyJwt } from "../middlewares/auth.middlewares.js"; -import {deployProject} from "../controllers/deployment.controller.js" +import {deployProject, deployTo, roleBackProject} from "../controllers/deployment.controller.js" const router = Router(); router.use(verifyJwt); @@ -8,8 +8,8 @@ router.use(verifyJwt); router.route('/:projectId') .get(deployProject) router.route('/d/:projectId') - .post(deployProject) + .post(deployTo) 
router.route('/r/:projectId') - .post(deployProject) + .post(roleBackProject) export default router; \ No newline at end of file diff --git a/api-gateway/src/routes/url.route.ts b/api-gateway/src/routes/url.route.ts deleted file mode 100644 index 115d9ae..0000000 --- a/api-gateway/src/routes/url.route.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { Router, Request, Response } from "express"; -import {handleFirstDeployment, handleEnvironmentVariable} from "../controllers/url.controller.js"; -import axios from "axios"; -const router = Router(); - -router.post("/", handleFirstDeployment); -router.post("/env", handleEnvironmentVariable) - -export default router; diff --git a/notification-service/src/app.ts b/notification-service/src/app.ts index 9ed7ea4..0615426 100644 --- a/notification-service/src/app.ts +++ b/notification-service/src/app.ts @@ -21,12 +21,6 @@ app.use(express.urlencoded({extended: true, limit: '16kb'})) app.use(cookieParser()); app.use(express.static('public')); - -app.post('api/v1/log/:dp',(req,res)=>{ - console.log("NOTIFICATION SERVICE RECIVED API REQUEST."); - return res.json({msg: "DONE MAN"}); -}); - app.use(errorHandler) export default app; diff --git a/packages/domain/src/index.ts b/packages/domain/src/index.ts index d1defb3..9434313 100644 --- a/packages/domain/src/index.ts +++ b/packages/domain/src/index.ts @@ -5,4 +5,4 @@ export { Project } from "./project.model.js" export type { IProject } from "./types/project.js"; export { DeploymentStatus } from './enums.js' -export { publilishEvent } from './publisher.js' \ No newline at end of file +export { publishEvent } from './publisher.js' \ No newline at end of file diff --git a/packages/domain/src/publisher.ts b/packages/domain/src/publisher.ts index e3510db..e6b3e67 100644 --- a/packages/domain/src/publisher.ts +++ b/packages/domain/src/publisher.ts @@ -10,7 +10,7 @@ const sns = new SNSClient({ const TOPIC_ARN = process.env.DOMAIN_EVENTS_TOPIC_ARN!; -export async function publilishEvent(event: { 
+export async function publishEvent(event: { type: DeploymentStatusI; projectId: string; deploymentId: string;