Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 15 additions & 50 deletions .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
@@ -1,19 +1,12 @@
name: Build, Push, and Deploy

on:
pull_request:
branches:
- main

env:
DOCKER_BUILD_RECORD_RETENTION_DAYS: 1
BUILDX_CACHE_MAX_SIZE: 5GB
ACTIONS_CACHE_KEY_PREFIX: v1
on:
push:
branches: [ main ]

jobs:
build_frontend_image:
name: 1. Build Frontend Image
runs-on: ubuntu-latest
build_and_push:
runs-on: docker
steps:
- name: Checkout repository
uses: actions/checkout@v4
Expand Down Expand Up @@ -46,29 +39,21 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@v4

# НОВЫЙ ШАГ: Устанавливаем Node.js
- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '22'
cache: 'npm' # Включаем кеширование для npm
cache-dependency-path: backend/package-lock.json

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

# ИЗМЕНЕННЫЙ ШАГ: Логика кеширования и сборки для Node.js
- name: Build image and save as artifact
uses: docker/build-push-action@v5
with:
context: ./backend
push: false
load: true
tags: diametrfq/website-backend:latest
cache-from: type=gha,scope=backend-build-v1
cache-to: type=gha,mode=max,scope=backend-build-v1

cache-from: |
type=gha,scope=backend-build-${{ hashFiles('backend/**') }}
type=gha,scope=backend-build
cache-to: |
type=gha,mode=max,scope=backend-build-${{ hashFiles('backend/**') }}
type=gha,mode=min,scope=backend-build
- name: Save image to tar
run: docker save diametrfq/website-backend:latest -o /tmp/backend.tar
- name: Upload image artifact
Expand Down Expand Up @@ -100,9 +85,7 @@ jobs:
- name: Load and Push Frontend Image
run: |
docker load --input /tmp/frontend.tar
docker push diametrfq/website:latest &
- name: Load and Push Backend Image
run: |
docker push diametrfq/website:latest
docker load --input /tmp/backend.tar
docker push diametrfq/website-backend:latest &
- name: Wait for all pushes to complete
Expand All @@ -114,36 +97,22 @@ jobs:
needs: [push_images]
steps:
- uses: actions/checkout@v4

# ----- НОВЫЕ ШАГИ -----
- name: Create deployment archive
run: tar -czvf deploy.tar.gz docker-compose.yml configs

# ----- ИЗМЕНЕННЫЙ ШАГ -----
- name: Transfer Files to Server
uses: appleboy/scp-action@master
uses: appleboy/scp-action@v0.1.4
with:
host: ${{ secrets.HOSTING_SERVER }}
username: ${{ secrets.HOSTING_NAME }}
password: ${{ secrets.HOSTING_PASSWORD }}
source: "deploy.tar.gz" # Передаем только один файл
source: "./docker-compose.yml,./configs,./nginx.prod.conf"
target: "/myPath"

- name: Deploy to Server
uses: appleboy/ssh-action@master
with:
host: ${{ secrets.HOSTING_SERVER }}
username: ${{ secrets.HOSTING_NAME }}
password: ${{ secrets.HOSTING_PASSWORD }}
script: |
mkdir -p /myPath
cd /myPath

echo "Unpacking deployment files..."
tar -xzvf deploy.tar.gz

rm deploy.tar.gz

export GRAFANA_ADMIN_PASSWORD=${{ secrets.GRAFANA_ADMIN_PASSWORD }}
export SPOTIFY_CLIENT_ID=${{ secrets.SPOTIFY_CLIENT_ID }}
export SPOTIFY_CLIENT_SECRET=${{ secrets.SPOTIFY_CLIENT_SECRET }}
Expand All @@ -154,9 +123,5 @@ jobs:

echo "Pulling latest images..."
docker-compose pull

echo "Starting services..."
docker-compose up -d --force-recreate --remove-orphans

echo "Cleaning up old images..."
docker system prune -af
docker system prune -af
2 changes: 1 addition & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@
**.local**
**.dev**

getPrompt.js
gp
file_index.json
4 changes: 3 additions & 1 deletion backend/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,6 @@
npm-debug.log*

# misc
.DS_Store
.DS_Store

/target
142 changes: 142 additions & 0 deletions backend/src/endpoints/api/telegram/services.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
use super::errors::{AppError, AppResult};
use super::models::{Post, TelegramCache};
use async_trait::async_trait;
use bytes::Bytes;
use rss::Channel;
use scraper::Html;
use std::time::{Duration, Instant};
use tokio::sync::Mutex;

const TELEGRAM_CHANNEL: &str = "diametrpd";
const CACHE_TTL_SECONDS: u64 = 600;

/// Abstraction over fetching raw RSS bytes, so the feed source can be
/// swapped out (e.g. mocked) in tests of `fetch_telegram_posts`.
#[async_trait]
pub trait RssFetcher {
    /// Downloads the RSS document at `url` and returns its raw bytes.
    ///
    /// # Errors
    /// Implementations return an `AppError` on network failure or a
    /// non-success HTTP status.
    async fn fetch_rss_content(&self, url: &str) -> AppResult<Bytes>;
}

pub struct RealRssFetcher;

#[async_trait]
impl RssFetcher for RealRssFetcher {
    /// Performs an HTTP GET of `url` and returns the response body bytes.
    ///
    /// Network failures and non-success HTTP statuses are logged and mapped
    /// to `AppError::ServiceErrorWithFallback` so the caller can fall back
    /// to cached data.
    async fn fetch_rss_content(&self, url: &str) -> AppResult<Bytes> {
        let response = match reqwest::get(url).await {
            Ok(resp) => resp,
            Err(e) => {
                log::error!("Network error while fetching RSS: {}", e);
                return Err(AppError::ServiceErrorWithFallback);
            }
        };

        let status = response.status();
        if !status.is_success() {
            log::error!(
                "External service responded with non-success status: {}",
                status
            );
            return Err(AppError::ServiceErrorWithFallback);
        }

        // Body-read errors convert via `?` (From<reqwest::Error> for AppError).
        Ok(response.bytes().await?)
    }
}

/// Returns recent posts from the configured Telegram channel's RSS feed.
///
/// Serves directly from `cache_mutex` while the cached entry is younger than
/// `CACHE_TTL_SECONDS`. Otherwise it refreshes via `fetcher`, parses the feed,
/// and stores the result back into the cache. If the refresh fails, any stale
/// cached posts are returned as a fallback; only when the cache is empty is
/// the fetch error propagated.
pub async fn fetch_telegram_posts(
    fetcher: &(dyn RssFetcher + Sync),
    cache_mutex: &Mutex<Option<TelegramCache>>,
) -> AppResult<Vec<Post>> {
    // Fast path: a still-fresh cache entry. The guard lives in its own scope
    // so the lock is released before any network activity starts.
    {
        let guard = cache_mutex.lock().await;
        if let Some(cached) = guard.as_ref() {
            if cached.last_updated.elapsed() < Duration::from_secs(CACHE_TTL_SECONDS) {
                log::info!("Returning Telegram posts from CACHE");
                return Ok(cached.posts.clone());
            }
        }
    }

    log::info!("Cache expired or empty, fetching from RSS...");
    let feed_url = format!("https://rsshub.app/telegram/channel/{}", TELEGRAM_CHANNEL);

    match fetcher.fetch_rss_content(&feed_url).await {
        Ok(body) => {
            let channel = Channel::read_from(&body[..]).map_err(|e| {
                log::error!("Failed to parse RSS feed: {}", e);
                AppError::ServiceErrorWithFallback
            })?;

            let mut posts = Vec::with_capacity(channel.items().len());
            for item in channel.items() {
                // Prefer <description>, fall back to <content>, else empty.
                let raw_snippet = item
                    .description()
                    .unwrap_or_else(|| item.content().unwrap_or(""));
                posts.push(Post {
                    title: item.title().unwrap_or("Без заголовка").to_string(),
                    link: item.link().unwrap_or("#").to_string(),
                    content_snippet: html_to_plaintext(raw_snippet),
                    image_url: extract_image_url(item),
                });
            }

            // Publish the refreshed posts; scoped so the guard drops promptly.
            {
                let mut guard = cache_mutex.lock().await;
                *guard = Some(TelegramCache {
                    posts: posts.clone(),
                    last_updated: Instant::now(),
                });
            }
            log::info!("Telegram posts updated successfully");
            Ok(posts)
        }
        Err(e) => {
            log::warn!("Failed to fetch new posts: {}. Trying fallback to stale cache.", e);
            let guard = cache_mutex.lock().await;
            match guard.as_ref() {
                Some(stale) => {
                    log::info!("Returning STALE Telegram posts from cache");
                    Ok(stale.posts.clone())
                }
                None => Err(e),
            }
        }
    }
}

/// Strips HTML markup from `html_content` and collapses all runs of
/// whitespace, returning the text content as a single space-separated line.
/// Empty input yields an empty `String`.
fn html_to_plaintext(html_content: &str) -> String {
    if html_content.is_empty() {
        return String::new();
    }
    let fragment = Html::parse_fragment(html_content);
    // Collect individual words from every text node; joining them with a
    // single space normalizes all internal whitespace in one pass.
    let mut words: Vec<&str> = Vec::new();
    for chunk in fragment.root_element().text() {
        words.extend(chunk.split_whitespace());
    }
    words.join(" ")
}

/// Extracts an image URL for an RSS `item`, if any.
///
/// Prefers the item's `<enclosure>` when its MIME type is `image/*`;
/// otherwise scans the description (or content) HTML for the first
/// `<img src="...">` attribute. Returns `None` when neither yields a URL.
fn extract_image_url(item: &rss::Item) -> Option<String> {
    if let Some(enclosure) = item.enclosure() {
        if enclosure.mime_type().starts_with("image/") {
            return Some(enclosure.url().to_string());
        }
    }

    let content_to_search = item.description().or_else(|| item.content()).unwrap_or("");
    if content_to_search.is_empty() {
        return None;
    }

    // Compile the pattern once per process instead of on every call: the
    // pattern is a constant, so recompiling it per item was pure overhead
    // (and compilation of a valid literal cannot fail, hence `expect`).
    static IMG_SRC_RE: std::sync::OnceLock<regex::Regex> = std::sync::OnceLock::new();
    let re = IMG_SRC_RE.get_or_init(|| {
        regex::Regex::new(r#"<img[^>]+src=["']([^"']+)["']"#)
            .expect("static <img src> pattern is valid")
    });

    re.captures(content_to_search)
        .and_then(|cap| cap.get(1))
        .map(|url_match| url_match.as_str().to_string())
}
24 changes: 12 additions & 12 deletions frontend/app/[locale]/layout.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ import "@fontsource/material-symbols-outlined";

type Props = {
children: React.ReactNode;
params: { locale: string };
params: Promise<{ locale: string }>;
};

export async function generateMetadata({ params }: Props): Promise<Metadata> {
Expand All @@ -40,11 +40,10 @@ export function generateStaticParams() {
}

export default async function LocaleLayout({ children, params }: Props) {
// КЛЮЧЕВОЕ ИЗМЕНЕНИЕ: НЕ деструктурируем в сигнатуре.
const { locale } = await params;

unstable_setRequestLocale(locale);
const messages = await getMessages();
const messages = await getMessages({ locale });

return (
<html lang={locale} suppressHydrationWarning>
Expand All @@ -60,15 +59,16 @@ export default async function LocaleLayout({ children, params }: Props) {
__html: `
(function(m,e,t,r,i,k,a){m[i]=m[i]||function(){(m[i].a=m[i].a||[]).push(arguments)};
m[i].l=1*new Date();
for (var j = 0; j < document.scripts.length; j++) {if (document.scripts[j].src === r) { return; }}\\n
k=e.createElement(t),a=e.getElementsByTagName(t)[0],k.async=1,k.src=r,a.parentNode.insertBefore(k,a)})\\n
(window, document, "script", "https://mc.yandex.ru/metrika/tag.js", "ym");\\n
ym(102356747, "init", {\\n
clickmap:true,\\n
trackLinks:true,\\n
accurateTrackBounce:true,\\n
webvisor:true\\n
});\\n
for (var j = 0; j < document.scripts.length; j++) {if (document.scripts[j].src === r) { return; }}
k=e.createElement(t),a=e.getElementsByTagName(t)[0],k.async=1,k.src=r,a.parentNode.insertBefore(k,a)})
(window, document, "script", "https://mc.yandex.ru/metrika/tag.js", "ym");

ym(102356747, "init", {
clickmap:true,
trackLinks:true,
accurateTrackBounce:true,
webvisor:true
});
`
}}
/>
Expand Down
Loading