
Python Celery Concepts


1️⃣ Basic Celery Setup

Celery is a distributed task queue for running work asynchronously: tasks are sent to a message broker and executed by worker processes.

# tasks.py
from celery import Celery

# Celery μΈμŠ€ν„΄μŠ€ 생성
app = Celery('tasks',
             broker='redis://localhost:6379/0',
             backend='redis://localhost:6379/0')

# Define a basic task
@app.task
def add(x: int, y: int) -> int:
    return x + y

# νƒœμŠ€ν¬ μ„€μ •
app.conf.update(
    task_serializer='json',
    accept_content=['json'],
    result_serializer='json',
    timezone='Asia/Seoul',
    enable_utc=True,
)

βœ… Features:

  • Redis/RabbitMQ broker support
  • Asynchronous task processing
  • Flexible configuration management
  • Distributed task processing
  • Result storage in a backend
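
A minimal usage sketch, assuming a local Redis is running and a worker has been started with celery -A tasks worker --loglevel=INFO:

from tasks import add

# .delay() enqueues the task and returns an AsyncResult immediately
result = add.delay(4, 6)

# .get() blocks until the worker stores the result in the backend
print(result.get(timeout=10))  # 10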


2️⃣ Defining and Running Tasks

μž‘μ—…μ„ νƒœμŠ€ν¬λ‘œ μ •μ˜ν•˜κ³  λΉ„λ™κΈ°μ μœΌλ‘œ μ‹€ν–‰ν•˜λŠ” 방법이닀.

from celery import Task
from typing import Any, Dict

class CustomTask(Task):
    def on_success(self, retval: Any, task_id: str, args: tuple, kwargs: Dict) -> None:
        print(f"Task {task_id} completed successfully")
    
    def on_failure(self, exc: Exception, task_id: str, args: tuple, kwargs: Dict, einfo: Any) -> None:
        print(f"Task {task_id} failed: {exc}")

@app.task(base=CustomTask, bind=True)
def process_data(self, data: Dict) -> Dict:
    try:
        # data processing logic
        result = {'processed': data['value'] * 2}
        return result
    except Exception as e:
        raise self.retry(exc=e, countdown=60)  # retry after 1 minute

# νƒœμŠ€ν¬ 체이닝
from celery import chain

@app.task
def validate_data(data: Dict) -> Dict:
    if 'value' not in data:
        raise ValueError("Missing 'value' key")
    return data

# νƒœμŠ€ν¬ 체인 μ‹€ν–‰
task_chain = chain(
    validate_data.s({'value': 10}),
    process_data.s()
)
result = task_chain()

βœ… Features:

  • Custom task classes
  • Task chaining
  • Error handling and retries
  • Task state tracking
  • Argument passing via signatures
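
Individual tasks can also be launched with delay()/apply_async(); a short sketch of inspecting the returned AsyncResult (task names come from the snippet above):

# Enqueue a single task and poll its state
async_result = process_data.delay({'value': 21})

print(async_result.id)     # task UUID
print(async_result.state)  # PENDING / STARTED / RETRY / SUCCESS / FAILURE

if async_result.ready():
    print(async_result.result)  # {'processed': 42}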


3️⃣ Periodic Task Scheduling

How to configure tasks that run automatically on a schedule, driven by Celery beat.

from celery.schedules import crontab

app.conf.beat_schedule = {
    'daily-cleanup': {
        'task': 'tasks.cleanup',
        'schedule': crontab(hour=0, minute=0),  # every day at midnight
    },
    'hourly-check': {
        'task': 'tasks.health_check',
        'schedule': 3600.0,  # every hour (in seconds)
        'args': ('system',)
    }
}

@app.task
def cleanup():
    """일일 정리 μž‘μ—…"""
    # 정리 μž‘μ—… μˆ˜ν–‰
    pass

@app.task
def health_check(system: str):
    """μ‹œμŠ€ν…œ μƒνƒœ 확인"""
    # μƒνƒœ 확인 둜직
    pass

βœ… Features:

  • Crontab scheduling
  • Periodic task execution
  • Argument passing support
  • Multiple time units supported
  • Dynamic schedule changes
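
Beat only schedules tasks; a worker must consume them. Typical invocations:

# Run the scheduler and a worker as separate processes:
celery -A tasks beat --loglevel=INFO
celery -A tasks worker --loglevel=INFO

# For local development only, a single process can embed beat:
celery -A tasks worker --beat --loglevel=INFO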


4️⃣ Error Handling and Retries

νƒœμŠ€ν¬ μ‹€ν–‰ 쀑 λ°œμƒν•˜λŠ” 였λ₯˜λ₯Ό μ²˜λ¦¬ν•˜κ³  μž¬μ‹œλ„ν•˜λŠ” λ©”μ»€λ‹ˆμ¦˜μ΄λ‹€.

from celery.exceptions import Ignore
from typing import Dict

@app.task(
    bind=True,
    max_retries=3,
    default_retry_delay=60,
    autoretry_for=(ConnectionError,),
    retry_backoff=True
)
def process_with_retry(self, data: Dict) -> Dict:
    try:
        # processing logic (external_api_call is a placeholder for a real call)
        result = external_api_call(data)
        return result
    except ConnectionError as exc:
        # retry manually (also covered by autoretry_for above)
        raise self.retry(exc=exc)
    except Exception as exc:
        # record a custom FAILURE state, then raise Ignore so the return
        # value does not overwrite the state with SUCCESS
        self.update_state(state='FAILURE', meta={'error': str(exc)})
        raise Ignore()

class TaskManager:
    def __init__(self):
        self.tasks = {}
    
    def register_task(self, task_id: str, task):
        self.tasks[task_id] = task
    
    def revoke_task(self, task_id: str):
        if task_id in self.tasks:
            app.control.revoke(task_id, terminate=True)
            del self.tasks[task_id]

βœ… Features:

  • Automatic retry configuration
  • Fine-grained error handling
  • Task state management
  • Exponential backoff support
  • Task revocation


5️⃣ Worker Configuration and Monitoring

Celery μ›Œμ»€μ˜ μ„€μ •κ³Ό λͺ¨λ‹ˆν„°λ§μ„ μœ„ν•œ 방법이닀.

from celery import Task
from celery.signals import worker_ready, worker_shutting_down
from prometheus_client import Counter, Gauge

# Define Prometheus metrics
task_counter = Counter('celery_tasks_total', 'Total number of Celery tasks')
task_latency = Gauge('celery_task_latency_seconds', 'Task processing latency')

@worker_ready.connect
def worker_ready_handler(**kwargs):
    print("Celery worker is ready!")

@worker_shutting_down.connect
def worker_shutdown_handler(**kwargs):
    print("Celery worker is shutting down...")

class MonitoredTask(Task):
    def __call__(self, *args, **kwargs):
        task_counter.inc()
        with task_latency.time():
            return super().__call__(*args, **kwargs)

@app.task(base=MonitoredTask)
def monitored_task():
    # do the work
    pass

βœ… Features:

  • Worker lifecycle management
  • Metrics collection
  • Monitoring integration
  • Signal handling
  • Worker control commands
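
For Prometheus to scrape these metrics, each worker needs to expose an HTTP endpoint; a minimal sketch using prometheus_client's start_http_server (the port is an arbitrary choice):

from celery.signals import worker_ready
from prometheus_client import start_http_server

@worker_ready.connect
def start_metrics_server(**kwargs):
    # Expose /metrics so Prometheus can scrape it. Note: with the prefork
    # pool, tasks run in child processes, so this simple setup fits the
    # solo/threads pools; prefork needs prometheus_client multiprocess mode.
    start_http_server(9540)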


6️⃣ Practical Examples

Let's walk through examples of real applications built with Celery.

Email delivery system:

from celery import group
from typing import List, Dict
import smtplib
from email.mime.text import MIMEText

@app.task(rate_limit='100/m')  # at most 100 per minute per worker
def send_email(to_email: str, subject: str, body: str) -> bool:
    try:
        msg = MIMEText(body)
        msg['Subject'] = subject
        msg['To'] = to_email
        
        with smtplib.SMTP('smtp.gmail.com', 587) as server:
            server.starttls()
            server.login('[email protected]', 'password')  # load real credentials from env/secret storage
            server.send_message(msg)
        return True
    except Exception as e:
        print(f"Failed to send email: {e}")
        return False

def send_bulk_emails(email_list: List[Dict]):
    # send the emails in parallel
    tasks = group(
        send_email.s(
            email['to'],
            email['subject'],
            email['body']
        ) for email in email_list
    )
    result = tasks.apply_async()
    return result
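
A usage sketch (addresses are placeholders); group returns a GroupResult whose get() collects each task's boolean return value:

emails = [
    {'to': '[email protected]', 'subject': 'Hi', 'body': 'Hello A'},
    {'to': '[email protected]', 'subject': 'Hi', 'body': 'Hello B'},
]

group_result = send_bulk_emails(emails)
print(group_result.get(timeout=60))  # e.g. [True, True]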

File processing system:

import os
from PIL import Image
from typing import Tuple

@app.task
def process_image(image_path: str, size: Tuple[int, int]) -> str:
    try:
        with Image.open(image_path) as img:
            # resize in place, preserving aspect ratio
            img.thumbnail(size)
            
            # build the output path
            filename = os.path.basename(image_path)
            output_path = f"processed_{filename}"
            
            # save the processed image
            img.save(output_path)
            return output_path
    except Exception as e:
        print(f"Image processing failed: {e}")
        raise

class ImageProcessor:
    def __init__(self):
        self.processing_queue = []
    
    def add_image(self, image_path: str, size: Tuple[int, int]):
        task = process_image.delay(image_path, size)
        self.processing_queue.append(task)
    
    def get_results(self):
        return [task.get() for task in self.processing_queue if task.ready()]
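
A usage sketch (file paths are illustrative):

processor = ImageProcessor()
processor.add_image('photos/cat.jpg', (128, 128))
processor.add_image('photos/dog.jpg', (128, 128))

# Later, collect results from tasks that have already finished
print(processor.get_results())  # e.g. ['processed_cat.jpg', 'processed_dog.jpg']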

Data processing pipeline:

from celery import chain, chord
import pandas as pd
import numpy as np
from typing import List, Dict, Any

@app.task
def extract_data(source: str) -> pd.DataFrame:
    """데이터 μ†ŒμŠ€μ—μ„œ 데이터 μΆ”μΆœ"""
    if source.endswith('.csv'):
        return pd.read_csv(source)
    elif source.endswith('.json'):
        return pd.read_json(source)
    else:
        raise ValueError(f"Unsupported source format: {source}")

@app.task
def transform_data(df: pd.DataFrame) -> pd.DataFrame:
    """데이터 λ³€ν™˜ 및 μ „μ²˜λ¦¬"""
    # 결츑치 처리
    df = df.fillna(0)
    
    # feature engineering
    if 'date' in df.columns:
        df['year'] = pd.to_datetime(df['date']).dt.year
        df['month'] = pd.to_datetime(df['date']).dt.month
    
    # μ΄μƒμΉ˜ 처리
    for col in df.select_dtypes(include=[np.number]).columns:
        q1 = df[col].quantile(0.25)
        q3 = df[col].quantile(0.75)
        iqr = q3 - q1
        df[col] = df[col].clip(q1 - 1.5 * iqr, q3 + 1.5 * iqr)
    
    return df

@app.task
def load_data(df: pd.DataFrame, destination: str) -> bool:
    """처리된 데이터 μ €μž₯"""
    if destination.endswith('.csv'):
        df.to_csv(destination, index=False)
    elif destination.endswith('.json'):
        df.to_json(destination, orient='records')
    else:
        raise ValueError(f"Unsupported destination format: {destination}")
    return True

@app.task
def notify_completion(results: List[Any]) -> Dict:
    """λͺ¨λ“  μž‘μ—… μ™„λ£Œ ν›„ μ•Œλ¦Ό"""
    return {
        'status': 'completed',
        'processed_files': len(results),
        'success_count': sum(1 for r in results if r)
    }

def process_data_pipeline(sources: List[str], destination_prefix: str):
    """ETL νŒŒμ΄ν”„λΌμΈ μ‹€ν–‰"""
    # 병렬 μΆ”μΆœ 및 λ³€ν™˜ μž‘μ—…
    tasks = []
    for i, source in enumerate(sources):
        destination = f"{destination_prefix}_{i}.csv"
        
        # one ETL chain per source
        pipeline = chain(
            extract_data.s(source),
            transform_data.s(),
            load_data.s(destination)
        )
        tasks.append(pipeline)
    
    # use a chord to send the notification after every pipeline completes
    workflow = chord(tasks)(notify_completion.s())
    return workflow
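
One caveat with this pipeline as written: with the JSON serializer configured in section 1, pandas.DataFrame objects cannot be passed between tasks. A common workaround is to pass file paths between stages, or to allow pickle explicitly (a sketch under that assumption; pickle should only be enabled on a trusted broker network):

# Option A (assumes a trusted network): allow pickle so DataFrames serialize
app.conf.update(
    task_serializer='pickle',
    result_serializer='pickle',
    accept_content=['json', 'pickle'],
)

# Option B: have each task write its output to disk/object storage and
# pass only the path (a JSON-serializable string) to the next task.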

βœ… Features:

  • Parallel processing of bulk work
  • Task rate limiting
  • Complex workflow composition
  • Event-driven processing
  • Efficient resource usage
  • Scalable design

7️⃣ Advanced Celery Patterns

Advanced Celery patterns for complex distributed systems.

Priority queue setup:

# Route tasks to dedicated queues by priority
app.conf.task_routes = {
    'tasks.high_priority': {'queue': 'high_priority'},
    'tasks.default_priority': {'queue': 'default'},
    'tasks.low_priority': {'queue': 'low_priority'},
}

@app.task(queue='high_priority')
def high_priority(data):
    """κΈ΄κΈ‰ μ²˜λ¦¬κ°€ ν•„μš”ν•œ μž‘μ—…"""
    # κ³ μš°μ„ μˆœμœ„ μž‘μ—… 처리
    return "High priority task completed"

@app.task(queue='default')
def default_priority(data):
    """일반 μž‘μ—…"""
    # 일반 μž‘μ—… 처리
    return "Default task completed"

@app.task(queue='low_priority')
def low_priority(data):
    """λ°°κ²½ μž‘μ—…"""
    # μ €μš°μ„ μˆœμœ„ μž‘μ—… 처리
    return "Low priority task completed"

# Example worker start command (consume all three queues):
# celery -A tasks worker -Q high_priority,default,low_priority -l info
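
The queue can also be overridden per call with apply_async (the payload here is illustrative):

# Route one specific call to the high-priority queue
low_priority.apply_async(args=({'id': 42},), queue='high_priority')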

Worker pool tuning:

# celeryconfig.py
worker_concurrency = 8  # match the number of CPU cores
worker_prefetch_multiplier = 1  # fetch one task at a time per worker process
worker_max_tasks_per_child = 1000  # recycle children to limit memory leaks
task_time_limit = 3600  # hard limit: 1 hour
task_soft_time_limit = 3000  # soft limit: 50 minutes

# Per-task tuning via annotations (e.g. rate limits)
task_annotations = {
    'tasks.io_intensive': {'rate_limit': '100/m'},
}
# Note: the execution pool (prefork, gevent, solo, ...) is chosen per worker
# with `celery worker -P <pool>`, not per task; CPU-bound and I/O-bound tasks
# are usually served by separate workers on dedicated queues.

# λ°λ“œλ ˆν„° 큐 μ„€μ •
task_reject_on_worker_lost = True
task_acks_late = True

Canvas workflows:

from celery import group, chain, chord

@app.task
def analysis_task(data_chunk):
    # analyze one chunk of data
    return {'chunk_id': data_chunk['id'], 'result': len(data_chunk['data'])}

@app.task
def reduce_results(results):
    # aggregate the per-chunk results
    total = sum(result['result'] for result in results)
    return {'total_count': total}

def map_reduce_workflow(data_chunks):
    """λ§΅λ¦¬λ“€μŠ€ νŒ¨ν„΄ κ΅¬ν˜„"""
    # λ§΅ 단계 (병렬 처리)
    map_tasks = group(analysis_task.s(chunk) for chunk in data_chunks)
    
    # reduce phase (aggregates the results)
    workflow = chord(map_tasks)(reduce_results.s())
    
    return workflow

# 동적 μ›Œν¬ν”Œλ‘œμš° 생성
def create_dynamic_workflow(initial_data):
    """μ‹€ν–‰ μ‹œμ μ— λ™μ μœΌλ‘œ μ›Œν¬ν”Œλ‘œμš° 생성"""
    if initial_data['type'] == 'simple':
        return simple_task.s(initial_data)
    elif initial_data['type'] == 'complex':
        # build up a longer chain
        tasks = [initial_task.s(initial_data)]
        
        # append tasks conditionally
        if initial_data.get('needs_validation'):
            tasks.append(validation_task.s())
        
        if initial_data.get('needs_processing'):
            tasks.append(processing_task.s())
        
        # final result formatting
        tasks.append(format_result.s())
        
        # compose the chain dynamically
        return chain(*tasks)
    else:
        raise ValueError(f"Unknown workflow type: {initial_data['type']}")

βœ… Features:

  • Priority-based task processing
  • Per-resource worker tuning
  • Complex workflow patterns
  • Map-reduce implementation
  • Dynamic workflow composition

8️⃣ Production Environment Setup

ν”„λ‘œλ•μ…˜ ν™˜κ²½μ—μ„œ Celeryλ₯Ό μ•ˆμ •μ μœΌλ‘œ μš΄μ˜ν•˜κΈ° μœ„ν•œ ꡬ성이닀.

κ°μ‹œμ™€ μžλ™ μž¬μ‹œμž‘:

# supervisord.conf
[program:celery]
command=/path/to/venv/bin/celery -A tasks worker --loglevel=INFO
directory=/path/to/project
user=celery
numprocs=1
stdout_logfile=/var/log/celery/worker.log
stderr_logfile=/var/log/celery/worker.log
autostart=true
autorestart=true
startsecs=10
stopasgroup=true
priority=999

[program:celerybeat]
command=/path/to/venv/bin/celery -A tasks beat --loglevel=INFO
directory=/path/to/project
user=celery
numprocs=1
stdout_logfile=/var/log/celery/beat.log
stderr_logfile=/var/log/celery/beat.log
autostart=true
autorestart=true
startsecs=10
stopasgroup=true
priority=999

[group:celery-cluster]
programs=celery,celerybeat
priority=999

ν”„λ‘œλ•μ…˜ μ„€μ •:

# celeryconfig_prod.py
import os
from kombu import Queue
from kombu.common import Broadcast

# Broker and result backend settings
broker_url = os.environ.get('CELERY_BROKER_URL', 'redis://redis:6379/0')
result_backend = os.environ.get('CELERY_RESULT_BACKEND', 'redis://redis:6379/0')

# Serialization (stick to JSON for security)
task_serializer = 'json'
accept_content = ['json']
result_serializer = 'json'
enable_utc = True

# Queue definitions
task_default_queue = 'default'
task_queues = (
    Queue('default', routing_key='task.#'),
    Queue('high_priority', routing_key='high_task.#'),
    Queue('low_priority', routing_key='low_task.#'),
    Broadcast('broadcast'),  # broadcast queue: delivered to every worker
)

# λΌμš°νŒ… μ„€μ •
task_routes = {
    'tasks.critical_task': {'queue': 'high_priority', 'routing_key': 'high_task.critical'},
    'tasks.background_task': {'queue': 'low_priority', 'routing_key': 'low_task.background'},
}

# Worker settings
worker_concurrency = int(os.environ.get('CELERY_CONCURRENCY', 8))
worker_prefetch_multiplier = 1
worker_max_tasks_per_child = 1000

# Logging
worker_hijack_root_logger = False
worker_log_format = '[%(asctime)s: %(levelname)s/%(processName)s] %(message)s'
worker_task_log_format = '[%(asctime)s: %(levelname)s/%(processName)s][%(task_name)s(%(task_id)s)] %(message)s'

# μ—λŸ¬ 처리
task_acks_late = True
task_reject_on_worker_lost = True
task_acks_on_failure_or_timeout = False

# Monitoring (emit task events for Flower etc.)
worker_send_task_events = True
task_send_sent_event = True
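
A sketch of loading this module into the application (assuming celeryconfig_prod.py is on the Python path):

# tasks.py
from celery import Celery

app = Celery('tasks')
app.config_from_object('celeryconfig_prod')  # pull in all settings above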

Docker Compose setup:

# docker-compose.yml
version: '3.8'

services:
  redis:
    image: redis:6-alpine
    ports:
      - "6379:6379"
    volumes:
      - redis-data:/data
    restart: unless-stopped
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 30s
      timeout: 10s
      retries: 3

  celery-worker:
    build: .
    command: celery -A tasks worker --loglevel=INFO
    volumes:
      - .:/app
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
      - CELERY_CONCURRENCY=8
    depends_on:
      - redis
    restart: unless-stopped

  celery-beat:
    build: .
    command: celery -A tasks beat --loglevel=INFO
    volumes:
      - .:/app
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
    depends_on:
      - redis
    restart: unless-stopped

  flower:
    build: .
    command: celery -A tasks flower --port=5555
    ports:
      - "5555:5555"
    environment:
      - CELERY_BROKER_URL=redis://redis:6379/0
      - CELERY_RESULT_BACKEND=redis://redis:6379/0
    depends_on:
      - redis
      - celery-worker
    restart: unless-stopped

volumes:
  redis-data:

βœ… Features:

  • High-availability setup
  • Containerized deployment
  • Supervision and logging
  • Queue and routing optimization
  • Scalable architecture
  • Security-conscious settings
  • Resource management

Key Tips

βœ… Best practices:

  • Guarantee idempotency: design tasks so that running them more than once produces the same result (a minimal sketch follows this list)
  • Separate queues: split queues by workload characteristics to prevent resource contention
  • Data serialization: keep task payloads small and serializable
  • Monitor execution time: watch long-running tasks and set time limits
  • Memory management: prevent leaks with worker_max_tasks_per_child
  • Retry strategy: retry transient errors with exponential backoff; route permanent failures to a dead-letter queue
  • Handle task revocation: implement resource-cleanup logic for cancelled tasks
  • Logging strategy: use structured logging so tasks are easy to trace
  • Backend choice: pick a result backend that matches the data volume (Redis, RabbitMQ, a database)
  • Broker high availability: cluster the broker for critical systems
  • Worker scaling: adjust the number of workers dynamically with load
  • Use monitoring tools: watch the system in real time with Flower, Prometheus, etc.
  • Periodic cleanup: purge completed task results regularly
  • Task timeouts: set a timeout on every task
  • Security: require authentication on the broker and result backend
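
A minimal idempotency sketch, assuming the app from section 1 and a reachable Redis instance (task and key names are illustrative):

import redis

r = redis.Redis()

@app.task(bind=True, acks_late=True)
def charge_order(self, order_id: str) -> str:
    # SET with nx=True succeeds only if the key does not exist yet,
    # so a re-delivered task becomes a harmless no-op
    if not r.set(f'charged:{order_id}', 1, nx=True, ex=86400):
        return 'already processed'
    # ... perform the side effect exactly once here ...
    return 'charged'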


