KR_MySQL - somaz94/python-study GitHub Wiki
MySQL์ ๊ฐ์ฅ ๋๋ฆฌ ์ฌ์ฉ๋๋ ์คํ์์ค ๊ด๊ณํ ๋ฐ์ดํฐ๋ฒ ์ด์ค์ด๋ค.
import mysql.connector

# Database connection settings.
config = {
    'user': 'username',
    'password': 'password',
    'host': 'localhost',
    'database': 'mydatabase',
    'raise_on_warnings': True,
    'charset': 'utf8mb4',
    'collation': 'utf8mb4_unicode_ci',
    'autocommit': False,
}

conn = mysql.connector.connect(**config)
cursor = conn.cursor(dictionary=True)  # return result rows as dictionaries
# Manage connection credentials via environment variables.
import os
from dotenv import load_dotenv

load_dotenv()  # load variables from a .env file

# Same keys as the inline config, sourced from MYSQL_* env vars.
db_config = {key: os.getenv(f'MYSQL_{key.upper()}')
             for key in ('user', 'password', 'host', 'database')}
secure_conn = mysql.connector.connect(**db_config)
# Connection health check.
def check_connection():
    """Return True when the shared connection can execute a query.

    Prints the server version on success and the driver error on failure.
    Fix: the original printed the success message in a ``finally`` block,
    so it appeared even after a failed query as long as the connection
    was still open; success is now reported only on the success path.
    """
    try:
        cursor.execute("SELECT VERSION()")
        version = cursor.fetchone()
        # dictionary=True cursors key the row by the column expression.
        print(f"Database version: {version['VERSION()']}")
    except mysql.connector.Error as err:
        print(f"์๋ฌ: {err}")
        return False
    else:
        if conn.is_connected():
            print("์ฐ๊ฒฐ ์ฑ๊ณต!")
        return True
# Connection shutdown.
def close_connection():
    """Close the module-level cursor and connection if still open."""
    if not conn.is_connected():
        return
    cursor.close()
    conn.close()
    print("MySQL ์ฐ๊ฒฐ์ด ๋ซํ์ต๋๋ค.")
โ
ํน์ง:
- ๊ด๊ณํ ๋ฐ์ดํฐ๋ฒ ์ด์ค ๊ด๋ฆฌ ์์คํ (RDBMS)
- SQL ๊ธฐ๋ฐ ์ฟผ๋ฆฌ ์ธ์ด
- ๋ค์ํ ๋ฐ์ดํฐ ํ์ ์ง์
- ํธ๋์ญ์ ๋ฐ ACID ์์ฑ ๋ณด์ฅ
- ์ธ๋ฑ์ฑ ๋ฐ ์ต์ ํ ๊ธฐ๋ฅ
- ๋ค์ค ์ฌ์ฉ์ ์ ๊ทผ ์ง์
- ๋ณด์ ๋ฐ ๊ถํ ๊ด๋ฆฌ
- ํ์ฅ์ฑ ๋ฐ ๋ณต์ ๊ธฐ๋ฅ
๋ฐ์ดํฐ๋ฒ ์ด์ค์ ๊ธฐ๋ณธ ์์ ์ธ ์์ฑ(Create), ์กฐํ(Read), ์์ (Update), ์ญ์ (Delete) ๊ธฐ๋ฅ์ ๊ตฌํํ๋ค.
# Table creation.
def create_tables():
    """Create the users and posts tables (no-op when they already exist)."""
    ddl_statements = (
        '''
        CREATE TABLE IF NOT EXISTS users (
            id INT AUTO_INCREMENT PRIMARY KEY,
            name VARCHAR(100) NOT NULL,
            email VARCHAR(100) UNIQUE,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
            status ENUM('active', 'inactive', 'suspended') DEFAULT 'active',
            INDEX idx_email (email),
            INDEX idx_status (status)
        )
        ''',
        '''
        CREATE TABLE IF NOT EXISTS posts (
            id INT AUTO_INCREMENT PRIMARY KEY,
            user_id INT NOT NULL,
            title VARCHAR(200) NOT NULL,
            content TEXT,
            created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (user_id) REFERENCES users(id) ON DELETE CASCADE,
            FULLTEXT INDEX idx_content (title, content)
        )
        ''',
    )
    for ddl in ddl_statements:
        cursor.execute(ddl)
    conn.commit()
# Data insertion (Create).
def insert_user(name, email, status='active'):
    """Insert one user; return the new row id, or None on failure."""
    sql = '''
        INSERT INTO users (name, email, status)
        VALUES (%s, %s, %s)
    '''
    try:
        cursor.execute(sql, (name, email, status))
        conn.commit()
        return cursor.lastrowid
    except mysql.connector.Error as err:
        print(f"์๋ฌ ๋ฐ์: {err}")
        conn.rollback()
        return None
# Bulk insertion of multiple records.
def insert_many_users(users_data):
    """Insert (name, email, status) tuples in one batch; return the count."""
    sql = '''
        INSERT INTO users (name, email, status)
        VALUES (%s, %s, %s)
    '''
    try:
        cursor.executemany(sql, users_data)
        conn.commit()
        return cursor.rowcount
    except mysql.connector.Error as err:
        print(f"์๋ฌ ๋ฐ์: {err}")
        conn.rollback()
        return 0
# Data retrieval (Read).
def get_user(user_id):
    """Fetch a single user row by primary key (None when absent)."""
    cursor.execute("SELECT * FROM users WHERE id = %s", (user_id,))
    return cursor.fetchone()
# Paged retrieval of multiple records.
def get_users(limit=10, offset=0, status=None):
    """Return a page of users, newest first, optionally filtered by status."""
    clauses = ["SELECT * FROM users"]
    params = []
    if status:
        clauses.append("WHERE status = %s")
        params.append(status)
    clauses.append("ORDER BY created_at DESC LIMIT %s OFFSET %s")
    params += [limit, offset]
    cursor.execute(" ".join(clauses), params)
    return cursor.fetchall()
# Data retrieval with a JOIN.
def get_user_with_posts(user_id):
    """Return a user dict with a nested 'posts' list, or None when absent."""
    cursor.execute('''
        SELECT u.*, p.id as post_id, p.title, p.content, p.created_at as post_created_at
        FROM users u
        LEFT JOIN posts p ON u.id = p.user_id
        WHERE u.id = %s
    ''', (user_id,))
    rows = cursor.fetchall()
    if not rows:
        return None
    first = rows[0]
    user = {field: first[field]
            for field in ('id', 'name', 'email', 'created_at', 'status')}
    # LEFT JOIN yields post_id NULL when the user has no posts; skip those.
    user['posts'] = [
        {
            'id': row['post_id'],
            'title': row['title'],
            'content': row['content'],
            'created_at': row['post_created_at'],
        }
        for row in rows if row['post_id']
    ]
    return user
# Data modification (Update).
def update_user(user_id, name=None, email=None, status=None):
    """Update the provided fields of a user; True when a row changed.

    Fields are written when the argument is not None, so an empty string
    can now be stored deliberately (the original truthiness test silently
    skipped empty values). Passing only None arguments returns False
    without touching the database, as before.
    """
    changes = {'name': name, 'email': email, 'status': status}
    update_fields = [f"{col} = %s" for col, val in changes.items() if val is not None]
    params = [val for val in changes.values() if val is not None]
    if not update_fields:
        return False
    # Column names are fixed above; only values go through placeholders.
    query = f"UPDATE users SET {', '.join(update_fields)} WHERE id = %s"
    params.append(user_id)
    try:
        cursor.execute(query, params)
        conn.commit()
        return cursor.rowcount > 0
    except mysql.connector.Error as err:
        print(f"์๋ฌ ๋ฐ์: {err}")
        conn.rollback()
        return False
# Data deletion (Delete).
def delete_user(user_id):
    """Delete one user by id; True when a row was removed."""
    try:
        cursor.execute("DELETE FROM users WHERE id = %s", (user_id,))
        conn.commit()
        return cursor.rowcount > 0
    except mysql.connector.Error as err:
        print(f"์๋ฌ ๋ฐ์: {err}")
        conn.rollback()
        return False
# Conditional bulk deletion.
def delete_inactive_users(days=90):
    """Remove users inactive for more than ``days`` days; return the count."""
    sql = '''
        DELETE FROM users
        WHERE status = 'inactive'
        AND updated_at < DATE_SUB(NOW(), INTERVAL %s DAY)
    '''
    try:
        cursor.execute(sql, (days,))
        conn.commit()
        return cursor.rowcount
    except mysql.connector.Error as err:
        print(f"์๋ฌ ๋ฐ์: {err}")
        conn.rollback()
        return 0
# Usage example for the CRUD helpers.
def crud_example():
    """Walk through table creation and each CRUD operation."""
    create_tables()

    # Create a single user.
    user_id = insert_user('ํ๊ธธ๋', '[email protected]')
    print(f"์ถ๊ฐ๋ ์ฌ์ฉ์ ID: {user_id}")

    # Bulk-insert several users.
    sample_users = [
        ('๊น์ฒ ์', '[email protected]', 'active'),
        ('์ด์ํฌ', '[email protected]', 'active'),
        ('๋ฐ์ง์ฑ', '[email protected]', 'inactive'),
    ]
    inserted = insert_many_users(sample_users)
    print(f"{inserted}๋ช ์ ์ฌ์ฉ์๊ฐ ์ถ๊ฐ๋์์ต๋๋ค.")

    # Read back the first user.
    user = get_user(user_id)
    print(f"์กฐํ๋ ์ฌ์ฉ์: {user['name']} ({user['email']})")

    # Update, then prune long-inactive users.
    update_user(user_id, name='ํ๊ธธ์', status='active')
    deleted = delete_inactive_users(30)
    print(f"{deleted}๋ช ์ ๋นํ์ฑ ์ฌ์ฉ์๊ฐ ์ญ์ ๋์์ต๋๋ค.")
โ
ํน์ง:
- CRUD ์ฐ์ฐ์ ์์ ํ ๊ตฌํ
- ํ๋ผ๋ฏธํฐํ๋ ์ฟผ๋ฆฌ๋ก SQL ์ธ์ ์ ๋ฐฉ์ง
- ๋ค์ค ๋ ์ฝ๋ ์ฒ๋ฆฌ ๊ธฐ๋ฅ
- ์กฐ์ธ์ ํตํ ๊ด๊ณํ ๋ฐ์ดํฐ ์กฐํ
- ๋์ ์ฟผ๋ฆฌ ์์ฑ
- ํธ๋์ญ์ ๊ด๋ฆฌ ๋ฐ ๋กค๋ฐฑ
- ์๋ฌ ์ฒ๋ฆฌ ๋ฐ ์์ธ ๊ด๋ฆฌ
- ์ธ๋ฑ์ค ์ค์ ๋ฐ ํ์ฉ
๋ณต์กํ ํธ๋์ญ์ ์ฒ๋ฆฌ, ์คํ ์ด๋ ํ๋ก์์ , ๊ณ ๊ธ ์ฟผ๋ฆฌ ๊ธฐ๋ฒ ๋ฑ์ ํ์ฉํ๋ค.
# Transaction-based money transfer.
def transfer_money(from_account, to_account, amount):
    """Atomically move ``amount`` from one account to another.

    Runs inside a single transaction: locks the source row with
    SELECT ... FOR UPDATE, checks the balance, debits, locks and
    verifies the destination, credits, and records the transfer.
    Returns True on success; rolls back and returns False on any error.
    NOTE(review): the two accounts are locked in argument order, so two
    concurrent opposite transfers could deadlock — confirm acceptable.
    """
    try:
        cursor.execute('START TRANSACTION')
        # Lock the source row and read its balance.
        cursor.execute('''
            SELECT balance FROM accounts WHERE id = %s FOR UPDATE
        ''', (from_account,))
        from_balance = cursor.fetchone()
        if not from_balance or from_balance['balance'] < amount:
            raise Exception("์์ก ๋ถ์กฑ ๋๋ ๊ณ์ข ์์")
        # Debit the source account.
        cursor.execute('''
            UPDATE accounts
            SET balance = balance - %s,
                updated_at = NOW(),
                transaction_count = transaction_count + 1
            WHERE id = %s
        ''', (amount, from_account))
        # Lock the destination row and verify it exists.
        cursor.execute('''
            SELECT id FROM accounts WHERE id = %s FOR UPDATE
        ''', (to_account,))
        if not cursor.fetchone():
            raise Exception("๋์ ๊ณ์ข ์์")
        # Credit the destination account.
        cursor.execute('''
            UPDATE accounts
            SET balance = balance + %s,
                updated_at = NOW(),
                transaction_count = transaction_count + 1
            WHERE id = %s
        ''', (amount, to_account))
        # Record the transfer in the ledger table.
        cursor.execute('''
            INSERT INTO transactions
            (from_account_id, to_account_id, amount, transaction_type, description)
            VALUES (%s, %s, %s, 'transfer', '๊ณ์ข ์ด์ฒด')
        ''', (from_account, to_account, amount))
        cursor.execute('COMMIT')
        return True
    except Exception as e:
        # Any failure above undoes the whole transfer.
        cursor.execute('ROLLBACK')
        print(f"๊ฑฐ๋ ์คํจ: {e}")
        return False
# Create (or replace) the application's stored procedures.
def create_stored_procedures():
    """Define the update_user_status and cleanup_inactive_users procedures."""
    # Procedure: set a user's status and report the affected row count.
    cursor.execute('''
        DROP PROCEDURE IF EXISTS update_user_status
    ''')
    cursor.execute('''
        CREATE PROCEDURE update_user_status(
            IN user_id INT,
            IN new_status VARCHAR(20)
        )
        BEGIN
            UPDATE users
            SET status = new_status,
                updated_at = NOW()
            WHERE id = user_id;
            SELECT ROW_COUNT() AS affected_rows;
        END
    ''')
    # Procedure: delete users inactive beyond a day threshold, in its own
    # transaction, and report how many were removed.
    cursor.execute('''
        DROP PROCEDURE IF EXISTS cleanup_inactive_users
    ''')
    cursor.execute('''
        CREATE PROCEDURE cleanup_inactive_users(
            IN days_threshold INT
        )
        BEGIN
            DECLARE affected INT;
            START TRANSACTION;
            DELETE FROM users
            WHERE status = 'inactive'
            AND updated_at < DATE_SUB(NOW(), INTERVAL days_threshold DAY);
            SET affected = ROW_COUNT();
            COMMIT;
            SELECT affected AS deleted_users;
        END
    ''')
    conn.commit()
# Stored-procedure invocation.
def call_stored_procedure(user_id, new_status):
    """Run update_user_status and report whether a row was changed.

    Returns False on a driver error, and — fixed here — also when the
    procedure produces no result set; the original fell off the loop and
    implicitly returned None in that case.
    """
    try:
        cursor.callproc('update_user_status', (user_id, new_status))
        # The procedure SELECTs affected_rows as its only result set.
        for result in cursor.stored_results():
            return result.fetchone()['affected_rows'] > 0
        return False
    except mysql.connector.Error as err:
        print(f"ํ๋ก์์ ํธ์ถ ์๋ฌ: {err}")
        return False
# Advanced query — aggregation and grouping.
def user_statistics():
    """Per-status user counts with oldest/newest signup and average account age (days)."""
    cursor.execute('''
        SELECT
            status,
            COUNT(*) as user_count,
            MIN(created_at) as oldest_user,
            MAX(created_at) as newest_user,
            AVG(TIMESTAMPDIFF(DAY, created_at, NOW())) as avg_account_age_days
        FROM users
        GROUP BY status
        ORDER BY user_count DESC
    ''')
    return cursor.fetchall()
# Advanced query — window functions (requires MySQL 8.0+).
def post_analytics():
    """Latest 100 posts by active users with same-day post counts and a popularity rank."""
    cursor.execute('''
        SELECT
            p.*,
            COUNT(*) OVER(PARTITION BY DATE(p.created_at)) as posts_same_day,
            RANK() OVER(ORDER BY p.view_count DESC) as popularity_rank
        FROM posts p
        JOIN users u ON p.user_id = u.id
        WHERE u.status = 'active'
        ORDER BY p.created_at DESC
        LIMIT 100
    ''')
    return cursor.fetchall()
# Full-text search (uses the FULLTEXT index on title/content).
def search_posts(query):
    """Return up to 20 posts matching ``query``, best matches first."""
    cursor.execute('''
        SELECT *
        FROM posts
        WHERE MATCH(title, content) AGAINST(%s IN NATURAL LANGUAGE MODE)
        ORDER BY MATCH(title, content) AGAINST(%s IN NATURAL LANGUAGE MODE) DESC
        LIMIT 20
    ''', (query, query))
    return cursor.fetchall()
โ
ํน์ง:
- ๋ณต์กํ ํธ๋์ญ์ ์ฒ๋ฆฌ
- ๋์์ฑ ์ ์ด(FOR UPDATE)
- ์คํ ์ด๋ ํ๋ก์์ ํ์ฉ
- ์ง๊ณ ํจ์ ๋ฐ ๊ทธ๋ฃนํ
- ๊ณ ๊ธ ๋ถ์ ์ฟผ๋ฆฌ
- ์๋์ฐ ํจ์ (MySQL 8.0+)
- ์ ์ฒดํ ์คํธ ๊ฒ์ ๊ธฐ๋ฅ
- ์ต์ ํ๋ ์ฟผ๋ฆฌ ์์ฑ
๋ฐ์ดํฐ๋ฒ ์ด์ค ์ฐ๊ฒฐ์ ํจ์จ์ ์ผ๋ก ๊ด๋ฆฌํ๊ธฐ ์ํ ์ปค๋ฅ์ ํ๋ง ๊ธฐ๋ฒ์ด๋ค.
from mysql.connector import pooling
class DatabasePool:
    """Class-level wrapper around a shared mysql.connector connection pool."""

    # Shared pool instance; created once by init_pool().
    _pool = None

    @classmethod
    def init_pool(cls, pool_name, pool_size=5, **kwargs):
        """Create the shared pool; extra kwargs go to the connector (user, host, ...)."""
        dbconfig = {
            "pool_name": pool_name,
            "pool_size": pool_size,
            "pool_reset_session": True,  # reset session state when a connection is returned
            **kwargs
        }
        cls._pool = mysql.connector.pooling.MySQLConnectionPool(**dbconfig)
        print(f"{pool_name} ํ์ด {pool_size}๊ฐ์ ์ฐ๊ฒฐ๋ก ์ด๊ธฐํ๋์์ต๋๋ค.")

    @classmethod
    def get_connection(cls):
        """Borrow a connection from the pool; raises if init_pool was never called."""
        if cls._pool is None:
            raise Exception("ํ์ด ์ด๊ธฐํ๋์ง ์์์ต๋๋ค.")
        return cls._pool.get_connection()

    @classmethod
    def execute_query(cls, query, params=None, commit=False, dictionary=True):
        """ํ์์ ์ฐ๊ฒฐ์ ๊ฐ์ ธ์ ์ฟผ๋ฆฌ๋ฅผ ์คํํ๊ณ ๊ฒฐ๊ณผ๋ฅผ ๋ฐํํ๋ค

        SELECT statements return all rows; anything else returns a dict of
        affected_rows/last_id. NOTE(review): non-SELECT statements are only
        committed when commit=True — callers must pass it for writes.
        """
        conn = None
        cursor = None
        try:
            conn = cls.get_connection()
            cursor = conn.cursor(dictionary=dictionary)
            cursor.execute(query, params or ())
            result = None
            if query.strip().upper().startswith("SELECT"):
                result = cursor.fetchall()
            else:
                if commit:
                    conn.commit()
                result = {"affected_rows": cursor.rowcount, "last_id": cursor.lastrowid}
            return result
        except Exception as e:
            # Undo a partially applied write before re-raising.
            if conn and commit:
                conn.rollback()
            raise e
        finally:
            if cursor:
                cursor.close()
            if conn:
                conn.close()  # closing a pooled connection returns it to the pool
# Connection-pool usage example.
def connection_pool_example():
    """Initialise the shared pool and run several lookups in parallel."""
    DatabasePool.init_pool(
        "mypool",
        pool_size=10,
        user=os.getenv('MYSQL_USER'),
        password=os.getenv('MYSQL_PASSWORD'),
        host=os.getenv('MYSQL_HOST'),
        database=os.getenv('MYSQL_DATABASE')
    )

    def worker(user_id):
        """Fetch one user row via the pool; None on error or no match."""
        try:
            rows = DatabasePool.execute_query(
                "SELECT * FROM users WHERE id = %s", (user_id,)
            )
        except Exception as e:
            print(f"์ฟผ๋ฆฌ ์คํ ์ค ์ค๋ฅ: {e}")
            return None
        return rows[0] if rows else None

    # Fan the lookups out over a small thread pool.
    import concurrent.futures
    user_ids = [1, 2, 3, 4, 5]
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        results = list(executor.map(worker, user_ids))
    for user_id, result in zip(user_ids, results):
        if result:
            print(f"์ฌ์ฉ์ {user_id}: {result['name']}")
        else:
            print(f"์ฌ์ฉ์ {user_id}๋ฅผ ์ฐพ์ ์ ์์ต๋๋ค.")
# ๋๊ท๋ชจ ์ ํ๋ฆฌ์ผ์ด์ ์ ์ํ ํฅ์๋ ํ๋ง
import queue
import threading
import time
class AdvancedConnectionPool:
    """Thread-safe MySQL connection pool with lazy growth up to max_connections."""

    def __init__(self, config, min_connections=5, max_connections=20, timeout=30):
        self.config = config
        self.min_conn = min_connections
        self.max_conn = max_connections
        self.timeout = timeout  # default wait (seconds) for a free connection
        self.connections = queue.Queue(maxsize=max_connections)
        self.active_connections = 0
        self.lock = threading.RLock()
        self.initialize()

    def initialize(self):
        """Pre-create the minimum number of pooled connections."""
        for _ in range(self.min_conn):
            self._add_connection()

    def _create_connection(self):
        """Open a new tracked connection; None on failure or at capacity."""
        with self.lock:
            if self.active_connections >= self.max_conn:
                return None
            try:
                connection = mysql.connector.connect(**self.config)
            except mysql.connector.Error as err:
                print(f"์ฐ๊ฒฐ ์์ฑ ์ค๋ฅ: {err}")
                return None
            self.active_connections += 1
            return connection

    def _add_connection(self):
        """Create a connection and park it in the idle queue; True on success."""
        connection = self._create_connection()
        if connection is None:
            return False
        self.connections.put(connection)
        return True

    def get_connection(self, timeout=None):
        """Borrow a connection, waiting up to ``timeout`` seconds.

        When the queue is empty and the pool is below max_connections a
        fresh connection is opened and returned directly. Fix: the original
        put the new connection into the shared queue and immediately read
        it back, so another thread could steal it and this caller would
        crash with queue.Empty; returning it directly removes that race.
        """
        timeout = timeout or self.timeout
        try:
            connection = self.connections.get(block=True, timeout=timeout)
        except queue.Empty:
            connection = self._create_connection()
            if connection is not None:
                return connection
            raise Exception("์ฐ๊ฒฐ ํ์์์: ์ฌ์ฉ ๊ฐ๋ฅํ ์ฐ๊ฒฐ ์์")
        # Validate a pooled connection before handing it out.
        if not connection.is_connected():
            connection.reconnect()
        return connection

    def release_connection(self, connection):
        """Return a connection to the pool, replacing it when it has died."""
        if connection.is_connected():
            # Roll back any transaction left open by the borrower.
            connection.rollback()
            self.connections.put(connection)
        else:
            # Dead connection: drop it from the count and open a replacement.
            with self.lock:
                self.active_connections -= 1
            self._add_connection()

    def close_all(self):
        """Close every idle connection and reset the active count."""
        with self.lock:
            while not self.connections.empty():
                try:
                    conn = self.connections.get(block=False)
                except queue.Empty:
                    break
                if conn.is_connected():
                    conn.close()
            self.active_connections = 0
โ
ํน์ง:
- ๋ฐ์ดํฐ๋ฒ ์ด์ค ์ฐ๊ฒฐ ์ฌ์ฌ์ฉ
- ์ฐ๊ฒฐ ์์ฑ ๋น์ฉ ์ ๊ฐ
- ์ฐ๊ฒฐ ์ ์ ํ์ผ๋ก ๋ฆฌ์์ค ๊ด๋ฆฌ
- ๋์์ฑ ์ฒ๋ฆฌ ์ต์ ํ
- ์ฐ๊ฒฐ ์ ํจ์ฑ ๊ฒ์ฌ ๋ฐ ๋ณต๊ตฌ
- ํ์์์ ์ค์
- ํธ๋์ญ์ ์ํ ๊ด๋ฆฌ
- ์ค๋ ๋ ์์ ๊ตฌํ
๋ฐ์ดํฐ๋ฒ ์ด์ค ์คํค๋ง ๋ณ๊ฒฝ์ ๊ด๋ฆฌํ๋ ๋ง์ด๊ทธ๋ ์ด์ ์์คํ ์ด๋ค.
class DatabaseMigration:
    """File-based schema migration runner with batch tracking and rollback."""

    def __init__(self, connection):
        self.conn = connection
        # Dictionary cursor so rows can be read by column name.
        self.cursor = connection.cursor(dictionary=True)

    def create_migrations_table(self):
        """Create the bookkeeping table that records executed migrations."""
        self.cursor.execute('''
            CREATE TABLE IF NOT EXISTS migrations (
                id INT AUTO_INCREMENT PRIMARY KEY,
                migration_name VARCHAR(255) NOT NULL UNIQUE,
                batch INT NOT NULL,
                executed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')
        self.conn.commit()

    def get_executed_migrations(self):
        """Return the names of all migrations that have already run."""
        self.cursor.execute("SELECT migration_name FROM migrations")
        return [row['migration_name'] for row in self.cursor.fetchall()]

    def record_migration(self, migration_name, batch):
        """Mark a migration as executed within the given batch."""
        self.cursor.execute(
            "INSERT INTO migrations (migration_name, batch) VALUES (%s, %s)",
            (migration_name, batch)
        )
        self.conn.commit()

    def migrate(self, migrations_directory):
        """Run every pending migration file; return how many executed.

        Migration files are named like ``20210101_create_users.py`` and
        must expose an ``up(cursor)`` function. All pending files run in
        one batch; execution stops at the first failure.
        """
        self.create_migrations_table()
        executed = self.get_executed_migrations()
        # Discover migration files: digits, underscore, name, .py.
        import os
        import importlib.util
        import re
        migration_files = [f for f in os.listdir(migrations_directory)
                           if f.endswith('.py') and re.match(r'\d+_\w+\.py', f)]
        migration_files.sort()  # chronological order via the date prefix
        # Next batch number = current maximum + 1.
        self.cursor.execute("SELECT MAX(batch) as max_batch FROM migrations")
        result = self.cursor.fetchone()
        current_batch = (result['max_batch'] or 0) + 1
        executed_count = 0
        for filename in migration_files:
            migration_name = filename[:-3]  # strip the .py suffix
            if migration_name in executed:
                continue  # already applied in an earlier batch
            # Dynamically load the migration module from its file path.
            file_path = os.path.join(migrations_directory, filename)
            spec = importlib.util.spec_from_file_location(migration_name, file_path)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            print(f"๋ง์ด๊ทธ๋ ์ด์ ์คํ: {migration_name}")
            try:
                if hasattr(module, 'up'):
                    module.up(self.cursor)
                    self.conn.commit()
                    self.record_migration(migration_name, current_batch)
                    executed_count += 1
                else:
                    print(f"๊ฒฝ๊ณ : {migration_name}์ up() ํจ์๊ฐ ์์ต๋๋ค.")
            except Exception as e:
                # Undo the failed migration and stop processing further files.
                self.conn.rollback()
                print(f"๋ง์ด๊ทธ๋ ์ด์ ์ค๋ฅ {migration_name}: {e}")
                break
        return executed_count

    def rollback(self, steps=1):
        """Undo up to ``steps`` migrations from the most recent batch.

        Each migration module must expose ``down(cursor)``. Returns the
        number of migrations rolled back; stops at the first failure.
        """
        # Migrations of the newest batch, most recently applied first.
        self.cursor.execute(
            "SELECT migration_name FROM migrations WHERE batch = "
            "(SELECT MAX(batch) FROM migrations) ORDER BY id DESC"
        )
        last_batch_migrations = self.cursor.fetchall()
        if not last_batch_migrations:
            print("๋กค๋ฐฑํ ๋ง์ด๊ทธ๋ ์ด์ ์ด ์์ต๋๋ค.")
            return 0
        rollback_count = 0
        for row in last_batch_migrations:
            migration_name = row['migration_name']
            # NOTE(review): the directory is hard-coded to 'migrations' here
            # but is a parameter in migrate() — confirm they always match.
            import os
            import importlib.util
            file_path = os.path.join('migrations', f"{migration_name}.py")
            if not os.path.exists(file_path):
                print(f"๊ฒฝ๊ณ : {file_path}๋ฅผ ์ฐพ์ ์ ์์ต๋๋ค.")
                continue
            spec = importlib.util.spec_from_file_location(migration_name, file_path)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            print(f"๋ง์ด๊ทธ๋ ์ด์ ๋กค๋ฐฑ: {migration_name}")
            try:
                if hasattr(module, 'down'):
                    module.down(self.cursor)
                    self.conn.commit()
                    # Remove the bookkeeping row for the undone migration.
                    self.cursor.execute(
                        "DELETE FROM migrations WHERE migration_name = %s",
                        (migration_name,)
                    )
                    self.conn.commit()
                    rollback_count += 1
                else:
                    print(f"๊ฒฝ๊ณ : {migration_name}์ down() ํจ์๊ฐ ์์ต๋๋ค.")
            except Exception as e:
                self.conn.rollback()
                print(f"๋กค๋ฐฑ ์ค๋ฅ {migration_name}: {e}")
                break
            # Stop once the requested number of steps has been undone.
            if rollback_count >= steps:
                break
        return rollback_count
# ๋ง์ด๊ทธ๋ ์ด์ ํ์ผ ์์ (migrations/20210101_create_users_table.py)
"""
def up(cursor):
cursor.execute('''
CREATE TABLE IF NOT EXISTS users (
id INT AUTO_INCREMENT PRIMARY KEY,
name VARCHAR(100) NOT NULL,
email VARCHAR(100) UNIQUE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
''')
def down(cursor):
cursor.execute('DROP TABLE IF EXISTS users')
"""
โ
ํน์ง:
- ๋ฐ์ดํฐ๋ฒ ์ด์ค ์คํค๋ง ๋ฒ์ ๊ด๋ฆฌ
- ์ ์ง์ ์คํค๋ง ๋ณ๊ฒฝ ์ถ์
- ๋กค๋ฐฑ ๊ธฐ๋ฅ ์ ๊ณต
- ์คํ๋ ๋ง์ด๊ทธ๋ ์ด์ ๊ธฐ๋ก
- ํ์ผ ๊ธฐ๋ฐ ๋ง์ด๊ทธ๋ ์ด์
- ๋ฐฐ์น ์ฒ๋ฆฌ ๋ฐฉ์
- ๋์ ๋ชจ๋ ๋ก๋ฉ
- ์๋ฌ ์ฒ๋ฆฌ ๋ฐ ํธ๋์ญ์ ์์ ์ฑ
๋ฐ์ดํฐ๋ฒ ์ด์ค ์ฐ๊ฒฐ๊ณผ ์ฟผ๋ฆฌ์ ๋ณด์ ๋ฐ ์ฑ๋ฅ์ ์ต์ ํํ๋ ๊ธฐ๋ฒ์ด๋ค.
# Security — preventing SQL injection.
def safe_query_example():
    """Contrast unsafe string-built SQL with parameterized queries."""

    def unsafe_search(username):
        # DANGER: interpolating user input straight into SQL enables
        # injection — kept only as a counter-example; never use this.
        cursor.execute(f"SELECT * FROM users WHERE username = '{username}'")
        return cursor.fetchall()

    # Safe approach — the driver binds the value via a placeholder.
    def safe_search(username):
        cursor.execute("SELECT * FROM users WHERE username = %s", (username,))
        return cursor.fetchall()

    # Safe dynamic query: only fixed SQL fragments are concatenated,
    # every user-supplied value goes through a placeholder.
    def dynamic_safe_search(criteria):
        query = "SELECT * FROM users WHERE 1=1"
        params = []
        if 'username' in criteria:
            query += " AND username = %s"
            params.append(criteria['username'])
        if 'status' in criteria:
            query += " AND status = %s"
            params.append(criteria['status'])
        if 'created_after' in criteria:
            query += " AND created_at > %s"
            params.append(criteria['created_after'])
        cursor.execute(query, params)
        return cursor.fetchall()
# Optimization — inspect a query's execution plan.
def explain_query(query, params=None):
    """Return the EXPLAIN rows for ``query`` (params are bound as usual)."""
    cursor.execute(f"EXPLAIN {query}", params or ())
    return cursor.fetchall()
# Optimization — index management helpers.
def optimize_indexes():
    """Define index-management helpers (add_index, find_unused_indexes)."""

    def add_index(table, columns, index_name=None, unique=False):
        """Create a (optionally UNIQUE) index on ``table``; True on success."""
        index_type = "UNIQUE" if unique else ""
        name = index_name or f"idx_{'_'.join(columns)}"
        columns_str = ', '.join(columns)
        try:
            cursor.execute(f"CREATE {index_type} INDEX {name} ON {table}({columns_str})")
            conn.commit()
            return True
        except mysql.connector.Error as err:
            print(f"์ธ๋ฑ์ค ์์ฑ ์ค๋ฅ: {err}")
            return False

    def find_unused_indexes():
        """List indexes of the current schema with no recorded I/O usage.

        Fix: SQL AND binds tighter than OR, so the original predicate
        ``A AND B OR C`` also matched zero-usage indexes from *every*
        schema; the parentheses restrict results to the current database.
        """
        cursor.execute('''
            SELECT
                t.TABLE_SCHEMA,
                t.TABLE_NAME,
                s.INDEX_NAME,
                s.COLUMN_NAME
            FROM information_schema.STATISTICS s
            JOIN information_schema.TABLES t
                ON s.TABLE_SCHEMA = t.TABLE_SCHEMA
                AND s.TABLE_NAME = t.TABLE_NAME
            LEFT JOIN performance_schema.table_io_waits_summary_by_index_usage i
                ON i.OBJECT_SCHEMA = t.TABLE_SCHEMA
                AND i.OBJECT_NAME = t.TABLE_NAME
                AND i.INDEX_NAME = s.INDEX_NAME
            WHERE t.TABLE_SCHEMA = DATABASE()
            AND (i.COUNT_STAR IS NULL OR i.COUNT_STAR = 0)
            ORDER BY t.TABLE_NAME, s.INDEX_NAME
        ''')
        return cursor.fetchall()
# Table maintenance — rebuild/defragment a table.
def optimize_table(table):
    """Run OPTIMIZE TABLE on ``table`` and return the status rows.

    NOTE(review): the table name is interpolated directly into the SQL —
    callers must pass trusted identifiers only.
    """
    cursor.execute(f"OPTIMIZE TABLE {table}")
    return cursor.fetchall()
# Performance — slow-query analysis.
def analyze_slow_queries():
    """Enable the slow-query log and return the ten most recent entries.

    NOTE(review): SET GLOBAL requires admin privileges, and reading
    mysql.slow_log assumes log_output includes TABLE — confirm both for
    the target server.
    """
    cursor.execute("SET GLOBAL slow_query_log = 'ON'")
    cursor.execute("SET GLOBAL long_query_time = 1")  # log queries slower than 1 second
    conn.commit()
    cursor.execute('''
        SELECT
            start_time,
            query_time,
            sql_text
        FROM mysql.slow_log
        ORDER BY start_time DESC
        LIMIT 10
    ''')
    return cursor.fetchall()
# ์ฑ๋ฅ - ์บ์ฑ ๋ ์ด์ด ๊ตฌํ
import functools
import hashlib
import pickle
import time
# In-memory TTL cache for query results.
class QueryCache:
    """Bounded in-memory cache with per-entry TTL and oldest-first eviction."""

    def __init__(self, max_size=100, default_ttl=300):
        # Maps key -> (value, insertion timestamp, ttl in seconds).
        self.cache = {}
        self.max_size = max_size
        self.default_ttl = default_ttl

    def _generate_key(self, query, params):
        """Derive a stable cache key from the query text and its parameters."""
        serialized = pickle.dumps(params) if params else ''
        raw = f"{query}:{serialized}".encode('utf-8')
        return hashlib.md5(raw).hexdigest()

    def get(self, key):
        """Return the cached value, or None when absent or expired."""
        entry = self.cache.get(key)
        if entry is None:
            return None
        value, stored_at, ttl = entry
        if time.time() - stored_at > ttl:
            # Expired — drop the entry so it is not checked again.
            del self.cache[key]
            return None
        return value

    def set(self, key, value, ttl=None):
        """Store a value, evicting the oldest entry when the cache is full."""
        if len(self.cache) >= self.max_size:
            oldest = min(self.cache, key=lambda k: self.cache[k][1])
            del self.cache[oldest]
        self.cache[key] = (value, time.time(), ttl or self.default_ttl)

    def clear(self):
        """Remove every cached entry."""
        self.cache.clear()
# Example: read-through caching of database lookups.
def cached_database_access():
    """Demonstrate caching user lookups with QueryCache."""
    cache = QueryCache()

    def get_user_cached(user_id, ttl=60):
        """Return the user row, serving repeat lookups from the cache."""
        query = "SELECT * FROM users WHERE id = %s"
        params = (user_id,)
        cache_key = cache._generate_key(query, params)
        cached_result = cache.get(cache_key)
        # Fix: compare against None — a falsy cached row (e.g. empty dict)
        # is still a hit; the original truthiness test re-queried for it.
        if cached_result is not None:
            print("์บ์ ํํธ!")
            return cached_result
        print("์บ์ ๋ฏธ์ค, DB ์กฐํ ์ค...")
        cursor.execute(query, params)
        result = cursor.fetchone()
        if result:
            cache.set(cache_key, result, ttl)
        return result

    # Look the same user up twice.
    user = get_user_cached(1)  # cache miss
    user = get_user_cached(1)  # cache hit
โ
ํน์ง:
- SQL ์ธ์ ์ ๋ฐฉ์ง
- ํ๋ผ๋ฏธํฐํ๋ ์ฟผ๋ฆฌ ์ฌ์ฉ
- ๋์ ์ฟผ๋ฆฌ ์์ ๊ตฌํ
- ์ฟผ๋ฆฌ ์คํ ๊ณํ ๋ถ์
- ํจ์จ์ ์ธ ์ธ๋ฑ์ค ๊ด๋ฆฌ
- ๋๋ฆฐ ์ฟผ๋ฆฌ ์๋ณ
- ๋ฉ๋ชจ๋ฆฌ ์บ์ฑ ๊ตฌํ
- ๋ฆฌ์์ค ์ฌ์ฉ ์ต์ ํ
MySQL๊ณผ ๊ฐ์ฒด ๊ด๊ณ ๋งคํ(ORM) ํ๋ ์์ํฌ๋ฅผ ํตํฉํ๋ ๋ฐฉ๋ฒ์ด๋ค.
# SQLAlchemy ORM ์์
from sqlalchemy import create_engine, Column, Integer, String, DateTime, ForeignKey, Text, func
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
import datetime
# Database connection (credentials from environment variables).
connection_string = f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/{os.getenv('MYSQL_DATABASE')}"
engine = create_engine(connection_string, echo=False)  # echo=True would log all SQL
# Declarative base class the ORM models inherit from.
Base = declarative_base()
class User(Base):
    """ORM model for the users table."""
    __tablename__ = 'users'

    id = Column(Integer, primary_key=True, autoincrement=True)
    name = Column(String(100), nullable=False)
    email = Column(String(100), unique=True)
    created_at = Column(DateTime, default=datetime.datetime.utcnow)
    # One-to-many: deleting a user also removes their posts.
    posts = relationship("Post", back_populates="author", cascade="all, delete-orphan")

    def __repr__(self):
        return f"<User(name='{self.name}', email='{self.email}')>"
class Post(Base):
    """ORM model for the posts table."""
    __tablename__ = 'posts'

    id = Column(Integer, primary_key=True, autoincrement=True)
    user_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'), nullable=False)
    title = Column(String(200), nullable=False)
    content = Column(Text)
    created_at = Column(DateTime, default=datetime.datetime.utcnow)
    # Many-to-one back-reference to the owning User.
    author = relationship("User", back_populates="posts")

    def __repr__(self):
        return f"<Post(title='{self.title}')>"
# Create all tables defined on Base (no-op for tables that already exist).
Base.metadata.create_all(engine)
# Session factory bound to the engine.
Session = sessionmaker(bind=engine)
def orm_example():
    """Demonstrate create/read/update/delete through the SQLAlchemy session."""
    session = Session()
    try:
        # Create a user.
        new_user = User(name='๊น์ฒ ์', email='[email protected]')
        session.add(new_user)
        session.commit()
        # Create a post linked to the user through the relationship.
        new_post = Post(
            title='ORM ์ฌ์ฉํ๊ธฐ',
            content='SQLAlchemy๋ ๊ฐ๋ ฅํ Python ORM ํ๋ ์์ํฌ์ ๋๋ค.',
            author=new_user
        )
        session.add(new_post)
        session.commit()
        # Read — single record.
        user = session.query(User).filter_by(email='[email protected]').first()
        print(f"์ฌ์ฉ์: {user.name}, ์ด๋ฉ์ผ: {user.email}")
        # Read — JOIN across both models.
        results = session.query(User, Post).join(Post).filter(User.id == new_user.id).all()
        for user, post in results:
            print(f"์ฌ์ฉ์: {user.name}, ๊ฒ์๋ฌผ: {post.title}")
        # Read — aggregate.
        post_count = session.query(func.count(Post.id)).scalar()
        print(f"์ด ๊ฒ์๋ฌผ ์: {post_count}")
        # Filtering and ordering.
        recent_posts = session.query(Post).order_by(Post.created_at.desc()).limit(5).all()
        for post in recent_posts:
            print(f"์ต๊ทผ ๊ฒ์๋ฌผ: {post.title}")
        # Update a mapped attribute; commit flushes the change.
        user.name = '๊น์ฒ ์ (์์ ๋จ)'
        session.commit()
        # Delete.
        session.delete(new_post)
        session.commit()
    except Exception as e:
        # Any failure rolls the whole unit of work back.
        session.rollback()
        print(f"ORM ์์ ์ค๋ฅ: {e}")
    finally:
        session.close()
โ
ํน์ง:
- ๊ฐ์ฒด-๊ด๊ณ ๋งคํ(ORM)
- SQL ์ถ์ํ
- ๋ชจ๋ธ ๊ธฐ๋ฐ ๋ฐ์ดํฐ ์ ์
- ๊ด๊ณ ์๋ ๊ด๋ฆฌ
- ๋ณต์กํ ์ฟผ๋ฆฌ ๋จ์ํ
- ์ธ์ ๊ธฐ๋ฐ ํธ๋์ญ์
- ๋ง์ด๊ทธ๋ ์ด์ ์๋ํ
- ์ ์ง๋ณด์์ฑ ํฅ์
โ
๋ชจ๋ฒ ์ฌ๋ก:
- ์ปค๋ฅ์ ํ๋ง ์ฌ์ฉ
- ์ ํ๋ฆฌ์ผ์ด์ ์์ ์ ํ ์ด๊ธฐํ
- ์ฐ๊ฒฐ ์๋ฅผ ์๋ฒ ๋ฆฌ์์ค์ ๋ง๊ฒ ์กฐ์
- ๊ธด ํธ๋์ญ์ ์ ๋ณ๋ ์ฐ๊ฒฐ ์ฌ์ฉ ๊ณ ๋ ค
- ํ๋ผ๋ฏธํฐํ๋ ์ฟผ๋ฆฌ ์ฌ์ฉ
- SQL ์ธ์ ์ ๋ฐฉ์ง๋ฅผ ์ํด ํญ์ ์ ์ฉ
- ๋์ ์ฟผ๋ฆฌ ์์ฑ ์ ํนํ ์ฃผ์
- ORM ์ฌ์ฉ ์์๋ ์์น ์ ์ง
- ํธ๋์ญ์ ์ ์ ํ ํ์ฉ
- ์์์ ์์ ์ ํธ๋์ญ์ ์ผ๋ก ๋ฌถ๊ธฐ
- ํธ๋์ญ์ ์ ์ต๋ํ ์งง๊ฒ ์ ์ง
- ๊ฐ๋ฅํ ๊ฒฝ์ฐ ์๋ ์ปค๋ฐ ํ์ฑํ
- ์ธ๋ฑ์ค ์ต์ ํ
- ์์ฃผ ์ฌ์ฉ๋๋ ์ฟผ๋ฆฌ ํจํด์ ๋ง๊ฒ ์ค๊ณ
- ๋ณตํฉ ์ธ๋ฑ์ค ํ์ฉ
- ๋ถํ์ํ ์ธ๋ฑ์ค ์ ๊ฑฐ
- EXPLAIN ๋ช ๋ น์ด๋ก ์ฟผ๋ฆฌ ๊ณํ ๊ฒ์ฌ
- ์ฃผ๊ธฐ์ ์ธ ๋ฐฑ์
- ์๋ํ๋ ๋ฐฑ์ ์์คํ ๊ตฌ์ถ
- ์ ์ฒด ๋ฐ ์ฆ๋ถ ๋ฐฑ์ ์ ๋ต ์๋ฆฝ
- ๋ณต๊ตฌ ์ ์ฐจ ์ ๊ธฐ์ ํ ์คํธ
- ๋ณด์ ์ค์ ํ์ธ
- ์ต์ ๊ถํ ์์น ์ ์ฉ
- ๋คํธ์ํฌ ๋ฐฉํ๋ฒฝ ์ค์
- SSL/TLS ์ฐ๊ฒฐ ์ฌ์ฉ
- ๋น๋ฐ๋ฒํธ ์ ์ฑ ๊ฐํ
- ์ฑ๋ฅ ๋ชจ๋ํฐ๋ง
- ๋๋ฆฐ ์ฟผ๋ฆฌ ๋ก๊ทธ ํ์ฑํ
- ๋ฆฌ์์ค ์ฌ์ฉ๋ ์ถ์
- ๋ณ๋ชฉ ํ์ ์กฐ๊ธฐ ๋ฐ๊ฒฌ
- ์ฑ๋ฅ ์งํ ์๊ฐํ
- ์๋ฌ ์ฒ๋ฆฌ ๊ตฌํ
- ๋ชจ๋ ๋ฐ์ดํฐ๋ฒ ์ด์ค ์์ ์ ์์ธ ์ฒ๋ฆฌ
- ์ค๋ฅ ๋ก๊น ๋ฐ ์๋ฆผ ์ค์
- ์ฌ์๋ ๋ฉ์ปค๋์ฆ ๊ตฌํ
- ์ฌ์ฉ์ ์นํ์ ์ค๋ฅ ๋ฉ์์ง
- ๊ฐ์ฒด ๊ด๊ณ ๋งคํ(ORM) ๊ณ ๋ ค
- SQL ์ถ์ํ๋ก ์์ฐ์ฑ ํฅ์
- N+1 ์ฟผ๋ฆฌ ๋ฌธ์ ์ธ์ ๋ฐ ํด๊ฒฐ
- ๋๋ ์์ ์ ๋ค์ดํฐ๋ธ ์ฟผ๋ฆฌ ์ฌ์ฉ
- ๋ณต์กํ ์ฟผ๋ฆฌ๋ ์ง์ SQL ์์ฑ ๊ณ ๋ ค