Logging and Analytics - openguard-bot/openguard GitHub Wiki
Logging and Analytics
AIMod provides comprehensive logging and analytics capabilities to help server administrators monitor activity, track performance, and make data-driven decisions about server management.
📊 Analytics Architecture
Data Collection Pipeline
Discord Events → Event Processors → Database Storage → Analytics Engine → Dashboard

- Discord Events: user actions, moderation actions, bot actions
- Event Processors: data validation, event filtering, data enrichment
- Database Storage: PostgreSQL, time-series tables, structured records
- Analytics Engine: aggregation, calculations, trend analysis
- Dashboard: real-time visualizations and reports
Storage Strategy
Time-Series Data:
- Event logs with timestamps
- Partitioned by date for performance
- Automatic archival of old data
- Compressed storage for efficiency
Aggregated Metrics:
- Pre-calculated statistics
- Hourly, daily, weekly, monthly summaries
- Real-time counters and gauges
- Cached frequently accessed data
📝 Comprehensive Logging System
Event Categories
Moderation Events
# Human-readable labels for every moderation event type the bot logs.
MODERATION_EVENTS = dict(
    BAN="User banned",
    UNBAN="User unbanned",
    KICK="User kicked",
    TIMEOUT="User timed out",
    UNTIMEOUT="User timeout removed",
    WARN="User warned",
    GLOBAL_BAN="User globally banned",
    AI_ACTION="AI moderation action taken",
)
User Events
# Human-readable labels for member-level events (joins, profile changes, roles).
USER_EVENTS = dict(
    JOIN="Member joined server",
    LEAVE="Member left server",
    NICKNAME_CHANGE="Member changed nickname",
    ROLE_ADD="Role added to member",
    ROLE_REMOVE="Role removed from member",
    AVATAR_CHANGE="Member changed avatar",
    USERNAME_CHANGE="Member changed username",
)
Server Events
# Human-readable labels for guild-structure events (channels, roles, invites).
SERVER_EVENTS = dict(
    CHANNEL_CREATE="Channel created",
    CHANNEL_DELETE="Channel deleted",
    CHANNEL_UPDATE="Channel updated",
    ROLE_CREATE="Role created",
    ROLE_DELETE="Role deleted",
    ROLE_UPDATE="Role updated",
    INVITE_CREATE="Invite created",
    INVITE_DELETE="Invite deleted",
)
Message Events
# Human-readable labels for message lifecycle events.
MESSAGE_EVENTS = dict(
    MESSAGE_DELETE="Message deleted",
    MESSAGE_EDIT="Message edited",
    BULK_DELETE="Messages bulk deleted",
    REACTION_ADD="Reaction added",
    REACTION_REMOVE="Reaction removed",
)
Logging Implementation
Core Logging System
class LoggingSystem:
    """Collects bot events, queues them for persistence, and fans them out
    to configured Discord channels and external webhooks.

    A background task should run :meth:`process_log_queue` to drain the
    queue into the database.
    """

    def __init__(self, bot):
        self.bot = bot
        # Buffer between event producers and the DB-writer task.
        self.log_queue = asyncio.Queue()
        # Cache of webhook objects keyed per guild (populated elsewhere).
        self.webhook_cache = {}

    async def log_event(
        self,
        guild_id: int,
        event_type: str,
        data: dict,
        user_id: int = None,
        channel_id: int = None,
    ):
        """Log an event to the database and external systems.

        Args:
            guild_id: Guild the event occurred in.
            event_type: One of the event-category keys (e.g. "BAN").
            data: Arbitrary JSON-serializable event payload.
            user_id: Acting/affected user, if any.
            channel_id: Channel involved, if any.
        """
        log_entry = {
            "guild_id": guild_id,
            "event_type": event_type,
            "user_id": user_id,
            "channel_id": channel_id,
            "data": data,
            # NOTE: naive UTC timestamp; keep consistent with the analytics
            # queries below, which also compare against utcnow().
            "timestamp": datetime.utcnow(),
        }
        # Enqueue for asynchronous DB persistence.
        await self.log_queue.put(log_entry)
        # Mirror to the guild's Discord log channel, if configured.
        await self.send_discord_log(guild_id, log_entry)
        # Mirror to the guild's external webhook, if configured.
        await self.send_webhook_log(guild_id, log_entry)

    async def process_log_queue(self):
        """Drain queued log entries into the database, forever."""
        while True:
            log_entry = await self.log_queue.get()
            try:
                await self.store_log_entry(log_entry)
            except Exception as e:
                # One bad entry must not kill the consumer loop.
                print(f"Error processing log entry: {e}")
            finally:
                # Always acknowledge the entry, even on failure; otherwise
                # Queue.join() would block forever on the lost task_done().
                self.log_queue.task_done()

    async def store_log_entry(self, log_entry: dict):
        """Store a single log entry in the event_logs table."""
        async with get_connection() as conn:
            await conn.execute(
                """
                INSERT INTO event_logs (
                    guild_id, event_type, user_id, channel_id,
                    data, timestamp
                ) VALUES ($1, $2, $3, $4, $5, $6)
                """,
                log_entry["guild_id"],
                log_entry["event_type"],
                log_entry["user_id"],
                log_entry["channel_id"],
                json.dumps(log_entry["data"]),
                log_entry["timestamp"],
            )
Event Listeners
@commands.Cog.listener()
async def on_member_ban(self, guild: discord.Guild, user: discord.User):
    """Log member ban events."""
    # Look up the audit reason. fetch_ban can raise NotFound (ban already
    # lifted) or Forbidden (missing permission); both subclass
    # HTTPException, so catch that instead of a bare except which would
    # also swallow CancelledError and real bugs.
    try:
        ban_entry = await guild.fetch_ban(user)
        reason = ban_entry.reason or "No reason provided"
    except discord.HTTPException:
        reason = "Unknown"
    await self.logging_system.log_event(
        guild_id=guild.id,
        event_type="BAN",
        data={
            "user_id": user.id,
            "username": str(user),
            "reason": reason,
            "moderator": "Unknown",  # Discord doesn't provide this directly
        },
        user_id=user.id,
    )
@commands.Cog.listener()
async def on_message_delete(self, message: discord.Message):
    """Log message deletion events."""
    # Skip DMs and messages authored by bots.
    if message.guild is None or message.author.bot:
        return
    author = message.author
    channel = message.channel
    payload = {
        "user_id": author.id,
        "username": str(author),
        "channel_id": channel.id,
        "channel_name": channel.name,
        "content": message.content[:500],  # Truncate long messages
        "attachments": len(message.attachments),
        "message_id": message.id,
    }
    await self.logging_system.log_event(
        guild_id=message.guild.id,
        event_type="MESSAGE_DELETE",
        data=payload,
        user_id=author.id,
        channel_id=channel.id,
    )
Log Configuration
Discord Channel Logging
async def configure_discord_logging(
    guild_id: int,
    channel_id: int,
    events: List[str],
):
    """Configure Discord channel logging for a guild.

    Persists the logging settings under the guild's LOGGING_CONFIG key.
    """
    await set_guild_config(
        guild_id,
        "LOGGING_CONFIG",
        {
            "enabled": True,
            "channel_id": channel_id,
            "events": events,
            "embed_format": True,
            "include_attachments": True,
        },
    )
Webhook Logging
async def configure_webhook_logging(
    guild_id: int,
    webhook_url: str,
    events: List[str],
):
    """Configure external webhook logging for a guild.

    Persists the settings under the guild's WEBHOOK_LOGGING key.
    """
    await set_guild_config(
        guild_id,
        "WEBHOOK_LOGGING",
        {
            "enabled": True,
            "webhook_url": webhook_url,
            "events": events,
            "format": "json",
            "include_metadata": True,
        },
    )
📈 Analytics Engine
Real-Time Metrics
Server Activity Metrics
async def get_server_activity_metrics(guild_id: int, timeframe: str = "24h"):
    """Get real-time server activity metrics.

    Args:
        guild_id: Guild to report on.
        timeframe: One of "1h", "6h", "24h", "7d", "30d".

    Returns:
        Dict with message/member/moderation counts and an activity score.

    Raises:
        ValueError: If ``timeframe`` is not one of the supported values
            (previously this surfaced as an opaque KeyError).
    """
    timeframe_hours = {"1h": 1, "6h": 6, "24h": 24, "7d": 168, "30d": 720}
    if timeframe not in timeframe_hours:
        raise ValueError(
            f"Unsupported timeframe {timeframe!r}; "
            f"expected one of {sorted(timeframe_hours)}"
        )
    since = datetime.utcnow() - timedelta(hours=timeframe_hours[timeframe])

    async with get_connection() as conn:
        # Message activity
        message_count = await conn.fetchval("""
            SELECT COUNT(*) FROM event_logs
            WHERE guild_id = $1 AND event_type = 'MESSAGE_SEND'
            AND timestamp >= $2
        """, guild_id, since)
        # Member joins/leaves
        joins = await conn.fetchval("""
            SELECT COUNT(*) FROM event_logs
            WHERE guild_id = $1 AND event_type = 'JOIN'
            AND timestamp >= $2
        """, guild_id, since)
        leaves = await conn.fetchval("""
            SELECT COUNT(*) FROM event_logs
            WHERE guild_id = $1 AND event_type = 'LEAVE'
            AND timestamp >= $2
        """, guild_id, since)
        # Moderation actions
        mod_actions = await conn.fetchval("""
            SELECT COUNT(*) FROM event_logs
            WHERE guild_id = $1 AND event_type IN ('BAN', 'KICK', 'TIMEOUT', 'WARN')
            AND timestamp >= $2
        """, guild_id, since)

    return {
        "timeframe": timeframe,
        "message_count": message_count,
        "member_joins": joins,
        "member_leaves": leaves,
        "net_member_change": joins - leaves,
        "moderation_actions": mod_actions,
        "activity_score": calculate_activity_score(message_count, joins, mod_actions),
    }
User Activity Analytics
async def get_user_activity_analytics(
    guild_id: int,
    user_id: int,
    days: int = 30,
):
    """Get detailed user activity analytics over the last ``days`` days."""
    since = datetime.utcnow() - timedelta(days=days)

    async with get_connection() as conn:
        # Per-day message counts for this user.
        messages_by_day = await conn.fetch("""
            SELECT
                DATE_TRUNC('day', timestamp) as date,
                COUNT(*) as message_count
            FROM event_logs
            WHERE guild_id = $1 AND user_id = $2
            AND event_type = 'MESSAGE_SEND'
            AND timestamp >= $3
            GROUP BY DATE_TRUNC('day', timestamp)
            ORDER BY date
        """, guild_id, user_id, since)
        # Ten channels where the user was most active.
        busiest_channels = await conn.fetch("""
            SELECT
                channel_id,
                COUNT(*) as message_count
            FROM event_logs
            WHERE guild_id = $1 AND user_id = $2
            AND event_type = 'MESSAGE_SEND'
            AND timestamp >= $3
            GROUP BY channel_id
            ORDER BY message_count DESC
            LIMIT 10
        """, guild_id, user_id, since)
        # Recorded infractions within the window, newest first.
        infraction_rows = await conn.fetch("""
            SELECT timestamp, rule_violated, action_taken, reasoning
            FROM user_infractions
            WHERE guild_id = $1 AND user_id = $2
            AND timestamp >= $3
            ORDER BY timestamp DESC
        """, guild_id, user_id, since)

    daily = [
        {"date": r["date"].isoformat(), "count": r["message_count"]}
        for r in messages_by_day
    ]
    channels = [
        {"channel_id": r["channel_id"], "count": r["message_count"]}
        for r in busiest_channels
    ]
    infractions = [
        {
            "timestamp": r["timestamp"].isoformat(),
            "rule": r["rule_violated"],
            "action": r["action_taken"],
            "reason": r["reasoning"],
        }
        for r in infraction_rows
    ]
    return {
        "user_id": user_id,
        "analysis_period": f"{days} days",
        "daily_messages": daily,
        "channel_activity": channels,
        "infractions": infractions,
        "total_messages": sum(entry["count"] for entry in daily),
        "infraction_count": len(infractions),
    }
Command Usage Analytics
async def get_command_analytics(guild_id: int, days: int = 30):
    """Get command usage analytics over the last ``days`` days.

    Returns totals, top commands, daily usage trends, and top users.

    Note: totals are computed with a dedicated aggregate query; the
    previous implementation summed only the LIMIT 20 "top commands" rows,
    undercounting both total_commands and unique_commands.
    """
    since = datetime.utcnow() - timedelta(days=days)

    async with get_connection() as conn:
        # True totals across ALL commands, not just the top 20.
        totals = await conn.fetchrow("""
            SELECT
                COUNT(*) as total_commands,
                COUNT(DISTINCT command_name) as unique_commands
            FROM command_logs
            WHERE guild_id = $1 AND timestamp >= $2
        """, guild_id, since)
        # Top commands
        top_commands = await conn.fetch("""
            SELECT
                command_name,
                COUNT(*) as usage_count,
                COUNT(DISTINCT user_id) as unique_users,
                MAX(timestamp) as last_used
            FROM command_logs
            WHERE guild_id = $1 AND timestamp >= $2
            GROUP BY command_name
            ORDER BY usage_count DESC
            LIMIT 20
        """, guild_id, since)
        # Daily usage trends
        daily_usage = await conn.fetch("""
            SELECT
                DATE_TRUNC('day', timestamp) as date,
                COUNT(*) as command_count
            FROM command_logs
            WHERE guild_id = $1 AND timestamp >= $2
            GROUP BY DATE_TRUNC('day', timestamp)
            ORDER BY date
        """, guild_id, since)
        # User command activity
        user_activity = await conn.fetch("""
            SELECT
                user_id,
                COUNT(*) as command_count,
                COUNT(DISTINCT command_name) as unique_commands
            FROM command_logs
            WHERE guild_id = $1 AND timestamp >= $2
            GROUP BY user_id
            ORDER BY command_count DESC
            LIMIT 10
        """, guild_id, since)

    return {
        "analysis_period": f"{days} days",
        "total_commands": totals["total_commands"],
        "unique_commands": totals["unique_commands"],
        "top_commands": [
            {
                "command": row["command_name"],
                "usage_count": row["usage_count"],
                "unique_users": row["unique_users"],
                "last_used": row["last_used"].isoformat(),
            }
            for row in top_commands
        ],
        "daily_usage": [
            {"date": row["date"].isoformat(), "count": row["command_count"]}
            for row in daily_usage
        ],
        "top_users": [
            {
                "user_id": row["user_id"],
                "command_count": row["command_count"],
                "unique_commands": row["unique_commands"],
            }
            for row in user_activity
        ],
    }
Moderation Analytics
async def get_moderation_analytics(guild_id: int, days: int = 30):
    """Get comprehensive moderation analytics over the last ``days`` days."""
    since = datetime.utcnow() - timedelta(days=days)

    async with get_connection() as conn:
        # How often each action type was taken, with its share of the total.
        actions_by_type = await conn.fetch("""
            SELECT
                action_taken,
                COUNT(*) as count,
                ROUND(COUNT(*) * 100.0 / SUM(COUNT(*)) OVER (), 2) as percentage
            FROM user_infractions
            WHERE guild_id = $1 AND timestamp >= $2
            GROUP BY action_taken
            ORDER BY count DESC
        """, guild_id, since)
        # Most active moderators (human-attributed actions only).
        moderator_rows = await conn.fetch("""
            SELECT
                moderator_id,
                COUNT(*) as action_count,
                COUNT(DISTINCT target_user_id) as unique_targets
            FROM moderation_logs
            WHERE guild_id = $1 AND created_at >= $2
            AND moderator_id IS NOT NULL
            GROUP BY moderator_id
            ORDER BY action_count DESC
            LIMIT 10
        """, guild_id, since)
        # Per-day, per-action counts for trend charts.
        trend_rows = await conn.fetch("""
            SELECT
                DATE_TRUNC('day', timestamp) as date,
                action_taken,
                COUNT(*) as count
            FROM user_infractions
            WHERE guild_id = $1 AND timestamp >= $2
            GROUP BY DATE_TRUNC('day', timestamp), action_taken
            ORDER BY date, action_taken
        """, guild_id, since)
        # Split actions by whether the AI moderator's reasoning tag is present.
        moderator_split = await conn.fetch("""
            SELECT
                CASE
                    WHEN reasoning LIKE '%AI Mod:%' THEN 'AI'
                    ELSE 'Human'
                END as moderator_type,
                COUNT(*) as count
            FROM user_infractions
            WHERE guild_id = $1 AND timestamp >= $2
            GROUP BY moderator_type
        """, guild_id, since)

    distribution = [
        {
            "action": row["action_taken"],
            "count": row["count"],
            "percentage": float(row["percentage"]),
        }
        for row in actions_by_type
    ]
    return {
        "analysis_period": f"{days} days",
        "action_distribution": distribution,
        "top_moderators": [
            {
                "moderator_id": row["moderator_id"],
                "action_count": row["action_count"],
                "unique_targets": row["unique_targets"],
            }
            for row in moderator_rows
        ],
        "daily_trends": group_daily_trends(trend_rows),
        "ai_vs_human": [
            {"type": row["moderator_type"], "count": row["count"]}
            for row in moderator_split
        ],
        "total_actions": sum(entry["count"] for entry in distribution),
    }
📊 Dashboard Integration
Real-Time Updates
// Frontend real-time updates
const useRealTimeAnalytics = (guildId) => {
const [analytics, setAnalytics] = useState(null);
useEffect(() => {
const eventSource = new EventSource(`/api/guilds/${guildId}/analytics/stream`);
eventSource.onmessage = (event) => {
const data = JSON.parse(event.data);
setAnalytics(prev => ({
...prev,
...data
}));
};
return () => eventSource.close();
}, [guildId]);
return analytics;
};
Export Capabilities
@router.get("/guilds/{guild_id}/analytics/export")
async def export_analytics(
    guild_id: int,
    format: str = "json",
    timeframe: str = "30d",
):
    """Export analytics data in various formats.

    ``format`` selects "csv" or "xlsx"; any other value (including the
    default "json") returns the raw analytics payload.
    """
    analytics_data = await get_comprehensive_analytics(guild_id, timeframe)
    exporters = {
        "csv": generate_csv_export,
        "xlsx": generate_excel_export,
    }
    exporter = exporters.get(format)
    if exporter is not None:
        return exporter(analytics_data)
    return analytics_data
Next: Installation Guide - Complete setup and installation instructions