Common Tasks

Practical guides for frequent development tasks in the Pricing App.

Table of Contents

Adding a New Backend Endpoint
Adding a New Frontend Component
Adding a New Database Table
Adding a New Context Provider
Working with Uploaded Files
Adding Real-time Updates
Creating a Migration Tool
Debugging Common Issues

Adding a New Backend Endpoint

Step 1: Add Endpoint Handler

File: src-actix/{module}/{module}_endpoint.rs

use actix_web::{web, HttpResponse, Responder};
use sqlx::MySqlPool;
use serde_hash::hashids::decode_single;

#[utoipa::path(
    get,
    path = "/api/mymodule/{id}",
    params(
        ("id" = String, Path, description = "Resource ID")
    ),
    responses(
        (status = 200, description = "Success", body = MyResponse),
        (status = 404, description = "Not found")
    ),
    tag = "MyModule"
)]
pub async fn get_resource(
    pool: web::Data<MySqlPool>,
    id: web::Path<String>,
) -> database_common_lib::http_error::Result<impl Responder> {
    // Decode hashed ID
    let resource_id = decode_single(&id)
        .map_err(|e| database_common_lib::http_error::bad_request(format!("Invalid ID: {}", e)))?;

    // Call database function
    let resource = my_module_db::get_by_id(&pool, resource_id)
        .await
        .map_err(|_| database_common_lib::http_error::not_found("Resource not found"))?;

    Ok(web::Json(resource))
}

Step 2: Register Endpoint

File: src-actix/{module}/{module}_endpoint.rs

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::scope("/api/mymodule")
            .route("/{id}", web::get().to(get_resource))
            .route("/{id}", web::put().to(update_resource))
            .route("", web::post().to(create_resource))
    );
}
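
The configure function also has to be wired into the HTTP server at startup. A minimal sketch, assuming the Actix app is assembled in src-actix/lib.rs and the module lives at mymodule::mymodule_endpoint (adjust the module path and builder to match the real startup code):

use actix_web::{web, App, HttpServer};
use sqlx::MySqlPool;

pub async fn run(pool: MySqlPool) -> std::io::Result<()> {
    HttpServer::new(move || {
        App::new()
            // Share the database pool with every handler
            .app_data(web::Data::new(pool.clone()))
            // Register each module's routes
            .configure(crate::mymodule::mymodule_endpoint::configure)
    })
    .bind(("0.0.0.0", 1421))? // 1421 matches the backend port referenced in the debugging section
    .run()
    .await
}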

Step 3: Add to API Documentation

File: src-actix/api_doc.rs

#[derive(OpenApi)]
#[openapi(
    paths(
        // Add your new endpoint
        mymodule::get_resource,
        mymodule::update_resource,
        mymodule::create_resource,
    ),
    components(schemas(
        MyResponse,
        MyRequest,
    )),
    tags(
        (name = "MyModule", description = "My module endpoints")
    )
)]
struct ApiDoc;

Step 4: Write Test

File: tests/mymodule.rs

use actix_web::{test, web, App};

#[actix_web::test]
async fn test_get_resource() {
    let pool = setup_test_db().await;

    let app = test::init_service(
        App::new()
            .app_data(web::Data::new(pool.clone()))
            .configure(mymodule::configure)
    ).await;

    let req = test::TestRequest::get()
        .uri("/api/mymodule/x7J8kLm9N2pQr4Tv")
        .to_request();

    let resp = test::call_service(&app, req).await;
    assert_eq!(resp.status(), 200);
}
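
The test assumes a setup_test_db helper. A minimal sketch of one possible helper, assuming tests run against a disposable local MySQL database named by a TEST_DATABASE_URL environment variable (both the helper and the variable name are illustrative, not part of the repository):

use sqlx::MySqlPool;

/// Connect to a throwaway test database and create the schema the handlers expect.
async fn setup_test_db() -> MySqlPool {
    // e.g. mysql://user:password@localhost/pricing_test
    let url = std::env::var("TEST_DATABASE_URL")
        .expect("TEST_DATABASE_URL must point at a disposable MySQL database");

    let pool = MySqlPool::connect(&url)
        .await
        .expect("failed to connect to test database");

    // Reuse the module's own table initialization so tests see the real schema
    my_module::my_module_db::initialize(&pool)
        .await
        .expect("failed to initialize test schema");

    pool
}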

Adding a New Frontend Component

Step 1: Create Component File

File: src/components/MyComponent.tsx

import React, { useState } from 'react';
import { Button, Input } from '@heroui/react';

interface MyComponentProps {
    title: string;
    onSave?: (data: string) => void;
}

export default function MyComponent({ title, onSave }: MyComponentProps) {
    const [value, setValue] = useState<string>("");

    const handleSave = () => {
        if (onSave) {
            onSave(value);
        }
    };

    return (
        <div className="p-4">
            <h2 className="text-2xl font-bold mb-4">{title}</h2>
            <Input
                label="Value"
                value={value}
                onChange={(e) => setValue(e.target.value)}
            />
            <Button
                color="primary"
                onPress={handleSave}
                className="mt-4"
            >
                Save
            </Button>
        </div>
    );
}

Step 2: Import and Use

import MyComponent from './components/MyComponent';

function ParentComponent() {
    const handleSave = (data: string) => {
        console.log('Saved:', data);
    };

    return <MyComponent title="My Title" onSave={handleSave} />;
}

Adding a New Database Table

Step 1: Create Table in Module's DB File

File: src-actix/{module}/{module}_db.rs

use anyhow::Result;
use sqlx::{Executor, MySqlPool};

pub async fn initialize(pool: &MySqlPool) -> Result<()> {
    pool.execute(
        r#"CREATE TABLE IF NOT EXISTS my_table (
            id BIGINT UNSIGNED AUTO_INCREMENT PRIMARY KEY,
            name VARCHAR(255) NOT NULL,
            description TEXT,
            active BOOLEAN DEFAULT TRUE NOT NULL,
            date DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
            last_modified_date DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP NOT NULL
        )"#
    ).await?;

    Ok(())
}

Step 2: Call Initialize in lib.rs

File: src-actix/lib.rs

// In startup code
my_module::my_module_db::initialize(&pool).await?;

Step 3: Create Data Structures

File: src-actix/{module}/{module}_data.rs

use serde::{Serialize, Deserialize};
use sqlx::FromRow;

#[derive(Debug, Serialize, Deserialize, FromRow, utoipa::ToSchema)]
pub struct MyTable {
    #[serde(serialize_with = "serde_hash::hashids::serialize")]
    pub id: u64,
    pub name: String,
    pub description: Option<String>,
    pub active: bool,
    pub date: chrono::DateTime<chrono::Utc>,
}

#[derive(Debug, Deserialize, utoipa::ToSchema)]
pub struct MyTableRequest {
    pub name: String,
    pub description: Option<String>,
    pub active: Option<bool>,
}

Step 4: Add CRUD Operations

use anyhow::{Context, Result};
use sqlx::MySqlPool;

pub async fn get_all(pool: &MySqlPool) -> Result<Vec<MyTable>> {
    sqlx::query_as("SELECT * FROM my_table")
        .fetch_all(pool)
        .await
        .context("Failed to fetch records")
}

pub async fn create(pool: &MySqlPool, data: &MyTableRequest) -> Result<MyTable> {
    let result = sqlx::query(
        "INSERT INTO my_table (name, description, active) VALUES (?, ?, ?)"
    )
    .bind(&data.name)
    .bind(&data.description)
    .bind(data.active.unwrap_or(true))
    .execute(pool)
    .await?;

    let id = result.last_insert_id();
    get_by_id(pool, id).await
}
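
create finishes by re-reading the inserted row with get_by_id, which is not shown above. A sketch that follows the same pattern as the other helpers (table and struct names mirror the MyTable example; adjust to the module's actual code):

pub async fn get_by_id(pool: &MySqlPool, id: u64) -> Result<MyTable> {
    sqlx::query_as("SELECT * FROM my_table WHERE id = ?")
        .bind(id)
        .fetch_one(pool)
        .await
        .context("Failed to fetch record by id")
}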

Adding a New Context Provider

Step 1: Create Provider File

File: src/components/providers/MyProvider.tsx

import React, { createContext, useContext, useState, useCallback, ReactNode } from 'react';

interface MyContextType {
    data: string[];
    loading: boolean;
    loadData: () => Promise<void>;
    addItem: (item: string) => void;
}

const MyContext = createContext<MyContextType | undefined>(undefined);

export function MyProvider({ children }: { children: ReactNode }) {
    const [data, setData] = useState<string[]>([]);
    const [loading, setLoading] = useState(false);

    // Memoized so consumers can safely list loadData in effect dependencies
    const loadData = useCallback(async () => {
        setLoading(true);
        try {
            const response = await fetch('/api/mydata');
            const result = await response.json();
            setData(result);
        } catch (error) {
            console.error('Failed to load data:', error);
        } finally {
            setLoading(false);
        }
    }, []);

    const addItem = (item: string) => {
        setData(prev => [...prev, item]);
    };

    return (
        <MyContext.Provider value={{ data, loading, loadData, addItem }}>
            {children}
        </MyContext.Provider>
    );
}

export function useMyData() {
    const context = useContext(MyContext);
    if (!context) {
        throw new Error('useMyData must be used within MyProvider');
    }
    return context;
}

Step 2: Add to Provider Stack

File: src/main.tsx

<BrowserRouter>
    <ConnectionProvider>
        <AuthProvider>
            <MyProvider>  {/* Add your provider */}
                <SearchProvider>
                    {/* Rest of providers */}
                </SearchProvider>
            </MyProvider>
        </AuthProvider>
    </ConnectionProvider>
</BrowserRouter>

Step 3: Use in Components

import { useEffect } from 'react';
import { useMyData } from './providers/MyProvider';

function MyComponent() {
    const { data, loading, loadData } = useMyData();

    useEffect(() => {
        loadData();
    }, [loadData]);

    if (loading) return <div>Loading...</div>;

    return (
        <ul>
            {data.map((item, i) => <li key={i}>{item}</li>)}
        </ul>
    );
}

Working with Uploaded Files

Backend: Accept File Upload

use actix_multipart::Multipart;
use actix_web::{post, web, Responder};
use anyhow::anyhow;
use futures_util::StreamExt;
use serde_json::json;
use uuid::Uuid;

#[post("/api/upload")]
async fn upload_file(mut payload: Multipart) -> Result<impl Responder> {
    while let Some(item) = payload.next().await {
        let mut field = item?;

        // Get filename (copied into an owned String so the field can still be read mutably below)
        let content_disp = field.content_disposition();
        let filename = content_disp.get_filename().unwrap_or("file").to_string();

        // Generate unique ID
        let identifier = Uuid::new_v4();
        let extension = std::path::Path::new(&filename)
            .extension()
            .and_then(|e| e.to_str())
            .unwrap_or("");

        // Save file
        let filepath = format!("uploads/{}.{}", identifier, extension);
        let mut bytes = Vec::new();

        while let Some(chunk) = field.next().await {
            bytes.extend_from_slice(&chunk?);
        }

        std::fs::write(&filepath, bytes)?;

        return Ok(web::Json(json!({
            "identifier": identifier.to_string(),
            "filename": filename
        })));
    }

    Err(anyhow!("No file in request"))
}

Frontend: Upload File

async function uploadFile(file: File) {
    const formData = new FormData();
    formData.append('file', file);

    const response = await fetch('/api/upload', {
        method: 'POST',
        body: formData
    });

    const result = await response.json();
    return result.identifier;
}

// Usage
function FileUpload() {
    const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
        const file = e.target.files?.[0];
        if (file) {
            const identifier = await uploadFile(file);
            console.log('Uploaded:', identifier);
        }
    };

    return <input type="file" onChange={handleFileChange} />;
}

Adding Real-time Updates

Backend: Broadcast Updates

use std::sync::OnceLock;

use actix_web::{get, web, Responder};
use actix_web_lab::sse;
use dashmap::DashMap;
use futures_util::StreamExt;
use serde_json::json;
use tokio::sync::broadcast;
use tokio_stream::wrappers::BroadcastStream;

// In lib.rs or module: one broadcast channel per resource ID
static CHANNELS: OnceLock<DashMap<String, broadcast::Sender<String>>> = OnceLock::new();

#[get("/api/myresource/{id}/updates")]
async fn subscribe_updates(id: web::Path<String>) -> Result<impl Responder> {
    let channels = CHANNELS.get_or_init(|| DashMap::new());

    let tx = channels.entry(id.to_string())
        .or_insert_with(|| {
            let (tx, _) = broadcast::channel(100);
            tx
        })
        .clone();

    let rx = tx.subscribe();
    // Skip lagged-receiver errors instead of panicking on them
    let stream = BroadcastStream::new(rx).filter_map(|msg| async move {
        msg.ok()
            .map(|m| Ok::<_, actix_web::Error>(sse::Event::Data(sse::Data::new(m))))
    });

    Ok(sse::Sse::from_stream(stream))
}

// Broadcast function
pub fn broadcast_update(resource_id: &str, event_type: &str, data: &serde_json::Value) {
    if let Some(channels) = CHANNELS.get() {
        if let Some(tx) = channels.get(resource_id) {
            let message = json!({
                "type": event_type,
                "data": data
            }).to_string();

            let _ = tx.send(message);
        }
    }
}
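
A short usage sketch showing where the broadcast fits: after a successful database write, the endpoint handler pushes the changed record to every subscriber of that resource. The update_resource handler and my_module_db::update below are illustrative names that mirror the earlier examples, not existing code:

pub async fn update_resource(
    pool: web::Data<MySqlPool>,
    id: web::Path<String>,
    body: web::Json<MyTableRequest>,
) -> database_common_lib::http_error::Result<impl Responder> {
    let resource_id = decode_single(&id)
        .map_err(|e| database_common_lib::http_error::bad_request(format!("Invalid ID: {}", e)))?;

    // Hypothetical DB helper following the CRUD pattern above
    let updated = my_module_db::update(&pool, resource_id, &body)
        .await
        .map_err(|e| database_common_lib::http_error::bad_request(format!("Update failed: {}", e)))?;

    // Notify SSE subscribers watching this resource
    if let Ok(payload) = serde_json::to_value(&updated) {
        broadcast_update(&id, "updated", &payload);
    }

    Ok(web::Json(updated))
}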

Frontend: Subscribe to Updates

import { useEffect, useState } from 'react';

// Minimal record shape for this example; replace with the real resource type
interface ResourceItem {
    id: string;
    [key: string]: unknown;
}

function MyComponent({ resourceId }: { resourceId: string }) {
    const [data, setData] = useState<ResourceItem[]>([]);

    useEffect(() => {
        const eventSource = new EventSource(`/api/myresource/${resourceId}/updates`);

        eventSource.onmessage = (event) => {
            const update = JSON.parse(event.data);

            setData(prev => {
                switch (update.type) {
                    case 'created':
                        return [...prev, update.data];
                    case 'updated':
                        return prev.map(item =>
                            item.id === update.data.id ? update.data : item
                        );
                    case 'deleted':
                        return prev.filter(item => item.id !== update.data.id);
                    default:
                        return prev;
                }
            });
        };

        return () => eventSource.close();
    }, [resourceId]);

    return <div>{/* Render data */}</div>;
}

Creating a Migration Tool

Step 1: Create Tool File

File: tools/my_migration_tool.rs

use database_common_lib::database_connection::{DatabaseConnectionData, create_pool};
use sqlx::{MySqlPool, Row};
use anyhow::Result;

#[tokio::main]
async fn main() -> Result<()> {
    pretty_env_logger::init();

    log::info!("Starting migration...");

    let data = DatabaseConnectionData::from_file("dev-server.json")?;
    let pool = create_pool(&data).await?;

    perform_migration(&pool).await?;

    log::info!("Migration complete!");
    Ok(())
}

async fn perform_migration(pool: &MySqlPool) -> Result<()> {
    // Your migration logic
    let locations = sqlx::query("SELECT id FROM locations")
        .fetch_all(pool)
        .await?;

    for row in locations {
        let id: u64 = row.get("id");
        log::info!("Migrating location {}", id);

        // Perform migration for this location
    }

    Ok(())
}

Step 2: Register in Cargo.toml

[[example]]
name = "my_migration_tool"
path = "tools/my_migration_tool.rs"

Step 3: Run Migration

# Backup database first!
mysqldump -u user -p pricing > backup.sql

# Run migration
RUST_LOG=debug cargo run --example my_migration_tool

Debugging Common Issues

Backend Won't Start

# Check if port is in use
lsof -i :1421  # Linux/Mac
netstat -ano | findstr :1421  # Windows

# Kill existing process
pkill -f pricing_app  # Linux/Mac

# Check database connection
mysql -h localhost -u user -p pricing -e "SELECT 1"

# Enable debug logging
RUST_LOG=debug cargo run

Frontend Not Loading

# Check if dev server running
lsof -i :3218

# Clear caches
rm -rf node_modules
npm install

# Check for TypeScript errors
npx tsc --noEmit

# Rebuild
npm run build-frontend

Database Errors

# Check table exists
mysql -u user -p pricing -e "SHOW TABLES"

# Check table structure
mysql -u user -p pricing -e "DESCRIBE table_name"

# Enable SQL logging
RUST_LOG=sqlx=debug cargo run

SSE Not Working

// Check connection
const eventSource = new EventSource('/api/inventory/123/updates');

eventSource.addEventListener('open', () => {
    console.log('SSE connected');
});

eventSource.addEventListener('error', (e) => {
    console.error('SSE error:', e);
});

Build Failures

# Clean everything
cargo clean
rm -rf node_modules target/wwwroot
npm install

# Check versions
cargo --version  # Need 1.75+
node --version   # Need 18+

# Try minimal build
cargo build
npm run build-frontend

Last Updated: 2025-11-04
