database_api.py

openclaw openclaw解答 2

我来为您设计一个完整的AI小龙虾养护数据库集成维护方案。

database_api.py-第1张图片-官方openclaw下载|openclaw官网-国内ai小龙虾下载

数据库架构设计

核心数据模型

-- 1. Pond master table: one row per culture pond.
CREATE TABLE ponds (
    pond_id INT PRIMARY KEY AUTO_INCREMENT,
    name VARCHAR(100),
    location VARCHAR(200),
    capacity INT, -- stocking capacity (number of crayfish)
    area DECIMAL(10,2), -- surface area (square meters)
    depth DECIMAL(5,2), -- water depth (meters)
    status ENUM('active', 'maintenance', 'empty')
);
-- 2. Environmental sensor readings: one row per measurement per device.
CREATE TABLE environment_logs (
    log_id BIGINT PRIMARY KEY AUTO_INCREMENT,
    pond_id INT,
    timestamp DATETIME,
    temperature DECIMAL(5,2), -- water temperature (°C)
    ph DECIMAL(4,2), -- pH value
    dissolved_oxygen DECIMAL(5,2), -- dissolved oxygen (mg/L)
    ammonia_nitrogen DECIMAL(5,3), -- ammonia nitrogen (mg/L)
    salinity DECIMAL(5,2), -- salinity (per mille)
    turbidity INT, -- turbidity (NTU)
    device_id VARCHAR(50),
    FOREIGN KEY (pond_id) REFERENCES ponds(pond_id)
);
-- 3. Crayfish growth sampling records, per pond and batch.
CREATE TABLE crayfish_growth (
    growth_id BIGINT PRIMARY KEY AUTO_INCREMENT,
    pond_id INT,
    batch_id VARCHAR(50), -- batch number
    record_date DATE,
    avg_length DECIMAL(6,2), -- average body length (cm)
    avg_weight DECIMAL(6,2), -- average body weight (g)
    molting_rate DECIMAL(5,2), -- molting rate (%)
    survival_rate DECIMAL(5,2), -- survival rate (%)
    sample_size INT, -- number of individuals sampled
    FOREIGN KEY (pond_id) REFERENCES ponds(pond_id)
);
-- 4. Feeding management: one row per feeding event.
CREATE TABLE feeding_records (
    record_id BIGINT PRIMARY KEY AUTO_INCREMENT,
    pond_id INT,
    feed_time DATETIME,
    feed_type VARCHAR(50), -- feed type
    amount DECIMAL(8,2), -- amount fed (kg)
    remaining_amount DECIMAL(8,2), -- leftover feed (kg)
    water_quality_before JSON, -- water quality before feeding
    water_quality_after JSON, -- water quality after feeding
    operator VARCHAR(50),
    FOREIGN KEY (pond_id) REFERENCES ponds(pond_id)
);
-- 5. Disease and health monitoring: one row per health check.
CREATE TABLE health_records (
    record_id BIGINT PRIMARY KEY AUTO_INCREMENT,
    pond_id INT,
    check_date DATE,
    disease_type VARCHAR(100),
    symptoms TEXT,
    infected_count INT,
    treatment_method TEXT,
    medication VARCHAR(200),
    recovery_rate DECIMAL(5,2),
    ai_diagnosis JSON, -- AI diagnosis result
    image_paths JSON, -- paths to images of diseased crayfish
    FOREIGN KEY (pond_id) REFERENCES ponds(pond_id)
);
-- 6. AI model registry: tracks deployed model versions and their accuracy.
CREATE TABLE ai_models (
    model_id INT PRIMARY KEY AUTO_INCREMENT,
    model_name VARCHAR(100),
    model_type ENUM('growth_prediction', 'disease_detection', 'water_quality'),
    version VARCHAR(20),
    accuracy DECIMAL(5,4),
    created_date DATETIME,
    active BOOLEAN DEFAULT true -- whether this version is currently serving
);
-- 7. Maintenance work log: one row per maintenance job performed on a pond.
CREATE TABLE maintenance_logs (
    log_id BIGINT PRIMARY KEY AUTO_INCREMENT,
    pond_id INT,
    maintenance_date DATETIME,
    maintenance_type VARCHAR(50),
    description TEXT,
    duration_minutes INT,
    materials_used JSON,
    cost DECIMAL(10,2),
    technician VARCHAR(50),
    before_images JSON,
    after_images JSON,
    -- Fix: this table referenced ponds by pond_id but, unlike every other
    -- per-pond table in this schema, declared no foreign key. Added for
    -- referential integrity and consistency.
    FOREIGN KEY (pond_id) REFERENCES ponds(pond_id)
);

数据库维护策略

日常维护脚本

-- 1. Daily backup: snapshots today's environment_logs rows into a
--    date-stamped table and records the backup in daily_backups.
--    Fix: the original created a table literally named
--    "environment_logs_backup_YYYYMMDD" — the date placeholder was never
--    substituted. Table names cannot be bound as parameters, so the DDL
--    is built dynamically and run via PREPARE/EXECUTE.
CREATE PROCEDURE backup_daily_data()
BEGIN
    -- Build e.g. environment_logs_backup_20240115 for today's date.
    SET @ddl = CONCAT(
        'CREATE TABLE IF NOT EXISTS environment_logs_backup_',
        DATE_FORMAT(CURDATE(), '%Y%m%d'),
        ' AS SELECT * FROM environment_logs WHERE DATE(timestamp) = CURDATE()');
    PREPARE stmt FROM @ddl;
    EXECUTE stmt;
    DEALLOCATE PREPARE stmt;
    -- Audit trail: record what was backed up and how many rows.
    INSERT INTO daily_backups (backup_date, table_name, row_count)
    SELECT CURDATE(), 'environment_logs', COUNT(*) 
    FROM environment_logs WHERE DATE(timestamp) = CURDATE();
END;
-- 2. Daily retention job: keep 90 days of raw logs in the hot table.
--    Fix: the original DELETEd >90-day rows *before* archiving, so those
--    rows were lost, and then re-archived the same >30-day rows every day,
--    filling the archive with duplicates. Rows are now archived first and
--    only the rows being purged are archived, exactly once.
CREATE EVENT clean_old_data
ON SCHEDULE EVERY 1 DAY
DO
BEGIN
    -- Archive the rows that are about to leave the 90-day window.
    INSERT INTO historical_logs_archive
    SELECT * FROM environment_logs 
    WHERE timestamp < DATE_SUB(NOW(), INTERVAL 90 DAY);
    -- Only now is it safe to purge them from the hot table.
    DELETE FROM environment_logs 
    WHERE timestamp < DATE_SUB(NOW(), INTERVAL 90 DAY);
END;
-- 3. Periodic table maintenance (defragmentation + optimizer statistics).
CREATE PROCEDURE optimize_tables()
BEGIN
    -- Rebuild/defragment the high-churn tables and their indexes.
    OPTIMIZE TABLE environment_logs, feeding_records, health_records;
    -- Refresh index statistics used by the query optimizer.
    ANALYZE TABLE ponds, crayfish_growth, maintenance_logs;
END;

监控脚本

-- 1. Water-quality anomaly scan: inserts into anomalies_detected every
--    reading from the last hour that breaches a fixed threshold —
--    water temperature above 30 °C, or pH below 6.5.
CREATE PROCEDURE check_water_quality_anomaly()
BEGIN
    INSERT INTO anomalies_detected (pond_id, anomaly_type, detected_time, value, threshold)
    -- High-temperature branch (threshold: 30 °C).
    SELECT 
        pond_id,
        'temperature_high' as anomaly_type,
        timestamp,
        temperature,
        30 as threshold
    FROM environment_logs 
    WHERE temperature > 30 
    AND timestamp > DATE_SUB(NOW(), INTERVAL 1 HOUR)
    UNION ALL
    -- Low-pH branch (threshold: 6.5).
    SELECT 
        pond_id,
        'ph_low' as anomaly_type,
        timestamp,
        ph,
        6.5 as threshold
    FROM environment_logs 
    WHERE ph < 6.5 
    AND timestamp > DATE_SUB(NOW(), INTERVAL 1 HOUR);
END;
-- 2. Growth anomaly alert: flags growth records from the last 7 days
--    whose average weight dropped by more than 10% versus the previous
--    record for the same pond.
--    Fix: the original filtered with `HAVING growth_rate < -10`, but MySQL
--    does not allow window functions (or aliases derived from them) in
--    HAVING; the window computation is wrapped in a derived table and
--    filtered in the outer WHERE instead.
CREATE PROCEDURE growth_anomaly_alert()
BEGIN
    SELECT *
    FROM (
        SELECT 
            g.pond_id,
            g.record_date,
            g.avg_weight,
            LAG(g.avg_weight) OVER (PARTITION BY g.pond_id ORDER BY g.record_date) AS prev_weight,
            (g.avg_weight - LAG(g.avg_weight) OVER (PARTITION BY g.pond_id ORDER BY g.record_date)) / 
            LAG(g.avg_weight) OVER (PARTITION BY g.pond_id ORDER BY g.record_date) * 100 AS growth_rate
        FROM crayfish_growth g
        WHERE g.record_date >= DATE_SUB(CURDATE(), INTERVAL 7 DAY)
    ) t
    WHERE t.growth_rate < -10; -- weight dropped by more than 10%
END;

API接口设计(RESTful)

# Flask application setup.
# Fix: the original used Flask, request and jsonify (and, further down,
# json) without ever importing them, which raises NameError at runtime.
from flask import Flask, request, jsonify
from flask_sqlalchemy import SQLAlchemy
import json
import pandas as pd
from datetime import datetime, timedelta

app = Flask(__name__)
# NOTE(review): credentials are placeholders — load from config/env in production.
app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql://user:password@localhost/crayfish_db'
db = SQLAlchemy(app)
class Pond(db.Model):
    """ORM model for the `ponds` table (maps a subset of its columns)."""
    __tablename__ = 'ponds'
    pond_id = db.Column(db.Integer, primary_key=True)  # surrogate key
    name = db.Column(db.String(100))  # human-readable pond name
    status = db.Column(db.String(20))  # per DDL enum: active / maintenance / empty
@app.route('/api/environment', methods=['GET'])
def get_environment_data():
    """Return environment_logs rows for one pond over a look-back window.

    Query params:
        pond_id: id of the pond to query.
        hours: look-back window in hours (default 24).

    Returns:
        JSON array of log rows, newest first.
    """
    # Local import keeps this route self-contained; sqlalchemy ships with
    # flask_sqlalchemy, which the module already depends on.
    from sqlalchemy import text
    pond_id = request.args.get('pond_id', type=int)
    # Fix: `hours` arrived as a raw string; cast so the INTERVAL bind is numeric.
    hours = request.args.get('hours', 24, type=int)
    # Fix: SQLAlchemy 1.4+ requires textual SQL to be wrapped in text();
    # parameters stay bound (never string-interpolated) to avoid injection.
    query = text("""
    SELECT * FROM environment_logs 
    WHERE pond_id = :pond_id 
    AND timestamp > DATE_SUB(NOW(), INTERVAL :hours HOUR)
    ORDER BY timestamp DESC
    """)
    result = db.session.execute(query, {'pond_id': pond_id, 'hours': hours})
    # Fix: dict(row) is deprecated for Row objects; use row._mapping.
    return jsonify([dict(row._mapping) for row in result])
@app.route('/api/feeding/optimize', methods=['POST'])
def optimize_feeding():
    """Return an AI-optimized feeding recommendation for one pond."""
    payload = request.json
    target_pond = payload['pond_id']
    # Collect the model inputs: latest water quality and current growth stage.
    latest_quality = get_recent_water_quality(target_pond)
    stage = get_growth_stage(target_pond)
    # Ask the AI model for the optimal feeding plan; season is encoded
    # as the current calendar month.
    recommendation = ai_model.predict(
        water_quality=latest_quality,
        growth_stage=stage,
        temperature=payload.get('temperature'),
        season=datetime.now().month,
    )
    return jsonify(recommendation)
@app.route('/api/maintenance/schedule', methods=['GET'])
def get_maintenance_schedule():
    """List the pending (not yet completed) maintenance tasks for a pond."""
    pond_id = request.args.get('pond_id')
    pending = (
        db.session.query(MaintenanceSchedule)
        .filter_by(pond_id=pond_id, completed=False)
        .order_by('due_date')
        .all()
    )
    tasks = []
    for entry in pending:
        tasks.append({
            'task': entry.task_name,
            'due_date': entry.due_date.strftime('%Y-%m-%d'),
            'frequency': entry.frequency_days,
            'last_completed': entry.last_completed,
        })
    return jsonify(tasks)

维护计划表

| 维护任务 | 频率 | 执行时间 | 负责人 | 检查项 |
| --- | --- | --- | --- | --- |
| 数据库备份 | 每日 | 02:00 | 系统 | 完整备份+增量备份 |
| 数据清理 | 每周 | 周日03:00 | 系统 | 清理90天前日志 |
| 索引重建 | 每月 | 第一个周日04:00 | 系统 | 主要业务表索引 |
| 性能监控 | 实时 | 持续 | 监控系统 | 查询响应时间、连接数 |
| 安全审计 | 每月 | 15日09:00 | 管理员 | 用户权限、访问日志 |
| 数据一致性检查 | 每日 | 06:00 | 系统 | 外键约束、数据完整性 |

灾难恢复方案

-- 1. Restore-from-backup procedure.
-- NOTE(review): `SOURCE` is a mysql *client* command, not server-side SQL,
-- so it cannot execute inside a stored procedure, and the `${backup_date}`
-- shell-style substitution is never expanded by MySQL. Treat the restore
-- step as pseudocode: it must be run by an external script
-- (e.g. `mysql crayfish_db < /backup/...`). Verify before relying on this.
CREATE PROCEDURE restore_from_backup(IN backup_date DATE)
BEGIN
    -- Block writes while the restore is in progress.
    SET GLOBAL read_only = ON;
    -- Restore the dated full backup (client-side only — see NOTE above).
    SOURCE /backup/crayfish_db_${backup_date}.sql;
    -- Replay incremental changes recorded since the backup.
    CALL apply_binlog_since(backup_date);
    -- Re-enable writes.
    SET GLOBAL read_only = OFF;
END;
-- 2. Replication setup.
-- NOTE(review): despite the original "primary configuration" label,
-- CHANGE MASTER TO is executed on the *replica* to point it at the primary.
CHANGE MASTER TO
    MASTER_HOST='master_host',
    MASTER_USER='repl',
    MASTER_PASSWORD='password',
    MASTER_LOG_FILE='mysql-bin.000001',
    MASTER_LOG_POS=107;
-- 3. Alerting: one row per raised monitoring/system alert.
CREATE TABLE system_alerts (
    alert_id BIGINT PRIMARY KEY AUTO_INCREMENT,
    alert_type VARCHAR(50),
    severity ENUM('critical', 'warning', 'info'),
    message TEXT,
    alert_time DATETIME, -- when the alert was raised
    resolved BOOLEAN DEFAULT FALSE,
    resolved_time DATETIME -- set when the alert is resolved
);

性能优化建议

  1. 查询优化
    -- Composite indexes matching the hot query pattern (pond + time range).
    CREATE INDEX idx_env_pond_time ON environment_logs(pond_id, timestamp);
    CREATE INDEX idx_growth_pond_date ON crayfish_growth(pond_id, record_date);

-- 分区表(按时间)
ALTER TABLE environment_logs
PARTITION BY RANGE (YEAR(timestamp)*100 + MONTH(timestamp)) (
    PARTITION p202401 VALUES LESS THAN (202402),
    PARTITION p202402 VALUES LESS THAN (202403)
);


2. **缓存策略**
```python
# Redis cache configuration.
# Fixes vs. the original:
#  - @lru_cache removed: an in-process LRU would pin the first result per
#    pond_id forever, defeating the 1-hour Redis TTL entirely.
#  - json.dumps cannot serialize an ORM object; the row is converted to a
#    plain dict before caching.
#  - db.query(...) corrected to db.session.query(...) (Flask-SQLAlchemy API).
import json

import redis

cache = redis.Redis(host='localhost', port=6379, db=0)

def get_pond_status(pond_id):
    """Return the status of a pond as a dict, served from Redis when warm.

    Args:
        pond_id: primary key of the pond.

    Returns:
        dict with pond_id / name / status, or None if the pond is unknown.
    """
    key = f'pond_status:{pond_id}'
    # 1) Try the cache first.
    cached = cache.get(key)
    if cached:
        return json.loads(cached)
    # 2) Fall back to the database.
    pond = db.session.query(Pond).filter_by(pond_id=pond_id).first()
    if pond is None:
        return None
    result = {
        'pond_id': pond.pond_id,
        'name': pond.name,
        'status': pond.status,
    }
    # 3) Populate the cache with a 1-hour expiry.
    cache.setex(key, 3600, json.dumps(result))
    return result

这个数据库系统可以支持:

  • 实时监控小龙虾养殖环境
  • AI驱动的决策支持
  • 自动化维护提醒
  • 数据分析和报表生成
  • 多用户权限管理

需要根据实际养殖规模和需求调整具体参数。

标签: database api

抱歉,评论功能暂时关闭!