A modular multi-model integration scheme that lets your plugin switch flexibly between DeepSeek, OpenAI, Tongyi Qianwen (Qwen), Kimi, and other models

🔌 Multi-Model Integration Architecture

Overall Architecture

┌─────────────────────────────────────────────┐
│          Plugin Core (Plugin.php)           │
│  Config management, routing, access control │
└─────────────────────────────────────────────┘
                      │
                      ▼
┌─────────────────────────────────────────────┐
│         Model Adapter Layer                 │
├───────────────┬───────────────┬─────────────┤
│ DeepSeek      │ OpenAI        │ Bailian     │
│ Qwen          │ Kimi          │ Custom      │
└───────────────┴───────────────┴─────────────┘
                      │
                      ▼
┌─────────────────────────────────────────────┐
│          Unified API Call Interface         │
│  Standard request in, unified response out  │
└─────────────────────────────────────────────┘
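
In code terms, every caller talks to this bottom layer in the same way, no matter which provider is selected. A minimal sketch of the intended call flow, using the ModelFactory and adapter classes defined later in this post (an illustration, not a drop-in snippet):

// The caller never needs to know which provider sits behind the adapter
$config  = Options::alloc()->plugin('AIDeepSeek');   // plugin settings
$adapter = ModelFactory::create($config);            // provider-specific adapter

$reply = $adapter->chat(
    [['role' => 'user', 'content' => 'Summarize this post in one sentence.']],
    ['temperature' => 0.7, 'max_tokens' => 500]
);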

📝 Multi-Model Integration: Code Implementation

1. Extending the Configuration - Plugin.php

Add a model provider selector and the related parameters to the existing configuration form:

public static function config(Form $form)
{
    // Model provider selection
    $provider = new \Typecho\Widget\Helper\Form\Element\Select(
        'provider',
        [
            'deepseek' => 'DeepSeek',
            'openai' => 'OpenAI (ChatGPT)',
            'aliyun' => '阿里云百炼 (通义千问/DeepSeek)',
            'moonshot' => 'Moonshot Kimi',
            'custom' => '自定义 OpenAI 兼容接口'
        ],
        'deepseek',
        _t('AI 模型提供商'),
        _t('选择你想要使用的 AI 服务商')
    );
    $form->addInput($provider);
    
    // DeepSeek settings
    $deepseekApiKey = new Text(
        'deepseek_api_key',
        null,
        null,
        _t('DeepSeek API 密钥'),
        _t('访问 https://platform.deepseek.com/ 获取')
    );
    $form->addInput($deepseekApiKey);
    
    // DeepSeek model selection
    $deepseekModel = new \Typecho\Widget\Helper\Form\Element\Select(
        'deepseek_model',
        [
            'deepseek-chat' => 'DeepSeek Chat (通用对话)',
            'deepseek-coder' => 'DeepSeek Coder (代码助手)',
            'deepseek-reasoner' => 'DeepSeek Reasoner (推理模型)'
        ],
        'deepseek-chat',
        _t('DeepSeek 模型'),
        _t('选择具体使用的 DeepSeek 模型')
    );
    $form->addInput($deepseekModel);
    
    // OpenAI settings
    $openaiApiKey = new Text(
        'openai_api_key',
        null,
        null,
        _t('OpenAI API 密钥'),
        _t('访问 https://platform.openai.com/ 获取')
    );
    $form->addInput($openaiApiKey);
    
    $openaiModel = new \Typecho\Widget\Helper\Form\Element\Select(
        'openai_model',
        [
            'gpt-4o' => 'GPT-4o',
            'gpt-4-turbo' => 'GPT-4 Turbo',
            'gpt-3.5-turbo' => 'GPT-3.5 Turbo'
        ],
        'gpt-3.5-turbo',
        _t('OpenAI 模型'),
        _t('选择具体使用的 OpenAI 模型')
    );
    $form->addInput($openaiModel);
    
    // Aliyun Bailian (DashScope) settings
    $aliyunApiKey = new Text(
        'aliyun_api_key',
        null,
        null,
        _t('阿里云百炼 API Key'),
        _t('在阿里云百炼控制台创建')
    );
    $form->addInput($aliyunApiKey);
    
    $aliyunModel = new \Typecho\Widget\Helper\Form\Element\Select(
        'aliyun_model',
        [
            'qwen-max' => '通义千问 Max',
            'qwen-plus' => '通义千问 Plus',
            'qwen-turbo' => '通义千问 Turbo',
            'deepseek-r1' => 'DeepSeek-R1 满血版'
        ],
        'qwen-max',
        _t('阿里云模型'),
        _t('选择阿里云百炼提供的模型')
    );
    $form->addInput($aliyunModel);
    
    $aliyunWorkspaceId = new Text(
        'aliyun_workspace_id',
        null,
        null,
        _t('阿里云业务空间 ID'),
        _t('在阿里云百炼控制台获取')
    );
    $form->addInput($aliyunWorkspaceId);
    
    // Kimi (Moonshot) settings
    $moonshotApiKey = new Text(
        'moonshot_api_key',
        null,
        null,
        _t('Moonshot API 密钥'),
        _t('访问 https://platform.moonshot.cn/ 获取')
    );
    $form->addInput($moonshotApiKey);
    
    $moonshotModel = new \Typecho\Widget\Helper\Form\Element\Select(
        'moonshot_model',
        [
            'moonshot-v1-8k' => 'Moonshot v1 8K',
            'moonshot-v1-32k' => 'Moonshot v1 32K',
            'moonshot-v1-128k' => 'Moonshot v1 128K'
        ],
        'moonshot-v1-8k',
        _t('Moonshot 模型'),
        _t('选择上下文窗口大小')
    );
    $form->addInput($moonshotModel);
    
    // Custom OpenAI-compatible endpoint
    $customApiUrl = new Text(
        'custom_api_url',
        null,
        null,
        _t('自定义 API 地址'),
        _t('例如:https://api.openai.com/v1/chat/completions')
    );
    $form->addInput($customApiUrl);
    
    $customApiKey = new Text(
        'custom_api_key',
        null,
        null,
        _t('自定义 API 密钥'),
        _t('根据服务商提供的密钥')
    );
    $form->addInput($customApiKey);
    
    $customModel = new Text(
        'custom_model',
        null,
        null,
        _t('自定义模型名称'),
        _t('输入服务商提供的模型标识')
    );
    $form->addInput($customModel);
    
    // Common parameters
    $temperature = new Text(
        'temperature',
        null,
        '0.7',
        _t('温度参数 (0-2)'),
        _t('值越高回复越随机,越低越稳定')
    );
    $form->addInput($temperature);
    
    $maxTokens = new Text(
        'max_tokens',
        null,
        '1000',
        _t('最大输出长度'),
        _t('控制 AI 回复的字数')
    );
    $form->addInput($maxTokens);
}

2. Model Adapter Layer - Adapters.php

Create a unified model adapter interface along with the concrete implementation classes:

<?php
namespace TypechoPlugin\AIDeepSeek;

/**
 * AI model adapter interface
 */
interface AIModelAdapter
{
    /**
     * Send a chat request
     * @param array $messages Message array (OpenAI-style role/content entries)
     * @param array $params   Extra parameters (temperature, max_tokens, ...)
     * @return string Reply content
     */
    public function chat($messages, $params = []);
    
    /**
     * Get the adapter's display name
     */
    public function getName();
}

/**
 * DeepSeek adapter
 */
class DeepSeekAdapter implements AIModelAdapter
{
    private $apiKey;
    private $model;
    private $apiUrl = 'https://api.deepseek.com/v1/chat/completions';
    
    public function __construct($apiKey, $model = 'deepseek-chat')
    {
        $this->apiKey = $apiKey;
        $this->model = $model;
    }
    
    public function chat($messages, $params = [])
    {
        $data = [
            'model' => $this->model,
            'messages' => $messages,
            'temperature' => $params['temperature'] ?? 0.7,
            'max_tokens' => $params['max_tokens'] ?? 1000,
            'stream' => false
        ];
        
        return $this->callAPI($this->apiUrl, $data);
    }
    
    private function callAPI($url, $data)
    {
        $ch = curl_init($url);
        curl_setopt($ch, CURLOPT_HTTPHEADER, [
            'Content-Type: application/json',
            'Authorization: Bearer ' . $this->apiKey
        ]);
        curl_setopt($ch, CURLOPT_POST, true);
        curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($data));
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($ch, CURLOPT_TIMEOUT, 30);
        
        $response = curl_exec($ch);
        $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
        curl_close($ch);
        
        if ($httpCode !== 200) {
            throw new \Exception('DeepSeek API 错误: HTTP ' . $httpCode);
        }
        
        $result = json_decode($response, true);
        return $result['choices'][0]['message']['content'] ?? '';
    }
    
    public function getName()
    {
        return 'DeepSeek (' . $this->model . ')';
    }
}

/**
 * OpenAI adapter
 */
class OpenAIAdapter implements AIModelAdapter
{
    private $apiKey;
    private $model;
    private $apiUrl = 'https://api.openai.com/v1/chat/completions';
    
    public function __construct($apiKey, $model = 'gpt-3.5-turbo')
    {
        $this->apiKey = $apiKey;
        $this->model = $model;
    }
    
    public function chat($messages, $params = [])
    {
        $data = [
            'model' => $this->model,
            'messages' => $messages,
            'temperature' => $params['temperature'] ?? 0.7,
            'max_tokens' => $params['max_tokens'] ?? 1000
        ];
        
        $ch = curl_init($this->apiUrl);
        curl_setopt($ch, CURLOPT_HTTPHEADER, [
            'Content-Type: application/json',
            'Authorization: Bearer ' . $this->apiKey
        ]);
        curl_setopt($ch, CURLOPT_POST, true);
        curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($data));
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        
        $response = curl_exec($ch);
        $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
        curl_close($ch);
        
        if ($httpCode !== 200) {
            throw new \Exception('OpenAI API 错误: HTTP ' . $httpCode);
        }
        
        $result = json_decode($response, true);
        return $result['choices'][0]['message']['content'] ?? '';
    }
    
    public function getName()
    {
        return 'OpenAI (' . $this->model . ')';
    }
}

/**
 * Aliyun Bailian (DashScope) adapter
 */
class AliyunBailianAdapter implements AIModelAdapter
{
    private $apiKey;
    private $model;
    private $workspaceId;
    private $apiUrl = 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation';
    
    public function __construct($apiKey, $model, $workspaceId = '')
    {
        $this->apiKey = $apiKey;
        $this->model = $model;
        $this->workspaceId = $workspaceId;
    }
    
    public function chat($messages, $params = [])
    {
        // DashScope's text-generation endpoint accepts the OpenAI-style
        // messages array directly via input.messages (result_format = message)
        
        $data = [
            'model' => $this->model,
            'input' => [
                'messages' => $messages
            ],
            'parameters' => [
                'temperature' => $params['temperature'] ?? 0.7,
                'max_tokens' => $params['max_tokens'] ?? 1000,
                'result_format' => 'message'
            ]
        ];
        
        $headers = [
            'Content-Type: application/json',
            'Authorization: Bearer ' . $this->apiKey
        ];
        
        if ($this->workspaceId) {
            $headers[] = 'X-DashScope-Workspace: ' . $this->workspaceId;
        }
        
        $ch = curl_init($this->apiUrl);
        curl_setopt($ch, CURLOPT_HTTPHEADER, $headers);
        curl_setopt($ch, CURLOPT_POST, true);
        curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($data));
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        
        $response = curl_exec($ch);
        $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
        curl_close($ch);
        
        if ($httpCode !== 200) {
            throw new \Exception('阿里云百炼 API 错误: HTTP ' . $httpCode);
        }
        
        $result = json_decode($response, true);
        return $result['output']['choices'][0]['message']['content'] ?? '';
    }
    
    public function getName()
    {
        return '阿里云百炼 (' . $this->model . ')';
    }
}

/**
 * Moonshot (Kimi) adapter
 */
class MoonshotAdapter implements AIModelAdapter
{
    private $apiKey;
    private $model;
    private $apiUrl = 'https://api.moonshot.cn/v1/chat/completions';
    
    public function __construct($apiKey, $model = 'moonshot-v1-8k')
    {
        $this->apiKey = $apiKey;
        $this->model = $model;
    }
    
    public function chat($messages, $params = [])
    {
        $data = [
            'model' => $this->model,
            'messages' => $messages,
            'temperature' => $params['temperature'] ?? 0.7,
            'max_tokens' => $params['max_tokens'] ?? 1000
        ];
        
        $ch = curl_init($this->apiUrl);
        curl_setopt($ch, CURLOPT_HTTPHEADER, [
            'Content-Type: application/json',
            'Authorization: Bearer ' . $this->apiKey
        ]);
        curl_setopt($ch, CURLOPT_POST, true);
        curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($data));
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        
        $response = curl_exec($ch);
        $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
        curl_close($ch);
        
        if ($httpCode !== 200) {
            throw new \Exception('Moonshot API 错误: HTTP ' . $httpCode);
        }
        
        $result = json_decode($response, true);
        return $result['choices'][0]['message']['content'] ?? '';
    }
    
    public function getName()
    {
        return 'Moonshot Kimi (' . $this->model . ')';
    }
}

/**
 * Custom OpenAI-compatible adapter
 */
class CustomOpenAIAdapter implements AIModelAdapter
{
    private $apiKey;
    private $model;
    private $apiUrl;
    
    public function __construct($apiUrl, $apiKey, $model)
    {
        $this->apiUrl = rtrim($apiUrl, '/');
        $this->apiKey = $apiKey;
        $this->model = $model;
    }
    
    public function chat($messages, $params = [])
    {
        $data = [
            'model' => $this->model,
            'messages' => $messages,
            'temperature' => $params['temperature'] ?? 0.7,
            'max_tokens' => $params['max_tokens'] ?? 1000
        ];
        
        $ch = curl_init($this->apiUrl);
        curl_setopt($ch, CURLOPT_HTTPHEADER, [
            'Content-Type: application/json',
            'Authorization: Bearer ' . $this->apiKey
        ]);
        curl_setopt($ch, CURLOPT_POST, true);
        curl_setopt($ch, CURLOPT_POSTFIELDS, json_encode($data));
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        
        $response = curl_exec($ch);
        $httpCode = curl_getinfo($ch, CURLINFO_HTTP_CODE);
        curl_close($ch);
        
        if ($httpCode !== 200) {
            throw new \Exception('自定义 API 错误: HTTP ' . $httpCode);
        }
        
        $result = json_decode($response, true);
        return $result['choices'][0]['message']['content'] ?? '';
    }
    
    public function getName()
    {
        return '自定义 (' . $this->model . ')';
    }
}

/**
 * Model factory: creates the matching adapter from the plugin configuration
 */
class ModelFactory
{
    public static function create($config)
    {
        // Only the provider choice matters here; temperature / max_tokens
        // are passed to chat() by the caller (see Action.php below)
        $provider = $config->provider ?? 'deepseek';
        
        switch ($provider) {
            case 'deepseek':
                if (empty($config->deepseek_api_key)) {
                    throw new \Exception('请配置 DeepSeek API 密钥');
                }
                $adapter = new DeepSeekAdapter(
                    $config->deepseek_api_key,
                    $config->deepseek_model ?? 'deepseek-chat'
                );
                break;
                
            case 'openai':
                if (empty($config->openai_api_key)) {
                    throw new \Exception('请配置 OpenAI API 密钥');
                }
                $adapter = new OpenAIAdapter(
                    $config->openai_api_key,
                    $config->openai_model ?? 'gpt-3.5-turbo'
                );
                break;
                
            case 'aliyun':
                if (empty($config->aliyun_api_key)) {
                    throw new \Exception('请配置阿里云百炼 API 密钥');
                }
                $adapter = new AliyunBailianAdapter(
                    $config->aliyun_api_key,
                    $config->aliyun_model ?? 'qwen-max',
                    $config->aliyun_workspace_id ?? ''
                );
                break;
                
            case 'moonshot':
                if (empty($config->moonshot_api_key)) {
                    throw new \Exception('请配置 Moonshot API 密钥');
                }
                $adapter = new MoonshotAdapter(
                    $config->moonshot_api_key,
                    $config->moonshot_model ?? 'moonshot-v1-8k'
                );
                break;
                
            case 'custom':
                if (empty($config->custom_api_url) || empty($config->custom_api_key)) {
                    throw new \Exception('请配置自定义 API 地址和密钥');
                }
                $adapter = new CustomOpenAIAdapter(
                    $config->custom_api_url,
                    $config->custom_api_key,
                    $config->custom_model ?? 'custom-model'
                );
                break;
                
            default:
                throw new \Exception('不支持的模型提供商: ' . $provider);
        }
        
        return $adapter;
    }
}

3. Updating Action.php to Use the Model Factory

Modify the existing ask method so that it creates the adapter through the model factory:

public function ask()
{
    try {
        // 1. Load the plugin configuration
        $config = Options::alloc()->plugin('AIDeepSeek');
        
        // 2. Security checks (unchanged)
        $this->securityCheck($config);
        
        // 3. Read the request input
        $question = $this->request->get('question');
        $context = $this->request->get('context');
        $title = $this->request->get('title');
        
        // 4. Build the message array
        $messages = $this->buildMessages($config, $title, $context, $question);
        
        // 5. Create the model adapter and call it
        $adapter = ModelFactory::create($config);
        $response = $adapter->chat($messages, [
            // Cast config values (Typecho stores them as strings) to numeric types
            'temperature' => (float)($config->temperature ?? 0.7),
            'max_tokens' => (int)($config->max_tokens ?? 1000)
        ]);
        
        // 6. Log which model was used (optional, useful for debugging)
        $this->logModelUsage($adapter->getName());
        
        // 7. Return the result
        $this->response->throwJson(['success' => true, 'data' => $response]);
        
    } catch (\Exception $e) {
        $this->response->throwJson(['success' => false, 'message' => $e->getMessage()]);
    }
}

private function logModelUsage($modelName)
{
    // Could be written to a log or database to track per-model usage
    if (defined('__TYPECHO_DEBUG__') && __TYPECHO_DEBUG__) {
        error_log('AI 助手使用模型: ' . $modelName);
    }
}
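
The ask() method above calls a buildMessages() helper inherited from the original single-model plugin, which is not reproduced in this post. A minimal sketch of what it might look like, assuming a simple system prompt plus the post title and excerpt as context (the method body and prompt wording are illustrative, not the plugin's actual implementation):

/**
 * Illustrative sketch only -- assembles the OpenAI-style messages array
 * that every adapter's chat() method expects.
 */
private function buildMessages($config, $title, $context, $question)
{
    $system = 'You are an AI reading assistant for a blog. '
            . 'Answer the question based on the article content.';

    // Fold the article title and excerpt into a single user message
    $user = "Article title: {$title}\n"
          . "Article excerpt: {$context}\n"
          . "Question: {$question}";

    return [
        ['role' => 'system', 'content' => $system],
        ['role' => 'user', 'content' => $user]
    ];
}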

🎯 Advantages of Multi-Model Support

Flexibility

Users can pick the model that best fits their needs, for example:

  • DeepSeek: cost-effective, well suited to everyday conversation
  • OpenAI: strong all-round capability, but at a higher cost
  • Aliyun Bailian: stable access from mainland China, offers Tongyi Qianwen (Qwen) and DeepSeek-R1
  • Moonshot Kimi: strong long-context handling

Cost Control

Users can choose models at different price points to fit their budget; DeepSeek is noticeably cheaper than OpenAI.

Regional Optimization

Users in mainland China can choose Aliyun Bailian to avoid network latency issues.

Extensibility

To integrate new models in the future (such as Claude or Gemini), you only need to add a corresponding adapter class, with no changes to the core logic; a sketch of what that involves follows below.
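
As a rough illustration of that claim, here is what wiring in a new provider could look like, assuming it exposes an OpenAI-compatible chat/completions endpoint. The class name, endpoint URL, and model IDs below are placeholders, not a real integration:

/**
 * Hypothetical example of a new provider adapter. If the provider is
 * OpenAI-compatible, the existing CustomOpenAIAdapter can do the actual
 * HTTP work; otherwise implement AIModelAdapter from scratch.
 */
class ExampleProviderAdapter implements AIModelAdapter
{
    private $inner;
    private $model;

    public function __construct($apiKey, $model)
    {
        $this->model = $model;
        // Placeholder endpoint -- replace with the provider's real URL
        $this->inner = new CustomOpenAIAdapter(
            'https://api.example-provider.com/v1/chat/completions',
            $apiKey,
            $model
        );
    }

    public function chat($messages, $params = [])
    {
        return $this->inner->chat($messages, $params);
    }

    public function getName()
    {
        return 'ExampleProvider (' . $this->model . ')';
    }
}

// Plus one extra branch in ModelFactory::create(), for example:
//
//     case 'example':
//         $adapter = new ExampleProviderAdapter(
//             $config->example_api_key,
//             $config->example_model ?? 'example-model-v1'
//         );
//         break;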

📋 Installation and Usage Suggestions

  1. Plugin settings page: group the options by provider and use JavaScript to show/hide each provider's fields, so the form does not become cluttered
  2. Default configuration: DeepSeek is a sensible default, since it is friendly to users in mainland China and cost-effective
  3. Test mode: add a test button so users can verify that the currently configured API works (see the sketch after this list)
  4. Error messages: when the selected provider has no API key configured, show a clear error message
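
Point 3 could be implemented as an extra endpoint in Action.php. A minimal sketch, reusing ModelFactory and the same response pattern as ask(); the method name is hypothetical, and the route would still need to be registered the same way as the existing ask action:

/**
 * Hypothetical "test connection" endpoint -- a sketch, not part of the
 * code above. Sends a tiny prompt through the currently configured
 * adapter and reports success or failure.
 */
public function testConnection()
{
    try {
        $config  = Options::alloc()->plugin('AIDeepSeek');
        $adapter = ModelFactory::create($config);

        // A one-word prompt keeps the test cheap in tokens
        $reply = $adapter->chat(
            [['role' => 'user', 'content' => 'ping']],
            ['max_tokens' => 5]
        );

        $this->response->throwJson([
            'success' => true,
            'model'   => $adapter->getName(),
            'data'    => $reply
        ]);
    } catch (\Exception $e) {
        $this->response->throwJson([
            'success' => false,
            'message' => $e->getMessage()
        ]);
    }
}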

With this design, your plugin can plug into multiple AI models seamlessly, meet the needs of different users, and keep the code well structured and extensible.
