模型详情 (POST /api/show)

模型详情接口返回指定模型的完整信息,包括模板、参数、系统提示等配置。

基本用法

curl http://localhost:11434/api/show -d '{
  "name": "llama3.2"
}'

响应:

{
  "modelfile": "# Modelfile generated by \"ollama show\"\n...",
  "parameters": "stop \"<|start_header_id|>\"\nstop \"<|end_header_id|>\"\nstop \"<|eot_id|>\"",
  "template": "{{ .System }}<|start_header_id|>user<|end_header_id|>\n\n{{ .Prompt }}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
  "details": {
    "format": "gguf",
    "family": "llama",
    "parameter_size": "3B",
    "quantization_level": "Q4_K_M"
  },
  "model_info": {
    "general.architecture": "llama",
    "general.parameter_count": 3210000000,
    "llama.context_length": 131072,
    "llama.embedding_length": 3072
  }
}

响应字段

| 字段 | 说明 |
| --- | --- |
| modelfile | 模型的 Modelfile 源码 |
| parameters | 模型参数设置 |
| template | 提示模板 |
| details | 模型基本信息 |
| model_info | 模型详细参数 |

modelfile 字段

完整的 Modelfile 内容,可以用来重建或修改模型:

FROM ./model.gguf
TEMPLATE """{{ .System }}..."""
PARAMETER stop "<|start_header_id|>"
PARAMETER temperature 1.0
...

parameters 字段

模型的默认参数,以文本形式呈现:

stop "<|start_header_id|>"
stop "<|end_header_id|>"
stop "<|eot_id|>"
temperature 1.0

template 字段

模型使用的提示模板:

{{ .System }}<|start_header_id|>user<|end_header_id|>

{{ .Prompt }}<|eot_id|><|start_header_id|>assistant<|end_header_id|>

model_info 字段

模型的底层参数:

| 字段 | 说明 |
| --- | --- |
| general.architecture | 模型架构 |
| general.parameter_count | 参数数量 |
| llama.context_length | 上下文长度 |
| llama.embedding_length | 嵌入维度 |

代码示例

Python

import requests

def get_model_details(model_name):
    """Fetch full metadata for *model_name* from the local Ollama server.

    Issues ``POST /api/show`` and returns the decoded JSON payload
    (modelfile, parameters, template, details, model_info, ...).
    """
    payload = {"name": model_name}
    resp = requests.post("http://localhost:11434/api/show", json=payload)
    return resp.json()

# Example: fetch details for llama3.2, then print the prompt template, the
# raw parameter string, and the context length from model_info.
details = get_model_details("llama3.2")

print(f"模板:\n{details['template']}")
print(f"\n参数:\n{details['parameters']}")
print(f"\n上下文长度: {details['model_info'].get('llama.context_length', 'N/A')}")

获取模型参数

def parse_parameters(param_string):
    """Parse Ollama's plain-text parameter dump into a dict.

    ``stop`` lines are collected (unquoted) into a list under the ``stop``
    key; every other ``key value`` line becomes a plain string entry.
    Lines with no space separator are skipped.
    """
    parsed = {}
    for raw_line in param_string.strip().split('\n'):
        head, sep, tail = raw_line.partition(' ')
        if not sep:
            continue  # no "key value" separator on this line
        if head == 'stop':
            parsed.setdefault('stop', []).append(tail.strip('"'))
        else:
            parsed[head] = tail
    return parsed

# Example: parse the raw parameter string into a dict and inspect it.
details = get_model_details("llama3.2")
params = parse_parameters(details.get("parameters", ""))
print(f"停止词: {params.get('stop', [])}")
print(f"温度: {params.get('temperature', 'default')}")

查看模型模板

def show_template(model_name):
    """Print *model_name*'s prompt template and note which slots it uses."""
    info = get_model_details(model_name)
    tmpl = info.get("template", "")

    divider = "-" * 40
    print(f"模型 {model_name} 的模板:")
    print(divider)
    print(tmpl)
    print(divider)

    # Report which template placeholders are present.
    for marker, label in (("{{ .System }}", "支持系统提示"),
                          ("{{ .Prompt }}", "支持用户提示")):
        if marker in tmpl:
            print(label)

show_template("llama3.2")

JavaScript

// Fetch full model metadata from the local Ollama server via POST /api/show.
// Resolves to the decoded JSON response object.
async function getModelDetails(modelName) {
    const payload = JSON.stringify({ name: modelName });
    const response = await fetch('http://localhost:11434/api/show', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: payload,
    });
    return response.json();
}

// Example: fetch and print llama3.2's template and parameter string.
const details = await getModelDetails('llama3.2');
console.log('模板:', details.template);
console.log('参数:', details.parameters);

Go

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
)

// ShowRequest is the JSON request body for POST /api/show;
// Name selects which local model to describe.
type ShowRequest struct {
    Name string `json:"name"`
}

// ModelDetails holds the subset of the /api/show response this example
// consumes. NOTE(review): the response also carries a "details" object
// (format, family, ...) that is deliberately not decoded here.
type ModelDetails struct {
    Modelfile  string                 `json:"modelfile"`
    Parameters string                 `json:"parameters"`
    Template   string                 `json:"template"`
    ModelInfo  map[string]interface{} `json:"model_info"`
}

// getModelDetails fetches the full metadata for modelName from the local
// Ollama server via POST /api/show.
//
// Fixed: the original discarded the errors from json.Marshal, io.ReadAll
// and json.Unmarshal and never checked the HTTP status, so callers could
// receive a zero-valued ModelDetails with a nil error. All failures are
// now propagated to the caller.
func getModelDetails(modelName string) (*ModelDetails, error) {
    body, err := json.Marshal(ShowRequest{Name: modelName})
    if err != nil {
        return nil, err
    }

    resp, err := http.Post(
        "http://localhost:11434/api/show",
        "application/json",
        bytes.NewReader(body),
    )
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("api/show: unexpected status %s", resp.Status)
    }

    data, err := io.ReadAll(resp.Body)
    if err != nil {
        return nil, err
    }

    var result ModelDetails
    if err := json.Unmarshal(data, &result); err != nil {
        return nil, err
    }
    return &result, nil
}

// main prints llama3.2's template and parameter string.
//
// Fixed: the original ignored the error from getModelDetails and then
// dereferenced details, panicking with a nil pointer whenever the server
// was unreachable or the request failed.
func main() {
    details, err := getModelDetails("llama3.2")
    if err != nil {
        fmt.Println("获取模型详情失败:", err)
        return
    }
    fmt.Printf("模板:\n%s\n", details.Template)
    fmt.Printf("参数:\n%s\n", details.Parameters)
}

实际应用

复制模型配置

def copy_model_config(source_model, new_model_name):
    """Recreate *source_model*'s Modelfile under *new_model_name*.

    Reads the source model's Modelfile via /api/show and posts it to
    /api/create; returns the decoded JSON response.
    """
    source = get_model_details(source_model)
    payload = {
        "name": new_model_name,
        "modelfile": source.get("modelfile", ""),
    }
    response = requests.post("http://localhost:11434/api/create", json=payload)
    return response.json()

copy_model_config("llama3.2", "my-llama")

比较模型配置

def compare_models(model1, model2):
    """Print a side-by-side comparison of two models' key parameters.

    Fetches /api/show for both models and compares the context length and
    parameter count found in their ``model_info`` sections.
    """
    d1 = get_model_details(model1)
    d2 = get_model_details(model2)

    # Fixed: the header previously concatenated the two model names with no
    # separator ("比较 llama3.2mistral:").
    print(f"比较 {model1} 和 {model2}:")
    print()

    info1 = d1.get("model_info", {})
    info2 = d2.get("model_info", {})

    print(f"上下文长度: {info1.get('llama.context_length')} vs {info2.get('llama.context_length')}")
    print(f"参数数量: {info1.get('general.parameter_count')} vs {info2.get('general.parameter_count')}")

compare_models("llama3.2", "mistral")

导出 Modelfile

def export_modelfile(model_name, output_file):
    """Save *model_name*'s Modelfile to *output_file*.

    Fixed: the file is now written as UTF-8 explicitly — the Modelfile
    (template, system prompt) may contain non-ASCII text, and the platform
    default encoding is not guaranteed to handle it.
    """
    details = get_model_details(model_name)
    modelfile = details.get("modelfile", "")

    with open(output_file, "w", encoding="utf-8") as f:
        f.write(modelfile)

    print(f"Modelfile 已导出到 {output_file}")

export_modelfile("llama3.2", "llama3.2.Modelfile")