Go 生成与聊天

Go 语言调用 Ollama API 进行文本生成和聊天操作。

聊天接口

基本聊天

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
)

// Message is a single turn in a chat conversation.
type Message struct {
    Role    string `json:"role"`    // "system", "user", or "assistant"
    Content string `json:"content"` // the message text
}

// ChatRequest is the JSON body sent to POST /api/chat.
type ChatRequest struct {
    Model    string    `json:"model"`    // Ollama model name, e.g. "llama3.2"
    Messages []Message `json:"messages"` // full conversation history
    Stream   bool      `json:"stream"`   // false = one complete response, not a stream
}

// ChatResponse is the non-streaming JSON body returned by /api/chat.
type ChatResponse struct {
    Model    string  `json:"model"`
    Message  Message `json:"message"` // the assistant's reply
    Done     bool    `json:"done"`
}

// chat posts a non-streaming chat request to the local Ollama server and
// returns the assistant's reply text.
//
// model is the Ollama model name (e.g. "llama3.2"); messages is the entire
// conversation history to send. A non-200 response is reported as an error
// rather than silently decoded into an empty reply.
func chat(model string, messages []Message) (string, error) {
    req := ChatRequest{
        Model:    model,
        Messages: messages,
        Stream:   false, // ask for a single complete response
    }

    body, err := json.Marshal(req)
    if err != nil {
        return "", fmt.Errorf("encoding chat request: %w", err)
    }

    resp, err := http.Post(
        "http://localhost:11434/api/chat",
        "application/json",
        bytes.NewReader(body),
    )
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()

    data, err := io.ReadAll(resp.Body)
    if err != nil {
        return "", fmt.Errorf("reading chat response: %w", err)
    }
    if resp.StatusCode != http.StatusOK {
        return "", fmt.Errorf("chat request failed: %s: %s", resp.Status, data)
    }

    var result ChatResponse
    if err := json.Unmarshal(data, &result); err != nil {
        return "", fmt.Errorf("decoding chat response: %w", err)
    }

    return result.Message.Content, nil
}

func main() {
    reply, _ := chat("llama3.2", []Message{
        {Role: "user", Content: "你好"},
    })
    fmt.Println(reply)
}

多轮对话

// multiTurnChat shows that earlier turns must be replayed on every request
// so the model can keep conversational context.
func multiTurnChat() {
    var history []Message
    history = append(history,
        Message{Role: "user", Content: "我叫小明"},
        Message{Role: "assistant", Content: "你好小明!"},
        Message{Role: "user", Content: "我叫什么名字?"},
    )

    answer, _ := chat("llama3.2", history)
    fmt.Println(answer)
}

带参数

// ChatRequestWithOptions extends the basic chat request with per-request
// model options (e.g. "temperature", "num_ctx") forwarded to the Ollama runtime.
type ChatRequestWithOptions struct {
    Model    string                 `json:"model"`
    Messages []Message              `json:"messages"`
    Stream   bool                   `json:"stream"`
    Options  map[string]interface{} `json:"options,omitempty"` // omitted from JSON when nil
}

// chatWithOptions behaves like chat but forwards per-request model options
// (e.g. "temperature", "num_ctx") to the Ollama server. A nil options map is
// valid and is simply omitted from the request body.
func chatWithOptions(model string, messages []Message, options map[string]interface{}) (string, error) {
    req := ChatRequestWithOptions{
        Model:    model,
        Messages: messages,
        Stream:   false, // ask for a single complete response
        Options:  options,
    }

    body, err := json.Marshal(req)
    if err != nil {
        return "", fmt.Errorf("encoding chat request: %w", err)
    }

    resp, err := http.Post(
        "http://localhost:11434/api/chat",
        "application/json",
        bytes.NewReader(body),
    )
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()

    data, err := io.ReadAll(resp.Body)
    if err != nil {
        return "", fmt.Errorf("reading chat response: %w", err)
    }
    if resp.StatusCode != http.StatusOK {
        return "", fmt.Errorf("chat request failed: %s: %s", resp.Status, data)
    }

    var result ChatResponse
    if err := json.Unmarshal(data, &result); err != nil {
        return "", fmt.Errorf("decoding chat response: %w", err)
    }

    return result.Message.Content, nil
}

func main() {
    options := map[string]interface{}{
        "temperature": 0.7,
        "num_ctx":     4096,
    }
    
    reply, _ := chatWithOptions("llama3.2", []Message{
        {Role: "user", Content: "写一首诗"},
    }, options)
    fmt.Println(reply)
}

生成接口

基本生成

// GenerateRequest is the JSON body sent to POST /api/generate.
type GenerateRequest struct {
    Model  string `json:"model"`  // Ollama model name
    Prompt string `json:"prompt"` // text prompt for completion
    Stream bool   `json:"stream"` // false = one complete response, not a stream
}

// GenerateResponse is the non-streaming JSON body returned by /api/generate.
type GenerateResponse struct {
    Response string `json:"response"` // the generated text
    Done     bool   `json:"done"`
}

// generate posts a non-streaming completion request to the local Ollama
// server and returns the generated text. A non-200 response is reported as
// an error instead of being silently decoded into an empty string.
func generate(model, prompt string) (string, error) {
    req := GenerateRequest{
        Model:  model,
        Prompt: prompt,
        Stream: false, // ask for a single complete response
    }

    body, err := json.Marshal(req)
    if err != nil {
        return "", fmt.Errorf("encoding generate request: %w", err)
    }

    resp, err := http.Post(
        "http://localhost:11434/api/generate",
        "application/json",
        bytes.NewReader(body),
    )
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()

    data, err := io.ReadAll(resp.Body)
    if err != nil {
        return "", fmt.Errorf("reading generate response: %w", err)
    }
    if resp.StatusCode != http.StatusOK {
        return "", fmt.Errorf("generate request failed: %s: %s", resp.Status, data)
    }

    var result GenerateResponse
    if err := json.Unmarshal(data, &result); err != nil {
        return "", fmt.Errorf("decoding generate response: %w", err)
    }

    return result.Response, nil
}

// main demonstrates a one-shot text completion.
func main() {
    prompt := "用 Go 写一个快速排序"
    output, _ := generate("llama3.2", prompt)
    fmt.Println(output)
}

JSON 格式输出

// GenerateWithFormat is a generate request that constrains the output format;
// setting Format to "json" asks the model to emit valid JSON.
type GenerateWithFormat struct {
    Model  string `json:"model"`
    Prompt string `json:"prompt"`
    Format string `json:"format"` // e.g. "json"
    Stream bool   `json:"stream"`
}

// generateJSON runs a non-streaming completion with format constrained to
// "json", so the model's output is valid JSON text. The raw JSON string is
// returned for the caller to decode.
func generateJSON(model, prompt string) (string, error) {
    req := GenerateWithFormat{
        Model:  model,
        Prompt: prompt,
        Format: "json", // constrain the model to emit valid JSON
        Stream: false,
    }

    body, err := json.Marshal(req)
    if err != nil {
        return "", fmt.Errorf("encoding generate request: %w", err)
    }

    resp, err := http.Post(
        "http://localhost:11434/api/generate",
        "application/json",
        bytes.NewReader(body),
    )
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()

    data, err := io.ReadAll(resp.Body)
    if err != nil {
        return "", fmt.Errorf("reading generate response: %w", err)
    }
    if resp.StatusCode != http.StatusOK {
        return "", fmt.Errorf("generate request failed: %s: %s", resp.Status, data)
    }

    var result GenerateResponse
    if err := json.Unmarshal(data, &result); err != nil {
        return "", fmt.Errorf("decoding generate response: %w", err)
    }

    return result.Response, nil
}

嵌入接口

// EmbeddingRequest is the JSON body sent to POST /api/embeddings.
type EmbeddingRequest struct {
    Model  string `json:"model"`  // embedding model name, e.g. "nomic-embed-text"
    Prompt string `json:"prompt"` // text to embed
}

// EmbeddingResponse is the JSON body returned by /api/embeddings.
type EmbeddingResponse struct {
    Embedding []float64 `json:"embedding"` // dense vector for the prompt
}

// getEmbedding requests a dense embedding vector for text from the local
// Ollama server using the given embedding model. A non-200 response is
// reported as an error instead of being silently decoded into a nil vector.
func getEmbedding(model, text string) ([]float64, error) {
    req := EmbeddingRequest{
        Model:  model,
        Prompt: text,
    }

    body, err := json.Marshal(req)
    if err != nil {
        return nil, fmt.Errorf("encoding embedding request: %w", err)
    }

    resp, err := http.Post(
        "http://localhost:11434/api/embeddings",
        "application/json",
        bytes.NewReader(body),
    )
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    data, err := io.ReadAll(resp.Body)
    if err != nil {
        return nil, fmt.Errorf("reading embedding response: %w", err)
    }
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("embedding request failed: %s: %s", resp.Status, data)
    }

    var result EmbeddingResponse
    if err := json.Unmarshal(data, &result); err != nil {
        return nil, fmt.Errorf("decoding embedding response: %w", err)
    }

    return result.Embedding, nil
}

// main demonstrates fetching an embedding and printing its dimensionality.
func main() {
    vec, _ := getEmbedding("nomic-embed-text", "这是一段测试文本")
    fmt.Printf("向量维度: %d\n", len(vec))
}

模型管理

列出模型

// ModelInfo describes one locally installed model as reported by /api/tags.
type ModelInfo struct {
    Name   string `json:"name"`   // model name with tag, e.g. "llama3.2:latest"
    Size   int64  `json:"size"`   // on-disk size in bytes
    Digest string `json:"digest"` // content digest identifying the model blob
}

// ModelsResponse is the JSON body returned by GET /api/tags.
type ModelsResponse struct {
    Models []ModelInfo `json:"models"`
}

// listModels fetches the locally installed models from GET /api/tags.
// A non-200 response or malformed body is reported as an error instead of
// being silently decoded into an empty list.
func listModels() ([]ModelInfo, error) {
    resp, err := http.Get("http://localhost:11434/api/tags")
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    data, err := io.ReadAll(resp.Body)
    if err != nil {
        return nil, fmt.Errorf("reading tags response: %w", err)
    }
    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("listing models: %s: %s", resp.Status, data)
    }

    var result ModelsResponse
    if err := json.Unmarshal(data, &result); err != nil {
        return nil, fmt.Errorf("decoding tags response: %w", err)
    }

    return result.Models, nil
}

// main lists installed models and prints each name with its size in GB.
func main() {
    const gib = 1024 * 1024 * 1024
    all, _ := listModels()
    for _, model := range all {
        fmt.Printf("%s: %.2f GB\n", model.Name, float64(model.Size)/gib)
    }
}

删除模型

// deleteModel removes a locally installed model via DELETE /api/delete.
//
// Unlike a plain fire-and-forget request, it checks the HTTP status so a
// failed delete (e.g. unknown model name) surfaces as an error.
func deleteModel(name string) error {
    payload, err := json.Marshal(map[string]string{"name": name})
    if err != nil {
        return fmt.Errorf("encoding delete request: %w", err)
    }

    httpReq, err := http.NewRequest(
        http.MethodDelete,
        "http://localhost:11434/api/delete",
        bytes.NewReader(payload),
    )
    if err != nil {
        return fmt.Errorf("building delete request: %w", err)
    }
    httpReq.Header.Set("Content-Type", "application/json")

    resp, err := http.DefaultClient.Do(httpReq)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        // Best-effort read of the error body for the message.
        body, _ := io.ReadAll(resp.Body)
        return fmt.Errorf("deleting model %q: %s: %s", name, resp.Status, body)
    }
    return nil
}

封装对话会话

package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
)

// ChatSession accumulates the conversation history for one model so that
// every request replays prior turns, giving the model context.
type ChatSession struct {
    Model    string    // Ollama model name used for every request
    Messages []Message // full history, including system/user/assistant turns
}

// NewChatSession creates a chat session for model. When system is non-empty
// it becomes the leading system message of the conversation.
func NewChatSession(model, system string) *ChatSession {
    s := &ChatSession{
        Model:    model,
        Messages: []Message{},
    }
    if system == "" {
        return s
    }
    s.Messages = append(s.Messages, Message{Role: "system", Content: system})
    return s
}

// Send appends content as a user turn, posts the whole history to the Ollama
// chat endpoint, records the assistant's reply in the session, and returns
// the reply text.
//
// On failure the user turn remains in s.Messages but no assistant turn is
// appended; a non-200 response is reported as an error rather than being
// silently decoded into an empty reply.
func (s *ChatSession) Send(content string) (string, error) {
    s.Messages = append(s.Messages, Message{
        Role: "user", Content: content,
    })

    req := ChatRequest{
        Model:    s.Model,
        Messages: s.Messages,
        Stream:   false, // ask for a single complete response
    }

    body, err := json.Marshal(req)
    if err != nil {
        return "", fmt.Errorf("encoding chat request: %w", err)
    }

    resp, err := http.Post(
        "http://localhost:11434/api/chat",
        "application/json",
        bytes.NewReader(body),
    )
    if err != nil {
        return "", err
    }
    defer resp.Body.Close()

    data, err := io.ReadAll(resp.Body)
    if err != nil {
        return "", fmt.Errorf("reading chat response: %w", err)
    }
    if resp.StatusCode != http.StatusOK {
        return "", fmt.Errorf("chat request failed: %s: %s", resp.Status, data)
    }

    var result ChatResponse
    if err := json.Unmarshal(data, &result); err != nil {
        return "", fmt.Errorf("decoding chat response: %w", err)
    }

    // Keep the assistant's turn so later calls include it as context.
    s.Messages = append(s.Messages, result.Message)

    return result.Message.Content, nil
}

// main demonstrates a multi-turn session where the model remembers context
// because the session replays the full history on every request.
func main() {
    session := NewChatSession("llama3.2", "你是一个友好的助手")

    prompts := []string{"你好", "我叫小明", "我叫什么名字?"}
    for _, prompt := range prompts {
        answer, _ := session.Send(prompt)
        fmt.Println(answer)
    }
}