构建代码助手

代码助手是 Ollama 的典型应用场景,可以帮助生成代码、解释代码、调试问题等。

基本代码助手

import ollama

class CodeAssistant:
    """Code assistant backed by a local Ollama chat model.

    Provides code generation, explanation, debugging, and optimization by
    sending role-tagged chat messages to the configured model.
    """

    def __init__(self, model='codellama'):
        # Name of the Ollama model to use; must already be pulled locally.
        self.model = model
        # System prompt kept in Chinese — it is runtime model input, not a comment.
        self.system_prompt = """你是一个专业的编程助手。你的任务是:
1. 编写清晰、高效的代码
2. 添加必要的注释
3. 遵循最佳实践
4. 考虑边界情况和错误处理"""

    def _chat(self, system, user):
        """Send one system+user exchange to the model and return the reply text."""
        response = ollama.chat(
            model=self.model,
            messages=[
                {'role': 'system', 'content': system},
                {'role': 'user', 'content': user},
            ]
        )
        return response['message']['content']

    def generate_code(self, prompt, language='Python'):
        """Generate code in *language* that fulfils *prompt*."""
        return self._chat(self.system_prompt, f'用 {language} 实现:{prompt}')

    def explain_code(self, code):
        """Return a plain-language explanation of *code*."""
        return self._chat(
            '你是一个代码解释专家,用简洁易懂的语言解释代码。',
            f'请解释以下代码:\n\n```\n{code}\n```'
        )

    def debug_code(self, code, error=None):
        """Ask the model to find problems in *code*.

        If *error* is given, the error message is appended to the prompt to
        guide the diagnosis.
        """
        # Fixed: the original used an f-string with no placeholders here (F541).
        prompt = '请检查以下代码是否有问题'
        if error:
            prompt += f',错误信息:\n{error}'
        prompt += f':\n\n```\n{code}\n```'
        return self._chat(
            '你是一个代码调试专家,帮助找出代码问题并提供修复建议。',
            prompt
        )

    def optimize_code(self, code):
        """Ask the model to improve the performance and readability of *code*."""
        return self._chat(
            '你是一个代码优化专家,帮助优化代码性能和可读性。',
            f'请优化以下代码:\n\n```\n{code}\n```'
        )

# Usage example: generate a quicksort implementation, then explain it.
assistant = CodeAssistant()

generated = assistant.generate_code('一个快速排序算法')
print("生成的代码:")
print(generated)

explanation = assistant.explain_code(generated)
print("\n代码解释:")
print(explanation)

交互式代码助手

import ollama
from rich.console import Console
from rich.panel import Panel
from rich.syntax import Syntax
from rich.markdown import Markdown

# Shared rich console used by the interactive assistant below for styled output.
console = Console()

class InteractiveCodeAssistant:
    """Stateful chat assistant that keeps the full conversation history."""

    def __init__(self, model='codellama'):
        # Ollama model name and the running list of chat messages.
        self.model = model
        self.history = []

    def send(self, message, stream=True):
        """Record *message* and return the model's reply (streamed by default)."""
        self.history.append({'role': 'user', 'content': message})
        return self._stream_response() if stream else self._sync_response()

    def _stream_response(self):
        """Print the reply piece-by-piece while accumulating the full text."""
        pieces = []
        for part in ollama.chat(model=self.model, messages=self.history, stream=True):
            text = part['message']['content']
            if text:
                console.print(text, end="")
                pieces.append(text)

        console.print()
        reply = "".join(pieces)
        self.history.append({'role': 'assistant', 'content': reply})
        return reply

    def _sync_response(self):
        """Fetch the complete reply in a single blocking call."""
        result = ollama.chat(model=self.model, messages=self.history)
        reply = result['message']['content']
        self.history.append({'role': 'assistant', 'content': reply})
        return reply

    def clear(self):
        """Forget the conversation so far."""
        self.history = []

def main():
    """Run the interactive chat loop until the user exits."""
    assistant = InteractiveCodeAssistant()

    console.print(Panel.fit("代码助手", style="bold green"))
    console.print("命令: /clear 清空对话, /exit 退出\n")

    while True:
        try:
            user_input = console.input("[bold blue]你:[/] ").strip()
        except KeyboardInterrupt:
            # Ctrl-C exits the loop gracefully.
            console.print("\n再见!")
            return

        # Guard clauses: skip blank input, handle slash commands.
        if not user_input:
            continue
        if user_input == '/exit':
            console.print("再见!")
            return
        if user_input == '/clear':
            assistant.clear()
            console.print("[yellow]对话已清空[/]\n")
            continue

        console.print("[bold green]助手:[/]", end=" ")
        assistant.send(user_input)
        console.print()

if __name__ == "__main__":
    main()

代码补全服务

import ollama
from flask import Flask, request, jsonify

# Flask application exposing the code-completion and explanation endpoints.
app = Flask(__name__)

def complete_code(prefix, language='python'):
    """Return a model completion for *prefix* using CodeLlama's infill format.

    *language* is accepted for API compatibility but is not currently used:
    the fill-in-middle prompt format carries no language tag.
    """
    response = ollama.generate(
        model='codellama',
        # Fixed: CodeLlama's fill-in-middle format is '<PRE> {pre} <SUF>{suf} <MID>';
        # the original prompt omitted the <MID> sentinel, so the model was never
        # asked to emit the infilled text. Suffix is empty here (completion-only).
        prompt=f'<PRE> {prefix} <SUF> <MID>',
        options={
            'temperature': 0.2,   # low temperature for deterministic completions
            'num_predict': 100    # cap the completion length
        }
    )
    return response['response']

@app.route('/complete', methods=['POST'])
def complete():
    """POST {prefix, language?} -> {completion}.

    Missing or non-JSON bodies are treated as empty rather than erroring.
    """
    # Fixed: request.json raises/returns None on a missing or non-JSON body;
    # get_json(silent=True) degrades gracefully to an empty dict instead.
    data = request.get_json(silent=True) or {}
    prefix = data.get('prefix', '')
    language = data.get('language', 'python')

    completion = complete_code(prefix, language)

    return jsonify({
        'completion': completion
    })

@app.route('/explain', methods=['POST'])
def explain():
    """POST {code} -> {explanation}.

    Missing or non-JSON bodies are treated as empty rather than erroring.
    """
    # Fixed: request.json raises/returns None on a missing or non-JSON body;
    # get_json(silent=True) degrades gracefully to an empty dict instead.
    data = request.get_json(silent=True) or {}
    code = data.get('code', '')

    response = ollama.chat(
        model='codellama',
        messages=[
            {'role': 'user', 'content': f'解释这段代码:\n```\n{code}\n```'}
        ]
    )

    return jsonify({
        'explanation': response['message']['content']
    })

if __name__ == '__main__':
    # Development server only; use a production WSGI server for deployment.
    app.run(port=5000)

多语言代码助手

import ollama

class MultiLanguageCodeAssistant:
    """Code assistant that generates, translates, tests, and documents code
    across several programming languages via an Ollama chat model.
    """

    # Maps short language keys to the display names used inside prompts.
    LANGUAGE_PROMPTS = {
        'python': 'Python',
        'javascript': 'JavaScript',
        'go': 'Go',
        'java': 'Java',
        'rust': 'Rust',
        'cpp': 'C++',
        'typescript': 'TypeScript'
    }

    def __init__(self, model='codellama'):
        # Name of the Ollama model to use; must already be pulled locally.
        self.model = model

    def _lang_name(self, language):
        """Resolve a language key to its display name; unknown keys pass through."""
        return self.LANGUAGE_PROMPTS.get(language, language)

    def _chat(self, system, user):
        """Send one system+user exchange to the model and return the reply text.

        Extracted helper: all four public methods previously duplicated this
        identical chat-call scaffolding.
        """
        response = ollama.chat(
            model=self.model,
            messages=[
                {'role': 'system', 'content': system},
                {'role': 'user', 'content': user},
            ]
        )
        return response['message']['content']

    def generate(self, description, language='python'):
        """Generate *language* code implementing *description* (code only, no prose)."""
        lang_name = self._lang_name(language)
        return self._chat(
            f'你是一个{lang_name}编程专家。只输出代码,不要解释。',
            f'用{lang_name}实现:{description}'
        )

    def translate(self, code, from_lang, to_lang):
        """Translate *code* from *from_lang* to *to_lang*."""
        from_name = self._lang_name(from_lang)
        to_name = self._lang_name(to_lang)
        return self._chat(
            f'你是一个代码翻译专家,将{from_name}代码翻译成{to_name}。',
            f'将以下{from_name}代码翻译成{to_name}:\n```\n{code}\n```'
        )

    def add_tests(self, code, language='python'):
        """Write unit tests for *code*."""
        lang_name = self._lang_name(language)
        return self._chat(
            f'你是一个{lang_name}测试专家,为代码编写单元测试。',
            f'为以下{lang_name}代码编写单元测试:\n```\n{code}\n```'
        )

    def add_documentation(self, code, language='python'):
        """Add documentation comments to *code*."""
        lang_name = self._lang_name(language)
        return self._chat(
            f'你是一个{lang_name}文档专家,为代码添加文档注释。',
            f'为以下{lang_name}代码添加文档注释:\n```\n{code}\n```'
        )

# Usage example: generate quicksort in Python, then translate it to Go.
helper = MultiLanguageCodeAssistant()

py_impl = helper.generate('一个快速排序算法', 'python')
print("Python 代码:")
print(py_impl)

go_impl = helper.translate(py_impl, 'python', 'go')
print("\nGo 代码:")
print(go_impl)