from continuedev.core.config import ContinueConfig
from continuedev.core.models import Models
from continuedev.libs.llm import Ollama

# Shared model parameters: both the default and summarize slots use the same
# locally-served CodeLlama instruct model via Ollama.
_CODELLAMA = {
    "title": "CodeLlama-7b-Instruct",
    "model": "codellama:7b-instruct",
}

# Continue extension configuration.
# NOTE: each slot gets its own Ollama instance (not a shared object) so the
# two roles keep independent client state, matching the original behavior.
config = ContinueConfig(
    # Opt out of usage telemetry.
    allow_anonymous_telemetry=False,
    models=Models(
        # Model used for ordinary chat/edit requests.
        default=Ollama(**_CODELLAMA),
        # Model used for summarization tasks.
        summarize=Ollama(**_CODELLAMA),
    ),
    # No extra system prompt is prepended.
    system_message=None,
    # Sampling temperature for completions (0 = deterministic, 1 = creative).
    temperature=0.5,
    # No user-defined custom commands, slash commands, or context providers.
    custom_commands=[],
    slash_commands=[],
    context_providers=[],
)