Zack Saadioui
4/25/2025
Getting Ollama onto your machine takes a single command. On Linux, run the official install script:

bash
curl -fsSL https://ollama.com/install.sh | sh
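The installer also starts the Ollama server, which listens on port 11434 by default. As a quick sanity check, here is a small sketch (not from the original article) that asks the local API which models are installed; it assumes the default port and that the requests package is available:

python
# Sanity-check sketch: list locally installed models via the /api/tags endpoint.
# Assumes the Ollama server is running on its default port 11434.
import requests

resp = requests.get("http://localhost:11434/api/tags", timeout=5)
resp.raise_for_status()

# The response is a JSON object with a "models" list of locally pulled models.
for model in resp.json().get("models", []):
    print(model["name"])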
Once installed, you can pull and run a model straight from the terminal:

bash
ollama run llama3.3

To check that the model is answering sensibly, it helps to write down a few test cases, each pairing a prompt with the output you expect:
python
test_cases = [
    {"prompt": "What is the capital of France?", "expected_output": "Paris"},
    {"prompt": "Who wrote 'Pride and Prejudice'?", "expected_output": "Jane Austen"}
]
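To actually run those checks, you can loop over the test cases and send each prompt to the local /api/generate endpoint. The following is a minimal sketch rather than code from the article; it assumes the llama3.3 model shown above is already pulled, uses the default local endpoint, and sets "stream": false so the API returns a single JSON object:

python
# Minimal test-runner sketch: send each prompt to the local Ollama API
# and check that the expected answer appears in the model's response.
import requests

OLLAMA_URL = "http://localhost:11434/api/generate"  # default local endpoint

for case in test_cases:
    resp = requests.post(
        OLLAMA_URL,
        json={"model": "llama3.3", "prompt": case["prompt"], "stream": False},
        timeout=120,
    )
    resp.raise_for_status()
    answer = resp.json()["response"]

    passed = case["expected_output"].lower() in answer.lower()
    print(f"{'PASS' if passed else 'FAIL'}: {case['prompt']!r} -> {answer.strip()[:60]}")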
You can also hit the local Ollama API directly with curl. A single POST to the generate endpoint is enough to send a prompt:

bash
curl -X POST http://localhost:11434/api/generate -d '{"model": "llama3.3", "prompt": "What is the capital of France?"}'
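Note that by default /api/generate streams its output as newline-delimited JSON chunks, which is what the curl call above prints. Here is a rough sketch of how you might consume that stream from Python; the "response" and "done" field names follow Ollama's streaming format, and the prompt and model are just the ones used above:

python
# Sketch: consume Ollama's streaming response. Each line is a JSON object
# carrying a "response" text chunk, with "done": true on the final line.
import json
import requests

with requests.post(
    "http://localhost:11434/api/generate",
    json={"model": "llama3.3", "prompt": "What is the capital of France?"},
    stream=True,
    timeout=120,
) as resp:
    resp.raise_for_status()
    for line in resp.iter_lines():
        if not line:
            continue
        chunk = json.loads(line)
        print(chunk.get("response", ""), end="", flush=True)
        if chunk.get("done"):
            print()  # final newline once the stream finishes
            break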
If your hardware is limited, you can run a quantized build of a model instead, which cuts memory use at a small cost in output quality:

bash
ollama run llama2:7b-q4_0
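If you want a rough feel for the speed difference, you can time a one-shot prompt against each tag. The sketch below shells out to ollama run, which accepts a prompt as a final argument and exits after printing the reply; the two model tags are only examples and must already be pulled locally:

python
# Rough timing sketch: run the same prompt through a full-precision tag and a
# quantized tag and compare wall-clock time. Both tags must be pulled first.
import subprocess
import time

PROMPT = "Summarize the plot of 'Pride and Prejudice' in two sentences."

for tag in ["llama2:7b", "llama2:7b-q4_0"]:  # example tags, adjust to what you have
    start = time.perf_counter()
    result = subprocess.run(
        ["ollama", "run", tag, PROMPT],
        capture_output=True,
        text=True,
        check=True,
    )
    elapsed = time.perf_counter() - start
    print(f"{tag}: {elapsed:.1f}s, {len(result.stdout.split())} words")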
To squeeze out more performance, you can also tune Ollama through environment variables, for example enabling GPU acceleration and setting the number of CPU threads:

bash
export OLLAMA_CUDA=1
export OLLAMA_NUM_THREADS=8
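To see whether tuning like this actually helps, you can look at the timing statistics Ollama returns with a non-streaming generate call: eval_count is the number of generated tokens and eval_duration is the generation time in nanoseconds, so together they give a rough tokens-per-second figure. A sketch, assuming the same local endpoint and model as above:

python
# Sketch: estimate generation speed from the timing stats Ollama reports.
# eval_count = generated tokens, eval_duration = generation time in nanoseconds.
import requests

resp = requests.post(
    "http://localhost:11434/api/generate",
    json={"model": "llama3.3", "prompt": "What is the capital of France?", "stream": False},
    timeout=120,
)
resp.raise_for_status()
stats = resp.json()

tokens = stats.get("eval_count", 0)
seconds = stats.get("eval_duration", 0) / 1e9
if seconds > 0:
    print(f"{tokens} tokens in {seconds:.2f}s -> {tokens / seconds:.1f} tokens/sec")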