Zack Saadioui
8/26/2024
bash
pip install ollama
bash
curl -fsSL https://ollama.com/install.sh | sh
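With the Ollama runtime and the Python package installed, the model used in the examples below still has to be downloaded locally. Assuming llama3.1 is the model you want to run, that looks like:
bash
# Download the llama3.1 model weights so the Python examples can use them
ollama pull llama3.1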
python
# Ask a local model a question and print its reply
import ollama

response = ollama.chat(model='llama3.1', messages=[{'role': 'user', 'content': 'Why is the sky blue?'}])
print(response['message']['content'])
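Because messages is just a list of role/content dictionaries, a follow-up question can be asked by appending the model's previous reply before calling chat again. A minimal sketch, assuming the dictionary-style responses shown above:
python
# Carry the conversation forward by feeding the reply back as context
messages = [{'role': 'user', 'content': 'Why is the sky blue?'}]
first = ollama.chat(model='llama3.1', messages=messages)
messages.append(first['message'])
messages.append({'role': 'user', 'content': 'Explain it like I am five.'})
second = ollama.chat(model='llama3.1', messages=messages)
print(second['message']['content'])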
python
# Stream the reply chunk by chunk instead of waiting for the full response
stream = ollama.chat(model='llama3.1', messages=[{'role': 'user', 'content': 'Tell me about the universe?'}], stream=True)
for chunk in stream:
    print(chunk['message']['content'], end='', flush=True)
python
# Streaming works the same way for any prompt; chunks are printed as they arrive
stream = ollama.chat(model='llama3.1', messages=[{'role': 'user', 'content': 'Explain quantum physics.'}], stream=True)
for chunk in stream:
    print(chunk['message']['content'], end='', flush=True)
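Each chunk only carries a fragment of the reply, so if the full text is needed afterwards it has to be accumulated while streaming. A small sketch along those lines:
python
# Print chunks live and also keep the complete reply as one string
full_reply = ''
stream = ollama.chat(model='llama3.1', messages=[{'role': 'user', 'content': 'Explain quantum physics.'}], stream=True)
for chunk in stream:
    part = chunk['message']['content']
    print(part, end='', flush=True)
    full_reply += part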
python
# Completion-style request against a code model, prompted with a comment
result = ollama.generate(model='stable-code', prompt='// c function reverse string\n')
print(result['response'])
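generate (like chat) also accepts an options mapping for sampling parameters; the keys below (temperature, num_predict) are standard Ollama model options, shown here as a sketch rather than an exhaustive list:
python
# Lower the temperature for a more deterministic completion and cap its length
result = ollama.generate(
    model='stable-code',
    prompt='// c function reverse string\n',
    options={'temperature': 0.2, 'num_predict': 256},
)
print(result['response'])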
python
# Request an embedding vector for a piece of text
embeddings = ollama.embeddings(model='llama3.1', prompt='The quick brown fox jumps over the lazy dog')
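The call returns the vector under the 'embedding' key (in the dictionary-style API used throughout this post), so two texts can be compared directly. A minimal sketch using plain cosine similarity; the second sentence is just an illustrative example:
python
import math

import ollama

def cosine(a, b):
    # Cosine similarity between two equal-length vectors
    dot = sum(x * y for x, y in zip(a, b))
    norm_a = math.sqrt(sum(x * x for x in a))
    norm_b = math.sqrt(sum(y * y for y in b))
    return dot / (norm_a * norm_b)

v1 = ollama.embeddings(model='llama3.1', prompt='The quick brown fox jumps over the lazy dog')['embedding']
v2 = ollama.embeddings(model='llama3.1', prompt='A fast brown fox leaps over a sleepy dog')['embedding']
print(cosine(v1, v2))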