Zack Saadioui
8/27/2024

Install Ollama with Homebrew and start its background service:

bash
brew install ollama
brew services start ollama

Next, install the langchain-ollama integration package:

bash
pip install -U langchain-ollama

In Python, import the prompt template class and the Ollama LLM wrapper:

python
from langchain_core.prompts import ChatPromptTemplate
from langchain_ollama.llms import OllamaLLM

Define a prompt template with a {question} placeholder:

python
template = """Question: {question}
Answer: Let's think step by step."""
prompt = ChatPromptTemplate.from_template(template)

Create the model, pointing at a model you have pulled locally (llama2 here):

python
model = OllamaLLM(model="llama2")
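
OllamaLLM also accepts optional settings such as temperature and the server URL. Here is a minimal sketch; the values are only illustrative, and the base_url shown is simply Ollama's default local endpoint:

python
# Optional tuning; temperature and base_url values here are only examples.
model = OllamaLLM(
    model="llama2",
    temperature=0.2,                    # lower values give more deterministic answers
    base_url="http://localhost:11434",  # Ollama's default local endpoint
)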

Chain the prompt into the model with the pipe operator and invoke it:

python
chain = prompt | model
response = chain.invoke({"question": "What is LangChain?"})
print(response)
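
Since prompt | model is a standard LangChain runnable, you can also stream the answer token by token instead of waiting for the full response. A minimal sketch reusing the chain built above:

python
# Stream chunks as the local model generates them.
for chunk in chain.stream({"question": "What is LangChain?"}):
    print(chunk, end="", flush=True)
print()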

You can pull any other model from the Ollama library the same way:

ollama pull <model-name>

Replace model-name with the model you want, for example nomic-embed-text (a text embedding model) or bakllava (a multimodal model that accepts images).
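
For example, once nomic-embed-text is pulled, the langchain-ollama package also provides an OllamaEmbeddings class. A minimal sketch (the query string is just an example):

python
from langchain_ollama import OllamaEmbeddings

# Requires a local copy of the model: ollama pull nomic-embed-text
embeddings = OllamaEmbeddings(model="nomic-embed-text")

vector = embeddings.embed_query("What is LangChain?")
print(len(vector))  # dimensionality of the embedding vector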

To try the multimodal model, pull bakllava:

bash
ollama pull bakllava
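
To send an image to bakllava you can use ChatOllama with a multimodal message. The sketch below makes a few assumptions: the file photo.jpg is a hypothetical placeholder, and it relies on ChatOllama accepting base64-encoded image_url content blocks, as in LangChain's multimodal examples for LLaVA-style models:

python
import base64

from langchain_core.messages import HumanMessage
from langchain_ollama import ChatOllama

llm = ChatOllama(model="bakllava")

# Hypothetical image path; replace with your own file.
with open("photo.jpg", "rb") as f:
    image_b64 = base64.b64encode(f.read()).decode("utf-8")

# Combine a text block and a base64-encoded image block in one message.
message = HumanMessage(
    content=[
        {"type": "text", "text": "What is in this picture?"},
        {"type": "image_url", "image_url": f"data:image/jpeg;base64,{image_b64}"},
    ]
)
print(llm.invoke([message]).content)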