ollama api

https://github.com/ollama/ollama/blob/main/docs/api.md

examples

Assume that the Ollama server is listening at 192.0.2.98:11434:

export OLLAMA_SERVER=192.0.2.98:11434
# generate: return only the final output (streaming disabled)
curl http://$OLLAMA_SERVER/api/generate -d '{
  "model": "gemma3:4b",
  "prompt": "Why is the sky blue?",
  "stream": false
}'
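
Per the api.md linked above, the non-streaming reply comes back as a single JSON object with the generated text in the response field. To pull out just the text (assuming jq is installed):

# extract only the generated text (assumes jq is available)
curl -s http://$OLLAMA_SERVER/api/generate -d '{
  "model": "gemma3:4b",
  "prompt": "Why is the sky blue?",
  "stream": false
}' | jq -r .response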
---
# chat version
curl http://$OLLAMA_SERVER/api/chat -d '{
  "model": "gemma3:4b",
  "messages": [
    {
      "role": "user",
      "content": "why is the sky blue?"
    }
  ],
  "stream": false
}'

curl http://$OLLAMA_SERVER/api/chat -d '{
  "model": "gemma3:4b",
  "messages": [
    {
      "role": "user",
      "content": "What are the different roles other than user that I can use to send prompt to an LLM?"
    }
  ],
  "stream": false
}'
---
# chat with a system prompt added
curl http://$OLLAMA_SERVER/api/chat -d '{
  "model": "gemma3:4b",
  "messages": [
    {
      "role": "system",
      "content": "You are a helpful assistant yet you can only respond two sentences at a time."
    },
    {
      "role": "user",
      "content": "What are the different roles other than user that I can use to send prompt to an LLM?"
    }
  ],
  "stream": false
}'
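
For the chat endpoint the reply sits under message.content instead of response, so the same jq trick (again assuming jq is installed) becomes:

# extract only the assistant reply from a chat response
curl -s http://$OLLAMA_SERVER/api/chat -d '{
  "model": "gemma3:4b",
  "messages": [
    {
      "role": "user",
      "content": "why is the sky blue?"
    }
  ],
  "stream": false
}' | jq -r .message.content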

---
# chat with the output forced to valid JSON (format: json)
curl http://$OLLAMA_SERVER/api/chat -d '{
  "model": "gemma3:4b",
  "messages": [
    {
      "role": "user",
      "content": "why is the sky blue?"
    }
  ],
  "format": "json",
  "stream": false
}'

curl http://$OLLAMA_SERVER/api/chat -d '{
  "model": "gemma3:4b",
  "messages": [
    {
      "role": "system",
      "content": "You are a helpful assistant yet you can only respond two sentences at a time. Always respond in json format."
    },
    {
      "role": "user",
      "content": "What are the different roles other than user that I can use to send prompt to an LLM?"
    }
  ],
  "format": "json",
  "stream": false
}'
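
Beyond plain "json", the api.md linked above also documents structured outputs: format can take a full JSON schema and the reply is constrained to match it. A minimal sketch, assuming a recent Ollama release and a made-up two-field schema:

# chat with a JSON schema passed as format (structured outputs)
curl http://$OLLAMA_SERVER/api/chat -d '{
  "model": "gemma3:4b",
  "messages": [
    {
      "role": "user",
      "content": "why is the sky blue?"
    }
  ],
  "format": {
    "type": "object",
    "properties": {
      "answer": { "type": "string" },
      "phenomenon": { "type": "string" }
    },
    "required": ["answer", "phenomenon"]
  },
  "stream": false
}'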
---
# unload the model from memory immediately (keep_alive set to 0)
curl http://$OLLAMA_SERVER/api/generate -d '{
  "model": "gemma3:4b",
  "keep_alive": 0
}'
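
The same mechanism works the other way: a generate request with no prompt only loads the model, and keep_alive takes a duration (or -1 to keep it resident until the server stops). A sketch assuming a 10-minute window:

# pre-load the model and keep it in memory for 10 minutes
curl http://$OLLAMA_SERVER/api/generate -d '{
  "model": "gemma3:4b",
  "keep_alive": "10m"
}'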
---
# chat with conversation history
curl http://$OLLAMA_SERVER/api/chat -d '{
  "model": "gemma3:4b",
  "messages": [
    {
      "role": "user",
      "content": "why is the sky blue?"
    },
    {
      "role": "assistant",
      "content": "due to rayleigh scattering."
    },
    {
      "role": "user",
      "content": "how is that different than mie scattering?"
    }
  ],
  "stream": false
}'