An Elixir wrapper of ollama's REST API with a few niceties built in, such as handling endless LLM (Large Language Model) repetitions through a timeout.
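Under the hood, such a timeout guard amounts to racing the generation against a clock. The following is a minimal sketch of the idea (not Ollamex's actual implementation) using Task.yield/2, where run_generation/0 is a hypothetical stand-in for the API call:

# Sketch only: race a possibly never-ending generation against a timeout.
# run_generation/0 is a hypothetical placeholder for the real API call.
task = Task.async(fn -> run_generation() end)

case Task.yield(task, 120_000) || Task.shutdown(task) do
  {:ok, response} -> {:ok, response}
  {:exit, reason} -> {:error, reason}
  nil -> {:error, :timeout}
end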

Ollamex is written based on the ollama REST API documentation and covers the following endpoints: generate a completion (/generate), generate a chat completion (/chat), and list local models (/tags).

Installation

The package is available in Hex and can be installed by adding ollamex to your list of dependencies in mix.exs:

def deps do
  [
    {:ollamex, "~> 0.1.0"}
  ]
end
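Then fetch the dependency with mix deps.get.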

Documentation

The docs can be found at https://hexdocs.pm/ollamex.

Examples

API initialization

iex> api = Ollamex.API.new()
%Ollamex.API{
  uri: "http://localhost:11434/api",
  models: [
    %Ollamex.LLModel{
      name: "llama2:latest",
      digest: "78e26419b4469263f75331927a00a0284ef6544c1975b826b15abdaef17bb962",
      modified_at: "2024-01-09T22:24:14.925918123+02:00",
      size: 3826793677,
      details: %{
        "families" => ["llama"],
        "family" => "llama",
        "format" => "gguf",
        "parameter_size" => "7B",
        "quantization_level" => "Q4_0"
      },
      modelfile: nil,
      parameters: nil,
      template: nil
    },
    %Ollamex.LLModel{
      name: "mistral:latest",
      digest: "61e88e884507ba5e06c49b40e6226884b2a16e872382c2b44a42f2d119d804a5",
      modified_at: "2024-01-08T17:49:54.570542101+02:00",
      size: 4109865159,
      details: %{
        "families" => ["llama"],
        "family" => "llama",
        "format" => "gguf",
        "parameter_size" => "7B",
        "quantization_level" => "Q4_0"
      },
      modelfile: nil,
      parameters: nil,
      template: nil
    }
  ],
  timeout: 120000,
  errors: []
}
iex> Ollamex.API.list_models(api)
["llama2:latest", "mistral:latest"]

Generate a completion (/generate endpoint)

iex> p = %Ollamex.PromptRequest{model: "mistral:latest", prompt: "Explain using a simple paragraph like I'm 5 years old: Why is the sky not black like space?"}
%Ollamex.PromptRequest{
  model: "mistral:latest",
  prompt: "Explain using a simple paragraph like I'm 5 years old: Why is the sky not black like space?",
  raw: false,
  format: nil,
  stream: true,
  options: nil,
  images: []
}
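The options field can carry ollama model parameters such as temperature or num_predict. A hedged sketch, assuming the map is forwarded unchanged as the REST API's options payload:

# Assumption: options is passed through as ollama's "options" payload.
p = %Ollamex.PromptRequest{
  model: "mistral:latest",
  prompt: "Explain Rayleigh scattering in one sentence.",
  options: %{temperature: 0.2, num_predict: 128}
}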

iex> Ollamex.generate_with_timeout(p, api)
{:ok,
 %Ollamex.LLMResponse{
   context: [733, 16289, 28793, ...],
   created_at: "2024-01-10T19:23:12.943599755Z",
   done: true,
   eval_count: 100,
   eval_duration: 16850322000,
   model: "mistral:latest",
   prompt_eval_count: 33,
   prompt_eval_duration: 2865358000,
   response: " The sky isn't black like space because it has ... pretty colors, and nighttime with stars and the moon!",
   total_duration: 24862993618,
   message: nil,
   errors: nil
 }}
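Because a generation can stall or exceed the timeout, it pays to match on the failure case as well. A small sketch, assuming failures surface as an {:error, reason} tuple (the exact error shape is not shown above):

case Ollamex.generate_with_timeout(p, api) do
  {:ok, %Ollamex.LLMResponse{response: text}} ->
    IO.puts(text)

  # Assumption: timeouts and other failures return {:error, reason}.
  {:error, reason} ->
    IO.puts("generation failed: #{inspect(reason)}")
end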

Generate a chat completion (/chat endpoint)

iex> messages =
...>   []
...>   |> Ollamex.ChatMessage.append("user", "why is the sky blue?")
...>   |> Ollamex.ChatMessage.append("assistant", "due to rayleigh scattering!")
...>   |> Ollamex.ChatMessage.append("user", "how is that different to Mie scattering?")
...>   |> Enum.map(&Map.from_struct(&1))
[
  %{content: "why is the sky blue?", images: [], role: "user"},
  %{content: "due to rayleigh scattering!", images: [], role: "assistant"},
  %{
    content: "how is that different to Mie scattering?",
    images: [],
    role: "user"
  }
]

iex> cr = %Ollamex.ChatRequest{messages: messages, model: "llama2", stream: true}
%Ollamex.ChatRequest{
  model: "llama2",
  messages: [
    %{content: "why is the sky blue?", images: [], role: "user"},
    %{content: "due to rayleigh scattering!", images: [], role: "assistant"},
    %{
      content: "how is that different to Mie scattering?",
      images: [],
      role: "user"
    }
  ],
  format: nil,
  options: nil,
  template: nil,
  stream: true
}
iex> Ollamex.chat_with_timeout(cr, api)
{:ok,
 %Ollamex.LLMResponse{
   context: nil,
   created_at: "2024-01-10T19:29:05.771371091Z",
   done: true,
   eval_count: 515,
   eval_duration: 83246108000,
   model: "llama2",
   prompt_eval_count: 61,
   prompt_eval_duration: 7234332000,
   response: nil,
   total_duration: 95606709630,
   message: %{
     content: "Mie scattering is ... while Rayleigh scattering is responsible for the reddening of sunlight at sunrise and sunset.",
     role: "assistant"
   },
   errors: nil
 }}
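Note that for chat requests the reply lands in message rather than response. Based on the struct shape shown above, the assistant's text can be extracted with a plain pattern match:

{:ok, %Ollamex.LLMResponse{message: %{content: reply}}} =
  Ollamex.chat_with_timeout(cr, api)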