Christopher Nielsen (mascguy) pushed a commit to branch master
in repository macports-ports.

https://github.com/macports/macports-ports/commit/7984299a63d2df9d6d2c192a550888cb698e2204
<pre style="white-space: pre; background: #F8F8F8">The following commit(s) were added to refs/heads/master by this push:
<span style='display:block; white-space:pre;color:#404040;'> new 7984299a63d ollama: submission
</span>7984299a63d is described below
<span style='display:block; white-space:pre;color:#808000;'>commit 7984299a63d2df9d6d2c192a550888cb698e2204
</span>Author: i0ntempest <i0ntempest@i0ntempest.com>
AuthorDate: Fri May 30 23:05:17 2025 +1000
<span style='display:block; white-space:pre;color:#404040;'> ollama: submission
</span><span style='display:block; white-space:pre;color:#404040;'>
</span><span style='display:block; white-space:pre;color:#404040;'> Supersedes: https://github.com/macports/macports-ports/pull/27350
</span>---
llm/ollama/Portfile | 94 ++++++++++++++++++++++++++++++++++++++
llm/ollama/files/ollama-wrapper.sh | 5 ++
llm/ollama/files/ollama_env.conf | 10 ++++
llm/ollama/files/patch-6854.diff | 35 ++++++++++++++
4 files changed, 144 insertions(+)
diff --git a/llm/ollama/Portfile b/llm/ollama/Portfile
new file mode 100644
index 00000000000..6e1aff24280
--- /dev/null
+++ b/llm/ollama/Portfile
@@ -0,0 +1,94 @@
+# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
+
+PortSystem 1.0
+PortGroup golang 1.0
+
+go.setup github.com/ollama/ollama 0.9.0 v
+go.offline_build no
+github.tarball_from archive
+revision 0
+
+categories llm
+installs_libs no
+maintainers {@rdallman reed.pizza:github} {i0ntempest @i0ntempest} \
+ openmaintainer
+license MIT
+
+description ${name} runs and manages LLMs
+long_description Get up and running with large language models easily
+homepage https://ollama.com
+
+checksums rmd160 c0fd02350da1684ec1b6e1de351883f1391d8fe3 \
+ sha256 bb3f05e0610b36ceb9cb2df380077a97f47f8ffdd0d828baeb6c5c4fddef244c \
+ size 8353746
+
+# See https://github.com/ollama/ollama/pull/6854
+patchfiles-append patch-6854.diff
+patch.args -p1
+
+build.post_args -ldflags \"-s -w \
+ -X 'github.com/${github.author}/${github.project}/version.Version=${version}' \
+ -X 'github.com/${github.author}/${github.project}/server.mode=release'\"
+
+set ollama_user ${name}
+set ol_data_path ${prefix}/var/${name}
+set ol_models_path ${ol_data_path}/models
+set ol_log_file ${prefix}/var/log/${name}.log
+set config_path ${prefix}/etc/${name}
+set env_file ${name}_env.conf
+set wrapper_script ${name}-wrapper.sh
+
+add_users ${ollama_user} \
+ group=${ollama_user} \
+ realname=Ollama
+
+destroot.keepdirs-append \
+ ${destroot}${ol_models_path}
+
+destroot {
+ xinstall -m 0755 ${worksrcpath}/${name} ${destroot}${prefix}/bin/${name}
+ xinstall -d -m 0755 ${destroot}${ol_models_path}
+ file attributes ${destroot}${ol_models_path} -owner ${ollama_user} -group ${ollama_user}
+ file attributes ${destroot}${ol_data_path} -owner ${ollama_user} -group ${ollama_user}
+
+ file mkdir ${destroot}${prefix}/libexec/${name}
+ copy ${filespath}/${wrapper_script} ${destroot}${prefix}/libexec/${name}/${wrapper_script}
+ file mkdir ${destroot}${config_path}
+ copy ${filespath}/${env_file} ${destroot}${config_path}/${env_file}.example
+ reinplace "s|@@PREFIX@@|${prefix}|g" ${destroot}${prefix}/libexec/${name}/${wrapper_script}
+ reinplace "s|@@BIN@@|${name}|g" ${destroot}${prefix}/libexec/${name}/${wrapper_script}
+ reinplace "s|@@CONFIG_PATH@@|${config_path}|g" ${destroot}${prefix}/libexec/${name}/${wrapper_script}
+ reinplace "s|@@ENV_FILE@@|${env_file}|g" ${destroot}${prefix}/libexec/${name}/${wrapper_script}
+ reinplace "s|@@HOME@@|${ol_data_path}|g" ${destroot}${config_path}/${env_file}.example
+ reinplace "s|@@MODELS_PATH@@|${ol_models_path}|g" ${destroot}${config_path}/${env_file}.example
+
+ system -W ${destroot}${prefix}/etc/LaunchDaemons/${startupitem.uniquename} \
+ "/usr/libexec/PlistBuddy -c \"Add :SessionCreate bool true\" ${startupitem.plist} &&\
+ /usr/libexec/PlistBuddy -c \"Add :ProcessType string Interactive\" ${startupitem.plist}"
+}
+
+post-activate {
+ if {![file exists ${config_path}/${env_file}]} {
+ copy ${config_path}/${env_file}.example ${config_path}/${env_file}
+ }
+}
+
+startupitem.create yes
+startupitem.executable ${prefix}/libexec/${name}/${wrapper_script}
+startupitem.user ${ollama_user}
+startupitem.group ${ollama_user}
+
+variant logging description {Enable logging for startup item} {
+ startupitem.logfile ${ol_log_file}
+ startupitem.logfile.stderr \
+ ${ol_log_file}
+ startupitem.logevents yes
+}
+notes-append "
+ The example config file is copied to ${config_path}/${env_file} and its content\
+ will be preserved across upgrades and reinstalls. This config file configures ${name}\
+ to fetch models to ${ol_models_path}. The startup item will use this config file by default.
+"
+
+github.livecheck.regex \
+ {([0-9.]+)}
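
As a rough sketch of how a user would exercise the Portfile above (the port name "ollama" comes from go.setup; the exact startup item label is derived from ${startupitem.uniquename} and is not spelled out here):

    sudo port install ollama    # build and install the port
    sudo port load ollama       # start the launchd startup item, running as the ollama user
    port notes ollama           # print the notes-append text about the config file
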
diff --git a/llm/ollama/files/ollama-wrapper.sh b/llm/ollama/files/ollama-wrapper.sh
new file mode 100755
index 00000000000..aa15c1470c4
--- /dev/null
+++ b/llm/ollama/files/ollama-wrapper.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+set -a
+source @@CONFIG_PATH@@/@@ENV_FILE@@
+@@PREFIX@@/bin/@@BIN@@ serve
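
Once the reinplace calls in the destroot block substitute the @@ tokens, the installed wrapper would read roughly as follows (assuming the default MacPorts prefix /opt/local). The set -a line exports every variable sourced from the config file into the environment of the serve process:

    #!/bin/sh

    # rendered wrapper, assuming the default MacPorts prefix /opt/local
    set -a
    source /opt/local/etc/ollama/ollama_env.conf
    /opt/local/bin/ollama serve
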
diff --git a/llm/ollama/files/ollama_env.conf b/llm/ollama/files/ollama_env.conf
new file mode 100644
index 00000000000..c9b0ec545bb
--- /dev/null
+++ b/llm/ollama/files/ollama_env.conf
@@ -0,0 +1,10 @@
+# See https://github.com/ollama/ollama/blob/main/docs/faq.md#how-do-i-configure-ollama-server
+# for environment variables recognized by ollama
+
+HOME=@@HOME@@
+OLLAMA_MODELS=@@MODELS_PATH@@
+#OLLAMA_CONTEXT_LENGTH=8192
+#OLLAMA_FLASH_ATTENTION=1
+#OLLAMA_KEEP_ALIVE=15m
+#OLLAMA_KV_CACHE_TYPE=q8_0
+#OLLAMA_NO_MMAP=1
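
Under the same default-prefix assumption, the installed example config would resolve to approximately:

    # /opt/local/etc/ollama/ollama_env.conf (default prefix assumed)
    HOME=/opt/local/var/ollama
    OLLAMA_MODELS=/opt/local/var/ollama/models

with the remaining OLLAMA_* lines left commented out; uncommenting one of them and restarting the startup item is how a user would tune the server.
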
diff --git a/llm/ollama/files/patch-6854.diff b/llm/ollama/files/patch-6854.diff
new file mode 100644
index 00000000000..35879fdbaa0
--- /dev/null
+++ b/llm/ollama/files/patch-6854.diff
@@ -0,0 +1,35 @@
+diff --git a/envconfig/config.go b/envconfig/config.go
+index fc702198fa3..09dce62d9d2 100644
+--- a/envconfig/config.go
++++ b/envconfig/config.go
+@@ -164,6 +164,8 @@ var (
+ SchedSpread = Bool("OLLAMA_SCHED_SPREAD")
+ // IntelGPU enables experimental Intel GPU detection.
+ IntelGPU = Bool("OLLAMA_INTEL_GPU")
++ // NoMMap disables memory mapping of the model file.
++ NoMMap = Bool("OLLAMA_NO_MMAP")
+ // MultiUserCache optimizes prompt caching for multi-user scenarios
+ MultiUserCache = Bool("OLLAMA_MULTIUSER_CACHE")
+ // Enable the new Ollama engine
+@@ -251,6 +253,7 @@ func AsMap() map[string]EnvVar {
+ "OLLAMA_MODELS": {"OLLAMA_MODELS", Models(), "The path to the models directory"},
+ "OLLAMA_NOHISTORY": {"OLLAMA_NOHISTORY", NoHistory(), "Do not preserve readline history"},
+ "OLLAMA_NOPRUNE": {"OLLAMA_NOPRUNE", NoPrune(), "Do not prune model blobs on startup"},
++ "OLLAMA_NO_MMAP": {"OLLAMA_NO_MMAP", NoMMap(), "Disable memory mapping of the model file"},
+ "OLLAMA_NUM_PARALLEL": {"OLLAMA_NUM_PARALLEL", NumParallel(), "Maximum number of parallel requests"},
+ "OLLAMA_ORIGINS": {"OLLAMA_ORIGINS", AllowedOrigins(), "A comma separated list of allowed origins"},
+ "OLLAMA_SCHED_SPREAD": {"OLLAMA_SCHED_SPREAD", SchedSpread(), "Always schedule model across all GPUs"},
+diff --git a/llm/server.go b/llm/server.go
+index e6046db608e..a7b8bda9f71 100644
+--- a/llm/server.go
++++ b/llm/server.go
+@@ -209,7 +209,8 @@ func NewLlamaServer(gpus discover.GpuInfoList, modelPath string, f *ggml.GGML, a
+ // Windows CUDA should not use mmap for best performance
+ // Linux with a model larger than free space, mmap leads to thrashing
+ // For CPU loads we want the memory to be allocated, not FS cache
+- if (runtime.GOOS == "windows" && gpus[0].Library == "cuda" && opts.UseMMap == nil) ||
++ if envconfig.NoMMap() ||
++ (runtime.GOOS == "windows" && gpus[0].Library == "cuda" && opts.UseMMap == nil) ||
+ (runtime.GOOS == "linux" && systemFreeMemory < estimate.TotalSize && opts.UseMMap == nil) ||
+ (gpus[0].Library == "cpu" && opts.UseMMap == nil) ||
+ (opts.UseMMap != nil && !*opts.UseMMap) {
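
The bundled patch, taken from the upstream pull request referenced in the Portfile, adds an OLLAMA_NO_MMAP environment variable that forces model loading without mmap. A hedged example of enabling it with this port, again assuming the default prefix /opt/local:

    # uncomment the OLLAMA_NO_MMAP line in the installed config (default prefix assumed),
    # then restart the startup item so ollama serve picks up the new environment
    sudo sed -i '' 's/^#OLLAMA_NO_MMAP=1/OLLAMA_NO_MMAP=1/' /opt/local/etc/ollama/ollama_env.conf
    sudo port unload ollama && sudo port load ollama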