mirror of https://github.com/ollama/ollama.git
discovery: prevent dup OLLAMA_LIBRARY_PATH
This variable isn't currently documented or intended as something the user can override, but if the user happens to set OLLAMA_LIBRARY_PATH, we were doubling the entry in the subprocess environment, which causes problems for the new bootstrap discovery logic.
This commit is contained in:
parent 292767afb4
commit fa8ff672ed
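The failure mode is easy to reproduce outside of ollama. A minimal sketch (not ollama code, values invented for illustration): a subprocess environment is typically built from os.Environ(), so unconditionally appending OLLAMA_LIBRARY_PATH yields two entries for the same key whenever the user already set one, and anything that scans the slice sees both.

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// Suppose the user happened to set the (undocumented) variable.
	os.Setenv("OLLAMA_LIBRARY_PATH", "/user/value")

	// Subprocess environments usually start from the parent's environment...
	env := os.Environ()
	// ...so a blind append produces a second entry for the same key.
	env = append(env, "OLLAMA_LIBRARY_PATH=/opt/ollama/lib")

	for _, kv := range env {
		if strings.HasPrefix(kv, "OLLAMA_LIBRARY_PATH=") {
			fmt.Println(kv) // prints twice: the key is duplicated
		}
	}
}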
@@ -442,15 +442,18 @@ func bootstrapDevices(ctx context.Context, ollamaLibDirs []string, extraEnvs []s
 		cmd.Stderr = os.Stderr
 	}
 	// cmd.SysProcAttr = llm.LlamaServerSysProcAttr // circular dependency - bring back once refactored
-	cmd.Env = append(cmd.Env, "OLLAMA_LIBRARY_PATH="+strings.Join(ollamaLibDirs, string(filepath.ListSeparator)))
 	pathEnvVal := strings.Join(libraryPaths, string(filepath.ListSeparator))
 	pathNeeded := true
+	ollamaPathNeeded := true
 	extraDone := make([]bool, len(extraEnvs))
 	for i := range cmd.Env {
 		cmp := strings.SplitN(cmd.Env[i], "=", 2)
 		if strings.EqualFold(cmp[0], pathEnv) {
 			cmd.Env[i] = pathEnv + "=" + pathEnvVal
 			pathNeeded = false
+		} else if strings.EqualFold(cmp[0], "OLLAMA_LIBRARY_PATH") {
+			cmd.Env[i] = "OLLAMA_LIBRARY_PATH=" + strings.Join(ollamaLibDirs, string(filepath.ListSeparator))
+			ollamaPathNeeded = false
 		} else {
 			for j := range extraEnvs {
 				if extraDone[j] {
@@ -467,6 +470,9 @@ func bootstrapDevices(ctx context.Context, ollamaLibDirs []string, extraEnvs []s
 	if pathNeeded {
 		cmd.Env = append(cmd.Env, pathEnv+"="+pathEnvVal)
 	}
+	if ollamaPathNeeded {
+		cmd.Env = append(cmd.Env, "OLLAMA_LIBRARY_PATH="+strings.Join(ollamaLibDirs, string(filepath.ListSeparator)))
+	}
 	for i := range extraDone {
 		if !extraDone[i] {
 			cmd.Env = append(cmd.Env, extraEnvs[i])
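Both hunks apply the same update-or-append pattern. As a standalone sketch (setEnv is a hypothetical helper, not part of the ollama codebase), the logic reduces to: walk the environment once, overwrite the key in place if it already exists, and only append when the walk found nothing.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// setEnv ensures key appears exactly once in env: overwrite in place if the
// key already exists (case-insensitively, mirroring the patch's
// strings.EqualFold check), otherwise append it.
func setEnv(env []string, key, val string) []string {
	for i := range env {
		kv := strings.SplitN(env[i], "=", 2)
		if strings.EqualFold(kv[0], key) {
			env[i] = key + "=" + val
			return env
		}
	}
	return append(env, key+"="+val)
}

func main() {
	libDirs := []string{"/opt/ollama/lib", "/opt/ollama/lib/cuda"}
	env := []string{"HOME=/home/u", "OLLAMA_LIBRARY_PATH=/user/value"}
	env = setEnv(env, "OLLAMA_LIBRARY_PATH", strings.Join(libDirs, string(filepath.ListSeparator)))
	fmt.Println(env) // the key appears once, holding the joined directories
}

The patch inlines this logic with pathNeeded/ollamaPathNeeded flags rather than calling a helper, because it also rewrites other keys (pathEnv, the extra env vars) in the same single pass.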
@@ -359,20 +359,22 @@ func NewLlamaServer(gpus discover.GpuInfoList, modelPath string, f *ggml.GGML, a
 	s.cmd.Stderr = s.status
 	s.cmd.SysProcAttr = LlamaServerSysProcAttr
 
-	s.cmd.Env = append(s.cmd.Env, "OLLAMA_LIBRARY_PATH="+strings.Join(ggmlPaths, string(filepath.ListSeparator)))
 
 	// Always filter down the set of GPUs in case there are any unsupported devices that might crash
 	envWorkarounds := gpus.GetVisibleDevicesEnv()
 	pathEnvVal := strings.Join(libraryPaths, string(filepath.ListSeparator))
 
 	// Update or add the path variable with our adjusted version
 	pathNeeded := true
+	ollamaPathNeeded := true
 	envWorkaroundDone := make([]bool, len(envWorkarounds))
 	for i := range s.cmd.Env {
 		cmp := strings.SplitN(s.cmd.Env[i], "=", 2)
 		if strings.EqualFold(cmp[0], pathEnv) {
 			s.cmd.Env[i] = pathEnv + "=" + pathEnvVal
 			pathNeeded = false
+		} else if strings.EqualFold(cmp[0], "OLLAMA_LIBRARY_PATH") {
+			s.cmd.Env[i] = "OLLAMA_LIBRARY_PATH=" + strings.Join(ggmlPaths, string(filepath.ListSeparator))
+			ollamaPathNeeded = false
 		} else if len(envWorkarounds) != 0 {
 			for j, kv := range envWorkarounds {
 				tmp := strings.SplitN(kv, "=", 2)
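Two details in the hunk above carry weight across platforms. filepath.ListSeparator keeps the joined directory list valid on every OS, and strings.EqualFold keeps the key match correct on Windows, where environment variable names are case-insensitive. A quick illustration:

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	// filepath.ListSeparator is ':' on Unix and ';' on Windows, which is
	// why the patch joins directories with it rather than a hard-coded colon.
	fmt.Printf("list separator: %q\n", string(filepath.ListSeparator))

	// strings.EqualFold makes the key comparison case-insensitive; the
	// inherited variable may appear as "Path" rather than "PATH" on Windows.
	fmt.Println(strings.EqualFold("Path", "PATH")) // true
}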
@@ -386,6 +388,9 @@ func NewLlamaServer(gpus discover.GpuInfoList, modelPath string, f *ggml.GGML, a
 	if pathNeeded {
 		s.cmd.Env = append(s.cmd.Env, pathEnv+"="+pathEnvVal)
 	}
+	if ollamaPathNeeded {
+		s.cmd.Env = append(s.cmd.Env, "OLLAMA_LIBRARY_PATH="+strings.Join(ggmlPaths, string(filepath.ListSeparator)))
+	}
 	for i, done := range envWorkaroundDone {
 		if !done {
 			s.cmd.Env = append(s.cmd.Env, envWorkarounds[i])
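With both call sites fixed, a hypothetical regression check (not part of this commit) is to build the subprocess environment and assert that no key occurs twice:

package main

import (
	"fmt"
	"strings"
)

// dupKeys returns every environment key that occurs more than once,
// comparing keys case-insensitively as the patch does.
func dupKeys(env []string) []string {
	seen := map[string]int{}
	for _, kv := range env {
		seen[strings.ToUpper(strings.SplitN(kv, "=", 2)[0])]++
	}
	var dups []string
	for k, n := range seen {
		if n > 1 {
			dups = append(dups, k)
		}
	}
	return dups
}

func main() {
	env := []string{"OLLAMA_LIBRARY_PATH=/a", "PATH=/bin", "OLLAMA_LIBRARY_PATH=/b"}
	fmt.Println(dupKeys(env)) // [OLLAMA_LIBRARY_PATH]
}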