time=2025-04-07T18:08:00.567-06:00 level=INFO source=logging.go:50 msg="ollama app started"
time=2025-04-07T18:08:00.568-06:00 level=INFO source=lifecycle.go:19 msg="app config" env="map[CUDA_VISIBLE_DEVICES: GPU_DEVICE_ORDINAL: HIP_VISIBLE_DEVICES: HSA_OVERRIDE_GFX_VERSION: HTTPS_PROXY: HTTP_PROXY: NO_PROXY: OLLAMA_CONTEXT_LENGTH:2048 OLLAMA_DEBUG:false OLLAMA_FLASH_ATTENTION:false OLLAMA_GPU_OVERHEAD:0 OLLAMA_HOST:http://127.0.0.1:11434 OLLAMA_INTEL_GPU:false OLLAMA_KEEP_ALIVE:5m0s OLLAMA_KV_CACHE_TYPE: OLLAMA_LLM_LIBRARY: OLLAMA_LOAD_TIMEOUT:5m0s OLLAMA_MAX_LOADED_MODELS:0 OLLAMA_MAX_QUEUE:512 OLLAMA_MODELS:C:\\Users\\jamie\\.ollama\\models OLLAMA_MULTIUSER_CACHE:false OLLAMA_NEW_ENGINE:false OLLAMA_NOHISTORY:false OLLAMA_NOPRUNE:false OLLAMA_NUM_PARALLEL:0 OLLAMA_ORIGINS:[http://localhost https://localhost http://localhost:* https://localhost:* http://127.0.0.1 https://127.0.0.1 http://127.0.0.1:* https://127.0.0.1:* http://0.0.0.0 https://0.0.0.0 http://0.0.0.0:* https://0.0.0.0:* app://* file://* tauri://* vscode-webview://* vscode-file://*] OLLAMA_SCHED_SPREAD:false ROCR_VISIBLE_DEVICES:]"
time=2025-04-07T18:08:00.583-06:00 level=INFO source=server.go:182 msg="unable to connect to server"
time=2025-04-07T18:08:00.583-06:00 level=INFO source=server.go:141 msg="starting server..."
time=2025-04-07T18:08:00.588-06:00 level=INFO source=server.go:127 msg="started ollama server with pid 16196"
time=2025-04-07T18:08:00.588-06:00 level=INFO source=server.go:129 msg="ollama server logs C:\\Users\\jamie\\AppData\\Local\\Ollama\\server.log"
time=2025-04-07T18:09:56.207-06:00 level=INFO source=lifecycle.go:89 msg="Waiting for ollama server to shutdown..."
time=2025-04-07T18:09:56.361-06:00 level=INFO source=server.go:158 msg="server shutdown with exit code 0"
time=2025-04-07T18:09:56.361-06:00 level=INFO source=lifecycle.go:93 msg="Ollama app exiting"