diff --git a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx
index 1192ce675..8a18fb999 100644
--- a/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx
+++ b/frontend/src/components/EmbeddingSelection/LMStudioOptions/index.jsx
@@ -1,48 +1,112 @@
 import React, { useEffect, useState } from "react";
 import System from "@/models/system";
+import PreLoader from "@/components/Preloader";
+import { LMSTUDIO_COMMON_URLS } from "@/utils/constants";
+import { CaretDown, CaretUp } from "@phosphor-icons/react";
+import useProviderEndpointAutoDiscovery from "@/hooks/useProviderEndpointAutoDiscovery";
 
 export default function LMStudioEmbeddingOptions({ settings }) {
-  const [basePathValue, setBasePathValue] = useState(
-    settings?.EmbeddingBasePath
+  const {
+    autoDetecting: loading,
+    basePath,
+    basePathValue,
+    showAdvancedControls,
+    setShowAdvancedControls,
+    handleAutoDetectClick,
+  } = useProviderEndpointAutoDiscovery({
+    provider: "lmstudio",
+    initialBasePath: settings?.EmbeddingBasePath,
+    ENDPOINTS: LMSTUDIO_COMMON_URLS,
+  });
+
+  const [maxChunkLength, setMaxChunkLength] = useState(
+    settings?.EmbeddingModelMaxChunkLength || 8192
   );
-  const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
+
+  const handleMaxChunkLengthChange = (e) => {
+    setMaxChunkLength(e.target.value);
+  };
 
   return (
     <div className="w-full flex flex-col gap-y-4">
-      <div className="w-full flex items-center gap-4">
+      <div className="w-full flex items-start gap-4">
+        <LMStudioModelSelection settings={settings} basePath={basePath.value} />
         <div className="flex flex-col w-60">
-          <label className="text-white text-sm font-semibold block mb-4">
-            LMStudio Base URL
-          </label>
-          <input
-            type="url"
-            name="EmbeddingBasePath"
-            className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
-            placeholder="http://localhost:1234/v1"
-            defaultValue={settings?.EmbeddingBasePath}
-            onChange={(e) => setBasePathValue(e.target.value)}
-            onBlur={() => setBasePath(basePathValue)}
-            required={true}
-            autoComplete="off"
-            spellCheck={false}
-          />
-        </div>
-        <LMStudioModelSelection settings={settings} basePath={basePath} />
-        <div className="flex flex-col w-60">
-          <label className="text-white text-sm font-semibold block mb-4">
-            Max embedding chunk length
+          <label className="text-white text-sm font-semibold block mb-2">
+            Max Embedding Chunk Length
           </label>
           <input
             type="number"
             name="EmbeddingModelMaxChunkLength"
-            className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
             placeholder="8192"
             min={1}
+            value={maxChunkLength}
+            onChange={handleMaxChunkLengthChange}
             onScroll={(e) => e.target.blur()}
-            defaultValue={settings?.EmbeddingModelMaxChunkLength}
-            required={false}
+            required={true}
             autoComplete="off"
           />
+          <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+            Maximum length of text chunks for embedding.
+          </p>
+        </div>
+      </div>
+      <div className="flex justify-start mt-4">
+        <button
+          onClick={(e) => {
+            e.preventDefault();
+            setShowAdvancedControls(!showAdvancedControls);
+          }}
+          className="text-white hover:text-white/70 flex items-center text-sm"
+        >
+          {showAdvancedControls ? "Hide" : "Show"} Manual Endpoint Input
+          {showAdvancedControls ? (
+            <CaretUp size={14} className="ml-1" />
+          ) : (
+            <CaretDown size={14} className="ml-1" />
+          )}
+        </button>
+      </div>
+
+      <div hidden={!showAdvancedControls}>
+        <div className="w-full flex items-start gap-4">
+          <div className="flex flex-col w-60">
+            <div className="flex justify-between items-center mb-2">
+              <label className="text-white text-sm font-semibold">
+                LM Studio Base URL
+              </label>
+              {loading ? (
+                <PreLoader size="6" />
+              ) : (
+                <>
+                  {!basePathValue.value && (
+                    <button
+                      onClick={handleAutoDetectClick}
+                      className="bg-primary-button text-xs font-medium px-2 py-1 rounded-lg hover:bg-secondary hover:text-white shadow-[0_4px_14px_rgba(0,0,0,0.25)]"
+                    >
+                      Auto-Detect
+                    </button>
+                  )}
+                </>
+              )}
+            </div>
+            <input
+              type="url"
+              name="EmbeddingBasePath"
+              className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+              placeholder="http://localhost:1234/v1"
+              value={basePathValue.value}
+              required={true}
+              autoComplete="off"
+              spellCheck={false}
+              onChange={basePath.onChange}
+              onBlur={basePath.onBlur}
+            />
+            <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+              Enter the URL where LM Studio is running.
+            </p>
+          </div>
         </div>
       </div>
     </div>
@@ -55,14 +119,23 @@ function LMStudioModelSelection({ settings, basePath = null }) {
 
   useEffect(() => {
     async function findCustomModels() {
-      if (!basePath || !basePath.includes("/v1")) {
+      if (!basePath) {
         setCustomModels([]);
         setLoading(false);
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("lmstudio", null, basePath);
-      setCustomModels(models || []);
+      try {
+        const { models } = await System.customModels(
+          "lmstudio",
+          null,
+          basePath
+        );
+        setCustomModels(models || []);
+      } catch (error) {
+        console.error("Failed to fetch custom models:", error);
+        setCustomModels([]);
+      }
       setLoading(false);
     }
     findCustomModels();
@@ -71,8 +144,8 @@ function LMStudioModelSelection({ settings, basePath = null }) {
   if (loading || customModels.length == 0) {
     return (
       <div className="flex flex-col w-60">
-        <label className="text-white text-sm font-semibold block mb-4">
-          Chat Model Selection
+        <label className="text-white text-sm font-semibold block mb-2">
+          LM Studio Embedding Model
         </label>
         <select
           name="EmbeddingModelPref"
@@ -80,19 +153,23 @@ function LMStudioModelSelection({ settings, basePath = null }) {
           className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
         >
           <option disabled={true} selected={true}>
-            {basePath?.includes("/v1")
-              ? "-- loading available models --"
-              : "-- waiting for URL --"}
+            {!!basePath
+              ? "--loading available models--"
+              : "Enter LM Studio URL first"}
           </option>
         </select>
+        <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+          Select the LM Studio model for embeddings. Models will load after
+          entering a valid LM Studio URL.
+        </p>
       </div>
     );
   }
 
   return (
     <div className="flex flex-col w-60">
-      <label className="text-white text-sm font-semibold block mb-4">
-        Chat Model Selection
+      <label className="text-white text-sm font-semibold block mb-2">
+        LM Studio Embedding Model
       </label>
       <select
         name="EmbeddingModelPref"
@@ -115,6 +192,9 @@ function LMStudioModelSelection({ settings, basePath = null }) {
           </optgroup>
         )}
       </select>
+      <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+        Choose the LM Studio model you want to use for generating embeddings.
+      </p>
     </div>
   );
 }
diff --git a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx
index 3213f5d39..fca1ae755 100644
--- a/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx
+++ b/frontend/src/components/EmbeddingSelection/OllamaOptions/index.jsx
@@ -1,55 +1,122 @@
 import React, { useEffect, useState } from "react";
 import System from "@/models/system";
+import PreLoader from "@/components/Preloader";
+import { OLLAMA_COMMON_URLS } from "@/utils/constants";
+import { CaretDown, CaretUp } from "@phosphor-icons/react";
+import useProviderEndpointAutoDiscovery from "@/hooks/useProviderEndpointAutoDiscovery";
 
 export default function OllamaEmbeddingOptions({ settings }) {
-  const [basePathValue, setBasePathValue] = useState(
-    settings?.EmbeddingBasePath
+  const {
+    autoDetecting: loading,
+    basePath,
+    basePathValue,
+    showAdvancedControls,
+    setShowAdvancedControls,
+    handleAutoDetectClick,
+  } = useProviderEndpointAutoDiscovery({
+    provider: "ollama",
+    initialBasePath: settings?.EmbeddingBasePath,
+    ENDPOINTS: OLLAMA_COMMON_URLS,
+  });
+
+  const [maxChunkLength, setMaxChunkLength] = useState(
+    settings?.EmbeddingModelMaxChunkLength || 8192
   );
-  const [basePath, setBasePath] = useState(settings?.EmbeddingBasePath);
+
+  const handleMaxChunkLengthChange = (e) => {
+    setMaxChunkLength(e.target.value);
+  };
 
   return (
     <div className="w-full flex flex-col gap-y-4">
-      <div className="w-full flex items-center gap-4">
+      <div className="w-full flex items-start gap-4">
+        <OllamaEmbeddingModelSelection
+          settings={settings}
+          basePath={basePath.value}
+        />
         <div className="flex flex-col w-60">
-          <label className="text-white text-sm font-semibold block mb-4">
-            Ollama Base URL
-          </label>
-          <input
-            type="url"
-            name="EmbeddingBasePath"
-            className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
-            placeholder="http://127.0.0.1:11434"
-            defaultValue={settings?.EmbeddingBasePath}
-            onChange={(e) => setBasePathValue(e.target.value)}
-            onBlur={() => setBasePath(basePathValue)}
-            required={true}
-            autoComplete="off"
-            spellCheck={false}
-          />
-        </div>
-        <OllamaLLMModelSelection settings={settings} basePath={basePath} />
-        <div className="flex flex-col w-60">
-          <label className="text-white text-sm font-semibold block mb-4">
-            Max embedding chunk length
+          <label className="text-white text-sm font-semibold block mb-2">
+            Max Embedding Chunk Length
           </label>
           <input
             type="number"
             name="EmbeddingModelMaxChunkLength"
-            className="bg-zinc-900 text-white placeholder-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+            className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
             placeholder="8192"
             min={1}
+            value={maxChunkLength}
+            onChange={handleMaxChunkLengthChange}
             onScroll={(e) => e.target.blur()}
-            defaultValue={settings?.EmbeddingModelMaxChunkLength}
-            required={false}
+            required={true}
             autoComplete="off"
           />
+          <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+            Maximum length of text chunks for embedding.
+          </p>
+        </div>
+      </div>
+      <div className="flex justify-start mt-4">
+        <button
+          onClick={(e) => {
+            e.preventDefault();
+            setShowAdvancedControls(!showAdvancedControls);
+          }}
+          className="text-white hover:text-white/70 flex items-center text-sm"
+        >
+          {showAdvancedControls ? "Hide" : "Show"} Manual Endpoint Input
+          {showAdvancedControls ? (
+            <CaretUp size={14} className="ml-1" />
+          ) : (
+            <CaretDown size={14} className="ml-1" />
+          )}
+        </button>
+      </div>
+
+      <div hidden={!showAdvancedControls}>
+        <div className="w-full flex items-start gap-4">
+          <div className="flex flex-col w-60">
+            <div className="flex justify-between items-center mb-2">
+              <label className="text-white text-sm font-semibold">
+                Ollama Base URL
+              </label>
+              {loading ? (
+                <PreLoader size="6" />
+              ) : (
+                <>
+                  {!basePathValue.value && (
+                    <button
+                      onClick={handleAutoDetectClick}
+                      className="bg-primary-button text-xs font-medium px-2 py-1 rounded-lg hover:bg-secondary hover:text-white shadow-[0_4px_14px_rgba(0,0,0,0.25)]"
+                    >
+                      Auto-Detect
+                    </button>
+                  )}
+                </>
+              )}
+            </div>
+            <input
+              type="url"
+              name="EmbeddingBasePath"
+              className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+              placeholder="http://127.0.0.1:11434"
+              value={basePathValue.value}
+              required={true}
+              autoComplete="off"
+              spellCheck={false}
+              onChange={basePath.onChange}
+              onBlur={basePath.onBlur}
+            />
+            <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+              Enter the URL where Ollama is running.
+            </p>
+          </div>
         </div>
       </div>
     </div>
   );
 }
 
-function OllamaLLMModelSelection({ settings, basePath = null }) {
+function OllamaEmbeddingModelSelection({ settings, basePath = null }) {
   const [customModels, setCustomModels] = useState([]);
   const [loading, setLoading] = useState(true);
 
@@ -61,8 +128,13 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("ollama", null, basePath);
-      setCustomModels(models || []);
+      try {
+        const { models } = await System.customModels("ollama", null, basePath);
+        setCustomModels(models || []);
+      } catch (error) {
+        console.error("Failed to fetch custom models:", error);
+        setCustomModels([]);
+      }
       setLoading(false);
     }
     findCustomModels();
@@ -71,33 +143,37 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
   if (loading || customModels.length == 0) {
     return (
       <div className="flex flex-col w-60">
-        <label className="text-white text-sm font-semibold block mb-4">
-          Embedding Model Selection
+        <label className="text-white text-sm font-semibold block mb-2">
+          Ollama Embedding Model
         </label>
         <select
           name="EmbeddingModelPref"
           disabled={true}
-          className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+          className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
         >
           <option disabled={true} selected={true}>
             {!!basePath
-              ? "-- loading available models --"
-              : "-- waiting for URL --"}
+              ? "--loading available models--"
+              : "Enter Ollama URL first"}
           </option>
         </select>
+        <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+          Select the Ollama model for embeddings. Models will load after
+          entering a valid Ollama URL.
+        </p>
       </div>
     );
   }
 
   return (
     <div className="flex flex-col w-60">
-      <label className="text-white text-sm font-semibold block mb-4">
-        Embedding Model Selection
+      <label className="text-white text-sm font-semibold block mb-2">
+        Ollama Embedding Model
       </label>
       <select
         name="EmbeddingModelPref"
         required={true}
-        className="bg-zinc-900 border border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
+        className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
       >
         {customModels.length > 0 && (
           <optgroup label="Your loaded models">
@@ -115,6 +191,9 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
           </optgroup>
         )}
       </select>
+      <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+        Choose the Ollama model you want to use for generating embeddings.
+      </p>
     </div>
   );
 }
diff --git a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx
index 9a1c59bc7..d3e1df58f 100644
--- a/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx
+++ b/frontend/src/components/LLMSelection/LMStudioOptions/index.jsx
@@ -1,13 +1,32 @@
 import { useEffect, useState } from "react";
-import { Info } from "@phosphor-icons/react";
+import { Info, CaretDown, CaretUp } from "@phosphor-icons/react";
 import paths from "@/utils/paths";
 import System from "@/models/system";
+import PreLoader from "@/components/Preloader";
+import { LMSTUDIO_COMMON_URLS } from "@/utils/constants";
+import useProviderEndpointAutoDiscovery from "@/hooks/useProviderEndpointAutoDiscovery";
 
 export default function LMStudioOptions({ settings, showAlert = false }) {
-  const [basePathValue, setBasePathValue] = useState(
-    settings?.LMStudioBasePath
+  const {
+    autoDetecting: loading,
+    basePath,
+    basePathValue,
+    showAdvancedControls,
+    setShowAdvancedControls,
+    handleAutoDetectClick,
+  } = useProviderEndpointAutoDiscovery({
+    provider: "lmstudio",
+    initialBasePath: settings?.LMStudioBasePath,
+    ENDPOINTS: LMSTUDIO_COMMON_URLS,
+  });
+
+  const [maxTokens, setMaxTokens] = useState(
+    settings?.LMStudioTokenLimit || 4096
   );
-  const [basePath, setBasePath] = useState(settings?.LMStudioBasePath);
+
+  const handleMaxTokensChange = (e) => {
+    setMaxTokens(e.target.value);
+  };
 
   return (
     <div className="w-full flex flex-col">
@@ -28,45 +47,86 @@ export default function LMStudioOptions({ settings, showAlert = false }) {
           </a>
         </div>
       )}
-      <div className="w-full flex items-center gap-4">
+      <div className="w-full flex items-start gap-4">
+        <LMStudioModelSelection settings={settings} basePath={basePath.value} />
         <div className="flex flex-col w-60">
-          <label className="text-white text-sm font-semibold block mb-4">
-            LMStudio Base URL
+          <label className="text-white text-sm font-semibold block mb-2">
+            Max Tokens
           </label>
           <input
-            type="url"
-            name="LMStudioBasePath"
+            type="number"
+            name="LMStudioTokenLimit"
             className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
-            placeholder="http://localhost:1234/v1"
-            defaultValue={settings?.LMStudioBasePath}
+            placeholder="4096"
+            onWheel={(e) => e.target.blur()}
+            min={1}
+            value={maxTokens}
+            onChange={handleMaxTokensChange}
+            onScroll={(e) => e.target.blur()}
             required={true}
             autoComplete="off"
-            spellCheck={false}
-            onChange={(e) => setBasePathValue(e.target.value)}
-            onBlur={() => setBasePath(basePathValue)}
           />
+          <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+            Maximum number of tokens for context and response.
+          </p>
         </div>
-        {!settings?.credentialsOnly && (
-          <>
-            <LMStudioModelSelection settings={settings} basePath={basePath} />
-            <div className="flex flex-col w-60">
-              <label className="text-white text-sm font-semibold block mb-4">
-                Token context window
+      </div>
+      <div className="flex justify-start mt-4">
+        <button
+          onClick={(e) => {
+            e.preventDefault();
+            setShowAdvancedControls(!showAdvancedControls);
+          }}
+          className="text-white hover:text-white/70 flex items-center text-sm"
+        >
+          {showAdvancedControls ? "Hide" : "Show"} Manual Endpoint Input
+          {showAdvancedControls ? (
+            <CaretUp size={14} className="ml-1" />
+          ) : (
+            <CaretDown size={14} className="ml-1" />
+          )}
+        </button>
+      </div>
+
+      <div hidden={!showAdvancedControls}>
+        <div className="w-full flex items-start gap-4 mt-4">
+          <div className="flex flex-col w-60">
+            <div className="flex justify-between items-center mb-2">
+              <label className="text-white text-sm font-semibold">
+                LM Studio Base URL
               </label>
-              <input
-                type="number"
-                name="LMStudioTokenLimit"
-                className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
-                placeholder="4096"
-                min={1}
-                onScroll={(e) => e.target.blur()}
-                defaultValue={settings?.LMStudioTokenLimit}
-                required={true}
-                autoComplete="off"
-              />
+              {loading ? (
+                <PreLoader size="6" />
+              ) : (
+                <>
+                  {!basePathValue.value && (
+                    <button
+                      onClick={handleAutoDetectClick}
+                      className="bg-primary-button text-xs font-medium px-2 py-1 rounded-lg hover:bg-secondary hover:text-white shadow-[0_4px_14px_rgba(0,0,0,0.25)]"
+                    >
+                      Auto-Detect
+                    </button>
+                  )}
+                </>
+              )}
             </div>
-          </>
-        )}
+            <input
+              type="url"
+              name="LMStudioBasePath"
+              className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+              placeholder="http://localhost:1234/v1"
+              value={basePathValue.value}
+              required={true}
+              autoComplete="off"
+              spellCheck={false}
+              onChange={basePath.onChange}
+              onBlur={basePath.onBlur}
+            />
+            <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+              Enter the URL where LM Studio is running.
+            </p>
+          </div>
+        </div>
       </div>
     </div>
   );
@@ -78,14 +138,23 @@ function LMStudioModelSelection({ settings, basePath = null }) {
 
   useEffect(() => {
     async function findCustomModels() {
-      if (!basePath || !basePath.includes("/v1")) {
+      if (!basePath) {
         setCustomModels([]);
         setLoading(false);
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("lmstudio", null, basePath);
-      setCustomModels(models || []);
+      try {
+        const { models } = await System.customModels(
+          "lmstudio",
+          null,
+          basePath
+        );
+        setCustomModels(models || []);
+      } catch (error) {
+        console.error("Failed to fetch custom models:", error);
+        setCustomModels([]);
+      }
       setLoading(false);
     }
     findCustomModels();
@@ -94,8 +163,8 @@ function LMStudioModelSelection({ settings, basePath = null }) {
   if (loading || customModels.length == 0) {
     return (
       <div className="flex flex-col w-60">
-        <label className="text-white text-sm font-semibold block mb-4">
-          Chat Model Selection
+        <label className="text-white text-sm font-semibold block mb-2">
+          LM Studio Model
         </label>
         <select
           name="LMStudioModelPref"
@@ -103,19 +172,23 @@ function LMStudioModelSelection({ settings, basePath = null }) {
           className="bg-zinc-900 border-gray-500 text-white text-sm rounded-lg block w-full p-2.5"
         >
           <option disabled={true} selected={true}>
-            {basePath?.includes("/v1")
-              ? "-- loading available models --"
-              : "-- waiting for URL --"}
+            {!!basePath
+              ? "--loading available models--"
+              : "Enter LM Studio URL first"}
           </option>
         </select>
+        <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+          Select the LM Studio model you want to use. Models will load after
+          entering a valid LM Studio URL.
+        </p>
       </div>
     );
   }
 
   return (
     <div className="flex flex-col w-60">
-      <label className="text-white text-sm font-semibold block mb-4">
-        Chat Model Selection
+      <label className="text-white text-sm font-semibold block mb-2">
+        LM Studio Model
       </label>
       <select
         name="LMStudioModelPref"
@@ -138,6 +211,9 @@ function LMStudioModelSelection({ settings, basePath = null }) {
           </optgroup>
         )}
       </select>
+      <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+        Choose the LM Studio model you want to use for your conversations.
+      </p>
     </div>
   );
 }
diff --git a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx
index b08f29447..841943937 100644
--- a/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx
+++ b/frontend/src/components/LLMSelection/OllamaLLMOptions/index.jsx
@@ -1,53 +1,117 @@
-import { useEffect, useState } from "react";
+import React, { useEffect, useState } from "react";
 import System from "@/models/system";
+import PreLoader from "@/components/Preloader";
+import { OLLAMA_COMMON_URLS } from "@/utils/constants";
+import { CaretDown, CaretUp } from "@phosphor-icons/react";
+import useProviderEndpointAutoDiscovery from "@/hooks/useProviderEndpointAutoDiscovery";
 
 export default function OllamaLLMOptions({ settings }) {
-  const [basePathValue, setBasePathValue] = useState(
-    settings?.OllamaLLMBasePath
+  const {
+    autoDetecting: loading,
+    basePath,
+    basePathValue,
+    showAdvancedControls,
+    setShowAdvancedControls,
+    handleAutoDetectClick,
+  } = useProviderEndpointAutoDiscovery({
+    provider: "ollama",
+    initialBasePath: settings?.OllamaLLMBasePath,
+    ENDPOINTS: OLLAMA_COMMON_URLS,
+  });
+
+  const [maxTokens, setMaxTokens] = useState(
+    settings?.OllamaLLMTokenLimit || 4096
   );
-  const [basePath, setBasePath] = useState(settings?.OllamaLLMBasePath);
+
+  const handleMaxTokensChange = (e) => {
+    setMaxTokens(e.target.value);
+  };
 
   return (
     <div className="w-full flex flex-col gap-y-4">
-      <div className="w-full flex items-center gap-4">
+      <div className="w-full flex items-start gap-4">
+        <OllamaLLMModelSelection
+          settings={settings}
+          basePath={basePath.value}
+        />
         <div className="flex flex-col w-60">
-          <label className="text-white text-sm font-semibold block mb-4">
-            Ollama Base URL
+          <label className="text-white text-sm font-semibold block mb-2">
+            Max Tokens
           </label>
           <input
-            type="url"
-            name="OllamaLLMBasePath"
+            type="number"
+            name="OllamaLLMTokenLimit"
             className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
-            placeholder="http://127.0.0.1:11434"
-            defaultValue={settings?.OllamaLLMBasePath}
+            placeholder="4096"
+            onWheel={(e) => e.target.blur()}
+            min={1}
+            value={maxTokens}
+            onChange={handleMaxTokensChange}
+            onScroll={(e) => e.target.blur()}
             required={true}
             autoComplete="off"
-            spellCheck={false}
-            onChange={(e) => setBasePathValue(e.target.value)}
-            onBlur={() => setBasePath(basePathValue)}
           />
+          <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+            Maximum number of tokens for context and response.
+          </p>
         </div>
-        {!settings?.credentialsOnly && (
-          <>
-            <OllamaLLMModelSelection settings={settings} basePath={basePath} />
-            <div className="flex flex-col w-60">
-              <label className="text-white text-sm font-semibold block mb-4">
-                Token context window
+      </div>
+      <div className="flex justify-start mt-4">
+        <button
+          onClick={(e) => {
+            e.preventDefault();
+            setShowAdvancedControls(!showAdvancedControls);
+          }}
+          className="text-white hover:text-white/70 flex items-center text-sm"
+        >
+          {showAdvancedControls ? "Hide" : "Show"} Manual Endpoint Input
+          {showAdvancedControls ? (
+            <CaretUp size={14} className="ml-1" />
+          ) : (
+            <CaretDown size={14} className="ml-1" />
+          )}
+        </button>
+      </div>
+
+      <div hidden={!showAdvancedControls}>
+        <div className="w-full flex items-start gap-4">
+          <div className="flex flex-col w-60">
+            <div className="flex justify-between items-center mb-2">
+              <label className="text-white text-sm font-semibold">
+                Ollama Base URL
               </label>
-              <input
-                type="number"
-                name="OllamaLLMTokenLimit"
-                className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
-                placeholder="4096"
-                min={1}
-                onScroll={(e) => e.target.blur()}
-                defaultValue={settings?.OllamaLLMTokenLimit}
-                required={true}
-                autoComplete="off"
-              />
+              {loading ? (
+                <PreLoader size="6" />
+              ) : (
+                <>
+                  {!basePathValue.value && (
+                    <button
+                      onClick={handleAutoDetectClick}
+                      className="bg-primary-button text-xs font-medium px-2 py-1 rounded-lg hover:bg-secondary hover:text-white shadow-[0_4px_14px_rgba(0,0,0,0.25)]"
+                    >
+                      Auto-Detect
+                    </button>
+                  )}
+                </>
+              )}
             </div>
-          </>
-        )}
+            <input
+              type="url"
+              name="OllamaLLMBasePath"
+              className="bg-zinc-900 text-white placeholder:text-white/20 text-sm rounded-lg focus:border-white block w-full p-2.5"
+              placeholder="http://127.0.0.1:11434"
+              value={basePathValue.value}
+              required={true}
+              autoComplete="off"
+              spellCheck={false}
+              onChange={basePath.onChange}
+              onBlur={basePath.onBlur}
+            />
+            <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+              Enter the URL where Ollama is running.
+            </p>
+          </div>
+        </div>
       </div>
     </div>
   );
@@ -65,8 +129,13 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
         return;
       }
       setLoading(true);
-      const { models } = await System.customModels("ollama", null, basePath);
-      setCustomModels(models || []);
+      try {
+        const { models } = await System.customModels("ollama", null, basePath);
+        setCustomModels(models || []);
+      } catch (error) {
+        console.error("Failed to fetch custom models:", error);
+        setCustomModels([]);
+      }
       setLoading(false);
     }
     findCustomModels();
@@ -75,8 +144,8 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
   if (loading || customModels.length == 0) {
     return (
       <div className="flex flex-col w-60">
-        <label className="text-white text-sm font-semibold block mb-4">
-          Chat Model Selection
+        <label className="text-white text-sm font-semibold block mb-2">
+          Ollama Model
         </label>
         <select
           name="OllamaLLMModelPref"
@@ -85,18 +154,22 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
         >
           <option disabled={true} selected={true}>
             {!!basePath
-              ? "-- loading available models --"
-              : "-- waiting for URL --"}
+              ? "--loading available models--"
+              : "Enter Ollama URL first"}
           </option>
         </select>
+        <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+          Select the Ollama model you want to use. Models will load after
+          entering a valid Ollama URL.
+        </p>
       </div>
     );
   }
 
   return (
     <div className="flex flex-col w-60">
-      <label className="text-white text-sm font-semibold block mb-4">
-        Chat Model Selection
+      <label className="text-white text-sm font-semibold block mb-2">
+        Ollama Model
       </label>
       <select
         name="OllamaLLMModelPref"
@@ -119,6 +192,9 @@ function OllamaLLMModelSelection({ settings, basePath = null }) {
           </optgroup>
         )}
       </select>
+      <p className="text-xs leading-[18px] font-base text-white text-opacity-60 mt-2">
+        Choose the Ollama model you want to use for your conversations.
+      </p>
     </div>
   );
 }
diff --git a/frontend/src/hooks/useProviderEndpointAutoDiscovery.js b/frontend/src/hooks/useProviderEndpointAutoDiscovery.js
new file mode 100644
index 000000000..956b09075
--- /dev/null
+++ b/frontend/src/hooks/useProviderEndpointAutoDiscovery.js
@@ -0,0 +1,109 @@
+import { useEffect, useState } from "react";
+import System from "@/models/system";
+import showToast from "@/utils/toast";
+
+// How long (ms) to wait on each candidate endpoint before giving up.
+const DISCOVERY_TIMEOUT_MS = 2_000;
+
+/**
+ * Attempts to auto-discover a provider's API endpoint by probing a list of
+ * common local URLs in parallel, and exposes controlled-input helpers for
+ * manual endpoint entry.
+ *
+ * @param {object} options
+ * @param {string|null} options.provider - Provider key understood by System.customModels (e.g. "ollama", "lmstudio").
+ * @param {string} options.initialBasePath - Previously saved base path; when non-empty, auto-discovery is skipped.
+ * @param {string[]} options.ENDPOINTS - Candidate base URLs to probe.
+ * @returns {object} Discovery state plus basePath/basePathValue accessors.
+ */
+export default function useProviderEndpointAutoDiscovery({
+  provider = null,
+  initialBasePath = "",
+  ENDPOINTS = [],
+}) {
+  const [loading, setLoading] = useState(false);
+  // basePath is the committed value; basePathValue mirrors the input field.
+  const [basePath, setBasePath] = useState(initialBasePath);
+  const [basePathValue, setBasePathValue] = useState(initialBasePath);
+  const [autoDetectAttempted, setAutoDetectAttempted] = useState(false);
+  const [showAdvancedControls, setShowAdvancedControls] = useState(true);
+
+  async function autoDetect() {
+    setLoading(true);
+    setAutoDetectAttempted(true);
+
+    // Probe every candidate endpoint in parallel; the first one to respond
+    // with a non-empty model list wins (Promise.any). A probe that returns
+    // no models is treated as a failure so discovery can fall through.
+    const probes = ENDPOINTS.map((endpoint) =>
+      System.customModels(provider, null, endpoint, DISCOVERY_TIMEOUT_MS).then(
+        (results) => {
+          if (!results?.models || results.models.length === 0)
+            throw new Error(`${provider} @ ${endpoint} did not resolve.`);
+          return { endpoint, models: results.models };
+        }
+      )
+    );
+
+    const { endpoint, models } = await Promise.any(probes).catch(() => {
+      // Promise.any rejects only when every probe failed (or ENDPOINTS is empty).
+      console.error("All endpoints failed to resolve.");
+      return { endpoint: null, models: null };
+    });
+
+    if (models !== null) {
+      setBasePath(endpoint);
+      setBasePathValue(endpoint);
+      setLoading(false);
+      showToast("Provider endpoint discovered automatically.", "success", {
+        clear: true,
+      });
+      setShowAdvancedControls(false);
+      return;
+    }
+
+    setLoading(false);
+    setShowAdvancedControls(true);
+    showToast(
+      "Couldn't automatically discover the provider endpoint. Please enter it manually.",
+      "info",
+      { clear: true }
+    );
+  }
+
+  function handleAutoDetectClick(e) {
+    e.preventDefault();
+    autoDetect();
+  }
+
+  function handleBasePathChange(e) {
+    setBasePathValue(e.target.value);
+  }
+
+  // Commit the typed value only on blur so consumers re-query models once,
+  // not on every keystroke.
+  function handleBasePathBlur() {
+    setBasePath(basePathValue);
+  }
+
+  useEffect(() => {
+    // Attempt discovery at most once, and only when no endpoint was saved.
+    if (!initialBasePath && !autoDetectAttempted) autoDetect();
+  }, [initialBasePath, autoDetectAttempted]);
+
+  return {
+    autoDetecting: loading,
+    autoDetectAttempted,
+    showAdvancedControls,
+    setShowAdvancedControls,
+    basePath: {
+      value: basePath,
+      set: setBasePathValue,
+      onChange: handleBasePathChange,
+      onBlur: handleBasePathBlur,
+    },
+    basePathValue: {
+      value: basePathValue,
+      set: setBasePathValue,
+    },
+    handleAutoDetectClick,
+    runAutoDetect: autoDetect,
+  };
+}
diff --git a/frontend/src/models/system.js b/frontend/src/models/system.js
index b922457b7..d6c724b7d 100644
--- a/frontend/src/models/system.js
+++ b/frontend/src/models/system.js
@@ -512,10 +512,23 @@ const System = {
         return false;
       });
   },
-  customModels: async function (provider, apiKey = null, basePath = null) {
+  customModels: async function (
+    provider,
+    apiKey = null,
+    basePath = null,
+    timeout = null
+  ) {
+    const controller = new AbortController();
+    if (!!timeout) {
+      setTimeout(() => {
+        controller.abort("Request timed out.");
+      }, timeout);
+    }
+
     return fetch(`${API_BASE}/system/custom-models`, {
       method: "POST",
       headers: baseHeaders(),
+      signal: controller.signal,
       body: JSON.stringify({
         provider,
         apiKey,
diff --git a/frontend/src/utils/constants.js b/frontend/src/utils/constants.js
index 3f637617f..a08439d0b 100644
--- a/frontend/src/utils/constants.js
+++ b/frontend/src/utils/constants.js
@@ -10,6 +10,19 @@ export const SEEN_WATCH_ALERT = "anythingllm_watched_document_alert";
 export const USER_BACKGROUND_COLOR = "bg-historical-msg-user";
 export const AI_BACKGROUND_COLOR = "bg-historical-msg-system";
 
+export const OLLAMA_COMMON_URLS = [
+  "http://127.0.0.1:11434",
+  "http://host.docker.internal:11434",
+  "http://172.17.0.1:11434",
+];
+
+export const LMSTUDIO_COMMON_URLS = [
+  "http://localhost:1234/v1",
+  "http://127.0.0.1:1234/v1",
+  "http://host.docker.internal:1234/v1",
+  "http://172.17.0.1:1234/v1",
+];
+
 export function fullApiUrl() {
   if (API_BASE !== "/api") return API_BASE;
   return `${window.location.origin}/api`;