diff --git a/docsite/docs/mcp-server.md b/docsite/docs/mcp-server.md
index e694626..97dba2a 100644
--- a/docsite/docs/mcp-server.md
+++ b/docsite/docs/mcp-server.md
@@ -31,10 +31,102 @@ With the server running, you can connect to it from any MCP-compatible client. T
Popular clients that support MCP include AI-powered IDEs and standalone applications. Here’s how to configure a few of them:
-- **Cursor**: [Configuring MCP Servers](https://docs.cursor.com/en/context/mcp#configuring-mcp-servers)
-- **Claude Code**: [Using MCP with Claude Code](https://docs.claude.com/en/docs/claude-code/mcp)
-- **Claude Desktop**: [User Quickstart](https://modelcontextprotocol.info/docs/quickstart/user/)
-- **Gemini CLI**: [Configure MCP Servers](https://cloud.google.com/gemini/docs/codeassist/use-agentic-chat-pair-programmer#configure-mcp-servers)
+
+### Install in Cursor
+
+Go to: `Settings` -> `Cursor Settings` -> `MCP` -> `Add new global MCP server`
+
+Pasting the following configuration into your Cursor `~/.cursor/mcp.json` file is the recommended approach. See [Cursor MCP docs](https://docs.cursor.com/context/model-context-protocol) for more info.
+
+```json
+{
+ "mcpServers": {
+ "intugle": {
+ "url": "http://localhost:8080/semantic_layer/mcp",
+ "trust": true
+ }
+ }
+}
+```
+
+
+
+
+### Install in VS Code
+
+Add this to your VS Code `settings.json` file. See [VS Code MCP docs](https://code.visualstudio.com/docs/copilot/chat/mcp-servers) for more info.
+
+```json
+"mcp": {
+ "servers": {
+ "intugle": {
+ "type": "http",
+ "url": "http://localhost:8080/semantic_layer/mcp"
+ }
+ }
+}
+```
+
+
+
+
+### Install in Gemini CLI
+
+See [Gemini CLI Configuration](https://google-gemini.github.io/gemini-cli/docs/tools/mcp-server.html) for details.
+
+1. Open the Gemini CLI settings file at `~/.gemini/settings.json`.
+2. Add the following to the `mcpServers` object in your `settings.json` file:
+
+```json
+{
+ "mcpServers": {
+ "intugle": {
+ "httpUrl": "http://localhost:8080/semantic_layer/mcp",
+      "trust": true
+ }
+ }
+}
+```
+
+If the `mcpServers` object does not exist, create it.
+
+
+
+
+### Install in JetBrains AI Assistant
+
+See [JetBrains AI Assistant Documentation](https://www.jetbrains.com/help/ai-assistant/configure-an-mcp-server.html) for more details.
+
+1. In your JetBrains IDE, go to `Settings` -> `Tools` -> `AI Assistant` -> `Model Context Protocol (MCP)`.
+2. Click `+ Add`.
+3. Select `As JSON` from the list.
+4. Add this configuration and click `OK`:
+
+```json
+{
+ "mcpServers": {
+ "intugle": {
+ "type": "streamable-http",
+ "url": "http://localhost:8080/semantic_layer/mcp"
+ }
+ }
+}
+```
+
+5. Click `Apply` to save changes.
+
+
+
+
+### Install in Claude Code
+
+Run this command in your terminal. See [Claude Code MCP docs](https://docs.anthropic.com/en/docs/claude-code/mcp) for more info.
+
+```sh
+claude mcp add --transport http intugle http://localhost:8080/semantic_layer/mcp
+```
+
+
## 2. Data Discovery Tools
diff --git a/docsite/docs/vibe-coding.md b/docsite/docs/vibe-coding.md
index 25ad0de..5aace13 100644
--- a/docsite/docs/vibe-coding.md
+++ b/docsite/docs/vibe-coding.md
@@ -35,10 +35,102 @@ With the server running, you can connect to it from any MCP-compatible client. T
Popular clients that support MCP include AI-powered IDEs and standalone applications. Here’s how to configure a few of them:
-- **Cursor**: [Configuring MCP Servers](https://docs.cursor.com/en/context/mcp#configuring-mcp-servers)
-- **Claude Code**: [Using MCP with Claude Code](https://docs.claude.com/en/docs/claude-code/mcp)
-- **Claude Desktop**: [User Quickstart](https://modelcontextprotocol.info/docs/quickstart/user/)
-- **Gemini CLI**: [Configure MCP Servers](https://cloud.google.com/gemini/docs/codeassist/use-agentic-chat-pair-programmer#configure-mcp-servers)
+
+### Install in Cursor
+
+Go to: `Settings` -> `Cursor Settings` -> `MCP` -> `Add new global MCP server`
+
+Pasting the following configuration into your Cursor `~/.cursor/mcp.json` file is the recommended approach. See [Cursor MCP docs](https://docs.cursor.com/context/model-context-protocol) for more info.
+
+```json
+{
+ "mcpServers": {
+ "intugle": {
+ "url": "http://localhost:8080/semantic_layer/mcp",
+ "trust": true
+ }
+ }
+}
+```
+
+
+
+
+### Install in VS Code
+
+Add this to your VS Code `settings.json` file. See [VS Code MCP docs](https://code.visualstudio.com/docs/copilot/chat/mcp-servers) for more info.
+
+```json
+"mcp": {
+ "servers": {
+ "intugle": {
+ "type": "http",
+ "url": "http://localhost:8080/semantic_layer/mcp"
+ }
+ }
+}
+```
+
+
+
+
+### Install in Gemini CLI
+
+See [Gemini CLI Configuration](https://google-gemini.github.io/gemini-cli/docs/tools/mcp-server.html) for details.
+
+1. Open the Gemini CLI settings file at `~/.gemini/settings.json`.
+2. Add the following to the `mcpServers` object in your `settings.json` file:
+
+```json
+{
+ "mcpServers": {
+ "intugle": {
+ "httpUrl": "http://localhost:8080/semantic_layer/mcp",
+      "trust": true
+ }
+ }
+}
+```
+
+If the `mcpServers` object does not exist, create it.
+
+
+
+
+### Install in JetBrains AI Assistant
+
+See [JetBrains AI Assistant Documentation](https://www.jetbrains.com/help/ai-assistant/configure-an-mcp-server.html) for more details.
+
+1. In your JetBrains IDE, go to `Settings` -> `Tools` -> `AI Assistant` -> `Model Context Protocol (MCP)`.
+2. Click `+ Add`.
+3. Select `As JSON` from the list.
+4. Add this configuration and click `OK`:
+
+```json
+{
+ "mcpServers": {
+ "intugle": {
+ "type": "streamable-http",
+ "url": "http://localhost:8080/semantic_layer/mcp"
+ }
+ }
+}
+```
+
+5. Click `Apply` to save changes.
+
+
+
+
+### Install in Claude Code
+
+Run this command in your terminal. See [Claude Code MCP docs](https://docs.anthropic.com/en/docs/claude-code/mcp) for more info.
+
+```sh
+claude mcp add --transport http intugle http://localhost:8080/semantic_layer/mcp
+```
+
+
## 2. Vibe Coding
diff --git a/pyproject.toml b/pyproject.toml
index 8558b77..f206f95 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project]
name = "intugle"
-version = "1.0.5"
+version = "1.0.6"
authors = [
{ name="Intugle", email="hello@intugle.ai" },
]
diff --git a/src/intugle/mcp/docs_search/service.py b/src/intugle/mcp/docs_search/service.py
index 6c88ad0..98671c4 100644
--- a/src/intugle/mcp/docs_search/service.py
+++ b/src/intugle/mcp/docs_search/service.py
@@ -10,6 +10,7 @@ class DocsSearchService:
BASE_URL = "https://raw.githubusercontent.com/Intugle/data-tools/main/docsite/docs/"
API_URL = "https://api.github.com/repos/Intugle/data-tools/contents/docsite/docs"
+ BLACKLISTED_ROUTES = ["mcp-server.md", "vibe-coding.md"]
def __init__(self):
self._doc_paths = None
@@ -39,8 +40,9 @@ async def _fetch_paths_recursively(self, session: aiohttp.ClientSession, url: st
for item in items:
if item['type'] == 'file' and (item['name'].endswith('.md') or item['name'].endswith('.mdx')):
- # Strip the base 'docsite/docs/' part to make it a relative path
- paths.append(item['path'].replace('docsite/docs/', '', 1))
+ relative_path = item['path'].replace('docsite/docs/', '', 1)
+ if relative_path not in self.BLACKLISTED_ROUTES:
+ paths.append(relative_path)
elif item['type'] == 'dir':
paths.extend(await self._fetch_paths_recursively(session, item['url']))
except Exception as e:
diff --git a/uv.lock b/uv.lock
index c864996..dc97f25 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1668,7 +1668,7 @@ wheels = [
[[package]]
name = "intugle"
-version = "1.0.5"
+version = "1.0.6"
source = { editable = "." }
dependencies = [
{ name = "aiohttp" },