Skip to content

Commit 68f0d84

Browse files
committed
feat(mongodb): mongodb_mcp TencentBlueKing#16073
1 parent f392066 commit 68f0d84

28 files changed

+1531
-0
lines changed

dbm-ui/backend/dbm_aiagent/mcp_tools/constants.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,12 @@ class DBMMcpTools(StrStructuredEnum):
3333
REDIS_QUERY_ALARM = EnumField("redis-query-alarm", "redis-query-alarm")
3434
REDIS_BILL = EnumField("redis-bill", "redis-bill")
3535
REDIS_JOB = EnumField("redis-job", "redis-job")
36+
MONGODB_QUERY_META = EnumField("mongodb-query-meta", "mongodb-query-meta")
37+
MONGODB_QUERY_STATUS = EnumField("mongodb-query-status", "mongodb-query-status")
38+
MONGODB_QUERY_LOG = EnumField("mongodb-query-log", "mongodb-query-log")
39+
MONGODB_QUERY_ALARM = EnumField("mongodb-query-alarm", "mongodb-query-alarm")
40+
MONGODB_BILL = EnumField("mongodb-bill", "mongodb-bill")
41+
MONGODB_JOB = EnumField("mongodb-job", "mongodb-job")
3642

3743

3844
class DBMMCPTags(StrStructuredEnum):

dbm-ui/backend/dbm_aiagent/mcp_tools/mongodb/__init__.py

Whitespace-only changes.

dbm-ui/backend/dbm_aiagent/mcp_tools/mongodb/impl/__init__.py

Whitespace-only changes.
Lines changed: 236 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,236 @@
1+
"""
2+
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-DB管理系统(BlueKing-BK-DBM) available.
3+
Copyright (C) 2017-2023 THL A29 Limited, a Tencent company. All rights reserved.
4+
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
5+
You may obtain a copy of the License at https://opensource.org/licenses/MIT
6+
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
7+
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
8+
specific language governing permissions and limitations under the License.
9+
"""
10+
from collections import defaultdict
11+
from typing import Dict, List
12+
13+
from django.db.models import F
14+
15+
from backend.configuration.constants import DBType
16+
from backend.configuration.models import DBAdministrator
17+
from backend.db_meta.enums import ClusterType, MachineType
18+
from backend.db_meta.models import AppCache, Cluster, ClusterEntry, Machine, ProxyInstance, StorageInstance
19+
20+
21+
def list_my_mongodb_bizs(username: str) -> List:
    """Return the businesses for which *username* is the MongoDB administrator.

    Only DBAdministrator rows whose FIRST entry in ``users`` (``users__0``)
    equals the given username are matched — i.e. the primary administrator.

    :param username: login name of the operator.
    :return: list of ``{"bk_biz_id", "app_name", "abbr"}`` dicts.
    """
    # Fetch all matching biz ids in one query instead of issuing one
    # DBAdministrator lookup per AppCache row (N+1 queries in the original).
    admin_biz_ids = set(
        DBAdministrator.objects.filter(users__0=username, db_type=DBType.MongoDB.value).values_list(
            "bk_biz_id", flat=True
        )
    )
    return [
        {"bk_biz_id": app.bk_biz_id, "app_name": app.bk_biz_name, "abbr": app.db_app_abbr}
        for app in AppCache.objects.all()
        if app.bk_biz_id in admin_biz_ids
    ]
30+
31+
32+
def mongodb_list_clusters(bk_biz_id: int) -> List:
    """List MongoDB clusters (replica sets and sharded clusters) of a business."""

    def _shard_count(cluster) -> int:
        # A replica set is a single shard; for a sharded cluster, count the
        # distinct seg_range values among its mongod storage-set entries.
        if cluster.cluster_type == ClusterType.MongoReplicaSet:
            return 1
        if cluster.cluster_type == ClusterType.MongoShardedCluster:
            return (
                cluster.nosqlstoragesetdtl_set.filter(instance__machine__machine_type=MachineType.MONGODB)
                .values("seg_range")
                .distinct()
                .count()
            )
        return 0

    cluster_qs = Cluster.objects.filter(
        bk_biz_id=bk_biz_id,
        cluster_type__in=[ClusterType.MongoReplicaSet, ClusterType.MongoShardedCluster],
    )
    return [
        {
            "cluster_id": cluster.id,
            "bk_cloud_id": cluster.bk_cloud_id,
            "cluster_type": cluster.cluster_type,
            "immute_domain": cluster.immute_domain,
            "alias": cluster.alias,
            "region": cluster.region,
            "mongos_count": cluster.proxyinstance_set.filter(machine_type=MachineType.MONGOS).count(),
            "shard_count": _shard_count(cluster),
            "storage_count": cluster.storageinstance_set.count(),
            "mongodb_version": cluster.major_version,
        }
        for cluster in cluster_qs
    ]
68+
69+
70+
def get_machine_stats(all_machine_ids) -> Dict:
    """Aggregate machine distribution (sub-zone / OS / device class / spec) for the given host ids.

    :param all_machine_ids: iterable of ``bk_host_id`` values.
    :return: dict of counters; ``total_count`` counts the REQUESTED ids, the
        buckets count only machines actually found in the Machine table.
    """
    dist = {
        "total_count": len(all_machine_ids),
        "by_sub_zone": defaultdict(int),
        "by_os": defaultdict(int),
        "by_device_class": defaultdict(int),
        "spec_summary": defaultdict(int),
    }
    for m in Machine.objects.filter(bk_host_id__in=all_machine_ids).select_related("bk_city"):
        # Skip empty attributes so blank values never become bucket keys.
        for bucket, value in (
            ("by_sub_zone", m.bk_sub_zone),
            ("by_os", m.bk_os_name),
            ("by_device_class", m.bk_svr_device_cls_name),
        ):
            if value:
                dist[bucket][value] += 1
        if m.spec_id:
            dist["spec_summary"][f"spec_{m.spec_id}"] += 1
    return dist
89+
90+
91+
def _aggregate_instance_stats(instances, with_role: bool) -> Dict:
    """Aggregate one instance queryset into the per-side stats dict for cluster_overview.

    :param instances: StorageInstance or ProxyInstance queryset (machine preloaded).
    :param with_role: include a "by_role" breakdown (storage side only).
    :return: dict with node/status/version/machine distribution summaries.
    """
    by_role = defaultdict(int)
    by_status = defaultdict(int)
    versions = set()
    machine_ids = set()
    for ins in instances:
        if with_role:
            # Storage instances may lack an explicit role; fall back to machine type.
            by_role[ins.instance_role or ins.machine.machine_type] += 1
        by_status[ins.status] += 1
        if ins.version:
            versions.add(ins.version)
        machine_ids.add(ins.machine.bk_host_id)
    machine_stats = get_machine_stats(machine_ids)
    summary = {"node_count": instances.count()}
    if with_role:
        summary["by_role"] = dict(sorted(by_role.items()))
    summary.update(
        {
            "by_status": dict(sorted(by_status.items())),
            "versions": sorted(versions),
            "machine_count": len(machine_ids),
            "by_os": dict(sorted(machine_stats["by_os"].items())),
            "by_sub_zone": dict(sorted(machine_stats["by_sub_zone"].items())),
            "by_device_class": dict(sorted(machine_stats["by_device_class"].items())),
        }
    )
    return summary


def cluster_overview(immute_domain: str) -> Dict:
    """Build a full overview (cluster meta + storage/proxy instance stats) of one cluster.

    :param immute_domain: immutable domain of the cluster.
    :return: dict with cluster metadata, entries, tags, and instance summaries.
    :raises Cluster.DoesNotExist: if no cluster matches the domain.
    """
    cluster_obj = Cluster.objects.prefetch_related("tags").get(immute_domain=immute_domain)
    stats = {
        "bk_cloud_id": cluster_obj.bk_cloud_id,
        "bk_biz_id": cluster_obj.bk_biz_id,
        "cluster_id": cluster_obj.id,
        "immute_domain": cluster_obj.immute_domain,
        "alias": cluster_obj.alias,
        "cluster_type": cluster_obj.cluster_type,
        "major_version": cluster_obj.major_version,
        "region": cluster_obj.region,
        "disaster_tolerance_level": cluster_obj.disaster_tolerance_level,
        "tags": ["{}:{}".format(tag.key, tag.value) for tag in cluster_obj.tags.all()],
        "cluster_entries": [
            {"entry_type": ce.cluster_entry_type, "entry_addr": ce.entry}
            for ce in ClusterEntry.objects.filter(cluster=cluster_obj)
        ],
    }
    storage_instances = (
        StorageInstance.objects.filter(cluster=cluster_obj)
        .select_related("machine", "machine__bk_city")
        .prefetch_related("bind_entry")
    )
    proxy_instances = (
        ProxyInstance.objects.filter(cluster=cluster_obj)
        .select_related("machine", "machine__bk_city")
        .prefetch_related("bind_entry")
    )
    # Storage and proxy aggregation were duplicated line-for-line; share one
    # helper. The original also tallied "by_machine_type" without ever
    # emitting it — that dead accumulation is dropped (output unchanged).
    stats["storage_instances"] = _aggregate_instance_stats(storage_instances, with_role=True)
    stats["proxy_instances"] = _aggregate_instance_stats(proxy_instances, with_role=False)
    return stats
167+
168+
169+
def cluster_mongos(immute_domain: str) -> List:
    """Return the mongos instance list of a cluster."""
    cluster = Cluster.objects.get(immute_domain=immute_domain)
    result = []
    for proxy in cluster.proxyinstance_set.filter(machine_type=MachineType.MONGOS):
        machine = proxy.machine
        result.append(
            {
                "address": "{}:{}".format(machine.ip, proxy.port),
                "status": proxy.status,
                "version": proxy.version or "",
                "sub_zone": machine.bk_sub_zone or "",
                "cls_name": machine.bk_svr_device_cls_name or "",
            }
        )
    return result
183+
184+
185+
def cluster_shards(immute_domain: str) -> List:
    """List the shard (mongod) nodes of a cluster, ports aggregated per IP.

    :param immute_domain: immutable domain of the cluster.
    :return: list of ``{"ip", "ports", "sub_zone", "cls_name"}``; hosts with no
        matching Machine row in the cluster's cloud/biz are skipped, as before.
    :raises Cluster.DoesNotExist: if no cluster matches the domain.
    """
    c_obj = Cluster.objects.get(immute_domain=immute_domain)
    host_ports = defaultdict(list)
    for ins in c_obj.storageinstance_set.filter(machine_type=MachineType.MONGODB):
        host_ports[ins.machine.ip].append(ins.port)
    # One batched Machine query instead of one query per IP (N+1 in the original).
    machine_by_ip = {
        m.ip: m
        for m in Machine.objects.filter(
            ip__in=list(host_ports), bk_cloud_id=c_obj.bk_cloud_id, bk_biz_id=c_obj.bk_biz_id
        )
    }
    result = []
    for ip, ports in host_ports.items():
        m_obj = machine_by_ip.get(ip)
        if m_obj:
            result.append(
                {
                    "ip": ip,
                    "ports": ports,
                    "sub_zone": m_obj.bk_sub_zone or "",
                    "cls_name": m_obj.bk_svr_device_cls_name or "",
                }
            )
    return result
205+
206+
207+
def list_clusters_by_hosts(hosts: List) -> List[Dict]:
    """Map host IPs to the clusters they belong to (both storage and proxy sides)."""
    storage_rows = (
        Cluster.objects.filter(storageinstance__machine__ip__in=hosts)
        .values(
            "immute_domain",
            host=F("storageinstance__machine__ip"),
            instance_role=F("storageinstance__instance_role"),
        )
        .distinct()
    )
    proxy_rows = (
        Cluster.objects.filter(proxyinstance__machine__ip__in=hosts)
        .values(
            "immute_domain",
            host=F("proxyinstance__machine__ip"),
            # Proxies have no instance_role; expose their machine type instead.
            instance_role=F("proxyinstance__machine_type"),
        )
        .distinct()
    )
    # De-duplicate across the two querysets, preserving first-seen order
    # (storage rows first, then proxy rows).
    seen = set()
    unique_results = []
    for row in list(storage_rows) + list(proxy_rows):
        key = (row["immute_domain"], row["host"], str(row.get("instance_role", "")))
        if key not in seen:
            seen.add(key)
            unique_results.append(row)
    return unique_results
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# -*- coding: utf-8 -*-
2+
"""
3+
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-DB管理系统(BlueKing-BK-DBM) available.
4+
Copyright (C) 2017-2023 THL A29 Limited, a Tencent company. All rights reserved.
5+
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
6+
You may obtain a copy of the License at https://opensource.org/licenses/MIT
7+
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
8+
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
9+
specific language governing permissions and limitations under the License.
10+
"""
11+
from itertools import chain
12+
13+
from backend.db_meta.enums import ClusterType
14+
from backend.db_meta.models import Cluster
15+
from backend.dbm_aiagent.mcp_tools.redis.impl.get_source_access_impl import generate_cluster_query_report
16+
17+
18+
def generate_mongodb_cluster_query_report(job_log_resp, cluster_domain: str, cluster_all_ips: list):
    """Reuse the Redis access-source report generator (generic over StorageInstance/ProxyInstance)."""
    return generate_cluster_query_report(job_log_resp, cluster_domain, cluster_all_ips)
Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
"""
2+
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-DB管理系统(BlueKing-BK-DBM) available.
3+
Copyright (C) 2017-2023 THL A29 Limited, a Tencent company. All rights reserved.
4+
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
5+
You may obtain a copy of the License at https://opensource.org/licenses/MIT
6+
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
7+
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
8+
specific language governing permissions and limitations under the License.
9+
"""
10+
import time
11+
from datetime import datetime
12+
from typing import Dict, List, Optional, Union
13+
14+
from backend import env
15+
from backend.components import BKMonitorV3Api
16+
from backend.dbm_aiagent.mcp_tools.mongodb.tools.comm_tools import parse_time2_long
17+
18+
19+
def get_cluster_alarms(
    immute_domain: Optional[str] = None,
    start_time: Optional[Union[int, str, datetime]] = None,
    end_time: Optional[Union[int, str, datetime]] = None,
) -> Dict:
    """Summarize the alarms of one cluster over a time range.

    NOTE(review): immute_domain is typed Optional, but get_alarms_flat raises
    ValueError when neither appid nor immute_domain is set — confirm callers
    always pass a domain here.
    """
    flat = get_alarms_flat(immute_domain=immute_domain, start_time=start_time, end_time=end_time)
    per_alert = flat.get(immute_domain, {})
    detail = [{"alert_name": name, "alert_detail": items} for name, items in per_alert.items()]
    return {
        "total_alarms": sum(len(items) for items in per_alert.values()),
        "alarm_detail": detail,
    }
29+
30+
31+
def get_alarms_flat(
    appid: Optional[Union[str, int]] = None,
    immute_domain: Optional[str] = None,
    start_time: Optional[Union[int, str, datetime]] = None,
    end_time: Optional[Union[int, str, datetime]] = None,
    n_hour: Optional[int] = None,
    limit: int = 5000,
) -> Dict[str, Dict[str, List]]:
    """Fetch alarms filtered by business and/or cluster. Uses the DBM_MONGODB label.

    :param appid: business id to filter on (tags.appid).
    :param immute_domain: cluster domain to filter on (tags.cluster_domain).
    :param start_time: range start; both start and end must be given together.
    :param end_time: range end.
    :param n_hour: lookback window in hours when no explicit range (default 24).
    :param limit: maximum number of alerts to fetch.
    :return: alarms aggregated as domain -> alert_name -> [alarm, ...]
        (the original annotation ``Dict[str, List]`` understated the nesting).
    :raises ValueError: if neither appid nor immute_domain is given.
    """
    if not appid and not immute_domain:
        raise ValueError("必须指定 appid 或 immute_domain 中的至少一个")
    if start_time is not None and end_time is not None:
        start_timestamp = parse_time2_long(start_time)
        end_timestamp = parse_time2_long(end_time)
    else:
        # Default window: the last n_hour hours, ending now.
        n_hour = n_hour or 24
        end_timestamp = int(time.time())
        start_timestamp = end_timestamp - n_hour * 60 * 60
    query_conditions = ['labels: "DBM_MONGODB"']
    if appid:
        query_conditions.append(f'tags.appid : "{appid}"')
    if immute_domain:
        query_conditions.append(f'tags.cluster_domain : "{immute_domain}"')
    query_string = " AND ".join(query_conditions)
    data = BKMonitorV3Api.search_alert(
        {
            "bk_biz_ids": [env.DBA_APP_BK_BIZ_ID],
            "start_time": start_timestamp,
            "end_time": end_timestamp,
            "offset": 0,
            "limit": limit,
            "query_string": query_string,
        }
    )
    # Same shape as the Redis implementation: domain -> alert_name -> list.
    result: Dict[str, Dict[str, List]] = {}
    for alt in data.get("alerts", []):
        alarm = {
            "alert_name": alt.get("alert_name", ""),
            "description": alt.get("description", ""),
            "begin_time": alt.get("begin_time", 0),
            "target_key": alt.get("target_key", ""),
        }
        # Flatten monitor tags into the alarm dict.
        for tag in alt.get("tags", []):
            alarm[tag["key"]] = tag["value"]
        # Bind to a local instead of clobbering the immute_domain parameter
        # (the original reassigned the parameter inside this loop).
        domain = alarm.pop("cluster_domain", "unknown")
        alert_name = alarm.get("alert_name", "unknown")
        # appid is already a filter criterion; drop it from the detail payload.
        alarm.pop("appid", None)
        result.setdefault(domain, {}).setdefault(alert_name, []).append(alarm)
    return result

0 commit comments

Comments
 (0)