-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathdependency_analyzer.py
More file actions
553 lines (461 loc) · 22 KB
/
dependency_analyzer.py
File metadata and controls
553 lines (461 loc) · 22 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
HarmonyOS ETS文件依赖关系分析和数据集生成工具
该脚本用于分析ETS文件之间的依赖关系,并生成用于机器学习的数据集
"""
import os
import re
import json
from typing import Dict, List, Set, Tuple, Optional
from pathlib import Path
import argparse
from collections import defaultdict, Counter
class ETSDependencyAnalyzer:
    """Dependency analyzer for HarmonyOS ETS source files.

    Scans a project tree for ``*.ets`` files, extracts import/export
    statements with regular expressions, resolves them to concrete file
    paths (via relative paths or ``oh-package.json5`` module mappings),
    and builds a bidirectional dependency graph plus per-file metadata
    suitable for dataset export.
    """

    def __init__(self, root_path: str):
        self.root_path = Path(root_path)
        self.dependencies = defaultdict(set)          # file -> set of files it depends on
        self.reverse_dependencies = defaultdict(set)  # file -> set of files depending on it
        self.import_patterns = {}     # file -> list of parsed import statements
        self.export_patterns = {}     # file -> list of parsed export statements
        self.file_metadata = {}       # file -> size/lines/type metadata
        self.module_mappings = {}     # module name -> module directory path
        self.module_entry_files = {}  # module name -> entry file path
        self.entry_file_exports = {}  # entry file -> list of files it re-exports

    def analyze_project(self) -> Dict:
        """Run the full analysis pipeline and return the report dict."""
        print(f"开始分析项目: {self.root_path}")
        # 1. Build module-name -> path mappings from oh-package.json5 files.
        self._build_module_mappings()
        print(f"找到 {len(self.module_mappings)} 个模块")
        # 2. Collect all ETS files under the root.
        ets_files = self._find_ets_files()
        print(f"找到 {len(ets_files)} 个ETS文件")
        # 3. Extract imports/exports and metadata from each file.
        for file_path in ets_files:
            self._analyze_file(file_path)
        # 4. Resolve import statements to concrete file dependencies.
        self._resolve_dependencies()
        # 5. Produce the summary report.
        return self._generate_report()

    def _find_ets_files(self) -> List[Path]:
        """Return every .ets file under the root, skipping tests and build dirs."""
        ets_files = []
        for file_path in self.root_path.rglob("*.ets"):
            # Exclude test files and tooling/temporary directories.
            # NOTE(review): substring match — a path like 'latest/' would also
            # be excluded by 'test'; confirm this is acceptable for the project.
            if not any(exclude in str(file_path) for exclude in
                       ['test', 'Test', 'ohosTest', 'node_modules', '.hvigor']):
                ets_files.append(file_path)
        return ets_files

    def _analyze_file(self, file_path: Path) -> None:
        """Analyze a single ETS file: record metadata, imports and exports.

        Failures (unreadable file, bad encoding) are reported and skipped so
        one bad file does not abort the whole scan.
        """
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                content = f.read()
            # Basic file metadata.
            self.file_metadata[str(file_path)] = {
                'size': len(content),
                'lines': len(content.splitlines()),
                'relative_path': str(file_path.relative_to(self.root_path)),
                'module_type': self._detect_module_type(file_path, content),
                'component_type': self._detect_component_type(content)
            }
            # Parse import statements.
            imports = self._extract_imports(content)
            self.import_patterns[str(file_path)] = imports
            # Parse export statements.
            exports = self._extract_exports(content)
            self.export_patterns[str(file_path)] = exports
        except Exception as e:
            print(f"分析文件失败 {file_path}: {e}")

    def _extract_imports(self, content: str) -> List[Dict]:
        """Extract import statements via regex.

        Returns a list of dicts with 'type', 'module', 'full_statement' and
        (for imports with bindings) 'items'. Note that default imports
        (``import X from 'm'``) and namespace imports (``import * as X``)
        are also tagged 'named_import' — kept for dataset compatibility.
        """
        imports = []
        # Supported import shapes, one regex each.
        patterns = [
            # import { xxx } from 'module'
            r"import\s*\{\s*([^}]+)\s*\}\s*from\s*['\"]([^'\"]+)['\"]",
            # import xxx from 'module'
            r"import\s+(\w+)\s+from\s*['\"]([^'\"]+)['\"]",
            # import * as xxx from 'module'
            r"import\s*\*\s*as\s+(\w+)\s+from\s*['\"]([^'\"]+)['\"]",
            # import 'module'  (side-effect only)
            r"import\s*['\"]([^'\"]+)['\"]"
        ]
        for pattern in patterns:
            matches = re.finditer(pattern, content, re.MULTILINE)
            for match in matches:
                if len(match.groups()) == 2:
                    # Import with bindings: group 1 = names, group 2 = module.
                    imports.append({
                        'type': 'named_import',
                        'items': [item.strip() for item in match.group(1).split(',')],
                        'module': match.group(2),
                        'full_statement': match.group(0)
                    })
                elif len(match.groups()) == 1:
                    # Module imported for side effects only.
                    imports.append({
                        'type': 'side_effect_import',
                        'module': match.group(1),
                        'full_statement': match.group(0)
                    })
        return imports

    def _extract_exports(self, content: str) -> List[Dict]:
        """Extract export statements via regex.

        Returns a list of dicts with 'type', 'content' and 'name' (the
        exported identifier, the re-exported module path, or the brace
        group, depending on the export shape).
        """
        exports = []
        # Supported export shapes, one regex each.
        patterns = [
            # export interface/class/function/const/let/var <name>
            r"export\s+(interface|class|function|const|let|var)\s+(\w+)",
            # export { xxx }
            r"export\s*\{\s*([^}]+)\s*\}",
            # export default <name or object literal>
            r"export\s+default\s+(\w+|\{[^}]*\})",
            # export * from 'module'
            r"export\s*\*\s*from\s*['\"]([^'\"]+)['\"]"
        ]
        for pattern in patterns:
            matches = re.finditer(pattern, content, re.MULTILINE)
            for match in matches:
                # BUG FIX: for "export <keyword> <name>" the identifier lives
                # in group 2; group 1 is the declaration keyword ("class",
                # "const", ...). The old code always stored group 1 as 'name'.
                name = match.group(2) if len(match.groups()) == 2 else match.group(1)
                exports.append({
                    'type': 'export',
                    'content': match.group(0),
                    'name': name
                })
        return exports

    def _detect_module_type(self, file_path: Path, content: str) -> str:
        """Classify a file by its directory path (page/component/model/...).

        NOTE(review): checks use '/' separators, so on Windows ('\\' paths)
        every file falls through to 'other' — confirm POSIX-only usage.
        """
        path_str = str(file_path)
        if '/pages/' in path_str:
            return 'page'
        elif '/components/' in path_str:
            return 'component'
        elif '/models/' in path_str or '/model/' in path_str:
            return 'model'
        elif '/utils/' in path_str or '/Utils/' in path_str:
            return 'utility'
        elif '/commons/' in path_str or '/common/' in path_str:
            return 'common'
        elif '/constants/' in path_str:
            return 'constant'
        elif '/viewmodels/' in path_str or '/viewModels/' in path_str:
            return 'viewmodel'
        elif '/logic/' in path_str:
            return 'logic'
        elif '/service/' in path_str or '/services/' in path_str:
            return 'service'
        else:
            return 'other'

    def _detect_component_type(self, content: str) -> str:
        """Classify a file by the first matching marker in its content.

        Order matters: '@ComponentV2' is tested before '@Component' because
        the latter is a substring of the former.
        """
        if '@ComponentV2' in content:
            return 'component_v2'
        elif '@Component' in content:
            return 'component'
        elif '@Builder' in content:
            return 'builder'
        elif 'export interface' in content:
            return 'interface'
        elif 'export class' in content:
            return 'class'
        elif 'export function' in content:
            return 'function'
        elif 'export const' in content or 'export let' in content:
            return 'constant'
        else:
            return 'module'

    def _resolve_dependencies(self) -> None:
        """Resolve parsed imports into the (reverse) dependency graphs."""
        for file_path, imports in self.import_patterns.items():
            for import_info in imports:
                module_path = import_info['module']
                # Relative-path imports ('./x', '../x').
                if module_path.startswith('./') or module_path.startswith('../'):
                    target_file = self._resolve_relative_path(file_path, module_path)
                    if target_file:
                        self.dependencies[file_path].add(target_file)
                        self.reverse_dependencies[target_file].add(file_path)
                # Bare module-name imports; '@'-scoped packages (system/npm
                # style) are external and intentionally skipped.
                elif not module_path.startswith('@'):
                    target_files = self._resolve_module_path(file_path, module_path)
                    if target_files:
                        for target_file in target_files:
                            self.dependencies[file_path].add(target_file)
                            self.reverse_dependencies[target_file].add(file_path)

    def _resolve_relative_path(self, from_file: str, relative_path: str) -> Optional[str]:
        """Resolve a relative import to an existing file.

        Returns a normalized path string (no '..' segments, so it matches the
        keys produced by the rglob scan) or None if no candidate exists.
        """
        from_dir = Path(from_file).parent
        # Candidates: appended .ets extension, or a directory entry file.
        possible_paths = [
            from_dir / f"{relative_path}.ets",
            from_dir / relative_path / "index.ets",
            from_dir / f"{relative_path}/Index.ets"
        ]
        # BUG FIX: imports that already carry the .ets suffix used to be
        # tried only as '<path>.ets.ets' and never resolved.
        if relative_path.endswith('.ets'):
            possible_paths.insert(0, from_dir / relative_path)
        for path in possible_paths:
            if path.exists():
                # BUG FIX: collapse '..' segments so the returned key matches
                # file_metadata keys (built from rglob, which emits no '..').
                return os.path.normpath(str(path))
        return None

    def _parse_json5(self, content: str) -> dict:
        """Minimal JSON5 parsing: strip comments and trailing commas.

        NOTE(review): the line-comment regex would also mangle '//' inside
        string values (e.g. URLs); acceptable for oh-package.json5 files.
        """
        # Strip single-line comments.
        content = re.sub(r'//.*?$', '', content, flags=re.MULTILINE)
        # Strip multi-line comments.
        content = re.sub(r'/\*.*?\*/', '', content, flags=re.DOTALL)
        # Strip trailing commas before closing brackets.
        content = re.sub(r',(\s*[}\]])', r'\1', content)
        # Parse what remains as plain JSON.
        return json.loads(content)

    def _build_module_mappings(self) -> None:
        """Map module names to paths from every oh-package.json5 in the tree.

        Also records each module's entry file ('main', default 'Index.ets')
        and parses the entry file's re-exports.
        """
        package_files = list(self.root_path.rglob("oh-package.json5"))
        for package_file in package_files:
            try:
                with open(package_file, 'r', encoding='utf-8') as f:
                    content = f.read()
                # Lenient JSON5 parse.
                package_data = self._parse_json5(content)
                # Module name is mandatory for a mapping entry.
                module_name = package_data.get('name')
                if not module_name:
                    continue
                # Module path = directory containing oh-package.json5.
                module_dir = package_file.parent
                self.module_mappings[module_name] = str(module_dir)
                # Locate the entry file.
                main_file = package_data.get('main', 'Index.ets')
                entry_path = module_dir / main_file
                if entry_path.exists():
                    self.module_entry_files[module_name] = str(entry_path)
                    # Record what the entry file re-exports.
                    self._parse_entry_file_exports(str(entry_path))
            except Exception as e:
                print(f"解析模块配置文件失败 {package_file}: {e}")

    def _parse_entry_file_exports(self, entry_file: str) -> None:
        """Parse an entry file's re-export statements into entry_file_exports."""
        try:
            with open(entry_file, 'r', encoding='utf-8') as f:
                content = f.read()
            exported_files = []
            entry_dir = Path(entry_file).parent
            # Match  export { xxx } from 'path'  and  export * from 'path'.
            export_from_pattern = r"export\s+(?:\*|\{[^}]+\})\s+from\s+['\"]([^'\"]+)['\"]"
            matches = re.finditer(export_from_pattern, content, re.MULTILINE)
            for match in matches:
                relative_path = match.group(1)
                # Resolve the re-exported path relative to the entry file.
                target_path = self._resolve_path_from_entry(entry_dir, relative_path)
                if target_path:
                    exported_files.append(str(target_path))
            self.entry_file_exports[entry_file] = exported_files
        except Exception as e:
            print(f"解析入口文件失败 {entry_file}: {e}")

    def _resolve_path_from_entry(self, from_dir: Path, relative_path: str) -> Optional[Path]:
        """Resolve a re-exported relative path from a module entry file.

        Same candidate strategy and normalization as _resolve_relative_path,
        but returns a Path (or None).
        """
        possible_paths = [
            from_dir / f"{relative_path}.ets",
            from_dir / relative_path / "index.ets",
            from_dir / f"{relative_path}/Index.ets"
        ]
        # BUG FIX: accept paths that already include the .ets suffix.
        if relative_path.endswith('.ets'):
            possible_paths.insert(0, from_dir / relative_path)
        for path in possible_paths:
            if path.exists():
                # BUG FIX: normalize '..' segments for key consistency.
                return Path(os.path.normpath(str(path)))
        return None

    def _resolve_module_path(self, from_file: str, module_name: str) -> Optional[List[str]]:
        """Resolve a bare module name to the files that module exports."""
        # Preferred: the mapping built from oh-package.json5 files.
        if module_name in self.module_entry_files:
            entry_file = self.module_entry_files[module_name]
            # Return everything the entry file re-exports...
            if entry_file in self.entry_file_exports:
                return self.entry_file_exports[entry_file]
            else:
                # ...or at least the entry file itself.
                return [entry_file]
        # Fallback (compatibility): probe conventional module directories.
        project_root = self.root_path
        possible_dirs = [
            project_root / module_name,
            project_root / "components" / module_name,
            project_root / "commons" / module_name,
            project_root / "features" / module_name,
            project_root / "feature" / module_name
        ]
        for dir_path in possible_dirs:
            if dir_path.exists():
                # Probe conventional entry file names.
                entry_files = ['index.ets', 'Index.ets', f"{module_name}.ets"]
                for entry in entry_files:
                    entry_path = dir_path / entry
                    if entry_path.exists():
                        return [str(entry_path)]
        return None

    def _generate_file_dependency_mapping(self) -> Dict:
        """Build the detailed per-file dependency mapping for dataset export.

        Keyed by absolute file path; each value bundles file info,
        dependencies, dependents, imports and exports with counts.
        """
        file_deps = {}
        for file_path in self.file_metadata.keys():
            relative_path = str(Path(file_path).relative_to(self.root_path))
            # Basic info for this file.
            file_info = {
                'file_path': relative_path,
                'absolute_path': file_path,
                'module_type': self.file_metadata[file_path]['module_type'],
                'component_type': self.file_metadata[file_path]['component_type'],
                'size': self.file_metadata[file_path]['size'],
                'lines': self.file_metadata[file_path]['lines']
            }
            # Files this file depends on.
            dependencies = []
            if file_path in self.dependencies:
                for dep_file in self.dependencies[file_path]:
                    dep_relative = str(Path(dep_file).relative_to(self.root_path))
                    dep_info = {
                        'file_path': dep_relative,
                        'absolute_path': dep_file,
                        'module_type': self.file_metadata.get(dep_file, {}).get('module_type', 'unknown'),
                        'component_type': self.file_metadata.get(dep_file, {}).get('component_type', 'unknown')
                    }
                    dependencies.append(dep_info)
            # Files that depend on this file.
            dependents = []
            if file_path in self.reverse_dependencies:
                for dep_file in self.reverse_dependencies[file_path]:
                    dep_relative = str(Path(dep_file).relative_to(self.root_path))
                    dep_info = {
                        'file_path': dep_relative,
                        'absolute_path': dep_file,
                        'module_type': self.file_metadata.get(dep_file, {}).get('module_type', 'unknown'),
                        'component_type': self.file_metadata.get(dep_file, {}).get('component_type', 'unknown')
                    }
                    dependents.append(dep_info)
            # Import details for this file.
            imports = []
            if file_path in self.import_patterns:
                for import_info in self.import_patterns[file_path]:
                    import_detail = {
                        'module': import_info['module'],
                        'type': import_info['type'],
                        'statement': import_info['full_statement']
                    }
                    if 'items' in import_info:
                        import_detail['items'] = import_info['items']
                    imports.append(import_detail)
            # Export details for this file.
            exports = []
            if file_path in self.export_patterns:
                for export_info in self.export_patterns[file_path]:
                    export_detail = {
                        'type': export_info['type'],
                        'content': export_info['content']
                    }
                    if export_info.get('name'):
                        export_detail['name'] = export_info['name']
                    exports.append(export_detail)
            # Bundle everything, keyed by absolute path.
            file_deps[file_path] = {
                'file_info': file_info,
                'dependencies': {
                    'count': len(dependencies),
                    'files': dependencies
                },
                'dependents': {
                    'count': len(dependents),
                    'files': dependents
                },
                'imports': {
                    'count': len(imports),
                    'details': imports
                },
                'exports': {
                    'count': len(exports),
                    'details': exports
                }
            }
        return file_deps

    def _generate_report(self) -> Dict:
        """Compose the analysis report: summary, stats, graphs and raw patterns."""
        total_files = len(self.file_metadata)
        # Distribution of module/component types.
        module_types = Counter(meta['module_type'] for meta in self.file_metadata.values())
        component_types = Counter(meta['component_type'] for meta in self.file_metadata.values())
        # Dependency statistics, including top-10 rankings.
        dependency_stats = {
            'total_dependencies': sum(len(deps) for deps in self.dependencies.values()),
            'files_with_dependencies': len([f for f in self.dependencies if self.dependencies[f]]),
            'most_dependent_files': sorted(
                [(f, len(deps)) for f, deps in self.dependencies.items()],
                key=lambda x: x[1], reverse=True
            )[:10],
            'most_depended_files': sorted(
                [(f, len(deps)) for f, deps in self.reverse_dependencies.items()],
                key=lambda x: x[1], reverse=True
            )[:10]
        }
        return {
            'summary': {
                'total_files': total_files,
                'module_types': dict(module_types),
                'component_types': dict(component_types)
            },
            'dependencies': dependency_stats,
            'file_metadata': self.file_metadata,
            'dependency_graph': {
                'dependencies': {k: list(v) for k, v in self.dependencies.items()},
                'reverse_dependencies': {k: list(v) for k, v in self.reverse_dependencies.items()}
            },
            'import_patterns': self.import_patterns,
            'export_patterns': self.export_patterns
        }

    def export_dataset(self, output_dir: str, include_dependencies: bool = True) -> str:
        """Write the collected metadata to <output_dir>/<root>_metadata.json.

        Returns the path of the written file. When include_dependencies is
        True the dependency graphs and the detailed per-file mapping are
        included as well.
        """
        metadata = {
            'file_metadata': self.file_metadata,
            'import_patterns': self.import_patterns,
            'export_patterns': self.export_patterns
        }
        if include_dependencies:
            metadata['dependencies'] = {k: list(v) for k, v in self.dependencies.items()}
            metadata['reverse_dependencies'] = {k: list(v) for k, v in self.reverse_dependencies.items()}
            # Detailed per-file dependency mapping.
            metadata['file_dependencies'] = self._generate_file_dependency_mapping()
        output_path = Path(output_dir)
        output_path.mkdir(parents=True, exist_ok=True)
        metadata_path = output_path / f"{self.root_path.name}_metadata.json"
        with open(metadata_path, 'w', encoding='utf-8') as f:
            json.dump(metadata, f, ensure_ascii=False, indent=2)
        return str(metadata_path)
def main():
    """CLI entry point: parse arguments, analyze the project, print a
    summary, and (unless --analyze-only is set) export the dataset."""
    arg_parser = argparse.ArgumentParser(description='HarmonyOS ETS依赖关系分析工具')
    arg_parser.add_argument('--root', '-r', required=True, help='项目根目录路径')
    arg_parser.add_argument('--output', '-o', default='intermedia_result', help='输出目录 (默认: intermedia_result)')
    arg_parser.add_argument('--analyze-only', '-a', action='store_true', help='仅分析不导出文件')
    opts = arg_parser.parse_args()

    # Run the analysis.
    analyzer = ETSDependencyAnalyzer(opts.root)
    report = analyzer.analyze_project()

    # Print the summary section of the report.
    summary = report['summary']
    dep_stats = report['dependencies']
    print("\n=== 分析结果 ===")
    print(f"总文件数: {summary['total_files']}")
    print(f"模块类型分布: {summary['module_types']}")
    print(f"组件类型分布: {summary['component_types']}")
    print(f"总依赖关系数: {dep_stats['total_dependencies']}")
    print(f"有依赖的文件数: {dep_stats['files_with_dependencies']}")

    # Top-5 files by outgoing dependencies.
    print("\n最多依赖的文件:")
    for dep_path, dep_count in dep_stats['most_dependent_files'][:5]:
        print(f" {Path(dep_path).name}: {dep_count} 个依赖")

    # Top-5 files by incoming dependencies.
    print("\n被依赖最多的文件:")
    for dep_path, dep_count in dep_stats['most_depended_files'][:5]:
        print(f" {Path(dep_path).name}: 被 {dep_count} 个文件依赖")

    # Export unless the caller asked for analysis only.
    if not opts.analyze_only:
        metadata_path = analyzer.export_dataset(opts.output)
        print(f"\n元数据已保存到: {metadata_path}")


if __name__ == "__main__":
    main()